repo_name (stringlengths 5–92) | path (stringlengths 4–232) | copies (stringclasses, 19 values) | size (stringlengths 4–7) | content (stringlengths 721–1.04M) | license (stringclasses, 15 values) | hash (int64, -9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean (float64, 6.51–99.9) | line_max (int64, 15–997) | alpha_frac (float64, 0.25–0.97) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---|
rodrigolucianocosta/ProjectParking | ProjectParking/Parking/django-localflavor-1.1/tests/test_generic.py | 1 | 12114 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.exceptions import ValidationError, ImproperlyConfigured
from django.test import SimpleTestCase, TestCase
from django.utils import formats
from localflavor.generic.models import BICField, IBANField
from localflavor.generic.validators import BICValidator, IBANValidator
from localflavor.generic.forms import DateField, DateTimeField, SplitDateTimeField, BICFormField, IBANFormField
class DateTimeFieldTestCase(SimpleTestCase):
default_date_input_formats = (
'%Y-%m-%d', '%d/%m/%Y', '%d/%m/%y', '%b %d %Y', '%b %d, %Y',
'%d %b %Y', '%d %b, %Y', '%B %d %Y', '%B %d, %Y', '%d %B %Y',
'%d %B, %Y',
)
default_datetime_input_formats = (
'%Y-%m-%d %H:%M:%S', '%Y-%m-%d %H:%M', '%Y-%m-%d', '%d/%m/%Y %H:%M:%S',
'%d/%m/%Y %H:%M', '%d/%m/%Y', '%d/%m/%y %H:%M:%S', '%d/%m/%y %H:%M',
'%d/%m/%y',
)
def assertInputFormats(self, field, formats):
self.assertSequenceEqual(field.input_formats, formats)
class DateFieldTests(DateTimeFieldTestCase):
def setUp(self):
self.default_input_formats = self.default_date_input_formats
def test_init_no_input_formats(self):
field = DateField()
self.assertInputFormats(field, self.default_input_formats)
def test_init_empty_input_formats(self):
field = DateField(input_formats=())
self.assertInputFormats(field, self.default_input_formats)
def test_init_custom_input_formats(self):
input_formats = ('%m/%d/%Y', '%m/%d/%y')
field = DateField(input_formats=input_formats)
self.assertInputFormats(field, input_formats)
class DateTimeFieldTests(DateTimeFieldTestCase):
def setUp(self):
self.default_input_formats = self.default_datetime_input_formats
def test_init_no_input_formats(self):
field = DateTimeField()
self.assertInputFormats(field, self.default_input_formats)
def test_init_empty_input_formats(self):
field = DateTimeField(input_formats=())
self.assertInputFormats(field, self.default_input_formats)
def test_init_custom_input_formats(self):
input_formats = ('%m/%d/%Y %H:%M', '%m/%d/%y %H:%M')
field = DateTimeField(input_formats=input_formats)
self.assertInputFormats(field, input_formats)
class SplitDateTimeFieldTests(DateTimeFieldTestCase):
default_time_input_formats = formats.get_format_lazy('TIME_INPUT_FORMATS')
def test_init_no_input_formats(self):
field = SplitDateTimeField()
date_field, time_field = field.fields
self.assertInputFormats(date_field, self.default_date_input_formats)
self.assertInputFormats(time_field, self.default_time_input_formats)
def test_init_empty_input_formats(self):
field = SplitDateTimeField(input_date_formats=(),
input_time_formats=())
date_field, time_field = field.fields
self.assertInputFormats(date_field, self.default_date_input_formats)
self.assertInputFormats(time_field, ())
def test_init_custom_input_formats(self):
date_input_formats = ('%m/%d/%Y', '%m/%d/%y')
time_input_formats = ('%H:%M', '%H:%M:%S')
field = SplitDateTimeField(input_date_formats=date_input_formats,
input_time_formats=time_input_formats)
date_field, time_field = field.fields
self.assertInputFormats(date_field, date_input_formats)
self.assertInputFormats(time_field, time_input_formats)
class IBANTests(TestCase):
def test_iban_validator(self):
valid = [
'GB82WeST12345698765432',
'GB82 WEST 1234 5698 7654 32',
'GR1601101250000000012300695',
'GR16-0110-1250-0000-0001-2300-695',
'GB29NWBK60161331926819',
'GB29N-WB K6016-13319-26819',
'SA0380000000608010167519',
'SA0380 0 0000 06 0 8 0 1 0 1 6 7 519 ',
'CH9300762011623852957',
'IL620108000000099999999',
'EE982200221111099080',
]
invalid = {
'GB82WEST1234569876543': 'GB IBANs must contain 22 characters.',
'CA34CIBC123425345': 'CA is not a valid country code for IBAN.',
'GB29ÉWBK60161331926819': 'is not a valid character for IBAN.',
'SA0380000000608019167519': 'Not a valid IBAN.',
'EE012200221111099080': 'Not a valid IBAN.',
}
iban_validator = IBANValidator()
for iban in valid:
iban_validator(iban)
for iban in invalid:
self.assertRaisesMessage(ValidationError, invalid[iban], IBANValidator(), iban)
def test_iban_fields(self):
""" Test the IBAN model and form field. """
valid = {
'NL02ABNA0123456789': 'NL02ABNA0123456789',
'Nl02aBNa0123456789': 'NL02ABNA0123456789',
'NL02 ABNA 0123 4567 89': 'NL02ABNA0123456789',
'NL02-ABNA-0123-4567-89': 'NL02ABNA0123456789',
'NL91ABNA0417164300': 'NL91ABNA0417164300',
'NL91 ABNA 0417 1643 00': 'NL91ABNA0417164300',
'NL91-ABNA-0417-1643-00': 'NL91ABNA0417164300',
'MU17BOMM0101101030300200000MUR': 'MU17BOMM0101101030300200000MUR',
'MU17 BOMM 0101 1010 3030 0200 000M UR': 'MU17BOMM0101101030300200000MUR',
'MU 17BO MM01011010 3030-02 000-00M UR': 'MU17BOMM0101101030300200000MUR',
'BE68539007547034': 'BE68539007547034',
'BE68 5390 0754 7034': 'BE68539007547034',
'BE-685390075470 34': 'BE68539007547034',
}
invalid = {
'NL02ABNA012345678999': ['NL IBANs must contain 18 characters.'],
'NL02 ABNA 0123 4567 8999': ['NL IBANs must contain 18 characters.'],
'NL91ABNB0417164300': ['Not a valid IBAN.'],
'NL91 ABNB 0417 1643 00': ['Not a valid IBAN.'],
'MU17BOMM0101101030300200000MUR12345': [
'MU IBANs must contain 30 characters.',
'Ensure this value has at most 34 characters (it has 35).'],
'MU17 BOMM 0101 1010 3030 0200 000M UR12 345': [
'MU IBANs must contain 30 characters.',
'Ensure this value has at most 34 characters (it has 35).'],
# This IBAN is only valid if the Nordea extensions are turned on.
'EG1100006001880800100014553': ['EG is not a valid country code for IBAN.'],
'EG11 0000 6001 8808 0010 0014 553': ['EG is not a valid country code for IBAN.']
}
self.assertFieldOutput(IBANFormField, valid=valid, invalid=invalid)
# Test valid inputs for model field.
iban_model_field = IBANField()
for input, output in valid.items():
self.assertEqual(iban_model_field.clean(input, None), output)
# Invalid inputs for model field.
for input, errors in invalid.items():
with self.assertRaises(ValidationError) as context_manager:
iban_model_field.clean(input, None)
# The error messages for models are in a different order.
errors.reverse()
self.assertEqual(context_manager.exception.messages, errors)
def test_nordea_extensions(self):
""" Test a valid IBAN in the Nordea extensions. """
iban_validator = IBANValidator(use_nordea_extensions=True)
# Run the validator to ensure there are no ValidationErrors raised.
iban_validator('Eg1100006001880800100014553')
def test_form_field_formatting(self):
iban_form_field = IBANFormField()
self.assertEqual(iban_form_field.prepare_value('NL02ABNA0123456789'), 'NL02 ABNA 0123 4567 89')
self.assertEqual(iban_form_field.prepare_value('NL02 ABNA 0123 4567 89'), 'NL02 ABNA 0123 4567 89')
self.assertIsNone(iban_form_field.prepare_value(None))
def test_include_countries(self):
""" Test the IBAN model and form include_countries feature. """
include_countries = ('NL', 'BE', 'LU')
valid = {
'NL02ABNA0123456789': 'NL02ABNA0123456789',
'BE68539007547034': 'BE68539007547034',
'LU280019400644750000': 'LU280019400644750000'
}
invalid = {
# This IBAN is valid but not for the configured countries.
'GB82WEST12345698765432': ['GB IBANs are not allowed in this field.']
}
self.assertFieldOutput(IBANFormField, field_kwargs={'include_countries': include_countries},
valid=valid, invalid=invalid)
# Test valid inputs for model field.
iban_model_field = IBANField(include_countries=include_countries)
for input, output in valid.items():
self.assertEqual(iban_model_field.clean(input, None), output)
# Invalid inputs for model field.
for input, errors in invalid.items():
with self.assertRaises(ValidationError) as context_manager:
iban_model_field.clean(input, None)
# The error messages for models are in a different order.
errors.reverse()
self.assertEqual(context_manager.exception.messages, errors)
def test_misconfigured_include_countries(self):
""" Test that an IBAN field or model raises an error when asked to validate a country not part of IBAN.
"""
# Test an unassigned ISO 3166-1 country code so that the tests will work even if a country joins IBAN.
self.assertRaises(ImproperlyConfigured, IBANValidator, include_countries=('JJ',))
self.assertRaises(ImproperlyConfigured, IBANValidator, use_nordea_extensions=True, include_countries=('JJ',))
# Test a Nordea IBAN when Nordea extensions are turned off.
self.assertRaises(ImproperlyConfigured, IBANValidator, include_countries=('AO',))
class BICTests(TestCase):
def test_bic_validator(self):
valid = [
'DEUTDEFF',
'deutdeff',
'NEDSZAJJXXX',
'NEDSZAJJxxx',
'DABADKKK',
'daBadKkK',
'UNCRIT2B912',
'DSBACNBXSHA',
None,
]
invalid = {
'NEDSZAJJXX': 'BIC codes have either 8 or 11 characters.',
'': 'BIC codes have either 8 or 11 characters.',
'CIBCJJH2': 'JJ is not a valid country code.',
'DÉUTDEFF': 'is not a valid institution code.'
}
bic_validator = BICValidator()
for bic in valid:
bic_validator(bic)
for bic in invalid:
self.assertRaisesMessage(ValidationError, invalid[bic], BICValidator(), bic)
def test_form_field_formatting(self):
bic_form_field = BICFormField()
self.assertEqual(bic_form_field.prepare_value('deutdeff'), 'DEUTDEFF')
self.assertIsNone(bic_form_field.prepare_value(None))
self.assertEqual(bic_form_field.to_python(None), '')
def test_bic_model_field(self):
valid = {
'DEUTDEFF': 'DEUTDEFF',
'NEDSZAJJXXX': 'NEDSZAJJXXX',
'DABADKKK': 'DABADKKK',
'UNCRIT2B912': 'UNCRIT2B912',
'DSBACNBXSHA': 'DSBACNBXSHA'
}
invalid = {
'NEDSZAJJXX': ['BIC codes have either 8 or 11 characters.'],
'CIBCJJH2': ['JJ is not a valid country code.'],
'D3UTDEFF': ['D3UT is not a valid institution code.']
}
self.assertFieldOutput(BICFormField, valid=valid, invalid=invalid)
bic_model_field = BICField()
# Test valid inputs for model field.
for input, output in valid.items():
self.assertEqual(bic_model_field.clean(input, None), output)
self.assertIsNone(bic_model_field.to_python(None))
# Invalid inputs for model field.
for input, errors in invalid.items():
with self.assertRaises(ValidationError) as context_manager:
bic_model_field.clean(input, None)
self.assertEqual(errors, context_manager.exception.messages)
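# A minimal usage sketch of the validators exercised above (assumes a
# configured Django settings module; the sample values are illustrative):
#
# from localflavor.generic.validators import IBANValidator, BICValidator
# IBANValidator()('NL02ABNA0123456789') # passes silently when valid
# BICValidator()('DEUTDEFF') # raises ValidationError when invalid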
| mpl-2.0 | 2,808,876,877,879,975,000 | 38.581699 | 117 | 0.621697 | false |
nprapps/dailygraphics | fabfile/flat.py | 1 | 2586 |
#!/usr/bin/env python
# _*_ coding:utf-8 _*_
import copy
from fnmatch import fnmatch
import hashlib
import mimetypes
import os
from boto.s3.key import Key
import app_config
import utils
def deploy_file(src, dst, headers={}):
"""
Deploy a single file to S3, if the local version is different.
"""
bucket = utils.get_bucket(app_config.S3_BUCKET['bucket_name'])
k = bucket.get_key(dst)
s3_md5 = None
if k:
s3_md5 = k.etag.strip('"')
else:
k = Key(bucket)
k.key = dst
file_headers = copy.copy(headers)
if app_config.S3_BUCKET == app_config.STAGING_S3_BUCKET:
policy = 'private'
else:
policy = 'public-read'
if 'Content-Type' not in headers:
file_headers['Content-Type'] = mimetypes.guess_type(src)[0]
if file_headers['Content-Type'] == 'text/html':
# Force character encoding header
file_headers['Content-Type'] = '; '.join([
file_headers['Content-Type'],
'charset=utf-8'])
with open(src, 'rb') as f:
local_md5 = hashlib.md5()
local_md5.update(f.read())
local_md5 = local_md5.hexdigest()
if local_md5 == s3_md5:
print 'Skipping %s (has not changed)' % src
else:
print 'Uploading %s --> %s' % (src, dst)
k.set_contents_from_filename(src, file_headers, policy=policy)
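# A minimal usage sketch (paths and headers are illustrative; assumes
# app_config.S3_BUCKET is configured and boto can find AWS credentials):
#
# deploy_file('www/graphic/index.html', 'graphic/index.html',
# headers={'Cache-Control': 'max-age=5'})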
def deploy_folder(src, dst, headers={}, ignore=[]):
"""
Deploy a folder to S3, checking each file to see if it has changed.
"""
to_deploy = []
for local_path, subdirs, filenames in os.walk(src, topdown=True):
rel_path = os.path.relpath(local_path, src)
for name in filenames:
if name.startswith('.'):
continue
src_path = os.path.join(local_path, name)
skip = False
for pattern in ignore:
if fnmatch(src_path, pattern):
skip = True
break
if skip:
continue
if rel_path == '.':
dst_path = os.path.join(dst, name)
else:
dst_path = os.path.join(dst, rel_path, name)
to_deploy.append((src_path, dst_path))
for src, dst in to_deploy:
deploy_file(src, dst, headers)
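# Example (illustrative paths; ignore patterns use fnmatch syntax and are
# matched against the local source path):
#
# deploy_folder('www/graphic', 'graphic', ignore=['*.pyc', '*/assets/*'])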
def delete_folder(dst):
"""
Delete a folder from S3.
"""
bucket = utils.get_bucket(app_config.S3_BUCKET['bucket_name'])
for key in bucket.list(prefix='%s/' % dst):
print 'Deleting %s' % (key.key)
key.delete()
| mit | -9,011,076,063,537,600,000 | 24.60396 | 71 | 0.549884 | false |
Diblo/Pikaptcha | pikaptcha/console.py | 1 | 9224 |
import argparse
import sys
import pikaptcha
from pikaptcha.ptcexceptions import *
from pikaptcha.tos import *
from pikaptcha.gmailv import *
from pikaptcha.url import *
from pgoapi.exceptions import AuthException, ServerSideRequestThrottlingException, NotLoggedInException
import pprint
import threading
import getopt
import urllib2
import imaplib
import string
import re
def parse_arguments(args):
"""Parse the command line arguments for the console commands.
Args:
args (List[str]): List of string arguments to be parsed.
Returns:
Namespace: Namespace with the parsed arguments.
"""
parser = argparse.ArgumentParser(
description='Pokemon Trainer Club Account Creator'
)
parser.add_argument(
'-u', '--username', type=str, default=None,
help='Username for the new account (defaults to random string).'
)
parser.add_argument(
'-p', '--password', type=str, default=None,
help='Password for the new account (defaults to random string).'
)
parser.add_argument(
'-e', '--email', type=str, default=None,
help='Email for the new account (defaults to random email-like string).'
)
parser.add_argument(
'-m', '--plusmail', type=str, default=None,
help='Email template for the new account. Use something like aaaa@gmail.com (defaults to nothing).'
)
parser.add_argument(
'-av', '--autoverify', type=bool, default=False,
help='Append the argument -av True if you want to use autoverify with +mail.'
)
parser.add_argument(
'-b', '--birthday', type=str, default=None,
help='Birthday for the new account. Must be YYYY-MM-DD. (defaults to a random birthday).'
)
parser.add_argument(
'-c','--count', type=int,default=1,
help='Number of accounts to generate.'
)
parser.add_argument(
'-r','--recaptcha', type=str, default=None,
help='Your 2captcha key from settings'
)
parser.add_argument(
'-gm', '--googlemail', type=str, default=None,
help='This is the mail for the google account when auto verify is activated (only required if plus mail is different from google mail)'
)
parser.add_argument(
'-gp','--googlepass', type=str, default=None,
help='This is the password for the google account and is required to activate auto verify when using the plus mail'
)
parser.add_argument(
'-t','--textfile', type=str, default="usernames.txt",
help='This is the location you want to save usernames.txt'
)
parser.add_argument(
'-of','--outputformat', type=str, default="compact",
help='If you choose compact, you get user:pass. If you choose pkgo, you get -u user -p pass. If you choose pkgocsv, you get ptc,user,pass'
)
parser.add_argument(
'-it','--inputtext', type=str, default=None,
help='This is the location you want to read usernames in the format user:pass'
)
parser.add_argument(
'-sn','--startnum', type=int, default=None,
help='If you specify both -u and -c, it will append a number to the end. This allows you to choose where to start from'
)
parser.add_argument(
'-ct','--captchatimeout', type=int, default=1000,
help='Allows you to set the time before a captcha attempt times out and that account is skipped (forfeiting $0.003).'
)
parser.add_argument(
'-l','--location', type=str, default="40.7127837,-74.005941",
help='This is the location that will be spoofed when we verify TOS'
)
parser.add_argument(
'-px','--proxy', type=str, default=None,
help='Proxy to be used when accepting the Terms of Services. Must be host:port (ex. 1.1.1.1:80). Must be a HTTPS proxy.'
)
return parser.parse_args(args)
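# Example invocations (hypothetical values; the console-script name depends
# on how the package is installed, so `pikaptcha` below is an assumption):
#
# pikaptcha -c 5 -m aaaa@gmail.com -r YOUR_2CAPTCHA_KEY
# pikaptcha -u trainer -p hunter22 -c 10 -sn 1 -of pkgo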
def _verify_autoverify_email(settings):
if (settings['args'].googlepass is not None and settings['args'].plusmail == None and settings['args'].googlemail == None):
raise PTCInvalidEmailException("You have to specify a plusmail (--plusmail or -m) or a google email (--googlemail or -gm) to use autoverification.")
def _verify_plusmail_format(settings):
if (settings['args'].plusmail != None and not re.match(r"(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)", settings['args'].plusmail)):
raise PTCInvalidEmailException("Invalid email format to use with plusmail.")
def _verify_twocaptcha_balance(settings):
if (settings['args'].recaptcha != None and settings['balance'] == 'ERROR_KEY_DOES_NOT_EXIST'):
raise PTCTwocaptchaException("2captcha key does not exist.")
if (settings['args'].recaptcha != None and float(settings['balance']) < float(settings['args'].count)*0.003):
raise PTCTwocaptchaException("It does not seem like you have enough balance for this run. Lower the count or increase your balance.")
def _verify_settings(settings):
verifications=[_verify_autoverify_email, _verify_plusmail_format, _verify_twocaptcha_balance]
for verification in verifications:
try:
verification(settings)
except PTCException, e:
print e.message
print "Terminating."
sys.exit()
return True
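# _verify_settings() expects {'args': <argparse.Namespace>,
# 'balance': <str>}, where 'balance' is the raw 2captcha balance response;
# see the call in entry() below.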
def entry():
"""Main entry point for the package console commands"""
args = parse_arguments(sys.argv[1:])
captchabal = None
if args.recaptcha != None:
captchabal = "Failed"
while(captchabal == "Failed"):
captchabal = openurl("http://2captcha.com/res.php?key=" + args.recaptcha + "&action=getbalance")
print("Your 2captcha balance is: " + captchabal)
print("This run will cost you approximately: " + str(float(args.count)*0.003))
username = args.username
if args.inputtext != None:
print("Reading accounts from: " + args.inputtext)
lines = [line.rstrip('\n') for line in open(args.inputtext, "r")]
args.count = len(lines)
if _verify_settings({'args':args, 'balance':captchabal}):
if (args.googlepass is not None):
with open(args.textfile, "a") as ulist:
ulist.write("The following accounts use the email address: " + args.plusmail + "\n")
ulist.close()
for x in range(0,args.count):
print("Making account #" + str(x+1))
if ((args.username != None) and (args.count != 1) and (args.inputtext == None)):
if(args.startnum == None):
username = args.username + str(x+1)
else:
username = args.username + str(args.startnum+x)
if (args.inputtext != None):
username = ((lines[x]).split(":"))[0]
args.password = ((lines[x]).split(":"))[1]
error_msg = None
try:
try:
account_info = pikaptcha.random_account(username, args.password, args.email, args.birthday, args.plusmail, args.recaptcha, args.captchatimeout)
print(' Username: {}'.format(account_info["username"]))
print(' Password: {}'.format(account_info["password"]))
print(' Email : {}'.format(account_info["email"]))
# Accept Terms Service
accept_tos(account_info["username"], account_info["password"], args.location, args.proxy)
# Verify email
if (args.googlepass is not None):
if (args.googlemail is not None):
email_verify(args.googlemail, args.googlepass)
else:
email_verify(args.plusmail, args.googlepass)
# Append usernames
with open(args.textfile, "a") as ulist:
if args.outputformat == "pkgo":
ulist.write(" -u " + account_info["username"]+" -p "+account_info["password"]+"")
elif args.outputformat == "pkgocsv":
ulist.write("ptc,"+account_info["username"]+","+account_info["password"]+"\n")
else:
ulist.write(account_info["username"]+":"+account_info["password"]+"\n")
ulist.close()
# Handle account creation failure exceptions
except PTCInvalidPasswordException as err:
error_msg = 'Invalid password: {}'.format(err)
except (PTCInvalidEmailException, PTCInvalidNameException) as err:
error_msg = 'Failed to create account! {}'.format(err)
except PTCException as err:
error_msg = 'Failed to create account! General error: {}'.format(err)
except Exception:
import traceback
error_msg = "Generic Exception: " + traceback.format_exc()
if error_msg:
if args.count == 1:
sys.exit(error_msg)
print(error_msg)
with open(args.textfile, "a") as ulist:
ulist.write("\n")
ulist.close()
| gpl-3.0 | -3,893,772,485,044,673,500 | 44.215686 | 163 | 0.591067 | false |
csaez/mauto | mauto/tests/main_tests.py | 1 | 1059 |
import mauto
from nose import with_setup
def setup():
return mauto.new_macro("testsuite")
def setup_in_memory():
return mauto.new_macro("testsuite", save=False)
def teardown():
mauto.remove_macro("testsuite")
@with_setup(setup, teardown)
def test_list_macros():
return len(mauto.list_macros()) >= 1
@with_setup(setup, teardown)
def test_new_macro():
mauto.remove_macro("testsuite")
assert mauto.new_macro("testsuite")
@with_setup(setup, teardown)
def test_get_macro():
return mauto.get_macro("testsuite")
@with_setup(setup, teardown)
def test_remove_macro():
mauto.remove_macro("testsuite")
@with_setup(setup, teardown)
def test_save_macro():
assert mauto.save_macro("testsuite")
def test_show():
assert mauto.show is not None
@with_setup(setup, teardown)
def test_get_filepath():
fp = mauto.get_filepath("testsuite")
print fp
assert "testsuite.json" in fp
@with_setup(setup_in_memory, teardown)
def test_get_filepath2():
fp = mauto.get_filepath("testsuite")
assert fp is None
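# These are nose-style tests; a typical run (assuming nose is installed):
#
# nosetests mauto/tests/main_tests.py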
| mit | 2,857,118,178,507,721,700 | 17.578947 | 51 | 0.691218 | false |
ScottBuchanan/eden | modules/s3db/deploy.py | 1 | 114755 |
# -*- coding: utf-8 -*-
""" Sahana Eden Deployments Model
@copyright: 2011-2015 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
__all__ = ("S3DeploymentModel",
"S3DeploymentAlertModel",
"deploy_rheader",
"deploy_apply",
"deploy_alert_select_recipients",
"deploy_Inbox",
"deploy_response_select_mission",
)
try:
# try stdlib (Python 2.6)
import json
except ImportError:
try:
# try external module
import simplejson as json
except:
# fallback to pure-Python module
import gluon.contrib.simplejson as json
from gluon import *
from ..s3 import *
from s3layouts import S3AddResourceLink
# =============================================================================
class S3DeploymentModel(S3Model):
names = ("deploy_mission",
"deploy_mission_id",
"deploy_mission_document",
"deploy_application",
"deploy_assignment",
"deploy_assignment_appraisal",
"deploy_assignment_experience",
)
def model(self):
T = current.T
db = current.db
add_components = self.add_components
configure = self.configure
crud_strings = current.response.s3.crud_strings
define_table = self.define_table
super_link = self.super_link
messages = current.messages
NONE = messages["NONE"]
UNKNOWN_OPT = messages.UNKNOWN_OPT
human_resource_id = self.hrm_human_resource_id
# ---------------------------------------------------------------------
# Mission
#
mission_status_opts = {1 : T("Closed"),
2 : T("Open")
}
tablename = "deploy_mission"
define_table(tablename,
super_link("doc_id", "doc_entity"),
Field("name",
label = T("Name"),
represent = self.deploy_mission_name_represent,
requires = IS_NOT_EMPTY(),
),
# @ToDo: Link to location via link table
# link table could be event_event_location for IFRC (would still allow 1 multi-country event to have multiple missions)
self.gis_location_id(),
# @ToDo: Link to event_type via event_id link table instead of duplicating
self.event_type_id(),
self.org_organisation_id(),
Field("code", length = 24,
represent = lambda v: s3_unicode(v) if v else NONE,
),
Field("status", "integer",
default = 2,
label = T("Status"),
represent = lambda opt: \
mission_status_opts.get(opt,
UNKNOWN_OPT),
requires = IS_IN_SET(mission_status_opts),
),
# @todo: change into real fields written onaccept?
Field.Method("hrquantity",
deploy_mission_hrquantity),
Field.Method("response_count",
deploy_mission_response_count),
s3_comments(),
*s3_meta_fields())
# CRUD Form
crud_form = S3SQLCustomForm("name",
"event_type_id",
"location_id",
"code",
"status",
# Files
S3SQLInlineComponent(
"document",
name = "file",
label = T("Files"),
fields = ["file", "comments"],
filterby = dict(field = "file",
options = "",
invert = True,
)
),
# Links
S3SQLInlineComponent(
"document",
name = "url",
label = T("Links"),
fields = ["url", "comments"],
filterby = dict(field = "url",
options = None,
invert = True,
)
),
#S3SQLInlineComponent("document",
#name = "file",
#label = T("Attachments"),
#fields = ["file",
#"comments",
#],
#),
"comments",
"created_on",
)
# Profile
list_layout = deploy_MissionProfileLayout()
alert_widget = dict(label = "Alerts",
insert = lambda r, list_id, title, url: \
A(title,
_href=r.url(component="alert",
method="create"),
_class="action-btn profile-add-btn"),
label_create = "Create Alert",
type = "datalist",
list_fields = ["modified_on",
"mission_id",
"message_id",
"subject",
"body",
],
tablename = "deploy_alert",
context = "mission",
list_layout = list_layout,
pagesize = 10,
)
list_fields = ["created_on",
"mission_id",
"comments",
"human_resource_id$id",
"human_resource_id$person_id",
"human_resource_id$organisation_id",
"message_id$body",
"message_id$from_address",
"message_id$attachment.document_id$file",
]
response_widget = dict(label = "Responses",
insert = False,
type = "datalist",
tablename = "deploy_response",
# Can't be 'response' as this clobbers web2py global
function = "response_message",
list_fields = list_fields,
context = "mission",
list_layout = list_layout,
# The popup datalist isn't currently functional (needs card layout applying) and not ideal UX anyway
#pagesize = 10,
pagesize = None,
)
hr_label = current.deployment_settings.get_deploy_hr_label()
if hr_label == "Member":
label = "Members Deployed"
label_create = "Deploy New Member"
elif hr_label == "Staff":
label = "Staff Deployed"
label_create = "Deploy New Staff"
elif hr_label == "Volunteer":
label = "Volunteers Deployed"
label_create = "Deploy New Volunteer"
assignment_widget = dict(label = label,
insert = lambda r, list_id, title, url: \
A(title,
_href=r.url(component="assignment",
method="create"),
_class="action-btn profile-add-btn"),
label_create = label_create,
tablename = "deploy_assignment",
type = "datalist",
#type = "datatable",
#actions = dt_row_actions,
list_fields = [
"human_resource_id$id",
"human_resource_id$person_id",
"human_resource_id$organisation_id",
"start_date",
"end_date",
"job_title_id",
"job_title",
"appraisal.rating",
"mission_id",
],
context = "mission",
list_layout = list_layout,
pagesize = None, # all records
)
docs_widget = dict(label = "Documents & Links",
label_create = "Add New Document / Link",
type = "datalist",
tablename = "doc_document",
context = ("~.doc_id", "doc_id"),
icon = "attachment",
# Default renderer:
#list_layout = s3db.doc_document_list_layouts,
)
# Table configuration
profile = URL(c="deploy", f="mission", args=["[id]", "profile"])
configure(tablename,
create_next = profile,
crud_form = crud_form,
delete_next = URL(c="deploy", f="mission", args="summary"),
filter_widgets = [
S3TextFilter(["name",
"code",
"event_type_id$name",
],
label=T("Search")
),
S3LocationFilter("location_id",
label=messages.COUNTRY,
widget="multiselect",
levels=["L0"],
hidden=True
),
S3OptionsFilter("event_type_id",
widget="multiselect",
hidden=True
),
S3OptionsFilter("status",
options=mission_status_opts,
hidden=True
),
S3DateFilter("created_on",
hide_time=True,
hidden=True
),
],
list_fields = ["name",
(T("Date"), "created_on"),
"event_type_id",
(T("Country"), "location_id"),
"code",
(T("Responses"), "response_count"),
(T(label), "hrquantity"),
"status",
],
orderby = "deploy_mission.created_on desc",
profile_cols = 1,
profile_header = lambda r: \
deploy_rheader(r, profile=True),
profile_widgets = [alert_widget,
response_widget,
assignment_widget,
docs_widget,
],
summary = [{"name": "rheader",
"common": True,
"widgets": [{"method": self.add_button}]
},
{"name": "table",
"label": "Table",
"widgets": [{"method": "datatable"}]
},
{"name": "report",
"label": "Report",
"widgets": [{"method": "report",
"ajax_init": True}],
},
{"name": "map",
"label": "Map",
"widgets": [{"method": "map",
"ajax_init": True}],
},
],
super_entity = "doc_entity",
update_next = profile,
)
# Components
add_components(tablename,
deploy_assignment = "mission_id",
deploy_alert = "mission_id",
deploy_response = "mission_id",
)
# CRUD Strings
crud_strings[tablename] = Storage(
label_create = T("Create Mission"),
title_display = T("Mission"),
title_list = T("Missions"),
title_update = T("Edit Mission Details"),
title_upload = T("Import Missions"),
label_list_button = T("List Missions"),
label_delete_button = T("Delete Mission"),
msg_record_created = T("Mission added"),
msg_record_modified = T("Mission Details updated"),
msg_record_deleted = T("Mission deleted"),
msg_list_empty = T("No Missions currently registered"))
# Reusable field
represent = S3Represent(lookup = tablename,
linkto = URL(f="mission",
args=["[id]", "profile"]),
show_link = True)
mission_id = S3ReusableField("mission_id", "reference %s" % tablename,
label = T("Mission"),
ondelete = "CASCADE",
represent = represent,
requires = IS_ONE_OF(db,
"deploy_mission.id",
represent),
)
# ---------------------------------------------------------------------
# Link table to link documents to missions, responses or assignments
#
tablename = "deploy_mission_document"
define_table(tablename,
mission_id(),
self.msg_message_id(),
self.doc_document_id(),
*s3_meta_fields())
# ---------------------------------------------------------------------
# Application of human resources
# - agreement that an HR is generally available for assignments
# - can come with certain restrictions
#
tablename = "deploy_application"
define_table(tablename,
human_resource_id(empty = False,
label = T(hr_label)),
Field("active", "boolean",
default = True,
label = T("Roster Status"),
represent = lambda opt: T("active") if opt else T("inactive"),
),
*s3_meta_fields())
configure(tablename,
delete_next = URL(c="deploy", f="human_resource", args="summary"),
)
# ---------------------------------------------------------------------
# Assignment of human resources
# - actual assignment of an HR to a mission
#
tablename = "deploy_assignment"
define_table(tablename,
mission_id(),
human_resource_id(empty = False,
label = T(hr_label)),
self.hrm_job_title_id(),
Field("job_title",
label = T("Position"),
),
# These get copied to hrm_experience
# rest of fields may not be filled-out, but are in attachments
s3_date("start_date", # Only field visible when deploying from Mission profile
label = T("Start Date"),
),
s3_date("end_date",
label = T("End Date"),
start_field = "deploy_assignment_start_date",
default_interval = 12,
),
*s3_meta_fields())
# Table configuration
configure(tablename,
context = {"mission": "mission_id",
},
onaccept = self.deploy_assignment_onaccept,
filter_widgets = [
S3TextFilter(["human_resource_id$person_id$first_name",
"human_resource_id$person_id$middle_name",
"human_resource_id$person_id$last_name",
"mission_id$code",
],
label=T("Search")
),
S3OptionsFilter("mission_id$event_type_id",
widget="multiselect",
hidden=True
),
S3LocationFilter("mission_id$location_id",
label=messages.COUNTRY,
widget="multiselect",
levels=["L0"],
hidden=True
),
S3OptionsFilter("job_title_id",
widget="multiselect",
hidden=True,
),
S3DateFilter("start_date",
hide_time=True,
hidden=True,
),
],
summary = [
{"name": "table",
"label": "Table",
"widgets": [{"method": "datatable"}]
},
{"name": "report",
"label": "Report",
"widgets": [{"method": "report",
"ajax_init": True}]
},
],
)
# Components
add_components(tablename,
hrm_appraisal = {"name": "appraisal",
"link": "deploy_assignment_appraisal",
"joinby": "assignment_id",
"key": "appraisal_id",
"autodelete": False,
},
)
assignment_id = S3ReusableField("assignment_id",
"reference %s" % tablename,
ondelete = "CASCADE")
# ---------------------------------------------------------------------
# Link Assignments to Appraisals
#
tablename = "deploy_assignment_appraisal"
define_table(tablename,
assignment_id(empty = False),
Field("appraisal_id", self.hrm_appraisal),
*s3_meta_fields())
configure(tablename,
ondelete_cascade = \
self.deploy_assignment_appraisal_ondelete_cascade,
)
# ---------------------------------------------------------------------
# Link Assignments to Experience
#
tablename = "deploy_assignment_experience"
define_table(tablename,
assignment_id(empty = False),
Field("experience_id", self.hrm_experience),
*s3_meta_fields())
configure(tablename,
ondelete_cascade = \
self.deploy_assignment_experience_ondelete_cascade,
)
# ---------------------------------------------------------------------
# Assignment of assets
#
# @todo: deploy_asset_assignment
# ---------------------------------------------------------------------
# Pass names back to global scope (s3.*)
#
return dict(deploy_mission_id = mission_id,
)
# -------------------------------------------------------------------------
def defaults(self):
"""
Safe defaults for model-global names in case module is disabled
"""
dummy = S3ReusableField("dummy_id", "integer",
readable = False,
writable = False)
return dict(deploy_mission_id = lambda **attr: dummy("mission_id"),
)
# -------------------------------------------------------------------------
@staticmethod
def add_button(r, widget_id=None, visible=True, **attr):
# Check permission only here, i.e. when the summary is
# actually being rendered:
if current.auth.s3_has_permission("create", r.tablename):
return A(S3Method.crud_string(r.tablename,
"label_create"),
_href=r.url(method="create", id=0, vars={}),
_class="action-btn",
)
else:
return ""
# -------------------------------------------------------------------------
@staticmethod
def deploy_mission_name_represent(name):
table = current.s3db.deploy_mission
mission = current.db(table.name == name).select(table.id,
limitby=(0, 1)
).first()
if not mission:
return name
return A(name,
_href=URL(c="deploy", f="mission",
args=[mission.id, "profile"]))
# -------------------------------------------------------------------------
@staticmethod
def deploy_assignment_onaccept(form):
"""
Create/update linked hrm_experience record for assignment
@param form: the form
"""
db = current.db
s3db = current.s3db
form_vars = form.vars
assignment_id = form_vars.id
fields = ("human_resource_id",
"mission_id",
"job_title",
"job_title_id",
"start_date",
"end_date",
)
if any(key not in form_vars for key in fields):
# Need to reload the record
atable = db.deploy_assignment
query = (atable.id == assignment_id)
qfields = [atable[f] for f in fields]
row = db(query).select(limitby=(0, 1), *qfields).first()
if row:
data = dict((k, row[k]) for k in fields)
else:
# No such record
return
else:
# Can use form vars
data = dict((k, form_vars[k]) for k in fields)
hr = mission = None
# Lookup person details
human_resource_id = data.pop("human_resource_id")
if human_resource_id:
hrtable = s3db.hrm_human_resource
hr = db(hrtable.id == human_resource_id).select(hrtable.person_id,
hrtable.type,
limitby=(0, 1)
).first()
if hr:
data["person_id"] = hr.person_id
data["employment_type"] = hr.type
# Lookup mission details
mission_id = data.pop("mission_id")
if mission_id:
mtable = db.deploy_mission
mission = db(mtable.id == mission_id).select(mtable.location_id,
mtable.organisation_id,
limitby=(0, 1)
).first()
if mission:
data["location_id"] = mission.location_id
data["organisation_id"] = mission.organisation_id
if hr and mission:
etable = s3db.hrm_experience
# Lookup experience record for this assignment
ltable = s3db.deploy_assignment_experience
query = ltable.assignment_id == assignment_id
link = db(query).select(ltable.experience_id,
limitby=(0, 1)
).first()
if link:
# Update experience
db(etable.id == link.experience_id).update(**data)
else:
# Create experience record
experience_id = etable.insert(**data)
# Create link
ltable = db.deploy_assignment_experience
ltable.insert(assignment_id = assignment_id,
experience_id = experience_id,
)
return
# -------------------------------------------------------------------------
@staticmethod
def deploy_assignment_experience_ondelete_cascade(row, tablename=None):
"""
Remove linked hrm_experience record
@param row: the link to be deleted
@param tablename: the tablename (ignored)
"""
s3db = current.s3db
# Lookup experience ID
table = s3db.deploy_assignment_experience
link = current.db(table.id == row.id).select(table.id,
table.experience_id,
limitby=(0, 1)).first()
if not link:
return
else:
# Prevent infinite cascade
link.update_record(experience_id=None)
s3db.resource("hrm_experience", id=link.experience_id).delete()
# -------------------------------------------------------------------------
@staticmethod
def deploy_assignment_appraisal_ondelete_cascade(row, tablename=None):
"""
Remove linked hrm_appraisal record
@param row: the link to be deleted
@param tablename: the tablename (ignored)
"""
s3db = current.s3db
# Lookup experience ID
table = s3db.deploy_assignment_appraisal
link = current.db(table.id == row.id).select(table.id,
table.appraisal_id,
limitby=(0, 1)).first()
if not link:
return
else:
# Prevent infinite cascade
link.update_record(appraisal_id=None)
s3db.resource("hrm_appraisal", id=link.appraisal_id).delete()
# =============================================================================
class S3DeploymentAlertModel(S3Model):
names = ("deploy_alert",
"deploy_alert_recipient",
"deploy_response",
)
def model(self):
T = current.T
db = current.db
add_components = self.add_components
configure = self.configure
crud_strings = current.response.s3.crud_strings
define_table = self.define_table
NONE = current.messages["NONE"]
human_resource_id = self.hrm_human_resource_id
message_id = self.msg_message_id
mission_id = self.deploy_mission_id
hr_label = current.deployment_settings.get_deploy_hr_label()
contact_method_opts = {1: T("Email"),
2: T("SMS"),
#3: T("Twitter"),
#9: T("All"),
9: T("Both"),
}
# ---------------------------------------------------------------------
# Alert
# - also the PE representing its Recipients
#
tablename = "deploy_alert"
define_table(tablename,
self.super_link("pe_id", "pr_pentity"),
mission_id(
requires = IS_ONE_OF(db,
"deploy_mission.id",
S3Represent(lookup="deploy_mission"),
filterby="status",
filter_opts=(2,),
),
),
Field("contact_method", "integer",
default = 1,
label = T("Send By"),
represent = lambda opt: \
contact_method_opts.get(opt, NONE),
requires = IS_IN_SET(contact_method_opts),
),
Field("subject", length=78, # RFC 2822
label = T("Subject"),
# Not used by SMS
#requires = IS_NOT_EMPTY(),
),
Field("body", "text",
label = T("Message"),
represent = lambda v: v or NONE,
requires = IS_NOT_EMPTY(),
),
# Link to the Message once sent
message_id(readable = False),
*s3_meta_fields())
# CRUD Strings
crud_strings[tablename] = Storage(
label_create = T("Create Alert"),
title_display = T("Alert Details"),
title_list = T("Alerts"),
title_update = T("Edit Alert Details"),
title_upload = T("Import Alerts"),
label_list_button = T("List Alerts"),
label_delete_button = T("Delete Alert"),
msg_record_created = T("Alert added"),
msg_record_modified = T("Alert Details updated"),
msg_record_deleted = T("Alert deleted"),
msg_list_empty = T("No Alerts currently registered"))
# CRUD Form
crud_form = S3SQLCustomForm("mission_id",
"contact_method",
"subject",
"body",
"modified_on",
)
# Table Configuration
configure(tablename,
super_entity = "pr_pentity",
context = {"mission": "mission_id"},
crud_form = crud_form,
list_fields = ["mission_id",
"contact_method",
"subject",
"body",
"alert_recipient.human_resource_id",
],
)
# Components
add_components(tablename,
deploy_alert_recipient = {"name": "recipient",
"joinby": "alert_id",
},
hrm_human_resource = {"name": "select",
"link": "deploy_alert_recipient",
"joinby": "alert_id",
"key": "human_resource_id",
"autodelete": False,
},
)
# Custom method to send alerts
self.set_method("deploy", "alert",
method = "send",
action = self.deploy_alert_send)
# Reusable field
represent = S3Represent(lookup=tablename)
alert_id = S3ReusableField("alert_id", "reference %s" % tablename,
label = T("Alert"),
ondelete = "CASCADE",
represent = represent,
requires = IS_ONE_OF(db, "deploy_alert.id",
represent),
)
# ---------------------------------------------------------------------
# Recipients of the Alert
#
tablename = "deploy_alert_recipient"
define_table(tablename,
alert_id(),
human_resource_id(empty = False,
label = T(hr_label)),
*s3_meta_fields())
# CRUD Strings
crud_strings[tablename] = Storage(
label_create = T("Add Recipient"),
title_display = T("Recipient Details"),
title_list = T("Recipients"),
title_update = T("Edit Recipient Details"),
title_upload = T("Import Recipients"),
label_list_button = T("List Recipients"),
label_delete_button = T("Delete Recipient"),
msg_record_created = T("Recipient added"),
msg_record_modified = T("Recipient Details updated"),
msg_record_deleted = T("Recipient deleted"),
msg_list_empty = T("No Recipients currently defined"))
# ---------------------------------------------------------------------
# Responses to Alerts
#
tablename = "deploy_response"
define_table(tablename,
mission_id(),
human_resource_id(label = T(hr_label)),
message_id(label = T("Message"),
writable = False),
s3_comments(),
*s3_meta_fields())
crud_form = S3SQLCustomForm("mission_id",
"human_resource_id",
"message_id",
"comments",
# @todo:
#S3SQLInlineComponent("document"),
)
# Table Configuration
configure(tablename,
context = {"mission": "mission_id"},
crud_form = crud_form,
#editable = False,
insertable = False,
update_onaccept = self.deploy_response_update_onaccept,
)
# CRUD Strings
NO_MESSAGES = T("No Messages found")
crud_strings[tablename] = Storage(
title_display = T("Response Message"),
title_list = T("Response Messages"),
title_update = T("Edit Response Details"),
label_list_button = T("All Response Messages"),
label_delete_button = T("Delete Message"),
msg_record_deleted = T("Message deleted"),
msg_no_match = NO_MESSAGES,
msg_list_empty = NO_MESSAGES)
# ---------------------------------------------------------------------
# Pass names back to global scope (s3.*)
#
return dict()
# -------------------------------------------------------------------------
@staticmethod
def deploy_alert_send(r, **attr):
"""
Custom Method to send an Alert
"""
alert_id = r.id
if r.representation != "html" or not alert_id or r.component:
raise HTTP(501, current.ERROR.BAD_METHOD)
# Must have permission to update the alert in order to send it
authorised = current.auth.s3_has_permission("update", "deploy_alert",
record_id = alert_id)
if not authorised:
r.unauthorised()
T = current.T
record = r.record
# Always redirect to the Mission Profile
mission_id = record.mission_id
next_url = URL(f="mission", args=[mission_id, "profile"])
# Check whether the alert has already been sent
# - alerts should be read-only after creation
if record.message_id:
current.session.error = T("This Alert has already been sent!")
redirect(next_url)
db = current.db
s3db = current.s3db
table = s3db.deploy_alert
contact_method = record.contact_method
# Check whether there are recipients
ltable = db.deploy_alert_recipient
query = (ltable.alert_id == alert_id) & \
(ltable.deleted == False)
if contact_method == 9:
# Save a subsequent query
recipients = db(query).select(ltable.human_resource_id)
else:
recipients = db(query).select(ltable.id,
limitby=(0, 1)).first()
if not recipients:
current.session.error = T("This Alert has no Recipients yet!")
redirect(next_url)
# Send Message
message = record.body
msg = current.msg
if contact_method == 2:
# Send SMS
message_id = msg.send_by_pe_id(record.pe_id,
contact_method = "SMS",
message=message,
)
elif contact_method == 9:
# Send both
# Create separate alert for this
id = table.insert(body = message,
contact_method = 2,
mission_id = mission_id,
created_by = record.created_by,
created_on = record.created_on,
)
new_alert = dict(id=id)
s3db.update_super(table, new_alert)
# Add Recipients
for row in recipients:
ltable.insert(alert_id = id,
human_resource_id = row.human_resource_id,
)
# Send SMS
message_id = msg.send_by_pe_id(new_alert["pe_id"],
contact_method = "SMS",
message=message,
)
# Update the Alert to show it's been Sent
db(table.id == id).update(message_id=message_id)
if contact_method in (1, 9):
# Send Email
# Embed the mission_id to parse replies
# @ToDo: Use a Message Template to add Footer (very simple one for RDRT)
message = "%s\n:mission_id:%s:" % (message, mission_id)
# Lookup from_address
# @ToDo: Allow multiple channels to be defined &
# select the appropriate one for this mission
ctable = s3db.msg_email_channel
channel = db(ctable.deleted == False).select(ctable.username,
ctable.server,
limitby = (0, 1)
).first()
if not channel:
current.session.error = T("Need to configure an Email Address!")
redirect(URL(f="email_channel"))
from_address = "%s@%s" % (channel.username, channel.server)
message_id = msg.send_by_pe_id(record.pe_id,
subject=record.subject,
message=message,
from_address=from_address,
)
# Update the Alert to show it's been Sent
data = dict(message_id=message_id)
if contact_method == 2:
# Clear the Subject
data["subject"] = None
elif contact_method == 9:
# Also modify the contact_method to show that this is the email one
data["contact_method"] = 1
db(table.id == alert_id).update(**data)
# Return to the Mission Profile
current.session.confirmation = T("Alert Sent")
redirect(next_url)
# -------------------------------------------------------------------------
@staticmethod
def deploy_response_update_onaccept(form):
"""
Update the doc_id in all attachments (doc_document) to the
hrm_human_resource the response is linked to.
@param form: the form
"""
db = current.db
s3db = current.s3db
data = form.vars
if not data or "id" not in data:
return
# Get message ID and human resource ID
if "human_resource_id" not in data or "message_id" not in data:
rtable = s3db.deploy_response
response = db(rtable.id == data.id).select(rtable.human_resource_id,
rtable.message_id,
limitby=(0, 1)
).first()
if not response:
return
human_resource_id = response.human_resource_id
message_id = response.message_id
else:
human_resource_id = data.human_resource_id
message_id = data.message_id
# Update doc_id in all attachments (if any)
dtable = s3db.doc_document
ltable = s3db.deploy_mission_document
query = (ltable.message_id == message_id) & \
(dtable.id == ltable.document_id) & \
(ltable.deleted == False) & \
(dtable.deleted == False)
attachments = db(query).select(dtable.id)
if attachments:
# Get the doc_id from the hrm_human_resource
doc_id = None
if human_resource_id:
htable = s3db.hrm_human_resource
hr = db(htable.id == human_resource_id).select(htable.doc_id,
limitby=(0, 1)
).first()
if hr:
doc_id = hr.doc_id
db(dtable.id.belongs(attachments)).update(doc_id=doc_id)
return
# =============================================================================
def deploy_rheader(r, tabs=[], profile=False):
""" Deployment Resource Headers """
if r.representation != "html":
# RHeaders only used in interactive views
return None
record = r.record
if not record:
# List or Create form: rheader makes no sense here
return None
has_permission = current.auth.s3_has_permission
T = current.T
table = r.table
tablename = r.tablename
rheader = None
resourcename = r.name
if resourcename == "alert":
alert_id = r.id
db = current.db
ltable = db.deploy_alert_recipient
query = (ltable.alert_id == alert_id) & \
(ltable.deleted == False)
recipients = db(query).count()
unsent = not r.record.message_id
authorised = has_permission("update", tablename, record_id=alert_id)
if unsent and authorised:
send_button = BUTTON(T("Send Alert"), _class="alert-send-btn")
if recipients:
send_button.update(_onclick="window.location.href='%s'" %
URL(c="deploy",
f="alert",
args=[alert_id, "send"]))
else:
send_button.update(_disabled="disabled")
else:
send_button = ""
# Tabs
tabs = [(T("Message"), None),
(T("Recipients (%(number)s Total)") %
dict(number=recipients),
"recipient"),
]
if unsent and authorised:
# Insert tab to select recipients
tabs.insert(1, (T("Select Recipients"), "select"))
rheader_tabs = s3_rheader_tabs(r, tabs)
rheader = DIV(TABLE(TR(TH("%s: " % table.mission_id.label),
table.mission_id.represent(record.mission_id),
send_button,
),
TR(TH("%s: " % table.subject.label),
record.subject
),
), rheader_tabs, _class="alert-rheader")
elif resourcename == "mission":
if not profile and not r.component:
rheader = ""
else:
crud_string = S3Method.crud_string
record = r.record
title = crud_string(r.tablename, "title_display")
if record:
title = "%s: %s" % (title, record.name)
edit_btn = ""
if profile and \
current.auth.s3_has_permission("update",
"deploy_mission",
record_id=r.id):
crud_button = S3CRUD.crud_button
edit_btn = crud_button(T("Edit"),
_href=r.url(method="update"))
label = lambda f, table=table, record=record, **attr: \
TH("%s: " % table[f].label, **attr)
value = lambda f, table=table, record=record, **attr: \
TD(table[f].represent(record[f]), **attr)
rheader = DIV(H2(title),
TABLE(TR(label("event_type_id"),
value("event_type_id"),
label("location_id"),
value("location_id"),
label("code"),
value("code"),
),
TR(label("created_on"),
value("created_on"),
label("status"),
value("status"),
),
TR(label("comments"),
value("comments",
_class="mission-comments",
_colspan="6",
),
),
),
_class="mission-rheader"
)
if edit_btn:
rheader[-1][0].append(edit_btn)
else:
rheader = H2(title)
return rheader
# =============================================================================
def deploy_mission_hrquantity(row):
""" Number of human resources deployed """
if hasattr(row, "deploy_mission"):
row = row.deploy_mission
try:
mission_id = row.id
except AttributeError:
return 0
db = current.db
table = db.deploy_assignment
count = table.id.count()
row = db(table.mission_id == mission_id).select(count).first()
if row:
return row[count]
else:
return 0
# =============================================================================
def deploy_mission_response_count(row):
""" Number of responses to a mission """
if hasattr(row, "deploy_mission"):
row = row.deploy_mission
try:
mission_id = row.id
except AttributeError:
return 0
db = current.db
table = db.deploy_response
count = table.id.count()
row = db(table.mission_id == mission_id).select(count).first()
if row:
return row[count]
else:
return 0
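# Both helpers above back the Field.Method virtual fields registered on
# deploy_mission ("hrquantity" and "response_count"), so list views can
# show e.g. (T("Responses"), "response_count") without a stored column.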
# =============================================================================
def deploy_member_filter(status=False):
"""
Filter widgets for members (hrm_human_resource), used in
custom methods for member selection, e.g. deploy_apply
or deploy_alert_select_recipients
"""
T = current.T
widgets = [S3TextFilter(["person_id$first_name",
"person_id$middle_name",
"person_id$last_name",
],
label=T("Name"),
),
S3OptionsFilter("organisation_id",
filter=True,
hidden=True,
),
S3OptionsFilter("credential.job_title_id",
# @ToDo: Label setting
label = T("Sector"),
hidden=True,
),
]
settings = current.deployment_settings
if settings.get_org_regions():
if settings.get_org_regions_hierarchical():
widgets.insert(1, S3HierarchyFilter("organisation_id$region_id",
lookup="org_region",
hidden=True,
none=T("No Region"),
))
else:
widgets.insert(1, S3OptionsFilter("organisation_id$region_id",
widget="multiselect",
filter=True,
))
if status:
# Additional filter for roster status (default=active), allows
# to explicitly include inactive roster members when selecting
# alert recipients (only used there)
widgets.insert(1, S3OptionsFilter("application.active",
cols = 2,
default = True,
# Don't hide otherwise default
# doesn't apply:
#hidden = False,
label = T("Status"),
options = {"True": T("active"),
"False": T("inactive"),
},
))
return widgets
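# deploy_member_filter() builds the filter widgets consumed by deploy_apply()
# below (and, per its docstring, by deploy_alert_select_recipients).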
# =============================================================================
class deploy_Inbox(S3Method):
def apply_method(self, r, **attr):
"""
Custom method for email inbox, provides a datatable with bulk-delete
option
@param r: the S3Request
@param attr: the controller attributes
"""
T = current.T
s3db = current.s3db
response = current.response
s3 = response.s3
resource = self.resource
if r.http == "POST":
deleted = 0
post_vars = r.post_vars
if all([n in post_vars for n in ("delete", "selected", "mode")]):
selected = post_vars.selected
if selected:
selected = selected.split(",")
else:
selected = []
if selected:
# Handle exclusion filter
if post_vars.mode == "Exclusive":
if "filterURL" in post_vars:
filters = S3URLQuery.parse_url(post_vars.ajaxURL)
else:
filters = None
query = ~(FS("id").belongs(selected))
mresource = s3db.resource("msg_email",
filter=query,
vars=filters,
)
if response.s3.filter:
mresource.add_filter(response.s3.filter)
rows = mresource.select(["id"], as_rows=True)
selected = [str(row.id) for row in rows]
query = (FS("id").belongs(selected))
mresource = s3db.resource("msg_email", filter=query)
else:
mresource = resource
# Delete the messages
deleted = mresource.delete(format=r.representation)
if deleted:
response.confirmation = T("%(number)s messages deleted") % \
dict(number=deleted)
else:
response.warning = T("No messages could be deleted")
# List fields
list_fields = ["id",
"date",
"from_address",
"subject",
"body",
(T("Attachments"), "attachment.document_id"),
]
# Truncate message body
table = resource.table
table.body.represent = lambda body: DIV(XML(body),
_class="s3-truncate")
s3_trunk8()
# Data table filter & sorting
get_vars = r.get_vars
totalrows = resource.count()
if "pageLength" in get_vars:
display_length = get_vars["pageLength"]
if display_length == "None":
display_length = None
else:
display_length = int(display_length)
else:
display_length = 25
if display_length:
limit = 4 * display_length
else:
limit = None
filter, orderby, left = resource.datatable_filter(list_fields, get_vars)
resource.add_filter(filter)
# Extract the data
data = resource.select(list_fields,
start=0,
limit=limit,
orderby=orderby,
left=left,
count=True,
represent=True)
# Instantiate the data table
filteredrows = data["numrows"]
dt = S3DataTable(data["rfields"], data["rows"])
dt_id = "datatable"
# Bulk actions
# @todo: user confirmation
dt_bulk_actions = [(T("Delete"), "delete")]
if r.representation == "html":
# Action buttons
s3.actions = [{"label": str(T("Link to Mission")),
"_class": "action-btn link",
"url": URL(f="email_inbox", args=["[id]", "select"]),
},
]
S3CRUD.action_buttons(r,
editable=False,
read_url = r.url(method="read", id="[id]"),
delete_url = r.url(method="delete", id="[id]"),
)
# Export not needed
s3.no_formats = True
# Render data table
items = dt.html(totalrows,
filteredrows,
dt_id,
dt_ajax_url=URL(c = "deploy",
f = "email_inbox",
extension = "aadata",
vars = {},
),
dt_bulk_actions = dt_bulk_actions,
dt_pageLength = display_length,
dt_pagination = "true",
dt_searching = "true",
)
response.view = "list_filter.html"
return {"items": items,
"title": S3CRUD.crud_string(resource.tablename, "title_list"),
}
elif r.representation == "aadata":
# Ajax refresh
echo = int(get_vars.draw) if "draw" in get_vars else None
response = current.response
response.headers["Content-Type"] = "application/json"
return dt.json(totalrows,
filteredrows,
dt_id,
echo,
dt_bulk_actions = dt_bulk_actions)
else:
r.error(405, current.ERROR.BAD_FORMAT)
# =============================================================================
def deploy_apply(r, **attr):
"""
Custom method to select new RDRT members
@todo: make workflow re-usable for manual assignments
"""
# Requires permission to create deploy_application
authorised = current.auth.s3_has_permission("create", "deploy_application")
if not authorised:
r.unauthorised()
T = current.T
s3db = current.s3db
get_vars = r.get_vars
response = current.response
#settings = current.deployment_settings
if r.http == "POST":
added = 0
post_vars = r.post_vars
if all([n in post_vars for n in ("add", "selected", "mode")]):
selected = post_vars.selected
if selected:
selected = selected.split(",")
else:
selected = []
db = current.db
atable = s3db.deploy_application
if selected:
# Handle exclusion filter
if post_vars.mode == "Exclusive":
if "filterURL" in post_vars:
filters = S3URLQuery.parse_url(post_vars.ajaxURL)
else:
filters = None
query = ~(FS("id").belongs(selected))
hresource = s3db.resource("hrm_human_resource",
filter=query, vars=filters)
rows = hresource.select(["id"], as_rows=True)
selected = [str(row.id) for row in rows]
query = (atable.human_resource_id.belongs(selected)) & \
(atable.deleted != True)
rows = db(query).select(atable.id,
atable.active)
rows = dict((row.id, row) for row in rows)
for human_resource_id in selected:
try:
hr_id = int(human_resource_id.strip())
except ValueError:
continue
if hr_id in rows:
row = rows[hr_id]
if not row.active:
row.update_record(active=True)
added += 1
else:
atable.insert(human_resource_id=human_resource_id,
active=True)
added += 1
# @ToDo: Move 'RDRT' label to settings
current.session.confirmation = T("%(number)s RDRT members added") % \
dict(number=added)
if added > 0:
redirect(URL(f="human_resource", args=["summary"], vars={}))
else:
redirect(URL(f="application", vars={}))
elif r.http == "GET":
# Filter widgets
filter_widgets = deploy_member_filter()
# List fields
list_fields = ["id",
"person_id",
"job_title_id",
"organisation_id",
]
# Data table
resource = r.resource
totalrows = resource.count()
if "pageLength" in get_vars:
display_length = get_vars["pageLength"]
if display_length == "None":
display_length = None
else:
display_length = int(display_length)
else:
display_length = 25
if display_length:
limit = 4 * display_length
else:
limit = None
filter, orderby, left = resource.datatable_filter(list_fields, get_vars)
resource.add_filter(filter)
data = resource.select(list_fields,
start=0,
limit=limit,
orderby=orderby,
left=left,
count=True,
represent=True)
filteredrows = data["numrows"]
dt = S3DataTable(data["rfields"], data["rows"])
dt_id = "datatable"
# Bulk actions
# @todo: generalize label
dt_bulk_actions = [(T("Add as RDRT Members"), "add")]
if r.representation == "html":
# Page load
resource.configure(deletable = False)
#dt.defaultActionButtons(resource)
profile_url = URL(f = "human_resource",
args = ["[id]", "profile"])
S3CRUD.action_buttons(r,
deletable = False,
read_url = profile_url,
update_url = profile_url)
response.s3.no_formats = True
# Data table (items)
items = dt.html(totalrows,
filteredrows,
dt_id,
dt_pageLength=display_length,
dt_ajax_url=URL(c="deploy",
f="application",
extension="aadata",
vars={},
),
dt_searching="false",
dt_pagination="true",
dt_bulk_actions=dt_bulk_actions,
)
# Filter form
if filter_widgets:
# Where to retrieve filtered data from:
_vars = resource.crud._remove_filters(r.get_vars)
filter_submit_url = r.url(vars=_vars)
# Where to retrieve updated filter options from:
filter_ajax_url = URL(f="human_resource",
args=["filter.options"],
vars={})
get_config = resource.get_config
filter_clear = get_config("filter_clear", True)
filter_formstyle = get_config("filter_formstyle", None)
filter_submit = get_config("filter_submit", True)
filter_form = S3FilterForm(filter_widgets,
clear=filter_clear,
formstyle=filter_formstyle,
submit=filter_submit,
ajax=True,
url=filter_submit_url,
ajaxurl=filter_ajax_url,
_class="filter-form",
_id="datatable-filter-form",
)
fresource = current.s3db.resource(resource.tablename)
alias = resource.alias if r.component else None
ff = filter_form.html(fresource,
r.get_vars,
target="datatable",
alias=alias)
else:
ff = ""
output = dict(items = items,
# @todo: generalize
title = T("Add RDRT Members"),
list_filter_form = ff)
response.view = "list_filter.html"
return output
elif r.representation == "aadata":
# Ajax refresh
if "draw" in get_vars:
echo = int(get_vars.draw)
else:
echo = None
items = dt.json(totalrows,
filteredrows,
dt_id,
echo,
dt_bulk_actions=dt_bulk_actions)
response.headers["Content-Type"] = "application/json"
return items
else:
r.error(501, current.ERROR.BAD_FORMAT)
else:
r.error(405, current.ERROR.BAD_METHOD)
# =============================================================================
def deploy_alert_select_recipients(r, **attr):
"""
Custom method to select Recipients for an Alert
"""
alert_id = r.id
if r.representation not in ("html", "aadata") or \
not alert_id or \
not r.component:
r.error(405, current.ERROR.BAD_METHOD)
# Must have permission to update the alert in order to add recipients
authorised = current.auth.s3_has_permission("update", "deploy_alert",
record_id = alert_id)
if not authorised:
r.unauthorised()
T = current.T
s3db = current.s3db
response = current.response
member_query = FS("application.active") != None
if r.http == "POST":
added = 0
post_vars = r.post_vars
if all([n in post_vars for n in ("select", "selected", "mode")]):
selected = post_vars.selected
if selected:
selected = selected.split(",")
else:
selected = []
db = current.db
# Handle exclusion filter
if post_vars.mode == "Exclusive":
if "filterURL" in post_vars:
filters = S3URLQuery.parse_url(post_vars.filterURL)
else:
filters = None
query = member_query & \
(~(FS("id").belongs(selected)))
hresource = s3db.resource("hrm_human_resource",
filter=query, vars=filters)
rows = hresource.select(["id"], as_rows=True)
selected = [str(row.id) for row in rows]
rtable = s3db.deploy_alert_recipient
query = (rtable.alert_id == alert_id) & \
(rtable.human_resource_id.belongs(selected)) & \
(rtable.deleted != True)
rows = db(query).select(rtable.human_resource_id)
skip = set(row.human_resource_id for row in rows)
for human_resource_id in selected:
try:
hr_id = int(human_resource_id.strip())
except ValueError:
continue
if hr_id in skip:
continue
rtable.insert(alert_id=alert_id,
human_resource_id=human_resource_id,
)
added += 1
if not selected:
response.warning = T("No Recipients Selected!")
else:
response.confirmation = T("%(number)s Recipients added to Alert") % \
dict(number=added)
get_vars = r.get_vars or {}
representation = r.representation
settings = current.deployment_settings
resource = s3db.resource("hrm_human_resource",
filter=member_query, vars=r.get_vars)
# Filter widgets (including roster status)
filter_widgets = deploy_member_filter(status=True)
if filter_widgets and representation == "html":
# Apply filter defaults
resource.configure(filter_widgets = filter_widgets)
S3FilterForm.apply_filter_defaults(r, resource)
# List fields
list_fields = ["id",
"person_id",
"job_title_id",
"organisation_id",
]
# Data table
totalrows = resource.count()
if "pageLength" in get_vars:
display_length = get_vars["pageLength"]
if display_length == "None":
display_length = None
else:
display_length = int(display_length)
else:
display_length = 25
if display_length:
limit = 4 * display_length
else:
limit = None
filter, orderby, left = resource.datatable_filter(list_fields, get_vars)
resource.add_filter(filter)
data = resource.select(list_fields,
start=0,
limit=limit,
orderby=orderby,
left=left,
count=True,
represent=True)
filteredrows = data["numrows"]
dt = S3DataTable(data["rfields"], data["rows"])
dt_id = "datatable"
# Bulk actions
dt_bulk_actions = [(T("Select as Recipients"), "select")]
if representation == "html":
# Page load
resource.configure(deletable = False)
#dt.defaultActionButtons(resource)
response.s3.no_formats = True
# Data table (items)
items = dt.html(totalrows,
filteredrows,
dt_id,
dt_ajax_url=r.url(representation="aadata"),
dt_bulk_actions=dt_bulk_actions,
dt_pageLength=display_length,
dt_pagination="true",
dt_searching="false",
)
# Filter form
if filter_widgets:
# Where to retrieve filtered data from:
_vars = resource.crud._remove_filters(r.get_vars)
filter_submit_url = r.url(vars=_vars)
# Where to retrieve updated filter options from:
filter_ajax_url = URL(f="human_resource",
args=["filter.options"],
vars={})
get_config = resource.get_config
filter_clear = get_config("filter_clear", True)
filter_formstyle = get_config("filter_formstyle", None)
filter_submit = get_config("filter_submit", True)
filter_form = S3FilterForm(filter_widgets,
clear=filter_clear,
formstyle=filter_formstyle,
submit=filter_submit,
ajax=True,
url=filter_submit_url,
ajaxurl=filter_ajax_url,
_class="filter-form",
_id="datatable-filter-form",
)
fresource = current.s3db.resource(resource.tablename)
alias = resource.alias if r.component else None
ff = filter_form.html(fresource,
r.get_vars,
target="datatable",
alias=alias)
else:
ff = ""
output = dict(items=items,
title=T("Select Recipients"),
list_filter_form=ff)
# Maintain RHeader for consistency
if attr.get("rheader"):
rheader = attr["rheader"](r)
if rheader:
output["rheader"] = rheader
response.view = "list_filter.html"
return output
elif representation == "aadata":
# Ajax refresh
if "draw" in get_vars:
echo = int(get_vars.draw)
else:
echo = None
items = dt.json(totalrows,
filteredrows,
dt_id,
echo,
dt_bulk_actions=dt_bulk_actions)
response.headers["Content-Type"] = "application/json"
return items
else:
r.error(501, current.ERROR.BAD_FORMAT)
# =============================================================================
def deploy_response_select_mission(r, **attr):
"""
Custom method to Link a Response to a Mission &/or Human Resource
"""
message_id = r.record.message_id if r.record else None
if r.representation not in ("html", "aadata") or not message_id or not r.component:
r.error(405, current.ERROR.BAD_METHOD)
T = current.T
db = current.db
s3db = current.s3db
atable = s3db.msg_attachment
dtable = db.doc_document
query = (atable.message_id == message_id) & \
(atable.document_id == dtable.id)
atts = db(query).select(dtable.id,
dtable.file,
dtable.name,
)
response = current.response
mission_query = FS("mission.status") == 2
get_vars = r.get_vars or {}
mission_id = get_vars.get("mission_id", None)
if mission_id:
hr_id = get_vars.get("hr_id", None)
if not hr_id:
# @ToDo: deployment_setting for 'Member' label
current.session.warning = T("No Member Selected!")
# Can still link to the mission, member can be set
# manually in the mission profile
s3db.deploy_response.insert(message_id = message_id,
mission_id = mission_id,
)
else:
s3db.deploy_response.insert(message_id = message_id,
mission_id = mission_id,
human_resource_id = hr_id,
)
# Are there any attachments?
if atts:
ltable = s3db.deploy_mission_document
if hr_id:
# Set documents to the Member's doc_id
hrtable = s3db.hrm_human_resource
doc_id = db(hrtable.id == hr_id).select(hrtable.doc_id,
limitby=(0, 1)
).first().doc_id
for a in atts:
# Link to Mission
document_id = a.id
ltable.insert(mission_id = mission_id,
message_id = message_id,
document_id = document_id)
if hr_id:
db(dtable.id == document_id).update(doc_id = doc_id)
#mission = XML(A(T("Mission"),
# _href=URL(c="deploy", f="mission",
# args=[mission_id, "profile"])))
#current.session.confirmation = T("Response linked to %(mission)s") % \
# dict(mission=mission)
current.session.confirmation = T("Response linked to Mission")
redirect(URL(c="deploy", f="email_inbox"))
settings = current.deployment_settings
resource = s3db.resource("deploy_mission",
filter=mission_query, vars=r.get_vars)
# Filter widgets
filter_widgets = s3db.get_config("deploy_mission", "filter_widgets")
# List fields
list_fields = s3db.get_config("deploy_mission", "list_fields")
list_fields.insert(0, "id")
# Data table
totalrows = resource.count()
if "pageLength" in get_vars:
display_length = get_vars["pageLength"]
if display_length == "None":
display_length = None
else:
display_length = int(display_length)
else:
display_length = 25
if display_length:
limit = 4 * display_length
else:
limit = None
filter, orderby, left = resource.datatable_filter(list_fields, get_vars)
if not orderby:
# Most recent missions on top
orderby = "deploy_mission.created_on desc"
resource.add_filter(filter)
data = resource.select(list_fields,
start=0,
limit=limit,
orderby=orderby,
left=left,
count=True,
represent=True)
filteredrows = data["numrows"]
dt = S3DataTable(data["rfields"], data["rows"])
dt_id = "datatable"
if r.representation == "html":
# Page load
resource.configure(deletable = False)
record = r.record
action_vars = dict(mission_id="[id]")
# Can we identify the Member?
from ..s3.s3parser import S3Parsing
from_address = record.from_address
hr_id = S3Parsing().lookup_human_resource(from_address)
if hr_id:
action_vars["hr_id"] = hr_id
s3 = response.s3
s3.actions = [dict(label=str(T("Select Mission")),
_class="action-btn",
url=URL(f="email_inbox",
args=[r.id, "select"],
vars=action_vars,
)),
]
s3.no_formats = True
# Data table (items)
items = dt.html(totalrows,
filteredrows,
dt_id,
dt_ajax_url=r.url(representation="aadata"),
dt_pageLength=display_length,
dt_pagination="true",
dt_searching="false",
)
# Filter form
if filter_widgets:
# Where to retrieve filtered data from:
_vars = resource.crud._remove_filters(r.get_vars)
filter_submit_url = r.url(vars=_vars)
# Where to retrieve updated filter options from:
filter_ajax_url = URL(f="mission",
args=["filter.options"],
vars={})
get_config = resource.get_config
filter_clear = get_config("filter_clear", True)
filter_formstyle = get_config("filter_formstyle", None)
filter_submit = get_config("filter_submit", True)
filter_form = S3FilterForm(filter_widgets,
clear=filter_clear,
formstyle=filter_formstyle,
submit=filter_submit,
ajax=True,
url=filter_submit_url,
ajaxurl=filter_ajax_url,
_class="filter-form",
_id="datatable-filter-form",
)
fresource = s3db.resource(resource.tablename)
alias = resource.alias if r.component else None
ff = filter_form.html(fresource,
r.get_vars,
target="datatable",
alias=alias)
else:
ff = ""
output = dict(items=items,
title=T("Select Mission"),
list_filter_form=ff)
# Add RHeader
if hr_id:
from_address = A(from_address,
_href=URL(c="deploy", f="human_resource",
args=[hr_id, "profile"],
)
)
row = ""
else:
id = "deploy_response_human_resource_id__row"
# @ToDo: deployment_setting for 'Member' label
title = T("Select Member")
label = "%s:" % title
field = s3db.deploy_response.human_resource_id
# @ToDo: Get fancier & auto-click if there is just a single Mission
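            # The script below rewrites the &hr_id= parameter in the href of
            # every action button whenever a member is picked in the
            # autocomplete, so the mission link carries the chosen member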
script = \
'''S3.update_links=function(){
var value=$('#deploy_response_human_resource_id').val()
if(value){
$('.action-btn.link').each(function(){
var url=this.href
var posn=url.indexOf('&hr_id=')
if(posn>0){url=url.split('&hr_id=')[0]+'&hr_id='+value
}else{url+='&hr_id='+value}
$(this).attr('href',url)})}}'''
s3.js_global.append(script)
post_process = '''S3.update_links()'''
widget = S3HumanResourceAutocompleteWidget(post_process=post_process)
widget = widget(field, None)
comment = DIV(_class="tooltip",
_title="%s|%s" % (title,
current.messages.AUTOCOMPLETE_HELP))
# @ToDo: Handle non-callable formstyles
row = s3.crud.formstyle(id, label, widget, comment)
if isinstance(row, tuple):
row = TAG[""](row[0],
row[1],
)
# Any attachments?
if atts:
attachments = TABLE(TR(TH("%s: " % T("Attachments"))))
for a in atts:
url = URL(c="default", f="download",
args=a.file)
attachments.append(TR(TD(A(ICON("attachment"),
a.name,
_href=url))))
else:
attachments = ""
# @ToDo: Add Reply button
rheader = DIV(row,
TABLE(TR(TH("%s: " % T("From")),
from_address,
),
TR(TH("%s: " % T("Date")),
record.created_on,
),
TR(TH("%s: " % T("Subject")),
record.subject,
),
TR(TH("%s: " % T("Message Text")),
),
),
DIV(record.body, _class="message-body s3-truncate"),
attachments,
)
output["rheader"] = rheader
s3_trunk8(lines=5)
response.view = "list_filter.html"
return output
elif r.representation == "aadata":
# Ajax refresh
if "draw" in get_vars:
echo = int(get_vars.draw)
else:
echo = None
        # This view defines no bulk actions, so none are passed on
        items = dt.json(totalrows,
                        filteredrows,
                        dt_id,
                        echo)
response.headers["Content-Type"] = "application/json"
return items
else:
r.error(501, current.ERROR.BAD_FORMAT)
# =============================================================================
class deploy_MissionProfileLayout(S3DataListLayout):
""" DataList layout for Mission Profile """
# -------------------------------------------------------------------------
def __init__(self, profile="deploy_mission"):
""" Constructor """
super(deploy_MissionProfileLayout, self).__init__(profile=profile)
self.dcount = {}
self.avgrat = {}
self.deployed = set()
self.appraisals = {}
self.use_regions = current.deployment_settings.get_org_regions()
# -------------------------------------------------------------------------
def prep(self, resource, records):
"""
Bulk lookups for cards
@param resource: the resource
@param records: the records as returned from S3Resource.select
"""
db = current.db
s3db = current.s3db
tablename = resource.tablename
if tablename == "deploy_alert":
# Recipients, aggregated by alert
record_ids = set(record["_row"]["deploy_alert.id"] for record in records)
htable = s3db.hrm_human_resource
number_of_recipients = htable.id.count()
rtable = s3db.deploy_alert_recipient
alert_id = rtable.alert_id
use_regions = self.use_regions
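            # Count recipients per alert in a single grouped query; with
            # regions enabled, additionally group by the recipients'
            # organisation region so counts can be broken down per region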
if use_regions:
otable = s3db.org_organisation
region_id = otable.region_id
fields = [alert_id, region_id, number_of_recipients]
left = [htable.on(htable.id==rtable.human_resource_id),
otable.on(otable.id==htable.organisation_id),
]
groupby = [alert_id, region_id]
else:
fields = [alert_id, number_of_recipients]
left = [htable.on(htable.id==rtable.human_resource_id)]
groupby = [alert_id]
query = (alert_id.belongs(record_ids)) & \
(rtable.deleted != True)
rows = current.db(query).select(left=left,
groupby=groupby,
*fields)
recipient_numbers = {}
for row in rows:
alert = row[alert_id]
if alert in recipient_numbers:
recipient_numbers[alert].append(row)
else:
recipient_numbers[alert] = [row]
self.recipient_numbers = recipient_numbers
# Representations of the region_ids
if use_regions:
# not needed with regions = False
represent = otable.region_id.represent
represent.none = current.T("No Region")
region_ids = [row[region_id] for row in rows]
self.region_names = represent.bulk(region_ids)
else:
self.region_names = {}
elif tablename == "deploy_response":
dcount = self.dcount
avgrat = self.avgrat
deployed = self.deployed
mission_id = None
for record in records:
raw = record["_row"]
human_resource_id = raw["hrm_human_resource.id"]
if human_resource_id:
dcount[human_resource_id] = 0
avgrat[human_resource_id] = None
if not mission_id:
# Should be the same for all rows
mission_id = raw["deploy_response.mission_id"]
hr_ids = dcount.keys()
if hr_ids:
# Number of previous deployments
table = s3db.deploy_assignment
human_resource_id = table.human_resource_id
deployment_count = table.id.count()
query = (human_resource_id.belongs(hr_ids)) & \
(table.deleted != True)
rows = db(query).select(human_resource_id,
deployment_count,
groupby = human_resource_id,
)
for row in rows:
dcount[row[human_resource_id]] = row[deployment_count]
# Members deployed for this mission
query = (human_resource_id.belongs(hr_ids)) & \
(table.mission_id == mission_id) & \
(table.deleted != True)
rows = db(query).select(human_resource_id)
for row in rows:
deployed.add(row[human_resource_id])
# Average appraisal rating
atable = s3db.hrm_appraisal
htable = s3db.hrm_human_resource
human_resource_id = htable.id
average_rating = atable.rating.avg()
query = (human_resource_id.belongs(hr_ids)) & \
(htable.person_id == atable.person_id) & \
(atable.deleted != True) & \
(atable.rating != None) & \
(atable.rating > 0)
rows = db(query).select(human_resource_id,
average_rating,
groupby = human_resource_id,
)
for row in rows:
avgrat[row[human_resource_id]] = row[average_rating]
elif tablename == "deploy_assignment":
record_ids = set(record["_row"]["deploy_assignment.id"]
for record in records)
atable = s3db.hrm_appraisal
ltable = s3db.deploy_assignment_appraisal
query = (ltable.assignment_id.belongs(record_ids)) & \
(ltable.deleted != True) & \
(atable.id == ltable.appraisal_id)
rows = current.db(query).select(ltable.assignment_id,
ltable.appraisal_id,
)
appraisals = {}
for row in rows:
appraisals[row.assignment_id] = row.appraisal_id
self.appraisals = appraisals
return
# -------------------------------------------------------------------------
def render_header(self, list_id, item_id, resource, rfields, record):
"""
Render the card header
@param list_id: the HTML ID of the list
@param item_id: the HTML ID of the item
@param resource: the S3Resource to render
@param rfields: the S3ResourceFields to render
@param record: the record as dict
"""
# No card header in this layout
return None
# -------------------------------------------------------------------------
def render_body(self, list_id, item_id, resource, rfields, record):
"""
Render the card body
@param list_id: the HTML ID of the list
@param item_id: the HTML ID of the item
@param resource: the S3Resource to render
@param rfields: the S3ResourceFields to render
@param record: the record as dict
"""
db = current.db
s3db = current.s3db
has_permission = current.auth.s3_has_permission
table = resource.table
tablename = resource.tablename
T = current.T
pkey = str(resource._id)
raw = record["_row"]
record_id = raw[pkey]
# Specific contents and workflow
contents = workflow = None
if tablename == "deploy_alert":
# Message subject as title
subject = record["deploy_alert.subject"]
total_recipients = 0
rows = self.recipient_numbers.get(record_id)
if rows:
# Labels
hr_label = current.deployment_settings.get_deploy_hr_label()
HR_LABEL = T(hr_label)
if hr_label == "Member":
HRS_LABEL = T("Members")
elif hr_label == "Staff":
HRS_LABEL = HR_LABEL
elif hr_label == "Volunteer":
HRS_LABEL = T("Volunteers")
htable = s3db.hrm_human_resource
rcount = htable.id.count()
if not self.use_regions:
total_recipients = rows[0][rcount]
label = HR_LABEL if total_recipients == 1 else HRS_LABEL
link = URL(f = "alert", args = [record_id, "recipient"])
recipients = SPAN(A("%s %s" % (total_recipients, label),
_href=link,
),
)
else:
region = s3db.org_organisation.region_id
region_names = self.region_names
UNKNOWN_OPT = current.messages.UNKNOWN_OPT
recipients = []
no_region = None
for row in rows:
# Region
region_id = row[region]
region_name = region_names.get(region_id, UNKNOWN_OPT)
region_filter = {
"recipient.human_resource_id$" \
"organisation_id$region_id__belongs": region_id
}
# Number of recipients
num = row[rcount]
total_recipients += num
label = HR_LABEL if num == 1 else HRS_LABEL
# Link
link = URL(f = "alert",
args = [record_id, "recipient"],
vars = region_filter)
# Recipient list item
recipient = SPAN("%s (" % region_name,
A("%s %s" % (num, label),
_href=link,
),
")",
)
if region_id:
recipients.extend([recipient, ", "])
else:
no_region = [recipient, ", "]
# Append "no region" at the end of the list
if no_region:
recipients.extend(no_region)
recipients = TAG[""](recipients[:-1])
else:
recipients = T("No Recipients Selected")
# Modified-date corresponds to sent-date
modified_on = record["deploy_alert.modified_on"]
# Has this alert been sent?
sent = True if raw["deploy_alert.message_id"] else False
if sent:
status = SPAN(ICON("sent"),
T("sent"), _class="alert-status")
else:
status = SPAN(ICON("unsent"),
T("not sent"), _class="red alert-status")
# Message
message = record["deploy_alert.body"]
# Contents
contents = DIV(
DIV(
DIV(subject,
_class="card-title"),
DIV(recipients,
_class="card-category"),
_class="media-heading"
),
DIV(modified_on, status, _class="card-subtitle"),
DIV(message, _class="message-body s3-truncate"),
_class="media-body",
)
# Workflow
if not sent and total_recipients and \
has_permission("update", table, record_id=record_id):
send = A(ICON("mail"),
SPAN(T("Send this Alert"),
_class="card-action"),
_onclick="window.location.href='%s'" %
URL(c="deploy", f="alert",
args=[record_id, "send"]),
_class="action-lnk",
)
workflow = [send]
elif tablename == "deploy_response":
human_resource_id = raw["hrm_human_resource.id"]
# Title linked to member profile
if human_resource_id:
person_id = record["hrm_human_resource.person_id"]
profile_url = URL(f="human_resource", args=[human_resource_id, "profile"])
profile_title = T("Open Member Profile (in a new tab)")
person = A(person_id,
_href=profile_url,
_target="_blank",
_title=profile_title)
else:
person_id = "%s (%s)" % \
(T("Unknown"), record["msg_message.from_address"])
person = person_id
# Organisation
organisation = record["hrm_human_resource.organisation_id"]
# Created_on corresponds to received-date
created_on = record["deploy_response.created_on"]
# Message Data
message = record["msg_message.body"]
# Dropdown of available documents
documents = raw["doc_document.file"]
if documents:
if not isinstance(documents, list):
documents = [documents]
bootstrap = current.response.s3.formstyle == "bootstrap"
if bootstrap:
docs = UL(_class="dropdown-menu",
_role="menu",
)
else:
docs = SPAN(_id="attachments",
_class="profile-data-value",
)
retrieve = db.doc_document.file.retrieve
for doc in documents:
try:
doc_name = retrieve(doc)[0]
except (IOError, TypeError):
doc_name = current.messages["NONE"]
doc_url = URL(c="default", f="download",
args=[doc])
if bootstrap:
doc_item = LI(A(ICON("file"),
" ",
doc_name,
_href=doc_url,
),
_role="menuitem",
)
else:
doc_item = A(ICON("file"),
" ",
doc_name,
_href=doc_url,
)
docs.append(doc_item)
docs.append(", ")
if bootstrap:
docs = DIV(A(ICON("attachment"),
SPAN(_class="caret"),
_class="btn dropdown-toggle",
_href="#",
**{"_data-toggle": "dropdown"}
),
                               docs,  # the dropdown list built above
_class="btn-group attachments dropdown pull-right",
)
else:
# Remove final comma
docs.components.pop()
docs = DIV(LABEL("%s:" % T("Attachments"),
_class = "profile-data-label",
_for="attachments",
),
docs,
_class = "profile-data",
)
else:
docs = ""
# Number of previous deployments and average rating
# (looked up in-bulk in self.prep)
if hasattr(self, "dcount"):
dcount = self.dcount.get(human_resource_id, 0)
if hasattr(self, "avgrat"):
avgrat = self.avgrat.get(human_resource_id)
dcount_id = "profile-data-dcount-%s" % record_id
avgrat_id = "profile-data-avgrat-%s" % record_id
dinfo = DIV(LABEL("%s:" % T("Previous Deployments"),
_for=dcount_id,
_class="profile-data-label"),
SPAN(dcount,
_id=dcount_id,
_class="profile-data-value"),
LABEL("%s:" % T("Average Rating"),
_for=avgrat_id,
_class="profile-data-label"),
SPAN(avgrat,
_id=avgrat_id,
_class="profile-data-value"),
_class="profile-data",
)
# Comments
comments_id = "profile-data-comments-%s" % record_id
comments = DIV(LABEL("%s:" % T("Comments"),
_for=comments_id,
_class="profile-data-label"),
SPAN(record["deploy_response.comments"],
_id=comments_id,
_class="profile-data-value s3-truncate"),
_class="profile-data",
)
# Contents
contents = DIV(
DIV(
DIV(person,
_class="card-title"),
DIV(organisation,
_class="card-category"),
_class="media-heading",
),
DIV(created_on, _class="card-subtitle"),
DIV(message, _class="message-body s3-truncate"),
docs,
dinfo,
comments,
_class="media-body",
)
# Workflow
if human_resource_id:
if hasattr(self, "deployed") and human_resource_id in self.deployed:
deploy = A(ICON("deployed"),
SPAN(T("Member Deployed"),
_class="card-action"),
_class="action-lnk",
)
elif has_permission("create", "deploy_assignment"):
mission_id = raw["deploy_response.mission_id"]
url = URL(f="mission",
args=[mission_id, "assignment", "create"],
vars={"member_id": human_resource_id})
deploy = A(ICON("deploy"),
SPAN(T("Deploy this Member"),
_class="card-action"),
_href=url,
_class="action-lnk"
)
else:
deploy = None
if deploy:
workflow = [deploy]
elif tablename == "deploy_assignment":
human_resource_id = raw["hrm_human_resource.id"]
# Title linked to member profile
profile_url = URL(f="human_resource", args=[human_resource_id, "profile"])
profile_title = T("Open Member Profile (in a new tab)")
person = A(record["hrm_human_resource.person_id"],
_href=profile_url,
_target="_blank",
_title=profile_title)
# Organisation
organisation = record["hrm_human_resource.organisation_id"]
fields = dict((rfield.colname, rfield) for rfield in rfields)
render = lambda colname: self.render_column(item_id,
fields[colname],
record)
# Contents
contents = DIV(
DIV(
DIV(person,
_class="card-title"),
DIV(organisation,
_class="card-category"),
_class="media-heading"),
render("deploy_assignment.start_date"),
render("deploy_assignment.end_date"),
render("deploy_assignment.job_title_id"),
render("deploy_assignment.job_title"),
render("hrm_appraisal.rating"),
_class="media-body",
)
# Workflow actions
appraisal = self.appraisals.get(record_id)
person_id = raw["hrm_human_resource.person_id"]
if appraisal and \
has_permission("update", "hrm_appraisal", record_id=appraisal.id):
# Appraisal already uploaded => edit
EDIT_APPRAISAL = T("Open Appraisal")
url = URL(c="deploy", f="person",
args=[person_id,
"appraisal",
                                appraisal,
"update.popup"
],
vars={"refresh": list_id,
"record": record_id
})
edit = A(ICON("attachment"),
SPAN(EDIT_APPRAISAL, _class="card-action"),
_href=url,
_class="s3_modal action-lnk",
_title=EDIT_APPRAISAL,
)
workflow = [edit]
elif has_permission("update", table, record_id=record_id):
# No appraisal uploaded yet => upload
# Currently we assume that anyone who can edit the
# assignment can upload the appraisal
_class = "action-lnk"
UPLOAD_APPRAISAL = T("Upload Appraisal")
mission_id = raw["deploy_assignment.mission_id"]
url = URL(c="deploy", f="person",
args=[person_id,
"appraisal",
"create.popup"
],
vars={"mission_id": mission_id,
"refresh": list_id,
"record": record_id,
})
upload = A(ICON("upload"),
SPAN(UPLOAD_APPRAISAL, _class="card-action"),
_href=url,
_class="s3_modal action-lnk",
_title=UPLOAD_APPRAISAL,
)
workflow = [upload]
body = DIV(_class="media")
# Body icon
icon = self.render_icon(list_id, resource)
if icon:
body.append(icon)
# Toolbox and workflow actions
toolbox = self.render_toolbox(list_id, resource, record)
if toolbox:
if workflow:
toolbox.insert(0, DIV(workflow, _class="card-actions"))
body.append(toolbox)
# Contents
if contents:
body.append(contents)
return body
# -------------------------------------------------------------------------
def render_icon(self, list_id, resource):
"""
Render the body icon
@param list_id: the list ID
@param resource: the S3Resource
"""
tablename = resource.tablename
if tablename == "deploy_alert":
icon = "alert.png"
elif tablename == "deploy_response":
icon = "email.png"
elif tablename == "deploy_assignment":
icon = "member.png"
else:
return None
return A(IMG(_src=URL(c="static", f="themes",
args=["IFRC", "img", icon]),
_class="media-object",
),
_class="pull-left",
)
# -------------------------------------------------------------------------
def render_toolbox(self, list_id, resource, record):
"""
Render the toolbox
@param list_id: the HTML ID of the list
@param resource: the S3Resource to render
@param record: the record as dict
"""
table = resource.table
tablename = resource.tablename
record_id = record[str(resource._id)]
open_url = update_url = None
if tablename == "deploy_alert":
open_url = URL(f="alert", args=[record_id])
elif tablename == "deploy_response":
update_url = URL(f="response_message",
args=[record_id, "update.popup"],
vars={"refresh": list_id,
"record": record_id,
"profile": self.profile,
},
)
elif tablename == "deploy_assignment":
update_url = URL(c="deploy", f="assignment",
args=[record_id, "update.popup"],
vars={"refresh": list_id,
"record": record_id,
"profile": self.profile,
},
)
has_permission = current.auth.s3_has_permission
crud_string = S3Method.crud_string
toolbox = DIV(_class="edit-bar fright")
if update_url and \
has_permission("update", table, record_id=record_id):
btn = A(ICON("edit"),
_href=update_url,
_class="s3_modal",
_title=crud_string(tablename, "title_update"))
toolbox.append(btn)
elif open_url:
btn = A(ICON("file-alt"),
_href=open_url,
_title=crud_string(tablename, "title_display"))
toolbox.append(btn)
if has_permission("delete", table, record_id=record_id):
btn = A(ICON("delete"),
_class="dl-item-delete",
_title=crud_string(tablename, "label_delete_button"))
toolbox.append(btn)
return toolbox
# -------------------------------------------------------------------------
def render_column(self, item_id, rfield, record):
"""
Render a data column.
@param item_id: the HTML element ID of the item
@param rfield: the S3ResourceField for the column
@param record: the record (from S3Resource.select)
"""
colname = rfield.colname
if colname not in record:
return None
value = record[colname]
value_id = "%s-%s" % (item_id, rfield.colname.replace(".", "_"))
label = LABEL("%s:" % rfield.label,
_for = value_id,
_class = "profile-data-label")
value = SPAN(value,
_id = value_id,
_class = "profile-data-value")
return TAG[""](label, value)
# END =========================================================================
|
mit
| -2,361,572,793,898,173,000 | 39.250789 | 140 | 0.406309 | false |
DavidNorman/tensorflow
|
tensorflow/python/ops/parallel_for/array_test.py
|
1
|
11942
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for vectorization of array kernels."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.compat import compat
from tensorflow.python.eager import backprop
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import tensor_array_grad # pylint: disable=unused-import
from tensorflow.python.ops.parallel_for import control_flow_ops as pfor_control_flow_ops
from tensorflow.python.ops.parallel_for.test_util import PForTestCase
from tensorflow.python.platform import test
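# Each test below builds a loop_fn(i) describing one loop iteration;
# PForTestCase._test_loop_fn then checks that the vectorized (pfor) execution
# of loop_fn matches a reference run of the plain loop. A minimal sketch of
# the pattern (illustrative only, not an actual test in this file):
#
#   def loop_fn(i):
#     return array_ops.gather(x, i) * 2.
#   self._test_loop_fn(loop_fn, 3)  # compare pfor vs. for-loop for i = 0..2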
@test_util.run_all_in_graph_and_eager_modes
class ArrayTest(PForTestCase):
def test_gather(self):
x = random_ops.random_uniform([3, 3, 3])
x2 = array_ops.placeholder_with_default(x, shape=None) # Has dynamic shape.
def loop_fn(i):
outputs = []
x_i = array_ops.gather(x, i)
for y in [x, x2, x_i]:
axes = [0] if y is x_i else [0, 2, -1]
for axis in axes:
outputs.append(array_ops.gather(y, 2, axis=axis))
outputs.append(array_ops.gather(y, i, axis=axis))
outputs.append(array_ops.gather(y, [i], axis=axis))
outputs.append(array_ops.gather(y, [i, 2], axis=axis))
outputs.append(array_ops.gather(y, [[2, i], [i, 1]], axis=axis))
return outputs
self._test_loop_fn(loop_fn, 3)
def test_gather_nd(self):
x = random_ops.random_uniform([3, 3, 3])
def loop_fn(i):
outputs = []
x_i = array_ops.gather(x, i)
outputs.append(array_ops.gather_nd(x_i, [0], batch_dims=0))
outputs.append(array_ops.gather_nd(x_i, [i], batch_dims=0))
outputs.append(array_ops.gather_nd(x_i, [[i], [i], [i]], batch_dims=1))
return outputs
self._test_loop_fn(loop_fn, 3)
def test_shape(self):
x = random_ops.random_uniform([3, 2, 3])
def loop_fn(i):
x_i = array_ops.gather(x, i)
return array_ops.shape(x_i), array_ops.shape(x_i, out_type=dtypes.int64)
self._test_loop_fn(loop_fn, 3)
def test_size(self):
x = random_ops.random_uniform([3, 2, 3])
def loop_fn(i):
x_i = array_ops.gather(x, i)
return array_ops.size(x_i), array_ops.size(x_i, out_type=dtypes.int64)
self._test_loop_fn(loop_fn, 3)
def test_rank(self):
x = random_ops.random_uniform([3, 2, 3])
def loop_fn(i):
x_i = array_ops.gather(x, i)
return array_ops.rank(x_i)
self._test_loop_fn(loop_fn, 3)
def test_shape_n(self):
x = random_ops.random_uniform([3, 2, 3])
y = random_ops.random_uniform([3])
def loop_fn(i):
x_i = array_ops.gather(x, i)
y_i = array_ops.gather(y, i)
return array_ops.shape_n([x_i, x, y, y_i]), array_ops.shape_n(
[x_i, x, y, y_i], out_type=dtypes.int64)
self._test_loop_fn(loop_fn, 3)
def test_reshape(self):
x = random_ops.random_uniform([3, 2, 3])
def loop_fn(i):
x1 = array_ops.gather(x, i)
return array_ops.reshape(x1, [-1]), array_ops.reshape(x1, [1, 3, 1, -1])
self._test_loop_fn(loop_fn, 3)
def test_broadcast_to(self):
x = random_ops.random_uniform([3, 2, 1, 3])
def loop_fn(i):
x1 = array_ops.gather(x, i)
return (array_ops.broadcast_to(x1, [2, 2, 3]),
array_ops.broadcast_to(x1, [1, 2, 1, 3]))
self._test_loop_fn(loop_fn, 3)
def test_expand_dims(self):
x = random_ops.random_uniform([3, 2, 3])
def loop_fn(i):
x1 = array_ops.gather(x, i)
return array_ops.expand_dims(
x1, axis=-1), array_ops.expand_dims(
x1, axis=1)
self._test_loop_fn(loop_fn, 3)
def test_one_hot(self):
indices = random_ops.random_uniform(
[3, 2, 3], minval=0, maxval=4, dtype=dtypes.int32)
def loop_fn(i):
indices_i = array_ops.gather(indices, i)
return (array_ops.one_hot(indices_i, depth=4, on_value=2., off_value=-2.),
array_ops.one_hot(indices_i, depth=4, axis=1))
self._test_loop_fn(loop_fn, 3)
def test_searchsorted(self):
sorted_inputs = math_ops.cumsum(random_ops.random_uniform([3, 2, 4]),
axis=-1)
values = random_ops.random_uniform([2, 3], minval=-1, maxval=4.5)
def loop_fn(i):
inputs_i = array_ops.gather(sorted_inputs, i)
return [array_ops.searchsorted(inputs_i, values, out_type=dtypes.int32,
side="left"), # creates LowerBound op.
array_ops.searchsorted(inputs_i, values, out_type=dtypes.int64,
side="right")] # creates UpperBound op.
self._test_loop_fn(loop_fn, 3)
def test_slice(self):
x = random_ops.random_uniform([3, 2, 3])
def loop_fn(i):
x1 = array_ops.gather(x, i)
return array_ops.slice(x1, begin=(0, 1), size=(2, 1))
self._test_loop_fn(loop_fn, 3)
def test_tile(self):
x = random_ops.random_uniform([3, 2, 3])
def loop_fn(i):
x1 = array_ops.gather(x, i)
return array_ops.tile(x1, [2, 1])
self._test_loop_fn(loop_fn, 3)
def test_tile_loop_dependent(self):
x = random_ops.random_uniform([3, 2, 3])
def loop_fn(i):
x1 = array_ops.gather(x, i)
return array_ops.tile(x1, [i, 1])
with self.assertRaisesRegexp(ValueError, "expected to be loop invariant"):
pfor_control_flow_ops.pfor(loop_fn, 2)
def test_pack(self):
x = random_ops.random_uniform([3, 2, 3])
y = random_ops.random_uniform([2, 3])
def loop_fn(i):
x1 = array_ops.gather(x, i)
return array_ops.stack([x1, y], axis=-1)
self._test_loop_fn(loop_fn, 1)
def test_unpack(self):
x = random_ops.random_uniform([3, 2, 3, 4])
def loop_fn(i):
x_i = array_ops.gather(x, i)
return array_ops.unstack(
x_i, 4, axis=-1), array_ops.unstack(
x_i, 3, axis=1)
self._test_loop_fn(loop_fn, 3)
def test_pad(self):
x = random_ops.random_uniform([3, 2, 3])
padding = constant_op.constant([[1, 2], [3, 4]])
def loop_fn(i):
x1 = array_ops.gather(x, i)
return array_ops.pad(x1, padding, mode="CONSTANT")
self._test_loop_fn(loop_fn, 3)
def test_split(self):
x = random_ops.random_uniform([3, 2, 3])
def loop_fn(i):
x1 = array_ops.gather(x, i)
return array_ops.split(x1, 2, axis=0), array_ops.split(x1, 3, axis=-1)
self._test_loop_fn(loop_fn, 3)
def test_split_v(self):
x = random_ops.random_uniform([3, 6, 3])
def loop_fn(i):
x1 = array_ops.gather(x, i)
return (array_ops.split(x1, [2, 1, 3], axis=0),
array_ops.split(x1, [3], axis=-1))
self._test_loop_fn(loop_fn, 3)
def test_squeeze(self):
x = random_ops.random_uniform([5, 1, 2, 1])
def loop_fn(i):
x1 = array_ops.gather(x, i)
return (array_ops.squeeze(x1, axis=0),
array_ops.squeeze(x1, axis=-1),
array_ops.squeeze(x1))
self._test_loop_fn(loop_fn, 3)
def test_transpose(self):
x = random_ops.random_uniform([3, 2, 3, 4])
def loop_fn(i):
x1 = array_ops.gather(x, i)
return array_ops.transpose(x1, [2, 1, 0])
self._test_loop_fn(loop_fn, 3)
def test_zeros_like(self):
x = random_ops.random_uniform([3, 2, 3])
def loop_fn(i):
x1 = array_ops.gather(x, i)
      z = array_ops.zeros_like(x1)
return z, z + x1
self._test_loop_fn(loop_fn, 3)
def test_concat_v2(self):
x = random_ops.random_uniform([3, 2, 3])
y = random_ops.random_uniform([2, 3])
def loop_fn(i):
x1 = array_ops.gather(x, i)
return array_ops.concat(
[x1, x1, y], axis=0), array_ops.concat(
[x1, x1, y], axis=-1)
self._test_loop_fn(loop_fn, 3)
def test_unary_cwise_ops(self):
for op in [array_ops.identity, array_ops.stop_gradient]:
with backprop.GradientTape(persistent=True) as g:
x = random_ops.random_uniform([3, 5])
g.watch(x)
# pylint: disable=cell-var-from-loop
def loop_fn(i):
with g:
x1 = array_ops.gather(x, i)
y = op(x1) + x1
loss = nn.l2_loss(y)
return op(x), y, g.gradient(loss, x1)
# pylint: enable=cell-var-from-loop
self._test_loop_fn(loop_fn, 3)
def test_identity_n(self):
x = random_ops.random_uniform([3, 4])
def loop_fn(i):
return array_ops.identity_n([x, array_ops.gather(x, i)])
self._test_loop_fn(loop_fn, 3)
def test_matrix_band_part(self):
x = random_ops.random_uniform([3, 4, 2, 2])
for num_lower, num_upper in ((0, -1), (-1, 0), (1, 1)):
# pylint: disable=cell-var-from-loop
def loop_fn(i):
return array_ops.matrix_band_part(
array_ops.gather(x, i),
num_lower=num_lower,
num_upper=num_upper)
# pylint: enable=cell-var-from-loop
self._test_loop_fn(loop_fn, 3)
def test_matrix_diag(self):
x = random_ops.random_uniform([3, 2, 4])
def loop_fn(i):
diagonal = array_ops.gather(x, i)
if compat.forward_compatible(2019, 10, 31):
return array_ops.matrix_diag(diagonal, k=(0, 1), num_rows=4, num_cols=5)
return array_ops.matrix_diag(diagonal)
self._test_loop_fn(loop_fn, 3)
def test_matrix_diag_part(self):
x = random_ops.random_uniform([3, 4, 6])
def loop_fn(i):
input = array_ops.gather(x, i) # pylint: disable=redefined-builtin
if compat.forward_compatible(2019, 10, 31):
return array_ops.matrix_diag_part(input, k=(-2, 0), padding_value=3)
return array_ops.matrix_diag_part(input)
self._test_loop_fn(loop_fn, 3)
def test_matrix_set_diag(self):
matrices = random_ops.random_uniform([3, 4, 4])
diags = random_ops.random_uniform([3, 4])
if compat.forward_compatible(2019, 10, 31):
bands = random_ops.random_uniform([3, 3, 4])
def loop_fn(i):
matrix_i = array_ops.gather(matrices, i)
diag_i = array_ops.gather(diags, i)
results = [
array_ops.matrix_set_diag(matrix_i, diag_i),
array_ops.matrix_set_diag(matrices[0, ...], diag_i),
array_ops.matrix_set_diag(matrix_i, diags[0, ...])
]
if compat.forward_compatible(2019, 10, 31):
k = (-1, 1)
band_i = array_ops.gather(bands, i)
results.extend([
array_ops.matrix_set_diag(matrix_i, band_i, k=k),
array_ops.matrix_set_diag(matrices[0, ...], band_i, k=k),
array_ops.matrix_set_diag(matrix_i, bands[0, ...], k=k)
])
return results
self._test_loop_fn(loop_fn, 3)
def test_strided_slice(self):
with backprop.GradientTape(persistent=True) as g:
x = random_ops.random_uniform([3, 3, 4, 4, 2, 2, 2])
g.watch(x)
def loop_fn(i):
with g:
x_i = array_ops.gather(x, i)
y = x_i[:2, ::2, 1::3, ..., array_ops.newaxis, 1]
loss = nn.l2_loss(y)
return y, g.gradient(loss, x_i)
self._test_loop_fn(loop_fn, 3)
if __name__ == "__main__":
test.main()
|
apache-2.0
| 4,554,197,278,193,086,500 | 29.699229 | 88 | 0.595629 | false |
ip-tools/ip-navigator
|
patzilla/util/web/uwsgi/uwsgidecorators.py
|
1
|
9668
|
# https://github.com/unbit/uwsgi/blob/master/uwsgidecorators.py
from functools import partial
import sys
from threading import Thread
try:
import cPickle as pickle
except:
import pickle
import uwsgi
if uwsgi.masterpid() == 0:
raise Exception(
"you have to enable the uWSGI master process to use this module")
spooler_functions = {}
mule_functions = {}
postfork_chain = []
def get_free_signal():
for signum in range(0, 256):
if not uwsgi.signal_registered(signum):
return signum
raise Exception("No free uwsgi signal available")
def manage_spool_request(vars):
f = spooler_functions[vars['ud_spool_func']]
if 'args' in vars:
args = pickle.loads(vars.pop('args'))
kwargs = pickle.loads(vars.pop('kwargs'))
ret = f(*args, **kwargs)
else:
ret = f(vars)
if not 'ud_spool_ret' in vars:
return ret
return int(vars['ud_spool_ret'])
def postfork_chain_hook():
for f in postfork_chain:
f()
uwsgi.spooler = manage_spool_request
uwsgi.post_fork_hook = postfork_chain_hook
class postfork(object):
def __init__(self, f):
if callable(f):
self.wid = 0
self.f = f
else:
self.f = None
self.wid = f
postfork_chain.append(self)
def __call__(self, *args, **kwargs):
if self.f:
if self.wid > 0 and self.wid != uwsgi.worker_id():
return
return self.f()
self.f = args[0]
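# Usage sketch (the function name is illustrative): run a hook in every
# worker right after fork.
#
#   @postfork
#   def reconnect():
#       ...  # e.g. re-open per-process database connections
#
# @postfork(3) would restrict the hook to the worker with id 3.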
class _spoolraw(object):
def __call__(self, *args, **kwargs):
arguments = self.base_dict
if not self.pass_arguments:
if len(args) > 0:
arguments.update(args[0])
if kwargs:
arguments.update(kwargs)
else:
spooler_args = {}
for key in ('message_dict', 'spooler', 'priority', 'at', 'body'):
if key in kwargs:
spooler_args.update({key: kwargs.pop(key)})
arguments.update(spooler_args)
arguments.update({'args': pickle.dumps(args), 'kwargs': pickle.dumps(kwargs)})
return uwsgi.spool(arguments)
# For backward compatibility (uWSGI < 1.9.13)
def spool(self, *args, **kwargs):
return self.__class__.__call__(self, *args, **kwargs)
def __init__(self, f, pass_arguments):
if not 'spooler' in uwsgi.opt:
raise Exception(
"you have to enable the uWSGI spooler to use @%s decorator" % self.__class__.__name__)
self.f = f
spooler_functions[self.f.__name__] = self.f
# For backward compatibility (uWSGI < 1.9.13)
self.f.spool = self.__call__
self.pass_arguments = pass_arguments
self.base_dict = {'ud_spool_func': self.f.__name__}
class _spool(_spoolraw):
def __call__(self, *args, **kwargs):
self.base_dict['ud_spool_ret'] = str(uwsgi.SPOOL_OK)
return _spoolraw.__call__(self, *args, **kwargs)
class _spoolforever(_spoolraw):
def __call__(self, *args, **kwargs):
self.base_dict['ud_spool_ret'] = str(uwsgi.SPOOL_RETRY)
return _spoolraw.__call__(self, *args, **kwargs)
def spool_decorate(f=None, pass_arguments=False, _class=_spoolraw):
if not f:
return partial(_class, pass_arguments=pass_arguments)
return _class(f, pass_arguments)
def spoolraw(f=None, pass_arguments=False):
return spool_decorate(f, pass_arguments)
def spool(f=None, pass_arguments=False):
return spool_decorate(f, pass_arguments, _spool)
def spoolforever(f=None, pass_arguments=False):
return spool_decorate(f, pass_arguments, _spoolforever)
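# Usage sketch (requires the uWSGI spooler; the task name is illustrative):
#
#   @spool(pass_arguments=True)
#   def long_task(a, b):
#       ...              # executed later, inside the spooler process
#
#   long_task(1, b=2)    # called from a worker: enqueues and returns at once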
class mulefunc(object):
def __init__(self, f):
if callable(f):
self.fname = f.__name__
self.mule = 0
mule_functions[f.__name__] = f
else:
self.mule = f
self.fname = None
def real_call(self, *args, **kwargs):
uwsgi.mule_msg(pickle.dumps(
{
'service': 'uwsgi_mulefunc',
'func': self.fname,
'args': args,
'kwargs': kwargs
}
), self.mule)
def __call__(self, *args, **kwargs):
if not self.fname:
self.fname = args[0].__name__
mule_functions[self.fname] = args[0]
return self.real_call
return self.real_call(*args, **kwargs)
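# Usage sketch: offload a call to a mule process (use @mulefunc(2) to pin it
# to mule #2; names are illustrative):
#
#   @mulefunc
#   def background(value):
#       ...
#
#   background(42)   # sends a pickled message; the mule executes the call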
def mule_msg_dispatcher(message):
msg = pickle.loads(message)
if msg['service'] == 'uwsgi_mulefunc':
return mule_functions[msg['func']](*msg['args'], **msg['kwargs'])
uwsgi.mule_msg_hook = mule_msg_dispatcher
class rpc(object):
def __init__(self, name):
self.name = name
def __call__(self, f):
uwsgi.register_rpc(self.name, f)
return f
class farm_loop(object):
def __init__(self, f, farm):
self.f = f
self.farm = farm
def __call__(self):
if uwsgi.mule_id() == 0:
return
if not uwsgi.in_farm(self.farm):
return
while True:
message = uwsgi.farm_get_msg()
if message:
self.f(message)
class farm(object):
def __init__(self, name=None, **kwargs):
self.name = name
def __call__(self, f):
postfork_chain.append(farm_loop(f, self.name))
class mule_brain(object):
def __init__(self, f, num):
self.f = f
self.num = num
def __call__(self):
if uwsgi.mule_id() == self.num:
try:
self.f()
except:
exc = sys.exc_info()
sys.excepthook(exc[0], exc[1], exc[2])
sys.exit(1)
class mule_brainloop(mule_brain):
def __call__(self):
if uwsgi.mule_id() == self.num:
while True:
try:
self.f()
except:
exc = sys.exc_info()
sys.excepthook(exc[0], exc[1], exc[2])
sys.exit(1)
class mule(object):
def __init__(self, num):
self.num = num
def __call__(self, f):
postfork_chain.append(mule_brain(f, self.num))
class muleloop(mule):
def __call__(self, f):
postfork_chain.append(mule_brainloop(f, self.num))
class mulemsg_loop(object):
def __init__(self, f, num):
self.f = f
self.num = num
def __call__(self):
if uwsgi.mule_id() == self.num:
while True:
message = uwsgi.mule_get_msg()
if message:
self.f(message)
class mulemsg(object):
def __init__(self, num):
self.num = num
def __call__(self, f):
postfork_chain.append(mulemsg_loop(f, self.num))
class signal(object):
def __init__(self, num, **kwargs):
self.num = num
self.target = kwargs.get('target', '')
def __call__(self, f):
uwsgi.register_signal(self.num, self.target, f)
return f
class timer(object):
def __init__(self, secs, **kwargs):
self.num = kwargs.get('signum', get_free_signal())
self.secs = secs
self.target = kwargs.get('target', '')
def __call__(self, f):
uwsgi.register_signal(self.num, self.target, f)
uwsgi.add_timer(self.num, self.secs)
return f
class cron(object):
def __init__(self, minute, hour, day, month, dayweek, **kwargs):
self.num = kwargs.get('signum', get_free_signal())
self.minute = minute
self.hour = hour
self.day = day
self.month = month
self.dayweek = dayweek
self.target = kwargs.get('target', '')
def __call__(self, f):
uwsgi.register_signal(self.num, self.target, f)
uwsgi.add_cron(self.num, self.minute, self.hour,
self.day, self.month, self.dayweek)
return f
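# Scheduling sketch (handlers receive the uwsgi signal number; -1 means "any"
# in the cron fields):
#
#   @timer(60)
#   def every_minute(signum):
#       ...
#
#   @cron(30, 8, -1, -1, -1)   # 08:30 every day
#   def morning_job(signum):
#       ...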
class rbtimer(object):
def __init__(self, secs, **kwargs):
self.num = kwargs.get('signum', get_free_signal())
self.secs = secs
self.target = kwargs.get('target', '')
def __call__(self, f):
uwsgi.register_signal(self.num, self.target, f)
uwsgi.add_rb_timer(self.num, self.secs)
return f
class filemon(object):
def __init__(self, fsobj, **kwargs):
self.num = kwargs.get('signum', get_free_signal())
self.fsobj = fsobj
self.target = kwargs.get('target', '')
def __call__(self, f):
uwsgi.register_signal(self.num, self.target, f)
uwsgi.add_file_monitor(self.num, self.fsobj)
return f
class erlang(object):
def __init__(self, name):
self.name = name
def __call__(self, f):
uwsgi.erlang_register_process(self.name, f)
return f
class lock(object):
def __init__(self, f):
self.f = f
def __call__(self, *args, **kwargs):
# ensure the spooler will not call it
if uwsgi.i_am_the_spooler():
return
uwsgi.lock()
try:
return self.f(*args, **kwargs)
finally:
uwsgi.unlock()
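# @lock serializes a function across all workers via the uWSGI lock, e.g.:
#
#   @lock
#   def update_shared_state():
#       ...   # only one worker/thread executes this at a time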
class thread(object):
def __init__(self, f):
self.f = f
def __call__(self, *args):
t = Thread(target=self.f, args=args)
t.daemon = True
t.start()
return self.f
class harakiri(object):
def __init__(self, seconds):
self.s = seconds
def real_call(self, *args, **kwargs):
uwsgi.set_user_harakiri(self.s)
r = self.f(*args, **kwargs)
uwsgi.set_user_harakiri(0)
return r
def __call__(self, f):
self.f = f
return self.real_call
|
agpl-3.0
| 957,391,275,148,115,200 | 23.789744 | 102 | 0.543029 | false |
perrygeo/Fiona
|
fiona/transform.py
|
4
|
3374
|
"""Coordinate and geometry warping and reprojection"""
from fiona._transform import _transform, _transform_geom
def transform(src_crs, dst_crs, xs, ys):
"""Transform coordinates from one reference system to another.
Parameters
----------
src_crs: str or dict
A string like 'EPSG:4326' or a dict of proj4 parameters like
{'proj': 'lcc', 'lat_0': 18.0, 'lat_1': 18.0, 'lon_0': -77.0}
representing the coordinate reference system on the "source"
or "from" side of the transformation.
dst_crs: str or dict
A string or dict representing the coordinate reference system
on the "destination" or "to" side of the transformation.
xs: sequence of float
A list or tuple of x coordinate values. Must have the same
length as the ``ys`` parameter.
ys: sequence of float
A list or tuple of y coordinate values. Must have the same
length as the ``xs`` parameter.
Returns
-------
xp, yp: list of float
A pair of transformed coordinate sequences. The elements of
``xp`` and ``yp`` correspond exactly to the elements of the
``xs`` and ``ys`` input parameters.
Examples
--------
>>> transform('EPSG:4326', 'EPSG:26953', [-105.0], [40.0])
([957097.0952383667], [378940.8419189212])
"""
# Function is implemented in the _transform C extension module.
return _transform(src_crs, dst_crs, xs, ys)
def transform_geom(
src_crs, dst_crs, geom,
antimeridian_cutting=False, antimeridian_offset=10.0, precision=-1):
"""Transform a geometry obj from one reference system to another.
Parameters
----------
src_crs: str or dict
A string like 'EPSG:4326' or a dict of proj4 parameters like
{'proj': 'lcc', 'lat_0': 18.0, 'lat_1': 18.0, 'lon_0': -77.0}
representing the coordinate reference system on the "source"
or "from" side of the transformation.
dst_crs: str or dict
A string or dict representing the coordinate reference system
on the "destination" or "to" side of the transformation.
geom: obj
A GeoJSON-like geometry object with 'type' and 'coordinates'
members.
antimeridian_cutting: bool, optional
``True`` to cut output geometries in two at the antimeridian,
        the default is ``False``.
antimeridian_offset: float, optional
A distance in decimal degrees from the antimeridian, outside of
which geometries will not be cut.
precision: int, optional
Optional rounding precision of output coordinates, in number
of decimal places.
Returns
-------
obj
A new GeoJSON-like geometry with transformed coordinates. Note
that if the output is at the antimeridian, it may be cut and
of a different geometry ``type`` than the input, e.g., a
polygon input may result in multi-polygon output.
Examples
--------
>>> transform_geom(
... 'EPSG:4326', 'EPSG:26953',
... {'type': 'Point', 'coordinates': [-105.0, 40.0]})
{'type': 'Point', 'coordinates': (957097.0952383667, 378940.8419189212)}
"""
# Function is implemented in the _transform C extension module.
return _transform_geom(
src_crs, dst_crs, geom,
antimeridian_cutting, antimeridian_offset, precision)
|
bsd-3-clause
| -7,297,159,703,876,337,000 | 35.673913 | 76 | 0.635448 | false |
thopiekar/Uranium
|
UM/View/RenderBatch.py
|
1
|
14238
|
# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the LGPLv3 or higher.
import copy
from UM.Logger import Logger
from UM.Math.Vector import Vector
from UM.View.GL.OpenGL import OpenGL
from UM.View.GL.OpenGLContext import OpenGLContext
from PyQt5.QtGui import QOpenGLVertexArrayObject
vertexBufferProperty = "__gl_vertex_buffer"
indexBufferProperty = "__gl_index_buffer"
## The RenderBatch class represent a batch of objects that should be rendered.
#
# Each RenderBatch contains a list of objects to render and all state related
# to those objects. It tries to minimize changes to state between render the
# individual objects. This means that for example the ShaderProgram used is
# only bound once, at the start of rendering. There are a few values, like
# the model-view-projection matrix that are updated for each object.
#
# Currently RenderBatch objects are created each frame including the
# VertexArrayObject (VAO). This is done to greatly simplify managing
# RenderBatch-changes. Whenever (sets of) RenderBatches are managed throughout
# the lifetime of a session, crossing multiple frames, reusing VAOs can
# improve performance.
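# A typical frame then looks roughly like this (sketch; shader, node and
# camera are assumed to be supplied by the caller):
#
#   batch = RenderBatch(shader, type = RenderBatch.RenderType.Solid)
#   batch.addItem(node.getWorldTransformation(), node.getMeshData())
#   batch.render(camera)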
class RenderBatch():
## The type of render batch.
#
# This determines some basic state values, like blending on/off and additionally
# is used to determine sorting order.
class RenderType:
NoType = 0 ## No special state changes are done.
Solid = 1 ## Depth testing and depth writing are enabled.
Transparent = 2 ## Depth testing is enabled, depth writing is disabled.
Overlay = 3 ## Depth testing is disabled.
## The mode to render objects in. These correspond to OpenGL render modes.
class RenderMode:
Points = 0x0000
Lines = 0x0001
LineLoop = 0x0002
LineStrip = 0x0003
Triangles = 0x0004
TriangleStrip = 0x0005
TriangleFan = 0x0006
## Blending mode.
class BlendMode:
NoBlending = 0 ## Blending disabled.
Normal = 1 ## Standard alpha blending, mixing source and destination values based on respective alpha channels.
Additive = 2 ## Additive blending, the value of the rendered pixel is added to the color already in the buffer.
## Init method.
#
# \param shader The shader to use for this batch.
# \param kwargs Keyword arguments.
# Possible values:
# - type: The RenderType to use for this batch. Defaults to RenderType.Solid.
# - mode: The RenderMode to use for this batch. Defaults to RenderMode.Triangles.
    #                 - backface_cull: Whether to enable or disable backface culling. Defaults to False.
# - range: A tuple indicating the start and end of a range of triangles to render. Defaults to None.
# - sort: A modifier to influence object sorting. Lower values will cause the object to be rendered before others. Mostly relevant to Transparent mode.
# - blend_mode: The BlendMode to use to render this batch. Defaults to NoBlending when type is Solid, Normal when type is Transparent or Overlay.
# - state_setup_callback: A callback function to be called just after the state has been set up but before rendering.
# This can be used to do additional alterations to the state that can not be done otherwise.
# The callback is passed the OpenGL bindings object as first and only parameter.
# - state_teardown_callback: A callback similar to state_setup_callback, but called after everything was rendered, to handle cleaning up state changes made in state_setup_callback.
def __init__(self, shader, **kwargs):
self._shader = shader
self._render_type = kwargs.get("type", self.RenderType.Solid)
self._render_mode = kwargs.get("mode", self.RenderMode.Triangles)
self._backface_cull = kwargs.get("backface_cull", False)
self._render_range = kwargs.get("range", None)
self._sort_weight = kwargs.get("sort", 0)
self._blend_mode = kwargs.get("blend_mode", None)
if not self._blend_mode:
self._blend_mode = self.BlendMode.NoBlending if self._render_type == self.RenderType.Solid else self.BlendMode.Normal
self._state_setup_callback = kwargs.get("state_setup_callback", None)
self._state_teardown_callback = kwargs.get("state_teardown_callback", None)
self._items = []
self._view_matrix = None
self._projection_matrix = None
self._view_projection_matrix = None
self._gl = OpenGL.getInstance().getBindingsObject()
## The RenderType for this batch.
@property
def renderType(self):
return self._render_type
## The RenderMode for this batch.
@property
def renderMode(self):
return self._render_mode
## The shader for this batch.
@property
def shader(self):
return self._shader
## Whether backface culling is enabled or not.
@property
def backfaceCull(self):
return self._backface_cull
## The range of elements to render.
#
# \return The range of elements to render, as a tuple of (start, end)
@property
def renderRange(self):
return self._render_range
## The items to render.
#
# \return A list of tuples, where each item is (transform_matrix, mesh, extra_uniforms)
@property
def items(self):
return self._items
## Less-than comparison method.
#
# This sorts RenderType.Solid before RenderType.Transparent
# and RenderType.Transparent before RenderType.Overlay.
def __lt__(self, other):
if self._render_type == other._render_type:
return self._sort_weight < other._sort_weight
if self._render_type == self.RenderType.Solid:
return True
if self._render_type == self.RenderType.Transparent and other._render_type != self.RenderType.Solid:
return True
return False
## Add an item to render to this batch.
#
# \param transformation The transformation matrix to use for rendering the item.
# \param mesh The mesh to render with the transform matrix.
# \param uniforms A dict of additional uniform bindings to set when rendering the item.
# Note these are set specifically for this item.
def addItem(self, transformation, mesh, uniforms = None):
if not transformation:
Logger.log("w", "Tried to add an item to batch without transformation")
return
if not mesh:
Logger.log("w", "Tried to add an item to batch without mesh")
return
self._items.append({ "transformation": transformation, "mesh": mesh, "uniforms": uniforms})
## Render the batch.
#
# \param camera The camera to render from.
def render(self, camera):
if camera is None:
Logger.log("e", "Unable to render batch without a camera.")
return
self._shader.bind()
if self._backface_cull:
self._gl.glEnable(self._gl.GL_CULL_FACE)
else:
self._gl.glDisable(self._gl.GL_CULL_FACE)
if self._render_type == self.RenderType.Solid:
self._gl.glEnable(self._gl.GL_DEPTH_TEST)
self._gl.glDepthMask(self._gl.GL_TRUE)
elif self._render_type == self.RenderType.Transparent:
self._gl.glEnable(self._gl.GL_DEPTH_TEST)
self._gl.glDepthMask(self._gl.GL_FALSE)
elif self._render_type == self.RenderType.Overlay:
self._gl.glDisable(self._gl.GL_DEPTH_TEST)
if self._blend_mode == self.BlendMode.NoBlending:
self._gl.glDisable(self._gl.GL_BLEND)
elif self._blend_mode == self.BlendMode.Normal:
self._gl.glEnable(self._gl.GL_BLEND)
self._gl.glBlendFunc(self._gl.GL_SRC_ALPHA, self._gl.GL_ONE_MINUS_SRC_ALPHA)
elif self._blend_mode == self.BlendMode.Additive:
self._gl.glEnable(self._gl.GL_BLEND)
self._gl.glBlendFunc(self._gl.GL_SRC_ALPHA, self._gl.GL_ONE)
if self._state_setup_callback:
self._state_setup_callback(self._gl)
self._view_matrix = camera.getWorldTransformation().getInverse()
self._projection_matrix = camera.getProjectionMatrix()
self._view_projection_matrix = camera.getProjectionMatrix().multiply(self._view_matrix)
self._shader.updateBindings(
view_matrix = self._view_matrix,
projection_matrix = self._projection_matrix,
view_projection_matrix = self._view_projection_matrix,
view_position = camera.getWorldPosition(),
light_0_position = camera.getWorldPosition() + Vector(0, 50, 0)
)
# The VertexArrayObject (VAO) works like a VCR, recording buffer activities in the GPU.
# When the same buffers are used elsewhere, one can bind this VertexArrayObject to
# the context instead of uploading all buffers again.
if OpenGLContext.properties["supportsVertexArrayObjects"]:
vao = QOpenGLVertexArrayObject()
vao.create()
if vao.isCreated():
vao.bind()
else:
Logger.log("e", "VAO could not be created; continuing without one.")
for item in self._items:
self._renderItem(item)
if self._state_teardown_callback:
self._state_teardown_callback(self._gl)
self._shader.release()
def _renderItem(self, item):
transformation = item["transformation"]
mesh = item["mesh"]
normal_matrix = None
if mesh.hasNormals():
normal_matrix = copy.deepcopy(transformation)
normal_matrix.setRow(3, [0, 0, 0, 1])
normal_matrix.setColumn(3, [0, 0, 0, 1])
normal_matrix = normal_matrix.getInverse().getTransposed()
model_view_matrix = copy.deepcopy(transformation).preMultiply(self._view_matrix)
model_view_projection_matrix = copy.deepcopy(transformation).preMultiply(self._view_projection_matrix)
self._shader.updateBindings(
model_matrix = transformation,
normal_matrix = normal_matrix,
model_view_matrix = model_view_matrix,
model_view_projection_matrix = model_view_projection_matrix
)
if item["uniforms"] is not None:
self._shader.updateBindings(**item["uniforms"])
vertex_buffer = OpenGL.getInstance().createVertexBuffer(mesh)
vertex_buffer.bind()
if self._render_range is None:
index_buffer = OpenGL.getInstance().createIndexBuffer(mesh)
else:
# glDrawRangeElements does not work as expected and we could not get the index
# offset working. Instead we upload only the clipped part of the index array,
# so the start index always becomes 0.
index_buffer = OpenGL.getInstance().createIndexBuffer(
mesh, force_recreate = True, index_start = self._render_range[0], index_stop = self._render_range[1])
if index_buffer is not None:
index_buffer.bind()
self._shader.enableAttribute("a_vertex", "vector3f", 0)
offset = mesh.getVertexCount() * 3 * 4
if mesh.hasNormals():
self._shader.enableAttribute("a_normal", "vector3f", offset)
offset += mesh.getVertexCount() * 3 * 4
if mesh.hasColors():
self._shader.enableAttribute("a_color", "vector4f", offset)
offset += mesh.getVertexCount() * 4 * 4
if mesh.hasUVCoordinates():
self._shader.enableAttribute("a_uvs", "vector2f", offset)
offset += mesh.getVertexCount() * 2 * 4
for attribute_name in mesh.attributeNames():
attribute = mesh.getAttribute(attribute_name)
self._shader.enableAttribute(attribute["opengl_name"], attribute["opengl_type"], offset)
if attribute["opengl_type"] == "vector2f":
offset += mesh.getVertexCount() * 2 * 4
elif attribute["opengl_type"] == "vector4f":
offset += mesh.getVertexCount() * 4 * 4
elif attribute["opengl_type"] == "int":
offset += mesh.getVertexCount() * 4
elif attribute["opengl_type"] == "float":
offset += mesh.getVertexCount() * 4
else:
Logger.log("e", "Attribute with name [%s] uses non implemented type [%s]." % (attribute["opengl_name"], attribute["opengl_type"]))
self._shader.disableAttribute(attribute["opengl_name"])
if mesh.hasIndices():
if self._render_range is None:
if self._render_mode == self.RenderMode.Triangles:
self._gl.glDrawElements(self._render_mode, mesh.getFaceCount() * 3, self._gl.GL_UNSIGNED_INT, None)
else:
self._gl.glDrawElements(self._render_mode, mesh.getFaceCount(), self._gl.GL_UNSIGNED_INT, None)
else:
if self._render_mode == self.RenderMode.Triangles:
self._gl.glDrawRangeElements(self._render_mode, self._render_range[0], self._render_range[1], self._render_range[1] - self._render_range[0], self._gl.GL_UNSIGNED_INT, None)
else:
self._gl.glDrawElements(self._render_mode, self._render_range[1] - self._render_range[0], self._gl.GL_UNSIGNED_INT, None)
else:
self._gl.glDrawArrays(self._render_mode, 0, mesh.getVertexCount())
self._shader.disableAttribute("a_vertex")
self._shader.disableAttribute("a_normal")
self._shader.disableAttribute("a_color")
self._shader.disableAttribute("a_uvs")
for attribute_name in mesh.attributeNames():
attribute = mesh.getAttribute(attribute_name)
self._shader.disableAttribute(attribute.get("opengl_name"))
vertex_buffer.release()
if index_buffer is not None:
index_buffer.release()
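# A minimal usage sketch (the scene-node accessors here are illustrative
# assumptions, not part of this module):
#
#   batch = RenderBatch(shader, type = RenderBatch.RenderType.Transparent, sort = 10)
#   batch.addItem(node.getWorldTransformation(), node.getMeshData())
#   batch.render(camera)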
|
lgpl-3.0
| -598,423,587,618,263,700 | 43.914826 | 200 | 0.631128 | false |
nate1001/chess_jay
|
gui.py
|
1
|
5079
|
'''
Copyright Nate Carson 2012
'''
from PyQt4 import QtCore, QtGui, QtSvg
import db
import data
# 'settings' is referenced throughout this file but never imported in the
# original; it is assumed to be a sibling module with board/display settings.
import settings
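# NOTE: the two setAttackSum methods below appear to have been extracted from
# their enclosing board/square classes; the class definitions are elided here.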
def setAttackSum(self, forces):
for force in forces:
self._squares[force.tid].setAttackSum(force.c_white, force.c_black)
def setAttackSum(self, c_white, c_black):
c_white, c_black = c_white or 0, c_black or 0
factor = 30
color = QtGui.QColor(
min(128 + c_black * factor, 255),
max(128 - (c_white + c_black) * (factor/2), 0),
min(128 + c_white * factor, 255)
)
'''
val = (c_white - c_black) * 32
o = abs(c_white) + abs(c_black) * 64
color = QtGui.QColor(
min(max(128 - val, 0), 255)
,128
,max(min(128 + val, 255), 0)
,min(o, 255)
)
#self._piece and self._piece.setOpacity(255)
#self._piece and self._piece.setZValue(100)
self.setBrush(QtGui.QBrush(color))
'''
def setByMove(self, move):
if self._current_move is None:
self.setByString(move.boardstring())
# if we are going forward one move
elif self._current_move.halfmove() + 1 == move.halfmove():
self.movePiece(move.start, move.end)
# else re-init board
else:
self.setByString(move.boardstring())
self._current_move = move
class MainWindow(QtGui.QMainWindow):
def __init__(self):
super(MainWindow, self).__init__()
self.scene = BoardScene(settings.initial_pos)
layout = QtGui.QHBoxLayout()
#layout.addWidget(self.toolBox)
self.view = ChessView(self.scene)
layout.addWidget(self.view)
self.widget = QtGui.QWidget()
self.widget.setLayout(layout)
self.setCentralWidget(self.widget)
self.setWindowTitle("Chess Analyzer")
self.createActions()
self.createMenus()
self.createDocks()
def createActions(self):
self.action_exit = QtGui.QAction("Quit", self,
shortcut="Ctrl+Q", statusTip="Quit",
triggered=self.close)
self.action_show_square_labels = QtGui.QAction("Square Labels", self,
checkable=True, checked=settings.show_labels, statusTip="",
triggered=self.view.toggleLabels)
self.action_show_guides = QtGui.QAction("Guides", self,
checkable=True, checked=settings.show_guides, statusTip="",
triggered=self.view.toggleGuides)
def createMenus(self):
self.file_menu = self.menuBar().addMenu("&File")
self.file_menu.addAction(self.action_exit)
self.view_menu = self.menuBar().addMenu("&View")
self.view_menu.addAction(self.action_show_square_labels)
self.view_menu.addAction(self.action_show_guides)
def createDocks(self):
self.move_dock = Dock(MoveList, 'Moves', self,
self.scene.setBoard,
self.scene.setAttacked,
self.scene.setProtected,
self.scene.setAttackSum,
)
self.game_dock = Dock(GameList, 'Games', self, self.move_dock.items)
class Dock(QtGui.QDockWidget):
def __init__(self, list_class, name, parent, *args):
super(Dock, self).__init__(name, parent)
self.items = list_class(self, *args)
self.setWidget(self.items)
parent.addDockWidget(QtCore.Qt.RightDockWidgetArea, self)
parent.view_menu.addAction(self.toggleViewAction())
class DBList(QtGui.QListWidget):
def __init__(self, parent, *args):
super(DBList, self).__init__(parent, *args)
self.activated.connect(self.onActivate)
self.currentRowChanged.connect(self.onRowChanged)
self.load()
def _init(self, *args):
select = self.select(*args)
self.data = [row for row in select]
# clear any previous items
while self.takeItem(0):
pass
self.addItems([str(row) for row in self.data])
self.update()
def onActivate(self, index):
datum = self.data[index.row()]
self.doActivate(datum)
def onRowChanged(self, index):
datum = self.data[index]
self.doActivate(datum)
def select(self):
return self.klass.select()
def doActivate(self, index):
raise NotImplementedError
def load(self, *args):
raise NotImplementedError
class GameList(DBList):
klass = db.Games
def __init__(self, parent, move_list):
super(GameList, self).__init__(parent)
self.move_list = move_list
def load(self):
self._init()
def doActivate(self, game):
self.move_list.load(game.id)
class MoveList(DBList):
klass = db.Moves
def __init__(self, parent, callback, attacked_callback, protected_callback, attacksum_callback):
super(MoveList, self).__init__(parent)
self.callback = callback
self.attacked_callback = attacked_callback
self.protected_callback = protected_callback
self.attacksum_callback = attacksum_callback
def select(self, *args):
return self.klass.select(args[0])
def load(self, *args):
if args:
self._init(args)
def doActivate(self, move):
self.callback(move)
forces = db.Force.select(move.fen)
self.attacksum_callback(forces)
#attacked = db.Attacked.select(move.fen)
#self.attacked_callback(attacked)
#protected = db.Protected.select(move.fen)
#self.protected_callback(protected)
if __name__ == '__main__':
import sys
app = QtGui.QApplication(sys.argv)
mainWindow = MainWindow()
s = settings.board_size
l = settings.square_size
mainWindow.setGeometry(100, 50, int(l + s*1.1 + 100), int(l + s*1.2 + 100))
mainWindow.show()
sys.exit(app.exec_())
|
gpl-3.0
| -2,556,928,310,813,353,500 | 21.674107 | 97 | 0.689703 | false |
jimsrc/seatos
|
mixed/figs/sheaths.paper/src/together4.py
|
1
|
11024
|
#!/usr/bin/env ipython
from pylab import *
import numpy as np
import console_colors as ccl
from scipy.io.netcdf import netcdf_file
import os, sys
import matplotlib.patches as patches
import matplotlib.transforms as transforms
from numpy import array
from matplotlib.gridspec import GridSpec
import matplotlib.pyplot as plt
class gral:
def __init__(self):
self.name='name'
TS = 11
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
def makefig(ax, mc, sh, TEXT, TEXT_LOC, YLIMS, varname):
LW = 0.3 # linewidth
MS = 1.5
fmc, fsh = 3.0, 1.0 # temporal scale factors
if(varname == 'Temp'):
mc.med /= 1.0e4; sh.med /= 1.0e4
mc.avr /= 1.0e4; sh.avr /= 1.0e4
mc.std_err /= 1.0e4; sh.std_err /= 1.0e4
YLIMS[0] /= 1.0e4; YLIMS[1] /= 1.0e4
TEXT_LOC['mc'][1] /= 1.0e4
TEXT_LOC['sh'][1] /= 1.0e4
# MC curves
time = fsh+fmc*mc.tnorm
cc = time>=fsh
ax.plot(time[cc], mc.avr[cc], 'o-', color='black', markersize=MS, label='mean', lw=LW)
ax.plot(time[cc], mc.med[cc], 'o-', color='red', alpha=.8, markersize=MS, markeredgecolor='none', label='median', lw=LW)
# MC shaded error band
inf = mc.avr + mc.std_err/np.sqrt(mc.nValues)
sup = mc.avr - mc.std_err/np.sqrt(mc.nValues)
ax.fill_between(time[cc], inf[cc], sup[cc], facecolor='gray', alpha=0.5)
trans = transforms.blended_transform_factory(
ax.transData, ax.transAxes)
rect1 = patches.Rectangle((fsh, 0.), width=fmc, height=1,
transform=trans, color='blue',
alpha=0.3)
ax.add_patch(rect1)
# sheath curves
time = fsh*sh.tnorm
cc = time<=fsh
ax.plot(time[cc], sh.avr[cc], 'o-', color='black', markersize=MS, lw=LW)
ax.plot(time[cc], sh.med[cc], 'o-', color='red', alpha=.8, markersize=MS, markeredgecolor='none', lw=LW)
# sheath shaded error band
inf = sh.avr + sh.std_err/np.sqrt(sh.nValues)
sup = sh.avr - sh.std_err/np.sqrt(sh.nValues)
ax.fill_between(time[cc], inf[cc], sup[cc], facecolor='gray', alpha=0.5)
#trans = transforms.blended_transform_factory(
# ax.transData, ax.transAxes)
rect1 = patches.Rectangle((0., 0.), width=fsh, height=1,
transform=trans, color='orange',
alpha=0.3)
ax.add_patch(rect1)
#ax.legend(loc='best', fontsize=10)
ax.tick_params(labelsize=TS)
ax.grid()
ax.set_xlim(-2.0, 7.0)
ax.set_ylim(YLIMS)
ax.text(TEXT_LOC['mc'][0], TEXT_LOC['mc'][1], TEXT['mc'], fontsize=7)
ax.text(TEXT_LOC['sh'][0], TEXT_LOC['sh'][1], TEXT['sh'], fontsize=7)
if(varname in ('beta','Temp', 'rmsB', 'rmsBoB')):
ax.set_yscale('log')
else:
ax.set_yscale('linear')
return ax
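# Note on makefig: the shaded band is the standard error of the mean
# (std_err / sqrt(nValues)) around the average curve, drawn separately for the
# sheath (normalized time [0, 1], orange) and the MC ([1, 4], blue) intervals.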
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
stf = {}
stf['B'] = {
'label': 'B [nT]',
'ylims': [5., 29.],
'text_loc_1': {'mc':[4.5, 15.0], 'sh':[-1.95, 12.0]},
'text_loc_2': {'mc':[4.5, 18.0], 'sh':[-1.95, 12.0]},
'text_loc_3': {'mc':[4.5, 12.0], 'sh':[-1.95, 12.0]},
'nrow': 1
}
stf['V'] = {
'label': 'Vsw [km/s]',
'ylims': [350., 800.],
'text_loc_1': {'mc':[4.5, 500.0], 'sh':[-1.95, 520.0]},
'text_loc_2': {'mc':[4.5, 600.0], 'sh':[-1.95, 600.0]},
'text_loc_3': {'mc':[4.5, 410.0], 'sh':[-1.95, 600.0]},
'nrow': 2
}
stf['rmsBoB'] = {
'label': 'rmsBoB [1]',
'ylims': [0.015, 0.21],
'text_loc_1': {'mc':[4.5, 0.020], 'sh':[-1.95, 0.02]},
'text_loc_2': {'mc':[4.5, 0.095], 'sh':[-1.95, 0.02]},
'text_loc_3': {'mc':[4.5, 0.099], 'sh':[-1.95, 0.02]},
'nrow': 6
}
stf['rmsB'] = {
'label': 'rmsB [nT]',
'ylims': [0.1, 4.0],
'text_loc_1': {'mc':[4.5, 1.0], 'sh':[-1.95, 1.3]},
'text_loc_2': {'mc':[4.5, 1.0], 'sh':[-1.95, 1.3]},
'text_loc_3': {'mc':[4.5, 1.0], 'sh':[-1.95, 1.3]},
'nrow': 1
}
stf['beta'] = {
'label': '$\\beta$ [1]',
'ylims': [0.02, 10.0],
'text_loc_1': {'mc':[4.5, 0.1], 'sh':[-1.95, 0.2]},
'text_loc_2': {'mc':[4.5, 0.1], 'sh':[-1.95, 0.2]},
'text_loc_3': {'mc':[4.5, 0.1], 'sh':[-1.95, 0.2]},
'nrow': 5
}
stf['Pcc'] = {
'label': '$n_p$ [$cm^{-3}$]',
'ylims': [1, 23],
'text_loc_1': {'mc':[4.5, 14], 'sh':[-1.95, 16.0]},
'text_loc_2': {'mc':[4.5, 14], 'sh':[-1.95, 16.0]},
'text_loc_3': {'mc':[4.5, 11], 'sh':[-1.95, 18.0]},
'nrow': 3
}
stf['Temp'] = {
'label': 'T ($\\times 10^4$) [K]',
'ylims': [1e4, 100e4],
'text_loc_1': {'mc':[4.5, 18.0e4], 'sh':[-1.95, 20.0e4]},
'text_loc_2': {'mc':[4.5, 2.0e4], 'sh':[-1.95, 20.0e4]},
'text_loc_3': {'mc':[4.5, 2.0e4], 'sh':[-1.95, 20.0e4]},
'nrow': 4
}
stf['AlphaRatio'] = {
'label': 'alpha ratio [1]',
'ylims': [0.02, 0.09],
'text_loc_1': {'mc':[4.5, 0.022], 'sh':[-1.95, 0.07]},
'text_loc_2': {'mc':[4.5, 0.022], 'sh':[-1.95, 0.07]},
'text_loc_3': {'mc':[4.5, 0.022], 'sh':[-1.95, 0.07]}
}
stf['CRs'] = {
'label': '$n_{GCR}$ [%]',
'ylims': [-8.0, 2.0],
'text_loc_1': {'mc':[4.5, -4.0], 'sh':[-1.95, -4.5]},
'text_loc_2': {'mc':[4.5, -7.0], 'sh':[-1.95, -4.5]},
'text_loc_3': {'mc':[4.5, -7.5], 'sh':[-1.95, -4.5]},
'nrow': 2
}
TEXT = {}
dir_figs = sys.argv[1] #'../figs'
#dir_inp_mc = '../../../../mcs/ascii/MCflag2/wShiftCorr/_test_Vmc_'
#dir_inp_sh = '../../../../sheaths/ascii/MCflag2/wShiftCorr/_test_Vmc_'
dir_inp_mc = os.environ['RIGHT']
dir_inp_sh = os.environ['LEFT']
vlo = [100.0, 450.0, 550.0]
vhi = [450.0, 550.0, 3000.0]
nvars = len(stf.keys())
print " input: "
print " %s " % dir_inp_mc
print " %s \n" % dir_inp_sh
print " vlo, vhi: ", (vlo, vhi), '\n'
print " nvars: ", nvars
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
i=2
#fig = figure(1, figsize=(12, 15))
f = plt.figure(1, figsize=(7, 5.8))
nr = 1 # scale for row size
gs = GridSpec(nrows=3*nr, ncols=2*3)
gs.update(left=0.1, right=0.98, hspace=0.13, wspace=0.15)
for i in range(3):
fname_inp = 'MCflag2_2before.4after_fgap0.2_Wang90.0_vlo.%3.1f.vhi.%3.1f' % (vlo[i], vhi[i])
fname_inp_nro_mc = dir_inp_mc + '/n.events_' + fname_inp + '.txt'
fname_inp_nro_sh = dir_inp_sh + '/n.events_' + fname_inp + '.txt'
#n = 1 # number of row
print " ______ col %d ______" % i
for varname in ('rmsB', 'CRs'):
# open the files to find out the number of events
fnro_mc = open(fname_inp_nro_mc, 'r')
fnro_sh = open(fname_inp_nro_sh, 'r')
for lmc, lsh in zip(fnro_mc, fnro_sh):
l_mc = lmc.split()
l_sh = lsh.split()
if varname==l_mc[0]: # variable name
n = stf[varname]['nrow']
ax = plt.subplot(gs[(n-1)*nr:n*nr, (2*i):(2*(i+1))])
Nfinal_mc, Nfinal_sh = int(l_mc[1]), int(l_sh[1]) # nmbr of events
fnro_mc.close(); fnro_sh.close()
break
print " %s"%varname, ' Nfinal_mc:%d' % Nfinal_mc, 'Nfinal_sh:%d' % Nfinal_sh
mc, sh = gral(), gral()
fname_inp_mc = dir_inp_mc + '/' + fname_inp + '_%s.txt' % varname
fname_inp_sh = dir_inp_sh + '/' + fname_inp + '_%s.txt' % varname
mc.tnorm, mc.med, mc.avr, mc.std_err, mc.nValues = np.loadtxt(fname_inp_mc).T
sh.tnorm, sh.med, sh.avr, sh.std_err, sh.nValues = np.loadtxt(fname_inp_sh).T
# number of events with more than 80% non-gap data
TEXT['mc'] = ' N: %d' % Nfinal_mc
TEXT['sh'] = ' N: %d' % Nfinal_sh
if(vlo[i]==100.0):
TEXT_LOC = stf[varname]['text_loc_1'] #1.7, 12.0
elif(vlo[i]==450.0):
TEXT_LOC = stf[varname]['text_loc_2'] #1.7, 12.0
elif(vlo[i]==550.0):
TEXT_LOC = stf[varname]['text_loc_3'] #1.7, 12.0
else:
print " ----> ERROR con 'v_lo'!"
raise SystemExit
ylims = array(stf[varname]['ylims']) #[4., 17.]
ylabel = stf[varname]['label'] #'B [nT]'
ax = makefig(ax, mc, sh, TEXT, TEXT_LOC, ylims, varname)
# ticks & labels x
ax.tick_params(labelsize=TS)
if n==2: #n==nvars-1:
ax.set_xlabel('time normalized to\nsheath/MC passage [1]', fontsize=11)
#ax.xaxis.set_ticklabels([-1,0,1,2,3])
xticks = [-2,-1,0,1,2,3,4,5,6,7]
ax.set_xticks(xticks)
ax.set_xticklabels(xticks)
else:
ax.set_xlabel('')
#ax.get_xaxis().set_ticks([])
ax.xaxis.set_ticklabels([])
# ticks & labels y
if i==0:
ax.set_ylabel(ylabel, fontsize=15)
else:
ax.set_ylabel('')
#ax.get_yaxis().set_ticks([])
ax.yaxis.set_ticklabels([])
#+++++++++++++++++++++++++ nCR & model-fit
#dirs = {}
#dirs['sheath'] = '../../../../sheaths/ascii/MCflag2/wShiftCorr/_test_Vmc_'
#dirs['mc'] = '../../../../mcs/ascii/MCflag2/wShiftCorr/_test_Vmc_'
#dirs['fname_inputs'] = 'MCflag2_2before.4after_fgap0.2_Wang90.0'
#dirs['figs'] = dir_figs
#
#par = {}
#par['lo'] = {
# 'vlo': 100.0,
# 'vhi': 450.0,
# 'tau': 2.36,
# 'bp' : 0.0,
# 'q' : -9.373,
# 'off': 0.89,
# 'bo' : 16.15
#}
#par['mid'] = {
# 'vlo': 450.0,
# 'vhi': 550.0,
# 'tau': 4.18,
# 'bp' : -0.9,
# 'q' : -6.02,
# 'off': 0.0,
# 'bo' : 11.87
#}
#par['hi'] = {
# 'vlo': 550.0,
# 'vhi': 3000.0,
# 'tau': 5.78,
# 'bp' : -0.18,
# 'q' : -5.53,
# 'off': 1.01,
# 'bo' : 14.48
#}
#
#from funcs import build_plot
#n = 3; i=0
#for i, name in zip(range(3), ('lo', 'mid', 'hi')):
# ax = plt.subplot(gs[(n-1)*nr:n*nr, (2*i):(2*(i+1))])
# build_plot(dirs, par[name], ax)
# if i==0:
# ax.set_ylabel('$n_{GCR}$ [%]', fontsize=15)
# else:
# ax.set_ylabel('')
# ax.yaxis.set_ticklabels([])
#+++++++++++++++++++++++++++++++++++++++++
#fig.tight_layout()
#fname_fig = dir_figs + '/fig_vlo.%3.1f_vhi.%3.1f_%s.png'%(vlo, vhi, varname)
fname_fig = '%s/figs_splitted_3.png' % dir_figs
savefig(fname_fig, dpi=150, bbox_inches='tight')
close()
print "\n output en:\n %s\n" % fname_fig
#EOF
|
mit
| 3,020,075,155,036,923,400 | 35.026144 | 124 | 0.444757 | false |
wevoice/wesub
|
apps/videos/templatetags/recent_activity.py
|
1
|
1465
|
# Amara, universalsubtitles.org
#
# Copyright (C) 2013 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.
from datetime import date
from django import template
from django.conf import settings
from django.utils.dateformat import format as date_format
from activity.models import ActivityRecord
register = template.Library()
LIMIT = settings.RECENT_ACTIVITIES_ONPAGE
@register.inclusion_tag('videos/_recent_activity.html')
def recent_activity(user):
qs = ActivityRecord.objects.for_user(user)
return {
'records': qs[:LIMIT],
'user_info': user
}
@register.inclusion_tag('videos/_video_activity.html')
def video_activity(video, user):
qs = ActivityRecord.objects.for_video(video)
return {
'records': qs[:LIMIT],
'video': video,
'user': user
}
|
agpl-3.0
| 1,169,272,093,930,581,000 | 29.520833 | 74 | 0.734471 | false |
jenca-cloud/jenca-authentication
|
storage/storage.py
|
1
|
6752
|
"""
A storage service for use by a Jenca Cloud authentication service.
"""
import os
from flask import Flask, json, jsonify, request, make_response
from flask.ext.sqlalchemy import SQLAlchemy
from flask_jsonschema import JsonSchema, ValidationError
from flask_negotiate import consumes
from requests import codes
db = SQLAlchemy()
class User(db.Model):
"""
A user has an email address and a password hash.
"""
email = db.Column(db.String, primary_key=True)
password_hash = db.Column(db.String)
def create_app(database_uri):
"""
Create an application with a database in a given location.
:param database_uri: The location of the database for the application.
:type database_uri: string
:return: An application instance.
:rtype: ``Flask``
"""
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = database_uri
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True
db.init_app(app)
with app.app_context():
db.create_all()
return app
SQLALCHEMY_DATABASE_URI = os.environ.get('SQLALCHEMY_DATABASE_URI',
'sqlite:///:memory:')
POSTGRES_HOST = os.environ.get('POSTGRES_HOST', None)
POSTGRES_USER = os.environ.get('POSTGRES_USER', 'username')
POSTGRES_PASSWORD = os.environ.get('POSTGRES_PASSWORD', 'password')
POSTGRES_DATABASE = os.environ.get('POSTGRES_DATABASE', 'jenca-authorisation')
if POSTGRES_HOST is not None:
if POSTGRES_HOST.find('env:') == 0:
POSTGRES_HOST = os.environ.get(POSTGRES_HOST.split(':')[1])
SQLALCHEMY_DATABASE_URI = "postgres://%s:%s@%s/%s" % (
POSTGRES_USER,
POSTGRES_PASSWORD,
POSTGRES_HOST,
POSTGRES_DATABASE
)
app = create_app(database_uri=SQLALCHEMY_DATABASE_URI)
# Inputs can be validated using JSON schema.
# Schemas are in app.config['JSONSCHEMA_DIR'].
# See https://github.com/mattupstate/flask-jsonschema for details.
app.config['JSONSCHEMA_DIR'] = os.path.join(app.root_path, 'schemas')
jsonschema = JsonSchema(app)
def load_user_from_id(user_id):
"""
:param user_id: The ID of the user Flask is trying to load.
:type user_id: string
:return: The user which has the email address ``user_id`` or ``None`` if
there is no such user.
:rtype: ``User`` or ``None``.
"""
return User.query.filter_by(email=user_id).first()
@app.errorhandler(ValidationError)
def on_validation_error(error):
"""
:resjson string title: An explanation that there was a validation error.
:resjson string detail: The precise validation error.
:status 400:
"""
return jsonify(
title='There was an error validating the given arguments.',
# By default on Python 2 errors will look like:
# "u'password' is a required property".
# This removes all "u'"s, and so could be dangerous.
detail=error.message.replace("u'", "'"),
), codes.BAD_REQUEST
@app.route('/users/<email>', methods=['GET', 'DELETE'])
@consumes('application/json')
def specific_user_route(email):
"""
**DELETE**:
Delete a particular user.
:reqheader Content-Type: application/json
:resheader Content-Type: application/json
:resjson string email: The email address of the deleted user.
:resjson string password_hash: The password hash of the deleted user.
:status 200: The user has been deleted.
:status 404: There is no user with the given ``email``.
**GET**:
Get information about particular user.
:reqheader Content-Type: application/json
:resheader Content-Type: application/json
:resjson string email: The email address of the user.
:resjson string password_hash: The password hash of the user.
:status 200: The requested user's information is returned.
:status 404: There is no user with the given ``email``.
"""
user = load_user_from_id(email)
if user is None:
return jsonify(
title='The requested user does not exist.',
detail='No user exists with the email "{email}"'.format(
email=email),
), codes.NOT_FOUND
elif request.method == 'DELETE':
db.session.delete(user)
db.session.commit()
return_data = jsonify(email=user.email, password_hash=user.password_hash)
return return_data, codes.OK
@jsonschema.validate('users', 'create')
def create_user():
"""
Create a new user. See ``users_route`` for details.
"""
email = request.json['email']
password_hash = request.json['password_hash']
if load_user_from_id(email) is not None:
return jsonify(
title='There is already a user with the given email address.',
detail='A user already exists with the email "{email}"'.format(
email=email),
), codes.CONFLICT
user = User(email=email, password_hash=password_hash)
db.session.add(user)
db.session.commit()
return jsonify(email=email, password_hash=password_hash), codes.CREATED
@app.route('/users', methods=['GET', 'POST'])
@consumes('application/json')
def users_route():
"""
**POST**:
Create a new user.
:param email: The email address of the new user.
:type email: string
:param password_hash: A password hash to associate with the given ``email``
address.
:type password_hash: string
:reqheader Content-Type: application/json
:resheader Content-Type: application/json
:resjson string email: The email address of the new user.
:resjson string password_hash: The password hash of the new user.
:status 200: A user with the given ``email`` and ``password_hash`` has been
created.
:status 409: There already exists a user with the given ``email``.
**GET**:
Get information about all users.
:reqheader Content-Type: application/json
:resheader Content-Type: application/json
:resjsonarr string email: The email address of a user.
:resjsonarr string password_hash: The password hash of a user.
:status 200: Information about all users is returned.
"""
if request.method == 'POST':
return create_user()
# If the method type is not POST, it is GET.
details = [
{'email': user.email, 'password_hash': user.password_hash} for user
in User.query.all()]
return make_response(
json.dumps(details),
codes.OK,
{'Content-Type': 'application/json'})
if __name__ == '__main__': # pragma: no cover
# Specifying 0.0.0.0 as the host tells the operating system to listen on
# all public IPs. This makes the server visible externally.
# See http://flask.pocoo.org/docs/0.10/quickstart/#a-minimal-application
app.run(host='0.0.0.0', port=5001)
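# A usage sketch against a locally running instance (the email/hash values are
# hypothetical; the Content-Type header is required by the @consumes decorator,
# and requests' json= parameter sets it automatically for the POST):
#
#   import requests
#   requests.post('http://localhost:5001/users',
#                 json={'email': 'alice@example.com', 'password_hash': 'abc123'})
#   requests.get('http://localhost:5001/users/alice@example.com',
#                headers={'Content-Type': 'application/json'})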
|
mit
| -8,743,100,910,322,706,000 | 30.551402 | 79 | 0.659656 | false |
wxwilcke/MINOS
|
directives/optima_D1.MP.py
|
1
|
8757
|
#!/usr/bin/python3
import logging
from operator import itemgetter
from multiprocessing import Process, Manager, Pool, cpu_count
from functools import partial
from math import floor
from timeit import default_timer as timer
import rdflib
from .abstract_instruction_set import AbstractInstructionSet
from readers import rdf
from writers import rule_set, pickler
from samplers import by_definition as sampler
from algorithms.semantic_rule_learning import generate_semantic_item_sets
from algorithms.semantic_rule_learning_mp import generate_semantic_association_rules,\
generate_common_behaviour_sets,\
extend_common_behaviour_sets,\
evaluate_rules
NUM_CORES_PER_CPU = 2
NUM_OF_WORKERS = cpu_count() * NUM_CORES_PER_CPU
class Directive(AbstractInstructionSet):
def __init__(self, time=""):
self.time = time
self.logger = logging.getLogger(__name__)
def print_header(self):
header = "OPTIMA: Artefact Production Events with 3 attributes"
print(header)
print('-' * len(header))
def load_dataset(self, abox, tbox):
# read graphs
kg_i = rdf.read(local_path=abox)
kg_s = rdf.read(local_path=tbox)
# sample by pattern
pattern = (None,
rdflib.RDF.type,
rdflib.URIRef("http://purl.org/crmeh#EHE1002_ContextFindProductionEvent"))
# define context
# spoor with vulling
context = [rdflib.URIRef("http://purl.org/dc/elements/1.1/source"),
[rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P4F_has_time-span"),
rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P1F_is_identified_by"),
rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P2F_has_type"),
rdflib.RDF.value],
[rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P108F_has_produced"),
rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P2F_has_type"),
rdflib.RDF.value]]
kg_i_sampled = kg_i.sample(sampler, patterns=[pattern], context=context, strict_context=False)
return (kg_i_sampled, kg_s)
def run_program(self, dataset, parameters):
self.logger.info("Starting run\nParameters:\n{}".format(
"\n".join(["\t{}: {}".format(k,v) for k,v in parameters.items()])))
self.logger.info("Distributing load over {} cores".format(NUM_OF_WORKERS))
kg_i, kg_s = dataset
# fit model
t0 = timer()
# MP manager
manager = Manager()
# generate semantic item sets from sampled graph
si_sets = manager.dict(generate_semantic_item_sets(kg_i))
# generate common behaviour sets
work = manager.Queue()
keys = list(si_sets.keys())
slices = self.diagonal_matrix_slicer(keys)
cbs_sets = manager.list()
pool = []
for i in range(NUM_OF_WORKERS):
p = Process(target=generate_common_behaviour_sets, args=(si_sets,
cbs_sets,
work,
parameters["similarity_threshold"]))
p.daemon = True
p.start()
pool.append(p)
for slce in slices:
work.put(slce)
for p in pool:
work.put(None)
# join shared variables
for p in pool:
p.join()
# extend common behaviour sets
cbs_size = 2
cbs_sets_extended = manager.list(cbs_sets)
while cbs_size < parameters["max_cbs_size"]:
func = partial(extend_common_behaviour_sets, cbs_sets_extended, parameters["similarity_threshold"])
slices = self.diagonal_matrix_slicer(cbs_sets_extended)
cbs_sets_extention = manager.list()
with Pool(processes=NUM_OF_WORKERS) as pool:
it = pool.imap_unordered(func=func, iterable=slices)
while True:
try:
cbs_subset = next(it)
cbs_sets_extention.extend(cbs_subset)
except StopIteration:
break
cbs_sets.extend(cbs_sets_extention)
cbs_sets_extended = cbs_sets_extention
cbs_size *= 2
# generate semantic item sets from sampled graph association rules
rules = manager.list()
work = manager.Queue()
size = max(1, floor(len(cbs_sets) / NUM_OF_WORKERS))
slices = [slice(i, i+size) for i in range(0, len(cbs_sets), size)]
pool = []
for i in range(NUM_OF_WORKERS):
p = Process(target=generate_semantic_association_rules, args=(kg_i,
kg_s,
cbs_sets,
work,
rules,
parameters["minimal_local_support"]))
p.daemon = True
p.start()
pool.append(p)
for slce in slices:
work.put(slce)
for p in pool:
work.put(None)
# join shared variables
for p in pool:
p.join()
# calculate support and confidence, skip those not meeting minimum requirements
final_rule_set = manager.list()
work = manager.Queue()
size = max(1, floor(len(rules) / NUM_OF_WORKERS))
slices = [slice(i, i+size) for i in range(0, len(rules), size)]
pool = []
for i in range(NUM_OF_WORKERS):
p = Process(target=evaluate_rules, args=(kg_i,
rules,
work,
final_rule_set,
parameters["minimal_support"],
parameters["minimal_confidence"]))
p.daemon = True
p.start()
pool.append(p)
for slce in slices:
work.put(slce)
for p in pool:
work.put(None)
# join shared variables
for p in pool:
p.join()
# sorting rules on both support and confidence
final_rule_set.sort(key=itemgetter(2, 1), reverse=True)
# time took
t1 = timer()
dt = t1 - t0
self.logger.info("Program completed in {:.3f} ms".format(dt))
print(" Program completed in {:.3f} ms".format(dt))
self.logger.info("Found {} rules".format(len(final_rule_set)))
print(" Found {} rules".format(len(final_rule_set)))
return final_rule_set
def write_to_file(self, path="./of/latest", output=[]):
overwrite = False
compress = True
print(" Writing output to {}...".format(path))
rule_set.pretty_write(output, path, overwrite, compress)
pickler.write(output, path+".pickle", overwrite)
def run(self, abox, tbox, output_path):
self.print_header()
print(" {}\n".format(self.time))
parameters = {}
parameters["similarity_threshold"] = .9
parameters["max_cbs_size"] = 2
parameters["minimal_local_support"] = 0.7
parameters["minimal_support"] = 0.0
parameters["minimal_confidence"] = 0.5
print(" Importing Data Sets...")
dataset = self.load_dataset(abox, tbox)
print(" Initiated Pattern Learning...")
output = self.run_program(dataset, parameters)
if len(output) > 0:
self.write_to_file(output_path, output)
def diagonal_matrix_slicer(self, items=[]):
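# Split the upper-triangular pairwise workload (sum(range(n)) comparisons in
# total) over NUM_OF_WORKERS contiguous index ranges of roughly equal work.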
slices = []
n = len(items)
total_work_load = sum(range(n))
avg_work_load = total_work_load / NUM_OF_WORKERS
work_load_start = n
work_load_end = work_load_start
while len(slices) < NUM_OF_WORKERS:
work_load = 0
while work_load < avg_work_load and work_load_start > 0:
work_load_start -= 1
work_load = sum(range(work_load_end, work_load_start, -1))
slices.append(range(n-work_load_end, n-work_load_start))
work_load_end = work_load_start
return slices
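# The worker pools in run_program all follow the same queue-with-sentinel
# pattern; a minimal self-contained sketch of that pattern (all names here are
# illustrative, not part of this module):
#
#   from multiprocessing import Process, Manager
#
#   def worker(work, results):
#       while True:
#           item = work.get()
#           if item is None:          # sentinel: no more work
#               break
#           results.append(item * 2)  # stand-in for the real computation
#
#   if __name__ == '__main__':
#       m = Manager()
#       work, results = m.Queue(), m.list()
#       pool = [Process(target=worker, args=(work, results)) for _ in range(4)]
#       for p in pool: p.daemon = True; p.start()
#       for item in range(10): work.put(item)
#       for p in pool: work.put(None)   # one sentinel per worker
#       for p in pool: p.join()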
|
gpl-3.0
| 6,560,480,861,544,011,000 | 35.18595 | 111 | 0.518328 | false |
robotichead/NearBeach
|
tests/settings.py
|
1
|
3530
|
"""
Django Settings for TESTING PURPOSES
Do not utilise this settings.py file for your own project. Even if it is not
a production environment.
This file is only for the automatic testing and is not build for server use.
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'jz0k8%ecl#k!z+(9+5(^do1w!11ysus21m41m@i9c#u)*vk($o'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'NearBeach.apps.NearBeachConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'NearBeach.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
#WSGI_APPLICATION = 'untitled.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
# DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3',
# 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
# }
# }
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'test_db',
'USER': 'root',
'PASSWORD': 'rootpw',
'HOST': '127.0.0.1',
'PORT': '3306',
}
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
PROJECT_PATH = os.path.abspath(os.path.dirname(__file__))
PRIVATE_MEDIA_ROOT = os.path.join(PROJECT_PATH, 'private')
PRIVATE_MEDIA_SERVER = 'DefaultServer'
PRIVATE_MEDIA_URL = '/private/'
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static/')
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media/')
|
mit
| 4,166,642,644,931,561,000 | 24.955882 | 91 | 0.665722 | false |
CleverChuk/ices
|
Python/multijob_module.py
|
1
|
3479
|
"""
Author: Chukwubuikem Ume-Ugwa
Email: chubiyke@gmail.com
MIT License
Copyright (c) 2017 CleverChuk
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from dataparser import *
from multiprocessing import Pool, Process, Manager  # Process is needed by startChild below
import os
import time
manager = Manager()
heightD = manager.dict() # holds values for minimum height of each particle
TSIZE = 8 # data type size in bytes
N_OFFSETS = 44 # number of data offsets in each file
FCOLOR = genColor(N_OFFSETS, manager)
# Dimension of the simulation bed
xsize = 78
ysize = 112
zsize = 104
hOut = "HeightData"
def startChild(fname):
# Unused draft carried over from the original: it references names that are
# never defined in this module ('iam', 'producer', 'consumer') and is never
# called.
# DISALLOWED IN PYTHON
iam.fn = fname
dictn = iam.manager.dict()
mylist = iam.manager.list()
pool = Pool()
# passing offset multiplier to the producer task
pool.map(iam.producer, [i for i in range(1 , iam.N_OFFSETS)], 1)
# Feeds task from producers into the list
for i, j in self.dictn.items():
mylist.append(j[0])
# single process to handle plotting
proc = Process(target=iam.consumer, args=(mylist, ))
proc.start()
proc.join()
def multijob(fname):
"""
Handles reading and plotting of data in file with name fname
"""
print("Starting multijob from process: %d" % os.getpid())
fig = plt.figure()
axis = Axes3D(fig)
heightL = manager.list()
axis = Axes3D(fig)
axis.set_xlim([0,ysize])
axis.set_ylim([0,ysize])
axis.set_zlim([0,ysize])
axis.view_init(elev = 40, azim = 50)
coords = manager.list()
rho = readsingle(fname)
for i in range(1, N_OFFSETS):
eta_s = readsingle(fname, i * TSIZE)
# eta_s = process(rho, filter_eta(eta_s))
coords.append(getcoords(eta_s, xsize, ysize, zsize))
heightL.append(max(coords[-1][-2]) - min(coords[-1][-2]))
writtable(hOut,str(heightL).strip('[]'))
plot(coords, fig, axis, count = "ALL", fcolor = FCOLOR, multijob = (True,fname))
print("Finished multijob from process: %d" % os.getpid())
if __name__ == "__main__":
print("Starting mutiple jobs in a process task")
import timeit, sys
start_time = timeit.default_timer()
if(os.path.exists(hOut)):
os.remove(hOut)
pool = Pool()
files = list()
MAXCOUNT = 4
STEP = 2
START = 0
FNAME = "fullT{0}.dat"
## build the list of file names to work on
for i in range(START, MAXCOUNT, STEP):
files.append(FNAME.format(i))
pool.map(multijob, files, 1)
elapsed = timeit.default_timer() - start_time
print("total time %d seconds" % elapsed)
print("Finished multiple job in a process task")
|
mit
| -8,221,127,842,817,743,000 | 22.666667 | 83 | 0.716873 | false |
rudisherry666/paigow
|
mainsite/settings.py
|
1
|
6720
|
# Django settings for paigow project.
import os
import sys
# The project starts at mainsite/ rather than top-level at the application,
# but we use a lot of things from the paigow/ folder. Create a global for
# the paigow folder as well.
PROJECT_PATH = os.path.realpath(os.path.dirname(__file__)) # mainsite/
(PAIGOW_APP_PATH, DUMMY) = os.path.split(os.path.dirname(__file__))
PAIGOW_PATH = PAIGOW_APP_PATH + "/paigow"
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Rudi Sherry', 'rudisherry666@gmail.com'),
)
MANAGERS = ADMINS
# set up the database. For local development it's easiest to
# use sqlite3, so we do that, and we depend on the developer having
# set up an environment variable on their machine called "LOCAL_DEV"
# which is set to 'true'.
try:
if (bool(os.environ.get('LOCAL_DEV', False))):
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': PROJECT_PATH + '/database/paigow.sqlite',
'USER': '', # not needed since we're local, default is always trusted
'PASSWORD': '',
'HOST': '',
}
}
else:
# In heroku (where we deploy on the web), we use postgres; existence
# of the postgres database is set up by previous commands to heroku
# (TBD: make that some sort of automatic script), and the connection
# to it from python is set up by the file 'requirements.txt' which
# includes psycopg2 (the python extension for postgres).
#
# dj_database_url is an egg that uses DATABASE_URL to find the
# correct database, and that is also set up by previous heroku
# commands.
import dj_database_url
DATABASES = {
'default': dj_database_url.config(default='postgres://localhost')
}
except:
# uh-oh, something went wrong but we don't know what.
print "Unexpected error creating DATABASES:", sys.exc_info()
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Los_Angeles'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
PROJECT_PATH + "/static/",
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = 's1upu83yei)f#39&1473$atc63=80*q==jv*c%n#f03crfm68r'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
# this allows the messages app/framework to get messages into pages
TEMPLATE_CONTEXT_PROCESSORS = (
'django.core.context_processors.request',
'django.contrib.messages.context_processors.messages',
)
ROOT_URLCONF = 'mainsite.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'paigow.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
PAIGOW_PATH + '/templates/',
PROJECT_PATH + '/templates/',
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
# 'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
'paigow',
)
# For testing we get fixtures from here
FIXTURE_DIRS = (
PAIGOW_PATH + '/fixtures/',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
|
apache-2.0
| 5,428,095,244,374,039,000 | 32.768844 | 86 | 0.70372 | false |
puiterwijk/HttpCA
|
Signer/httpca_signer/database.py
|
1
|
1817
|
# Copyright (c) 2013, Patrick Uiterwijk <puiterwijk@gmail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Patrick Uiterwijk nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Patrick Uiterwijk BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from configuration import config
# bool() on a non-empty string such as 'False' is still True, so parse the
# echo flag explicitly (config is assumed to follow the ConfigParser API).
engine = create_engine(config.get('database', 'URI'), echo=config.get('database', 'echo').strip().lower() in ('1', 'true', 'yes', 'on'))
session = sessionmaker(bind=engine)  # a Session factory: call session() to obtain a new Session
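# A usage sketch (the mapped models are assumed to be defined elsewhere):
#
#   s = session()
#   try:
#       s.add(some_object)
#       s.commit()
#   finally:
#       s.close()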
|
bsd-3-clause
| -9,116,164,682,939,716,000 | 55.78125 | 96 | 0.773803 | false |
mola/qgis
|
python/plugins/GdalTools/tools/widgetPluginBase.py
|
1
|
5107
|
# -*- coding: utf-8 -*-
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from qgis.core import *
from qgis.gui import *
from dialogBase import GdalToolsBaseDialog as BaseDialog
import GdalTools_utils as Utils
class GdalToolsBasePluginWidget:
def __init__(self, iface, commandName, helpFileBaseName = None, parent = None):
self.iface = iface
self.base = BaseDialog(parent, iface, self, self.windowTitle(), commandName)
self.connect(self.base, SIGNAL("processError(QProcess::ProcessError)"), self.onError)
self.connect(self.base, SIGNAL("processFinished(int, QProcess::ExitStatus)"), self.onFinished)
self.connect(self.base, SIGNAL("okClicked()"), self.onRun)
self.connect(self.base, SIGNAL("closeClicked()"), self.onClosing)
self.connect(self.base, SIGNAL("helpClicked()"), self.onHelp)
self.connect(self.base, SIGNAL("finished(bool)"), self.finished)
def someValueChanged(self):
self.emit(SIGNAL("valuesChanged(const QStringList &)"), self.getArguments())
def exec_(self):
self.someValueChanged()
return self.base.exec_()
def show_(self):
self.someValueChanged()
return self.base.show()
def setCommandViewerEnabled(self, enable):
self.base.setCommandViewerEnabled(enable)
self.someValueChanged()
def onRun(self):
self.base.onRun()
def onClosing(self):
self.base.onClosing()
def onHelp(self):
self.base.onHelp()
def onFinished(self, exitCode, status):
self.base.onFinished(exitCode, status)
def onError(self, error):
self.base.onError(error)
def getArguments(self):
pass
def getInputFileName(self):
pass
def getOutputFileName(self):
pass
def addLayerIntoCanvas(self, fileInfo):
pass
def finished(self, load):
outFn = self.getOutputFileName()
if outFn is None:
return
outFn = QString(outFn)
if outFn.isEmpty():
QMessageBox.warning(self, self.tr( "Warning" ), self.tr( "No output file created." ) )
return
fileInfo = QFileInfo(outFn)
if fileInfo.exists():
if load:
self.addLayerIntoCanvas(fileInfo)
QMessageBox.information(self, self.tr( "Finished" ), self.tr( "Processing completed." ) )
else:
QMessageBox.warning(self, self.tr( "Warning" ), self.tr( "%1 not created." ).arg( outFn ) )
# This method is useful to set up options for the command. It sets for each passed widget:
# 1. its passed signals to connect to the BasePluginWidget.someValueChanged() slot,
# 2. its enabler checkbox or enabled status,
# 3. its visibility: the widget is hidden (and its enabler unchecked) if the installed GDAL version is lower than the required version
#
# wdgts_sgnls_chk_ver_list: list of wdgts_sgnls_chk_ver
# wdgts_sgnls_chk_ver: tuple containing widgets, signals, enabler checkbox or enabled status, required version
def setParamsStatus(self, wdgts_sgnls_chk_ver_list):
if isinstance(wdgts_sgnls_chk_ver_list, list):
for wdgts_sgnls_chk_ver in wdgts_sgnls_chk_ver_list:
self.setParamsStatus(wdgts_sgnls_chk_ver)
return
wdgts_sgnls_chk_ver = wdgts_sgnls_chk_ver_list
if not isinstance(wdgts_sgnls_chk_ver, tuple):
return
if len(wdgts_sgnls_chk_ver) > 0:
wdgts = wdgts_sgnls_chk_ver[0]
else:
wdgts = None
if len(wdgts_sgnls_chk_ver) > 1:
sgnls = wdgts_sgnls_chk_ver[1]
else:
sgnls = None
if len(wdgts_sgnls_chk_ver) > 2:
chk = wdgts_sgnls_chk_ver[2]
else:
chk = None
if len(wdgts_sgnls_chk_ver) > 3:
ver = wdgts_sgnls_chk_ver[3]
else:
ver = None
if isinstance(wdgts, list):
for wdgt in wdgts:
self.setParamsStatus((wdgt, sgnls, chk, ver))
return
wdgt = wdgts
if not isinstance(wdgt, QWidget):
return
# if check version fails, disable the widget then hide both it and its enabler checkbox
if ver != None:
if not isinstance(ver, Utils.Version):
ver = Utils.Version(ver)
gdalVer = Utils.GdalConfig.version()
if gdalVer != None and ver > gdalVer:
wdgt.setVisible(False)
if isinstance(chk, QWidget):
chk.setVisible(False)
chk.setChecked(False)
sgnls = None
chk = False
# connects the passed signals to the BasePluginWidget.someValueChanged slot
if isinstance(sgnls, list):
for sgnl in sgnls:
self.setParamsStatus((wdgt, sgnl, chk))
return
sgnl = sgnls
if sgnl != None:
self.connect(wdgt, sgnl, self.someValueChanged)
# set the passed checkbox as widget enabler
if isinstance(chk, bool):
wdgt.setEnabled(chk)
if ( isinstance(chk, QAbstractButton) or isinstance(chk, QGroupBox) ) and \
chk.isCheckable():
wdgt.setEnabled(chk.isChecked())
self.connect(chk, SIGNAL("toggled(bool)"), wdgt.setEnabled)
self.connect(chk, SIGNAL("toggled(bool)"), self.someValueChanged)
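# A usage sketch for setParamsStatus (widget names are illustrative, not taken
# from the actual GdalTools dialogs):
#
#   self.setParamsStatus([
#       (self.inputEdit, SIGNAL("textChanged(const QString &)")),
#       (self.optionsTable, [SIGNAL("cellValueChanged()")], self.optionsGroupBox, "1.7.0"),
#   ])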
|
gpl-2.0
| 3,448,043,704,059,595,300 | 30.91875 | 118 | 0.644801 | false |
ImpregnableProgrammer/Advent-of-Code
|
2016/Day_10.py
|
1
|
2375
|
import re
# First Part
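# Output bins are encoded as negative dictionary keys (output n -> -n-1) so
# that bots and outputs share one dictionary; instructions are processed
# round-robin and retried until the giving bot actually holds two chips.
# Part one prints the bot that compares chips 17 and 61.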
def First_Part(s):
Bot_Dict = {}
g=0
s=s.split('\n')
while 1:
p=re.sub('(?<=output )\d+',lambda k:str(-int(k.group(0))-1),s[g%len(s)])
G=re.findall('-?\d+',p)
if p[:3]=='bot' and G[0] in Bot_Dict.keys() and len(Bot_Dict[G[0]])>1:
if sorted(Bot_Dict[G[0]],key=int)==['17','61']:
print(G[0])
break
s.pop(g%len(s))
if G[1] not in Bot_Dict.keys():
Bot_Dict[G[1]]=[]
if G[2] not in Bot_Dict.keys():
Bot_Dict[G[2]]=[]
X=len(Bot_Dict[G[1]])
Y=len(Bot_Dict[G[2]])
Bot_Dict[G[1]]+=(G[1][0]=='-' or (G[1][0]!='-' and X<2)) and [min(Bot_Dict[G[0]],key=int)] or []
Bot_Dict[G[2]]+=(G[2][0]=='-' or (G[2][0]!='-' and Y<2)) and [max(Bot_Dict[G[0]],key=int)] or []
Bot_Dict[G[0]]=(G[1][0]!='-' and X>1) and [min(Bot_Dict[G[0]],key=int)] or (G[2][0]!='-' and Y>1) and [max(Bot_Dict[G[0]],key=int)] or []
elif p[:5]=='value':
s.pop(g%len(s))
if G[1] not in Bot_Dict.keys():
Bot_Dict[G[1]]=[]
Bot_Dict[G[1]]+=len(Bot_Dict[G[1]])<2 and [G[0]] or []
g+=1
# Second Part
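# Same simulation as above, but it runs until every instruction is consumed
# and then multiplies the chip values in outputs 0, 1 and 2 (keys -1, -2, -3).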
def Second_Part(s):
Bot_Dict = {}
g=0
s=s.split('\n')
    while 1:
        p=re.sub(r'(?<=output )\d+',lambda k:str(-int(k.group(0))-1),s[g%len(s)])
        G=re.findall(r'-?\d+',p)
        if p[:3]=='bot' and G[0] in Bot_Dict.keys() and len(Bot_Dict[G[0]])>1:
s.pop(g%len(s))
if G[1] not in Bot_Dict.keys():
Bot_Dict[G[1]]=[]
if G[2] not in Bot_Dict.keys():
Bot_Dict[G[2]]=[]
X=len(Bot_Dict[G[1]])
Y=len(Bot_Dict[G[2]])
Bot_Dict[G[1]]+=(G[1][0]=='-' or (G[1][0]!='-' and X<2)) and [min(Bot_Dict[G[0]],key=int)] or []
Bot_Dict[G[2]]+=(G[2][0]=='-' or (G[2][0]!='-' and Y<2)) and [max(Bot_Dict[G[0]],key=int)] or []
Bot_Dict[G[0]]=(G[1][0]!='-' and X>1) and [min(Bot_Dict[G[0]],key=int)] or (G[2][0]!='-' and Y>1) and [max(Bot_Dict[G[0]],key=int)] or []
elif p[:5]=='value':
s.pop(g%len(s))
if G[1] not in Bot_Dict.keys():
Bot_Dict[G[1]]=[]
Bot_Dict[G[1]]+=len(Bot_Dict[G[1]])<2 and [G[0]] or []
g+=1
        # Once every instruction has been consumed, multiply the chips sitting
        # in outputs 0, 1 and 2 (encoded above as ids -1, -2 and -3).
        if len(s)<1:
            j=1
            for o in Bot_Dict.keys():
                if 0>int(o)>-4:
                    j*=int(Bot_Dict[o][0])
            print(j)
break
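
# A minimal driver sketch; the input file name is hypothetical and is assumed
# to hold the raw puzzle instructions, one per line.
if __name__ == '__main__':
    with open('Day_10_input.txt') as f:
        puzzle = f.read().strip()
    First_Part(puzzle)
    Second_Part(puzzle)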
|
gpl-3.0
| -5,439,650,238,929,213,000 | 35.538462 | 145 | 0.442526 | false |
clld/clldfabric
|
clldfabric/varnish.py
|
1
|
2713
|
"""
deploy with varnish:
- apt-get install varnish
- create /etc/default/varnish
- create /etc/varnish/main.vcl
- create /etc/varnish/sites.vcl
- create /etc/varnish/sites/
(and require it to contain the correct include!)
- create /etc/varnish/sites/{app.name}.vcl
- /etc/init.d/varnish restart
- adapt nginx site config
- /etc/init.d/nginx reload
"""
from fabric.contrib.files import append, exists
from fabtools import require
from fabtools import service
from clldfabric.util import (
create_file_as_root, upload_template_as_root, get_template_variables, http_auth,
)
from clldfabric.config import App
DEFAULT = """
START=yes
NFILES=131072
MEMLOCK=82000
# Default varnish instance name is the local nodename. Can be overridden with
# the -n switch, to have more instances on a single server.
# INSTANCE=$(uname -n)
DAEMON_OPTS="-a :6081 \
-T localhost:6082 \
-t 3600 \
-f /etc/varnish/main.vcl \
-S /etc/varnish/secret \
-s file,/var/lib/varnish/$INSTANCE/varnish_storage.bin,10G"
"""
MAIN_VCL = """
sub vcl_recv {
set req.http.Host = regsub(req.http.Host, "^www\.", "");
set req.http.Host = regsub(req.http.Host, ":80$", "");
}
include "/etc/varnish/sites.vcl";
"""
SITE_VCL_TEMPLATE = """
backend {app.name} {{
.host = "127.0.0.1";
.port = "{app.port}";
}}
sub vcl_recv {{
if (req.http.host ~ "^{app.domain}$") {{ set req.backend = {app.name}; }}
}}
sub vcl_fetch {{
set beresp.ttl = 3600s;
return(deliver);
}}
"""
def cache(app): # pragma: no cover
"""require an app to be put behind varnish
"""
require.deb.package('varnish')
create_file_as_root('/etc/default/varnish', DEFAULT)
create_file_as_root('/etc/varnish/main.vcl', MAIN_VCL)
sites_vcl = '/etc/varnish/sites.vcl'
site_config_dir = '/etc/varnish/sites'
    site_config = '/'.join([site_config_dir, '{app.name}.vcl'.format(app=app)])
include = 'include "%s";' % site_config
if exists(sites_vcl):
append(sites_vcl, include, use_sudo=True)
else:
create_file_as_root(sites_vcl, include + '\n')
require.files.directory(site_config_dir, use_sudo=True)
create_file_as_root(site_config, SITE_VCL_TEMPLATE.format(app=app))
service.restart('varnish')
template_vars = get_template_variables(App(app.name, 6081, domain=app.domain))
template_vars['SITE'] = True
upload_template_as_root(app.nginx_site, 'nginx-app.conf', template_vars)
service.reload('nginx')
def uncache(app):  # pragma: no cover
    tv = get_template_variables(app)
    tv['auth'] = http_auth(app)
    tv['SITE'] = True
    # SITE_TEMPLATE was undefined in this module; restore the plain nginx site
    # config the same way cache() writes it, via the nginx-app.conf template.
    upload_template_as_root(app.nginx_site, 'nginx-app.conf', tv)
    service.reload('nginx')
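
# Hedged usage sketch: with an App configured as in clldfabric.config (the
# name, port and domain below are made up), a fabric session could run:
#
#   from clldfabric.config import App
#   app = App('myapp', 8881, domain='myapp.example.org')
#   cache(app)    # put varnish in front of the app
#   uncache(app)  # revert to the plain nginx config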
|
apache-2.0
| -8,543,023,170,142,532,000 | 27.260417 | 84 | 0.653151 | false |
udrg/kalibr
|
aslam_offline_calibration/kalibr/python/kalibr_imu_camera_calibration/IccCalibrator.py
|
4
|
9778
|
import aslam_backend as aopt
import aslam_splines as asp
import IccUtil as util
import incremental_calibration as inc
import kalibr_common as kc
import sm
import gc
import numpy as np
import multiprocessing
import sys
# make numpy print prettier
np.set_printoptions(suppress=True)
CALIBRATION_GROUP_ID = 0
HELPER_GROUP_ID = 1
def addSplineDesignVariables(problem, dvc, setActive=True, group_id=HELPER_GROUP_ID):
for i in range(0,dvc.numDesignVariables()):
dv = dvc.designVariable(i)
dv.setActive(setActive)
problem.addDesignVariable(dv, group_id)
class IccCalibrator(object):
def __init__(self):
self.ImuList = []
def initDesignVariables(self, problem, poseSpline, noTimeCalibration, noChainExtrinsics=True, \
estimateGravityLength=False, initialGravityEstimate=np.array([0.0,9.81,0.0])):
# Initialize the system pose spline (always attached to imu0)
self.poseDv = asp.BSplinePoseDesignVariable( poseSpline )
addSplineDesignVariables(problem, self.poseDv)
# Add the calibration target orientation design variable. (expressed as gravity vector in target frame)
if estimateGravityLength:
self.gravityDv = aopt.EuclideanPointDv( initialGravityEstimate )
else:
self.gravityDv = aopt.EuclideanDirection( initialGravityEstimate )
self.gravityExpression = self.gravityDv.toExpression()
self.gravityDv.setActive( True )
problem.addDesignVariable(self.gravityDv, HELPER_GROUP_ID)
#Add all DVs for all IMUs
for imu in self.ImuList:
imu.addDesignVariables( problem )
#Add all DVs for the camera chain
self.CameraChain.addDesignVariables( problem, noTimeCalibration, noChainExtrinsics )
    def addPoseMotionTerms(self, problem, tv, rv):
        wt = 1.0/tv
        wr = 1.0/rv
        W = np.diag([wt,wt,wt,wr,wr,wr])
        # errorOrder was previously undefined here; 2 (penalize the second
        # derivative, i.e. xddot as in the report output below) is an assumption.
        errorOrder = 2
        asp.addMotionErrorTerms(problem, self.poseDv, W, errorOrder)
#add camera to sensor list (create list if necessary)
def registerCamChain(self, sensor):
self.CameraChain = sensor
def registerImu(self, sensor):
self.ImuList.append( sensor )
def buildProblem( self,
splineOrder=6,
poseKnotsPerSecond=70,
biasKnotsPerSecond=70,
doPoseMotionError=False,
mrTranslationVariance=1e6,
mrRotationVariance=1e5,
doBiasMotionError=True,
blakeZisserCam=-1,
huberAccel=-1,
huberGyro=-1,
noTimeCalibration=False,
noChainExtrinsics=True,
maxIterations=20,
gyroNoiseScale=1.0,
accelNoiseScale=1.0,
timeOffsetPadding=0.02,
verbose=False ):
print "\tSpline order: %d" % (splineOrder)
print "\tPose knots per second: %d" % (poseKnotsPerSecond)
print "\tDo pose motion regularization: %s" % (doPoseMotionError)
print "\t\txddot translation variance: %f" % (mrTranslationVariance)
print "\t\txddot rotation variance: %f" % (mrRotationVariance)
print "\tBias knots per second: %d" % (biasKnotsPerSecond)
print "\tDo bias motion regularization: %s" % (doBiasMotionError)
print "\tBlake-Zisserman on reprojection errors %s" % blakeZisserCam
print "\tAcceleration Huber width (sigma): %f" % (huberAccel)
print "\tGyroscope Huber width (sigma): %f" % (huberGyro)
print "\tDo time calibration: %s" % (not noTimeCalibration)
print "\tMax iterations: %d" % (maxIterations)
print "\tTime offset padding: %f" % (timeOffsetPadding)
############################################
## initialize camera chain
############################################
#estimate the timeshift for all cameras to the main imu
self.noTimeCalibration = noTimeCalibration
if not noTimeCalibration:
for cam in self.CameraChain.camList:
cam.findTimeshiftCameraImuPrior(self.ImuList[0], verbose)
#obtain orientation prior between main imu and camera chain (if no external input provided)
#and initial estimate for the direction of gravity
self.CameraChain.findOrientationPriorCameraChainToImu(self.ImuList[0])
estimatedGravity = self.CameraChain.getEstimatedGravity()
############################################
## init optimization problem
############################################
#initialize a pose spline using the camera poses in the camera chain
poseSpline = self.CameraChain.initializePoseSplineFromCameraChain(splineOrder, poseKnotsPerSecond, timeOffsetPadding)
# Initialize bias splines for all IMUs
for imu in self.ImuList:
imu.initBiasSplines(poseSpline, splineOrder, biasKnotsPerSecond)
# Now I can build the problem
problem = inc.CalibrationOptimizationProblem()
# Initialize all design variables.
self.initDesignVariables(problem, poseSpline, noTimeCalibration, noChainExtrinsics, initialGravityEstimate = estimatedGravity)
############################################
## add error terms
############################################
#Add calibration target reprojection error terms for all camera in chain
self.CameraChain.addCameraChainErrorTerms(problem, self.poseDv, blakeZissermanDf=blakeZisserCam, timeOffsetPadding=timeOffsetPadding)
# Initialize IMU error terms.
for imu in self.ImuList:
imu.addAccelerometerErrorTerms(problem, self.poseDv, self.gravityExpression, mSigma=huberAccel, accelNoiseScale=accelNoiseScale)
imu.addGyroscopeErrorTerms(problem, self.poseDv, mSigma=huberGyro, gyroNoiseScale=gyroNoiseScale, g_w=self.gravityExpression)
# Add the bias motion terms.
if doBiasMotionError:
imu.addBiasMotionTerms(problem)
# Add the pose motion terms.
if doPoseMotionError:
self.addPoseMotionTerms(problem, mrTranslationVariance, mrRotationVariance)
        # A gravity prior could be added here (not implemented).
self.problem = problem
def optimize(self, options=None, maxIterations=30, recoverCov=False):
if options is None:
options = aopt.Optimizer2Options()
options.verbose = True
options.doLevenbergMarquardt = True
options.levenbergMarquardtLambdaInit = 10.0
options.nThreads = max(1,multiprocessing.cpu_count()-1)
options.convergenceDeltaX = 1e-5
options.convergenceDeltaJ = 1e-2
options.maxIterations = maxIterations
options.trustRegionPolicy = aopt.LevenbergMarquardtTrustRegionPolicy(options.levenbergMarquardtLambdaInit)
options.linearSolver = aopt.BlockCholeskyLinearSystemSolver()
#run the optimization
self.optimizer = aopt.Optimizer2(options)
self.optimizer.setProblem(self.problem)
optimizationFailed=False
try:
retval = self.optimizer.optimize()
if retval.linearSolverFailure:
optimizationFailed = True
except:
optimizationFailed = True
if optimizationFailed:
sm.logError("Optimization failed!")
raise RuntimeError("Optimization failed!")
#free some memory
del self.optimizer
gc.collect()
if recoverCov:
self.recoverCovariance()
def recoverCovariance(self):
#Covariance ordering (=dv ordering)
#ORDERING: N=num cams
# 1. transformation imu-cam0 --> 6
# 2. camera time2imu --> 1*numCams (only if enabled)
print "Recovering covariance..."
estimator = inc.IncrementalEstimator(CALIBRATION_GROUP_ID)
rval = estimator.addBatch(self.problem, True)
est_stds = np.sqrt(estimator.getSigma2Theta().diagonal())
#split and store the variance
self.std_trafo_ic = np.array(est_stds[0:6])
self.std_times = np.array(est_stds[6:])
def saveImuSetParametersYaml(self, resultFile):
imuSetConfig = kc.ImuSetParameters(resultFile, True)
for imu in self.ImuList:
imuConfig = imu.getImuConfig()
imuSetConfig.addImuParameters(imu_parameters=imuConfig)
imuSetConfig.writeYaml(resultFile)
def saveCamChainParametersYaml(self, resultFile):
chain = self.CameraChain.chainConfig
nCams = len(self.CameraChain.camList)
# Calibration results
for camNr in range(0,nCams):
#cam-cam baselines
if camNr > 0:
T_cB_cA, baseline = self.CameraChain.getResultBaseline(camNr-1, camNr)
chain.setExtrinsicsLastCamToHere(camNr, T_cB_cA)
#imu-cam trafos
T_ci = self.CameraChain.getResultTrafoImuToCam(camNr)
chain.setExtrinsicsImuToCam(camNr, T_ci)
if not self.noTimeCalibration:
#imu to cam timeshift
timeshift = float(self.CameraChain.getResultTimeShift(camNr))
chain.setTimeshiftCamImu(camNr, timeshift)
try:
chain.writeYaml(resultFile)
except:
print "ERROR: Could not write parameters to file: {0}\n".format(resultFile)
|
bsd-3-clause
| 5,931,766,575,764,862,000 | 40.786325 | 141 | 0.614645 | false |
bruckhaus/challenges
|
python_challenges/project_euler/p003_largest_prime_factor.py
|
1
|
1769
|
__author__ = 'tilmannbruckhaus'
# Largest prime factor
# Problem 3
# The prime factors of 13195 are 5, 7, 13 and 29.
# What is the largest prime factor of the number 600851475143 ?
class LargestPrimeFactor:
def __init__(self):
pass
@staticmethod
def is_divisible(number, divisor):
return number % divisor == 0
@staticmethod
def is_prime(number):
result = True
candidate = 2
while candidate <= number ** 0.5:
if LargestPrimeFactor.is_divisible(number, candidate):
result = False
candidate += 1
return result
@staticmethod
def find(n):
if __name__ == '__main__':
print "\nFinding largest prime factor of", n
L = LargestPrimeFactor
result = 1
candidate = 2
while candidate <= n ** 0.5:
if L.is_divisible(n, candidate) and L.is_prime(candidate):
                factor = n // candidate
if L.is_prime(factor):
return factor
else:
result = candidate
n = factor
if __name__ == '__main__':
print "found prime factor: ", result
            else:
                candidate += 1
        # If the loop found no factor, n itself is prime (or 1) and is its
        # own largest prime factor.
        return result if result > 1 else n
if __name__ == '__main__':
print LargestPrimeFactor.find(2)
print LargestPrimeFactor.find(3)
print LargestPrimeFactor.find(4)
print LargestPrimeFactor.find(7)
print LargestPrimeFactor.find(15)
print LargestPrimeFactor.find(44)
print LargestPrimeFactor.find(99)
print LargestPrimeFactor.find(111)
print LargestPrimeFactor.find(1577)
print LargestPrimeFactor.find(19 * 37 * 83)
print LargestPrimeFactor.find(600851475143)
|
mit
| 2,894,715,118,117,097,500 | 28.483333 | 70 | 0.573205 | false |
mfriesen/tentacle
|
src/tentacle/dht/dht_bucket_routing_table.py
|
1
|
4577
|
from tentacle.dht.routing_table import DHTRoutingTable, distance
MAX_BUCKET_SIZE = 8
class DHTBucket(object):
def __init__(self):
self._nodes = dict()
def add_node(self, dhtNode):
self._nodes[dhtNode._id] = dhtNode
def is_bucket_full(self):
return len(self._nodes) >= MAX_BUCKET_SIZE
def is_empty(self):
return len(self._nodes) == 0
def values(self):
return self._nodes.values()
def truncate(self, compare_node_id):
if len(self._nodes) > MAX_BUCKET_SIZE:
distance_map = dict()
for s in self._nodes:
distance_map[distance(compare_node_id, s)] = s
l = sorted(distance_map)
for i in range(0, len(self._nodes) - MAX_BUCKET_SIZE):
del self._nodes[distance_map[l[i]]]
class DHTBucketNode(object):
def __init__(self, min_, max_):
self._bucket = DHTBucket()
self._min = int(min_)
self._max = int(max_)
self._left = None
self._right = None
def add_node(self, dhtNode):
self._bucket.add_node(dhtNode)
def is_bucket_full(self):
return self._bucket.is_bucket_full()
def is_node_id_within_bucket(self, node_id):
return (self._min < node_id) and (node_id <= self._max)
class DHTBucketRoutingTable(DHTRoutingTable):
_root = None
def __init__(self, id_):
        # use an exact integer bound for the 160-bit id space
        self._root = DHTBucketNode(min_ = 0, max_ = 2 ** 160)
self._id = id_
def __create_node__(self, min_, max_):
bucketNode = DHTBucketNode(min_ = min_, max_ = max_)
return bucketNode
def add_node(self, dhtNode):
bucketNode = self.__find_bucket__(self._root, dhtNode)
bucketNode.add_node(dhtNode)
if bucketNode.is_bucket_full():
self.__split_bucket__(bucketNode)
def __find_bucket__(self, bucketNode, dhtNode):
if bucketNode is not None and bucketNode.is_node_id_within_bucket(dhtNode._id):
if bucketNode._left is not None and bucketNode._left.is_node_id_within_bucket(dhtNode._id):
bucketNode = self.__find_bucket__(bucketNode._left, dhtNode)
if bucketNode._right is not None and bucketNode._right.is_node_id_within_bucket(dhtNode._id):
bucketNode = self.__find_bucket__(bucketNode._right, dhtNode)
return bucketNode
def __split_bucket__(self, bucketNode):
if bucketNode.is_bucket_full():
half = (bucketNode._max - bucketNode._min) / 2
left_node = self.__create_node__(bucketNode._min, bucketNode._min + half)
right_node = self.__create_node__(bucketNode._min + half + 1, bucketNode._max)
for node_id in bucketNode._bucket._nodes:
dhtNode = bucketNode._bucket._nodes[node_id]
if right_node.is_node_id_within_bucket(dhtNode._id):
right_node.add_node(dhtNode)
elif left_node.is_node_id_within_bucket(dhtNode._id):
left_node.add_node(dhtNode)
if not left_node._bucket.is_empty() and not right_node._bucket.is_empty() and bucketNode.is_node_id_within_bucket(self._id):
bucketNode._bucket = None
bucketNode._left = left_node
bucketNode._right = right_node
self.__split_bucket__(left_node)
self.__split_bucket__(right_node)
else: # only keep the closest nodes
bucketNode._bucket.truncate(self._id)
def find_closest_nodes(self, id_):
bucket = DHTBucket()
self.__find_closest_nodes__(self._root, bucket, id_)
return bucket
def __find_closest_nodes__(self, bucketNode, bucket, id_):
if bucketNode is not None and bucketNode.is_node_id_within_bucket(id_):
self.__find_closest_nodes__(bucketNode._left, bucket, id_)
self.__find_closest_nodes__(bucketNode._right, bucket, id_)
if bucketNode._bucket is not None and not bucket.is_bucket_full():
for node_id in bucketNode._bucket._nodes:
dhtNode = bucketNode._bucket._nodes[node_id]
bucket.add_node(dhtNode)
bucket.truncate(id_)
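
# Hedged usage sketch; DHTNode is defined elsewhere, so a minimal stand-in
# exposing the only attribute this module touches (_id) is used here.
if __name__ == '__main__':
    class _FakeNode(object):
        def __init__(self, id_):
            self._id = id_

    table = DHTBucketRoutingTable(id_=12345)
    for node_id in (1, 5, 42, 99, 12340, 2 ** 80):
        table.add_node(_FakeNode(node_id))
    closest = table.find_closest_nodes(12345)
    print(sorted(n._id for n in closest.values()))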
|
apache-2.0
| 9,090,170,006,814,538,000 | 33.938931 | 136 | 0.544243 | false |
ylatuya/Flumotion
|
flumotion/test/test_component_providers.py
|
1
|
11789
|
# vi:si:et:sw=4:sts=4:ts=4
#
# Flumotion - a streaming media server
# Copyright (C) 2004,2005,2006,2007 Fluendo, S.L. (www.fluendo.com).
# All rights reserved.
# This file may be distributed and/or modified under the terms of
# the GNU General Public License version 2 as published by
# the Free Software Foundation.
# This file is distributed without any warranty; without even the implied
# warranty of merchantability or fitness for a particular purpose.
# See "LICENSE.GPL" in the source distribution for more information.
# Licensees having purchased or holding a valid Flumotion Advanced
# Streaming Server license may use this file in accordance with the
# Flumotion Advanced Streaming Server Commercial License Agreement.
# See "LICENSE.Flumotion" in the source distribution for more information.
# Headers in this file shall remain intact.
import os
import shutil
import tempfile
from twisted.internet import defer, reactor
from twisted.trial import unittest
import twisted.copyright
if twisted.copyright.version == "SVN-Trunk":
SKIP_MSG = "Twisted 2.0.1 thread pool is broken for tests"
else:
SKIP_MSG = None
from flumotion.common import testsuite
from flumotion.component.misc.httpserver import localpath
from flumotion.component.misc.httpserver import localprovider
from flumotion.component.misc.httpserver import cachedprovider
from flumotion.component.misc.httpserver.fileprovider \
import InsecureError, NotFoundError, CannotOpenError
attr = testsuite.attr
class LocalPath(testsuite.TestCase):
def setUp(self):
self.path = tempfile.mkdtemp(suffix=".flumotion.test")
a = os.path.join(self.path, 'a')
open(a, "w").write('test file a')
B = os.path.join(self.path, 'B')
os.mkdir(B)
c = os.path.join(self.path, 'B', 'c')
open(c, "w").write('test file c')
def tearDown(self):
shutil.rmtree(self.path, ignore_errors=True)
def testExistingPath(self):
local = localpath.LocalPath(self.path)
self.failUnless(isinstance(local, localpath.LocalPath))
def testChildExistingFile(self):
child = localpath.LocalPath(self.path).child('a')
self.failUnless(isinstance(child, localpath.LocalPath))
def testChildExistingDir(self):
child = localpath.LocalPath(self.path).child('B')
self.failUnless(isinstance(child, localpath.LocalPath))
def testChildTraversingDir(self):
local = localpath.LocalPath(self.path)
child = local.child('B').child('c')
self.failUnless(isinstance(child, localpath.LocalPath))
def testChildNonExistingFile(self):
child = localpath.LocalPath(self.path).child('foo')
self.failUnless(isinstance(child, localpath.LocalPath))
def testChildTraversingNonExistingDir(self):
local = localpath.LocalPath(self.path)
child = local.child('foo').child('bar')
self.failUnless(isinstance(child, localpath.LocalPath))
def testChildInsecurePathTooDeep(self):
local = localpath.LocalPath(self.path)
self.assertRaises(InsecureError, local.child, 'B/c')
def testChildInsecurePathTooDeepAndNonExisting(self):
local = localpath.LocalPath(self.path)
self.assertRaises(InsecureError, local.child, 'foo/bar')
def testChildInsecurePathRoot(self):
local = localpath.LocalPath(self.path)
self.assertRaises(InsecureError, local.child, '/foo')
def testChildInsecurePathUp(self):
local = localpath.LocalPath(self.path)
self.assertRaises(InsecureError, local.child, '..')
class LocalPathCachedProvider(testsuite.TestCase):
skip = SKIP_MSG
def setUp(self):
self.path = tempfile.mkdtemp(suffix=".flumotion.test")
a = os.path.join(self.path, 'a')
open(a, "w").write('test file a')
B = os.path.join(self.path, 'B')
os.mkdir(B)
c = os.path.join(self.path, 'B', 'c')
open(c, "w").write('test file c')
plugProps = {"properties": {"path": self.path}}
self.fileProviderPlug = \
cachedprovider.FileProviderLocalCachedPlug(plugProps)
return self.fileProviderPlug.start(component=None)
def tearDown(self):
d = defer.maybeDeferred(self.fileProviderPlug.stop, component=None)
def _rmTempDir(result):
shutil.rmtree(self.path, ignore_errors=True)
d.addBoth(_rmTempDir)
return d
def testExistingPath(self):
local = self.fileProviderPlug.getRootPath()
self.failUnless(isinstance(local, cachedprovider.LocalPath))
def testChildExistingFile(self):
child = self.fileProviderPlug.getRootPath().child('a')
self.failUnless(isinstance(child, localpath.LocalPath))
def testChildExistingDir(self):
child = self.fileProviderPlug.getRootPath().child('B')
self.failUnless(isinstance(child, localpath.LocalPath))
def testChildTraversingDir(self):
local = self.fileProviderPlug.getRootPath()
child = local.child('B').child('c')
self.failUnless(isinstance(child, localpath.LocalPath))
def testChildNonExistingFile(self):
child = self.fileProviderPlug.getRootPath().child('foo')
self.failUnless(isinstance(child, localpath.LocalPath))
def testChildTraversingNonExistingDir(self):
local = self.fileProviderPlug.getRootPath()
child = local.child('foo').child('bar')
self.failUnless(isinstance(child, localpath.LocalPath))
def testChildInsecurePathTooDeep(self):
local = self.fileProviderPlug.getRootPath()
self.assertRaises(InsecureError, local.child, 'B/c')
def testChildInsecurePathTooDeepAndNonExisting(self):
local = self.fileProviderPlug.getRootPath()
self.assertRaises(InsecureError, local.child, 'foo/bar')
def testChildInsecurePathRoot(self):
local = self.fileProviderPlug.getRootPath()
self.assertRaises(InsecureError, local.child, '/foo')
def testChildInsecurePathUp(self):
local = self.fileProviderPlug.getRootPath()
self.assertRaises(InsecureError, local.child, '..')
def testOpenExisting(self):
child = self.fileProviderPlug.getRootPath().child('a')
child.open()
def testOpenTraversingExistingDir(self):
local = self.fileProviderPlug.getRootPath()
child = local.child('B').child('c')
child.open()
def testOpendir(self):
local = self.fileProviderPlug.getRootPath()
self.assertRaises(CannotOpenError, local.open)
def testOpenNonExisting(self):
local = self.fileProviderPlug.getRootPath()
child = local.child('foo')
self.assertRaises(NotFoundError, child.open)
def testOpenTraversingNonExistingDir(self):
local = self.fileProviderPlug.getRootPath()
child = local.child('foo').child('bar')
self.assertRaises(NotFoundError, child.open)
class LocalPathLocalProvider(testsuite.TestCase):
def setUp(self):
self.path = tempfile.mkdtemp(suffix=".flumotion.test")
a = os.path.join(self.path, 'a')
open(a, "w").write('test file a')
B = os.path.join(self.path, 'B')
os.mkdir(B)
c = os.path.join(self.path, 'B', 'c')
open(c, "w").write('test file c')
self.local = localprovider.LocalPath(self.path)
def tearDown(self):
shutil.rmtree(self.path, ignore_errors=True)
def testOpenExisting(self):
child = self.local.child('a')
child.open()
def testOpenTraversingExistingDir(self):
child = self.local.child('B').child('c')
child.open()
def testOpendir(self):
self.assertRaises(CannotOpenError, self.local.open)
def testOpenNonExisting(self):
child = self.local.child('foo')
self.assertRaises(NotFoundError, child.open)
def testOpenTraversingNonExistingDir(self):
child = self.local.child('foo').child('bar')
self.assertRaises(NotFoundError, child.open)
class CachedProviderFileTest(testsuite.TestCase):
skip = SKIP_MSG
def setUp(self):
from twisted.python import threadpool
reactor.threadpool = threadpool.ThreadPool(0, 10)
reactor.threadpool.start()
self.src_path = tempfile.mkdtemp(suffix=".src")
self.cache_path = tempfile.mkdtemp(suffix=".cache")
plugProps = {"properties": {"path": self.src_path,
"cache-dir": self.cache_path}}
self.fileProviderPlug = \
cachedprovider.FileProviderLocalCachedPlug(plugProps)
d = self.fileProviderPlug.start(None)
self.dataSize = 7
self.data = "foo bar"
        # the old parameter ensures newer files will be taken into account
        # (avoids timing problems), as in testModifySrc
        self.testFileName = self.createFile('a', self.data, old=True)
return d
def _tearDown(self):
shutil.rmtree(self.src_path, ignore_errors=True)
shutil.rmtree(self.cache_path, ignore_errors=True)
reactor.threadpool.stop()
reactor.threadpool = None
def tearDown(self):
d = defer.maybeDeferred(self.fileProviderPlug.stop, None)
d.addCallback(lambda _: self._tearDown())
return d
@attr('slow')
def testModifySrc(self):
newData = "bar foo"
d = self.openFile('a')
d.addCallback(self.readFile, self.dataSize)
d.addCallback(pass_through, self.cachedFile.close)
d.addCallback(pass_through, self.createFile, 'a', newData)
d.addCallback(lambda _: self.openFile('a'))
d.addCallback(self.readFile, self.dataSize)
d.addCallback(pass_through, self.cachedFile.close)
d.addCallback(self.assertEqual, newData)
return d
def testSeekend(self):
d = self.openFile('a')
d.addCallback(pass_through, self.cachedFile.seek, self.dataSize-5)
d.addCallback(self.readFile, 5)
d.addCallback(pass_through, self.cachedFile.close)
d.addCallback(self.assertEqual, self.data[-5:])
return d
@attr('slow')
def testCachedFile(self):
d = self.openFile('a')
d.addCallback(self.readFile, self.dataSize)
d.addCallback(delay, 1)
d.addCallback(pass_through, self.cachedFile.close)
d.addCallback(lambda _: self.getCachePath(self.testFileName))
d.addCallback(self.checkPathExists)
return d
def testSimpleIntegrity(self):
d = self.openFile('a')
d.addCallback(self.readFile, self.dataSize)
d.addCallback(pass_through, self.cachedFile.close)
d.addCallback(lambda data:
self.failUnlessEqual(self.data, data))
return d
def getCachePath(self, path):
return self.fileProviderPlug.cache.getCachePath(path)
def getTempPath(self, path):
return self.fileProviderPlug.getTempPath(path)
def checkPathExists(self, p):
self.failUnless(os.path.exists(p))
def createFile(self, name, data, old=False):
testFileName = os.path.join(self.src_path, name)
testFile = open(testFileName, "w")
testFile.write(data)
testFile.close()
        if old:
            # backdate the file so newly written versions are clearly newer
            os.utime(testFileName, (1, 1))
return testFileName
def openFile(self, name):
self.cachedFile = \
self.fileProviderPlug.getRootPath().child(name).open()
return defer.succeed(self.cachedFile)
def readFile(self, _, size):
return self.cachedFile.read(size)
def pass_through(result, fun, *args, **kwargs):
fun(*args, **kwargs)
return result
def delay(ret, t):
d = defer.Deferred()
reactor.callLater(t, d.callback, ret)
return d
|
gpl-2.0
| -2,112,570,596,379,799,300 | 33.270349 | 75 | 0.668929 | false |
kadaradam/ScavengeSurvive
|
misc/coordheatmap.py
|
2
|
1715
|
import copy
import io
import re
from PIL import Image, ImageDraw
import heatmap
o = re.compile(r'CreateObject\(([0-9]+),\s*([\-\+]?[0-9]*\.[0-9]+),\s*([\-\+]?[0-9]*\.[0-9]+),\s*([\-\+]?[0-9]*\.[0-9]+),\s*([\-\+]?[0-9]*\.[0-9]+),\s*([\-\+]?[0-9]*\.[0-9]+),\s*([\-\+]?[0-9]*\.[0-9]+)\);')
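# An input line the regex above is meant to match would look like, e.g.:
#   CreateObject(1234, 100.0, 200.0, 3.0, 0.0, 0.0, 90.0);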
DOT_RADIUS = 6
class Object():
def __init__(self, x, y, z):
self.x = x
self.y = y
self.z = z
def load_obj(filename):
objs = []
with io.open(filename) as f:
for l in f:
r = o.match(l)
if r:
objs.append(Object(
float(r.group(2)), # x
float(r.group(3)), # y
float(r.group(4)))) # z
return objs
def draw_obj(im, draw, objs):
	print(len(objs), "object spawns being drawn")
x = 0.0
y = 0.0
for s in objs:
x = s.x + 3000
y = 6000 - (s.y + 3000)
draw.ellipse([x - DOT_RADIUS, y - DOT_RADIUS, x + DOT_RADIUS, y + DOT_RADIUS], outline=(255, 255, 255), fill=(0, 0, 0))
def generate_obj_heatmap(im, draw, objs):
points = []
for l in objs:
points.append([int(l.x + 3000), int(l.y + 3000)])
hm = heatmap.Heatmap(libpath="C:\\Python34\\Lib\\site-packages\\heatmap\\cHeatmap-x86.dll")
hmimg = hm.heatmap(
points,
dotsize=150,
size=(6000, 6000),
scheme='classic',
area=((0, 0), (6000, 6000)))
im.paste(hmimg, mask=hmimg)
im.save("object-heatmap.jpg")
def core():
objs = []
objs += load_obj("in")
# Initialise PIL stuff
mapimg = Image.open("gtasa-blank-1.0.jpg")
draw = ImageDraw.Draw(mapimg)
# Generate dots
draw_obj(mapimg, draw, objs)
mapimg.save("object-map.jpg")
# generate heatmap
generate_obj_heatmap(copy.copy(mapimg), draw, objs)
if __name__ == '__main__':
	core()
|
gpl-3.0
| -9,063,381,471,335,428,000 | 18.055556 | 206 | 0.579592 | false |
lonnon/passphrase
|
passphrase.py
|
1
|
1042
|
#!/usr/bin/env python
# passphrase
#
# Generates a passphrase from random words (four by default, number may
# be specified as first argument on command line). By default, words are
# between 6 and 10 characters in length. Different minimum and maximum
# word lengths may be specified in the seconda and third command line
# arguments, respectively.
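#
# Example: ./passphrase.py 5 8 12
#   -> five words, each between 8 and 12 letters long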
import random
import sys
import glob
import re
try: length = int(sys.argv[1])
except IndexError: length = 4
try: minimum = int(sys.argv[2])
except IndexError: minimum = 6
try: maximum = int(sys.argv[3])
except IndexError: maximum = 10
if minimum > maximum:
maximum = minimum
dictionaries = {int(re.search(r'[0-9]+$', f).group()):f for f in glob.glob('dictionary_letters_*')}
words = list()
for i in range(minimum, maximum + 1):
with open(dictionaries[i], mode='r', encoding='utf-8') as dictionary:
for line in dictionary:
words.append(line.strip())
r = random.Random()
output = [r.choice(words) for n in range(length)]
print(' '.join(output))
print()
|
mit
| -5,795,021,236,046,037,000 | 27.944444 | 99 | 0.709213 | false |
nkrim/passwordgen
|
src/worddict.py
|
1
|
4612
|
# Copyright 2017 Noah Krim
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
from shutil import copyfileobj
from tempfile import TemporaryFile, mkstemp
from .utils import *
class WordDictionary:
class LengthSetMap:
def __init__(self):
self._words = [{''}]
def __bool__(self):
return self.maxlength() > 0
def __getitem__(self, length):
return self._words[length]
def __iter__(self):
return self._words.__iter__()
def __len__(self):
return len(self._words)
def __str__(self):
return '\n'.join(','.join(sorted(word_set)) for word_set in self._words[1:])
def add(self, word, length=-1):
if length < 0:
length = len(word)
while length >= len(self._words):
self._words.append(set())
self._words[length].add(word)
def maxlength(self):
for i in reversed(range(len(self._words))):
if len(self._words[i]) > 0:
return i
return 0
def __init__(self, words_file, wordmap=None):
self.words_file = words_file
if not wordmap:
wordmap = WordDictionary.parse(self.words_file, formatted=True)
self.wordmap = wordmap
def getWordPool(self, length_lower=None, length_upper=None):
if not length_upper:
length_upper = length_lower
if length_lower != None:
pool = {w for lenset in self.wordmap[length_lower:length_upper+1] for w in lenset}
else:
pool = set()
if not pool:
pool = {w for lenset in self.wordmap[1:] for w in lenset}
if not pool:
pool = {w for w in self.wordmap[0]}
return pool
@staticmethod
def parse(file_path, formatted=False):
wordmap = WordDictionary.LengthSetMap()
if formatted:
length = 1
with open(file_path, 'r') as f:
for line in f:
for w in line.split(','):
w = w.strip()
if w:
wordmap.add(w, length)
length += 1
else:
sub_re = re.compile(r'[\-\']')
split_re = re.compile(r'[^a-zA-Z]+')
with open(file_path, 'r') as f:
for line in f:
for w in split_re.split(sub_re.sub('', line)):
if w:
wordmap.add(w)
return wordmap
@staticmethod
def backup(words_file):
# Copy old `words.txt` to `words.txt.old`
try:
with open(words_file, 'r') as f:
with open(words_file+'.old', 'w') as old:
copyfileobj(f, old)
except IOError:
printerr('No formatted words file could be found at %r, skipping backup' % words_file)
except:
printerr('Could not backup words file from %r to %r' % (words_file, words_file+'.old'))
else:
return True
return False
@staticmethod
def revert(words_file):
# Revert `words.txt.old` to `words.txt`
_, temp_file = mkstemp()
old_file = words_file+'.old'
try:
with open(old_file, 'r') as old:
with open(temp_file, 'w') as temp:
copyfileobj(old, temp)
except IOError:
printerr('No backup file found at %r' % old_file)
except:
printerr('Could not load backup file %r' % old_file)
else:
if WordDictionary.backup(words_file):
try:
with open(temp_file, 'r') as temp:
with open(words_file, 'w') as f:
copyfileobj(temp, f)
except IOError:
printerr('No words file found at %r' % words_file)
except:
printerr('Could not revert backup to %r, attempting to restore overwritten backup' % words_file)
try:
with open(temp_file, 'r') as temp:
with open(old_file, 'w') as old:
copyfileobj(temp, old)
except:
printerr('Could not restore the overwritten backup. Backup is lost.')
else:
os.remove(temp_file)
return True
os.remove(temp_file)
return False
@staticmethod
def setWordsFile(words_file, file_path, backup=True, formatted=False):
# Read input file
try:
wordmap = WordDictionary.parse(file_path, formatted)
except FileNotFoundError:
printerr('Could not find file %r' % file_path)
return None
# Backup words file
if backup:
WordDictionary.backup(words_file)
# Write new words file
try:
with open(words_file, 'w') as f:
f.write(str(wordmap))
except Exception as e:
printerr('Could not write new words file: %s' % e)
return None
# Return wordmap
return WordDictionary(words_file, wordmap)
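
# Hedged usage sketch; 'words.txt' stands in for a formatted words file of the
# kind setWordsFile() writes (one comma-separated line per word length):
#
#   wd = WordDictionary('words.txt')
#   pool = wd.getWordPool(length_lower=6, length_upper=10)
#   print(sorted(pool)[:5])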
|
apache-2.0
| 5,221,395,958,406,475,000 | 26.458333 | 101 | 0.656982 | false |
wavefrontHQ/python-client
|
wavefront_api_client/api/access_policy_api.py
|
1
|
11678
|
# coding: utf-8
"""
Wavefront REST API
<p>The Wavefront REST API enables you to interact with Wavefront servers using standard REST API tools. You can use the REST API to automate commonly executed operations such as automatically tagging sources.</p><p>When you make REST API calls outside the Wavefront REST API documentation you must add the header \"Authorization: Bearer <<API-TOKEN>>\" to your HTTP requests.</p> # noqa: E501
OpenAPI spec version: v2
Contact: chitimba@wavefront.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from wavefront_api_client.api_client import ApiClient
class AccessPolicyApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def get_access_policy(self, **kwargs): # noqa: E501
"""Get the access policy # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_access_policy(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: ResponseContainerAccessPolicy
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_access_policy_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_access_policy_with_http_info(**kwargs) # noqa: E501
return data
def get_access_policy_with_http_info(self, **kwargs): # noqa: E501
"""Get the access policy # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_access_policy_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: ResponseContainerAccessPolicy
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_access_policy" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/accesspolicy', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerAccessPolicy', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_access_policy(self, **kwargs): # noqa: E501
"""Update the access policy # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_access_policy(async_req=True)
>>> result = thread.get()
:param async_req bool
:param AccessPolicy body: Example Body: <pre>{ \"policyRules\": [{ \"name\": \"rule name\", \"description\": \"desc\", \"action\": \"ALLOW\", \"subnet\": \"12.148.72.0/23\" }] }</pre>
:return: ResponseContainerAccessPolicy
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_access_policy_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.update_access_policy_with_http_info(**kwargs) # noqa: E501
return data
def update_access_policy_with_http_info(self, **kwargs): # noqa: E501
"""Update the access policy # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_access_policy_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param AccessPolicy body: Example Body: <pre>{ \"policyRules\": [{ \"name\": \"rule name\", \"description\": \"desc\", \"action\": \"ALLOW\", \"subnet\": \"12.148.72.0/23\" }] }</pre>
:return: ResponseContainerAccessPolicy
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_access_policy" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/accesspolicy', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerAccessPolicy', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def validate_url(self, **kwargs): # noqa: E501
"""Validate a given url and ip address # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.validate_url(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str ip:
:return: ResponseContainerAccessPolicyAction
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.validate_url_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.validate_url_with_http_info(**kwargs) # noqa: E501
return data
def validate_url_with_http_info(self, **kwargs): # noqa: E501
"""Validate a given url and ip address # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.validate_url_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str ip:
:return: ResponseContainerAccessPolicyAction
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['ip'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method validate_url" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'ip' in params:
query_params.append(('ip', params['ip'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/accesspolicy/validate', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerAccessPolicyAction', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
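
# Hedged usage sketch (auto-generated client; authentication setup through the
# package's Configuration object is omitted here):
#
#   api = AccessPolicyApi()          # uses a default ApiClient
#   policy = api.get_access_policy()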
|
apache-2.0
| -3,054,239,166,946,904,600 | 36.073016 | 409 | 0.577582 | false |
back-to/streamlink
|
src/streamlink/plugins/crunchyroll.py
|
1
|
13696
|
import argparse
import datetime
import re
import logging
from uuid import uuid4
from streamlink.plugin import Plugin, PluginError, PluginArguments, PluginArgument
from streamlink.plugin.api import validate
from streamlink.stream import HLSStream
log = logging.getLogger(__name__)
STREAM_WEIGHTS = {
"low": 240,
"mid": 420,
"high": 720,
"ultra": 1080,
}
STREAM_NAMES = {
"120k": "low",
"328k": "mid",
"864k": "high"
}
def parse_timestamp(ts):
"""Takes ISO 8601 format(string) and converts into a utc datetime(naive)"""
return (
datetime.datetime.strptime(ts[:-7], "%Y-%m-%dT%H:%M:%S") +
datetime.timedelta(hours=int(ts[-5:-3]), minutes=int(ts[-2:])) *
int(ts[-6:-5] + "1")
)
_url_re = re.compile(r"""
http(s)?://(\w+\.)?crunchyroll\.
(?:
com|de|es|fr|co.jp
)
(?:/[^/&?]+)?
/[^/&?]+-(?P<media_id>\d+)
""", re.VERBOSE)
_api_schema = validate.Schema({
"error": bool,
validate.optional("code"): validate.text,
validate.optional("message"): validate.text,
validate.optional("data"): object,
})
_media_schema = validate.Schema(
{
"stream_data": validate.any(
None,
{
"streams": validate.all(
[{
"quality": validate.any(validate.text, None),
"url": validate.url(
scheme="http",
path=validate.endswith(".m3u8")
),
validate.optional("video_encode_id"): validate.text
}]
)
}
)
},
validate.get("stream_data")
)
_login_schema = validate.Schema({
"auth": validate.text,
"expires": validate.all(
validate.text,
validate.transform(parse_timestamp)
),
"user": {
"username": validate.any(validate.text, None),
"email": validate.text
}
})
_session_schema = validate.Schema(
{
"session_id": validate.text
},
validate.get("session_id")
)
class CrunchyrollAPIError(Exception):
"""Exception thrown by the Crunchyroll API when an error occurs"""
def __init__(self, msg, code):
Exception.__init__(self, msg)
self.msg = msg
self.code = code
class CrunchyrollAPI(object):
_api_url = "https://api.crunchyroll.com/{0}.0.json"
_default_locale = "en_US"
_user_agent = "Dalvik/1.6.0 (Linux; U; Android 4.4.2; Android SDK built for x86 Build/KK)"
_version_code = 444
_version_name = "2.1.10"
_access_token = "Scwg9PRRZ19iVwD"
_access_type = "com.crunchyroll.crunchyroid"
def __init__(self, cache, session, session_id=None, locale=_default_locale):
"""Abstract the API to access to Crunchyroll data.
Can take saved credentials to use on it's calls to the API.
"""
self.cache = cache
self.session = session
self.session_id = session_id
if self.session_id: # if the session ID is setup don't use the cached auth token
self.auth = None
else:
self.auth = cache.get("auth")
self.device_id = cache.get("device_id") or self.generate_device_id()
self.locale = locale
self.headers = {
"X-Android-Device-Is-GoogleTV": "0",
"X-Android-Device-Product": "google_sdk_x86",
"X-Android-Device-Model": "Android SDK built for x86",
"Using-Brightcove-Player": "1",
"X-Android-Release": "4.4.2",
"X-Android-SDK": "19",
"X-Android-Application-Version-Name": self._version_name,
"X-Android-Application-Version-Code": str(self._version_code),
'User-Agent': self._user_agent
}
def _api_call(self, entrypoint, params=None, schema=None):
"""Makes a call against the api.
:param entrypoint: API method to call.
:param params: parameters to include in the request data.
:param schema: schema to use to validate the data
"""
url = self._api_url.format(entrypoint)
# Default params
params = params or {}
if self.session_id:
params.update({
"session_id": self.session_id
})
else:
params.update({
"device_id": self.device_id,
"device_type": self._access_type,
"access_token": self._access_token,
"version": self._version_code
})
params.update({
"locale": self.locale.replace('_', ''),
})
if self.session_id:
params["session_id"] = self.session_id
# The certificate used by Crunchyroll cannot be verified in some environments.
res = self.session.http.post(url, data=params, headers=self.headers, verify=False)
json_res = self.session.http.json(res, schema=_api_schema)
if json_res["error"]:
err_msg = json_res.get("message", "Unknown error")
err_code = json_res.get("code", "unknown_error")
raise CrunchyrollAPIError(err_msg, err_code)
data = json_res.get("data")
if schema:
data = schema.validate(data, name="API response")
return data
def generate_device_id(self):
device_id = str(uuid4())
# cache the device id
self.cache.set("device_id", 365 * 24 * 60 * 60)
log.debug("Device ID: {0}".format(device_id))
return device_id
def start_session(self):
"""
Starts a session against Crunchyroll's server.
Is recommended that you call this method before making any other calls
to make sure you have a valid session against the server.
"""
params = {}
if self.auth:
params["auth"] = self.auth
self.session_id = self._api_call("start_session", params, schema=_session_schema)
log.debug("Session created with ID: {0}".format(self.session_id))
return self.session_id
def login(self, username, password):
"""
Authenticates the session to be able to access restricted data from
the server (e.g. premium restricted videos).
"""
params = {
"account": username,
"password": password
}
login = self._api_call("login", params, schema=_login_schema)
self.auth = login["auth"]
self.cache.set("auth", login["auth"], expires_at=login["expires"])
return login
def authenticate(self):
data = self._api_call("authenticate", {"auth": self.auth}, schema=_login_schema)
self.auth = data["auth"]
self.cache.set("auth", data["auth"], expires_at=data["expires"])
return data
def get_info(self, media_id, fields=None, schema=None):
"""
Returns the data for a certain media item.
:param media_id: id that identifies the media item to be accessed.
:param fields: list of the media"s field to be returned. By default the
API returns some fields, but others are not returned unless they are
explicity asked for. I have no real documentation on the fields, but
they all seem to start with the "media." prefix (e.g. media.name,
media.stream_data).
:param schema: validation schema to use
"""
params = {"media_id": media_id}
if fields:
params["fields"] = ",".join(fields)
return self._api_call("info", params, schema=schema)
class Crunchyroll(Plugin):
arguments = PluginArguments(
PluginArgument(
"username",
metavar="USERNAME",
requires=["password"],
help="A Crunchyroll username to allow access to restricted streams."
),
PluginArgument(
"password",
sensitive=True,
metavar="PASSWORD",
nargs="?",
const=None,
default=None,
help="""
A Crunchyroll password for use with --crunchyroll-username.
If left blank you will be prompted.
"""
),
PluginArgument(
"purge-credentials",
action="store_true",
help="""
Purge cached Crunchyroll credentials to initiate a new session
and reauthenticate.
"""
),
PluginArgument(
"session-id",
sensitive=True,
metavar="SESSION_ID",
help="""
Set a specific session ID for crunchyroll, can be used to bypass
region restrictions. If using an authenticated session ID, it is
recommended that the authentication parameters be omitted as the
session ID is account specific.
Note: The session ID will be overwritten if authentication is used
and the session ID does not match the account.
"""
),
# Deprecated, uses the general locale setting
PluginArgument(
"locale",
metavar="LOCALE",
help=argparse.SUPPRESS
)
)
    @classmethod
    def can_handle_url(cls, url):
        return _url_re.match(url)
@classmethod
def stream_weight(cls, key):
weight = STREAM_WEIGHTS.get(key)
if weight:
return weight, "crunchyroll"
return Plugin.stream_weight(key)
def _get_streams(self):
api = self._create_api()
match = _url_re.match(self.url)
media_id = int(match.group("media_id"))
try:
# the media.stream_data field is required, no stream data is returned otherwise
info = api.get_info(media_id, fields=["media.stream_data"], schema=_media_schema)
except CrunchyrollAPIError as err:
raise PluginError(u"Media lookup error: {0}".format(err.msg))
if not info:
return
streams = {}
        # The adaptive quality stream is sometimes a subset of all the other streams listed; ultra is not included
has_adaptive = any([s[u"quality"] == u"adaptive" for s in info[u"streams"]])
if has_adaptive:
self.logger.debug(u"Loading streams from adaptive playlist")
for stream in filter(lambda x: x[u"quality"] == u"adaptive", info[u"streams"]):
for q, s in HLSStream.parse_variant_playlist(self.session, stream[u"url"]).items():
# rename the bitrates to low, mid, or high. ultra doesn't seem to appear in the adaptive streams
name = STREAM_NAMES.get(q, q)
streams[name] = s
# If there is no adaptive quality stream then parse each individual result
for stream in info[u"streams"]:
if stream[u"quality"] != u"adaptive":
# the video_encode_id indicates that the stream is not a variant playlist
if u"video_encode_id" in stream:
streams[stream[u"quality"]] = HLSStream(self.session, stream[u"url"])
else:
# otherwise the stream url is actually a list of stream qualities
for q, s in HLSStream.parse_variant_playlist(self.session, stream[u"url"]).items():
# rename the bitrates to low, mid, or high. ultra doesn't seem to appear in the adaptive streams
name = STREAM_NAMES.get(q, q)
streams[name] = s
return streams
def _create_api(self):
"""Creates a new CrunchyrollAPI object, initiates it's session and
tries to authenticate it either by using saved credentials or the
user's username and password.
"""
if self.options.get("purge_credentials"):
self.cache.set("session_id", None, 0)
self.cache.set("auth", None, 0)
self.cache.set("session_id", None, 0)
# use the crunchyroll locale as an override, for backwards compatibility
locale = self.get_option("locale") or self.session.localization.language_code
api = CrunchyrollAPI(self.cache,
self.session,
session_id=self.get_option("session_id"),
locale=locale)
if not self.get_option("session_id"):
self.logger.debug("Creating session with locale: {0}", locale)
api.start_session()
if api.auth:
self.logger.debug("Using saved credentials")
login = api.authenticate()
self.logger.info("Successfully logged in as '{0}'",
login["user"]["username"] or login["user"]["email"])
elif self.options.get("username"):
try:
self.logger.debug("Attempting to login using username and password")
api.login(self.options.get("username"),
self.options.get("password"))
login = api.authenticate()
self.logger.info("Logged in as '{0}'",
login["user"]["username"] or login["user"]["email"])
except CrunchyrollAPIError as err:
raise PluginError(u"Authentication error: {0}".format(err.msg))
else:
self.logger.warning(
"No authentication provided, you won't be able to access "
"premium restricted content"
)
return api
__plugin__ = Crunchyroll
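
# Example invocation (the URL is hypothetical):
#   streamlink --crunchyroll-username you@example.com \
#       "https://www.crunchyroll.com/some-show/episode-1-123456" best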
|
bsd-2-clause
| -3,545,327,950,360,098,000 | 34.208226 | 120 | 0.555637 | false |
chjost/clebsch_gordan
|
group/quat.py
|
1
|
6929
|
"""Class for the quaternions with inversion."""
import numpy as np
import utils
# quaternion parameters for the group O from Table 71.1 in:
# Simon L. Altmann, Peter Herzig, "Point-Group Theory Tables",
# Second Edition (corrected), Wien (2011)
V12 = np.sqrt(0.5) # sqrt(1/2)
# [[ lambda, Lambda_1, Lambda_2, Lambda_3 ]]
qPar = np.asarray(
[[ 1.0, 0.0, 0.0, 0.0 ],
[ 0.0, 1.0, 0.0, 0.0 ],
[ 0.0, 0.0, 1.0, 0.0 ],
[ 0.0, 0.0, 0.0, 1.0 ],
[ 0.5, 0.5, 0.5, 0.5 ],
[ 0.5,-0.5,-0.5, 0.5 ],
[ 0.5, 0.5,-0.5,-0.5 ],
[ 0.5,-0.5, 0.5,-0.5 ],
[ 0.5,-0.5,-0.5,-0.5 ],
[ 0.5, 0.5, 0.5,-0.5 ],
[ 0.5,-0.5, 0.5, 0.5 ],
[ 0.5, 0.5,-0.5, 0.5 ],
[ V12, V12, 0.0, 0.0 ],
[ V12, 0.0, V12, 0.0 ],
[ V12, 0.0, 0.0, V12 ],
[ V12,-V12, 0.0, 0.0 ],
[ V12, 0.0,-V12, 0.0 ],
[ V12, 0.0, 0.0,-V12 ],
[ 0.0, V12, V12, 0.0 ],
[ 0.0,-V12, V12, 0.0 ],
[ 0.0, V12, 0.0, V12 ],
[ 0.0, 0.0,-V12,-V12 ],
[ 0.0, V12, 0.0,-V12 ],
[ 0.0, 0.0,-V12, V12 ]])
class QNew(object):
def __init__(self):
self.q = np.zeros((4,))
self.i = int(1)
self.prec = 1e-6
@classmethod
def create_from_vector(cls, vector, inversion):
tmp = cls()
_vec = np.asarray(vector)
tmp.q = _vec.copy()
_inv = int(inversion)
tmp.i = _inv
return tmp
def __eq__(self, other):
if not isinstance(other, QNew):
return False
if np.allclose(self.q, other.q) and self.i == other.i:
return True
return False
def __ne__(self, other):
if not isinstance(other, QNew):
return True
if not np.allclose(self.q, other.q) or self.i != other.i:
return True
return False
def __abs__(self):
return np.sqrt(np.dot(self.q, self.q))
def __neg__(self):
self.q = -self.q
self.i = -self.i
return self
def __mul__(self, other):
q1 = self.q
q2 = other.q
tvec = np.zeros_like(q1)
tvec[0] = q1[0]*q2[0] - q1[1]*q2[1] - q1[2]*q2[2] - q1[3]*q2[3]
tvec[1] = q1[0]*q2[1] + q1[1]*q2[0] + q1[2]*q2[3] - q1[3]*q2[2]
tvec[2] = q1[0]*q2[2] - q1[1]*q2[3] + q1[2]*q2[0] + q1[3]*q2[1]
tvec[3] = q1[0]*q2[3] + q1[1]*q2[2] - q1[2]*q2[1] + q1[3]*q2[0]
tinv = self.i * other.i
return QNew.create_from_vector(tvec, tinv)
def conj(self):
tvec = self.q * np.asarray([1., -1., -1., -1.])
return QNew.create_from_vector(tvec, self.i)
def norm(self):
return np.dot(self.q, self.q)
def __str__(self):
return "[%r, %r, %r, %r] with inversion %d" % (self.q[0], self.q[1], self.q[2], self.q[3], self.i)
def comp(self, vec):
return np.allclose(self.q, vec)
# code inspired by the quaternion package of moble
# https://github.com/moble/quaternion
def rotation_matrix(self, inv=False):
n = self.norm()
if np.abs(n) < self.prec:
raise ZeroDivisionError("Norm of quaternion is zero.")
_q = self.q
if np.abs(1-n) < self.prec:
res = np.array(
[[1-2*(_q[2]**2 + _q[3]**2), 2*(_q[1]*_q[2] - _q[3]*_q[0]),
2*(_q[1]*_q[3] + _q[2]*_q[0])],
[2*(_q[1]*_q[2] + _q[3]*_q[0]), 1-2*(_q[1]**2 + _q[3]**2),
2*(_q[2]*_q[3] - _q[1]*_q[0])],
[2*(_q[1]*_q[3] - _q[2]*_q[0]), 2*(_q[2]*_q[3] + _q[1]*_q[0]),
1-2*(_q[1]**2 + _q[2]**2)]])
else:
res = np.array(
[[1-2*(_q[2]**2 + _q[3]**2)/n, 2*(_q[1]*_q[2] - _q[3]*_q[0])/n,
2*(_q[1]*_q[3] + _q[2]*_q[0])/n],
[2*(_q[1]*_q[2] + _q[3]*_q[0])/n, 1-2*(_q[1]**2 + _q[3]**2)/n,
2*(_q[2]*_q[3] - _q[1]*_q[0])/n],
[2*(_q[1]*_q[3] - _q[2]*_q[0])/n, 2*(_q[2]*_q[3] + _q[1]*_q[0])/n,
1-2*(_q[1]**2 + _q[2]**2)/n]])
if inv:
res *= self.i
return res
def base(self, inv=False):
pass
def R(self, j, mp, m):
"""compute transformation matrix element
j j 0 1 2 3
R (Q) = R (Q , Q , Q , Q )
m'm m'm
-j
in __
j \ j j
[R.u] = /__ u R (Q) ,
m m'=j m' m'm
according to the formula:
__ ___________________________
j \ /(j-m')(j+m' )(j-m )(j+m) j+m-k j-m'-k m'-m+k k
R = /__ \/ ( k )(m'-m+k)(m'-m+k)( k ) (a) (a*) (b) (-b*)
m'm k
0 3 2 1
where a := Q - i.Q ; b := -Q -i.Q .
first three arguments to be provided as multiplicities:
[J] = 2j+1, [M] = 2m+1, [MP] = 2m'+1, these are always integer
[-3/2] --> -2; [-1] --> -1; [-1/2] --> 0; [0] --> 1; [1/2] --> 2, etc.
"""
a = complex( self.q[0], -self.q[3] )
ac = complex( self.q[0], self.q[3] ) # complex conjugate of a
b = complex(-self.q[2], -self.q[1] )
mbc = complex( self.q[2], -self.q[1] ) # - complex conjugate of b
res = complex( 0.0 )
j_p_mp = ( j + mp - 2 ) // 2 # j+m'
j_m_mp = ( j - mp ) // 2 # j-m'
j_p_m = ( j + m - 2 ) // 2 # j+m
j_m_m = ( j - m ) // 2 # j-m
if j_p_mp < 0 or j_m_mp < 0 or j_p_m < 0 or j_m_m < 0:
return res
# prepare constant arrays
mp_m_m = j_p_mp - j_p_m
n = np.asarray([j_m_mp, j_p_mp, j_m_m, j_p_m])
kp = np.asarray([0, mp_m_m, mp_m_m, 0])
_a = np.asarray([a, ac, b, mbc])
aexp = np.asarray([j_p_m, j_m_mp, mp_m_m, 0])
# get range for loop
k_mx = int(j_p_m if (j_p_m < j_m_mp) else j_m_mp)
k_mn = int(-j_p_mp+j_p_m if (-j_p_mp+j_p_m > 0) else 0)
for k in range(k_mn, k_mx+1):
_k = kp + k
factor = np.sqrt(np.prod(utils.binomial(n, _k))*complex(1.))
_aexp = aexp + np.asarray([-k, -k, k, k])
prod = np.prod(np.power(_a, _aexp))
res += factor * prod
return res
def R_matrix(self, j):
multi = int(2*j+1)
res = np.zeros((multi, multi), dtype=complex)
# the sorting is important, start at largest m
# and decrease
for im in range(multi):
m = j - im
for imp in range(multi):
mp = j - imp
res[im, imp] = self.R(multi, int(2*m+1), int(2*mp+1))
if j%2:
res *= self.i
return res
def omega(self):
return 2*np.arccos(self.q[0])
if __name__ == "__main__":
print("for checks execute the test script")
|
gpl-3.0
| 2,441,700,839,144,792,600 | 33.645 | 106 | 0.405542 | false |
kmee/odoo-brazil-banking
|
l10n_br_account_banking_payment_cnab/model/payment_mode.py
|
1
|
2946
|
# -*- coding: utf-8 -*-
# #############################################################################
#
#
# Copyright (C) 2012 KMEE (http://www.kmee.com.br)
# @author Fernando Marcato Rodrigues
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields
from openerp.addons import decimal_precision as dp
from ..constantes import TIPO_SERVICO, FORMA_LANCAMENTO, \
COMPLEMENTO_TIPO_SERVICO, CODIGO_FINALIDADE_TED, AVISO_FAVORECIDO
class PaymentMode(models.Model):
_inherit = "payment.mode"
condicao_emissao_papeleta = fields.Selection(
[('1', 'Banco emite e Processa'),
('2', 'Cliente emite e banco processa'), ],
u'Condição Emissão de Papeleta', default='1')
cnab_percent_interest = fields.Float(string=u"Percentual de Juros",
digits=dp.get_precision('Account'))
comunicacao_2 = fields.Char("Comunicação para o sacador avalista")
tipo_servico = fields.Selection(
selection=TIPO_SERVICO,
string=u'Tipo de Serviço',
help=u'Campo G025 do CNAB'
)
forma_lancamento = fields.Selection(
selection=FORMA_LANCAMENTO,
string=u'Forma Lançamento',
help=u'Campo G029 do CNAB'
)
codigo_convenio = fields.Char(
size=20,
string=u'Código do Convênio no Banco',
help=u'Campo G007 do CNAB',
default=u'0001222130126',
)
codigo_finalidade_doc = fields.Selection(
selection=COMPLEMENTO_TIPO_SERVICO,
string=u'Complemento do Tipo de Serviço',
help=u'Campo P005 do CNAB'
)
codigo_finalidade_ted = fields.Selection(
selection=CODIGO_FINALIDADE_TED,
string=u'Código Finalidade da TED',
help=u'Campo P011 do CNAB'
)
codigo_finalidade_complementar = fields.Char(
size=2,
string=u'Código de finalidade complementar',
help=u'Campo P013 do CNAB',
)
aviso_ao_favorecido = fields.Selection(
selection=AVISO_FAVORECIDO,
string=u'Aviso ao Favorecido',
help=u'Campo P006 do CNAB',
default=0,
)
    # The CNAB export does not fit solely into the debit and credit
    # parameters.
|
agpl-3.0
| -2,867,324,621,362,343,400 | 37.025974 | 79 | 0.625683 | false |
cortesi/qtile
|
libqtile/widget/prompt.py
|
1
|
27051
|
# -*- coding: utf-8 -*-
# Copyright (c) 2010-2011 Aldo Cortesi
# Copyright (c) 2010 Philip Kranz
# Copyright (c) 2011 Mounier Florian
# Copyright (c) 2011 Paul Colomiets
# Copyright (c) 2011-2012 roger
# Copyright (c) 2011-2012, 2014 Tycho Andersen
# Copyright (c) 2012 Dustin Lacewell
# Copyright (c) 2012 Laurie Clark-Michalek
# Copyright (c) 2012-2014 Craig Barnes
# Copyright (c) 2013 Tao Sauvage
# Copyright (c) 2014 ramnes
# Copyright (c) 2014 Sean Vig
# Copyright (C) 2015, Juan Riquelme González
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import glob
import os
import pickle
import six
import string
from collections import OrderedDict, deque
from libqtile.log_utils import logger
from libqtile.command import _SelectError
from . import base
from .. import bar, command, hook, pangocffi, utils, xcbq, xkeysyms
class NullCompleter(object):
def __init__(self, qtile):
self.qtile = qtile
self.thisfinal = ""
def actual(self):
return self.thisfinal
def reset(self):
pass
def complete(self, txt):
return txt
class FileCompleter(object):
def __init__(self, qtile, _testing=False):
self._testing = _testing
self.qtile = qtile
self.thisfinal = None
self.reset()
def actual(self):
return self.thisfinal
def reset(self):
self.lookup = None
def complete(self, txt):
"""Returns the next completion for txt, or None if there is no completion"""
if not self.lookup:
self.lookup = []
if txt == "" or txt[0] not in "~/":
txt = "~/" + txt
path = os.path.expanduser(txt)
if os.path.isdir(path):
files = glob.glob(os.path.join(path, "*"))
prefix = txt
else:
files = glob.glob(path + "*")
prefix = os.path.dirname(txt)
prefix = prefix.rstrip("/") or "/"
for f in files:
display = os.path.join(prefix, os.path.basename(f))
if os.path.isdir(f):
display += "/"
self.lookup.append((display, f))
self.lookup.sort()
self.offset = -1
self.lookup.append((txt, txt))
self.offset += 1
if self.offset >= len(self.lookup):
self.offset = 0
ret = self.lookup[self.offset]
self.thisfinal = ret[1]
return ret[0]
class QshCompleter(object):
def __init__(self, qtile):
self.qtile = qtile
self.client = command.CommandRoot(self.qtile)
self.thisfinal = None
self.reset()
def actual(self):
return self.thisfinal
def reset(self):
self.lookup = None
self.path = ''
self.offset = -1
def complete(self, txt):
txt = txt.lower()
if not self.lookup:
self.lookup = []
path = txt.split('.')[:-1]
self.path = '.'.join(path)
term = txt.split('.')[-1]
if len(self.path) > 0:
self.path += '.'
contains_cmd = 'self.client.%s_contains' % self.path
try:
contains = eval(contains_cmd)
except AttributeError:
contains = []
for obj in contains:
if obj.lower().startswith(term):
self.lookup.append((obj, obj))
commands_cmd = 'self.client.%scommands()' % self.path
try:
commands = eval(commands_cmd)
except (command.CommandError, AttributeError):
commands = []
for cmd in commands:
if cmd.lower().startswith(term):
self.lookup.append((cmd + '()', cmd + '()'))
self.offset = -1
self.lookup.append((term, term))
self.offset += 1
if self.offset >= len(self.lookup):
self.offset = 0
ret = self.lookup[self.offset]
self.thisfinal = self.path + ret[0]
return self.path + ret[0]
class GroupCompleter(object):
def __init__(self, qtile):
self.qtile = qtile
self.thisfinal = None
self.lookup = None
self.offset = None
def actual(self):
"""Returns the current actual value"""
return self.thisfinal
def reset(self):
self.lookup = None
self.offset = -1
def complete(self, txt):
"""Returns the next completion for txt, or None if there is no completion"""
txt = txt.lower()
if not self.lookup:
self.lookup = []
for group in self.qtile.groupMap.keys():
if group.lower().startswith(txt):
self.lookup.append((group, group))
self.lookup.sort()
self.offset = -1
self.lookup.append((txt, txt))
self.offset += 1
if self.offset >= len(self.lookup):
self.offset = 0
ret = self.lookup[self.offset]
self.thisfinal = ret[1]
return ret[0]
class WindowCompleter(object):
def __init__(self, qtile):
self.qtile = qtile
self.thisfinal = None
self.lookup = None
self.offset = None
def actual(self):
"""Returns the current actual value"""
return self.thisfinal
def reset(self):
self.lookup = None
self.offset = -1
def complete(self, txt):
"""Returns the next completion for txt, or None if there is no completion"""
if not self.lookup:
self.lookup = []
for wid, window in self.qtile.windowMap.items():
if window.group and window.name.lower().startswith(txt):
self.lookup.append((window.name, wid))
self.lookup.sort()
self.offset = -1
self.lookup.append((txt, txt))
self.offset += 1
if self.offset >= len(self.lookup):
self.offset = 0
ret = self.lookup[self.offset]
self.thisfinal = ret[1]
return ret[0]
class CommandCompleter(object):
"""
Parameters
==========
_testing :
disables reloading of the lookup table to make testing possible.
"""
DEFAULTPATH = "/bin:/usr/bin:/usr/local/bin"
def __init__(self, qtile, _testing=False):
self.lookup = None
self.offset = -1
self.thisfinal = None
self._testing = _testing
def actual(self):
"""Returns the current actual value"""
return self.thisfinal
def executable(self, fpath):
return os.access(fpath, os.X_OK)
def reset(self):
self.lookup = None
self.offset = -1
def complete(self, txt):
"""Returns the next completion for txt, or None if there is no completion"""
if not self.lookup:
if not self._testing:
# Lookup is a set of (display value, actual value) tuples.
self.lookup = []
if txt and txt[0] in "~/":
path = os.path.expanduser(txt)
if os.path.isdir(path):
files = glob.glob(os.path.join(path, "*"))
prefix = txt
else:
files = glob.glob(path + "*")
prefix = os.path.dirname(txt)
prefix = prefix.rstrip("/") or "/"
for f in files:
if self.executable(f):
display = os.path.join(prefix, os.path.basename(f))
if os.path.isdir(f):
display += "/"
self.lookup.append((display, f))
else:
dirs = os.environ.get("PATH", self.DEFAULTPATH).split(":")
for d in dirs:
try:
d = os.path.expanduser(d)
for cmd in glob.iglob(os.path.join(d, "%s*" % txt)):
if self.executable(cmd):
self.lookup.append(
(
os.path.basename(cmd),
cmd
),
)
except OSError:
pass
self.lookup.sort()
self.offset = -1
self.lookup.append((txt, txt))
self.offset += 1
if self.offset >= len(self.lookup):
self.offset = 0
ret = self.lookup[self.offset]
self.thisfinal = ret[1]
return ret[0]
class Prompt(base._TextBox):
"""A widget that prompts for user input
Input should be started using the ``.startInput()`` method on this class.
"""
completers = {
"file": FileCompleter,
"qshell": QshCompleter,
"cmd": CommandCompleter,
"group": GroupCompleter,
"window": WindowCompleter,
None: NullCompleter
}
orientations = base.ORIENTATION_HORIZONTAL
defaults = [("cursor", True, "Show a cursor"),
("cursorblink", 0.5, "Cursor blink rate. 0 to disable."),
("cursor_color", "bef098",
"Color for the cursor and text over it."),
("prompt", "{prompt}: ", "Text displayed at the prompt"),
("record_history", True, "Keep a record of executed commands"),
("max_history", 100,
"Commands to keep in history. 0 for no limit."),
("ignore_dups_history", False,
"Don't store duplicates in history"),
("bell_style", "audible",
"Alert at the begin/end of the command history. " +
"Possible values: 'audible', 'visual' and None."),
("visual_bell_color", "ff0000",
"Color for the visual bell (changes prompt background)."),
("visual_bell_time", 0.2,
"Visual bell duration (in seconds).")]
def __init__(self, name="prompt", **config):
base._TextBox.__init__(self, "", bar.CALCULATED, **config)
self.add_defaults(Prompt.defaults)
self.name = name
self.active = False
self.completer = None
# Define key handlers (action to do when hit an specific key)
self.keyhandlers = {
xkeysyms.keysyms['Tab']: self._trigger_complete,
xkeysyms.keysyms['BackSpace']: self._delete_char(),
xkeysyms.keysyms['Delete']: self._delete_char(False),
xkeysyms.keysyms['KP_Delete']: self._delete_char(False),
xkeysyms.keysyms['Escape']: self._unfocus,
xkeysyms.keysyms['Return']: self._send_cmd,
xkeysyms.keysyms['KP_Enter']: self._send_cmd,
xkeysyms.keysyms['Up']: self._get_prev_cmd,
xkeysyms.keysyms['KP_Up']: self._get_prev_cmd,
xkeysyms.keysyms['Down']: self._get_next_cmd,
xkeysyms.keysyms['KP_Down']: self._get_next_cmd,
xkeysyms.keysyms['Left']: self._move_cursor(),
xkeysyms.keysyms['KP_Left']: self._move_cursor(),
xkeysyms.keysyms['Right']: self._move_cursor("right"),
xkeysyms.keysyms['KP_Right']: self._move_cursor("right"),
}
        printables = {x: self._write_char for x in range(127)
                      if chr(x) in string.printable}
self.keyhandlers.update(printables)
if self.bell_style == "visual":
self.original_background = self.background
# If history record is on, get saved history or create history record
if self.record_history:
self.history_path = os.path.join(utils.get_cache_dir(),
'prompt_history')
if os.path.exists(self.history_path):
with open(self.history_path, 'rb') as f:
try:
self.history = pickle.load(f)
if self.ignore_dups_history:
self._dedup_history()
except: # noqa: E722
# unfortunately, pickle doesn't wrap its errors, so we
# can't detect what's a pickle error and what's not.
logger.exception("failed to load prompt history")
self.history = {x: deque(maxlen=self.max_history)
for x in self.completers}
                    # Rebuild the history if its shape no longer matches
                    # the completers or the configured maximum size.
if len(self.history) != len(self.completers):
self.history = {x: deque(maxlen=self.max_history)
for x in self.completers}
if self.max_history != \
self.history[list(self.history)[0]].maxlen:
self.history = {x: deque(self.history[x],
self.max_history)
for x in self.completers}
else:
self.history = {x: deque(maxlen=self.max_history)
for x in self.completers}
def _configure(self, qtile, bar):
self.markup = True
base._TextBox._configure(self, qtile, bar)
def f(win):
if self.active and not win == self.bar.window:
self._unfocus()
hook.subscribe.client_focus(f)
def startInput(self, prompt, callback,
complete=None, strict_completer=False):
"""Run the prompt
Displays a prompt and starts to take one line of keyboard input from
the user. When done, calls the callback with the input string as
        argument. If history recording is enabled, it also allows browsing
        through previous commands with ↑ and ↓ and executing them (untouched
        or modified). When the history is exhausted, an alert fires. It tries
        to mimic, in some way, the shell behavior.
        Parameters
        ==========
        prompt :
            Text displayed at the prompt, e.g. "spawn: ".
        callback :
            Function to call with the returned value.
        complete :
            Tab-completion. Can be None, "cmd", "file", "group", "qshell" or
            "window".
        strict_completer :
            When True the return value will be the exact completer result
            where available.
        """
if self.cursor and self.cursorblink and not self.active:
self.timeout_add(self.cursorblink, self._blink)
self.display = self.prompt.format(prompt=prompt)
self.display = pangocffi.markup_escape_text(self.display)
self.active = True
self.userInput = ""
self.archivedInput = ""
self.show_cursor = self.cursor
self.cursor_position = 0
self.callback = callback
self.completer = self.completers[complete](self.qtile)
self.strict_completer = strict_completer
self._update()
self.bar.widget_grab_keyboard(self)
if self.record_history:
self.completer_history = self.history[complete]
self.position = len(self.completer_history)
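    # Hedged usage sketch (hypothetical key-binding helper, not part of this
    # module; `qtile.widgetMap` and `cmd_spawn` are assumed conventions):
    #
    #   def spawn_prompt(qtile):
    #       prompt = qtile.widgetMap["prompt"]
    #       prompt.startInput("spawn", qtile.cmd_spawn, complete="cmd")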
def calculate_length(self):
if self.text:
width = min(
self.layout.width,
self.bar.width
) + self.actual_padding * 2
return width
else:
return 0
def _blink(self):
self.show_cursor = not self.show_cursor
self._update()
if self.active:
self.timeout_add(self.cursorblink, self._blink)
def _highlight_text(self, text):
color = utils.hex(self.cursor_color)
text = '<span foreground="{0}">{1}</span>'.format(color, text)
if self.show_cursor:
text = '<u>{}</u>'.format(text)
return text
def _update(self):
if self.active:
self.text = self.archivedInput or self.userInput
cursor = pangocffi.markup_escape_text(" ")
if self.cursor_position < len(self.text):
txt1 = self.text[:self.cursor_position]
txt2 = self.text[self.cursor_position]
txt3 = self.text[self.cursor_position + 1:]
                # Escape each fragment explicitly; rebinding a loop variable
                # would not update txt1/txt2/txt3.
                txt1 = pangocffi.markup_escape_text(txt1)
                txt2 = pangocffi.markup_escape_text(txt2)
                txt3 = pangocffi.markup_escape_text(txt3)
txt2 = self._highlight_text(txt2)
self.text = "{0}{1}{2}{3}".format(txt1, txt2, txt3, cursor)
else:
self.text = pangocffi.markup_escape_text(self.text)
self.text += self._highlight_text(cursor)
self.text = self.display + self.text
else:
self.text = ""
self.bar.draw()
def _trigger_complete(self):
# Trigger the auto completion in user input
self.userInput = self.completer.complete(self.userInput)
self.cursor_position = len(self.userInput)
def _history_to_input(self):
# Move actual command (when exploring history) to user input and update
# history position (right after the end)
if self.archivedInput:
self.userInput = self.archivedInput
self.archivedInput = ""
self.position = len(self.completer_history)
def _insert_before_cursor(self, charcode):
# Insert a character (given their charcode) in input, before the cursor
txt1 = self.userInput[:self.cursor_position]
txt2 = self.userInput[self.cursor_position:]
self.userInput = txt1 + chr(charcode) + txt2
self.cursor_position += 1
def _delete_char(self, backspace=True):
# Return a function that deletes character from the input text.
# If backspace is True, function will emulate backspace, else Delete.
def f():
self._history_to_input()
step = -1 if backspace else 0
if not backspace and self.cursor_position == len(self.userInput):
self._alert()
elif len(self.userInput) > 0 and self.cursor_position + step > -1:
txt1 = self.userInput[:self.cursor_position + step]
txt2 = self.userInput[self.cursor_position + step + 1:]
self.userInput = txt1 + txt2
if step:
self.cursor_position += step
else:
self._alert()
return f
def _write_char(self):
# Add pressed (legal) char key to user input.
# No LookupString in XCB... oh, the shame! Unicode users beware!
self._history_to_input()
self._insert_before_cursor(self.key)
def _unfocus(self):
# Remove focus from the widget
self.active = False
self._update()
self.bar.widget_ungrab_keyboard()
def _send_cmd(self):
# Send the prompted text for execution
self._unfocus()
if self.strict_completer:
self.userInput = self.actual_value or self.userInput
del self.actual_value
self._history_to_input()
if self.userInput:
# If history record is activated, also save command in history
if self.record_history:
# ensure no dups in history
if self.ignore_dups_history and (self.userInput in self.completer_history):
self.completer_history.remove(self.userInput)
self.position -= 1
self.completer_history.append(self.userInput)
if self.position < self.max_history:
self.position += 1
if six.PY3:
os.makedirs(os.path.dirname(self.history_path), exist_ok=True)
else:
try:
os.makedirs(os.path.dirname(self.history_path))
except OSError: # file exists
pass
with open(self.history_path, mode='wb') as f:
pickle.dump(self.history, f, protocol=2)
self.callback(self.userInput)
def _alert(self):
# Fire an alert (audible or visual), if bell style is not None.
if self.bell_style == "audible":
self.qtile.conn.conn.core.Bell(0)
elif self.bell_style == "visual":
self.background = self.visual_bell_color
self.timeout_add(self.visual_bell_time, self._stop_visual_alert)
def _stop_visual_alert(self):
self.background = self.original_background
self._update()
def _get_prev_cmd(self):
# Get the previous command in history.
# If there isn't more previous commands, ring system bell
if self.record_history:
if not self.position:
self._alert()
else:
self.position -= 1
self.archivedInput = self.completer_history[self.position]
self.cursor_position = len(self.archivedInput)
def _get_next_cmd(self):
# Get the next command in history.
# If the last command was already reached, ring system bell.
if self.record_history:
if self.position == len(self.completer_history):
self._alert()
elif self.position < len(self.completer_history):
self.position += 1
if self.position == len(self.completer_history):
self.archivedInput = ""
else:
self.archivedInput = self.completer_history[self.position]
self.cursor_position = len(self.archivedInput)
def _cursor_to_left(self):
# Move cursor to left, if possible
if self.cursor_position:
self.cursor_position -= 1
else:
self._alert()
def _cursor_to_right(self):
# move cursor to right, if possible
command = self.archivedInput or self.userInput
if self.cursor_position < len(command):
self.cursor_position += 1
else:
self._alert()
def _move_cursor(self, direction="left"):
# Move the cursor to left or right, according to direction
if direction == "left":
return self._cursor_to_left
elif direction == "right":
return self._cursor_to_right
def _get_keyhandler(self, k):
# Return the action (a function) to do according the pressed key (k).
self.key = k
if k in self.keyhandlers:
if k != xkeysyms.keysyms['Tab']:
self.actual_value = self.completer.actual()
self.completer.reset()
return self.keyhandlers[k]
def handle_KeyPress(self, e):
"""KeyPress handler for the minibuffer.
Currently only supports ASCII characters.
"""
mask = xcbq.ModMasks["shift"] | xcbq.ModMasks["lock"]
state = 1 if e.state & mask else 0
keysym = self.qtile.conn.code_to_syms[e.detail][state]
handle_key = self._get_keyhandler(keysym)
if handle_key:
handle_key()
del self.key
self._update()
def cmd_fake_keypress(self, key):
class Dummy(object):
pass
d = Dummy()
keysym = xcbq.keysyms[key]
d.detail = self.qtile.conn.keysym_to_keycode(keysym)
d.state = 0
self.handle_KeyPress(d)
def cmd_info(self):
"""Returns a dictionary of info for this object"""
return dict(
name=self.name,
width=self.width,
text=self.text,
active=self.active,
)
def cmd_exec_general(
self, prompt, object_name, cmd_name, selector=None, completer=None):
"""
Execute a cmd of any object. For example layout, group, window, widget
, etc with a string that is obtained from startInput.
Parameters
==========
prompt :
Text displayed at the prompt.
object_name :
Name of a object in Qtile. This string has to be 'layout', 'widget',
'bar', 'window' or 'screen'.
cmd_name :
Execution command of selected object using object_name and selector.
selector :
This value select a specific object within a object list that is
obtained by object_name.
If this value is None, current object is selected. e.g. current layout,
current window and current screen.
completer:
Completer to use.
config example:
Key([alt, 'shift'], 'a',
lazy.widget['prompt'].exec_general(
'section(add)',
'layout',
'add_section'))
"""
try:
obj = self.qtile.select([(object_name, selector)])
except _SelectError:
logger.warn("cannot select a object")
return
cmd = obj.command(cmd_name)
if not cmd:
logger.warn("command not found")
return
def f(args):
if args:
cmd(args)
self.startInput(prompt, f, completer)
def _dedup_history(self):
"""Filter the history deque, clearing all duplicate values."""
self.history = {x: self._dedup_deque(self.history[x])
for x in self.completers}
def _dedup_deque(self, dq):
return deque(_LastUpdatedOrderedDict.fromkeys(dq))
class _LastUpdatedOrderedDict(OrderedDict):
"""Store items in the order the keys were last added."""
def __setitem__(self, key, value):
if key in self:
del self[key]
OrderedDict.__setitem__(self, key, value)
|
mit
| -8,826,329,168,637,638,000 | 35.548649 | 91 | 0.542742 | false |
TheGhostHuCodes/spy_dir
|
spy_dir.py
|
1
|
2182
|
#!/usr/bin/env python
import os
import os.path as pt
import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl
import argparse
#TODO: take decimal places as parameter for printing.
def sizeof_pp(num):
for unit in ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB']:
if abs(num) < 1024.0:
return "%3.2f %s" % (num, unit)
num /= 1024.0
return "%.2f %s" % (num, 'Yi')
def xtic_formatter(num, tick_index):
return sizeof_pp(num)
if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description='Walk a directory tree and summarize file sizes.')
    parser.add_argument('dir_path', metavar='Path', type=str,
                        help='root directory to inspect')
parser.add_argument('-p', '--plot', action='store_true')
args = parser.parse_args()
sizes = []
symlink_count = 0
for root, dirs, files in os.walk(args.dir_path, followlinks=False):
for name in files:
fullpath = pt.join(root, name)
if not os.path.islink(fullpath):
sizes.append(pt.getsize(fullpath))
else:
symlink_count += 1
sizes.sort()
print("Searching in directory: {0}".format(args.dir_path))
print("Files Inspected: {0}".format(len(sizes)))
print("Maxfilesize: " + sizeof_pp(sizes[-1]))
print("Symlinks found: {0}".format(symlink_count))
percentile = 95
index = len(sizes) * (percentile / 100.)
print("{0}% of files smaller than: ~".format(percentile) + sizeof_pp(
sizes[int(index)]))
sizesArray = np.asarray(sizes)
if (args.plot):
bins = min(len(sizes) / 10, 200)
plt.figure(figsize=(8, 8))
ax = plt.subplot(111)
# Adjust y-axis to show bins of height 1 and max bin height.
n, _, _ = plt.hist(sizesArray, bins, log=True)
plt.ylim(0.5, max(n) * 1.1)
plt.xlabel("File Size (bytes)")
plt.ylabel("Log(Number of Files)")
plt.title("File size histogram for: {0}".format(args.dir_path))
x_formatter = mpl.ticker.ScalarFormatter(useOffset=False)
x_formatter.set_scientific(False)
x_format = mpl.ticker.FuncFormatter(xtic_formatter)
ax.xaxis.set_major_formatter(x_format)
plt.show()
|
apache-2.0
| -7,296,327,831,960,544,000 | 32.569231 | 73 | 0.597617 | false |
jfunction/capetown_loadshedding_map
|
convert_json.py
|
1
|
1792
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# convert_json.py
#
# Copyright 2014 Jared <jarednorman@hotmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
import json
import collections
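# Sketch of the transformation this script performs (illustrative data; the
# real input file may differ):
#   {"Stage 1": {"1:00 to 3:30": {"Monday": "1, 5, 9", ...}}}
# becomes
#   {"Stage 1": {"1:00 to 3:30": {"Monday": ["1", "5", "9"], ...}}}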
def main():
with open('capetown_tables_json_1.json','rb') as f_in:
d = json.JSONDecoder(object_pairs_hook=collections.OrderedDict).decode(f_in.read())
stages = sorted(d.keys())
for stage in stages:
time_dict = d[stage]
times = sorted(time_dict.keys())
for time in times:
days_dict = time_dict[time]
start_time = int(time.split(':00 to ')[0])
#end_time = start_time + 2.5
days = ["Monday","Tuesday","Wednesday","Thursday","Friday","Saturday","Sunday"]
            for day in days:
shedding_zones = str(days_dict[day])
if not shedding_zones:
shedding_zones = []
else:
shedding_zones = shedding_zones.split(', ')
days_dict[day] = shedding_zones
#time_dict[start_time] = time_dict.pop(time)
with open('capetown_tables_json_2.json','wb') as f_out:
f_out.write(json.dumps(d,indent=2))
return 0
if __name__ == '__main__':
main()
|
gpl-2.0
| 7,618,190,367,658,015,000 | 32.185185 | 85 | 0.679129 | false |
enthought/traitsgui
|
enthought/pyface/workbench/i_view.py
|
1
|
4139
|
#------------------------------------------------------------------------------
# Copyright (c) 2005, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
# Thanks for using Enthought open source!
#
# Author: Enthought, Inc.
# Description: <Enthought pyface package component>
#------------------------------------------------------------------------------
""" The interface for workbench views. """
# Standard library imports.
import logging
# Enthought library imports.
from enthought.pyface.api import ImageResource
from enthought.traits.api import Bool, Enum, Float, Instance, List, Str, \
implements
from enthought.util.camel_case import camel_case_to_words
# Local imports.
from i_perspective_item import IPerspectiveItem
from i_workbench_part import IWorkbenchPart, MWorkbenchPart
from perspective_item import PerspectiveItem
# Logging.
logger = logging.getLogger(__name__)
class IView(IWorkbenchPart, IPerspectiveItem):
""" The interface for workbench views. """
# Is the view busy? (i.e., should the busy cursor (often an hourglass) be
# displayed?).
busy = Bool(False)
# The category that the view belongs to (this can used to group views when
# they are displayed to the user).
category = Str('General')
# An image used to represent the view to the user (shown in the view tab
# and in the view chooser etc).
image = Instance(ImageResource)
# Whether the view is visible or not.
visible = Bool(False)
###########################################################################
# 'IView' interface.
###########################################################################
def activate(self):
""" Activate the view.
"""
def hide(self):
""" Hide the view.
"""
def show(self):
""" Show the view.
"""
class MView(MWorkbenchPart, PerspectiveItem):
""" Mixin containing common code for toolkit-specific implementations. """
implements(IView)
#### 'IView' interface ####################################################
# Is the view busy? (i.e., should the busy cursor (often an hourglass) be
# displayed?).
busy = Bool(False)
# The category that the view belongs to (this can be used to group views
# when they are displayed to the user).
category = Str('General')
# An image used to represent the view to the user (shown in the view tab
# and in the view chooser etc).
image = Instance(ImageResource)
# Whether the view is visible or not.
visible = Bool(False)
###########################################################################
# 'IWorkbenchPart' interface.
###########################################################################
def _id_default(self):
""" Trait initializer. """
id = '%s.%s' % (type(self).__module__, type(self).__name__)
logger.warn('view %s has no Id - using <%s>' % (self, id))
# If no Id is specified then use the name.
return id
def _name_default(self):
""" Trait initializer. """
name = camel_case_to_words(type(self).__name__)
logger.warn('view %s has no name - using <%s>' % (self, name))
return name
###########################################################################
# 'IView' interface.
###########################################################################
def activate(self):
""" Activate the view.
"""
self.window.activate_view(self)
return
def hide(self):
""" Hide the view. """
self.window.hide_view(self)
return
def show(self):
""" Show the view. """
self.window.show_view(self)
return
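# Hedged usage sketch (illustrative subclass, not part of this module;
# `create_control` comes from the IWorkbenchPart interface and its exact
# contract should be checked there):
#
#     class MyView(MView):
#         id = 'example.my_view'
#         name = 'My View'
#
#         def create_control(self, parent):
#             """ Create the toolkit-specific control. """
#             ...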
#### EOF ######################################################################
|
bsd-3-clause
| 3,023,312,889,010,713,600 | 28.147887 | 79 | 0.525489 | false |
gyllstar/appleseed
|
pox/openflow/libopenflow_01.py
|
1
|
126928
|
# Copyright 2011 James McCauley
#
# This file is part of POX.
#
# POX is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# POX is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with POX. If not, see <http://www.gnu.org/licenses/>.
# This file was originally based on pyopenflow.py from NOX, which was
# autogenerated from openflow.h via a program by KK Yap. It has been
# substantially altered since then.
import struct
import operator
import collections
import sys
from pox.lib.packet.packet_base import packet_base
from pox.lib.packet.ethernet import ethernet
from pox.lib.packet.vlan import vlan
from pox.lib.packet.ipv4 import ipv4
from pox.lib.packet.udp import udp
from pox.lib.packet.tcp import tcp
from pox.lib.packet.icmp import icmp
from pox.lib.packet.arp import arp
from pox.lib.addresses import *
from pox.lib.util import assert_type
from pox.lib.util import initHelper
from pox.lib.util import hexdump
_PAD = b'\x00'
_PAD2 = _PAD*2
_PAD3 = _PAD*3
_PAD4 = _PAD*4
_PAD6 = _PAD*6
EMPTY_ETH = EthAddr(None)
MAX_XID = 0x7fFFffFF
_nextXID = 1
#USE_MPLS_MATCH = False
def generateXID ():
global _nextXID
r = _nextXID
  _nextXID = (_nextXID + 1) % (MAX_XID + 1)
return r
def xid_generator(start=1):
""" generate a xid sequence. Wraps at 2**31-1 """
n = start % (MAX_XID + 1)
while True:
yield n
n = ( n + 1 ) % (MAX_XID + 1)
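# Hedged example of consuming the generator (wraps at 2**31 - 1):
#   xids = xid_generator()
#   xids.next()   # -> 1
#   xids.next()   # -> 2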
def _format_body (body, prefix):
if hasattr(body, 'show'):
#TODO: Check this (spacing may well be wrong)
return body.show(prefix + ' ')
else:
return prefix + hexdump(body).replace("\n", "\n" + prefix)
TABLE_ALL = 0xff
TABLE_EMERGENCY = 0xfe
# Structure definitions
#1. Openflow Header
class ofp_header (object):
def __init__ (self, **kw):
self.version = OFP_VERSION
self.header_type = 0
self.length = 8
self.xid = None
initHelper(self, kw)
def _assert (self):
if self.header_type not in ofp_type_map:
return (False, "type is not a known message type")
return (True, None)
def pack (self, assertstruct=True):
if self.xid is None:
self.xid = generateXID()
if(assertstruct):
if(not ofp_header._assert(self)[0]):
raise RuntimeError("assertstruct failed")
packed = ""
packed += struct.pack("!BBHL", self.version, self.header_type, self.length,
self.xid)
return packed
def unpack (self, binaryString):
if (len(binaryString) < 8):
return binaryString
(self.version, self.header_type, self.length, self.xid) = struct.unpack_from("!BBHL", binaryString, 0)
return binaryString[8:]
def __len__ (self):
return self.length
def __eq__ (self, other):
if type(self) != type(other): return False
if self.version != other.version: return False
if self.header_type != other.header_type: return False
if self.length != other.length: return False
if self.xid != other.xid: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'version: ' + str(self.version) + '\n'
outstr += prefix + 'type: ' + str(self.header_type)# + '\n'
outstr += " (" + ofp_type_map.get(self.header_type, "Unknown") + ")\n"
outstr += prefix + 'length: ' + str(self.length) + '\n'
outstr += prefix + 'xid: ' + str(self.xid) + '\n'
return outstr
def __str__ (self):
return self.__class__.__name__ + "\n " + self.show(' ').strip()
#2. Common Structures
##2.1 Port Structures
class ofp_phy_port (object):
def __init__ (self, **kw):
self.port_no = 0
self.hw_addr = EMPTY_ETH
self.name = ""
self.config = 0
self.state = 0
self.curr = 0
self.advertised = 0
self.supported = 0
self.peer = 0
initHelper(self, kw)
def _assert (self):
if not isinstance(self.hw_addr, bytes) and not isinstance(self.hw_addr, EthAddr):
return (False, "hw_addr is not bytes or EthAddr")
if(len(self.hw_addr) != 6):
return (False, "hw_addr is not of size 6")
if(not isinstance(self.name, str)):
return (False, "name is not string")
if(len(self.name) > 16):
return (False, "name is not of size 16")
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += struct.pack("!H", self.port_no)
packed += self.hw_addr if isinstance(self.hw_addr, bytes) else self.hw_addr.toRaw()
packed += self.name.ljust(16,'\0')
packed += struct.pack("!LLLLLL", self.config, self.state, self.curr, self.advertised, self.supported, self.peer)
return packed
def unpack (self, binaryString):
if (len(binaryString) < 48):
return binaryString
(self.port_no,) = struct.unpack_from("!H", binaryString, 0)
self.hw_addr = EthAddr(binaryString[2:8])
self.name = binaryString[8:24].replace("\0","")
(self.config, self.state, self.curr, self.advertised, self.supported, self.peer) = struct.unpack_from("!LLLLLL", binaryString, 24)
return binaryString[48:]
def __len__ (self):
return 48
def __eq__ (self, other):
if type(self) != type(other): return False
if self.port_no != other.port_no: return False
if self.hw_addr != other.hw_addr: return False
if self.name != other.name: return False
if self.config != other.config: return False
if self.state != other.state: return False
if self.curr != other.curr: return False
if self.advertised != other.advertised: return False
if self.supported != other.supported: return False
if self.peer != other.peer: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def __hash__(self, *args, **kwargs):
return self.port_no.__hash__() + self.hw_addr.toInt().__hash__() + \
self.name.__hash__() + self.config.__hash__() + \
self.state.__hash__() + self.curr.__hash__() + \
self.advertised.__hash__() + self.supported.__hash__() + \
self.peer.__hash__()
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'port_no: ' + str(self.port_no) + '\n'
outstr += prefix + 'hw_addr: ' + str(EthAddr(self.hw_addr)) + '\n'
outstr += prefix + 'name: ' + str(self.name) + '\n'
outstr += prefix + 'config: ' + str(self.config) + '\n'
outstr += prefix + 'state: ' + str(self.state) + '\n'
outstr += prefix + 'curr: ' + str(self.curr) + '\n'
outstr += prefix + 'advertised: ' + str(self.advertised) + '\n'
outstr += prefix + 'supported: ' + str(self.supported) + '\n'
outstr += prefix + 'peer: ' + str(self.peer) + '\n'
return outstr
def __repr__(self):
return self.show()
ofp_port_config_rev_map = {
'OFPPC_PORT_DOWN' : 1,
'OFPPC_NO_STP' : 2,
'OFPPC_NO_RECV' : 4,
'OFPPC_NO_RECV_STP' : 8,
'OFPPC_NO_FLOOD' : 16,
'OFPPC_NO_FWD' : 32,
'OFPPC_NO_PACKET_IN' : 64,
}
ofp_port_state_rev_map = {
'OFPPS_STP_LISTEN' : 0,
'OFPPS_LINK_DOWN' : 1,
'OFPPS_STP_LEARN' : 256,
'OFPPS_STP_FORWARD' : 512,
'OFPPS_STP_BLOCK' : 768,
}
OFPPS_STP_MASK = 768
ofp_port_features_rev_map = {
'OFPPF_10MB_HD' : 1,
'OFPPF_10MB_FD' : 2,
'OFPPF_100MB_HD' : 4,
'OFPPF_100MB_FD' : 8,
'OFPPF_1GB_HD' : 16,
'OFPPF_1GB_FD' : 32,
'OFPPF_10GB_FD' : 64,
'OFPPF_COPPER' : 128,
'OFPPF_FIBER' : 256,
'OFPPF_AUTONEG' : 512,
'OFPPF_PAUSE' : 1024,
'OFPPF_PAUSE_ASYM' : 2048,
}
##2.2 Queue Structures
class ofp_packet_queue (object):
def __init__ (self, **kw):
self.queue_id = 0
self.length = 0
self.properties = []
initHelper(self, kw)
def _assert (self):
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += struct.pack("!LH", self.queue_id, self.length)
packed += _PAD2 # Pad
for i in self.properties:
packed += i.pack(assertstruct)
return packed
def unpack (self, binaryString):
if (len(binaryString) < 8):
return binaryString
(self.queue_id, self.length) = struct.unpack_from("!LH", binaryString, 0)
return binaryString[8:]
def __len__ (self):
l = 8
for i in self.properties:
l += len(i)
return l
def __eq__ (self, other):
if type(self) != type(other): return False
if self.queue_id != other.queue_id: return False
if self.length != other.length: return False
if self.properties != other.properties: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'queue_id: ' + str(self.queue_id) + '\n'
outstr += prefix + 'len: ' + str(self.length) + '\n'
outstr += prefix + 'properties: \n'
for obj in self.properties:
outstr += obj.show(prefix + ' ')
return outstr
ofp_queue_properties_rev_map = {
'OFPQT_MIN_RATE' : 0,
}
OFPQT_NONE = 0
class ofp_queue_prop_header (object):
def __init__ (self, **kw):
self.property = 0
self.length = 8
initHelper(self, kw)
def _assert (self):
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += struct.pack("!HH", self.property, self.length)
packed += _PAD4 # Pad
return packed
def unpack (self, binaryString):
if (len(binaryString) < 8):
return binaryString
(self.property, self.length) = struct.unpack_from("!HH", binaryString, 0)
return binaryString[8:]
def __len__ (self):
return 8
def __eq__ (self, other):
if type(self) != type(other): return False
if self.property != other.property: return False
if self.length != other.length: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'property: ' + str(self.property) + '\n'
outstr += prefix + 'len: ' + str(self.length) + '\n'
return outstr
class ofp_queue_prop_min_rate (object):
def __init__ (self, **kw):
self.prop_header = ofp_queue_prop_header()
self.rate = 0
initHelper(self, kw)
def _assert (self):
if(not isinstance(self.prop_header, ofp_queue_prop_header)):
return (False, "prop_header is not class ofp_queue_prop_header")
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += self.prop_header.pack()
packed += struct.pack("!H", self.rate)
packed += _PAD6
return packed
def unpack (self, binaryString):
if (len(binaryString) < 16):
return binaryString
self.prop_header.unpack(binaryString[0:])
(self.rate,) = struct.unpack_from("!H", binaryString, 8)
return binaryString[16:]
def __len__ (self):
return 16
def __eq__ (self, other):
if type(self) != type(other): return False
if self.prop_header != other.prop_header: return False
if self.rate != other.rate: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'prop_header: \n'
self.prop_header.show(prefix + ' ')
outstr += prefix + 'rate: ' + str(self.rate) + '\n'
return outstr
##2.3 Flow Match Structures
class ofp_match (object):
@classmethod
def from_packet (cls, packet, in_port = None):
""" get a match that matches this packet, asuming it came in on in_port in_port
@param packet an instance of 'ethernet'
"""
assert_type("packet", packet, ethernet, none_ok=False)
match = cls()
if in_port is not None:
match.in_port = in_port
match.dl_src = packet.src
match.dl_dst = packet.dst
match.dl_type = packet.type
p = packet.next
# if isinstance(p, mpls):
# match.mpls_label = p.label
# match.mpls_tc = p.tc
# else:
# match.mpls_label = 0
# match.mpls_tc = 0
if isinstance(p, vlan):
match.dl_vlan = p.id
match.dl_vlan_pcp = p.pcp
p = p.next
else:
match.dl_vlan = OFP_VLAN_NONE
match.dl_vlan_pcp = 0
if isinstance(p, ipv4):
match.nw_src = p.srcip
match.nw_dst = p.dstip
match.nw_proto = p.protocol
match.nw_tos = p.tos
p = p.next
if isinstance(p, udp) or isinstance(p, tcp):
match.tp_src = p.srcport
match.tp_dst = p.dstport
elif isinstance(p, icmp):
match.tp_src = p.type
match.tp_dst = p.code
elif isinstance(p, arp):
if p.opcode <= 255:
match.nw_proto = p.opcode
match.nw_src = p.protosrc
match.nw_dst = p.protodst
return match
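  # Hedged usage sketch: a POX PacketIn handler would typically build a match
  # with
  #   match = ofp_match.from_packet(event.parsed, event.port)
  # (`event.parsed` and `event.port` are POX event conventions assumed here).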
def optimize (self):
"""
Reduce the number of wildcards used.
"""
#TODO: Fix for optional cases (i.e. ARP)
if self.dl_vlan == OFP_VLAN_NONE:
self.dl_vlan_pcp = 0
#TODO: What do we do when something is "behind" a wildcard?
# e.g., does nw_src count if dl_type is wild or only if it's 0x0800?
if self.dl_type is not None:
if self.dl_type != 0x0800:
# Not IP
if self.dl_type != 0x0806:
# Not IP or ARP
self.nw_src = IPAddr(0)
self.nw_dst = IPAddr(0)
          self.nw_proto = 0
self.nw_tos = 0
self.tp_src = 0
self.tp_dst = 0
else:
# It's IP
if self.nw_proto != 6 and self.nw_proto != 17 and self.nw_proto != 1:
# Not TCP, UDP, or ICMP
self.tp_src = 0
self.tp_dst = 0
self.wildcards = self._normalize_wildcards(self.wildcards)
return self # for chaining
def clone (self):
n = ofp_match()
for k,v in ofp_match_data.iteritems():
setattr(n, '_' + k, getattr(self, '_' + k))
n.wildcards = self.wildcards
return n
def __init__ (self, **kw):
for k,v in ofp_match_data.iteritems():
setattr(self, '_' + k, v[0])
self.wildcards = self._normalize_wildcards(OFPFW_ALL)
# This is basically initHelper(), but tweaked slightly since this
# class does some magic of its own.
for k,v in kw.iteritems():
if not hasattr(self, '_'+k) and not hasattr(self, k):
raise TypeError(self.__class__.__name__ + " constructor got "
+ "unexpected keyword argument '" + k + "'")
setattr(self, k, v)
def get_nw_dst (self):
if (self.wildcards & OFPFW_NW_DST_ALL) == OFPFW_NW_DST_ALL: return (None, 0)
w = (self.wildcards & OFPFW_NW_DST_MASK) >> OFPFW_NW_DST_SHIFT
return (self._nw_dst,32-w if w <= 32 else 0)
def get_nw_src (self):
if (self.wildcards & OFPFW_NW_SRC_ALL) == OFPFW_NW_SRC_ALL: return (None, 0)
w = (self.wildcards & OFPFW_NW_SRC_MASK) >> OFPFW_NW_SRC_SHIFT
return (self._nw_src,32-w if w <= 32 else 0)
def set_nw_dst (self, *args, **kw):
a = self._make_addr(*args, **kw)
if a == None:
self._nw_src = ofp_match_data['nw_dst'][0]
self.wildcards &= ~OFPFW_NW_DST_MASK
self.wildcards |= ofp_match_data['nw_dst'][1]
return
self._nw_dst = a[0]
self.wildcards &= ~OFPFW_NW_DST_MASK
self.wildcards |= ((32-a[1]) << OFPFW_NW_DST_SHIFT)
def set_nw_src (self, *args, **kw):
a = self._make_addr(*args, **kw)
if a == None:
self._nw_src = ofp_match_data['nw_src'][0]
self.wildcards &= ~OFPFW_NW_SRC_MASK
self.wildcards |= ofp_match_data['nw_src'][1]
return
self._nw_src = a[0]
self.wildcards &= ~OFPFW_NW_SRC_MASK
self.wildcards |= ((32-a[1]) << OFPFW_NW_SRC_SHIFT)
def _make_addr (self, ipOrIPAndBits, bits=None):
if ipOrIPAndBits == None: return None
b = None
if type(ipOrIPAndBits) is tuple:
ip = ipOrIPAndBits[0]
b = int(ipOrIPAndBits[1])
    elif (type(ipOrIPAndBits) is str) and (len(ipOrIPAndBits) != 4):
if ipOrIPAndBits.find('/') != -1:
s = ipOrIPAndBits.split('/')
ip = s[0]
b = int(s[1]) if b is None else b
else:
ip = ipOrIPAndBits
b = 32 if b is None else b
else:
ip = ipOrIPAndBits
b = 32 if b is None else b
if type(ip) is str:
ip = IPAddr(ip)
if bits != None: b = bits
if b > 32: b = 32
elif b < 0: b = 0
return (ip, b)
def __setattr__ (self, name, value):
if name not in ofp_match_data:
self.__dict__[name] = value
return
if name == 'nw_dst' or name == 'nw_src':
# Special handling
getattr(self, 'set_' + name)(value)
return value
if value is None:
setattr(self, '_' + name, ofp_match_data[name][0])
self.wildcards |= ofp_match_data[name][1]
else:
setattr(self, '_' + name, value)
self.wildcards = self.wildcards & ~ofp_match_data[name][1]
return value
def __getattr__ (self, name):
if name in ofp_match_data:
if (self.wildcards & ofp_match_data[name][1]) == ofp_match_data[name][1]:
# It's wildcarded -- always return None
return None
if name == 'nw_dst' or name == 'nw_src':
# Special handling
return getattr(self, 'get_' + name)()[0]
return self.__dict__['_' + name]
raise AttributeError("attribute not found: "+name)
def _assert (self):
#if not isinstance(self._dl_src, list):
# return "self.dl_src is not list"
#if len(self._dl_src) != 6:
# return "self.dl_src is not of size 6"
#if not isinstance(self._dl_dst, list):
# return "self.dl_dst is not list"
if len(self._dl_dst) != 6:
return "self.dl_dst is not of size 6"
return None
def pack (self, assertstruct=True, flow_mod=False):
if(assertstruct):
if self._assert() is not None:
raise RuntimeError(self._assert())
packed = ""
packed += struct.pack("!LH", self._wire_wildcards(self.wildcards) if flow_mod else self.wildcards, self.in_port or 0)
if self.dl_src == None:
packed += EMPTY_ETH.toRaw()
elif type(self.dl_src) is bytes:
packed += self.dl_src
else:
packed += self.dl_src.toRaw()
if self.dl_dst == None:
packed += EMPTY_ETH.toRaw()
elif type(self.dl_dst) is bytes:
packed += self.dl_dst
else:
packed += self.dl_dst.toRaw()
def check_ip(val):
return (val or 0) if self.dl_type == 0x0800 else 0
def check_ip_or_arp(val):
return (val or 0) if self.dl_type == 0x0800 or self.dl_type == 0x0806 else 0
def check_tp(val):
return (val or 0) if self.dl_type == 0x0800 and self.nw_proto in (1,6,17) else 0
packed += struct.pack("!HB", self.dl_vlan or 0, self.dl_vlan_pcp or 0)
packed += _PAD # Hardcode padding
packed += struct.pack("!HBB", self.dl_type or 0, check_ip(self.nw_tos), check_ip_or_arp(self.nw_proto))
packed += _PAD2 # Hardcode padding
def fix (addr):
if addr is None: return 0
if type(addr) is int: return addr & 0xffFFffFF
if type(addr) is long: return addr & 0xffFFffFF
return addr.toUnsigned()
packed += struct.pack("!LLHH", check_ip_or_arp(fix(self.nw_src)), check_ip_or_arp(fix(self.nw_dst)),
check_tp(self.tp_src), check_tp(self.tp_dst))
# if USE_MPLS_MATCH:
# packed += struct.pack("!IBxxx", self.mpls_label or 0, self.mpls_tc or 0)
return packed
def _normalize_wildcards (self, wildcards):
""" nw_src and nw_dst values greater than 32 mean the same thing as 32.
We normalize them here just to be clean and so that comparisons act
as you'd want them to. """
if ((wildcards & OFPFW_NW_SRC_MASK) >> OFPFW_NW_SRC_SHIFT) > 32:
wildcards &= ~OFPFW_NW_SRC_MASK
wildcards |= (32 << OFPFW_NW_SRC_SHIFT)
if ((wildcards & OFPFW_NW_DST_MASK) >> OFPFW_NW_DST_SHIFT) > 32:
wildcards &= ~OFPFW_NW_DST_MASK
wildcards |= (32 << OFPFW_NW_DST_SHIFT)
return wildcards
def _wire_wildcards(self, wildcards):
""" Normallize the wildcard bits to the openflow wire representation. Note this
atrocity from the OF1.1 spec:
Protocol-specific fields within ofp_match will be ignored within
a single table when the corresponding protocol is not specified in the
match. The MPLS match fields will be ignored unless the Ethertype is
specified as MPLS. Likewise, the IP header and transport header fields
will be ignored unless the Ethertype is specified as either IPv4 or
ARP. The tp_src and tp_dst fields will be ignored unless the network
    protocol specified is TCP, UDP or SCTP. Fields that are ignored
don't need to be wildcarded and should be set to 0.
"""
if self.dl_type == 0x0800:
# IP
if self.nw_proto not in (1,6,17):
# not TCP/UDP/ICMP -> Clear TP wildcards for the wire
return wildcards & ~(OFPFW_TP_SRC | OFPFW_TP_DST)
else:
return wildcards
elif self.dl_type == 0x0806:
# ARP: clear NW_TOS / TP wildcards for the wire
return wildcards & ~( OFPFW_NW_TOS | OFPFW_TP_SRC | OFPFW_TP_DST)
else:
# not even IP. Clear NW/TP wildcards for the wire
return wildcards & ~( OFPFW_NW_TOS | OFPFW_NW_PROTO | OFPFW_NW_SRC_MASK | OFPFW_NW_DST_MASK | OFPFW_TP_SRC | OFPFW_TP_DST)
def _unwire_wildcards(self, wildcards):
""" Normallize the wildcard bits from the openflow wire representation. Note this
atrocity from the OF1.1 spec:
Protocol-specific fields within ofp_match will be ignored within
a single table when the corresponding protocol is not specified in the
match. The MPLS match fields will be ignored unless the Ethertype is
specified as MPLS. Likewise, the IP header and transport header fields
will be ignored unless the Ethertype is specified as either IPv4 or
ARP. The tp_src and tp_dst fields will be ignored unless the network
    protocol specified is TCP, UDP or SCTP. Fields that are ignored
don't need to be wildcarded and should be set to 0.
"""
if self._dl_type == 0x0800:
# IP
if self._nw_proto not in (1,6,17):
# not TCP/UDP/ICMP -> Set TP wildcards for the object
return wildcards | (OFPFW_TP_SRC | OFPFW_TP_DST)
else:
return wildcards
elif self._dl_type == 0x0806:
# ARP: Set NW_TOS / TP wildcards for the object
return wildcards | ( OFPFW_NW_TOS | OFPFW_TP_SRC | OFPFW_TP_DST)
else:
# not even IP. Set NW/TP wildcards for the object
return wildcards | ( OFPFW_NW_TOS | OFPFW_NW_PROTO | OFPFW_NW_SRC_MASK | OFPFW_NW_DST_MASK | OFPFW_TP_SRC | OFPFW_TP_DST)
@property
def is_wildcarded (self):
return self.wildcards & OFPFW_ALL != 0
@property
def is_exact (self):
return not self.is_wildcarded
def unpack (self, binaryString, flow_mod=False):
if (len(binaryString) < self.__len__()):
return binaryString
(wildcards, self._in_port) = struct.unpack_from("!LH", binaryString, 0)
self._dl_src = EthAddr(struct.unpack_from("!BBBBBB", binaryString, 6))
self._dl_dst = EthAddr(struct.unpack_from("!BBBBBB", binaryString, 12))
(self._dl_vlan, self._dl_vlan_pcp) = struct.unpack_from("!HB", binaryString, 18)
(self._dl_type, self._nw_tos, self._nw_proto) = struct.unpack_from("!HBB", binaryString, 22)
(self._nw_src, self._nw_dst, self._tp_src, self._tp_dst) = struct.unpack_from("!LLHH", binaryString, 28)
self._nw_src = IPAddr(self._nw_src)
self._nw_dst = IPAddr(self._nw_dst)
# if USE_MPLS_MATCH:
# (self.mpls_label, self.mpls_tc) = struct.unpack_from("!IBxxx", binaryString, 40)
    self.wildcards = self._normalize_wildcards(self._unwire_wildcards(wildcards) if flow_mod else wildcards) # Override
return binaryString[self.__len__():]
def __len__ (self):
# if USE_MPLS_MATCH:
# return 48
return 40
def hash_code (self):
'''
ofp_match is not properly hashable since it is mutable, but it can still be
useful to easily generate a hash code.
'''
h = self.wildcards
for f in ofp_match_data:
v = getattr(self, f)
if type(v) is int:
h ^= v
elif type(v) is long:
h ^= v
return int(h & 0x7fFFffFF)
def matches_with_wildcards (self, other, consider_other_wildcards=True):
"""
Test whether /this/ match completely encompasses the other match. Important for non-strict modify flow_mods etc.
"""
assert_type("other", other, ofp_match, none_ok=False)
# short cut for equal matches
if(self == other): return True
# only candidate if all wildcard bits in the *other* match are also set in this match (i.e., a submatch)
# first compare the bitmask part
if(consider_other_wildcards):
self_bits = self.wildcards & ~(OFPFW_NW_SRC_MASK | OFPFW_NW_DST_MASK)
other_bits = other.wildcards & ~(OFPFW_NW_SRC_MASK | OFPFW_NW_DST_MASK)
if( self_bits | other_bits != self_bits): return False
def match_fail(mine, others):
return mine != None and mine != others
if match_fail(self.in_port, other.in_port): return False
if match_fail(self.dl_vlan, other.dl_vlan): return False
if match_fail(self.dl_src, other.dl_src): return False
if match_fail(self.dl_dst, other.dl_dst): return False
if match_fail(self.dl_type, other.dl_type): return False
if match_fail(self.nw_proto, other.nw_proto): return False
if match_fail(self.tp_src, other.tp_src): return False
if match_fail(self.tp_dst, other.tp_dst): return False
if match_fail(self.dl_vlan_pcp, other.dl_vlan_pcp): return False
if match_fail(self.nw_tos, other.nw_tos): return False
self_nw_src = self.get_nw_src()
if(self_nw_src[0] != None):
other_nw_src = other.get_nw_src()
if self_nw_src[1] > other_nw_src[1] or not IPAddr(other_nw_src[0]).inNetwork((self_nw_src[0], 32-self_nw_src[1])): return False
self_nw_dst = self.get_nw_dst()
if(self_nw_dst[0] != None):
other_nw_dst = other.get_nw_dst()
if self_nw_dst[1] > other_nw_dst[1] or not IPAddr(other_nw_dst[0]).inNetwork((self_nw_dst[0], 32-self_nw_dst[1])): return False
return True
def __eq__ (self, other):
if type(self) != type(other): return False
if self.wildcards != other.wildcards: return False
if self.in_port != other.in_port: return False
if self.dl_src != other.dl_src: return False
if self.dl_dst != other.dl_dst: return False
if self.dl_vlan != other.dl_vlan: return False
if self.dl_vlan_pcp != other.dl_vlan_pcp: return False
if self.dl_type != other.dl_type: return False
if self.nw_tos != other.nw_tos: return False
if self.nw_proto != other.nw_proto: return False
if self.nw_src != other.nw_src: return False
if self.nw_dst != other.nw_dst: return False
if self.tp_src != other.tp_src: return False
if self.tp_dst != other.tp_dst: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def __str__ (self):
return self.__class__.__name__ + "\n " + self.show(' ').strip()
def show (self, prefix=''):
def binstr (n):
s = ''
while True:
s = ('1' if n & 1 else '0') + s
n >>= 1
if n == 0: break
return s
    def safehex(n):
      if n is None:
        return "(None)"
      return hex(n)
def show_wildcards(w):
parts = [ k.lower()[len("OFPFW_"):] for (k,v) in ofp_flow_wildcards_rev_map.iteritems() if v & w == v ]
nw_src_bits = (w & OFPFW_NW_SRC_MASK) >> OFPFW_NW_SRC_SHIFT
if(nw_src_bits > 0): parts.append("nw_src(/%d)" % (32 - nw_src_bits))
nw_dst_bits = (w & OFPFW_NW_DST_MASK) >> OFPFW_NW_DST_SHIFT
if(nw_dst_bits > 0): parts.append("nw_dst(/%d)" % (32 - nw_dst_bits))
return "|".join(parts)
outstr = ''
outstr += prefix + 'wildcards: ' + show_wildcards(self.wildcards) + ' (' + binstr(self.wildcards) + ' = ' + hex(self.wildcards) + ')\n'
def append (f, formatter=str):
      v = getattr(self, f)
if v is None: return ''
return prefix + f + ": " + formatter(v) + "\n"
outstr += append('in_port')
outstr += append('dl_src')
outstr += append('dl_dst')
outstr += append('dl_vlan')
outstr += append('dl_vlan_pcp')
outstr += append('dl_type', safehex)
outstr += append('nw_tos')
outstr += append('nw_proto')
outstr += append('nw_src')
outstr += append('nw_dst')
outstr += append('tp_src')
outstr += append('tp_dst')
# outstr += append('mpls_label')
# outstr += append('mpls_tc')
return outstr
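# Sketch (illustrative only; not called by the library): how wildcarded
# matching is meant to behave.  Field values are arbitrary examples.
def _example_match_encompassing ():
  """
  A match that only pins dl_type encompasses any more-specific match
  with the same dl_type, but not the other way around.
  """
  broad = ofp_match(dl_type = 0x0800)                 # IPv4, rest wildcarded
  narrow = ofp_match(dl_type = 0x0800, nw_proto = 6)  # ... and TCP
  return (broad.matches_with_wildcards(narrow),       # True
          narrow.matches_with_wildcards(broad))       # False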
ofp_flow_wildcards_rev_map = {
'OFPFW_IN_PORT' : 1,
'OFPFW_DL_VLAN' : 2,
'OFPFW_DL_SRC' : 4,
'OFPFW_DL_DST' : 8,
'OFPFW_DL_TYPE' : 16,
'OFPFW_NW_PROTO' : 32,
'OFPFW_TP_SRC' : 64,
'OFPFW_TP_DST' : 128,
# 'OFPFW_MPLS_LABEL' : 1 << 21,
# 'OFPFW_MPLS_TC' : 1 << 22,
  'OFPFW_DL_VLAN_PCP' : 1 << 20,
  'OFPFW_NW_TOS' : 1 << 21,
}
OFPFW_NW_DST_BITS = 6
OFPFW_NW_SRC_BITS = 6
OFPFW_NW_SRC_SHIFT = 8
OFPFW_NW_DST_SHIFT = 14
OFPFW_NW_SRC_ALL = 8192       # 32 << OFPFW_NW_SRC_SHIFT
OFPFW_NW_SRC_MASK = 16128     # 63 << OFPFW_NW_SRC_SHIFT
OFPFW_NW_DST_ALL = 524288     # 32 << OFPFW_NW_DST_SHIFT
OFPFW_NW_DST_MASK = 1032192   # 63 << OFPFW_NW_DST_SHIFT
# Note: All flags set in this glob-all mask need to be handled in the
# packet-handling methods (especially ofp_match.from_packet);
# otherwise packets will not be matched as they should be.
OFPFW_ALL = ((1 << 22) - 1)
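# Sketch: composing wildcard bitmasks by hand.  Single-bit flags are
# OR'd together, while the IP prefix wildcards encode a *count* of
# wildcarded low bits shifted into their mask fields.  This assumes the
# individual OFPFW_* flag constants are generated from the rev map
# above, as with the ones used elsewhere in this module.
def _example_wildcard_bits ():
  # Wildcard everything except dl_type:
  only_dl_type = OFPFW_ALL & ~OFPFW_DL_TYPE
  # ...and additionally pin the top 24 bits of nw_src (a /24 match) by
  # replacing the fully-wildcarded nw_src field with a count of 8:
  w = only_dl_type & ~OFPFW_NW_SRC_MASK
  w |= 8 << OFPFW_NW_SRC_SHIFT
  return w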
##2.4 Flow Action Structures
ofp_action_type_rev_map = {
'OFPAT_OUTPUT' : 0,
'OFPAT_SET_VLAN_VID' : 1,
'OFPAT_SET_VLAN_PCP' : 2,
'OFPAT_STRIP_VLAN' : 3,
'OFPAT_SET_DL_SRC' : 4,
'OFPAT_SET_DL_DST' : 5,
'OFPAT_SET_NW_SRC' : 6,
'OFPAT_SET_NW_DST' : 7,
'OFPAT_SET_NW_TOS' : 8,
'OFPAT_SET_TP_SRC' : 9,
'OFPAT_SET_TP_DST' : 10,
'OFPAT_ENQUEUE' : 11,
  'OFPAT_SET_MPLS_LABEL' : 13,
'OFPAT_SET_MPLS_TC' : 14,
'OFPAT_SET_MPLS_TTL' : 15,
'OFPAT_DEC_MPLS_TTL' : 16,
'OFPAT_PUSH_MPLS' : 19,
'OFPAT_POP_MPLS' : 20,
'OFPAT_RESUBMIT' : 21,
'OFPAT_VENDOR' : 65535,
}
class ofp_action_header (object):
def __init__ (self, **kw):
self.type = None # Purposely bad
self.length = 8
self.data = b''
initHelper(self, kw)
def _assert (self):
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += struct.pack("!HH", self.type, self.length)
packed += _PAD4 # Pad
return packed
def unpack (self, binaryString):
if (len(binaryString) < 8):
return binaryString
(self.type, self.length) = struct.unpack_from("!HH", binaryString, 0)
if len(binaryString) < self.length: return binaryString
self.data = binaryString[8:8+self.length]
return binaryString[self.length:]
def __len__ (self):
return self.length
def __eq__ (self, other):
if type(self) != type(other): return False
if self.type != other.type: return False
if self.length != other.length: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'type: ' + str(self.type) + '\n'
outstr += prefix + 'len: ' + str(self.length) + '\n'
return outstr
class ofp_action_output (object):
def __init__ (self, **kw):
self.type = OFPAT_OUTPUT
self.length = 8
self.port = None # Purposely bad -- require specification
self.max_len = 0xffFF
initHelper(self, kw)
def _assert (self):
return (True, None)
def pack (self, assertstruct=True):
if self.port != OFPP_CONTROLLER:
self.max_len = 0
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += struct.pack("!HHHH", self.type, self.length, self.port, self.max_len)
return packed
def unpack (self, binaryString):
if (len(binaryString) < 8):
return binaryString
(self.type, self.length, self.port, self.max_len) = struct.unpack_from("!HHHH", binaryString, 0)
return binaryString[8:]
def __len__ (self):
return 8
def __eq__ (self, other):
if type(self) != type(other): return False
if self.type != other.type: return False
if self.length != other.length: return False
if self.port != other.port: return False
if self.max_len != other.max_len: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'type: ' + str(self.type) + '\n'
outstr += prefix + 'len: ' + str(self.length) + '\n'
outstr += prefix + 'port: ' + str(self.port) + '\n'
outstr += prefix + 'max_len: ' + str(self.max_len) + '\n'
return outstr
def __repr__(self):
return "ofp_action_output(port=%s)" % str(self.port)
class ofp_action_enqueue (object):
def __init__ (self, **kw):
self.type = OFPAT_ENQUEUE
self.length = 16
self.port = 0
self.queue_id = 0
initHelper(self, kw)
def _assert (self):
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += struct.pack("!HHH", self.type, self.length, self.port)
packed += _PAD6 # Pad
packed += struct.pack("!L", self.queue_id)
return packed
def unpack (self, binaryString):
if (len(binaryString) < 16):
return binaryString
(self.type, self.length, self.port) = struct.unpack_from("!HHH", binaryString, 0)
(self.queue_id,) = struct.unpack_from("!L", binaryString, 12)
return binaryString[16:]
def __len__ (self):
return 16
def __eq__ (self, other):
if type(self) != type(other): return False
if self.type != other.type: return False
if self.length != other.length: return False
if self.port != other.port: return False
if self.queue_id != other.queue_id: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'type: ' + str(self.type) + '\n'
outstr += prefix + 'len: ' + str(self.length) + '\n'
outstr += prefix + 'port: ' + str(self.port) + '\n'
outstr += prefix + 'queue_id: ' + str(self.queue_id) + '\n'
return outstr
class ofp_action_push_mpls (object):
""" For now a push mpls action, but we can use this for
push vlan too some day"""
unicast_mpls_ethertype = 0x8847
multicast_mpls_ethertype = 0x8848
def __init__ (self, **kw):
self.type = OFPAT_PUSH_MPLS
self.length = 8
self.ethertype = ofp_action_push_mpls.unicast_mpls_ethertype
initHelper(self, kw)
def _assert(self):
return ((self.ethertype == ofp_action_push_mpls.unicast_mpls_ethertype or
self.ethertype == ofp_action_push_mpls.multicast_mpls_ethertype),
None)
def pack (self, assertstruct = True):
if (assertstruct):
if not (self._assert()[0]):
return None
packed = ""
packed += struct.pack("!HHHxx", self.type, self.length, self.ethertype)
return packed
def unpack (self, binaryString):
if (len(binaryString) < 8):
return binaryString
(self.type, self.length, self.ethertype) = struct.unpack_from("!HHH", binaryString, 0)
return binaryString[8:]
def __len__ (self):
return self.length
def __eq__ (self, other):
if type(self) != type(other): return False
if self.type != other.type: return False
if self.length != other.length: return False
if self.ethertype != other.ethertype: return False
return True
def __ne__ (self, other):
return not self.__eq__(other)
def show (self, prefix = ''):
outstr = ''
outstr += prefix + 'type: ' + str(self.type) + '\n'
outstr += prefix + 'len: ' + str(self.length) + '\n'
outstr += prefix + 'ethertype: ' + str(self.ethertype) + '\n'
return outstr
class ofp_action_mpls_label (object):
def __init__ (self, **kw):
self.type = OFPAT_SET_MPLS_LABEL
self.length = 8
self.mpls_label = 0
initHelper(self, kw)
def _assert(self):
return (True, None)
def pack (self, assertstruct = True):
if (assertstruct):
if not (self._assert()[0]):
return None
packed = ""
packed += struct.pack("!HHI", self.type, self.length, self.mpls_label)
return packed
def unpack (self, binaryString):
if (len(binaryString) < 8):
return binaryString
(self.type, self.length, self.mpls_label) = struct.unpack_from("!HHI", binaryString, 0)
return binaryString[8:]
def __len__ (self):
return self.length
def __eq__ (self, other):
if type(self) != type(other): return False
if self.type != other.type: return False
if self.length != other.length: return False
if self.mpls_label != other.mpls_label: return False
return True
def __ne__ (self, other):
return not self.__eq__(other)
def show (self, prefix = ''):
outstr = ''
outstr += prefix + 'type: ' + str(self.type) + '\n'
outstr += prefix + 'len: ' + str(self.length) + '\n'
outstr += prefix + 'label: ' + str(self.mpls_label) + '\n'
return outstr
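# Sketch: a typical label-push sequence using the nonstandard MPLS
# actions above -- push a unicast MPLS shim header, then set its label.
def _example_push_mpls (label = 16):
  push = ofp_action_push_mpls()                      # Unicast ethertype default
  set_label = ofp_action_mpls_label(mpls_label = label)
  return [push, set_label]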
class ofp_action_mpls_tc (object):
def __init__ (self, **kw):
self.type = OFPAT_SET_MPLS_TC
self.length = 8
self.mpls_tc = 0
initHelper(self, kw)
def _assert(self):
return (True, None)
def pack (self, assertstruct = True):
if (assertstruct):
if not (self._assert()[0]):
return None
packed = ""
packed += struct.pack("!HHBxxx", self.type, self.length, self.mpls_tc)
return packed
def unpack (self, binaryString):
if (len(binaryString) < 8):
return binaryString
(self.type, self.length, self.mpls_tc) = struct.unpack_from("!HHB", binaryString, 0)
return binaryString[8:]
def __len__ (self):
return self.length
def __eq__ (self, other):
if type(self) != type(other): return False
if self.type != other.type: return False
if self.length != other.length: return False
if self.mpls_tc != other.mpls_tc: return False
return True
def __ne__ (self, other):
return not self.__eq__(other)
def show (self, prefix = ''):
outstr = ''
outstr += prefix + 'type: ' + str(self.type) + '\n'
outstr += prefix + 'len: ' + str(self.length) + '\n'
outstr += prefix + 'tc: ' + str(self.mpls_tc) + '\n'
return outstr
class ofp_action_mpls_ttl (object):
def __init__ (self, **kw):
self.type = OFPAT_SET_MPLS_TTL
self.length = 8
self.mpls_ttl = 0
initHelper(self, kw)
def _assert(self):
return (True, None)
def pack (self, assertstruct = True):
if (assertstruct):
if not (self._assert()[0]):
return None
packed = ""
packed += struct.pack("!HHBxxx", self.type, self.length, self.mpls_ttl)
return packed
def unpack (self, binaryString):
if (len(binaryString) < 8):
return binaryString
(self.type, self.length, self.mpls_ttl) = struct.unpack_from("!HHB", binaryString, 0)
return binaryString[8:]
def __len__ (self):
return self.length
def __eq__ (self, other):
if type(self) != type(other): return False
if self.type != other.type: return False
if self.length != other.length: return False
if self.mpls_ttl != other.mpls_ttl: return False
return True
def __ne__ (self, other):
return not self.__eq__(other)
def show (self, prefix = ''):
outstr = ''
outstr += prefix + 'type: ' + str(self.type) + '\n'
outstr += prefix + 'len: ' + str(self.length) + '\n'
outstr += prefix + 'ttl: ' + str(self.mpls_ttl) + '\n'
return outstr
class ofp_action_mpls_dec_ttl (ofp_action_header):
def __init__ (self, **kw):
super(ofp_action_mpls_dec_ttl, self).__init__(**kw)
self.type = OFPAT_DEC_MPLS_TTL
class ofp_action_resubmit (ofp_action_header):
def __init__ (self, **kw):
super(ofp_action_resubmit, self).__init__(**kw)
self.type = OFPAT_RESUBMIT
class ofp_action_pop_mpls (object):
def __init__ (self, **kw):
self.type = OFPAT_POP_MPLS
self.length = 8
self.ethertype = 0
initHelper(self, kw)
def _assert(self):
return (True, None)
def pack (self, assertstruct = True):
if (assertstruct):
if not (self._assert()[0]):
return None
packed = ""
packed += struct.pack("!HHHxx", self.type, self.length, self.ethertype)
return packed
def unpack (self, binaryString):
if (len(binaryString) < 8):
return binaryString
(self.type, self.length, self.ethertype) = struct.unpack_from("!HHH", binaryString, 0)
return binaryString[8:]
def __len__ (self):
return self.length
def __eq__ (self, other):
if type(self) != type(other): return False
if self.type != other.type: return False
if self.length != other.length: return False
if self.ethertype != other.ethertype: return False
return True
def __ne__ (self, other):
return not self.__eq__(other)
def show (self, prefix = ''):
outstr = ''
outstr += prefix + 'type: ' + str(self.type) + '\n'
outstr += prefix + 'len: ' + str(self.length) + '\n'
outstr += prefix + 'ethertype: ' + str(self.ethertype) + '\n'
return outstr
class ofp_action_vlan_vid (object):
def __init__ (self, **kw):
self.type = OFPAT_SET_VLAN_VID
self.length = 8
self.vlan_vid = 0
initHelper(self, kw)
def _assert (self):
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += struct.pack("!HHH", self.type, self.length, self.vlan_vid)
packed += _PAD2 # Pad
return packed
def unpack (self, binaryString):
if (len(binaryString) < 8):
return binaryString
(self.type, self.length, self.vlan_vid) = struct.unpack_from("!HHH", binaryString, 0)
return binaryString[8:]
def __len__ (self):
return 8
def __eq__ (self, other):
if type(self) != type(other): return False
if self.type != other.type: return False
if self.length != other.length: return False
if self.vlan_vid != other.vlan_vid: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'type: ' + str(self.type) + '\n'
outstr += prefix + 'len: ' + str(self.length) + '\n'
outstr += prefix + 'vlan_vid: ' + str(self.vlan_vid) + '\n'
return outstr
class ofp_action_vlan_pcp (object):
def __init__ (self, **kw):
self.type = OFPAT_SET_VLAN_PCP
self.length = 8
self.vlan_pcp = 0
initHelper(self, kw)
def _assert (self):
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += struct.pack("!HHB", self.type, self.length, self.vlan_pcp)
packed += _PAD3 # Pad
return packed
def unpack (self, binaryString):
if (len(binaryString) < 8):
return binaryString
(self.type, self.length, self.vlan_pcp) = struct.unpack_from("!HHB", binaryString, 0)
return binaryString[8:]
def __len__ (self):
return 8
def __eq__ (self, other):
if type(self) != type(other): return False
if self.type != other.type: return False
if self.length != other.length: return False
if self.vlan_pcp != other.vlan_pcp: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'type: ' + str(self.type) + '\n'
outstr += prefix + 'len: ' + str(self.length) + '\n'
outstr += prefix + 'vlan_pcp: ' + str(self.vlan_pcp) + '\n'
return outstr
class ofp_action_dl_addr (object):
@classmethod
def set_dst (cls, dl_addr = None):
return cls(OFPAT_SET_DL_DST, dl_addr)
@classmethod
def set_src (cls, dl_addr = None):
return cls(OFPAT_SET_DL_SRC, dl_addr)
def __init__ (self, type = None, dl_addr = None):
"""
'type' should be OFPAT_SET_DL_SRC or OFPAT_SET_DL_DST.
"""
self.type = type
self.length = 16
self.dl_addr = EMPTY_ETH
if dl_addr is not None:
self.dl_addr = EthAddr(dl_addr)
def _assert (self):
if not isinstance(self.dl_addr, EthAddr) and not isinstance(self.dl_addr, bytes):
return (False, "dl_addr is not string or EthAddr")
if isinstance(self.dl_addr, bytes) and len(self.dl_addr) != 6:
return (False, "dl_addr is not of size 6")
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += struct.pack("!HH", self.type, self.length)
if isinstance(self.dl_addr, EthAddr):
packed += self.dl_addr.toRaw()
else:
packed += self.dl_addr
packed += _PAD6
return packed
def unpack (self, binaryString):
if (len(binaryString) < 16):
return binaryString
(self.type, self.length) = struct.unpack_from("!HH", binaryString, 0)
self.dl_addr = EthAddr(struct.unpack_from("!BBBBBB", binaryString, 4))
return binaryString[16:]
def __len__ (self):
return 16
def __eq__ (self, other):
if type(self) != type(other): return False
if self.type != other.type: return False
if self.length != other.length: return False
if self.dl_addr != other.dl_addr: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'type: ' + str(self.type) + '\n'
outstr += prefix + 'len: ' + str(self.length) + '\n'
outstr += prefix + 'dl_addr: ' + str(self.dl_addr) + '\n'
return outstr
class ofp_action_nw_addr (object):
@classmethod
def set_dst (cls, nw_addr = None):
return cls(OFPAT_SET_NW_DST, nw_addr)
@classmethod
def set_src (cls, nw_addr = None):
return cls(OFPAT_SET_NW_SRC, nw_addr)
def __init__ (self, type = None, nw_addr = None):
"""
'type' should be OFPAT_SET_NW_SRC or OFPAT_SET_NW_DST
"""
self.type = type
self.length = 8
if nw_addr is not None:
self.nw_addr = IPAddr(nw_addr)
else:
self.nw_addr = IPAddr(0)
def _assert (self):
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += struct.pack("!HHl", self.type, self.length, self.nw_addr.toSigned())
return packed
def unpack (self, binaryString):
if (len(binaryString) < 8):
return binaryString
(self.type, self.length, self.nw_addr) = struct.unpack_from("!HHL", binaryString, 0)
self.nw_addr = IPAddr(self.nw_addr, networkOrder=False)
return binaryString[8:]
def __len__ (self):
return 8
def __eq__ (self, other):
if type(self) != type(other): return False
if self.type != other.type: return False
if self.length != other.length: return False
if self.nw_addr != other.nw_addr: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'type: ' + str(self.type) + '\n'
outstr += prefix + 'len: ' + str(self.length) + '\n'
outstr += prefix + 'nw_addr: ' + str(self.nw_addr) + '\n'
return outstr
class ofp_action_nw_tos (object):
def __init__ (self, nw_tos = 0):
self.type = OFPAT_SET_NW_TOS
self.length = 8
self.nw_tos = nw_tos
def _assert (self):
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += struct.pack("!HHB", self.type, self.length, self.nw_tos)
packed += _PAD3
return packed
def unpack (self, binaryString):
if (len(binaryString) < 8):
return binaryString
(self.type, self.length, self.nw_tos) = struct.unpack_from("!HHB", binaryString, 0)
return binaryString[8:]
def __len__ (self):
return 8
def __eq__ (self, other):
if type(self) != type(other): return False
if self.type != other.type: return False
if self.length != other.length: return False
if self.nw_tos != other.nw_tos: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'type: ' + str(self.type) + '\n'
outstr += prefix + 'len: ' + str(self.length) + '\n'
outstr += prefix + 'nw_tos: ' + str(self.nw_tos) + '\n'
return outstr
class ofp_action_tp_port (object):
@classmethod
def set_dst (cls, tp_port = None):
return cls(OFPAT_SET_TP_DST, tp_port)
@classmethod
def set_src (cls, tp_port = None):
return cls(OFPAT_SET_TP_SRC, tp_port)
def __init__ (self, type=None, tp_port = 0):
"""
'type' is OFPAT_SET_TP_SRC/DST
"""
self.type = type
self.length = 8
self.tp_port = tp_port
def _assert (self):
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += struct.pack("!HHH", self.type, self.length, self.tp_port)
packed += _PAD2
return packed
def unpack (self, binaryString):
if (len(binaryString) < 8):
return binaryString
(self.type, self.length, self.tp_port) = struct.unpack_from("!HHH", binaryString, 0)
return binaryString[8:]
def __len__ (self):
return 8
def __eq__ (self, other):
if type(self) != type(other): return False
if self.type != other.type: return False
if self.length != other.length: return False
if self.tp_port != other.tp_port: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'type: ' + str(self.type) + '\n'
outstr += prefix + 'len: ' + str(self.length) + '\n'
outstr += prefix + 'tp_port: ' + str(self.tp_port) + '\n'
return outstr
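# Sketch: the set_src()/set_dst() classmethods above are shorthand
# constructors for header-rewrite actions.  The addresses and port used
# here are placeholders.
def _example_rewrite_actions ():
  return [ofp_action_dl_addr.set_src("00:11:22:33:44:55"),
          ofp_action_nw_addr.set_dst("10.0.0.1"),
          ofp_action_tp_port.set_dst(8080)]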
class ofp_action_vendor_header (object):
def __init__ (self, **kw):
self.type = OFPAT_VENDOR
self.length = 8
self.vendor = 0
initHelper(self, kw)
def _assert (self):
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += struct.pack("!HHL", self.type, self.length, self.vendor)
return packed
def unpack (self, binaryString):
if (len(binaryString) < 8):
return binaryString
(self.type, self.length, self.vendor) = struct.unpack_from("!HHL", binaryString, 0)
return binaryString[8:]
def __len__ (self):
return 8
def __eq__ (self, other):
if type(self) != type(other): return False
if self.type != other.type: return False
if self.length != other.length: return False
if self.vendor != other.vendor: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'type: ' + str(self.type) + '\n'
outstr += prefix + 'len: ' + str(self.length) + '\n'
outstr += prefix + 'vendor: ' + str(self.vendor) + '\n'
return outstr
#3. Controller-to-Switch Messages
##3.1 Handshake
# was ofp_switch_features
class ofp_features_reply (ofp_header):
def __init__ (self, **kw):
ofp_header.__init__(self)
self.datapath_id = 0
self.n_buffers = 0
self.n_tables = 0
self.capabilities = 0
self.actions = 0
self.ports = []
initHelper(self, kw)
self.header_type = OFPT_FEATURES_REPLY
self.length = len(self)
def _assert (self):
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += ofp_header.pack(self)
packed += struct.pack("!QLB", self.datapath_id, self.n_buffers, self.n_tables)
packed += _PAD3
packed += struct.pack("!LL", self.capabilities, self.actions)
for i in self.ports:
packed += i.pack(assertstruct)
return packed
def unpack (self, binaryString):
if (len(binaryString) < 32):
return binaryString
ofp_header.unpack(self, binaryString[0:])
(self.datapath_id, self.n_buffers, self.n_tables) = struct.unpack_from("!QLB", binaryString, 8)
(self.capabilities, self.actions) = struct.unpack_from("!LL", binaryString, 24)
    portCount = (self.length - 32) // OFP_PHY_PORT_BYTES
self.ports = []
for i in xrange(0, portCount):
p = ofp_phy_port()
p.unpack(binaryString[32+i*OFP_PHY_PORT_BYTES:])
self.ports.append(p)
return binaryString[self.length:]
def __len__ (self):
l = 32
for _ in self.ports:
l += OFP_PHY_PORT_BYTES
return l
def __eq__ (self, other):
if type(self) != type(other): return False
if not ofp_header.__eq__(self, other): return False
if self.datapath_id != other.datapath_id: return False
if self.n_buffers != other.n_buffers: return False
if self.n_tables != other.n_tables: return False
if self.capabilities != other.capabilities: return False
if self.actions != other.actions: return False
if self.ports != other.ports: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'header: \n'
outstr += ofp_header.show(self, prefix + ' ')
outstr += prefix + 'datapath_id: ' + str(self.datapath_id) + '\n'
outstr += prefix + 'n_buffers: ' + str(self.n_buffers) + '\n'
outstr += prefix + 'n_tables: ' + str(self.n_tables) + '\n'
outstr += prefix + 'capabilities: ' + str(self.capabilities) + '\n'
outstr += prefix + 'actions: ' + str(self.actions) + '\n'
outstr += prefix + 'ports: \n'
for obj in self.ports:
outstr += obj.show(prefix + ' ')
return outstr
ofp_switch_features = ofp_features_reply
ofp_capabilities_rev_map = {
'OFPC_FLOW_STATS' : 1,
'OFPC_TABLE_STATS' : 2,
'OFPC_PORT_STATS' : 4,
'OFPC_STP' : 8,
'OFPC_RESERVED' : 16,
'OFPC_IP_REASM' : 32,
'OFPC_QUEUE_STATS' : 64,
'OFPC_ARP_MATCH_IP' : 128,
}
##3.2 Switch Configuration
class ofp_switch_config (ofp_header):
def __init__ (self, **kw):
ofp_header.__init__(self)
self.header_type = OFPT_SET_CONFIG
self.length = 12
self.flags = 0
self.miss_send_len = OFP_DEFAULT_MISS_SEND_LEN
initHelper(self, kw)
def _assert (self):
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += ofp_header.pack(self)
packed += struct.pack("!HH", self.flags, self.miss_send_len)
return packed
def unpack (self, binaryString):
if (len(binaryString) < 12):
return binaryString
ofp_header.unpack(self, binaryString[0:])
(self.flags, self.miss_send_len) = struct.unpack_from("!HH", binaryString, 8)
return binaryString[12:]
def __len__ (self):
l = 12
return l
def __eq__ (self, other):
if type(self) != type(other): return False
if not ofp_header.__eq__(self, other): return False
if self.flags != other.flags: return False
if self.miss_send_len != other.miss_send_len: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'header: \n'
outstr += ofp_header.show(self, prefix + ' ')
outstr += prefix + 'flags: ' + str(self.flags) + '\n'
outstr += prefix + 'miss_send_len: ' + str(self.miss_send_len) + '\n'
return outstr
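# Sketch: typical controller-side construction (header_type defaults to
# OFPT_SET_CONFIG above).  1400 here is an arbitrary example value.
def _example_set_config ():
  # Ask the switch to send at most 1400 bytes of each table-miss packet
  return ofp_switch_config(miss_send_len = 1400)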
ofp_config_flags_rev_map = {
'OFPC_FRAG_NORMAL' : 0,
'OFPC_FRAG_DROP' : 1,
'OFPC_FRAG_REASM' : 2,
'OFPC_FRAG_MASK' : 3,
}
##3.3 Modify State Messages
class ofp_flow_mod (ofp_header):
def __init__ (self, **kw):
ofp_header.__init__(self)
self.header_type = OFPT_FLOW_MOD
if 'match' in kw:
self.match = None
else:
self.match = ofp_match()
self.cookie = 0
self.command = OFPFC_ADD
self.idle_timeout = 0
self.hard_timeout = 0
self.priority = OFP_DEFAULT_PRIORITY
self.buffer_id = -1
self.out_port = OFPP_NONE
self.flags = 0
self.actions = []
# ofp_flow_mod and ofp_packet_out do some special handling of 'actions'...
# Allow "action" as a synonym for "actions"
if 'action' in kw and 'actions' not in kw:
kw['actions'] = kw['action']
del kw['action']
initHelper(self, kw)
# Allow use of actions=<a single action> for kw args.
if not hasattr(self.actions, '__getitem__'):
self.actions = [self.actions]
def _assert (self):
if(not isinstance(self.match, ofp_match)):
return (False, "match is not class ofp_match")
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
self.length = len(self)
packed += ofp_header.pack(self)
packed += self.match.pack(flow_mod=True)
packed += struct.pack("!QHHHHLHH", self.cookie, self.command, self.idle_timeout, self.hard_timeout, self.priority, self.buffer_id & 0xffffffff, self.out_port, self.flags)
for i in self.actions:
packed += i.pack(assertstruct)
return packed
def unpack (self, binaryString):
if (len(binaryString) < 72):
return binaryString
ofp_header.unpack(self, binaryString[0:])
self.match.unpack(binaryString[8:], flow_mod=True)
(self.cookie, self.command, self.idle_timeout, self.hard_timeout, self.priority, self.buffer_id, self.out_port, self.flags) = struct.unpack_from("!QHHHHLHH", binaryString, 8 + len(self.match))
if self.buffer_id == 0xffffffff:
self.buffer_id = -1
self.actions, offset = _unpack_actions(binaryString, self.length-(32 + len(self.match)), 32 + len(self.match))
assert offset == self.length
return binaryString[offset:]
def __len__ (self):
l = 32 + len(self.match)
for i in self.actions:
      l += len(i)
return l
def __eq__ (self, other):
if type(self) != type(other): return False
if not ofp_header.__eq__(self, other): return False
if self.match != other.match: return False
if self.cookie != other.cookie: return False
if self.command != other.command: return False
if self.idle_timeout != other.idle_timeout: return False
if self.hard_timeout != other.hard_timeout: return False
if self.priority != other.priority: return False
if self.buffer_id != other.buffer_id: return False
if self.out_port != other.out_port: return False
if self.flags != other.flags: return False
if self.actions != other.actions: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'header: \n'
outstr += ofp_header.show(self, prefix + ' ')
outstr += prefix + 'match: \n'
outstr += self.match.show(prefix + ' ')
outstr += prefix + 'cookie: ' + str(self.cookie) + '\n'
outstr += prefix + 'command: ' + str(self.command) + '\n'
outstr += prefix + 'idle_timeout: ' + str(self.idle_timeout) + '\n'
outstr += prefix + 'hard_timeout: ' + str(self.hard_timeout) + '\n'
outstr += prefix + 'priority: ' + str(self.priority) + '\n'
outstr += prefix + 'buffer_id: ' + str(self.buffer_id) + '\n'
outstr += prefix + 'out_port: ' + str(self.out_port) + '\n'
outstr += prefix + 'flags: ' + str(self.flags) + '\n'
outstr += prefix + 'actions: \n'
for obj in self.actions:
outstr += obj.show(prefix + ' ')
return outstr
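# Sketch: assembling a flow_mod.  'action' is accepted as a synonym for
# 'actions' (see __init__ above) and a single action need not be wrapped
# in a list.  Field values are arbitrary examples.
def _example_flow_mod ():
  return ofp_flow_mod(match = ofp_match(dl_type = 0x0800,
                                        nw_dst = "10.0.0.1"),
                      idle_timeout = 10,
                      action = ofp_action_output(port = OFPP_FLOOD))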
ofp_flow_mod_command_rev_map = {
'OFPFC_ADD' : 0,
'OFPFC_MODIFY' : 1,
'OFPFC_MODIFY_STRICT' : 2,
'OFPFC_DELETE' : 3,
'OFPFC_DELETE_STRICT' : 4,
}
ofp_flow_mod_flags_rev_map = {
'OFPFF_SEND_FLOW_REM' : 1,
'OFPFF_CHECK_OVERLAP' : 2,
'OFPFF_EMERG' : 4,
}
class ofp_port_mod (ofp_header):
def __init__ (self, **kw):
ofp_header.__init__(self)
self.header_type = OFPT_PORT_MOD
self.port_no = 0
self.hw_addr = EMPTY_ETH
self.config = 0
self.mask = 0
self.advertise = 0
self.length = 32
initHelper(self, kw)
def _assert (self):
if not isinstance(self.hw_addr, bytes) and not isinstance(self.hw_addr, EthAddr):
return (False, "hw_addr is not bytes or EthAddr")
if len(self.hw_addr) != 6:
return (False, "hw_addr is not of size 6")
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += ofp_header.pack(self)
packed += struct.pack("!H", self.port_no)
if isinstance(self.hw_addr, bytes):
packed += self.hw_addr
else:
packed += self.hw_addr.toRaw()
packed += struct.pack("!LLL", self.config, self.mask, self.advertise)
packed += _PAD4
return packed
def unpack (self, binaryString):
if (len(binaryString) < 32):
return binaryString
ofp_header.unpack(self, binaryString[0:])
(self.port_no,) = struct.unpack_from("!H", binaryString, 8)
self.hw_addr = EthAddr(binaryString[10:16])
(self.config, self.mask, self.advertise) = struct.unpack_from("!LLL", binaryString, 16)
return binaryString[32:]
def __len__ (self):
return 32
def __eq__ (self, other):
if type(self) != type(other): return False
if not ofp_header.__eq__(self, other): return False
if self.port_no != other.port_no: return False
if self.hw_addr != other.hw_addr: return False
if self.config != other.config: return False
if self.mask != other.mask: return False
if self.advertise != other.advertise: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'header: \n'
outstr += ofp_header.show(self, prefix + ' ')
outstr += prefix + 'port_no: ' + str(self.port_no) + '\n'
outstr += prefix + 'hw_addr: ' + str(EthAddr(self.hw_addr)) + '\n'
outstr += prefix + 'config: ' + str(self.config) + '\n'
outstr += prefix + 'mask: ' + str(self.mask) + '\n'
outstr += prefix + 'advertise: ' + str(self.advertise) + '\n'
return outstr
##3.4 Queue Configuration Messages
class ofp_queue_get_config_request (ofp_header):
def __init__ (self, **kw):
ofp_header.__init__(self)
self.header_type = OFPT_QUEUE_GET_CONFIG_REQUEST
self.port = 0
self.length = 12
initHelper(self, kw)
def _assert (self):
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += ofp_header.pack(self)
packed += struct.pack("!H", self.port)
packed += _PAD2
return packed
def unpack (self, binaryString):
if (len(binaryString) < 12):
return binaryString
ofp_header.unpack(self, binaryString[0:])
(self.port,) = struct.unpack_from("!H", binaryString, 8)
return binaryString[12:]
def __len__ (self):
return 12
def __eq__ (self, other):
if type(self) != type(other): return False
if not ofp_header.__eq__(self, other): return False
if self.port != other.port: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'header: \n'
outstr += ofp_header.show(self, prefix + ' ')
outstr += prefix + 'port: ' + str(self.port) + '\n'
return outstr
class ofp_queue_get_config_reply (ofp_header):
def __init__ (self, **kw):
ofp_header.__init__(self)
self.header_type = OFPT_QUEUE_GET_CONFIG_REPLY
self.length = 16
self.port = 0
self.queues = []
initHelper(self, kw)
def _assert (self):
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += ofp_header.pack(self)
packed += struct.pack("!H", self.port)
packed += _PAD6
for i in self.queues:
packed += i.pack(assertstruct)
return packed
  def unpack (self, binaryString):
    if (len(binaryString) < 16):
      return binaryString
    ofp_header.unpack(self, binaryString[0:])
    (self.port,) = struct.unpack_from("!H", binaryString, 8)
    # Note: the per-queue structures that follow on the wire are not
    # currently parsed; self.queues is left unchanged.
    return binaryString[16:]
def __len__ (self):
l = 16
for i in self.queues:
l += len(i)
return l
def __eq__ (self, other):
if type(self) != type(other): return False
if not ofp_header.__eq__(self, other): return False
if self.port != other.port: return False
if self.queues != other.queues: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'header: \n'
outstr += ofp_header.show(self, prefix + ' ')
outstr += prefix + 'port: ' + str(self.port) + '\n'
outstr += prefix + 'queues: \n'
for obj in self.queues:
outstr += obj.show(prefix + ' ')
return outstr
##3.5 Read State Messages
class ofp_stats_request (ofp_header):
def __init__ (self, **kw):
ofp_header.__init__(self)
self.header_type = OFPT_STATS_REQUEST
self.type = None # Try to guess
self.flags = 0
self.body = b''
self._body_data = (None, None)
initHelper(self, kw)
def _assert (self):
return (True, None)
def pack (self, assertstruct=True):
self.length = len(self)
if self.type is None:
if isinstance(self.body, ofp_flow_stats_request):
self.type = OFPST_FLOW
elif isinstance(self.body, ofp_aggregate_stats_request):
self.type = OFPST_AGGREGATE
elif self.body_data == b'':
self.type = OFPST_DESC # Maybe shouldn't assume this?
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += ofp_header.pack(self)
packed += struct.pack("!HH", self.type, self.flags)
packed += self.body_data
return packed
@property
def body_data (self):
if self._body_data[0] is not self.body:
if hasattr(self.body, 'pack'):
self._body_data = (self.body, self.body.pack())
else:
self._body_data = (self.body, self.body)
return self._body_data[1]
def unpack (self, binaryString):
if (len(binaryString) < 12):
return binaryString
ofp_header.unpack(self, binaryString[0:])
(self.type, self.flags) = struct.unpack_from("!HH", binaryString, 8)
self.body = binaryString[12:self.length]
assert self.length == len(self)
return binaryString[self.length:]
def __len__ (self):
return 12 + len(self.body_data)
def __eq__ (self, other):
if type(self) != type(other): return False
if not ofp_header.__eq__(self, other): return False
if self.type != other.type: return False
if self.flags != other.flags: return False
if self.body_data != other.body_data: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'header: \n'
outstr += ofp_header.show(self, prefix + ' ')
outstr += prefix + 'type: ' + str(self.type) + '\n'
outstr += prefix + 'flags: ' + str(self.flags) + '\n'
outstr += prefix + 'body:\n' + _format_body(self.body, prefix + ' ') + '\n'
return outstr
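# Sketch: building stats requests.  pack() above guesses 'type' from the
# body -- an empty body is assumed to mean OFPST_DESC, and an
# ofp_flow_stats_request body (defined below) yields OFPST_FLOW.
def _example_stats_requests ():
  desc = ofp_stats_request()                            # OFPST_DESC
  flows = ofp_stats_request(body = ofp_flow_stats_request())
  return (desc, flows)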
class ofp_stats_reply (ofp_header):
def __init__ (self, **kw):
ofp_header.__init__(self)
self.header_type = OFPT_STATS_REPLY
self.type = 0
self.flags = 0
self.body = b''
self._body_data = (None, None)
initHelper(self, kw)
def _assert (self):
return (True, None)
@property
def body_data (self):
if self._body_data[0] is not self.body:
def _pack(b):
return b.pack() if hasattr(b, 'pack') else b
data = b''
if isinstance(self.body, collections.Iterable):
for b in self.body:
data += _pack(b)
else:
data = _pack(self.body)
self._body_data = (self.body, data)
return self._body_data[1]
def pack (self, assertstruct=True):
    if ((self.type is None or self.type == 0)
        and type(self.body) in ofp_stats_reply_class_to_type_map):
      self.type = ofp_stats_reply_class_to_type_map[type(self.body)]
self.length = len(self)
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += ofp_header.pack(self)
packed += struct.pack("!HH", self.type, self.flags)
packed += self.body_data
return packed
def unpack (self, binaryString):
if (len(binaryString) < 12):
return binaryString
ofp_header.unpack(self, binaryString[0:])
(self.type, self.flags) = struct.unpack_from("!HH", binaryString, 8)
self.body = binaryString[12:self.length]
return binaryString[self.length:]
def __len__ (self):
l = 12
l += len(self.body)
return l
def __eq__ (self, other):
if type(self) != type(other): return False
if not ofp_header.__eq__(self, other): return False
if self.type != other.type: return False
if self.flags != other.flags: return False
if self.body != other.body: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'header: \n'
outstr += ofp_header.show(self, prefix + ' ')
outstr += prefix + 'type: ' + str(self.type) + '\n'
outstr += prefix + 'flags: ' + str(self.flags) + '\n'
outstr += prefix + 'body:\n' + _format_body(self.body, prefix + ' ') + '\n'
return outstr
ofp_stats_types_rev_map = {
'OFPST_DESC' : 0,
'OFPST_FLOW' : 1,
'OFPST_AGGREGATE' : 2,
'OFPST_TABLE' : 3,
'OFPST_PORT' : 4,
'OFPST_QUEUE' : 5,
'OFPST_VENDOR' : 65535,
}
ofp_stats_reply_flags_rev_map = {
'OFPSF_REPLY_MORE' : 1,
}
class ofp_desc_stats (object):
def __init__ (self, **kw):
self.mfr_desc= ""
self.hw_desc= ""
self.sw_desc= ""
self.serial_num= ""
self.dp_desc= ""
initHelper(self, kw)
def _assert (self):
    if(not isinstance(self.mfr_desc, str)):
      return (False, "mfr_desc is not a string")
    if(len(self.mfr_desc) > 256):
      return (False, "mfr_desc is longer than 256 bytes")
    if(not isinstance(self.hw_desc, str)):
      return (False, "hw_desc is not a string")
    if(len(self.hw_desc) > 256):
      return (False, "hw_desc is longer than 256 bytes")
    if(not isinstance(self.sw_desc, str)):
      return (False, "sw_desc is not a string")
    if(len(self.sw_desc) > 256):
      return (False, "sw_desc is longer than 256 bytes")
    if(not isinstance(self.serial_num, str)):
      return (False, "serial_num is not a string")
    if(len(self.serial_num) > 32):
      return (False, "serial_num is longer than 32 bytes")
    if(not isinstance(self.dp_desc, str)):
      return (False, "dp_desc is not a string")
    if(len(self.dp_desc) > 256):
      return (False, "dp_desc is longer than 256 bytes")
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += self.mfr_desc.ljust(256,'\0')
packed += self.hw_desc.ljust(256,'\0')
packed += self.sw_desc.ljust(256,'\0')
packed += self.serial_num.ljust(32,'\0')
packed += self.dp_desc.ljust(256,'\0')
return packed
def unpack (self, binaryString):
if (len(binaryString) < 1056):
return binaryString
self.mfr_desc = binaryString[0:256].replace("\0","")
self.hw_desc = binaryString[256:512].replace("\0","")
self.sw_desc = binaryString[512:768].replace("\0","")
self.serial_num = binaryString[768:800].replace("\0","")
self.dp_desc = binaryString[800:1056].replace("\0","")
return binaryString[1056:]
def __len__ (self):
return 1056
def __eq__ (self, other):
if type(self) != type(other): return False
if self.mfr_desc != other.mfr_desc: return False
if self.hw_desc != other.hw_desc: return False
if self.sw_desc != other.sw_desc: return False
if self.serial_num != other.serial_num: return False
if self.dp_desc != other.dp_desc: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'mfr_desc: ' + str(self.mfr_desc) + '\n'
outstr += prefix + 'hw_desc: ' + str(self.hw_desc) + '\n'
outstr += prefix + 'sw_desc: ' + str(self.sw_desc) + '\n'
outstr += prefix + 'serial_num: ' + str(self.serial_num) + '\n'
outstr += prefix + 'dp_desc: ' + str(self.dp_desc) + '\n'
return outstr
class ofp_flow_stats_request (object):
def __init__ (self, **kw):
self.match = ofp_match()
self.table_id = TABLE_ALL
self.out_port = OFPP_NONE
initHelper(self, kw)
def _assert (self):
if(not isinstance(self.match, ofp_match)):
return (False, "match is not class ofp_match")
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += self.match.pack()
packed += struct.pack("!BBH", self.table_id, 0, self.out_port)
return packed
def unpack (self, binaryString):
if (len(binaryString) < 44):
return binaryString
self.match.unpack(binaryString[0:])
(self.table_id, pad, self.out_port) = struct.unpack_from("!BBH", binaryString, len(self.match))
    return binaryString[len(self):]
def __len__ (self):
return 4 + len(self.match)
def __eq__ (self, other):
if type(self) != type(other): return False
if self.match != other.match: return False
if self.table_id != other.table_id: return False
if self.out_port != other.out_port: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'match: \n'
self.match.show(prefix + ' ')
outstr += prefix + 'table_id: ' + str(self.table_id) + '\n'
outstr += prefix + 'out_port: ' + str(self.out_port) + '\n'
return outstr
class ofp_flow_stats (object):
def __init__ (self, **kw):
self.length = 0
self.table_id = 0
self.match = ofp_match()
self.duration_sec = 0
self.duration_nsec = 0
self.priority = OFP_DEFAULT_PRIORITY
self.idle_timeout = 0
self.hard_timeout = 0
self.cookie = 0
self.packet_count = 0
self.byte_count = 0
self.actions = []
initHelper(self, kw)
def _assert (self):
if(not isinstance(self.match, ofp_match)):
return (False, "match is not class ofp_match")
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += struct.pack("!HBB", self.length, self.table_id, 0)
packed += self.match.pack()
packed += struct.pack("!LLHHH", self.duration_sec, self.duration_nsec, self.priority, self.idle_timeout, self.hard_timeout)
packed += _PAD6 # Pad
packed += struct.pack("!QQQ", self.cookie, self.packet_count, self.byte_count)
for i in self.actions:
packed += i.pack(assertstruct)
return packed
def unpack (self, binaryString):
if (len(binaryString) < 48 + len(self.match)):
return binaryString
(self.length, self.table_id, pad) = struct.unpack_from("!HBB", binaryString, 0)
self.match.unpack(binaryString[4:])
(self.duration_sec, self.duration_nsec, self.priority, self.idle_timeout, self.hard_timeout) = struct.unpack_from("!LLHHH", binaryString, 4 + len(self.match))
(self.cookie, self.packet_count, self.byte_count) = struct.unpack_from("!QQQ", binaryString, 24 + len(self.match))
self.actions,offset = _unpack_actions(binaryString, self.length - (48 + len(self.match)), 48 + len(self.match))
assert offset == self.length
assert self.length == len(self)
return binaryString[offset:]
def __len__ (self):
l = 48 + len(self.match)
for i in self.actions:
l += len(i)
return l
def __eq__ (self, other):
if type(self) != type(other): return False
if self.length != other.length: return False
if self.table_id != other.table_id: return False
if self.match != other.match: return False
if self.duration_sec != other.duration_sec: return False
if self.duration_nsec != other.duration_nsec: return False
if self.priority != other.priority: return False
if self.idle_timeout != other.idle_timeout: return False
if self.hard_timeout != other.hard_timeout: return False
if self.cookie != other.cookie: return False
if self.packet_count != other.packet_count: return False
if self.byte_count != other.byte_count: return False
if self.actions != other.actions: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'length: ' + str(self.length) + '\n'
outstr += prefix + 'table_id: ' + str(self.table_id) + '\n'
outstr += prefix + 'match: \n'
self.match.show(prefix + ' ')
outstr += prefix + 'duration_sec: ' + str(self.duration_sec) + '\n'
outstr += prefix + 'duration_nsec: ' + str(self.duration_nsec) + '\n'
outstr += prefix + 'priority: ' + str(self.priority) + '\n'
outstr += prefix + 'idle_timeout: ' + str(self.idle_timeout) + '\n'
outstr += prefix + 'hard_timeout: ' + str(self.hard_timeout) + '\n'
outstr += prefix + 'cookie: ' + str(self.cookie) + '\n'
outstr += prefix + 'packet_count: ' + str(self.packet_count) + '\n'
outstr += prefix + 'byte_count: ' + str(self.byte_count) + '\n'
outstr += prefix + 'actions: \n'
for obj in self.actions:
outstr += obj.show(prefix + ' ')
return outstr
class ofp_aggregate_stats_request (object):
def __init__ (self, **kw):
self.match = ofp_match()
self.table_id = TABLE_ALL
self.out_port = OFPP_NONE
initHelper(self, kw)
def _assert (self):
if(not isinstance(self.match, ofp_match)):
return (False, "match is not class ofp_match")
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += self.match.pack()
packed += struct.pack("!BBH", self.table_id, 0, self.out_port)
return packed
def unpack (self, binaryString):
if (len(binaryString) < 4 + len(self.match)):
return binaryString
self.match.unpack(binaryString[0:])
(self.table_id, pad, self.out_port) = struct.unpack_from("!BBH", binaryString, len(self.match))
return binaryString[44:]
def __len__ (self):
return 44
def __eq__ (self, other):
if type(self) != type(other): return False
if self.match != other.match: return False
if self.table_id != other.table_id: return False
if self.out_port != other.out_port: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'match: \n'
self.match.show(prefix + ' ')
outstr += prefix + 'table_id: ' + str(self.table_id) + '\n'
outstr += prefix + 'out_port: ' + str(self.out_port) + '\n'
return outstr
class ofp_aggregate_stats (object):
def __init__ (self, **kw):
self.packet_count = 0
self.byte_count = 0
self.flow_count = 0
initHelper(self, kw)
def _assert (self):
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += struct.pack("!QQL", self.packet_count, self.byte_count, self.flow_count)
packed += _PAD4 # Pad
return packed
def unpack (self, binaryString):
if (len(binaryString) < 24):
return binaryString
(self.packet_count, self.byte_count, self.flow_count) = struct.unpack_from("!QQL", binaryString, 0)
return binaryString[24:]
def __len__ (self):
return 24
def __eq__ (self, other):
if type(self) != type(other): return False
if self.packet_count != other.packet_count: return False
if self.byte_count != other.byte_count: return False
if self.flow_count != other.flow_count: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'packet_count: ' + str(self.packet_count) + '\n'
outstr += prefix + 'byte_count: ' + str(self.byte_count) + '\n'
outstr += prefix + 'flow_count: ' + str(self.flow_count) + '\n'
return outstr
ofp_aggregate_stats_reply = ofp_aggregate_stats
class ofp_table_stats (object):
def __init__ (self, **kw):
self.table_id = 0
self.name= ""
self.wildcards = 0
self.max_entries = 0
self.active_count = 0
self.lookup_count = 0
self.matched_count = 0
initHelper(self, kw)
def _assert (self):
    if(not isinstance(self.name, str)):
      return (False, "name is not a string")
    if(len(self.name) > 32):
      return (False, "name is longer than 32 bytes")
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += struct.pack("!B", self.table_id)
packed += _PAD3
packed += self.name.ljust(32,'\0')
packed += struct.pack("!LLLQQ", self.wildcards, self.max_entries, self.active_count, self.lookup_count, self.matched_count)
return packed
def unpack (self, binaryString):
if (len(binaryString) < 64):
return binaryString
(self.table_id,) = struct.unpack_from("!B", binaryString, 0)
self.name = binaryString[4:36].replace("\0","")
(self.wildcards, self.max_entries, self.active_count, self.lookup_count, self.matched_count) = struct.unpack_from("!LLLQQ", binaryString, 36)
return binaryString[64:]
def __len__ (self):
return 64
def __eq__ (self, other):
if type(self) != type(other): return False
if self.table_id != other.table_id: return False
if self.name != other.name: return False
if self.wildcards != other.wildcards: return False
if self.max_entries != other.max_entries: return False
if self.active_count != other.active_count: return False
if self.lookup_count != other.lookup_count: return False
if self.matched_count != other.matched_count: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'table_id: ' + str(self.table_id) + '\n'
outstr += prefix + 'name: ' + str(self.name) + '\n'
outstr += prefix + 'wildcards: ' + str(self.wildcards) + '\n'
outstr += prefix + 'max_entries: ' + str(self.max_entries) + '\n'
outstr += prefix + 'active_count: ' + str(self.active_count) + '\n'
outstr += prefix + 'lookup_count: ' + str(self.lookup_count) + '\n'
outstr += prefix + 'matched_count: ' + str(self.matched_count) + '\n'
return outstr
class ofp_port_stats_request (object):
def __init__ (self, **kw):
self.port_no = 0
initHelper(self, kw)
def _assert (self):
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += struct.pack("!H", self.port_no)
packed += _PAD6
return packed
def unpack (self, binaryString):
if (len(binaryString) < 8):
return binaryString
(self.port_no,) = struct.unpack_from("!H", binaryString, 0)
return binaryString[8:]
def __len__ (self):
return 8
def __eq__ (self, other):
if type(self) != type(other): return False
if self.port_no != other.port_no: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'port_no: ' + str(self.port_no) + '\n'
return outstr
class ofp_port_stats (object):
def __init__ (self, **kw):
self.port_no = 0
self.rx_packets = 0
self.tx_packets = 0
self.rx_bytes = 0
self.tx_bytes = 0
self.rx_dropped = 0
self.tx_dropped = 0
self.rx_errors = 0
self.tx_errors = 0
self.rx_frame_err = 0
self.rx_over_err = 0
self.rx_crc_err = 0
self.collisions = 0
initHelper(self, kw)
def _assert (self):
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += struct.pack("!H", self.port_no)
packed += _PAD6
packed += struct.pack("!QQQQQQQQQQQQ", self.rx_packets, self.tx_packets, self.rx_bytes, self.tx_bytes, self.rx_dropped, self.tx_dropped, self.rx_errors, self.tx_errors, self.rx_frame_err, self.rx_over_err, self.rx_crc_err, self.collisions)
return packed
def unpack (self, binaryString):
if (len(binaryString) < 104):
return binaryString
(self.port_no,) = struct.unpack_from("!H", binaryString, 0)
    (self.rx_packets, self.tx_packets, self.rx_bytes, self.tx_bytes,
     self.rx_dropped, self.tx_dropped, self.rx_errors, self.tx_errors,
     self.rx_frame_err, self.rx_over_err, self.rx_crc_err,
     self.collisions) = struct.unpack_from("!QQQQQQQQQQQQ", binaryString, 8)
return binaryString[104:]
def __len__ (self):
return 104
def __eq__ (self, other):
if type(self) != type(other): return False
if self.port_no != other.port_no: return False
if self.rx_packets != other.rx_packets: return False
if self.tx_packets != other.tx_packets: return False
if self.rx_bytes != other.rx_bytes: return False
if self.tx_bytes != other.tx_bytes: return False
if self.rx_dropped != other.rx_dropped: return False
if self.tx_dropped != other.tx_dropped: return False
if self.rx_errors != other.rx_errors: return False
if self.tx_errors != other.tx_errors: return False
if self.rx_frame_err != other.rx_frame_err: return False
if self.rx_over_err != other.rx_over_err: return False
if self.rx_crc_err != other.rx_crc_err: return False
if self.collisions != other.collisions: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
  def __add__(self, other):
    if type(self) != type(other): return NotImplemented
return ofp_port_stats(
port_no=OFPP_NONE,
rx_packets = self.rx_packets + other.rx_packets,
tx_packets = self.tx_packets + other.tx_packets,
rx_bytes = self.rx_bytes + other.rx_bytes,
tx_bytes = self.tx_bytes + other.tx_bytes,
rx_dropped = self.rx_dropped + other.rx_dropped,
tx_dropped = self.tx_dropped + other.tx_dropped,
rx_errors = self.rx_errors + other.rx_errors,
tx_errors = self.tx_errors + other.tx_errors,
rx_frame_err = self.rx_frame_err + other.rx_frame_err,
rx_over_err = self.rx_over_err + other.rx_over_err,
rx_crc_err = self.rx_crc_err + other.rx_crc_err,
collisions = self.collisions + other.collisions)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'port_no: ' + str(self.port_no) + '\n'
outstr += prefix + 'rx_packets: ' + str(self.rx_packets) + '\n'
outstr += prefix + 'tx_packets: ' + str(self.tx_packets) + '\n'
outstr += prefix + 'rx_bytes: ' + str(self.rx_bytes) + '\n'
outstr += prefix + 'tx_bytes: ' + str(self.tx_bytes) + '\n'
outstr += prefix + 'rx_dropped: ' + str(self.rx_dropped) + '\n'
outstr += prefix + 'tx_dropped: ' + str(self.tx_dropped) + '\n'
outstr += prefix + 'rx_errors: ' + str(self.rx_errors) + '\n'
outstr += prefix + 'tx_errors: ' + str(self.tx_errors) + '\n'
outstr += prefix + 'rx_frame_err: ' + str(self.rx_frame_err) + '\n'
outstr += prefix + 'rx_over_err: ' + str(self.rx_over_err) + '\n'
outstr += prefix + 'rx_crc_err: ' + str(self.rx_crc_err) + '\n'
outstr += prefix + 'collisions: ' + str(self.collisions) + '\n'
return outstr
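# Sketch: __add__ above lets per-port counters be summed into a
# switch-wide total (port_no becomes OFPP_NONE in the result).
def _example_total_port_stats (port_stats_list):
  total = ofp_port_stats()
  for ps in port_stats_list:
    total = total + ps
  return total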
class ofp_queue_stats_request (object):
def __init__ (self, **kw):
self.port_no = 0
self.queue_id = 0
initHelper(self, kw)
def _assert (self):
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += struct.pack("!H", self.port_no)
packed += _PAD2
packed += struct.pack("!L", self.queue_id)
return packed
def unpack (self, binaryString):
if (len(binaryString) < 8):
return binaryString
(self.port_no,) = struct.unpack_from("!H", binaryString, 0)
(self.queue_id,) = struct.unpack_from("!L", binaryString, 4)
return binaryString[8:]
def __len__ (self):
return 8
def __eq__ (self, other):
if type(self) != type(other): return False
if self.port_no != other.port_no: return False
if self.queue_id != other.queue_id: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'port_no: ' + str(self.port_no) + '\n'
outstr += prefix + 'queue_id: ' + str(self.queue_id) + '\n'
return outstr
class ofp_queue_stats (object):
def __init__ (self, **kw):
self.port_no = 0
self.queue_id = 0
self.tx_bytes = 0
self.tx_packets = 0
self.tx_errors = 0
initHelper(self, kw)
def _assert (self):
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += struct.pack("!H", self.port_no)
packed += _PAD2
packed += struct.pack("!LQQQ", self.queue_id, self.tx_bytes, self.tx_packets, self.tx_errors)
return packed
def unpack (self, binaryString):
if (len(binaryString) < 32):
return binaryString
(self.port_no,) = struct.unpack_from("!H", binaryString, 0)
(self.queue_id, self.tx_bytes, self.tx_packets, self.tx_errors) = struct.unpack_from("!LQQQ", binaryString, 4)
return binaryString[32:]
def __len__ (self):
return 32
def __eq__ (self, other):
if type(self) != type(other): return False
if self.port_no != other.port_no: return False
if self.queue_id != other.queue_id: return False
if self.tx_bytes != other.tx_bytes: return False
if self.tx_packets != other.tx_packets: return False
if self.tx_errors != other.tx_errors: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'port_no: ' + str(self.port_no) + '\n'
outstr += prefix + 'queue_id: ' + str(self.queue_id) + '\n'
outstr += prefix + 'tx_bytes: ' + str(self.tx_bytes) + '\n'
outstr += prefix + 'tx_packets: ' + str(self.tx_packets) + '\n'
outstr += prefix + 'tx_errors: ' + str(self.tx_errors) + '\n'
return outstr
ofp_stats_reply_class_to_type_map = {
ofp_desc_stats : ofp_stats_types_rev_map['OFPST_DESC'],
ofp_flow_stats : ofp_stats_types_rev_map['OFPST_FLOW'],
ofp_aggregate_stats : ofp_stats_types_rev_map['OFPST_AGGREGATE'],
ofp_table_stats : ofp_stats_types_rev_map['OFPST_TABLE'],
ofp_port_stats : ofp_stats_types_rev_map['OFPST_PORT'],
ofp_queue_stats : ofp_stats_types_rev_map['OFPST_QUEUE']
}
##3.6 Send Packet Message
class ofp_packet_out (ofp_header):
def __init__ (self, **kw):
ofp_header.__init__(self)
self.header_type = OFPT_PACKET_OUT
self.buffer_id = -1
self.in_port = OFPP_NONE
self.actions = []
self._data = ''
# ofp_flow_mod and ofp_packet_out do some special handling of 'actions'...
# Allow "action" as a synonym for "actions"
if 'action' in kw and 'actions' not in kw:
kw['actions'] = kw['action']
del kw['action']
initHelper(self, kw)
# Allow use of actions=<a single action> for kw args.
if not hasattr(self.actions, '__getitem__'):
self.actions = [self.actions]
def _set_data(self, data):
assert_type("data", data, (packet_base, str))
if data is None:
self._data = ''
elif isinstance(data, packet_base):
self._data = data.pack()
else:
self._data = data
def _get_data(self):
return self._data
data = property(_get_data, _set_data)
def _assert (self):
if self.buffer_id != -1 and self.data != '':
return "can not have both buffer_id and data set"
return True
def pack (self, assertstruct=True):
if(assertstruct):
if self._assert() is not True:
raise RuntimeError(self._assert())
actions = b''.join((i.pack(assertstruct) for i in self.actions))
actions_len = len(actions)
self.length = 16 + actions_len
if self.data is not None:
self.length += len(self.data)
if self.data is not None:
return b''.join((ofp_header.pack(self),
struct.pack("!LHH", self.buffer_id & 0xffFFffFF, self.in_port, actions_len),
actions,
self.data))
else:
return b''.join((ofp_header.pack(self),
struct.pack("!LHH", self.buffer_id & 0xffFFffFF, self.in_port, actions_len),
actions))
def unpack (self, binaryString):
if (len(binaryString) < 16):
return binaryString
ofp_header.unpack(self, binaryString[0:])
(self.buffer_id, self.in_port, actions_len) = struct.unpack_from("!LHH", binaryString, 8)
if self.buffer_id == 0xffFFffFF:
self.buffer_id = -1
self.actions,offset = _unpack_actions(binaryString, actions_len, 16)
self.data = binaryString[offset:self.length] if offset < self.length else None
return binaryString[self.length:]
def __len__ (self):
return 16 + reduce(operator.add, (a.length for a in self.actions), 0) + (len(self.data) if self.data else 0)
def __eq__ (self, other):
if type(self) != type(other): return False
if not ofp_header.__eq__(self, other): return False
if self.buffer_id != other.buffer_id: return False
if self.in_port != other.in_port: return False
if self.actions != other.actions: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'header: \n'
outstr += ofp_header.show(self, prefix + ' ')
outstr += prefix + 'buffer_id: ' + str(self.buffer_id) + '\n'
outstr += prefix + 'in_port: ' + str(self.in_port) + '\n'
outstr += prefix + 'actions_len: ' + str(len(self.actions)) + '\n'
outstr += prefix + 'actions: \n'
for obj in self.actions:
if obj is None:
raise RuntimeError("An element of self.actions was None! Bad formatting...")
outstr += obj.show(prefix + ' ')
return outstr
##3.7 Barrier Message
class ofp_barrier_reply (ofp_header):
def __init__ (self, **kw):
ofp_header.__init__(self)
self.header_type = OFPT_BARRIER_REPLY
initHelper(self, kw)
def _assert (self):
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
raise RuntimeError("assertstruct failed")
packed = ""
packed += ofp_header.pack(self)
return packed
def unpack (self, binaryString):
if (len(binaryString) < 8):
return binaryString
ofp_header.unpack(self, binaryString[0:])
return binaryString[8:]
def __len__ (self):
return 8
def __eq__ (self, other):
if type(self) != type(other): return False
if not ofp_header.__eq__(self, other): return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'header: \n'
outstr += ofp_header.show(self, prefix + ' ')
return outstr
class ofp_barrier_request (ofp_header):
def __init__ (self, **kw):
ofp_header.__init__(self)
self.header_type = OFPT_BARRIER_REQUEST
initHelper(self, kw)
def _assert (self):
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += ofp_header.pack(self)
return packed
def unpack (self, binaryString):
if (len(binaryString) < 8):
return binaryString
ofp_header.unpack(self, binaryString[0:])
return binaryString[8:]
def __len__ (self):
return 8
def __eq__ (self, other):
if type(self) != type(other): return False
if not ofp_header.__eq__(self, other): return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'header: \n'
outstr += ofp_header.show(self, prefix + ' ')
return outstr
#4 Asynchronous Messages
class ofp_packet_in (ofp_header):
def __init__ (self, **kw):
ofp_header.__init__(self)
self.in_port = OFPP_NONE
self.buffer_id = -1
self.reason = 0
self.data = None
initHelper(self, kw)
self.header_type = OFPT_PACKET_IN
self._total_len = 0
def _set_data(self, data):
assert_type("data", data, (packet_base, str))
if data is None:
self._data = ''
elif isinstance(data, packet_base):
self._data = data.pack()
else:
self._data = data
def _get_data(self):
return self._data
data = property(_get_data, _set_data)
def _assert (self):
if not isinstance(self.data, str):
return (False,
"ofp_packet_in: data should be raw byte string, not %s" % str(type(self.data)))
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
raise AssertionError(self._assert()[1])
packed = ""
    # We need to update the self.length field so that ofp_header.pack()
    # puts the correct value in the packed array.
self.length = len(self)
self._total_len = len(self) # TODO: Is this correct?
packed += ofp_header.pack(self)
packed += struct.pack("!LHHBB", self.buffer_id & 0xffFFffFF, self._total_len, self.in_port, self.reason, 0)
packed += self.data
return packed
def unpack (self, binaryString):
if (len(binaryString) < 18):
return binaryString
ofp_header.unpack(self, binaryString[0:])
(self.buffer_id, self._total_len, self.in_port, self.reason, pad) = struct.unpack_from("!LHHBB", binaryString, 8)
if self.buffer_id == 0xFFffFFff:
self.buffer_id = -1
if (len(binaryString) < self.length):
return binaryString
self.data = binaryString[18:self.length]
return binaryString[self.length:]
def __len__ (self):
l = 18
l += len(self.data)*1
return l
def __eq__ (self, other):
if type(self) != type(other): return False
if not ofp_header.__eq__(self, other): return False
if self.buffer_id != other.buffer_id: return False
if self._total_len != other._total_len: return False
if self.in_port != other.in_port: return False
if self.reason != other.reason: return False
if self.data != other.data: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'header: \n'
outstr += ofp_header.show(self, prefix + ' ')
outstr += prefix + 'buffer_id: ' + str(self.buffer_id) + '\n'
outstr += prefix + '_total_len: ' + str(self._total_len) + '\n'
outstr += prefix + 'in_port: ' + str(self.in_port) + '\n'
outstr += prefix + 'reason: ' + str(self.reason) + '\n'
outstr += prefix + 'data: ' + repr(self.data) + '\n'
return outstr
ofp_packet_in_reason_rev_map = {
'OFPR_NO_MATCH' : 0,
'OFPR_ACTION' : 1,
}
class ofp_flow_removed (ofp_header):
def __init__ (self, **kw):
ofp_header.__init__(self)
self.header_type = OFPT_FLOW_REMOVED
self.match = ofp_match()
self.cookie = 0
self.priority = 0
self.reason = 0
self.duration_sec = 0
self.duration_nsec = 0
self.idle_timeout = 0
self.packet_count = 0
self.byte_count = 0
self.length = len(self)
initHelper(self, kw)
def _assert (self):
if(not isinstance(self.match, ofp_match)):
return (False, "match is not class ofp_match")
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += ofp_header.pack(self)
packed += self.match.pack()
packed += struct.pack("!QHB", self.cookie, self.priority, self.reason)
packed += _PAD
packed += struct.pack("!LLH", self.duration_sec, self.duration_nsec, self.idle_timeout)
packed += _PAD2
packed += struct.pack("!QQ", self.packet_count, self.byte_count)
return packed
def unpack (self, binaryString):
if (len(binaryString) < len(self)):
return binaryString
ofp_header.unpack(self, binaryString[0:])
self.match.unpack(binaryString[8:])
(self.cookie, self.priority, self.reason) = struct.unpack_from("!QHB", binaryString, 8 + len(self.match))
(self.duration_sec, self.duration_nsec, self.idle_timeout) = struct.unpack_from("!LLH", binaryString, 20 + len(self.match))
(self.packet_count, self.byte_count) = struct.unpack_from("!QQ", binaryString, 32 + len(self.match))
return binaryString[len(self):]
def __len__ (self):
return 48 + len(self.match)
def __eq__ (self, other):
if type(self) != type(other): return False
if not ofp_header.__eq__(self, other): return False
if self.match != other.match: return False
if self.cookie != other.cookie: return False
if self.priority != other.priority: return False
if self.reason != other.reason: return False
if self.duration_sec != other.duration_sec: return False
if self.duration_nsec != other.duration_nsec: return False
if self.idle_timeout != other.idle_timeout: return False
if self.packet_count != other.packet_count: return False
if self.byte_count != other.byte_count: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'header: \n'
outstr += ofp_header.show(self, prefix + ' ')
outstr += prefix + 'match: \n'
    outstr += self.match.show(prefix + ' ')
outstr += prefix + 'cookie: ' + str(self.cookie) + '\n'
outstr += prefix + 'priority: ' + str(self.priority) + '\n'
outstr += prefix + 'reason: ' + str(self.reason) + '\n'
outstr += prefix + 'duration_sec: ' + str(self.duration_sec) + '\n'
outstr += prefix + 'duration_nsec: ' + str(self.duration_nsec) + '\n'
outstr += prefix + 'idle_timeout: ' + str(self.idle_timeout) + '\n'
outstr += prefix + 'packet_count: ' + str(self.packet_count) + '\n'
outstr += prefix + 'byte_count: ' + str(self.byte_count) + '\n'
return outstr
ofp_flow_removed_reason_rev_map = {
'OFPRR_IDLE_TIMEOUT' : 0,
'OFPRR_HARD_TIMEOUT' : 1,
'OFPRR_DELETE' : 2,
}
class ofp_port_status (ofp_header):
def __init__ (self, **kw):
ofp_header.__init__(self)
self.header_type = OFPT_PORT_STATUS
self.reason = 0
self.desc = ofp_phy_port()
self.length = 64
initHelper(self, kw)
def _assert (self):
if(not isinstance(self.desc, ofp_phy_port)):
return (False, "desc is not class ofp_phy_port")
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += ofp_header.pack(self)
packed += struct.pack("!B", self.reason)
packed += _PAD * 7 # Pad
packed += self.desc.pack()
return packed
def unpack (self, binaryString):
if (len(binaryString) < 64):
return binaryString
ofp_header.unpack(self, binaryString[0:])
(self.reason,) = struct.unpack_from("!B", binaryString, 8)
self.desc.unpack(binaryString[16:])
return binaryString[64:]
def __len__ (self):
return 64
def __eq__ (self, other):
if type(self) != type(other): return False
if not ofp_header.__eq__(self, other): return False
if self.reason != other.reason: return False
if self.desc != other.desc: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'header: \n'
outstr += ofp_header.show(self, prefix + ' ')
outstr += prefix + 'reason: ' + str(self.reason) + '\n'
outstr += prefix + 'desc: \n'
    outstr += self.desc.show(prefix + ' ')
return outstr
ofp_port_reason_rev_map = {
'OFPPR_ADD' : 0,
'OFPPR_DELETE' : 1,
'OFPPR_MODIFY' : 2,
}
class ofp_error (ofp_header):
def __init__ (self, **kw):
ofp_header.__init__(self)
self.header_type = OFPT_ERROR
self.type = 0
self.code = 0
self.data = []
initHelper(self, kw)
def _assert (self):
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
self.length = len(self)
packed = ""
packed += ofp_header.pack(self)
packed += struct.pack("!HH", self.type, self.code)
for i in self.data:
packed += struct.pack("!B",i)
return packed
def unpack (self, binaryString):
if (len(binaryString) < 12):
return binaryString
ofp_header.unpack(self, binaryString[0:])
(self.type, self.code) = struct.unpack_from("!HH", binaryString, 8)
return binaryString[12:]
def __len__ (self):
l = 12
l += len(self.data)*1
return l
def __eq__ (self, other):
if type(self) != type(other): return False
if not ofp_header.__eq__(self, other): return False
if self.type != other.type: return False
if self.code != other.code: return False
if self.data != other.data: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'header: \n'
outstr += ofp_header.show(self, prefix + ' ')
t = self.type
c = self.code
if t < len(ofp_error_type):
n = ofp_error_type_map[t]
t = "%s (%i)" % (n, t)
n = 'ofp' + n.lower()[5:] + '_code_map'
if n in sys.modules[__name__].__dict__:
if c in sys.modules[__name__].__dict__[n]:
c = "%s (%i)" % (sys.modules[__name__].__dict__[n][c], c)
outstr += prefix + 'type: ' + str(t) + '\n'
outstr += prefix + 'code: ' + str(c) + '\n'
if len(self.data):
outstr += prefix + 'data: ' + str(self.data) + '\n'
return outstr.strip()
ofp_error_type_rev_map = {
'OFPET_HELLO_FAILED' : 0,
'OFPET_BAD_REQUEST' : 1,
'OFPET_BAD_ACTION' : 2,
'OFPET_FLOW_MOD_FAILED' : 3,
'OFPET_PORT_MOD_FAILED' : 4,
'OFPET_QUEUE_OP_FAILED' : 5,
}
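# Note: once _init() runs (near the bottom of this file), every *_rev_map
# above gains a generated forward twin (e.g. ofp_error_type_map[1] ==
# 'OFPET_BAD_REQUEST'), a value list where the values are contiguous, and a
# module-level constant per key (OFPET_BAD_REQUEST == 1).  ofp_error.show()
# relies on these generated names.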
ofp_hello_failed_code_rev_map = {
'OFPHFC_INCOMPATIBLE' : 0,
'OFPHFC_EPERM' : 1,
}
ofp_bad_request_code_rev_map = {
'OFPBRC_BAD_VERSION' : 0,
'OFPBRC_BAD_TYPE' : 1,
'OFPBRC_BAD_STAT' : 2,
'OFPBRC_BAD_VENDOR' : 3,
'OFPBRC_BAD_SUBTYPE' : 4,
'OFPBRC_EPERM' : 5,
'OFPBRC_BAD_LEN' : 6,
'OFPBRC_BUFFER_EMPTY' : 7,
'OFPBRC_BUFFER_UNKNOWN' : 8,
}
ofp_bad_action_code_rev_map = {
'OFPBAC_BAD_TYPE' : 0,
'OFPBAC_BAD_LEN' : 1,
'OFPBAC_BAD_VENDOR' : 2,
'OFPBAC_BAD_VENDOR_TYPE' : 3,
'OFPBAC_BAD_OUT_PORT' : 4,
'OFPBAC_BAD_ARGUMENT' : 5,
'OFPBAC_EPERM' : 6,
'OFPBAC_TOO_MANY' : 7,
'OFPBAC_BAD_QUEUE' : 8,
}
ofp_flow_mod_failed_code_rev_map = {
'OFPFMFC_ALL_TABLES_FULL' : 0,
'OFPFMFC_OVERLAP' : 1,
'OFPFMFC_EPERM' : 2,
'OFPFMFC_BAD_EMERG_TIMEOUT' : 3,
'OFPFMFC_BAD_COMMAND' : 4,
'OFPFMFC_UNSUPPORTED' : 5,
}
ofp_port_mod_failed_code_rev_map = {
'OFPPMFC_BAD_PORT' : 0,
'OFPPMFC_BAD_HW_ADDR' : 1,
}
ofp_queue_op_failed_code_rev_map = {
'OFPQOFC_BAD_PORT' : 0,
'OFPQOFC_BAD_QUEUE' : 1,
'OFPQOFC_EPERM' : 2,
}
#5. Symmetric Messages
class ofp_hello (ofp_header):
def __init__ (self, **kw):
ofp_header.__init__(self)
self.header_type = OFPT_HELLO
self.length = len(self)
initHelper(self, kw)
def _assert (self):
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += ofp_header.pack(self)
return packed
def unpack (self, binaryString):
if (len(binaryString) < 8):
return binaryString
ofp_header.unpack(self, binaryString[0:])
return binaryString[8:]
def __len__ (self):
return 8
def __eq__ (self, other):
if type(self) != type(other): return False
if not ofp_header.__eq__(self, other): return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'header: \n'
outstr += ofp_header.show(self, prefix + ' ')
return outstr
class ofp_echo_request (ofp_header):
def __init__ (self, **kw):
ofp_header.__init__(self)
self.header_type = OFPT_ECHO_REQUEST
self.body = b''
initHelper(self, kw)
def _assert (self):
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = b""
packed += ofp_header.pack(self)
packed += self.body
return packed
def unpack (self, binaryString):
if (len(binaryString) < 8):
return binaryString
ofp_header.unpack(self, binaryString[0:])
# Note that we trust the header to be correct here
if len(binaryString) < self.length:
return binaryString
l = self.length - 8
self.body = binaryString[8:8+l]
return binaryString[8 + l:]
def __len__ (self):
return 8 + len(self.body)
def __eq__ (self, other):
if type(self) != type(other): return False
if not ofp_header.__eq__(self, other): return False
if self.body != other.body: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'header: \n'
outstr += ofp_header.show(self, prefix + ' ')
outstr += prefix + 'body:\n' + _format_body(self.body, prefix + ' ') + '\n'
return outstr
class ofp_echo_reply (ofp_header):
def __init__ (self, **kw):
ofp_header.__init__(self)
self.header_type = OFPT_ECHO_REPLY
self.body = b''
initHelper(self, kw)
def _assert (self):
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = b""
packed += ofp_header.pack(self)
packed += self.body
return packed
def unpack (self, binaryString):
if (len(binaryString) < 8):
return binaryString
ofp_header.unpack(self, binaryString[0:])
# Note that we trust the header to be correct here
if len(binaryString) < self.length:
return binaryString
l = self.length - 8
self.body = binaryString[8:8+l]
return binaryString[8 + l:]
def __len__ (self):
return 8 + len(self.body)
def __eq__ (self, other):
if type(self) != type(other): return False
if not ofp_header.__eq__(self, other): return False
if self.body != other.body: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'header: \n'
outstr += ofp_header.show(self, prefix + ' ')
outstr += prefix + 'body:\n' + _format_body(self.body, prefix + ' ') + '\n'
return outstr
class ofp_vendor_header (ofp_header):
def __init__ (self, **kw):
ofp_header.__init__(self)
self.header_type = OFPT_VENDOR
self.vendor = 0
initHelper(self, kw)
def _assert (self):
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += ofp_header.pack(self)
packed += struct.pack("!L", self.vendor)
return packed
def unpack (self, binaryString):
if (len(binaryString) < 12):
return binaryString
ofp_header.unpack(self, binaryString[0:])
(self.vendor,) = struct.unpack_from("!L", binaryString, 8)
return binaryString[12:]
def __len__ (self):
return 12
def __eq__ (self, other):
if type(self) != type(other): return False
if not ofp_header.__eq__(self, other): return False
if self.vendor != other.vendor: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'header: \n'
outstr += ofp_header.show(self, prefix + ' ')
outstr += prefix + 'vendor: ' + str(self.vendor) + '\n'
return outstr
class ofp_vendor (ofp_header):
def __init__ (self, **kw):
ofp_header.__init__(self)
self.header_type = OFPT_VENDOR
self.vendor = 0
self.data = b''
self.length = 12
initHelper(self, kw)
def _assert (self):
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
self.length = 12 + len(self.data)
packed = ""
packed += ofp_header.pack(self)
packed += struct.pack("!L", self.vendor)
if hasattr(self.data, "pack"):
packed += self.data.pack()
else:
packed += self.data
return packed
def unpack (self, binaryString):
if (len(binaryString) < 12):
return binaryString
ofp_header.unpack(self, binaryString[0:])
(self.vendor,) = struct.unpack_from("!L", binaryString, 8)
if len(binaryString) < self.length:
return binaryString
self.data = binaryString[12:self.length]
return binaryString[self.length:]
def __len__ (self):
return 12 + len(self.data)
def __eq__ (self, other):
if type(self) != type(other): return False
if not ofp_header.__eq__(self, other): return False
if self.vendor != other.vendor: return False
if self.data != other.data: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'header: \n'
outstr += ofp_header.show(self, prefix + ' ')
outstr += prefix + 'vendor: ' + str(self.vendor) + '\n'
outstr += prefix + 'data: ' + ( repr(self.data[:8]) if len(self.data) < 8 else "[%d bytes]"%len(self.data) ) + '\n'
return outstr
class ofp_features_request (ofp_header):
def __init__ (self, **kw):
ofp_header.__init__(self)
self.header_type = OFPT_FEATURES_REQUEST
self.length = 8
initHelper(self, kw)
def _assert (self):
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += ofp_header.pack(self)
return packed
def unpack (self, binaryString):
if (len(binaryString) < 8):
return binaryString
ofp_header.unpack(self, binaryString[0:])
return binaryString[8:]
def __len__ (self):
return 8
def __eq__ (self, other):
if type(self) != type(other): return False
if not ofp_header.__eq__(self, other): return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'header: \n'
outstr += ofp_header.show(self, prefix + ' ')
return outstr
class ofp_get_config_request (ofp_header):
def __init__ (self, **kw):
ofp_header.__init__(self)
self.header_type = OFPT_GET_CONFIG_REQUEST
initHelper(self, kw)
def _assert (self):
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += ofp_header.pack(self)
return packed
def unpack (self, binaryString):
if (len(binaryString) < 8):
return binaryString
ofp_header.unpack(self, binaryString[0:])
return binaryString[8:]
def __len__ (self):
return 8
def __eq__ (self, other):
if type(self) != type(other): return False
if not ofp_header.__eq__(self, other): return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'header: \n'
outstr += ofp_header.show(self, prefix + ' ')
return outstr
class ofp_get_config_reply (ofp_header):
def __init__ (self, **kw):
ofp_header.__init__(self)
self.header_type = OFPT_GET_CONFIG_REPLY
self.flags = 0
self.miss_send_len = OFP_DEFAULT_MISS_SEND_LEN
self.length = 12
initHelper(self, kw)
def _assert (self):
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += ofp_header.pack(self)
packed += struct.pack("!HH", self.flags, self.miss_send_len)
return packed
def unpack (self, binaryString):
if (len(binaryString) < 12):
return binaryString
ofp_header.unpack(self, binaryString[0:])
(self.flags, self.miss_send_len) = struct.unpack_from("!HH", binaryString, 8)
return binaryString[12:]
def __len__ (self):
return 12
def __eq__ (self, other):
if type(self) != type(other): return False
if not ofp_header.__eq__(self, other): return False
if self.flags != other.flags: return False
if self.miss_send_len != other.miss_send_len: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'header: \n'
outstr += ofp_header.show(self, prefix + ' ')
outstr += prefix + 'flags: ' + str(self.flags) + '\n'
outstr += prefix + 'miss_send_len: ' + str(self.miss_send_len) + '\n'
return outstr
class ofp_set_config (ofp_header):
def __init__ (self, **kw):
ofp_header.__init__(self)
self.header_type = OFPT_SET_CONFIG
self.flags = 0
self.miss_send_len = OFP_DEFAULT_MISS_SEND_LEN
self.length = 12
initHelper(self, kw)
def _assert (self):
return (True, None)
def pack (self, assertstruct=True):
if(assertstruct):
if(not self._assert()[0]):
return None
packed = ""
packed += ofp_header.pack(self)
packed += struct.pack("!HH", self.flags, self.miss_send_len)
return packed
def unpack (self, binaryString):
if (len(binaryString) < 12):
return binaryString
ofp_header.unpack(self, binaryString[0:])
(self.flags, self.miss_send_len) = struct.unpack_from("!HH", binaryString, 8)
return binaryString[12:]
def __len__ (self):
return 12
def __eq__ (self, other):
if type(self) != type(other): return False
if not ofp_header.__eq__(self, other): return False
if self.flags != other.flags: return False
if self.miss_send_len != other.miss_send_len: return False
return True
def __ne__ (self, other): return not self.__eq__(other)
def show (self, prefix=''):
outstr = ''
outstr += prefix + 'header: \n'
outstr += ofp_header.show(self, prefix + ' ')
outstr += prefix + 'flags: ' + str(self.flags) + '\n'
outstr += prefix + 'miss_send_len: ' + str(self.miss_send_len) + '\n'
return outstr
ofp_port_rev_map = {
'OFPP_MAX' : 65280,
'OFPP_IN_PORT' : 65528,
'OFPP_TABLE' : 65529,
'OFPP_NORMAL' : 65530,
'OFPP_FLOOD' : 65531,
'OFPP_ALL' : 65532,
'OFPP_CONTROLLER' : 65533,
'OFPP_LOCAL' : 65534,
'OFPP_NONE' : 65535,
}
ofp_type_rev_map = {
'OFPT_HELLO' : 0,
'OFPT_ERROR' : 1,
'OFPT_ECHO_REQUEST' : 2,
'OFPT_ECHO_REPLY' : 3,
'OFPT_VENDOR' : 4,
'OFPT_FEATURES_REQUEST' : 5,
'OFPT_FEATURES_REPLY' : 6,
'OFPT_GET_CONFIG_REQUEST' : 7,
'OFPT_GET_CONFIG_REPLY' : 8,
'OFPT_SET_CONFIG' : 9,
'OFPT_PACKET_IN' : 10,
'OFPT_FLOW_REMOVED' : 11,
'OFPT_PORT_STATUS' : 12,
'OFPT_PACKET_OUT' : 13,
'OFPT_FLOW_MOD' : 14,
'OFPT_PORT_MOD' : 15,
'OFPT_STATS_REQUEST' : 16,
'OFPT_STATS_REPLY' : 17,
'OFPT_BARRIER_REQUEST' : 18,
'OFPT_BARRIER_REPLY' : 19,
'OFPT_QUEUE_GET_CONFIG_REQUEST' : 20,
'OFPT_QUEUE_GET_CONFIG_REPLY' : 21,
}
# Table that maps an action type to a callable that creates that type
# (This is filled in by _init after the globals have been created)
_action_map = {}
def _unpack_actions (b, length, offset=0):
"""
Parses actions from a buffer
b is a buffer (bytes)
offset, if specified is where in b to start decoding
returns ([Actions], next_offset)
"""
if (len(b) - offset) < length: return ([], offset)
actions = []
end = length + offset
while offset < end:
(t,l) = struct.unpack_from("!HH", b, offset)
if (len(b) - offset) < l: return ([], offset)
a = _action_map.get(t)
if a is None:
# Use generic action header for unknown type
a = ofp_action_header()
else:
a = a()
a.unpack(b[offset:offset+l])
assert len(a) == l
actions.append(a)
offset += l
return (actions, offset)
def _init ():
def formatMap (name, m):
o = name + " = {\n"
vk = sorted([(v,k) for k,v in m.iteritems()])
maxlen = 2 + len(reduce(lambda a,b: a if len(a)>len(b) else b, (v for k,v in vk)))
fstr = " %-" + str(maxlen) + "s : %s,\n"
for v,k in vk:
o += fstr % ("'" + k + "'",v)
o += "}"
return o
"""
maps = []
for k,v in globals().iteritems():
if k.startswith("ofp_") and k.endswith("_map") and type(v) == dict:
maps.append((k,v))
for name,m in maps:
rev = {}
name = name[:-4]
names = globals()[name]
for n in names:
rev[n] = globals()[n]
globals()[name + '_rev_map'] = rev
print formatMap(name + "_rev_map", rev)
return
"""
maps = []
for k,v in globals().iteritems():
if k.startswith("ofp_") and k.endswith("_rev_map") and type(v) == dict:
maps.append((k[:-8],v))
for name,m in maps:
# Try to generate forward maps
forward = dict(((v,k) for k,v in m.iteritems()))
if len(forward) == len(m):
globals()[name + "_map"] = forward
else:
print name + "_rev_map is not a map"
# Try to generate lists
v = m.values()
v.sort()
if v[-1] != len(v)-1:
# Allow ones where the last value is a special value (e.g., VENDOR)
del v[-1]
if len(v) > 0 and v[0] == 0 and v[-1] == len(v)-1:
globals()[name] = v
    # Generate globals
for k,v in m.iteritems():
globals()[k] = v
_init()
# Fill in the action-to-class table
#TODO: Use the factory functions?
_action_map.update({
#TODO: special type for OFPAT_STRIP_VLAN?
OFPAT_OUTPUT : ofp_action_output,
OFPAT_SET_VLAN_VID : ofp_action_vlan_vid,
OFPAT_SET_VLAN_PCP : ofp_action_vlan_pcp,
OFPAT_SET_DL_SRC : ofp_action_dl_addr,
OFPAT_SET_DL_DST : ofp_action_dl_addr,
OFPAT_SET_NW_SRC : ofp_action_nw_addr,
OFPAT_SET_NW_DST : ofp_action_nw_addr,
OFPAT_SET_NW_TOS : ofp_action_nw_tos,
OFPAT_SET_TP_SRC : ofp_action_tp_port,
OFPAT_SET_TP_DST : ofp_action_tp_port,
OFPAT_ENQUEUE : ofp_action_enqueue,
OFPAT_PUSH_MPLS : ofp_action_push_mpls,
OFPAT_POP_MPLS : ofp_action_pop_mpls,
OFPAT_SET_MPLS_LABEL : ofp_action_mpls_label,
OFPAT_SET_MPLS_TC : ofp_action_mpls_tc,
OFPAT_SET_MPLS_TTL : ofp_action_mpls_ttl,
OFPAT_DEC_MPLS_TTL : ofp_action_mpls_dec_ttl,
OFPAT_RESUBMIT : ofp_action_resubmit
})
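
# A minimal round-trip sanity check (an illustrative sketch added here, not
# part of the original module): it packs an ofp_packet_out carrying a single
# OFPAT_OUTPUT action and unpacks the result again, exercising pack()/unpack()
# and the _action_map table above.  Every class and constant it uses is
# defined earlier in this file.
def _selftest_packet_out ():
  po = ofp_packet_out(in_port=1, actions=[ofp_action_output(port=OFPP_FLOOD)])
  packed = po.pack()
  po2 = ofp_packet_out()
  po2.unpack(packed)
  assert po == po2, "ofp_packet_out did not survive a pack/unpack round trip"

if __name__ == '__main__':
  _selftest_packet_out()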
# Values from macro definitions
OFP_FLOW_PERMANENT = 0
OFP_DL_TYPE_ETH2_CUTOFF = 0x0600
DESC_STR_LEN = 256
OFPFW_ICMP_CODE = OFPFW_TP_DST
OFPQ_MIN_RATE_UNCFG = 0xffff
OFP_VERSION = 0x01
OFP_MAX_TABLE_NAME_LEN = 32
OFP_DL_TYPE_NOT_ETH_TYPE = 0x05ff
OFP_DEFAULT_MISS_SEND_LEN = 128
OFP_MAX_PORT_NAME_LEN = 16
OFP_SSL_PORT = 6633
OFPFW_ICMP_TYPE = OFPFW_TP_SRC
OFP_TCP_PORT = 6633
SERIAL_NUM_LEN = 32
OFP_DEFAULT_PRIORITY = 0x8000
OFP_ETH_ALEN = 6
OFP_VLAN_NONE = 0xffff
OFPQ_ALL = 0xffffffff
# Basic structure size definitions.
OFP_ACTION_DL_ADDR_BYTES = 16
OFP_ACTION_ENQUEUE_BYTES = 16
OFP_ACTION_HEADER_BYTES = 8
OFP_ACTION_NW_ADDR_BYTES = 8
OFP_ACTION_NW_TOS_BYTES = 8
OFP_ACTION_OUTPUT_BYTES = 8
OFP_ACTION_TP_PORT_BYTES = 8
OFP_ACTION_VENDOR_HEADER_BYTES = 8
OFP_ACTION_VLAN_PCP_BYTES = 8
OFP_ACTION_VLAN_VID_BYTES = 8
OFP_AGGREGATE_STATS_REPLY_BYTES = 24
OFP_AGGREGATE_STATS_REQUEST_BYTES = 44
OFP_DESC_STATS_BYTES = 1056
OFP_ERROR_MSG_BYTES = 12
OFP_FLOW_MOD_BYTES = 72
OFP_FLOW_REMOVED_BYTES = 88
OFP_FLOW_STATS_BYTES = 88
OFP_FLOW_STATS_REQUEST_BYTES = 44
OFP_HEADER_BYTES = 8
OFP_HELLO_BYTES = 8
OFP_MATCH_BYTES = 40
OFP_PACKET_IN_BYTES = 18
OFP_PACKET_OUT_BYTES = 16
OFP_PACKET_QUEUE_BYTES = 8
OFP_PHY_PORT_BYTES = 48
OFP_PORT_MOD_BYTES = 32
OFP_PORT_STATS_BYTES = 104
OFP_PORT_STATS_REQUEST_BYTES = 8
OFP_PORT_STATUS_BYTES = 64
OFP_QUEUE_GET_CONFIG_REPLY_BYTES = 16
OFP_QUEUE_GET_CONFIG_REQUEST_BYTES = 12
OFP_QUEUE_PROP_HEADER_BYTES = 8
OFP_QUEUE_PROP_MIN_RATE_BYTES = 16
OFP_QUEUE_STATS_BYTES = 32
OFP_QUEUE_STATS_REQUEST_BYTES = 8
OFP_STATS_REPLY_BYTES = 12
OFP_STATS_REQUEST_BYTES = 12
OFP_SWITCH_CONFIG_BYTES = 12
OFP_SWITCH_FEATURES_BYTES = 32
OFP_TABLE_STATS_BYTES = 64
OFP_VENDOR_HEADER_BYTES = 12
NO_BUFFER = 4294967295
ofp_match_data = {
# 'wildcards' : (0, 0),
'in_port' : (0, OFPFW_IN_PORT),
'dl_src' : (EMPTY_ETH, OFPFW_DL_SRC),
'dl_dst' : (EMPTY_ETH, OFPFW_DL_DST),
'dl_vlan' : (0, OFPFW_DL_VLAN),
'dl_vlan_pcp' : (0, OFPFW_DL_VLAN_PCP),
#'pad1' : (_PAD, 0),
'dl_type' : (0, OFPFW_DL_TYPE),
'nw_tos' : (0, OFPFW_NW_TOS),
'nw_proto' : (0, OFPFW_NW_PROTO),
#'pad2' : (_PAD2, 0),
'nw_src' : (0, OFPFW_NW_SRC_ALL),
'nw_dst' : (0, OFPFW_NW_DST_ALL),
'tp_src' : (0, OFPFW_TP_SRC),
'tp_dst' : (0, OFPFW_TP_DST),
# 'mpls_label': (0, OFPFW_MPLS_LABEL),
# 'mpls_tc': (0, OFPFW_MPLS_TC),
}
|
gpl-3.0
| -4,535,113,689,394,153,000 | 30.155621 | 260 | 0.603704 | false |
alvarop/silta
|
sw/examples/drivers/max31855.py
|
1
|
1403
|
#!/usr/bin/env python
#
# SPI example (using the STM32F407 discovery board)
#
import sys
import time
import ctypes
from silta import stm32f407
def bytes_to_int(byte_list):
num = 0
for byte in range(len(byte_list)):
num += byte_list[byte] << ((len(byte_list) - 1 - byte) * 8)
return num
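# Illustration (not in the original file): the byte list is interpreted as a
# big-endian sequence, so bytes_to_int([0x12, 0x34]) == 0x1234.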
class MAX31855(object):
def __init__(self, bridge, cs_pin):
self.bridge = bridge
self.cs_pin = cs_pin
self.last_fault = 0
# Set the CS line as an output
self.bridge.gpiocfg(self.cs_pin, 'output')
# Configure ~1.05MHz clock with CPOL=0,CPHA=0
self.bridge.spicfg(10500000, 0, 0)
# CS is active low in this case
self.bridge.gpio(self.cs_pin, 1)
def read(self):
# Read 32 bits
txbuff = [0x00, 0x00, 0x00, 0x00]
rval = self.bridge.spi(self.cs_pin, txbuff)
if isinstance(rval, list):
reg = bytes_to_int(rval)
fault = ((reg >> 16) & 1) == 1
if fault:
temperature = None
                self.last_fault = reg & 0x7
else:
temperature = ctypes.c_int16((reg >> 16) & 0xFFFC).value >> 2
temperature = temperature * 0.25
return temperature
else:
print('SPI Error: ' + str(rval))
return None
def get_last_fault(self):
        return self.last_fault
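
# Hypothetical usage sketch (added for illustration; not part of the original
# driver).  It assumes the silta bridge is constructed as
# stm32f407.bridge(<serial device>) and that the thermocouple's chip select
# is wired to pin 'PC4' -- adjust both for your setup.
if __name__ == '__main__':
    bridge = stm32f407.bridge(sys.argv[1] if len(sys.argv) > 1 else '/dev/ttyACM0')
    sensor = MAX31855(bridge, 'PC4')
    for _ in range(10):
        temperature = sensor.read()
        if temperature is None:
            print('Fault bits: 0x%x' % sensor.get_last_fault())
        else:
            print('Temperature: %.2f C' % temperature)
        time.sleep(0.5)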
|
bsd-2-clause
| -733,949,490,846,603,800 | 22.779661 | 77 | 0.543835 | false |
dcastro9/patternrec_ps2
|
code/alcohol_script.py
|
1
|
5623
|
from Dataset import Dataset
from WTA_Hasher import WTAHasher
from kNN_Classifier import kNNClassifier
import numpy as np
import matplotlib.pyplot as plt
import copy
ds_train_dir = "../datasets/alcohol/alcoholism_training.csv"
ds_test_dir = "../datasets/alcohol/alcoholism_test.csv"
results_dir = "../final_results/alcohol/"
num_k_values = 10
weights = [1,1,1,1,1,3]
ds_orig = Dataset(ds_train_dir, name='Original Data')
ds_norm = Dataset(ds_train_dir, normalize=True, name='Normalized Data')
ds_norm_weigh = Dataset(ds_train_dir, normalize=True, weights=weights,
name='Norm & Weighted Data')
ds_whiten = Dataset(ds_train_dir, whiten=True, name='Whitened Data')
ds_orig_t = Dataset(ds_test_dir)
ds_norm_t = Dataset(ds_test_dir, normalize=True)
ds_norm_weigh_t = Dataset(ds_test_dir, normalize=True, weights=weights)
ds_whiten_t = Dataset(ds_test_dir, whiten=True)
alcohol_datasets = [[ds_orig, ds_orig_t],
[ds_norm, ds_norm_t],
[ds_norm_weigh, ds_norm_weigh_t],
[ds_whiten, ds_whiten_t]]
k_values = range(1,num_k_values*2,2)
color=['red','blue','green','black']
labels=['20%', '50%', '80%', '100%']
folds=['2-fold', '5-fold', 'N-fold']
for ds in alcohol_datasets:
train_data_all = ds[0].data
test_data = ds[1].data
    # Accuracy for the 20%, 50%, 80% and 100% subsets of the data.
    # The random subsets are resampled on each of the five iterations;
    # the full training set is only evaluated once.
train_accuracy = [[np.zeros(num_k_values), np.zeros(num_k_values), np.zeros(num_k_values)],
[np.zeros(num_k_values), np.zeros(num_k_values), np.zeros(num_k_values)],
[np.zeros(num_k_values), np.zeros(num_k_values), np.zeros(num_k_values)],
[np.zeros(num_k_values), np.zeros(num_k_values), np.zeros(num_k_values)]]
best_k_and_ds = [[0,0,0],[0,0,0],[0,0,0],[0,0,0]]
for it in range(5):
train_data_20, t = Dataset.getRandomPercent(train_data_all, 0.2)
train_data_50, t = Dataset.getRandomPercent(train_data_all, 0.5)
train_data_80, t = Dataset.getRandomPercent(train_data_all, 0.8)
all_training_data = [train_data_20,
train_data_50,
train_data_80,
train_data_all]
# Only run on train_data_all once.
if it > 0:
all_training_data = all_training_data[:-1]
for val in range(len(all_training_data)):
for k in k_values:
                print str(it) + ": Training on: " + labels[val] + " for k value: " + str(k) + " for " + ds[0].name
# Do 2-5-N Fold Cross Validation.
cv_2 = Dataset.getkPartitions(all_training_data[val], 2)
cv_5 = Dataset.getkPartitions(all_training_data[val], 5)
cv_n = Dataset.getkPartitions(all_training_data[val],
len(all_training_data[val]))
cvs = [cv_2, cv_5, cv_n]
cross_val_accuracy = [0, 0, 0]
for cv_c in range(len(cvs)):
# Does f-Fold cross validation.
accuracy = 0
for fold in range(len(cvs[cv_c])):
td = copy.deepcopy(cvs[cv_c]) # Copy the cross validation dataset.
del td[fold] # Delete the item we're using for testing.
td_reshaped = []
for elem in td:
for item in elem:
td_reshaped.append(item)
knn = kNNClassifier(td_reshaped, k) # Initialize kNN.
accuracy += knn.test(cvs[cv_c][fold]) # Test.
accuracy /= len(cvs[cv_c])
if best_k_and_ds[val][cv_c] == 0:
best_k_and_ds[val][cv_c] = [k, td_reshaped, accuracy]
elif best_k_and_ds[val][cv_c][2] < accuracy:
best_k_and_ds[val][cv_c] = [k, td_reshaped, accuracy]
train_accuracy[val][cv_c][k/2] += accuracy
# Write results to file.
out_f = open(results_dir + ds[0].name + ".txt", 'w')
for cnt in range(len(train_accuracy)):
# Setup plot.
plt.xlabel('k Values')
plt.ylabel('Accuracy')
plt.title(ds[0].name)
average = True
if cnt == len(train_accuracy) - 1:
average = False
for fold in range(len(train_accuracy[cnt])):
if (average):
train_accuracy[cnt][fold] /= 5
plt.plot(k_values, train_accuracy[cnt][fold], color=color[fold],
label=folds[fold])
out_f.write(labels[cnt] + ":" + folds[fold] + ":" +
str(train_accuracy[cnt][fold]) + "\n")
# Save plot.
plt.legend()
plt.savefig(results_dir + ds[0].name + labels[cnt] + ".pdf")
plt.clf()
plt.cla()
# Now we test with the original test data provided.
out_f.write("\n\n Testing for best k & DS for:" + ds[0].name +"\n")
for val in range(len(best_k_and_ds)):
for fold in range(len(best_k_and_ds[val])):
knn = kNNClassifier(best_k_and_ds[val][fold][1],
best_k_and_ds[val][fold][0]) # Initialize kNN.
out = knn.test(test_data) # Test.
out_f.write(labels[val] + " with k:" +
str(best_k_and_ds[val][fold][0]) + " at " + folds[fold] +
" original accuracy:" + str(best_k_and_ds[val][fold][2]) +
" vs accuracy:" + str(out) + "\n")
# Close file.
out_f.close()
|
mit
| -64,797,038,294,500,720 | 44.354839 | 113 | 0.527121 | false |
OpusVL/odoo-trading-as
|
trading_as/res_partner.py
|
1
|
1361
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Trading As Brands
# Copyright (C) 2015 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields, api
class ResPartner(models.Model):
_inherit = 'res.partner'
brand = fields.Many2one(
string='Brand',
help=(
'The trading name to use for branding documents for this partner.\n' +
'If blank, the default company branding will be used.'
),
comodel_name='res.company.brand',
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
| -7,650,469,349,843,259,000 | 35.783784 | 82 | 0.622337 | false |
fedebell/Laboratorio3
|
Laboratorio2/GraficiBestFit/fitAnalistico.py
|
1
|
1562
|
import numpy
import pylab
from scipy.optimize import curve_fit
import math
import scipy.stats
V, dV, I, dI = pylab.loadtxt("data00.txt", unpack=True)
# Analytic best fit
# Dy and Dx are the error columns
x = V
Dx = dV
y = I
Dy = dI
# set errors and statistical weights
sigma = Dy
w = 1/(sigma**2)
#determine the coefficients
c1 = (w*(x**2)).sum()
c2 = (w*y).sum()
c3 = (w*x).sum()
c4 = (w*x*y).sum()
c5 = (w).sum()
Dprime = c5*c1-c3**2
a = (c1*c2-c3*c4)/Dprime
b = (c5*c4-c3*c2)/Dprime
Da = numpy.sqrt(c1/Dprime)
Db = numpy.sqrt(c5/Dprime)
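# These are the closed-form weighted least-squares estimates for y = a + b*x:
#   a = (S_xx*S_y - S_x*S_xy) / D'    b = (S*S_xy - S_x*S_y) / D'
# where S = sum(w), S_x = sum(w*x), S_y = sum(w*y), S_xx = sum(w*x**2),
# S_xy = sum(w*x*y) and D' = S*S_xx - S_x**2 (c5, c3, c2, c1, c4 and Dprime
# above), with standard errors Da = sqrt(S_xx/D') and Db = sqrt(S/D').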
#define the linear function
#note how paramters are entered
#note the syntax
def ff(x, aa, bb):
return aa+bb*x
#calculate the chisquare for the best fit function
chi2 = ((w*(y-ff(x, a, b))**2)).sum()
#determine the ndof
ndof = len(x)-2
#print results on the console
print("I_0 = ", a, " DI_0 = ", Da, "R = ", 1/b, Db/(b**2))
print(chi2, ndof)
#prepare a dummy xx array (with 100 linearly spaced points)
xx = numpy.linspace(min(x), max(x), 100)
pylab.title("Best fit analitico")
pylab.xlabel("Delta V (V)")
pylab.ylabel("I (mA)") #modificare gradi
pylab.grid(color = "gray")
pylab.xlim(0, 1.1*max(V))
pylab.ylim(0, 1.1*max(I))
pylab.grid(color = "gray")
pylab.errorbar(V, I, dI, dV, "o", color="black")
#plot the fitting curve
pylab.plot(xx, ff(xx, a, b), color = 'red')
chisq = (((I - ff(V, a, b))/dI)**2).sum()
ndof = len(V) - 2  # subtract the two parameters extracted from the fit
p=1.0-scipy.stats.chi2.cdf(chisq, ndof)
print("Chisquare/ndof 2 = %f/%d" % (chisq, ndof))
print("p = ", p)
pylab.show()
|
gpl-3.0
| -3,018,592,897,091,491,300 | 20.39726 | 59 | 0.637004 | false |
claudep/translate
|
translate/tools/pydiff.py
|
1
|
12227
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2005, 2006 Zuza Software Foundation
#
# This file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
"""diff tool like GNU diff, but lets you have special options
that are useful in dealing with PO files"""
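# Example invocations (illustrative; every option used here is defined in
# main() below):
#   pydiff --contains fuzzy -I old.po new.po
#   pydiff -r -N --accelerator "&" old_dir new_dir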
import difflib
import fnmatch
import os
import sys
import time
from argparse import ArgumentParser
lineterm = "\n"
def main():
"""main program for pydiff"""
parser = ArgumentParser()
# GNU diff like options
parser.add_argument("-i", "--ignore-case", default=False, action="store_true",
help='Ignore case differences in file contents.')
parser.add_argument("-U", "--unified", type=int, metavar="NUM", default=3,
dest="unified_lines",
help='Output NUM (default 3) lines of unified context')
parser.add_argument("-r", "--recursive", default=False, action="store_true",
help='Recursively compare any subdirectories found.')
parser.add_argument("-N", "--new-file", default=False, action="store_true",
help='Treat absent files as empty.')
parser.add_argument("--unidirectional-new-file", default=False,
action="store_true",
help='Treat absent first files as empty.')
parser.add_argument("-s", "--report-identical-files", default=False,
action="store_true",
help='Report when two files are the same.')
parser.add_argument("-x", "--exclude", default=["CVS", "*.po~"],
action="append", metavar="PAT",
help='Exclude files that match PAT.')
# our own options
parser.add_argument("--fromcontains", type=str, default=None,
metavar="TEXT",
help='Only show changes where fromfile contains TEXT')
parser.add_argument("--tocontains", type=str, default=None,
metavar="TEXT",
help='Only show changes where tofile contains TEXT')
parser.add_argument("--contains", type=str, default=None,
metavar="TEXT",
help='Only show changes where fromfile or tofile contains TEXT')
parser.add_argument("-I", "--ignore-case-contains", default=False, action="store_true",
help='Ignore case differences when matching any of the changes')
parser.add_argument("--accelerator", dest="accelchars", default="",
metavar="ACCELERATORS",
help="ignores the given accelerator characters when matching")
parser.add_argument("fromfile", nargs=1)
parser.add_argument("tofile", nargs=1)
args = parser.parse_args()
fromfile, tofile = args.fromfile[0], args.tofile[0]
if fromfile == "-" and tofile == "-":
parser.error("Only one of fromfile and tofile can be read from stdin")
if os.path.isdir(fromfile):
if os.path.isdir(tofile):
differ = DirDiffer(fromfile, tofile, args)
else:
parser.error("File %s is a directory while file %s is a regular file" %
(fromfile, tofile))
else:
if os.path.isdir(tofile):
parser.error("File %s is a regular file while file %s is a directory" %
(fromfile, tofile))
else:
differ = FileDiffer(fromfile, tofile, args)
differ.writediff(sys.stdout)
class DirDiffer:
"""generates diffs between directories"""
def __init__(self, fromdir, todir, options):
"""Constructs a comparison between the two dirs using the
given options"""
self.fromdir = fromdir
self.todir = todir
self.options = options
def isexcluded(self, difffile):
"""checks if the given filename has been excluded from the diff"""
for exclude_pat in self.options.exclude:
if fnmatch.fnmatch(difffile, exclude_pat):
return True
return False
def writediff(self, outfile):
"""writes the actual diff to the given file"""
fromfiles = os.listdir(self.fromdir)
tofiles = os.listdir(self.todir)
difffiles = dict.fromkeys(fromfiles + tofiles).keys()
difffiles.sort()
for difffile in difffiles:
if self.isexcluded(difffile):
continue
from_ok = (difffile in fromfiles or self.options.new_file or
self.options.unidirectional_new_file)
to_ok = (difffile in tofiles or self.options.new_file)
if from_ok and to_ok:
fromfile = os.path.join(self.fromdir, difffile)
tofile = os.path.join(self.todir, difffile)
if os.path.isdir(fromfile):
if os.path.isdir(tofile):
if self.options.recursive:
differ = DirDiffer(fromfile, tofile, self.options)
differ.writediff(outfile)
else:
outfile.write("Common subdirectories: %s and %s\n" %
(fromfile, tofile))
else:
outfile.write("File %s is a directory while file %s is a regular file\n" %
(fromfile, tofile))
else:
if os.path.isdir(tofile):
parser.error("File %s is a regular file while file %s is a directory\n" %
(fromfile, tofile))
else:
filediffer = FileDiffer(fromfile, tofile, self.options)
filediffer.writediff(outfile)
elif from_ok:
outfile.write("Only in %s: %s\n" % (self.fromdir, difffile))
elif to_ok:
outfile.write("Only in %s: %s\n" % (self.todir, difffile))
class FileDiffer:
"""generates diffs between files"""
def __init__(self, fromfile, tofile, options):
"""Constructs a comparison between the two files using the given
options"""
self.fromfile = fromfile
self.tofile = tofile
self.options = options
def writediff(self, outfile):
"""writes the actual diff to the given file"""
validfiles = True
if os.path.exists(self.fromfile):
with open(self.fromfile, 'U') as fh:
self.from_lines = fh.readlines()
fromfiledate = os.stat(self.fromfile).st_mtime
elif self.fromfile == "-":
self.from_lines = sys.stdin.readlines()
fromfiledate = time.time()
elif self.options.new_file or self.options.unidirectional_new_file:
self.from_lines = []
fromfiledate = 0
else:
outfile.write("%s: No such file or directory\n" % self.fromfile)
validfiles = False
if os.path.exists(self.tofile):
with open(self.tofile, 'U') as fh:
self.to_lines = fh.readlines()
tofiledate = os.stat(self.tofile).st_mtime
elif self.tofile == "-":
self.to_lines = sys.stdin.readlines()
tofiledate = time.time()
elif self.options.new_file:
self.to_lines = []
tofiledate = 0
else:
outfile.write("%s: No such file or directory\n" % self.tofile)
validfiles = False
if not validfiles:
return
fromfiledate = time.ctime(fromfiledate)
tofiledate = time.ctime(tofiledate)
compare_from_lines = self.from_lines
compare_to_lines = self.to_lines
if self.options.ignore_case:
compare_from_lines = [line.lower() for line in compare_from_lines]
compare_to_lines = [line.lower() for line in compare_to_lines]
matcher = difflib.SequenceMatcher(None, compare_from_lines, compare_to_lines)
groups = matcher.get_grouped_opcodes(self.options.unified_lines)
started = False
fromstring = '--- %s\t%s%s' % (self.fromfile, fromfiledate, lineterm)
tostring = '+++ %s\t%s%s' % (self.tofile, tofiledate, lineterm)
for group in groups:
hunk = "".join([line for line in self.unified_diff(group)])
if self.options.fromcontains:
if self.options.ignore_case_contains:
hunk_from_lines = "".join([line.lower() for line in self.get_from_lines(group)])
else:
hunk_from_lines = "".join(self.get_from_lines(group))
for accelerator in self.options.accelchars:
hunk_from_lines = hunk_from_lines.replace(accelerator, "")
if self.options.fromcontains not in hunk_from_lines:
continue
if self.options.tocontains:
if self.options.ignore_case_contains:
hunk_to_lines = "".join([line.lower() for line in self.get_to_lines(group)])
else:
hunk_to_lines = "".join(self.get_to_lines(group))
for accelerator in self.options.accelchars:
hunk_to_lines = hunk_to_lines.replace(accelerator, "")
if self.options.tocontains not in hunk_to_lines:
continue
if self.options.contains:
if self.options.ignore_case_contains:
hunk_lines = "".join([line.lower() for line in self.get_from_lines(group) + self.get_to_lines(group)])
else:
hunk_lines = "".join(self.get_from_lines(group) + self.get_to_lines(group))
for accelerator in self.options.accelchars:
hunk_lines = hunk_lines.replace(accelerator, "")
if self.options.contains not in hunk_lines:
continue
if not started:
outfile.write(fromstring)
outfile.write(tostring)
started = True
outfile.write(hunk)
if not started and self.options.report_identical_files:
outfile.write("Files %s and %s are identical\n" %
(self.fromfile, self.tofile))
def get_from_lines(self, group):
"""returns the lines referred to by group, from the fromfile"""
from_lines = []
for tag, i1, i2, j1, j2 in group:
from_lines.extend(self.from_lines[i1:i2])
return from_lines
def get_to_lines(self, group):
"""returns the lines referred to by group, from the tofile"""
to_lines = []
for tag, i1, i2, j1, j2 in group:
to_lines.extend(self.to_lines[j1:j2])
return to_lines
def unified_diff(self, group):
"""takes the group of opcodes and generates a unified diff line
by line"""
i1, i2, j1, j2 = group[0][1], group[-1][2], group[0][3], group[-1][4]
yield "@@ -%d,%d +%d,%d @@%s" % (i1 + 1, i2 - i1, j1 + 1, j2 - j1, lineterm)
for tag, i1, i2, j1, j2 in group:
if tag == 'equal':
for line in self.from_lines[i1:i2]:
yield ' ' + line
continue
if tag == 'replace' or tag == 'delete':
for line in self.from_lines[i1:i2]:
yield '-' + line
if tag == 'replace' or tag == 'insert':
for line in self.to_lines[j1:j2]:
yield '+' + line
if __name__ == "__main__":
main()
|
gpl-2.0
| -3,536,485,142,706,997,000 | 43.300725 | 122 | 0.559254 | false |
swiharta/radres
|
polls/pygooglechart.py
|
1
|
22228
|
"""
PyGoogleChart - A complete Python wrapper for the Google Chart API
http://pygooglechart.slowchop.com/
Copyright 2007 Gerald Kaszuba
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import os
import urllib
import urllib2
import math
import random
import re
# Helper variables and functions
# -----------------------------------------------------------------------------
__version__ = '0.1.2'
reo_colour = re.compile('^([A-Fa-f0-9]{2,2}){3,4}$')
def _check_colour(colour):
if not reo_colour.match(colour):
raise InvalidParametersException('Colours need to be in ' \
'RRGGBB or RRGGBBAA format. One of your colours has %s' % \
colour)
# Exception Classes
# -----------------------------------------------------------------------------
class PyGoogleChartException(Exception):
pass
class DataOutOfRangeException(PyGoogleChartException):
pass
class UnknownDataTypeException(PyGoogleChartException):
pass
class NoDataGivenException(PyGoogleChartException):
pass
class InvalidParametersException(PyGoogleChartException):
pass
class BadContentTypeException(PyGoogleChartException):
pass
# Data Classes
# -----------------------------------------------------------------------------
class Data(object):
def __init__(self, data):
assert(type(self) != Data) # This is an abstract class
self.data = data
class SimpleData(Data):
enc_map = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'
def __repr__(self):
encoded_data = []
for data in self.data:
sub_data = []
for value in data:
if value is None:
sub_data.append('_')
                elif value >= 0 and value <= SimpleData.max_value():
sub_data.append(SimpleData.enc_map[value])
else:
raise DataOutOfRangeException()
encoded_data.append(''.join(sub_data))
return 'chd=s:' + ','.join(encoded_data)
@staticmethod
def max_value():
return 61
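# Illustration (not from the original source): the simple encoding maps each
# value to one character of enc_map and None to '_', so
#   repr(SimpleData([[0, 25, 61, None]])) == 'chd=s:AZ9_'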
class TextData(Data):
def __repr__(self):
encoded_data = []
for data in self.data:
sub_data = []
for value in data:
if value is None:
                    sub_data.append('-1')
                elif value >= 0 and value <= TextData.max_value():
sub_data.append(str(float(value)))
else:
raise DataOutOfRangeException()
encoded_data.append(','.join(sub_data))
return 'chd=t:' + '|'.join(encoded_data)
@staticmethod
def max_value():
return 100
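# Illustration: the text encoding emits floats joined by commas, with -1
# standing in for missing values, e.g.
#   repr(TextData([[0, 50.5, None, 100]])) == 'chd=t:0.0,50.5,-1,100.0'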
class ExtendedData(Data):
enc_map = \
'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-.'
def __repr__(self):
encoded_data = []
enc_size = len(ExtendedData.enc_map)
for data in self.data:
sub_data = []
for value in data:
if value is None:
sub_data.append('__')
                elif value >= 0 and value <= ExtendedData.max_value():
first, second = divmod(int(value), enc_size)
sub_data.append('%s%s' % (
ExtendedData.enc_map[first],
ExtendedData.enc_map[second]))
else:
raise DataOutOfRangeException( \
'Item #%i "%s" is out of range' % (data.index(value), \
value))
encoded_data.append(''.join(sub_data))
return 'chd=e:' + ','.join(encoded_data)
@staticmethod
def max_value():
return 4095
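# Illustration: the extended encoding packs each value into two base-64
# characters of enc_map, so 0 -> 'AA', 10 -> 'AK' and 4095 -> '..', e.g.
#   repr(ExtendedData([[0, 10, 4095]])) == 'chd=e:AAAK..'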
# Axis Classes
# -----------------------------------------------------------------------------
class Axis(object):
BOTTOM = 'x'
TOP = 't'
LEFT = 'y'
RIGHT = 'r'
TYPES = (BOTTOM, TOP, LEFT, RIGHT)
def __init__(self, axis_index, axis_type, **kw):
assert(axis_type in Axis.TYPES)
self.has_style = False
self.axis_index = axis_index
self.axis_type = axis_type
self.positions = None
def set_index(self, axis_index):
self.axis_index = axis_index
def set_positions(self, positions):
self.positions = positions
def set_style(self, colour, font_size=None, alignment=None):
_check_colour(colour)
self.colour = colour
self.font_size = font_size
self.alignment = alignment
self.has_style = True
def style_to_url(self):
bits = []
bits.append(str(self.axis_index))
bits.append(self.colour)
if self.font_size is not None:
bits.append(str(self.font_size))
if self.alignment is not None:
bits.append(str(self.alignment))
return ','.join(bits)
def positions_to_url(self):
bits = []
bits.append(str(self.axis_index))
bits += [str(a) for a in self.positions]
return ','.join(bits)
class LabelAxis(Axis):
def __init__(self, axis_index, axis_type, values, **kwargs):
Axis.__init__(self, axis_index, axis_type, **kwargs)
self.values = [str(a) for a in values]
def __repr__(self):
return '%i:|%s' % (self.axis_index, '|'.join(self.values))
class RangeAxis(Axis):
def __init__(self, axis_index, axis_type, low, high, **kwargs):
Axis.__init__(self, axis_index, axis_type, **kwargs)
self.low = low
self.high = high
def __repr__(self):
return '%i,%s,%s' % (self.axis_index, self.low, self.high)
# Chart Classes
# -----------------------------------------------------------------------------
class Chart(object):
"""Abstract class for all chart types.
    width and height specify the dimensions of the image. title sets the title
of the chart. legend requires a list that corresponds to datasets.
"""
BASE_URL = 'http://chart.apis.google.com/chart?'
BACKGROUND = 'bg'
CHART = 'c'
SOLID = 's'
LINEAR_GRADIENT = 'lg'
LINEAR_STRIPES = 'ls'
def __init__(self, width, height, title=None, legend=None, colours=None):
assert(type(self) != Chart) # This is an abstract class
assert(isinstance(width, int))
assert(isinstance(height, int))
self.width = width
self.height = height
self.data = []
self.set_title(title)
self.set_legend(legend)
self.set_colours(colours)
self.fill_types = {
Chart.BACKGROUND: None,
Chart.CHART: None,
}
self.fill_area = {
Chart.BACKGROUND: None,
Chart.CHART: None,
}
# self.axis = {
# Axis.TOP: None,
# Axis.BOTTOM: None,
# Axis.LEFT: None,
# Axis.RIGHT: None,
# }
self.axis = []
self.markers = []
# URL generation
# -------------------------------------------------------------------------
def get_url(self):
url_bits = self.get_url_bits()
return self.BASE_URL + '&'.join(url_bits)
def get_url_bits(self):
url_bits = []
# required arguments
url_bits.append(self.type_to_url())
url_bits.append('chs=%ix%i' % (self.width, self.height))
url_bits.append(self.data_to_url())
# optional arguments
if self.title:
url_bits.append('chtt=%s' % self.title)
if self.legend:
url_bits.append('chdl=%s' % '|'.join(self.legend))
if self.colours:
url_bits.append('chco=%s' % ','.join(self.colours))
ret = self.fill_to_url()
if ret:
url_bits.append(ret)
ret = self.axis_to_url()
if ret:
url_bits.append(ret)
if self.markers:
url_bits.append(self.markers_to_url())
return url_bits
# Downloading
# -------------------------------------------------------------------------
def download(self, file_name):
opener = urllib2.urlopen(self.get_url())
if opener.headers['content-type'] != 'image/png':
raise BadContentTypeException('Server responded with a ' \
'content-type of %s' % opener.headers['content-type'])
        open(file_name, 'wb').write(opener.read())  # reuse the response fetched above
# Simple settings
# -------------------------------------------------------------------------
def set_title(self, title):
if title:
self.title = urllib.quote(title)
else:
self.title = None
def set_legend(self, legend):
# legend needs to be a list, tuple or None
assert(isinstance(legend, list) or isinstance(legend, tuple) or
legend is None)
if legend:
self.legend = [urllib.quote(a) for a in legend]
else:
self.legend = None
# Chart colours
# -------------------------------------------------------------------------
def set_colours(self, colours):
# colours needs to be a list, tuple or None
assert(isinstance(colours, list) or isinstance(colours, tuple) or
colours is None)
# make sure the colours are in the right format
if colours:
for col in colours:
_check_colour(col)
self.colours = colours
# Background/Chart colours
# -------------------------------------------------------------------------
def fill_solid(self, area, colour):
assert(area in (Chart.BACKGROUND, Chart.CHART))
_check_colour(colour)
self.fill_area[area] = colour
self.fill_types[area] = Chart.SOLID
def _check_fill_linear(self, angle, *args):
assert(isinstance(args, list) or isinstance(args, tuple))
assert(angle >= 0 and angle <= 90)
assert(len(args) % 2 == 0)
args = list(args) # args is probably a tuple and we need to mutate
for a in xrange(len(args) / 2):
col = args[a * 2]
offset = args[a * 2 + 1]
_check_colour(col)
assert(offset >= 0 and offset <= 1)
args[a * 2 + 1] = str(args[a * 2 + 1])
return args
def fill_linear_gradient(self, area, angle, *args):
assert(area in (Chart.BACKGROUND, Chart.CHART))
args = self._check_fill_linear(angle, *args)
self.fill_types[area] = Chart.LINEAR_GRADIENT
self.fill_area[area] = ','.join([str(angle)] + args)
def fill_linear_stripes(self, area, angle, *args):
assert(area in (Chart.BACKGROUND, Chart.CHART))
args = self._check_fill_linear(angle, *args)
self.fill_types[area] = Chart.LINEAR_STRIPES
self.fill_area[area] = ','.join([str(angle)] + args)
def fill_to_url(self):
areas = []
for area in (Chart.BACKGROUND, Chart.CHART):
if self.fill_types[area]:
areas.append('%s,%s,%s' % (area, self.fill_types[area], \
self.fill_area[area]))
if areas:
return 'chf=' + '|'.join(areas)
# Data
# -------------------------------------------------------------------------
def data_class_detection(self, data):
"""
Detects and returns the data type required based on the range of the
data given. The data given must be lists of numbers within a list.
"""
assert(isinstance(data, list) or isinstance(data, tuple))
max_value = None
for a in data:
assert(isinstance(a, list) or isinstance(a, tuple))
if max_value is None or max(a) > max_value:
max_value = max(a)
for data_class in (SimpleData, TextData, ExtendedData):
if max_value <= data_class.max_value():
return data_class
raise DataOutOfRangeException()
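    # Worked example for data_class_detection (illustrative; the concrete
    # limits are assumptions based on the Google Chart encodings of roughly
    # 61 for simple, 100 for text and 4095 for extended encoding, as exposed
    # by the Data subclasses' max_value() defined earlier in this module):
    #   [[10, 20], [55, 61]] -> max 61   -> SimpleData
    #   [[10, 99]]           -> max 99   -> TextData
    #   [[4000]]             -> max 4000 -> ExtendedData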
def add_data(self, data):
self.data.append(data)
return len(self.data) - 1 # return the "index" of the data set
def data_to_url(self, data_class=None):
if not data_class:
data_class = self.data_class_detection(self.data)
if not issubclass(data_class, Data):
raise UnknownDataTypeException()
return repr(data_class(self.data))
# Axis Labels
# -------------------------------------------------------------------------
def set_axis_labels(self, axis_type, values):
assert(axis_type in Axis.TYPES)
        values = [urllib.quote(str(a)) for a in values]  # str() so numeric labels survive quoting
axis_index = len(self.axis)
axis = LabelAxis(axis_index, axis_type, values)
self.axis.append(axis)
return axis_index
def set_axis_range(self, axis_type, low, high):
assert(axis_type in Axis.TYPES)
axis_index = len(self.axis)
axis = RangeAxis(axis_index, axis_type, low, high)
self.axis.append(axis)
return axis_index
def set_axis_positions(self, axis_index, positions):
try:
self.axis[axis_index].set_positions(positions)
except IndexError:
            raise InvalidParametersException('Axis index %i has not been ' \
                'created' % axis_index)
def set_axis_style(self, axis_index, colour, font_size=None, \
alignment=None):
try:
self.axis[axis_index].set_style(colour, font_size, alignment)
except IndexError:
            raise InvalidParametersException('Axis index %i has not been ' \
                'created' % axis_index)
def axis_to_url(self):
available_axis = []
label_axis = []
range_axis = []
positions = []
styles = []
index = -1
for axis in self.axis:
available_axis.append(axis.axis_type)
if isinstance(axis, RangeAxis):
range_axis.append(repr(axis))
if isinstance(axis, LabelAxis):
label_axis.append(repr(axis))
if axis.positions:
positions.append(axis.positions_to_url())
if axis.has_style:
styles.append(axis.style_to_url())
if not available_axis:
return
url_bits = []
url_bits.append('chxt=%s' % ','.join(available_axis))
if label_axis:
url_bits.append('chxl=%s' % '|'.join(label_axis))
if range_axis:
url_bits.append('chxr=%s' % '|'.join(range_axis))
if positions:
url_bits.append('chxp=%s' % '|'.join(positions))
if styles:
url_bits.append('chxs=%s' % '|'.join(styles))
return '&'.join(url_bits)
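    # Illustrative axis_to_url output, assuming one left range axis and one
    # bottom label axis were added in that order:
    #   'chxt=y,x&chxl=1:|Jan|Feb|Mar&chxr=0,0,100'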
# Markers, Ranges and Fill area (chm)
# -------------------------------------------------------------------------
def markers_to_url(self):
return 'chm=%s' % '|'.join([','.join(a) for a in self.markers])
def add_marker(self, index, point, marker_type, colour, size):
self.markers.append((marker_type, colour, str(index), str(point), \
str(size)))
def add_horizontal_range(self, colour, start, stop):
self.markers.append(('r', colour, '1', str(start), str(stop)))
def add_vertical_range(self, colour, start, stop):
self.markers.append(('R', colour, '1', str(start), str(stop)))
def add_fill_range(self, colour, index_start, index_end):
self.markers.append(('b', colour, str(index_start), str(index_end), \
'1'))
def add_fill_simple(self, colour):
self.markers.append(('B', colour, '1', '1', '1'))
class ScatterChart(Chart):
def __init__(self, *args, **kwargs):
Chart.__init__(self, *args, **kwargs)
def type_to_url(self):
return 'cht=s'
class LineChart(Chart):
def __init__(self, *args, **kwargs):
assert(type(self) != LineChart) # This is an abstract class
Chart.__init__(self, *args, **kwargs)
self.line_styles = {}
self.grid = None
def set_line_style(self, index, thickness=1, line_segment=None, \
blank_segment=None):
value = []
value.append(str(thickness))
if line_segment:
value.append(str(line_segment))
value.append(str(blank_segment))
self.line_styles[index] = value
def set_grid(self, x_step, y_step, line_segment=1, \
blank_segment=0):
self.grid = '%s,%s,%s,%s' % (x_step, y_step, line_segment, \
blank_segment)
def get_url_bits(self):
url_bits = Chart.get_url_bits(self)
if self.line_styles:
style = []
# for index, values in self.line_style.items():
for index in xrange(max(self.line_styles) + 1):
if index in self.line_styles:
values = self.line_styles[index]
else:
values = ('1', )
style.append(','.join(values))
url_bits.append('chls=%s' % '|'.join(style))
if self.grid:
url_bits.append('chg=%s' % self.grid)
return url_bits
class SimpleLineChart(LineChart):
def type_to_url(self):
return 'cht=lc'
class XYLineChart(LineChart):
def type_to_url(self):
return 'cht=lxy'
class BarChart(Chart):
def __init__(self, *args, **kwargs):
assert(type(self) != BarChart) # This is an abstract class
Chart.__init__(self, *args, **kwargs)
self.bar_width = None
def set_bar_width(self, bar_width):
self.bar_width = bar_width
    def get_url_bits(self):
        url_bits = Chart.get_url_bits(self)
        if self.bar_width is not None:
            url_bits.append('chbh=%i' % self.bar_width)
        return url_bits
class StackedHorizontalBarChart(BarChart):
def type_to_url(self):
return 'cht=bhs'
class StackedVerticalBarChart(BarChart):
def type_to_url(self):
return 'cht=bvs'
class GroupedBarChart(BarChart):
def __init__(self, *args, **kwargs):
assert(type(self) != GroupedBarChart) # This is an abstract class
BarChart.__init__(self, *args, **kwargs)
self.bar_spacing = None
def set_bar_spacing(self, spacing):
self.bar_spacing = spacing
def get_url_bits(self):
# Skip 'BarChart.get_url_bits' and call Chart directly so the parent
# doesn't add "chbh" before we do.
url_bits = Chart.get_url_bits(self)
if self.bar_spacing is not None:
if self.bar_width is None:
raise InvalidParametersException('Bar width is required to ' \
'be set when setting spacing')
url_bits.append('chbh=%i,%i' % (self.bar_width, self.bar_spacing))
        elif self.bar_width is not None:
            url_bits.append('chbh=%i' % self.bar_width)
return url_bits
class GroupedHorizontalBarChart(GroupedBarChart):
def type_to_url(self):
return 'cht=bhg'
class GroupedVerticalBarChart(GroupedBarChart):
def type_to_url(self):
return 'cht=bvg'
class PieChart(Chart):
def __init__(self, *args, **kwargs):
assert(type(self) != PieChart) # This is an abstract class
Chart.__init__(self, *args, **kwargs)
self.pie_labels = []
def set_pie_labels(self, labels):
self.pie_labels = [urllib.quote(a) for a in labels]
def get_url_bits(self):
url_bits = Chart.get_url_bits(self)
if self.pie_labels:
url_bits.append('chl=%s' % '|'.join(self.pie_labels))
return url_bits
class PieChart2D(PieChart):
def type_to_url(self):
return 'cht=p'
class PieChart3D(PieChart):
def type_to_url(self):
return 'cht=p3'
class VennChart(Chart):
def type_to_url(self):
return 'cht=v'
def test():
chart = GroupedVerticalBarChart(320, 200)
chart = PieChart2D(320, 200)
chart = ScatterChart(320, 200)
chart = SimpleLineChart(320, 200)
sine_data = [math.sin(float(a) / 10) * 2000 + 2000 for a in xrange(100)]
random_data = [a * random.random() * 30 for a in xrange(40)]
random_data2 = [random.random() * 4000 for a in xrange(10)]
# chart.set_bar_width(50)
# chart.set_bar_spacing(0)
chart.add_data(sine_data)
chart.add_data(random_data)
chart.add_data(random_data2)
# chart.set_line_style(1, thickness=2)
# chart.set_line_style(2, line_segment=10, blank_segment=5)
# chart.set_title('heloooo')
# chart.set_legend(('sine wave', 'random * x'))
# chart.set_colours(('ee2000', 'DDDDAA', 'fF03f2'))
# chart.fill_solid(Chart.BACKGROUND, '123456')
# chart.fill_linear_gradient(Chart.CHART, 20, '004070', 1, '300040', 0,
# 'aabbcc00', 0.5)
# chart.fill_linear_stripes(Chart.CHART, 20, '204070', .2, '300040', .2,
# 'aabbcc00', 0.2)
axis_left_index = chart.set_axis_range(Axis.LEFT, 0, 10)
axis_left_index = chart.set_axis_range(Axis.LEFT, 0, 10)
axis_left_index = chart.set_axis_range(Axis.LEFT, 0, 10)
axis_right_index = chart.set_axis_range(Axis.RIGHT, 5, 30)
axis_bottom_index = chart.set_axis_labels(Axis.BOTTOM, [1, 25, 95])
chart.set_axis_positions(axis_bottom_index, [1, 25, 95])
chart.set_axis_style(axis_bottom_index, '003050', 15)
# chart.set_pie_labels(('apples', 'oranges', 'bananas'))
# chart.set_grid(10, 10)
# for a in xrange(0, 100, 10):
# chart.add_marker(1, a, 'a', 'AACA20', 10)
chart.add_horizontal_range('00A020', .2, .5)
chart.add_vertical_range('00c030', .2, .4)
chart.add_fill_simple('303030A0')
chart.download('test.png')
url = chart.get_url()
print url
if 0:
data = urllib.urlopen(chart.get_url()).read()
open('meh.png', 'wb').write(data)
os.system('start meh.png')
if __name__ == '__main__':
test()
|
mit
| 5,788,875,282,920,004,000 | 30.131653 | 79 | 0.549667 | false |
marvin-jens/fast_ska
|
setup.py
|
1
|
2027
|
from setuptools import setup
from setuptools.extension import Extension
try:
from Cython.Distutils import build_ext
from Cython.Build import cythonize
except ImportError:
use_cython = False
else:
use_cython = True
cmdclass = { }
ext_modules = [ ]
if use_cython:
ext_modules += [
#Extension("ska_kmers", [ "ska_kmers.pyx" ], extra_compile_args=['-fopenmp'], extra_link_args=['-fopenmp'],),
Extension("ska_kmers", [ "ska_kmers.pyx" ], ),
]
cmdclass.update({ 'build_ext': build_ext })
else:
ext_modules += [
Extension("ska_kmers", [ "ska_kmers.c" ]),
]
setup(
name = "fast_ska",
version = "0.9.3",
description='A fast Cython implementation of the "Streaming K-mer Assignment" algorithm initially described in Lambert et al. 2014 (PMID: 24837674)',
url = 'https://github.com/marvin-jens/fast_ska',
author = 'Marvin Jens',
author_email = 'mjens@mit.edu',
license = 'MIT',
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 4 - Beta',
# Indicate who your project is intended for
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: MIT License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering :: Bio-Informatics',
],
keywords = 'rna rbns k-mer kmer statistics biology bioinformatics',
install_requires=['cython','numpy'],
scripts=['ska'],
cmdclass = cmdclass,
ext_modules=ext_modules,
#ext_modules = cythonize("ska_kmers.pyx")
)
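# Illustrative build commands for this Cython/C fallback layout (exact
# invocation depends on your environment; names assumed from above):
#   python setup.py build_ext --inplace   # compile the ska_kmers extension
#   pip install .                         # or install the whole package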
|
mit
| 7,618,767,076,867,865,000 | 30.671875 | 153 | 0.622595 | false |
the-virtual-brain/tvb-hpc
|
examples/btest.py
|
1
|
1617
|
import numpy as np
import loopy as lp
target = lp.CudaTarget()
kernel = lp.make_kernel(
"{ [i_node,j_node]: 0<=i_node,j_node<n_node}",
"""
<float32> coupling_value = params(1)
<float32> speed_value = params(0)
<float32> dt=0.1
<float32> M_PI_F = 2.0
<float32> rec_n = 1.0f / n_node
<float32> rec_speed_dt = 1.0f / speed_value / dt
<float32> omega = 10.0 * 2.0 * M_PI_F / 1e3
<float32> sig = sqrt(dt) * sqrt(2.0 * 1e-5)
<float32> rand = 1.0
for i_node
tavg[i_node]=0.0f {id = clear}
end
for i_node
<float32> theta_i = state[i_node] {id = coupling1, dep=*}
<float32> sum = 0.0 {id = coupling2}
for j_node
<float32> wij = weights[j_node] {id = coupling3, dep=coupling1:coupling2}
if wij != 0.0
<int> dij = lengths[j_node] * rec_speed_dt {id = coupling4, dep=coupling3}
<float32> theta_j = state[j_node]
sum = sum + wij * sin(theta_j - theta_i)
end
end
theta_i = theta_i + dt * (omega + coupling_value * rec_n * sum) {id = out1, dep=coupling4}
theta_i = theta_i + (sig * rand) {id = out2, dep=out1}
theta_i = wrap_2_pi(theta_i) {id = out3, dep=out2}
tavg[i_node] = tavg[i_node] + sin(theta_i) {id = out4, dep=out3}
state[i_node] = theta_i {dep=*coupling1}
end
""", assumptions="n_node>=0")
kernel = lp.add_dtypes(kernel, dict(tavg=np.float32, state=np.float32, weights=np.float32, lengths=np.float32))
kernel = kernel.copy(target=lp.CudaTarget())
code = lp.generate_code_v2(kernel)
print (kernel)
print (code.host_code())
print (code.device_code())
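# Note (illustrative, not from the original script): generate_code_v2 only
# renders the CUDA source here. Actually launching the kernel would require
# compiling code.device_code() with a CUDA toolchain (e.g. via
# pycuda.compiler.SourceModule) and binding tavg/state/weights/lengths to
# device arrays yourself.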
|
apache-2.0
| 4,293,833,689,267,461,600 | 31.34 | 111 | 0.589363 | false |
zaxliu/deepnap
|
experiments/kdd-exps/experiment_DynaQNN_130_Feb10_2317.py
|
1
|
5180
|
# System built-in modules
import time
from datetime import datetime
import sys
import os
from multiprocessing import Pool
# Project dependency modules
import pandas as pd
pd.set_option('mode.chained_assignment', None) # block warnings due to DataFrame value assignment
import lasagne
# Project modules
sys.path.append('../')
from sleep_control.traffic_emulator import TrafficEmulator
from sleep_control.traffic_server import TrafficServer
from sleep_control.controller import QController, DummyController, NController
from sleep_control.integration import Emulation
from sleep_control.env_models import SJTUModel
from rl.qtable import QAgent
from rl.qnn_theano import QAgentNN
from rl.mixin import PhiMixin, DynaMixin
sys_stdout = sys.stdout
log_prefix = '_'.join(['msg'] + os.path.basename(__file__).replace('.', '_').split('_')[1:5])
log_file_name = "{}_{}.log".format(log_prefix, sys.argv[1])
# Composite classes
class Dyna_QAgentNN(DynaMixin, QAgentNN):
def __init__(self, **kwargs):
super(Dyna_QAgentNN, self).__init__(**kwargs)
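# Illustrative note on the mixin composition above: Python's MRO here is
# Dyna_QAgentNN -> DynaMixin -> QAgentNN -> ..., so DynaMixin can wrap
# QAgentNN behaviour while super().__init__ forwards **kwargs down the
# chain. Inspect it with:
#   print Dyna_QAgentNN.__mro__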
# Parameters
# |- Data
location = 'dmW'
# |- Agent
# |- QAgent
actions = [(True, None), (False, 'serve_all')]
gamma, alpha = 0.9, 0.9 # TD backup
explore_strategy, epsilon = 'epsilon', 0.02 # exploration
# |- QAgentNN
# | - Phi
# phi_length = 5
# dim_state = (1, phi_length, 3+2)
# range_state_slice = [(0, 10), (0, 10), (0, 10), (0, 1), (0, 1)]
# range_state = [[range_state_slice]*phi_length]
# | - No Phi
phi_length = 0
dim_state = (1, 1, 3)
range_state = ((((0, 10), (0, 10), (0, 10)),),)
# | - Other params
momentum, learning_rate = 0.9, 0.01 # SGD
num_buffer, memory_size, batch_size, update_period, freeze_period = 2, 200, 100, 4, 16
reward_scaling, reward_scaling_update, rs_period = 1, 'adaptive', 32 # reward scaling
# |- Env model
model_type, traffic_window_size = 'IPP', 50
stride, n_iter, adjust_offset = 2, 3, 1e-22
eval_period, eval_len = 4, 100
n_belief_bins, max_queue_len = 0, 20
Rs, Rw, Rf, Co, Cw = 1.0, -1.0, -10.0, -5.0, -0.5
traffic_params = (model_type, traffic_window_size,
stride, n_iter, adjust_offset,
eval_period, eval_len,
n_belief_bins)
queue_params = (max_queue_len,)
beta = 0.5 # R = (1-beta)*ServiceReward + beta*Cost
reward_params = (Rs, Rw, Rf, Co, Cw, beta)
# |- DynaQ
num_sim = 2
# |- Env
# |- Time
start_time = pd.to_datetime("2014-10-15 09:40:00")
total_time = pd.Timedelta(days=7)
time_step = pd.Timedelta(seconds=2)
backoff_epochs = num_buffer*memory_size+phi_length
head_datetime = start_time - time_step*backoff_epochs
tail_datetime = head_datetime + total_time
TOTAL_EPOCHS = int(total_time/time_step)
# |- Reward
rewarding = {'serve': Rs, 'wait': Rw, 'fail': Rf}
# load from processed data
session_df = pd.read_csv(
filepath_or_buffer='../data/trace_{}.dat'.format(location),
parse_dates=['startTime_datetime', 'endTime_datetime']
)
te = TrafficEmulator(
session_df=session_df, time_step=time_step,
head_datetime=head_datetime, tail_datetime=tail_datetime,
rewarding=rewarding,
verbose=2)
ts = TrafficServer(cost=(Co, Cw), verbose=2)
env_model = SJTUModel(traffic_params, queue_params, reward_params, 2)
agent = Dyna_QAgentNN(
env_model=env_model, num_sim=num_sim,
dim_state=dim_state, range_state=range_state,
f_build_net = None,
batch_size=batch_size, learning_rate=learning_rate, momentum=momentum,
reward_scaling=reward_scaling, reward_scaling_update=reward_scaling_update, rs_period=rs_period,
update_period=update_period, freeze_period=freeze_period,
memory_size=memory_size, num_buffer=num_buffer,
# Below is QAgent params
actions=actions, alpha=alpha, gamma=gamma,
explore_strategy=explore_strategy, epsilon=epsilon,
verbose=2)
c = QController(agent=agent)
emu = Emulation(te=te, ts=ts, c=c, beta=beta)
# Heavyliftings
t = time.time()
sys.stdout = sys_stdout
log_path = './log/'
if os.path.isfile(log_path+log_file_name):
print "Log file {} already exist. Experiment cancelled.".format(log_file_name)
else:
log_file = open(log_path+log_file_name,"w")
print datetime.now().strftime('[%Y-%m-%d %H:%M:%S]'),
print '{}%'.format(int(100.0*emu.epoch/TOTAL_EPOCHS)),
print log_file_name
time.sleep(1)
sys.stdout = log_file
while emu.epoch is not None and emu.epoch<TOTAL_EPOCHS:
# log time
print "Epoch {},".format(emu.epoch),
left = emu.te.head_datetime + emu.te.epoch*emu.te.time_step
right = left + emu.te.time_step
print "{} - {}".format(left.strftime("%Y-%m-%d %H:%M:%S"), right.strftime("%Y-%m-%d %H:%M:%S"))
emu.step()
print
if emu.epoch%(0.05*TOTAL_EPOCHS)==0:
sys.stdout = sys_stdout
print datetime.now().strftime('[%Y-%m-%d %H:%M:%S]'),
print '{}%'.format(int(100.0*emu.epoch/TOTAL_EPOCHS)),
print log_file_name
time.sleep(1)
sys.stdout = log_file
sys.stdout = sys_stdout
log_file.close()
print
print log_file_name,
print '{:.3f} sec,'.format(time.time()-t),
print '{:.3f} min'.format((time.time()-t)/60)
|
bsd-3-clause
| 2,491,388,140,847,407,000 | 33.304636 | 103 | 0.653089 | false |
ebeshero/Pittsburgh_Frankenstein
|
collateXPrep/svg_tester_collation.py
|
1
|
6475
|
from typing import Pattern
from collatex import *
from xml.dom import pulldom
import string
import re
import json
import glob
regexWhitespace = re.compile(r'\s+')
regexNonWhitespace = re.compile(r'\S+')
regexEmptyTag = re.compile(r'/>$')
regexBlankLine = re.compile(r'\n{2,}')
regexLeadingBlankLine = re.compile(r'^\n')
regexPageBreak = re.compile(r'<pb.+?/>')
RE_MARKUP = re.compile(r'<.+?>')
RE_AMP = re.compile(r'&')
RE_AND = re.compile(r'and')
RE_mOrning = re.compile(r'mOrning')
# Element types: xml, div, head, p, hi, pb, note, lg, l; comment()
# Tags to ignore, with content to keep: xml, comment, anchor
# Structural elements: div, p, lg, l
# Inline elements (empty) retained in normalization: pb, milestone, xi:include
# Inline and block elements (with content) retained in normalization: note, hi, head, ab
# GIs fall into one three classes
# 2017-05-21 ebb: Due to trouble with pulldom parsing XML comments, I have converted these to comment elements
# 2017-05-22 ebb: I've set anchor elements with @xml:ids to be the indicators of collation "chunks" to process together
ignore = ['xml']
inlineEmpty = ['milestone', 'anchor', 'include', 'pb']
inlineContent = ['hi']
blockElement = ['p', 'div', 'lg', 'l', 'head', 'comment', 'note', 'ab', 'cit', 'quote', 'bibl', 'header']
def normalizeSpace(inText):
"""Replaces all whitespace spans with single space characters"""
if regexNonWhitespace.search(inText):
return regexWhitespace.sub('\n', inText)
else:
return ''
def extract(input_xml):
"""Process entire input XML document, firing on events"""
# Start pulling; it continues automatically
doc = pulldom.parseString(input_xml)
output = ''
for event, node in doc:
# elements to ignore: xml
if event == pulldom.START_ELEMENT and node.localName in ignore:
continue
# copy comments intact
elif event == pulldom.COMMENT:
doc.expandNode(node)
output += node.toxml()
# empty inline elements: pb, milestone
elif event == pulldom.START_ELEMENT and node.localName in inlineEmpty:
output += node.toxml()
# non-empty inline elements: note, hi, head, l, lg, div, p, ab,
elif event == pulldom.START_ELEMENT and node.localName in inlineContent:
output += regexEmptyTag.sub('>', node.toxml())
elif event == pulldom.END_ELEMENT and node.localName in inlineContent:
output += '</' + node.localName + '>'
elif event == pulldom.START_ELEMENT and node.localName in blockElement:
output += '\n<' + node.localName + '>\n'
elif event == pulldom.END_ELEMENT and node.localName in blockElement:
output += '\n</' + node.localName + '>'
elif event == pulldom.CHARACTERS:
output += normalizeSpace(node.data)
else:
continue
return output
# def normalize(inputText):
# return regexPageBreak.sub('',inputText)
def normalize(inputText):
return RE_AMP.sub('and',\
RE_MARKUP.sub('', inputText)).lower()
def processToken(inputText):
return {"t": inputText + ' ', "n": normalize(inputText)}
def processWitness(inputWitness, id):
return {'id': id, 'tokens' : [processToken(token) for token in inputWitness]}
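# Illustrative behaviour of the helpers above (hand-computed, not from a
# real run):
#   normalize('The <hi>Rain</hi> & mist')  -> 'the rain and mist'
#   processToken('Rain')                   -> {'t': 'Rain ', 'n': 'rain'}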
fms_input = '''It was on a dreary night of November that I beheld the frame on whic my man completeed, and with an
anxiety that almost amounted to agony I collected the instruments of life around me that I might infuse a
spark of being into the lifeless thing that lay at my feet. '''
f1818_input = '''From this time Elizabeth Lavenza became my playfellow, and, as we grew older, my
friend. She was docile and good tempered, yet gay and playful as a summer insect.
Although she was lively and animated, her feelings were strong and deep, and her
disposition uncommonly affectionate. No one could better enjoy liberty, yet no one could
submit with more grace than she did to constraint and caprice.'''
fThomas_input = ''' '''
f1823_input = '''<p>The following morning the rain poured down in torrents, and thick mists hid the summits
of the mountains. I rose early, but felt unusually melancholy. The rain depressed me; my
old feelings recurred, and I was miserable. I knew how disappointed my father would be
at this sudden change, and I wished to avoid him until I had recovered myself so far as
to be enabled to conceal those feelings that overpowered me. I knew that they would
remain that day at the inn; <pb xml:id="F1823_v1_218" n="199"/>and as I had ever inured
myself to rain, moisture, and cold, I resolved to go alone to the summit of Montanvert.
I remembered the effect that the view of the tremendous and ever-moving glacier had
produced upon my mind when I first saw it. It had then filled me with a sublime ecstacy
that gave wings to the soul, and allowed it to soar from the obscure world to light and
joy. The sight of the awful and majestic in nature had indeed always the effect of
solemnizing my mind, and causing me to forget the passing cares of life. I determined to
go alone, for I was well acquainted with the path, and the presence of another would
destroy the solitary grandeur of the scene.</p>'''
f1831_input = ''' '''
fms_tokens = regexLeadingBlankLine.sub('',regexBlankLine.sub('\n', extract(fms_input))).split('\n')
f1818_tokens = regexLeadingBlankLine.sub('',regexBlankLine.sub('\n', extract(f1818_input))).split('\n')
fThomas_tokens = regexLeadingBlankLine.sub('',regexBlankLine.sub('\n', extract(fThomas_input))).split('\n')
f1823_tokens = regexLeadingBlankLine.sub('',regexBlankLine.sub('\n', extract(f1823_input))).split('\n')
f1831_tokens = regexLeadingBlankLine.sub('',regexBlankLine.sub('\n', extract(f1831_input))).split('\n')
f1818_tokenlist = processWitness(f1818_tokens, 'f1818')
f1823_tokenlist = processWitness(f1823_tokens, 'f1823')
collation_input = {"witnesses": [f1818_tokenlist, f1823_tokenlist]}
table = collate(collation_input, segmentation=True, output="svg")
outputFile = open('C10-NormalizedTokens/C-10portion' + '.svg', 'w')
print(table, file=outputFile)
# table = collate(collation_input, segmentation=True, layout='vertical')
# test = normalize(f1818_input)
# print(test, table)
# print(f1818_tokenlist)
|
agpl-3.0
| 7,758,116,176,600,863,000 | 50.388889 | 119 | 0.681699 | false |
kedz/sumpy
|
sumpy/io.py
|
1
|
4037
|
import os
import re
import pandas as pd
def load_duc_docset(input_source):
docs = DucSgmlReader().read(input_source)
return docs
def load_duc_abstractive_summaries(input_source):
models = DucAbstractSgmlReader().read(input_source)
return models
class FileInput(object):
def gather_paths(self, source):
"""Determines the type of source and return an iterator over input
document paths. If source is a str or unicode
object, determine if it is also a directory and return an iterator
for all directory files; otherwise treat as a single document input.
If source is any other iterable, treat as an iterable of file
paths."""
if isinstance(source, str) or isinstance(source, unicode):
if os.path.isdir(source):
paths = [os.path.join(source, fname)
for fname in os.listdir(source)]
for path in paths:
yield path
else:
yield source
else:
try:
for path in source:
yield path
except TypeError:
print source, 'is not iterable'
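# Illustrative inputs accepted by FileInput.gather_paths (paths assumed):
#   FileInput().gather_paths('docs/')                # yields every file in docs/
#   FileInput().gather_paths('docs/a.sgml')          # yields the single path
#   FileInput().gather_paths(['a.sgml', 'b.sgml'])   # yields each element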
class DucSgmlReader(FileInput):
def read(self, input_source):
docs = []
for path in self.gather_paths(input_source):
with open(path, u"r") as f:
sgml = "".join(f.readlines())
m = re.search(r"<TEXT>(.*?)</TEXT>", sgml, flags=re.DOTALL)
if m is None:
raise Exception("TEXT not found in " + path)
text = m.group(1).strip()
text_clean = re.sub(r"<[^>]*?>", r"", text)
docs.append(text_clean)
return docs
class DucAbstractSgmlReader(FileInput):
def read(self, input_source):
docs = []
for path in self.gather_paths(input_source):
with open(path, u"r") as f:
sgml = "".join(f.readlines())
m = re.search(r"<SUM[^>]+>(.*?)</SUM>", sgml, flags=re.DOTALL)
if m is None:
raise Exception("SUM not found in " + path)
text = m.group(1).strip()
docs.append(text)
return docs
class MeadDocSentReader(FileInput):
docsent_patt = (r"<DOCSENT DID='([^']+)'\s+DOCNO='([^']+)'\s+"
r"LANG='([^']+)'\s+CORR-DOC='([^']+)'>")
sent_patt = (r"<S PAR=['\"]([^']+)['\"]\s+"
r"RSNT=['\"]([^']+)['\"]\s+"
r"SNO=['\"]([^']+)['\"]>(.*?)</S>")
def read(self, input_source):
docs = []
for path in self.gather_paths(input_source):
sents = []
with open(path, u"r") as f:
xml = "".join(f.readlines())
m = re.search(self.docsent_patt, xml, flags=re.DOTALL)
if m is None:
raise Exception("DOCSENT not found in " + path)
doc_id = m.group(1)
lang = m.group(3)
for s in re.finditer(self.sent_patt, xml, flags=re.DOTALL):
par = int(s.group(1))
rsnt = s.group(2)
sno = s.group(3)
text = s.group(4).strip()
if par > 1:
sents.append(text)
#sents.append({u"doc id": doc_id, u"sent id": int(rsnt),
# u"type": u"body" if par > 1 else u"headline",
# u"text": text.decode("utf-8")})
docs.append("\n".join(sents).decode("utf-8"))
#df = pd.DataFrame(
# sents, columns=[u"doc id", u"type", u"sent id", u"text"])
#df.set_index([u"doc id", u"sent id"], inplace=True)
return docs
def load_demo_docs():
import pkg_resources
input_source = pkg_resources.resource_filename(
"sumpy",
os.path.join("data", "mead_example_docs"))
return MeadDocSentReader().read(input_source)
|
apache-2.0
| 687,597,557,010,788,500 | 37.447619 | 80 | 0.494179 | false |
fabianekc/n7
|
n7.py
|
1
|
2906
|
#!/usr/bin/python
import urllib, inflect, string, json, sys, Algorithmia
# tests
# python n7.py '{"h2t":"http://slashdot.org", "auth":"API_KEY"}'
# python n7.py '{"url":"http://derstandard.at"}'
# python n7.py '{"text":"life is a miracle"}'
# initialize
p = inflect.engine()
text = ""
offset = 7
start_line = -1
end_line = -1
new_text = []
new_line = []
table = string.maketrans("", "")
dict_url = "https://raw.githubusercontent.com/fabianekc/n7/master/nounlist.txt"
# parse input; sample URL: 'http://www.gutenberg.org/cache/epub/97/pg97.txt'
input = json.loads(str(sys.argv[1]))
if 'url' in input:
text = urllib.urlopen(input['url']).read()
elif 'h2t' in input:
if 'auth' in input:
client = Algorithmia.client(input['auth'])
text = client.algo('util/Html2Text/0.1.3').pipe(input['h2t'])
else:
print("Error: provide authentication when using the html2text preprocessing from Algorithmia")
sys.exit()
elif 'text' in input:
text = input['text']
else:
text = urllib.urlopen(input).read()
if 'offset' in input:
offset = input['offset']
if 'dict' in input:
dict_url = input['dict']
if 'start' in input:
start_line = input['start']
if 'end' in input:
end_line = input['end']
if text == "":
print("Error: no input text provided")
sys.exit()
if isinstance(text, str):
text = text.decode('utf-8')
text = text.encode('ascii', 'replace')
text_split = text.split('\n')
if end_line > -1:
text_split = text_split[0:end_line]
if start_line > -1:
text_split = text_split[start_line:]
dict = urllib.urlopen(dict_url).read().split()
ld = len(dict)
# iterate over text
for line in text_split:
for word in line.split():
# when replacing words we need to take care for
# - punc: punctuation
# - sipl: singular / plural
# - new vs final: uppercase / capitalize / lowercase
punc = word.translate(table, string.punctuation)
sipl = p.singular_noun(punc)
if sipl:
new = sipl
else:
new = punc
if (new.lower() in dict):
if punc == word:
if sipl:
final = p.plural(dict[(dict.index(new.lower())+offset)%ld])
else:
                    final = dict[(dict.index(new.lower())+offset)%ld]  # wrap around like the other branches
else:
if sipl:
final = word.replace(punc, p.plural(dict[(dict.index(new.lower())+offset)%ld]))
else:
final = word.replace(punc, dict[(dict.index(new.lower())+offset)%ld])
if new.lower() != new:
if new.upper() == new:
final = final.upper()
else:
final = final.capitalize()
else:
final = word
new_line.append(final)
new_text.append(" ".join(new_line))
new_line = []
print "\n".join(new_text)
|
mit
| -1,707,470,970,441,594,400 | 30.934066 | 102 | 0.569511 | false |
mathiasertl/django-ca
|
ca/django_ca/management/commands/init_ca.py
|
1
|
10835
|
# This file is part of django-ca (https://github.com/mathiasertl/django-ca).
#
# django-ca is free software: you can redistribute it and/or modify it under the terms of the GNU
# General Public License as published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# django-ca is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with django-ca. If not,
# see <http://www.gnu.org/licenses/>.
"""Management command to create a certificate authority.
.. seealso:: https://docs.djangoproject.com/en/dev/howto/custom-management-commands/
"""
import os
import pathlib
import typing
from datetime import timedelta
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec
from django.core.management.base import CommandError
from django.core.management.base import CommandParser
from django.utils import timezone
from ... import ca_settings
from ...extensions import IssuerAlternativeName
from ...extensions import NameConstraints
from ...models import CertificateAuthority
from ...subject import Subject
from ...tasks import cache_crl
from ...tasks import generate_ocsp_key
from ...tasks import run_task
from ...typehints import ParsableKeyType
from ..actions import ExpiresAction
from ..actions import MultipleURLAction
from ..actions import PasswordAction
from ..actions import URLAction
from ..base import BaseCommand
from ..mixins import CertificateAuthorityDetailMixin
class Command(CertificateAuthorityDetailMixin, BaseCommand):
"""Implement :command:`manage.py init_ca`."""
help = "Create a certificate authority."
def add_arguments(self, parser: CommandParser) -> None:
self.add_general_args(parser)
self.add_algorithm(parser)
self.add_key_type(parser)
self.add_key_size(parser)
self.add_ecc_curve(parser)
parser.add_argument(
"--expires",
metavar="DAYS",
action=ExpiresAction,
default=timedelta(365 * 10),
help="CA certificate expires in DAYS days (default: %(default)s).",
)
self.add_ca(
parser,
"--parent",
no_default=True,
help_text="Make the CA an intermediate CA of the named CA. By default, this is a new root CA.",
)
parser.add_argument("name", help="Human-readable name of the CA")
self.add_subject(
parser,
help_text="""The subject of the CA in the format "/key1=value1/key2=value2/...",
valid keys are %s. If "CN" is not set, the name is used."""
% self.valid_subject_keys,
)
self.add_password(
parser,
help_text="Optional password used to encrypt the private key. If no argument is passed, "
"you will be prompted.",
)
parser.add_argument(
"--path",
type=pathlib.PurePath,
help="Path where to store Certificate Authorities (relative to CA_DIR).",
)
parser.add_argument(
"--parent-password",
nargs="?",
action=PasswordAction,
metavar="PASSWORD",
prompt="Password for parent CA: ",
help="Password for the private key of any parent CA.",
)
group = parser.add_argument_group(
"Default hostname",
"The default hostname is used to compute default URLs for services like OCSP. The hostname is "
"usually configured in your settings (current setting: %s), but you can override that value "
"here. The value must be just the hostname and optionally a port, *without* a protocol, e.g. "
'"ca.example.com" or "ca.example.com:8000".' % ca_settings.CA_DEFAULT_HOSTNAME,
)
group = group.add_mutually_exclusive_group()
group.add_argument(
"--default-hostname",
metavar="HOSTNAME",
help="Override the the default hostname configured in your settings.",
)
group.add_argument(
"--no-default-hostname",
dest="default_hostname",
action="store_false",
help="Disable any default hostname configured in your settings.",
)
self.add_acme_group(parser)
group = parser.add_argument_group(
"pathlen attribute",
"""Maximum number of CAs that can appear below this one. A pathlen of zero (the default) means it
can only be used to sign end user certificates and not further CAs.""",
)
group = group.add_mutually_exclusive_group()
group.add_argument(
"--pathlen", default=0, type=int, help="Maximum number of sublevel CAs (default: %(default)s)."
)
group.add_argument(
"--no-pathlen",
action="store_const",
const=None,
dest="pathlen",
help="Do not add a pathlen attribute.",
)
group = parser.add_argument_group(
"X509 v3 certificate extensions for CA",
"""Extensions added to the certificate authority itself. These options cannot be changed without
creating a new authority.""",
)
group.add_argument(
"--ca-crl-url",
action=MultipleURLAction,
help="URL to a certificate revokation list. Can be given multiple times.",
)
group.add_argument("--ca-ocsp-url", metavar="URL", action=URLAction, help="URL of an OCSP responder.")
group.add_argument(
"--ca-issuer-url",
metavar="URL",
action=URLAction,
help="URL to the certificate of your CA (in DER format).",
)
nc_group = parser.add_argument_group(
"Name Constraints", "Add name constraints to the CA, limiting what certificates this CA can sign."
)
nc_group.add_argument(
"--permit-name",
metavar="NAME",
action="append",
default=[],
help="Add the given name to the permitted-subtree.",
)
nc_group.add_argument(
"--exclude-name",
metavar="NAME",
action="append",
default=[],
help="Add the given name to the excluded-subtree.",
)
self.add_ca_args(parser)
def handle( # type: ignore[override] # pylint: disable=too-many-arguments,too-many-locals
self,
name: str,
subject: Subject,
parent: typing.Optional[CertificateAuthority],
expires: timedelta,
key_size: int,
key_type: ParsableKeyType,
ecc_curve: typing.Optional[ec.EllipticCurve],
algorithm: hashes.HashAlgorithm,
pathlen: typing.Optional[int],
password: typing.Optional[bytes],
parent_password: typing.Optional[bytes],
crl_url: typing.List[str],
ocsp_url: typing.Optional[str],
issuer_url: typing.Optional[str],
ca_crl_url: typing.List[str],
ca_ocsp_url: typing.Optional[str],
ca_issuer_url: typing.Optional[str],
permit_name: typing.List[str],
exclude_name: typing.List[str],
caa: str,
website: str,
tos: str,
**options: typing.Any
) -> None:
if not os.path.exists(ca_settings.CA_DIR): # pragma: no cover
# TODO: set permissions
os.makedirs(ca_settings.CA_DIR)
# In case of CAs, we silently set the expiry date to that of the parent CA if the user specified a
# number of days that would make the CA expire after the parent CA.
#
# The reasoning is simple: When issuing the child CA, the default is automatically after that of the
# parent if it wasn't issued on the same day.
if parent and timezone.now() + expires > parent.expires:
expires = parent.expires # type: ignore[assignment]
if parent and not parent.allows_intermediate_ca:
raise CommandError("Parent CA cannot create intermediate CA due to pathlen restrictions.")
if not parent and ca_crl_url:
raise CommandError("CRLs cannot be used to revoke root CAs.")
if not parent and ca_ocsp_url:
raise CommandError("OCSP cannot be used to revoke root CAs.")
# See if we can work with the private key
if parent:
self.test_private_key(parent, parent_password)
# Set CommonName to name if not set in subject
if "CN" not in subject:
subject["CN"] = name
name_constraints = NameConstraints({"value": {"permitted": permit_name, "excluded": exclude_name}})
issuer_alternative_name = options[IssuerAlternativeName.key]
if issuer_alternative_name is None:
issuer_alternative_name = ""
kwargs = {}
for opt in ["path", "default_hostname"]:
if options[opt] is not None:
kwargs[opt] = options[opt]
if ca_settings.CA_ENABLE_ACME: # pragma: no branch; never False because parser throws error already
# These settings are only there if ACME is enabled
for opt in ["acme_enabled", "acme_requires_contact"]:
if options[opt] is not None:
kwargs[opt] = options[opt]
try:
ca = CertificateAuthority.objects.init(
name=name,
subject=subject,
expires=expires,
algorithm=algorithm,
parent=parent,
pathlen=pathlen,
issuer_url=issuer_url,
issuer_alt_name=",".join(issuer_alternative_name),
crl_url=crl_url,
ocsp_url=ocsp_url,
ca_issuer_url=ca_issuer_url,
ca_crl_url=ca_crl_url,
ca_ocsp_url=ca_ocsp_url,
name_constraints=name_constraints,
password=password,
parent_password=parent_password,
ecc_curve=ecc_curve,
key_type=key_type,
key_size=key_size,
caa=caa,
website=website,
terms_of_service=tos,
**kwargs
)
except Exception as ex:
raise CommandError(ex) from ex
# Generate OCSP keys and cache CRLs
run_task(generate_ocsp_key, serial=ca.serial, password=password)
run_task(cache_crl, serial=ca.serial, password=password)
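# Illustrative invocation (flag names taken from the parser above; adjust
# paths and subject to your deployment):
#   python manage.py init_ca --pathlen 1 --subject "/CN=Root CA" "Root CA"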
|
gpl-3.0
| -3,941,580,537,189,006,000 | 38.115523 | 110 | 0.602492 | false |
hipikat/django-revkom
|
revkom/utils/mixins.py
|
1
|
4147
|
"""
Generic mixins for classes.
"""
from functools import partial
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
class GetSettingsMixin(object):
"""
A generic class mixin which adds a _get_settings() method, which will
return a tuple of settings or throw appropriate errors if they aren't
defined. TODO: Extend to allow default settings or warn instaed of error.
TODO: Create methods at class-creation time; allow renaming.
"""
def get_setting(self, setting):
try:
return getattr(settings, setting)
except AttributeError:
raise ImproperlyConfigured(
"The class {} requires the setting {} to be defined.".format(
self.__class__.__name__, setting))
def get_settings(self, *get_list, **default_list):
setting_list = []
for setting in get_list:
            setting_list.append(self.get_setting(setting))
for setting, default in default_list.iteritems():
setting_list.append(getattr(settings, setting, default))
return setting_list
class InstanceProp(object):
    def accessors(self):
        access = [self.getter]
        if hasattr(self, 'setter'):
            access.append(self.setter)
        if hasattr(self, 'deleter'):
            access.append(self.deleter)
        return access
def getter(self):
raise NotImplementedError
class BooleanProp(InstanceProp):
def __init__(self, default):
self._value = bool(default) if default else None
def getter(self):
return self._value
def setter(self, value):
self._value = bool(value)
class StringProp(InstanceProp):
# TODO: Should this be unicode or something?
def __init__(self, default):
self._value = str(default) if default else None
def getter(self):
return self._value
def setter(self, value):
self._value = str(value)
class PropMetaclass(type):
_prop_types = {
'boolean': BooleanProp,
'string': StringProp,
}
def __new__(mcls, name, bases, attrs):
#def print_foo(self):
# print('foo')
#attrs['print_bar'] = print_foo
def _add_prop(ptype_class, self, prop_name, *init_args, **init_kwargs):
"""
Add a property called prop_name to self. The property's getter, setter
and deleter are taken from methods (with those names) attached to a new
instance of ``ptype_class``, where they exist. The propety is
initialised with ``init_args`` and ``init_kwargs``.
"""
# TODO: Warn if property already exists?
#import pdb; pdb.set_trace()
prop_instance = ptype_class(*init_args, **init_kwargs)
accessors = [prop_instance.getter]
if hasattr(prop_instance, 'setter'):
accessors.append(prop_instance.setter)
if hasattr(prop_instance, 'deleter'):
accessors.append(prop_instance.deleter)
prop = property(*accessors)
setattr(self, prop_name, prop)
#attrs['_add_prop'] = _add_prop
for ptype_name, ptype_class in mcls._prop_types.iteritems():
prop_adder_name = "add_%s_prop" % ptype_name
prop_adder_func = partial(_add_prop, ptype_class)
attrs[prop_adder_name] = prop_adder_func
#setattr(instance, prop_adder_name, prop_adder_func)
return super(PropMetaclass, mcls).__new__(mcls, name, bases, attrs)
# def __new__(cls, *args, **kwargs):
# """
# Return a new instance of cls, with methods like ``add_boolean_prop(...)``
# attached, ready to be used in the instance's ``__init__(...)`` method.
# """
# instance = super(PropMixin, cls).__new__(cls, *args, **kwargs)
# for ptype_name, ptype_class in cls._prop_types.iteritems():
# prop_adder_name = "add_%s_prop" % ptype_name
# prop_adder_func = partial(instance._add_prop, ptype_class)
# setattr(instance, prop_adder_name, prop_adder_func)
# return instance
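# Sketch of the intended usage of the experimental property machinery above
# (illustrative only; class and property names are assumptions):
#   class Config(object):
#       __metaclass__ = PropMetaclass
#   cfg = Config()
#   cfg.add_boolean_prop('enabled', True)   # attaches a boolean property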
|
bsd-2-clause
| 7,571,993,220,421,842,000 | 33.558333 | 83 | 0.600434 | false |
alaw1290/CS591B1
|
analysis/test_data_weighted_sums.py
|
1
|
2907
|
import pickle
import numpy as np
import cf_recommender as cf
import similarity_functions as sf
import movie_reviews_compiler as mrc
path = '../data/'
def run_test_weighted_sums(cosine=True):
'''compute the predictions for masked values in the testing set (user review vectors) using the training set (critic review matrix)
model for predictions: weighted sum of critics using cosine similiarity'''
#get testing data
audience_names = pickle.load(open(path + 'audience_names.pkl','rb'))
audience_review_test_set = pickle.load(open(path + 'audience_test_data.pkl','rb'))
#get training data
movie_critic, critic_movie, matrix, movie_keys, critic_keys = mrc.import_pickle()
#compute average ratings for weighted sum
avg_ratings = {}
for i in range(len(matrix)):
avg_ratings[critic_keys[i]] = sum(matrix[i])/len([0 for j in matrix[i] if j != 0])
#store results for pickle
weighted_sums_results = {}
for aud_review_index in range(len(audience_review_test_set)):
name = audience_names[aud_review_index].split("'s")[0]
print('\nTest Vector: ' + name)
test_vector = audience_review_test_set[aud_review_index]
#find indicies of masks for testing
reviewed_indicies = [i for i in range(len(test_vector)) if test_vector[i] != 0]
#if there are more than 1 reviews for the user:
if(len(reviewed_indicies) > 1):
actual_vals = []
prediced_vals = []
av = []
pv = []
for mask in reviewed_indicies:
#mask selected index
vector = [i for i in test_vector]
vector[mask] = 0
#compute predicted value
if(cosine):
critics_sim = sf.run_cosine(vector,matrix,movie_critic,movie_keys,critic_keys)
else:
critics_sim = sf.run_pearson(vector,matrix,movie_critic,movie_keys,critic_keys)
result_vector = cf.weighted_sums(vector,critics_sim,movie_keys,critic_keys,movie_critic, avg_ratings)
print('\tPredicted for index ' + str(mask) + ': ' + str(result_vector[mask]))
print('\tActual for index ' + str(mask) + ': ' + str(test_vector[mask]))
prediced_vals.append(result_vector[mask])
actual_vals.append(test_vector[mask])
av.append((mask,test_vector[mask]))
pv.append((mask,result_vector[mask]))
#calculate accuracy using the root mean square error value
RMSE = float(((sum([(actual_vals[i]-prediced_vals[i])**2 for i in range(len(reviewed_indicies))]))/len(reviewed_indicies))**0.5)
print('\n\tRMSE for Test Vector: ' + str(RMSE))
weighted_sums_results[name] = {'actual':av,'predicted':pv,'RMSE':RMSE}
else:
print('\n\tOnly 1 review not predictable')
weighted_sums_results[name] = 'Error'
#export weighted sums results
if(cosine):
pickle.dump(weighted_sums_results, open(path + "weighted_sums_results_cosine.pkl", "wb" ) )
else:
pickle.dump(weighted_sums_results, open(path + "weighted_sums_results_pearson.pkl", "wb" ) )
return weighted_sums_results
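# Worked example of the RMSE used above (illustrative values):
#   actual = [4.0, 3.0], predicted = [3.0, 5.0]
#   RMSE = (((4-3)**2 + (3-5)**2) / 2) ** 0.5 = 2.5 ** 0.5 ~= 1.58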
|
mit
| 2,743,727,718,878,611,500 | 34.024096 | 132 | 0.687994 | false |
csirtgadgets/bearded-avenger
|
test/zsqlite/test_store_sqlite_tokens.py
|
1
|
5082
|
import logging
import os
import tempfile
from argparse import Namespace
import pytest
from cif.store import Store
from cifsdk.utils import setup_logging
import arrow
from datetime import datetime
from pprint import pprint
from cifsdk.exceptions import AuthError
args = Namespace(debug=True, verbose=None)
setup_logging(args)
logger = logging.getLogger(__name__)
@pytest.fixture
def store():
dbfile = tempfile.mktemp()
with Store(store_type='sqlite', dbfile=dbfile) as s:
s._load_plugin(dbfile=dbfile)
s.token_create_admin()
yield s
s = None
if os.path.isfile(dbfile):
os.unlink(dbfile)
@pytest.fixture
def indicator():
return {
'indicator': 'example.com',
'tags': 'botnet',
'provider': 'csirtgadgets.org',
'group': 'everyone',
'lasttime': arrow.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
'itype': 'fqdn',
}
def test_store_sqlite_tokens(store):
t = store.store.tokens.admin_exists()
assert t
t = list(store.store.tokens.search({'token': t}))
assert len(t) > 0
t = t[0]['token']
assert store.store.tokens.update_last_activity_at(t, datetime.now())
assert store.store.tokens.check(t, 'read')
assert store.store.tokens.read(t)
assert store.store.tokens.write(t)
assert store.store.tokens.admin(t)
assert store.store.tokens.last_activity_at(t) is not None
assert store.store.tokens.update_last_activity_at(t, datetime.now())
def test_store_sqlite_tokens_groups(store):
t = store.store.tokens.admin_exists()
assert t
assert store.store.tokens.edit({'token': t, 'write': False})
assert store.store.tokens.delete({'token': t})
# groups
t = store.store.tokens.create({
'username': 'test',
'groups': ['staff', 'everyone'],
'read': True,
'write': True
})
assert t
assert t['groups'] == ['staff', 'everyone']
assert t['write']
assert t['read']
assert not t['admin']
i = None
try:
i = store.store.indicators.create(t, {
'indicator': 'example.com',
'group': 'staff2',
'provider': 'example.com',
'tags': ['test'],
'itype': 'fqdn'
})
except AuthError as e:
pass
    assert i is None or i == 0  # creation must have been rejected
i = store.store.indicators.create(t, {
'indicator': 'example.com',
'group': 'staff',
'provider': 'example.com',
'tags': ['test'],
'itype': 'fqdn'
})
assert i
x = store.store.indicators.search(t, {'indicator': 'example.com'})
assert len(list(x)) > 0
x = store.store.indicators.search(t, {'itype': 'fqdn'})
assert len(list(x)) > 0
def test_store_sqlite_tokens_groups2(store, indicator):
t = store.store.tokens.create({
'username': 'test',
'groups': ['staff'],
'read': True,
'write': True
})
i = None
try:
i = store.store.indicators.create(t, {
'indicator': 'example.com',
'group': 'staff2',
'provider': 'example.com',
'tags': ['test'],
'itype': 'fqdn',
'lasttime': arrow.utcnow().datetime,
'reporttime': arrow.utcnow().datetime
})
except AuthError as e:
pass
assert (i is None or i == 0)
def test_store_sqlite_tokens_groups3(store, indicator):
t = store.store.tokens.create({
'username': 'test',
'groups': ['staff'],
'write': True
})
t2 = store.store.tokens.create({
'username': 'test',
'groups': ['staff2'],
'read': True,
})
i = store.store.indicators.create(t, {
'indicator': 'example.com',
'group': 'staff',
'provider': 'example.com',
'tags': ['test'],
'itype': 'fqdn',
'lasttime': arrow.utcnow().datetime,
'reporttime': arrow.utcnow().datetime
})
assert i
i = store.store.indicators.search(t2, {'itype': 'fqdn'})
assert len(list(i)) == 0
i = store.store.indicators.search(t2, {'indicator': 'example.com'})
assert len(list(i)) == 0
def test_store_sqlite_tokens_groups4(store, indicator):
t = store.store.tokens.create({
'username': 'test',
'groups': ['staff', 'staff2'],
'write': True,
'read': True
})
i = store.store.indicators.create(t, {
'indicator': 'example.com',
'group': 'staff',
'provider': 'example.com',
'tags': ['test'],
'itype': 'fqdn',
'lasttime': arrow.utcnow().datetime,
'reporttime': arrow.utcnow().datetime
})
assert i
i = store.store.indicators.create(t, {
'indicator': 'example.com',
'group': 'staff2',
'provider': 'example.com',
'tags': ['test'],
'itype': 'fqdn',
'lasttime': arrow.utcnow().datetime,
'reporttime': arrow.utcnow().datetime
})
assert i
i = store.store.indicators.search(t['token'], {'itype': 'fqdn', 'groups': 'staff'})
assert len(list(i)) == 1
|
mpl-2.0
| 5,524,347,594,183,835,000 | 23.4375 | 87 | 0.561196 | false |
tbenthompson/taskloaf
|
taskloaf/promise.py
|
1
|
5194
|
import asyncio
import taskloaf
from .refcounting import Ref
from .object_ref import put, is_ref, ObjectRef
import logging
logger = logging.getLogger(__name__)
def await_handler(args):
req_addr = args[0]
pr = args[1]
async def await_wrapper():
result_ref = await pr._get_future()
taskloaf.ctx().messenger.send(
req_addr, taskloaf.ctx().protocol.SETRESULT, [pr, result_ref]
)
taskloaf.ctx().executor.run_work(await_wrapper)
class Promise:
def __init__(self, running_on):
def on_delete(_id):
del taskloaf.ctx().promises[_id]
self.ref = Ref(on_delete)
self.running_on = running_on
self.ensure_future_exists()
def encode_capnp(self, msg):
self.ref.encode_capnp(msg.ref)
msg.runningOn = self.running_on
@classmethod
def decode_capnp(cls, msg):
out = Promise.__new__(Promise)
out.ref = Ref.decode_capnp(msg.ref)
out.running_on = msg.runningOn
return out
def ensure_future_exists(self):
taskloaf.ctx().promises[self.ref._id] = asyncio.Future(
loop=taskloaf.ctx().executor.ioloop
)
def _get_future(self):
return taskloaf.ctx().promises[self.ref._id]
def __await__(self):
if taskloaf.ctx().name != self.ref.owner:
self.ensure_future_exists()
taskloaf.ctx().messenger.send(
self.ref.owner, taskloaf.ctx().protocol.AWAIT, [self]
)
result_ref = yield from self._get_future().__await__()
out = yield from result_ref.get().__await__()
if isinstance(out, TaskExceptionCapture):
raise out.e
return out
def set_result(self, result):
self._get_future().set_result(result)
def then(self, f, to=None):
return task(f, self, to=to)
def next(self, f, to=None):
return self.then(lambda x: f(), to)
class TaskExceptionCapture:
def __init__(self, e):
self.e = e
def task_runner(pr, in_f, *in_args):
async def task_wrapper():
f = await ensure_obj(in_f)
args = []
for a in in_args:
args.append(await ensure_obj(a))
try:
result = await taskloaf.ctx().executor.wait_for_work(f, *args)
# catches all exceptions except system-exiting exceptions that inherit
# from BaseException
except Exception as e:
logger.exception("exception during task")
result = TaskExceptionCapture(e)
_unwrap_promise(pr, result)
taskloaf.ctx().executor.run_work(task_wrapper)
def _unwrap_promise(pr, result):
if isinstance(result, Promise):
def unwrap_then(x):
_unwrap_promise(pr, x)
result.then(unwrap_then)
else:
result_ref = put(result)
if pr.ref.owner == taskloaf.ctx().name:
pr.set_result(result_ref)
else:
taskloaf.ctx().messenger.send(
pr.ref.owner,
taskloaf.ctx().protocol.SETRESULT,
[pr, result_ref],
)
def task_handler(args):
task_runner(args[1], args[2], *args[3:])
def set_result_handler(args):
args[1].set_result(args[2])
# f and args can be provided in two forms:
# -- a python object (f should be callable or awaitable)
# -- a dref to a serialized object in the memory manager
# if f is a function and the task is being run locally, f is never serialized,
# but when the task is being run remotely, f is entered into the
def task(f, *args, to=None):
ctx = taskloaf.ctx()
if to is None:
to = ctx.name
out_pr = Promise(to)
if to == ctx.name:
task_runner(out_pr, f, *args)
else:
msg_objs = [out_pr, ensure_ref(f)] + [ensure_ref(a) for a in args]
ctx.messenger.send(to, ctx.protocol.TASK, msg_objs)
return out_pr
async def _ensure_obj_helper(x):
if type(x) == Promise:
return await x
else:
return x
async def ensure_obj(maybe_ref):
if is_ref(maybe_ref):
return await _ensure_obj_helper(await maybe_ref.get())
if type(maybe_ref) == Promise:
return await maybe_ref
else:
return maybe_ref
def ensure_ref(v):
if is_ref(v):
return v
return put(v)
class TaskMsg:
@staticmethod
def serialize(args):
pr = args[0]
objrefs = args[1:]
m = taskloaf.message_capnp.Message.new_message()
m.init("task")
pr.encode_capnp(m.task.promise)
m.task.init("objrefs", len(objrefs))
for i, ref in enumerate(objrefs):
ref.encode_capnp(m.task.objrefs[i])
return m
@staticmethod
def deserialize(msg):
out = [msg.sourceName, Promise.decode_capnp(msg.task.promise)]
for i in range(len(msg.task.objrefs)):
out.append(ObjectRef.decode_capnp(msg.task.objrefs[i]))
return out
def when_all(ps, to=None):
if to is None:
to = ps[0].running_on
async def wait_for_all():
results = []
for i, p in enumerate(ps):
results.append(await p)
return results
return task(wait_for_all, to=to)
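# Illustrative chaining of the primitives above (function names are
# placeholders, not part of taskloaf):
#   pr = task(load_data)            # run load_data as a task
#   pr2 = pr.then(transform)       # feed its awaited result to transform
#   combined = when_all([pr, pr2])  # promise resolving to a list of results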
|
mit
| -699,904,581,837,523,300 | 25.100503 | 78 | 0.587986 | false |
AndrewBMartin/pygurobi
|
pygurobi/pygurobi.py
|
1
|
31972
|
"""
Functions to support rapid interactive modification of Gurobi models.
For reference on Gurobi objects such as Models, Variables, and Constraints, see
http://www.gurobi.com/documentation/7.0/refman/py_python_api_overview.html.
"""
import csv
import json
try:
import gurobipy as gp
except ImportError:
raise ImportError("gurobipy not installed. Please see {0} to download".format(
"https://www.gurobi.com/documentation/6.5/quickstart_mac/the_gurobi_python_interfac.html"))
# Assuming that constraints are of the form:
# constraintName(index1,index2,...,indexN).
# Assuming that variables are of the form:
# variableName[index1,index2,...,indexN]
CON_BRACKET_L = "("
CON_BRACKET_R = ")"
VAR_BRACKET_L = "["
VAR_BRACKET_R = "]"
# 13 July 2016 - Need to sort out capitalization here for attributes
# Attributes of a Gurobi variable
VAR_ATTRS = ["LB", "UB", "Obj", "VType", "VarName", "X", "Xn", "RC",
"BarX", "Start", "VarHintVal", "VarHintPri", "BranchPriority",
"VBasis", "PStart", "IISLB", "IISUB", "PWLObjCvx",
"SAObjLow", "SAObjUp", "SALBLow", "SALBUp",
"SAUBLow", "SAUBUp", "UnbdRay"]
# Attributes of a Gurobi constraint
CON_ATTRS = ["Sense", "RHS", "ConstrName", "Pi", "Slack",
"CBasis", "DStart", "Lazy", "IISConstr",
"SARHSLow", "SARHSUp", "FarkasDual"]
def read_model(filename):
"""
Read a model using gurobipy.
"""
m = gp.read(filename)
return m
def reoptimize(m):
"""
Update, reset, and optimize
a model.
"""
m.update()
m.reset()
m.optimize()
def get_variable_attrs():
"""
Return a list of variable attributes.
Details of attributes found at the Gurobi
website:
http://www.gurobi.com/documentation/6.5/refman/attributes.html
"""
return VAR_ATTRS
def get_constraint_attrs():
"""
Return a list of constraint attributes.
Details of attributes found at the Gurobi
website:
http://www.gurobi.com/documentation/6.5/refman/attributes.html
"""
return CON_ATTRS
def list_constraints(model):
"""
Print to screen the constraint sets in the model.
Show the name of each constraint set along with the
number of constraints in that set.
A constraint set is composed of all constraints
sharing the same string identifier before the indices:
A(2,3,4) and A(1,2,3) are in the same constraint set, A;
A(2,3,4) and B(2,3,4) are in constraint sets A and B, respectively
"""
sets = {}
constraints = model.getConstrs()
    # Assuming the constraint set name is separated from the indices by CON_BRACKET_L
for c in constraints:
name = c.constrName
split_name = name.split(CON_BRACKET_L)
set_name = split_name[0]
if set_name not in sets:
sets[set_name] = 1
else:
sets[set_name] += 1
print "Constraint set, Number of constraints"
print "\n".join(["{0}, {1}".format(name, number) for name, number
in sorted(sets.items())])
def list_variables(model):
"""
Print to screen the variable sets in the model.
Show the name of each variable set along with the
number of variables in that set.
A variable set is composed of all variables
sharing the same string identifier before the indices:
A[2,3,4] and A[1,2,3] are in the same variable set, A;
A[2,3,4] and B[2,3,4] are in variable sets A and B, respectively
"""
sets = {}
variables = model.getVars()
    # Assuming the variable set name is separated from the indices by VAR_BRACKET_L
for v in variables:
name = v.varName
split_name = name.split(VAR_BRACKET_L)
set_name = split_name[0]
if set_name not in sets:
sets[set_name] = 1
else:
sets[set_name] += 1
print "Variable set, Number of variables"
print "\n".join(["{0}, {1}".format(name, number) for name, number
in sorted(sets.items())])
def get_variables(model, name="", approx=False, filter_values={}, exclude=False):
"""
Return a list of variables from the model
selected by variable set name.
A variable set is composed of all variables
sharing the same string identifier before the indices:
A[2,3,4] and A[1,2,3] are in the same variable set, A;
    A[2,3,4] and B[2,3,4] are in variable sets A and B, respectively
PyGurobi by default assumes that *variable names* are separated
from indices by square brackets "[" and "]",
    For example, variables look like x[i,j] - "x" is the variable set name,
    and "i" and "j" are the variable's index values.
See the source code for more details.
"""
    variables = []
    if not name:
        variables = model.getVars()
    elif not approx:
        variables = [v for v in model.getVars()
                     if v.varName.split(VAR_BRACKET_L)[0] == name]
    else:
        variables = [v for v in model.getVars()
                     if name in v.varName.split(VAR_BRACKET_L)[0]]
if filter_values:
variables = filter_variables(variables, filter_values,
exclude=exclude)
return variables
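# Example sketch (variable names hypothetical): with variables x[a,2010] and
# x[b,2011] in the model,
#     get_variables(m, "x")                           # -> both variables
#     get_variables(m, "x", filter_values={1: 2010})  # -> only x[a,2010]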
def check_attr(attr, attributes):
"""
Check if the attr string case-insensitively corresponds to a
Gurobi attribute.
"""
for a in attributes:
if attr == a:
return True
if attr.lower() == a.lower():
return True
return False
def check_variable_attr(attr):
"""
Check if a string corresponds to a variable attribute.
Case-insensitive.
"""
var_attrs = get_variable_attrs()
return check_attr(attr, var_attrs)
def check_constraint_attr(attr):
"""
Check if a string corresponds to a constraint attribute.
Attributes are case-insensitive.
"""
con_attrs = get_constraint_attrs()
return check_attr(attr, con_attrs)
def get_variables_attr(attr, model="", name="", variables=""):
"""
Return a dictionary of variables names and their
corresponding attribute value.
    Specify either model and name parameters or supply a list of variables
"""
if not attr:
raise AttributeError("No attributes specified")
if not check_variable_attr(attr):
raise AttributeError("{0}\n{1}\n{2}".format(
"Attribute: {0} not a variable attribute.".format(attr),
"Get list of all variables attributes with the",
"get_variable_attrs() method."))
# Make a list of attributes at the top and check against
# them to make sure that the specified attribute belongs.
if not model and not variables:
raise ValueError("No model or variable list given")
variables = variables_check(model, name, variables)
return {v.varName: getattr(v, attr) for v in variables}
def print_variables_attr(attr, model="", name="", variables=""):
"""
Print to screen a dictionary of variables names and their
corresponding attribute value.
    Specify either model and name parameters or supply a list of variables
"""
var_dict = get_variables_attr(attr, model=model,
name=name, variables=variables)
print "\n".join(["{0}, {1}".format(v, k) for v, k in
sorted(var_dict.items())])
def set_variables_attr(attr, val, model="", name="", variables=""):
"""
Set an attribute of a model variable set.
    Specify either model and name parameters or supply a list of variables
"""
    if not attr or not val:
        raise AttributeError("No attribute or value specified")
if not check_variable_attr(attr):
raise AttributeError("{0}\n{1}\n{2}".format(
"Attribute: {0} not a variable attribute.".format(attr),
"Get list of all variables attributes with the",
"get_variable_attrs() method."))
if not model and not variables:
raise ValueError("No model or variables specified")
variables = variables_check(model, name, variables)
for v in variables:
setattr(v, attr, val)
def zero_all_objective_coeffs(model):
"""
Set all objective coefficients in a model to zero.
"""
if not model:
raise ValueError("No model given")
for v in model.getVars():
v.Obj = 0
def set_variables_bounds(lb="", ub="", model="", name="", variables=""):
"""
Set the lower bound and/or upper bound for a variables set.
    Specify either model and name parameters or supply a list of variables
"""
if lb:
set_variables_attr("lb", val=lb, model=model,
name=name, variables=variables)
if ub:
set_variables_attr("ub", val=ub, model=model,
name=name, variables=variables)
def remove_variables_from_model(model, name="", variables=""):
"""
Remove the given variables from the model.
    Specify either model and name parameters or supply a list of variables
"""
if not model and not variables:
raise ValueError("No model or variables given")
if not model:
raise ValueError("No model given")
variables = variables_check(model, name, variables)
for v in variables:
model.remove(v)
def variables_check(model, name, variables):
"""
Return the appropriate
variables based on the information supplied.
"""
if variables:
return variables
if model and name:
variables = get_variables(model, name)
if model and not name:
variables = model.getVars()
if not variables:
print "No variables found for\nmodel: {0},\nname: {1}".format(
model, name)
return variables
def get_variable_index_value(variable, index):
"""
Return the value of the given index
for a given variable.
Variable names are assumed to be given
as A[a,c,d, ....,f]
"""
value = variable.varName.split(",")[index].strip()
if VAR_BRACKET_R in value:
value = value[:-1]
elif VAR_BRACKET_L in value:
value = value.split(VAR_BRACKET_L)[1]
    # Not expecting many variable index values
    # to be floats
    try:
        value = int(value)
    except ValueError:
        pass
return value
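# Worked example (variable name hypothetical): for a variable named
# "x[a,2010,c]", index 0 -> "a" (leading "x[" stripped), index 1 -> 2010
# (cast to int), and index 2 -> "c" (trailing "]" stripped).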
def get_linexp_from_variables(variables):
"""
Return a linear expression from the supplied list
of variables.
"""
linexp = gp.LinExpr()
for v in variables:
linexp += v
return linexp
def sum_variables_by_index(index, model="", name="", variables=""):
"""
Return a dictionary mapping index values to the sum
of the solution values of all matching variables.
    Specify either model and name parameters or supply a list of variables
"""
var_dict = get_variables_by_index(index, model=model, name=name,
variables=variables)
if not var_dict:
raise ValueError("No variables found".format(index))
new_dict = {index_name: sum([v.X for v in index_vars])
for index_name, index_vars in
sorted(var_dict.items())}
return new_dict
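# Example sketch (hypothetical solved model): if x[a,2010] has X=1.5 and
# x[b,2010] has X=2.5, then sum_variables_by_index(1, model=m, name="x")
# returns {2010: 4.0}.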
def print_dict(dictionary):
"""
Print a dictionary to screen.
"""
print "\n".join(["{0}, {1}".format(index_name, index_value)
for index_name, index_value in
sorted(dictionary.items())])
def print_variables_sum_by_index(index, model="", name="", variables=""):
"""
Print a dictionary of variables, summed by index.
"""
var_dict = sum_variables_by_index(index, model=model,
name=name, variables=variables)
print_dict(var_dict)
def get_variables_by_index(index, model="", name="", variables=""):
"""
Return a dictionary mapping index values to lists of
matching variables.
    Specify either model and name parameters or supply a list of variables
"""
if index != 0 and not index:
raise IndexError("No index given")
if not model and not variables:
raise ValueError("No model or variables given")
if not (name and model) and not variables:
raise ValueError("No variables specified")
variables = variables_check(model, name, variables)
var_dict = {}
for v in variables:
value = get_variable_index_value(v, index)
if value not in var_dict:
var_dict[value] = [v]
else:
var_dict[value].append(v)
return var_dict
def filter_variables(variables, filter_values, exclude=False):
"""
Return a new list of variables that match the filter values
from the given variables list.
"""
if not variables:
raise ValueError("variables not given")
if not filter_values:
raise ValueError("Dictionary of filter values not given")
new_vars = []
for v in variables:
add = True
for index, value in filter_values.iteritems():
key = get_variable_index_value(v, index)
if key != value:
add = False
break
if add:
new_vars.append(v)
if exclude:
new_vars = [v for v in (set(variables)-set(new_vars))]
return new_vars
def get_variables_by_index_values(model, name, index_values, exclude=False):
    """
    Return a list of variables filtered by index values.
    If exclude is False then return variables that match the filters.
    If exclude is True then return variables that do not match the filters.
    """
    variables = get_variables(model, name, filter_values=index_values,
                              exclude=exclude)
    return variables
def get_variables_by_two_indices(index1, index2, model="", name="", variables=""):
"""
Return a dictionary of variables mapping index1 values
to dictionaries mapping
index2 values to matching variables.
    Specify either model and name parameters or supply a list of variables
"""
two_indices_dict = {}
index1_dict = get_variables_by_index(index1, model=model, name=name,
variables=variables)
for key, value in index1_dict.iteritems():
two_indices_dict[key] = get_variables_by_index(index2, variables=value)
return two_indices_dict
def print_variables(variables):
"""
    Print a list of variables, one name per line.
"""
print "\n".join([v.varName for v in variables])
def sum_variables_by_two_indices(index1, index2, model="", name="", variables=""):
"""
Return a dictionary mapping index1 values
to dictionaries of the given variables summed over index2.
"""
two_indices_dict = get_variables_by_two_indices(index1, index2,
model=model, name=name, variables=variables)
if not two_indices_dict:
raise ValueError("Inputs did not match with model variables")
new_dict = {}
for key, var_dict in two_indices_dict.iteritems():
new_dict[key] = {index_name: sum([v.X for v in index_vars])
for index_name, index_vars in
sorted(var_dict.items())}
return new_dict
def print_two_indices_dict(indices_dict):
"""
Print to screen a two level nested dictionary.
"""
for key, value in indices_dict.iteritems():
print "\n{0}".format(key)
print_dict(value)
def get_linexp_by_index(index, model="", name="", variables=""):
"""
Return a dictionary of index values to Gurobi linear expressions
corresponding to the summation of variables that match the index
value for the given index number.
    Specify either model and name parameters or supply a list of variables.
"""
linexps = {}
variables = variables_check(model, name, variables)
for v in variables:
value = get_variable_index_value(v, index)
if value not in linexps:
linexps[value] = gp.LinExpr(v)
else:
linexps[value] += v
return linexps
def print_constraints(constraints):
"""
    Print a list of constraints, one name per line.
"""
print "\n".join([c.constrName for c in constraints])
def get_constraints_multiple(model, names_list, approx=False):
"""
Return a list of constraints given by the constraint
set names in names_list.
"""
cons_list = []
for name in names_list:
cons_list.extend(get_constraints(model, name, approx))
return cons_list
def filter_constraints(constraints, filter_values, exclude=False):
"""
Return a new list of constraints that match the filter values from
the given constraints list.
"""
if not constraints:
raise ValueError("constraints not given")
if not filter_values:
raise ValueError("Dictionary of filter values not given")
new_cons = []
for c in constraints:
add = True
for index, value in filter_values.iteritems():
key = get_constraint_index_value(c, index)
try:
key.replace('"', "")
except AttributeError:
pass
if key != value:
add = False
break
if add:
new_cons.append(c)
if exclude:
# May want to add sorting by varName here
new_cons = [c for c in (set(constraints)-set(new_cons))]
return new_cons
def get_constraints(model, name="", approx=False, filter_values={},
exclude=False):
"""
Return a list of constraints from the model
selected by constraint set name.
A constraint set is composed of all constraints
sharing the same string identifier before the indices:
A(2,3,4) and A(1,2,3) are in the same constraint set, A;
A(2,3,4) and B(2,3,4) are in constraint sets A and B, respectively
PyGurobi by default assumes that constraint set names are
separated from indices by round brackets
"(" and ")". For example, constraints look like env(r,t) - where "env"
in the constraint set name
and "r" and "t" are the index values. See the source for more details.
"""
if not name:
return model.getConstrs()
constraints = []
if not approx:
constraints = [c for c in model.getConstrs()
if c.constrName.split(CON_BRACKET_L)[0] == name]
else:
constraints = [c for c in model.getConstrs()
if name in c.constrName.split(CON_BRACKET_L)[0]]
if filter_values:
constraints = filter_constraints(constraints, filter_values, exclude)
return constraints
def constraints_check(model, name, constraints):
"""
Check to see whether the user specified a list
of constraints or expects them to be retrieved
from the model.
"""
if constraints:
return constraints
if model and name:
constraints = get_constraints(model, name)
elif model and not name:
constraints = model.getConstrs()
return constraints
def get_constraints_attr(attr, model="", name="", constraints=""):
"""
Return a dictionary of constraint names and their
corresponding attribute value.
    Specify either model and name parameters or supply a list of constraints
"""
if not attr:
raise AttributeError("No attributes specified")
if not check_constraint_attr(attr):
raise AttributeError("{0}\n{1}\n{2}".format(
"Attribute: {0} not a constraint attribute.".format(attr),
"Get list of all variables attributes with the",
"get_constraint_attrs() method."))
# Check if the attr supplied is not a viable model attribute
if not model and not constraints:
raise ValueError("No model or constraint list given")
constraints = constraints_check(model, name, constraints)
return {c.constrName: getattr(c, attr) for c in constraints}
def print_constraints_attr(attr, model="", name="", constraints=""):
"""
Print to screen a list of constraint attribute values
given by the constraints specified in the names parameter.
    Specify either model and name parameters or supply a list of constraints
"""
constraints = get_constraints_attr(attr, model=model,
name=name, constraints=constraints)
print "\n".join(["{0}, {1}".format(c, k)
for c, k in sorted(constraints.items())])
def set_constraints_attr(attr, val, model="", name="", constraints=""):
"""
Set an attribute of a model constraint set.
    Specify either model and name parameters or supply a list of constraints
"""
if not attr or not val:
raise AttributeError("No attribute or value specified")
if not check_constraint_attr(attr):
raise AttributeError("{0}\n{1}\n{2}".format(
"Attribute: {0} not a variable attribute.".format(attr),
"Get list of all variables attributes with the",
"get_variable_attrs() method."))
if not model and not constraints:
raise ValueError("No model or constraints specified")
constraints = constraints_check(model, name, constraints)
for c in constraints:
setattr(c, attr, val)
def set_constraints_rhs_as_percent(percent, model="", name="", constraints=""):
"""
Set the right hand side (rhs) of a constraint set as a percentage of its current rhs.
    Specify either model and name parameters or supply a list of constraints
"""
    if percent != 0 and not percent:
        raise ValueError("No percent specified")
try:
percent = float(percent)
except ValueError:
raise ValueError("Percent must be a number. Percent: {}".format(percent))
if not model and not constraints:
raise ValueError("No model or constraints specified.")
constraints = constraints_check(model, name, constraints)
for c in constraints:
cur_rhs = getattr(c, "rhs")
setattr(c, "rhs", percent*cur_rhs)
def remove_constraints_from_model(model, name="", constraints=""):
"""
Remove the given constraints from the model.
    Specify either model and name parameters or supply a list of constraints
"""
if not model and not constraints:
raise ValueError("No model or constraints given")
if not model:
raise ValueError("No model given")
    # A model object must always be provided; only look the
    # constraints up from the model when an explicit list
    # was not supplied
if not constraints:
constraints = constraints_check(model, name, constraints)
for c in constraints:
model.remove(c)
def get_constraint_index_value(constraint, index):
"""
Return the value of the given index
for a given constraint.
Constraint names are assumed to be given
as A(a,c,d, ....,f)
"""
value = constraint.constrName.split(",")[index].strip()
if CON_BRACKET_R in value:
value = value[:-1]
elif CON_BRACKET_L in value:
value = value.split(CON_BRACKET_L)[1]
    # Not expecting many constraint index values
    # to be floats
    try:
        value = int(value)
    except ValueError:
        pass
return value
def get_constraints_by_index(index, model="", name="", constraints=""):
"""
Return a dictionary mapping index values to lists of
constraints having that index value.
    Specify either model and name parameters or supply a list of constraints
"""
if index != 0 and not index:
raise IndexError("No index given")
if not model and not constraints:
raise ValueError("No model or constraints given")
if not (name and model) and not constraints:
raise ValueError("No constraints specified")
constraints = constraints_check(model, name, constraints)
con_dict = {}
for c in constraints:
value = get_constraint_index_value(c, index)
if value not in con_dict:
con_dict[value] = [c]
else:
con_dict[value].append(c)
return con_dict
def get_constraints_by_index_values(model, name, index_values, exclude=False):
"""
Return a list of constraints filtered by index values.
    If exclude is False then return constraints that match the filters.
    If exclude is True then return constraints that do not match the filters.
"""
    constraints = get_constraints(model, name, filter_values=index_values,
                                  exclude=exclude)
return constraints
def get_grb_sense_from_string(sense):
"""
Return the GRB constraint sense object
corresponding to the supplied string.
Convention follows the Gurobi docs:
https://www.gurobi.com/documentation/6.5/refman/sense.html#attr:Sense
"""
if sense == "<":
return gp.GRB.LESS_EQUAL
elif sense == ">":
return gp.GRB.GREATER_EQUAL
elif sense == "=":
return gp.GRB.EQUAL
else:
raise ValueError("Constraint sense is not '<', '>', '='")
def add_constraint_constant(model, variables, constant, sense="<",
con_name=""):
"""
Add constraint to model that says the sum of
    variables must be equal to, less than or equal to, or greater than or equal to a constant.
"""
if not variables:
raise ValueError("variables list not provided")
linexp = get_linexp_from_variables(variables)
sense = get_grb_sense_from_string(sense)
if not con_name:
model.addConstr(linexp, sense, constant)
else:
model.addConstr(linexp, sense, constant, con_name)
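# Example sketch (names hypothetical): cap total harvest at 1000 units:
#     harv = get_variables(m, "harv")
#     add_constraint_constant(m, harv, 1000, sense="<", con_name="harv_cap")
#     m.update()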
def check_if_name_a_variable(name, model):
"""
Check if the supplied name corresponds to
a variable set name in the given model.
"""
variables = get_variables(model, name)
if not variables:
return False
return True
def check_if_name_a_constraint(name, model):
"""
    Check if the supplied name corresponds to
a constraint set name in the given model.
"""
constraints = get_constraints(model, name)
if not constraints:
return False
return True
def add_constraint_variables(model, variables1, variables2,
sense="=", con_name=""):
"""
Add constraint to model that says the sum of
    a list of variables must be equal to, less than or equal to,
    or greater than or equal to the sum of another list of variables.
"""
    if not variables1 or not variables2:
        raise ValueError("Variables list not provided")
linexp1 = get_linexp_from_variables(variables1)
linexp2 = get_linexp_from_variables(variables2)
sense = get_grb_sense_from_string(sense)
if not con_name:
model.addConstr(linexp1, sense, linexp2)
else:
model.addConstr(linexp1, sense, linexp2, con_name)
def graph_by_index(model, variables, index, title="", y_axis="", x_axis=""):
"""
Display a graph of the variable against the specified index
using matplotlib.
Matplotlib must already be installed to use this.
See: http://matplotlib.org/faq/installing_faq.html
"""
try:
import matplotlib.pyplot as plot
except ImportError:
raise ImportError("{0}\n{1}".format(
"Module Matplotlib not found.",
"Please download and install Matplotlib to use this function."))
fig = plot.figure()
ax = fig.add_subplot(111)
variables_sum = sum_variables_by_index(index, variables=variables)
keys, values = zip(*variables_sum.items())
y = range(len(variables_sum))
if title:
ax.set_title(title)
if y_axis:
ax.set_ylabel(y_axis)
if x_axis:
ax.set_xlabel(x_axis)
ax.bar(y, values)
#ax.legend(keys)
plot.show()
def graph_by_two_indices(model, variables, index1, index2, title="",
y_axis="", x_axis=""):
"""
Display a graph of the variable summed over index2
given by index1.
Matplotlib must already be installed to use this.
See: http://matplotlib.org/faq/installing_faq.html
"""
try:
import matplotlib.pyplot as plot
except ImportError:
raise ImportError("{0}\n{1}".format(
"Module Matplotlib not found.",
"Please download and install Matplotlib to use this function."))
fig = plot.figure()
ax = fig.add_subplot(111)
# We need to do this in reverse order to prepare it for graphing
variables_sum = sum_variables_by_two_indices(index2, index1,
variables=variables)
keys, values = zip(*variables_sum.items())
colours = ["b", "g", "r", "c", "y", "m", "k", "w"]
y = range(len(values[0]))
if title:
ax.set_title(title)
if y_axis:
ax.set_ylabel(y_axis)
if x_axis:
ax.set_xlabel(x_axis)
bars = []
prev_bars = [0 for bar in y]
colour_count = 0
for key, value in variables_sum.iteritems():
cur_bars = [k[1] for k in sorted(value.items(), key=lambda x: x[0])]
bars.append(ax.bar(y, cur_bars, bottom=prev_bars,
color=colours[colour_count]))
        prev_bars = [p + c for p, c in zip(prev_bars, cur_bars)]
colour_count += 1
if colour_count == len(colours) - 1:
colour_count = 0
ax.legend(keys)
plot.show()
def print_variables_to_csv(file_name, model="", name="", variables=""):
"""
Print the specified variables to a csv file
given by the file_name parameter.
    If no variables are specified then all model
    variables are written.
"""
if ".csv" not in file_name:
raise ValueError("Non csv file specified")
with open(file_name, "wb+") as write_file:
writer = csv.writer(write_file)
headers = ["Variable name", "Value"]
writer.writerow(headers)
variables = variables_check(model, name, variables)
# This will put quotes around strings, because the variable
# names have commas in them.
writer.writerows([ [v.varName, v.X] for v in variables])
def print_variables_to_csv_by_index(file_name, index,
model="", name="", variables=""):
"""
Print the sums of variables by the specified index
to a csv file.
Default behaviour of the function is to overwrite
the given file_name.
"""
if ".csv" not in file_name:
raise ValueError("Non csv file specified")
with open(file_name, "wb+") as write_file:
writer = csv.writer(write_file)
headers = ["Index", "Value"]
writer.writerow(headers)
variables_dict = sum_variables_by_index(index, model=model,
name=name, variables=variables)
if not variables_dict:
raise ValueError("No variables found")
writer.writerows([ [key, value]
for key, value in sorted(variables_dict.items())])
def print_variables_to_json_by_index(file_name, index, model="",
name="", variables="", index_alias=""):
"""
Print the specified variables to a json file given by file_name
organized by the specified index.
Formatted for reading into nvD3 applications.
Default behaviour is to overwrite file if one exists in
file_name's location.
"""
if ".json" not in file_name:
raise ValueError("Non json file specified")
index_name = index
if index_alias:
index_name = index_alias
var_dict = sum_variables_by_index(index, model=model,
name=name, variables=variables)
data = {index_name: [{ index_name: var_dict }] }
json.dump(data, open(file_name, "wb"))
|
mit
| -5,063,125,041,988,328,000 | 26.777585 | 99 | 0.613412 | false |
JeffRoy/mi-dataset
|
mi/dataset/driver/pco2w_abc/imodem/pco2w_abc_imodem_telemetered_driver.py
|
1
|
2797
|
#!/usr/bin/env python
"""
@package mi.dataset.driver.pco2w_abc.imodem
@file mi-dataset/mi/dataset/driver/pco2w_abc/imodem/pco2w_abc_imodem_telemetered_driver.py
@author Mark Worden
@brief Driver for the pco2w_abc_imodem instrument
Release notes:
Initial Release
"""
from mi.dataset.dataset_parser import DataSetDriverConfigKeys
from mi.dataset.dataset_driver import SimpleDatasetDriver
from mi.dataset.parser.pco2w_abc_imodem import Pco2wAbcImodemParser
from mi.dataset.parser.pco2w_abc_particles import \
Pco2wAbcParticleClassKey, \
Pco2wAbcImodemInstrumentBlankTelemeteredDataParticle, \
Pco2wAbcImodemInstrumentTelemeteredDataParticle, \
Pco2wAbcImodemPowerTelemeteredDataParticle, \
Pco2wAbcImodemControlTelemeteredDataParticle, \
Pco2wAbcImodemMetadataTelemeteredDataParticle
from mi.core.versioning import version
@version("15.6.0")
def parse(basePythonCodePath, sourceFilePath, particleDataHdlrObj):
"""
This is the method called by Uframe
    :param basePythonCodePath: the file system location of mi-dataset
    :param sourceFilePath: the full path and filename of the file to be parsed
    :param particleDataHdlrObj: Java object to consume the output of the parser
    :return: particleDataHdlrObj
"""
with open(sourceFilePath, 'rU') as stream_handle:
driver = Pco2wAbcImodemTelemeteredDriver(basePythonCodePath, stream_handle, particleDataHdlrObj)
driver.processFileStream()
return particleDataHdlrObj
class Pco2wAbcImodemTelemeteredDriver(SimpleDatasetDriver):
"""
All this needs to do is create a concrete _build_parser method
"""
def _build_parser(self, stream_handle):
parser_config = {
DataSetDriverConfigKeys.PARTICLE_MODULE: 'mi.dataset.parser.pco2w_abc_particles',
DataSetDriverConfigKeys.PARTICLE_CLASS: None,
DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT: {
Pco2wAbcParticleClassKey.METADATA_PARTICLE_CLASS:
Pco2wAbcImodemMetadataTelemeteredDataParticle,
Pco2wAbcParticleClassKey.POWER_PARTICLE_CLASS:
Pco2wAbcImodemPowerTelemeteredDataParticle,
Pco2wAbcParticleClassKey.INSTRUMENT_PARTICLE_CLASS:
Pco2wAbcImodemInstrumentTelemeteredDataParticle,
Pco2wAbcParticleClassKey.INSTRUMENT_BLANK_PARTICLE_CLASS:
Pco2wAbcImodemInstrumentBlankTelemeteredDataParticle,
Pco2wAbcParticleClassKey.CONTROL_PARTICLE_CLASS:
Pco2wAbcImodemControlTelemeteredDataParticle,
}
}
parser = Pco2wAbcImodemParser(parser_config,
stream_handle,
self._exception_callback)
return parser
|
bsd-2-clause
| 3,377,995,578,879,004,000 | 37.315068 | 104 | 0.725063 | false |
BrigDan/pykcd
|
xkcdb.py
|
1
|
3602
|
#!/usr/bin/env python3
import gi
import xkcd
from random import SystemRandom
gi.require_version("Gtk","3.0")
from gi.repository import Gtk
rand = SystemRandom()
class myWindow(Gtk.Window):
def __init__(self):
Gtk.Window.__init__(self, title="XKCD Browser")
#self.cur_comic stores the number of the latest comic
        # self.cur_comic stores the number of the comic currently displayed (starts at the latest)
#image display
self.image = Gtk.Image()
self.image_area = Gtk.Box()
self.image.set_from_file('/tmp/xkcd.png')
self.image_area.set_center_widget(self.image)
self.image_area.show_all()
#random button
self.rand_btn = Gtk.Button.new_with_label("random")
self.rand_btn.connect ("clicked", self.on_random_clicked)
#next button
self.nxt_btn = Gtk.Button.new_with_label(">")
self.nxt_btn.connect("clicked", self.on_nxt_clicked)
if self.cur_comic == xkcd.getLatestComicNum():
self.nxt_btn.set_sensitive(False)
#fast next button
self.fst_nxt_btn = Gtk.Button.new_with_label(">>")
self.fst_nxt_btn.connect("clicked", self.on_fst_nxt_clicked)
latest = xkcd.getLatestComicNum()
if self.cur_comic > latest - 5:
self.fst_nxt_btn.set_sensitive(False)
#previous button
self.prv_btn = Gtk.Button.new_with_label("<")
self.prv_btn.connect("clicked", self.on_prv_clicked)
#fast previous button
self.fst_prv_btn = Gtk.Button.new_with_label("<<")
self.fst_prv_btn.connect("clicked", self.on_fst_prv_clicked)
#organise buttons ~~~~~~~~~~~~~~~~~
self.main_box = Gtk.VBox()
self.main_box.add(self.image_area)
self.button_box = Gtk.HButtonBox()
self.button_box.set_homogeneous(False)
self.button_box.pack_start(self.fst_prv_btn, False, True, 0)
self.button_box.pack_start(self.prv_btn, False, True, 0)
self.button_box.pack_start(self.rand_btn, False, True, 0)
self.button_box.pack_start(self.nxt_btn, False, True, 0)
self.button_box.pack_start(self.fst_nxt_btn, False, True, 0)
self.main_box.add(self.button_box)
self.add(self.main_box)
#initialise ~~~~~~~~~~~~~~~~~~~~~~~
self.connect("delete-event", Gtk.main_quit)
self.show_all()
def on_nxt_clicked(self, button):
self.cur_comic += 1
self.update_image()
def on_fst_nxt_clicked(self, button):
self.cur_comic += 5
self.update_image()
def on_prv_clicked(self, button):
self.cur_comic -= 1
self.update_image()
def on_fst_prv_clicked(self, button):
self.cur_comic -= 5
self.update_image()
def on_random_clicked(self, button):
self.cur_comic=rand.randint(1,xkcd.getLatestComicNum())
self.update_image()
#Whenever we want to update the comic displayed we'll use this
def update_image(self):
xkcd.getComic(self.cur_comic).download(output='/tmp/',outputFile='xkcd.png')
self.image.set_from_file('/tmp/xkcd.png')
latest = xkcd.getLatestComicNum()
if self.cur_comic == latest:
self.nxt_btn.set_sensitive(False)
elif self.cur_comic < latest:
self.nxt_btn.set_sensitive(True)
if self.cur_comic > latest - 5:
self.fst_nxt_btn.set_sensitive(False)
elif self.cur_comic <= latest - 5:
self.fst_nxt_btn.set_sensitive(True)
comic = xkcd.getLatestComic()
comic.download(output="/tmp/",outputFile="xkcd.png")
Display = myWindow()
Gtk.main()
|
gpl-3.0
| -4,089,844,444,127,332,000 | 32.981132 | 84 | 0.610217 | false |
TREND50/GRANDproto_DAQ
|
cmd_tektro.py
|
1
|
6783
|
#30/06/2017
#VELLEYEN Stephane
#############################
import os,sys
import vxi11
instr = vxi11.Instrument("192.168.1.10")
print(instr.ask("*IDN?"))
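# Assumed command-line usage, inferred from the branches below (argument
# names hypothetical):
#     python cmd_tektro.py 1 <ch> <SIN|SQU|RAMP|PULS> <freq> <vcc_mV>  # custom
#     python cmd_tektro.py 2    # preset: 66 MHz sine, +/-50 mV, channel 2
#     python cmd_tektro.py 3    # preset: 100 Hz square, +/-50 mV, channel 2
#     python cmd_tektro.py x    # anything else: default 66 MHz sine preset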
choix=sys.argv[1]
if choix =="1": #Choix des parametres
ch=sys.argv[2]
func=sys.argv[3]
freq=sys.argv[4]
vcc=sys.argv[5]
vcc=int(vcc)
    vmax=vcc/2
    vmin=vcc/-2
if vcc>300:
print "Erreur vcc>300mv"
vmax=150
vmin=-150
print "Defaut: ",vmax
print "Defaut: ",vmin
else:
print ""
voffset=0
phase=0
out="ON"
symm=100
width=500
lead=10
trail=10
delay=20
dcyc=50
out="ON"
if func=="RAMP":
#set ramp symmetry
symmcmd = "SOUR{0}:FUNC:RAMP:SYMM {1}".format(ch,symm)
print "Setting ramp symmetry:",symmcmd
instr.write(symmcmd)
elif func=="PULS":
#set pulse width
widthcmd = "SOUR{0}:PULS:WIDT {1}ns".format(ch,width)
print "Setting pulse width:",widthcmd
instr.write(widthcmd)
#set edges
leadcmd = "SOUR{0}:PULS:TRAN:LEAD {1}ns".format(ch,lead)
print "Setting pulse leading:",leadcmd
instr.write(leadcmd)
trailcmd = "SOUR{0}:PULS:TRAN:TRA {1}ns".format(ch,trail)
print "Setting pulse trailing:",trailcmd
instr.write(trailcmd)
#set pulse delay
delaycmd = "SOUR{0}:PULS:DEL {1}ms".format(ch,delay)
print "Setting pulse delay:",delaycmd
instr.write(delaycmd)
#set pulse DCYC
dcyccmd = "SOUR{0}:PULS:DCYC {1}".format(ch,dcyc)
print "Setting pulse delay:",dcyccmd
instr.write(dcyccmd)
#set function
funccmd = "SOUR{0}:FUNC {1}".format(ch,func)
print "Setting function:",funccmd
instr.write(funccmd)
#Set frequency
freqcmd = "SOUR{0}:FREQ:FIX {1}".format(ch,freq)
print "Setting frequency:",freqcmd
instr.write(freqcmd)
#set high level
vmaxcmd = "SOUR{0}:VOLTAGE:HIGH {1}mV".format(ch,vmax)
print "Setting HIGHT Voltage:",vmaxcmd
instr.write(vmaxcmd)
#set low level
vmincmd = "SOUR{0}:VOLTAGE:LOW {1}mV".format(ch,vmin)
print "Setting Low Voltage:",vmincmd
instr.write(vmincmd)
#set offset
voffcmd = "SOUR{0}:VOLTAGE:OFFS {1}V".format(ch,voffset)
print "Setting offset Voltage:",voffcmd
instr.write(voffcmd)
#set phase
phasecmd = "SOUR{0}:PHAS {1}DEG".format(ch,phase)
print "Setting phase:",phasecmd
instr.write(phasecmd)
#set OUTPUT ON
outcmd = "OUTP{0} {1}".format(ch,out)
print "Setting outout:",outcmd
instr.write(outcmd)
elif choix=="2":# Sinus 66MHz
ch=2
func="SIN"
freq=66 #MHz
voffset=0 #V
vmax=50 #mV
vmin=-50 #mV
phase=0 #DEG
out="ON"
vcc=vmax+abs(vmin)
if vcc>300:
print "Erreur vcc>300mv"
vmax=150 #mV
vmin=-150 #mV
print "Defaut: ",vmax
print "Defaut: ",vmin
else:
print ""
#set function
funccmd = "SOUR{0}:FUNC {1}".format(ch,func)
print "Setting function:",funccmd
instr.write(funccmd)
#Set frequency
freqcmd = "SOUR{0}:FREQ:FIX {1}MHz".format(ch,freq)
print "Setting frequency(MHz):",freqcmd
instr.write(freqcmd)
#set high level
vmaxcmd = "SOUR{0}:VOLTAGE:IMM:HIGH {1}mV".format(ch,vmax)
print "Setting HIGHT Voltage(mV):",vmaxcmd
instr.write(vmaxcmd)
#set low level
vmincmd = "SOUR{0}:VOLTAGE:IMM:LOW {1}mV".format(ch,vmin)
print "Setting Low Voltage(mV):",vmincmd
instr.write(vmincmd)
#set offset
voffcmd = "SOUR{0}:VOLTAGE:OFFS {1}V".format(ch,voffset)
print "Setting offset Voltage(V):",voffcmd
instr.write(voffcmd)
#set phase
phasecmd = "SOUR{0}:PHAS {1}DEG".format(ch,phase)
print "Setting phase:",phasecmd
instr.write(phasecmd)
#set OUTPUT ON
outcmd = "OUTP{0} {1}".format(ch,out)
print "Setting outout:",outcmd
instr.write(outcmd)
elif choix =="3":# Square
ch=2
func="SQU"
freq=100 #Hz
voffset=0 #V
vmax=50 #mV
vmin=-50 #mV
phase=0 #DEG
out="ON"
vcc=vmax+abs(vmin)
if vcc>300:
print "Erreur vcc>300mv"
vmax=150 #mV
vmin=-150 #mV
print "Defaut: ",vmax
print "Defaut: ",vmin
else:
print ""
#set function
funccmd = "SOUR{0}:FUNC {1}".format(ch,func)
print "Setting function:",funccmd
instr.write(funccmd)
#Set frequency
freqcmd = "SOUR{0}:FREQ:FIX {1}Hz".format(ch,freq)
print "Setting frequency:",freqcmd
instr.write(freqcmd)
#set high level
vmaxcmd = "SOUR{0}:VOLTAGE:HIGH {1}mV".format(ch,vmax)
print "Setting HIGHT Voltage:",vmaxcmd
instr.write(vmaxcmd)
#set low level
vmincmd = "SOUR{0}:VOLTAGE:LOW {1}mV".format(ch,vmin)
print "Setting Low Voltage:",vmincmd
instr.write(vmincmd)
#set offset
voffcmd = "SOUR{0}:VOLTAGE:OFFS {1}V".format(ch,voffset)
print "Setting offset Voltage:",voffcmd
instr.write(voffcmd)
#set phase
phasecmd = "SOUR{0}:PHAS {1}DEG".format(ch,phase)
print "Setting phase:",phasecmd
instr.write(phasecmd)
#set OUTPUT ON
outcmd = "OUTP{0} {1}".format(ch,out)
print "Setting outout:",outcmd
instr.write(outcmd)
else:  # Default preset
ch=2
func="SIN"
freq=66 #MHz
vmax =50 #mV
vmin =-50 #mV
voffset=0 #V
phase=0 #DEG
symm=100
width=500
lead=10
trail=10
delay=20
dcyc=50
out="ON"
if func=="RAMP":
#set ramp symmetry
symmcmd = "SOUR{0}:FUNC:RAMP:SYMM {1}".format(ch,symm)
print "Setting ramp symmetry:",symmcmd
instr.write(symmcmd)
elif func=="PULS":
#set pulse width
widthcmd = "SOUR{0}:PULS:WIDT {1}ns".format(ch,width)
print "Setting pulse width:",widthcmd
instr.write(widthcmd)
#set edges
leadcmd = "SOUR{0}:PULS:TRAN:LEAD {1}ns".format(ch,lead)
print "Setting pulse leading:",leadcmd
instr.write(leadcmd)
trailcmd = "SOUR{0}:PULS:TRAN:TRA {1}ns".format(ch,trail)
print "Setting pulse trailing:",trailcmd
instr.write(trailcmd)
#set pulse delay
delaycmd = "SOUR{0}:PULS:DEL {1}ms".format(ch,delay)
print "Setting pulse delay:",delaycmd
instr.write(delaycmd)
#set pulse DCYC
dcyccmd = "SOUR{0}:PULS:DCYC {1}".format(ch,dcyc)
print "Setting pulse delay:",dcyccmd
instr.write(dcyccmd)
vcc=vmax+abs(vmin)
if vcc>300:
print "Erreur vcc>300mv"
vmax=150 #mV
vmin=-150 #mV
print "Defaut: ",vmax
print "Defaut: ",vmin
else:
print ""
#set function
funccmd = "SOUR{0}:FUNC {1}".format(ch,func)
print "Setting function:",funccmd
instr.write(funccmd)
#Set frequency
freqcmd = "SOUR{0}:FREQ:FIX {1}MHz".format(ch,freq)
print "Setting frequency:",freqcmd
instr.write(freqcmd)
#set high level
vmaxcmd = "SOUR{0}:VOLTAGE:HIGH {1}mV".format(ch,vmax)
print "Setting HIGHT Voltage:",vmaxcmd
instr.write(vmaxcmd)
#set low level
vmincmd = "SOUR{0}:VOLTAGE:LOW {1}mV".format(ch,vmin)
print "Setting Low Voltage:",vmincmd
instr.write(vmincmd)
#set offset
voffcmd = "SOUR{0}:VOLTAGE:OFFS {1}V".format(ch,voffset)
print "Setting offset Voltage:",voffcmd
instr.write(voffcmd)
#set phase
phasecmd = "SOUR{0}:PHAS {1}DEG".format(ch,phase)
print "Setting phase:",phasecmd
instr.write(phasecmd)
#set OUTPUT ON
outcmd = "OUTP{0} {1}".format(ch,out)
print "Setting outout:",outcmd
instr.write(outcmd)
|
gpl-3.0
| 8,711,170,751,405,201,000 | 19.369369 | 59 | 0.677871 | false |
tgbugs/pyontutils
|
neurondm/test/test_integration.py
|
1
|
2300
|
import unittest
from pathlib import Path
import pytest
from pyontutils.utils import get_working_dir
from pyontutils.config import auth
from pyontutils.integration_test_helper import _TestScriptsBase, Folders, Repo
import neurondm
class TestScripts(Folders, _TestScriptsBase):
""" woo! """
only = tuple()
lasts = tuple()
neurons = ('neurondm/example',
'neurondm/phenotype_namespaces',
'neurondm/models/allen_cell_types',
'neurondm/models/phenotype_direct',
'neurondm/models/basic_neurons',
'neurondm/models/huang2017',
'neurondm/models/ma2015',
'neurondm/models/cuts',
'neurondm/build',
'neurondm/sheets',)
skip = tuple()
olr = auth.get_path('ontology-local-repo')
if olr.exists():
ont_repo = Repo(olr)
# FIXME these aren't called?
post_load = lambda : (ont_repo.remove_diff_untracked(), ont_repo.checkout_diff_tracked())
post_main = lambda : (ont_repo.remove_diff_untracked(), ont_repo.checkout_diff_tracked())
### handle ontology branch behavior
checkout_ok = neurondm.core.ont_checkout_ok
print('checkout ok:', checkout_ok)
ont_branch = ont_repo.active_branch.name
if not checkout_ok and ont_branch != 'neurons':
neurons += ('neurondm/core', 'neurondm/lang',) # FIXME these two are ok for no repo but not wrong branch?!
skip += tuple(n.split('/')[-1] for n in neurons)
else:
lasts += tuple(f'neurondm/{s}.py' for s in neurons)
else:
skip += tuple(n.split('/')[-1] for n in neurons)
### build mains
mains = {} # NOTE mains run even if this is empty ? is this desired?
module_parent = Path(__file__).resolve().parent.parent.as_posix()
working_dir = get_working_dir(__file__)
if working_dir is None:
# python setup.py test will run from the module_parent folder
        # I'm pretty sure the split was only implemented because I was trying
# to run all tests from the working_dir in one shot, but that has
# a number of problems with references to local vs installed packages
working_dir = module_parent
print(module_parent)
print(working_dir)
TestScripts.populate_tests(neurondm, working_dir, mains, skip=skip, lasts=lasts,
module_parent=module_parent, only=only, do_mains=True)
|
mit
| -2,036,565,728,753,498,400 | 33.328358 | 115 | 0.669565 | false |
akx/shoop
|
shoop/core/models/_units.py
|
1
|
1973
|
# -*- coding: utf-8 -*-
# This file is part of Shoop.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import with_statement
from decimal import Decimal
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from parler.models import TranslatableModel, TranslatedFields
from shoop.core.fields import InternalIdentifierField
from shoop.utils.numbers import bankers_round, parse_decimal_string
__all__ = ("SalesUnit",)
@python_2_unicode_compatible
class SalesUnit(TranslatableModel):
identifier = InternalIdentifierField(unique=True)
decimals = models.PositiveSmallIntegerField(default=0, verbose_name=_(u"allowed decimals"))
translations = TranslatedFields(
name=models.CharField(max_length=128, verbose_name=_('name')),
short_name=models.CharField(max_length=128, verbose_name=_('short name')),
)
class Meta:
verbose_name = _('sales unit')
verbose_name_plural = _('sales units')
def __str__(self):
return str(self.safe_translation_getter("name", default=None))
@property
def allow_fractions(self):
return self.decimals > 0
@cached_property
def quantity_step(self):
"""
Get the quantity increment for the amount of decimals this unit allows.
For 0 decimals, this will be 1; for 1 decimal, 0.1; etc.
:return: Decimal in (0..1]
:rtype: Decimal
"""
# This particular syntax (`10 ^ -n`) is the same that `bankers_round` uses
# to figure out the quantizer.
return Decimal(10) ** (-int(self.decimals))
def round(self, value):
return bankers_round(parse_decimal_string(value), self.decimals)
|
agpl-3.0
| -1,413,495,837,681,102,800 | 30.822581 | 95 | 0.692854 | false |
xiangke/pycopia
|
process/pycopia/rsynclib.py
|
1
|
2736
|
#!/usr/bin/python2.4
# vim:ts=4:sw=4:softtabstop=4:smarttab:expandtab
#
# $Id$
#
# Copyright (C) 1999-2006 Keith Dart <keith@kdart.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
"""
Wrapper for the 'rsync' program. See the rsync manpage for more details.
"""
from pycopia import proctools
try:
RSYNC = proctools.which("rsync")
except ValueError:
raise ImportError, "rsync program not found!"
TESTED_VERSIONS = ["rsync version 2.5.5 protocol version 26"]
def rsync(src, dst, password=None, extraopts=None, logfile=None):
"""rsync(src, dst, [password, [extraopts, [logfile]]])
Usage: rsync [OPTION]... SRC [SRC]... [USER@]HOST:DEST
or rsync [OPTION]... [USER@]HOST:SRC DEST
or rsync [OPTION]... SRC [SRC]... DEST
or rsync [OPTION]... [USER@]HOST::SRC [DEST]
or rsync [OPTION]... SRC [SRC]... [USER@]HOST::DEST
or rsync [OPTION]... rsync://[USER@]HOST[:PORT]/SRC [DEST]
You might want to set the RSYNC_RSH environment variable first.
"""
opts = "-q"
if extraopts:
opts += extraopts
CMD = "%s %s %s %s" % (RSYNC, opts, src, dst)
rsync = proctools.spawnpty(CMD, logfile=logfile)
# assume a password will be requested if one is supplied here
if password is not None:
from pycopia import expect
ersync = expect.Expect(rsync)
ersync.expect("password:", timeout=2.0)
ersync.writeln(password)
del ersync
rsync.wait()
return rsync.exitstatus # user can check exit status for success
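# Hedged usage sketch (paths and host hypothetical):
#     status = rsync("/data/src/", "user@backup:/data/dst/", extraopts=" -az")
#     if status != 0:
#         print "rsync failed with exit status", status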
def rsync_version():
"""rsync_version() Return the version string for the rsync command on this
system."""
rsync = proctools.spawnpipe("rsync --version")
ver = rsync.readline() # version on first line of output
rsync.read() # discard rest
rsync.close()
return ver
def check_version():
"""Checks that the installed rsync program is the same one that this module was
tested with (and written for)."""
ver = rsync_version()[15:20]
for vs in TESTED_VERSIONS:
if ver == vs[15:20]:
return 1
return 0
if __name__ == "__main__":
if check_version():
print "your rsync version is good!"
else:
print "your rsync version is an untested one, beware of errors!"
|
lgpl-2.1
| 7,716,549,130,176,562,000 | 31.963855 | 83 | 0.660453 | false |
lmazuel/azure-sdk-for-python
|
azure-mgmt-datalake-store/azure/mgmt/datalake/store/models/operation_list_result.py
|
1
|
1314
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class OperationListResult(Model):
"""The list of available operations for Data Lake Store.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar value: The results of the list operation.
:vartype value: list[~azure.mgmt.datalake.store.models.Operation]
:ivar next_link: The link (url) to the next page of results.
:vartype next_link: str
"""
_validation = {
'value': {'readonly': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[Operation]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(self):
super(OperationListResult, self).__init__()
self.value = None
self.next_link = None
|
mit
| 546,748,436,018,200,060 | 31.85 | 76 | 0.577626 | false |
benagricola/exabgp
|
lib/exabgp/configuration/environment.py
|
1
|
9259
|
# encoding: utf-8
"""
environment.py
Created by Thomas Mangin on 2011-11-29.
Copyright (c) 2011-2015 Exa Networks. All rights reserved.
"""
# XXX: raised exception not caught
# XXX: reloading mid-program not possible
# XXX: validation for path, file, etc not correctly test (ie surely buggy)
import os
import sys
import pwd
import syslog
from exabgp.util.ip import isip
# ===================================================================== NoneDict
#
class NoneDict (dict):
def __getitem__ (self, name):
return None
nonedict = NoneDict()
# ================================================================== environment
# XXX: FIXME: Upper case for class !
class environment (object):
# class returned on issues
class Error (Exception):
pass
application = 'unset'
# the configuration to be set by the program
configuration = {}
# the final parsed settings
_settings = None
location = os.path.normpath(sys.argv[0]) if sys.argv[0].startswith('/') else os.path.normpath(os.path.join(os.getcwd(),sys.argv[0]))
log_levels = ['EMERG', 'ALERT', 'CRIT', 'CRITICAL', 'ERR', 'ERROR', 'WARNING', 'NOTICE', 'INFO', 'DEBUG']
@staticmethod
def setup (conf):
if environment._settings:
# nosetest is performing the setup multiple times, so we can not raise anymore
# raise RuntimeError('You already initialised the environment')
return environment._settings
environment._settings = _env(conf)
return environment._settings
@staticmethod
def settings ():
if not environment._settings:
raise RuntimeError('You can not have an import using settings() before main() initialised environment')
return environment._settings
@staticmethod
def root (path):
roots = environment.location.split(os.sep)
location = []
for index in range(len(roots)-1,-1,-1):
if roots[index] == 'lib':
if index:
location = roots[:index]
break
root = os.path.join(*location)
paths = [
os.path.normpath(os.path.join(os.path.join(os.sep,root,path))),
os.path.normpath(os.path.expanduser(environment.unquote(path))),
os.path.normpath(os.path.join('/',path)),
]
return paths
@staticmethod
def integer (_):
return int(_)
@staticmethod
def real (_):
return float(_)
@staticmethod
def lowunquote (_):
return _.strip().strip('\'"').lower()
@staticmethod
def unquote (_):
return _.strip().strip('\'"')
@staticmethod
def quote (_):
return "'%s'" % str(_)
@staticmethod
def nop (_):
return _
@staticmethod
def boolean (_):
return _.lower() in ('1','yes','on','enable','true')
@staticmethod
def api (_):
encoder = _.lower()
if encoder not in ('text','json'):
raise TypeError('invalid encoder')
return encoder
@staticmethod
def methods (_):
return _.upper().split()
@staticmethod
def list (_):
return "'%s'" % ' '.join(_)
@staticmethod
def lower (_):
return str(_).lower()
@staticmethod
def ip (_):
if isip(_):
return _
raise TypeError('ip %s is invalid' % _)
@staticmethod
def optional_ip (_):
if not _ or isip(_):
return _
raise TypeError('ip %s is invalid' % _)
@staticmethod
def user (_):
# XXX: incomplete
try:
pwd.getpwnam(_)
# uid = answer[2]
except KeyError:
raise TypeError('user %s is not found on this system' % _)
return _
@staticmethod
def folder (path):
paths = environment.root(path)
        options = [p for p in paths if os.path.exists(p)]
if not options:
raise TypeError('%s does not exists' % path)
first = options[0]
if not first:
raise TypeError('%s does not exists' % first)
return first
@staticmethod
def path (path):
split = sys.argv[0].split('lib/exabgp')
if len(split) > 1:
prefix = os.sep.join(split[:1])
if prefix and path.startswith(prefix):
path = path[len(prefix):]
home = os.path.expanduser('~')
if path.startswith(home):
return "'~%s'" % path[len(home):]
return "'%s'" % path
@staticmethod
def conf (path):
first = environment.folder(path)
if not os.path.isfile(first):
raise TypeError('%s is not a file' % path)
return first
@staticmethod
def exe (path):
first = environment.conf(path)
if not os.access(first, os.X_OK):
raise TypeError('%s is not an executable' % first)
return first
@staticmethod
def syslog (path):
path = environment.unquote(path)
if path in ('stdout','stderr'):
return path
if path.startswith('host:'):
return path
return path
@staticmethod
def redirector (name):
if name == 'url' or name.startswith('icap://'):
return name
raise TypeError('invalid redirector protocol %s, options are url or header' % name)
@staticmethod
def syslog_value (log):
        if log not in environment.log_levels:
            raise TypeError('invalid log level %s' % log)
        # the syslog module exposes LOG_CRIT/LOG_ERR, not LOG_CRITICAL/LOG_ERROR
        if log == 'CRITICAL':
            log = 'CRIT'
        if log == 'ERROR':
            log = 'ERR'
        return getattr(syslog,'LOG_%s' % log)
@staticmethod
def syslog_name (log):
for name in environment.log_levels:
if name == 'CRITICAL':
name = 'CRIT'
if name == 'ERROR':
name = 'ERR'
if getattr(syslog,'LOG_%s' % name) == log:
return name
raise TypeError('invalid log level %s' % log)
@staticmethod
def umask_read (_):
return int(_, 8)
@staticmethod
def umask_write (_):
return "'%s'" % (oct(_))
@staticmethod
def default ():
for section in sorted(environment.configuration):
if section in ('internal','debug'):
continue
for option in sorted(environment.configuration[section]):
values = environment.configuration[section][option]
default = "'%s'" % values['value'] if values['write'] in (environment.list,environment.path,environment.quote,environment.syslog) else values['value']
yield '%s.%s.%s %s: %s. default (%s)' % (environment.application,section,option,' '*(20-len(section)-len(option)),values['help'],default)
@staticmethod
def iter_ini (diff=False):
for section in sorted(environment._settings):
if section in ('internal','debug'):
continue
header = '\n[%s.%s]' % (environment.application,section)
for k in sorted(environment._settings[section]):
v = environment._settings[section][k]
if diff and environment.configuration[section][k]['read'](environment.configuration[section][k]['value']) == v:
continue
if header:
yield header
header = ''
yield '%s = %s' % (k,environment.configuration[section][k]['write'](v))
@staticmethod
def iter_env (diff=False):
for section,values in environment._settings.items():
if section in ('internal','debug'):
continue
for k,v in values.items():
if diff and environment.configuration[section][k]['read'](environment.configuration[section][k]['value']) == v:
continue
if environment.configuration[section][k]['write'] == environment.quote:
yield "%s.%s.%s='%s'" % (environment.application,section,k,v)
continue
yield "%s.%s.%s=%s" % (environment.application,section,k,environment.configuration[section][k]['write'](v))
# ========================================================================= _env
#
import ConfigParser
from exabgp.util.hashtable import HashTable
def _env (conf):
here = os.path.join(os.sep,*os.path.join(environment.location.split(os.sep)))
location, directory = os.path.split(here)
while directory:
if directory == 'lib':
location = os.path.join(location,'lib')
break
location, directory = os.path.split(location)
# we did not break - ie, we did not find the location in the normal path.
else:
# let's try to see if we are running from the QA folder (for unittesting)
location, directory = os.path.split(here)
while directory:
if directory == 'dev':
location = os.path.join(location,'lib')
break
location, directory = os.path.split(location)
else:
# oh ! bad, let set the path to something ...
location = '/lib'
_conf_paths = []
if conf:
_conf_paths.append(os.path.abspath(os.path.normpath(conf)))
if location:
_conf_paths.append(os.path.normpath(os.path.join(location,'etc',environment.application,'%s.env' % environment.application)))
_conf_paths.append(os.path.normpath(os.path.join('/','etc',environment.application,'%s.env' % environment.application)))
env = HashTable()
ini = ConfigParser.ConfigParser()
ini_files = [path for path in _conf_paths if os.path.exists(path)]
if ini_files:
ini.read(ini_files[0])
for section in environment.configuration:
default = environment.configuration[section]
for option in default:
convert = default[option]['read']
try:
proxy_section = '%s.%s' % (environment.application,section)
env_name = '%s.%s' % (proxy_section,option)
rep_name = env_name.replace('.','_')
if env_name in os.environ:
conf = os.environ.get(env_name)
elif rep_name in os.environ:
conf = os.environ.get(rep_name)
else:
conf = environment.unquote(ini.get(proxy_section,option,nonedict))
# name without an = or : in the configuration and no value
if conf is None:
conf = default[option]['value']
except (ConfigParser.NoSectionError,ConfigParser.NoOptionError):
conf = default[option]['value']
try:
env.setdefault(section,HashTable())[option] = convert(conf)
except TypeError:
raise environment.Error('invalid value for %s.%s : %s' % (section,option,conf))
return env
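# Resolution order sketch for one setting, e.g. exabgp.daemon.user (setting
# name hypothetical): a process environment variable "exabgp.daemon.user"
# (or "exabgp_daemon_user") wins; otherwise the first existing ini file in
# _conf_paths is consulted under section [exabgp.daemon]; otherwise the
# coded default from environment.configuration is used.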
|
bsd-3-clause
| -5,254,560,539,620,552,000 | 26.232353 | 154 | 0.654066 | false |
kizniche/Mycodo
|
mycodo/inputs/rpi_signal_revolutions.py
|
1
|
4596
|
# coding=utf-8
import time
import copy
from mycodo.inputs.base_input import AbstractInput
# Measurements
measurements_dict = {
0: {
'measurement': 'revolutions',
'unit': 'rpm'
}
}
# Input information
INPUT_INFORMATION = {
'input_name_unique': 'SIGNAL_RPM',
'input_manufacturer': 'Raspberry Pi',
'input_name': 'Signal (Revolutions)',
'input_library': 'pigpio',
'measurements_name': 'RPM',
'measurements_dict': measurements_dict,
'options_enabled': [
'gpio_location',
'rpm_pulses_per_rev',
'weighting',
'sample_time',
'period',
'pre_output'
],
'options_disabled': ['interface'],
'dependencies_module': [
('internal', 'file-exists /opt/mycodo/pigpio_installed', 'pigpio')
],
'interfaces': ['GPIO'],
'weighting': 0.0,
'sample_time': 2.0,
'rpm_pulses_per_rev': 1.0
}
class InputModule(AbstractInput):
""" A sensor support class that monitors rpm """
def __init__(self, input_dev, testing=False):
super(InputModule, self).__init__(input_dev, testing=testing, name=__name__)
self.pigpio = None
self.gpio = None
self.weighting = None
self.rpm_pulses_per_rev = None
self.sample_time = None
if not testing:
self.initialize_input()
def initialize_input(self):
import pigpio
self.pigpio = pigpio
self.gpio = int(self.input_dev.gpio_location)
self.weighting = self.input_dev.weighting
self.rpm_pulses_per_rev = self.input_dev.rpm_pulses_per_rev
self.sample_time = self.input_dev.sample_time
def get_measurement(self):
""" Gets the revolutions """
pi = self.pigpio.pi()
if not pi.connected: # Check if pigpiod is running
self.logger.error("Could not connect to pigpiod. Ensure it is running and try again.")
return None
self.return_dict = copy.deepcopy(measurements_dict)
read_revolutions = ReadRPM(pi, self.gpio, self.pigpio, self.rpm_pulses_per_rev, self.weighting)
time.sleep(self.sample_time)
rpm = read_revolutions.RPM()
read_revolutions.cancel()
pi.stop()
self.value_set(0, rpm)
return self.return_dict
class ReadRPM:
"""
A class to read pulses and calculate the RPM
"""
def __init__(self, pi, gpio, pigpio, pulses_per_rev=1.0, weighting=0.0):
"""
Instantiate with the Pi and gpio of the RPM signal
to monitor.
Optionally the number of pulses for a complete revolution
may be specified. It defaults to 1.
Optionally a weighting may be specified. This is a number
between 0 and 1 and indicates how much the old reading
affects the new reading. It defaults to 0 which means
the old reading has no effect. This may be used to
smooth the data.
"""
self.pigpio = pigpio
self.pi = pi
self.gpio = gpio
self.pulses_per_rev = pulses_per_rev
self._watchdog = 200 # Milliseconds.
if weighting < 0.0:
weighting = 0.0
elif weighting > 0.99:
weighting = 0.99
self._new = 1.0 - weighting # Weighting for new reading.
self._old = weighting # Weighting for old reading.
self._high_tick = None
self._period = None
pi.set_mode(self.gpio, self.pigpio.INPUT)
self._cb = pi.callback(self.gpio, self.pigpio.RISING_EDGE, self._cbf)
pi.set_watchdog(self.gpio, self._watchdog)
def _cbf(self, gpio, level, tick):
if level == 1: # Rising edge.
if self._high_tick is not None:
t = self.pigpio.tickDiff(self._high_tick, tick)
if self._period is not None:
self._period = (self._old * self._period) + (self._new * t)
else:
self._period = t
self._high_tick = tick
elif level == 2: # Watchdog timeout.
if self._period is not None:
if self._period < 2000000000:
self._period += self._watchdog * 1000
def RPM(self):
"""
Returns the RPM.
"""
rpm = 0
if self._period is not None:
rpm = 60000000.0 / (self._period * self.pulses_per_rev)
return rpm
def cancel(self):
"""
Cancels the reader and releases resources.
"""
self.pi.set_watchdog(self.gpio, 0) # cancel watchdog
self._cb.cancel()
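# Worked example of the arithmetic above (values are illustrative): with
# pulses_per_rev = 1 and a smoothed period of 20,000 microseconds between
# rising edges, RPM() returns 60000000.0 / (20000 * 1) = 3000 RPM. With
# weighting w, each new inter-pulse time t updates the period as
# period = w * period_old + (1 - w) * t, i.e. an exponential moving average.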
|
gpl-3.0
| 8,378,247,199,154,533,000 | 27.196319 | 103 | 0.57376 | false |
bootphon/crossitlearn
|
simple_dnn.py
|
1
|
32993
|
"""
A deep neural network with or w/o dropout in one file.
"""
import numpy
import theano
import sys
import math
from theano import tensor as T
from theano import shared
from theano.tensor.shared_randomstreams import RandomStreams
from collections import OrderedDict
BATCH_SIZE = 100
STACKSIZE = 69
def relu_f(vec):
""" Wrapper to quickly change the rectified linear unit function """
return (vec + abs(vec)) / 2.
def softplus_f(v):
return T.nnet.softplus(v)
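# Note: softplus(x) = log(1 + exp(x)), a smooth approximation of the
# rectifier relu_f above; T.nnet.softplus evaluates it in a numerically
# stable way.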
def dropout(rng, x, p=0.5):
""" Zero-out random values in x with probability p using rng """
if p > 0. and p < 1.:
seed = rng.randint(2 ** 30)
srng = theano.tensor.shared_randomstreams.RandomStreams(seed)
mask = srng.binomial(n=1, p=1.-p, size=x.shape,
dtype=theano.config.floatX)
return x * mask
return x
def fast_dropout(rng, x):
""" Multiply activations by N(1,1) """
seed = rng.randint(2 ** 30)
srng = RandomStreams(seed)
mask = srng.normal(size=x.shape, avg=1., dtype=theano.config.floatX)
return x * mask
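# A note on the two variants above: dropout() zeroes each activation
# independently with probability p, so E[mask * x] = (1 - p) * x and the
# scale must be compensated (DropoutNet below does this by scaling W and b
# by 1 / (1 - dr) in its dropout stack). fast_dropout() instead multiplies
# by Gaussian noise with mean 1 ("fast dropout", Wang & Manning 2013),
# which preserves E[x] and needs no rescaling.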
def build_shared_zeros(shape, name):
""" Builds a theano shared variable filled with a zeros numpy array """
return shared(value=numpy.zeros(shape, dtype=theano.config.floatX),
name=name, borrow=True)
class Linear(object):
""" Basic linear transformation layer (W.X + b) """
def __init__(self, rng, input, n_in, n_out, W=None, b=None, fdrop=False):
if W is None:
W_values = numpy.asarray(rng.uniform(
low=-numpy.sqrt(6. / (n_in + n_out)),
high=numpy.sqrt(6. / (n_in + n_out)),
size=(n_in, n_out)), dtype=theano.config.floatX)
W_values *= 4 # This works for sigmoid activated networks!
W = theano.shared(value=W_values, name='W', borrow=True)
if b is None:
b = build_shared_zeros((n_out,), 'b')
self.input = input
self.W = W
self.b = b
self.params = [self.W, self.b]
self.output = T.dot(self.input, self.W) + self.b
if fdrop:
self.output = fast_dropout(rng, self.output)
def __repr__(self):
return "Linear"
class SigmoidLayer(Linear):
""" Sigmoid activation layer (sigmoid(W.X + b)) """
def __init__(self, rng, input, n_in, n_out, W=None, b=None, fdrop=False):
super(SigmoidLayer, self).__init__(rng, input, n_in, n_out, W, b)
self.pre_activation = self.output
if fdrop:
self.pre_activation = fast_dropout(rng, self.pre_activation)
self.output = T.nnet.sigmoid(self.pre_activation)
class ReLU(Linear):
""" Rectified Linear Unit activation layer (max(0, W.X + b)) """
def __init__(self, rng, input, n_in, n_out, W=None, b=None, fdrop=False):
if b is None:
b = build_shared_zeros((n_out,), 'b')
super(ReLU, self).__init__(rng, input, n_in, n_out, W, b)
self.pre_activation = self.output
if fdrop:
self.pre_activation = fast_dropout(rng, self.pre_activation)
self.output = relu_f(self.pre_activation)
class SoftPlus(Linear):
def __init__(self, rng, input, n_in, n_out, W=None, b=None, fdrop=0.):
if b is None:
b_values = numpy.zeros((n_out,), dtype=theano.config.floatX)
b = theano.shared(value=b_values, name='b', borrow=True)
super(SoftPlus, self).__init__(rng, input, n_in, n_out, W, b)
self.pre_activation = self.output
if fdrop:
            self.pre_activation = fast_dropout(rng, self.pre_activation)
self.output = softplus_f(self.pre_activation)
class DatasetMiniBatchIterator(object):
""" Basic mini-batch iterator """
def __init__(self, x, y, batch_size=BATCH_SIZE, randomize=False):
self.x = x
self.y = y
self.batch_size = batch_size
self.randomize = randomize
from sklearn.utils import check_random_state
self.rng = check_random_state(42)
def __iter__(self):
n_samples = self.x.shape[0]
if self.randomize:
for _ in xrange(n_samples / BATCH_SIZE):
if BATCH_SIZE > 1:
i = int(self.rng.rand(1) * ((n_samples+BATCH_SIZE-1) / BATCH_SIZE))
else:
i = int(math.floor(self.rng.rand(1) * n_samples))
yield (i, self.x[i*self.batch_size:(i+1)*self.batch_size],
self.y[i*self.batch_size:(i+1)*self.batch_size])
else:
for i in xrange((n_samples + self.batch_size - 1)
/ self.batch_size):
yield (self.x[i*self.batch_size:(i+1)*self.batch_size],
self.y[i*self.batch_size:(i+1)*self.batch_size])
class LogisticRegression:
"""Multi-class Logistic Regression
"""
def __init__(self, rng, input, n_in, n_out, W=None, b=None):
        if W is not None:
            self.W = W
        else:
            self.W = build_shared_zeros((n_in, n_out), 'W')
        if b is not None:
            self.b = b
        else:
            self.b = build_shared_zeros((n_out,), 'b')
# P(Y|X) = softmax(W.X + b)
self.p_y_given_x = T.nnet.softmax(T.dot(input, self.W) + self.b)
self.y_pred = T.argmax(self.p_y_given_x, axis=1)
self.output = self.y_pred
self.params = [self.W, self.b]
def negative_log_likelihood(self, y):
return -T.mean(T.log(self.p_y_given_x)[T.arange(y.shape[0]), y])
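    # The advanced indexing above selects, for each sample i, the
    # log-probability of its true class: T.log(p_y_given_x)[T.arange(n), y]
    # has entries log p(y_i | x_i), so negating its mean (or sum) gives the
    # usual cross-entropy loss.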
def negative_log_likelihood_sum(self, y):
return -T.sum(T.log(self.p_y_given_x)[T.arange(y.shape[0]), y])
    def log_loss(self, y):
        """ Mean log loss: with softmax outputs this equals the mean
        negative log-likelihood of the true classes. """
        log_y_hat = T.log(self.p_y_given_x)
        return -T.mean(log_y_hat[T.arange(y.shape[0]), y])
def training_cost(self, y):
""" Wrapper for standard name """
return self.negative_log_likelihood_sum(y)
#return self.log_loss(y) TODO
def errors(self, y):
if y.ndim != self.y_pred.ndim:
raise TypeError("y should have the same shape as self.y_pred",
("y", y.type, "y_pred", self.y_pred.type))
if y.dtype.startswith('int'):
return T.mean(T.neq(self.y_pred, y))
else:
print("!!! y should be of int type")
return T.mean(T.neq(self.y_pred, numpy.asarray(y, dtype='int')))
class NeuralNet(object):
""" Neural network (not regularized, without dropout) """
def __init__(self, numpy_rng, theano_rng=None,
n_ins=40*3,
layers_types=[Linear, ReLU, ReLU, ReLU, LogisticRegression],
layers_sizes=[1024, 1024, 1024, 1024],
n_outs=62 * 3,
rho=0.95, eps=1.E-6,
max_norm=0.,
debugprint=False):
"""
TODO
"""
self.layers = []
self.params = []
self.n_layers = len(layers_types)
self.layers_types = layers_types
assert self.n_layers > 0
self.max_norm = max_norm
self._rho = rho # ``momentum'' for adadelta
self._eps = eps # epsilon for adadelta
self._accugrads = [] # for adadelta
self._accudeltas = [] # for adadelta
self._old_dxs = [] # for adadelta with Nesterov
        if theano_rng is None:
theano_rng = RandomStreams(numpy_rng.randint(2 ** 30))
self.x = T.fmatrix('x')
self.y = T.ivector('y')
self.layers_ins = [n_ins] + layers_sizes
self.layers_outs = layers_sizes + [n_outs]
layer_input = self.x
for layer_type, n_in, n_out in zip(layers_types,
self.layers_ins, self.layers_outs):
this_layer = layer_type(rng=numpy_rng,
input=layer_input, n_in=n_in, n_out=n_out)
assert hasattr(this_layer, 'output')
self.params.extend(this_layer.params)
self._accugrads.extend([build_shared_zeros(t.shape.eval(),
'accugrad') for t in this_layer.params])
self._accudeltas.extend([build_shared_zeros(t.shape.eval(),
'accudelta') for t in this_layer.params])
self._old_dxs.extend([build_shared_zeros(t.shape.eval(),
'old_dxs') for t in this_layer.params])
self.layers.append(this_layer)
layer_input = this_layer.output
assert hasattr(self.layers[-1], 'training_cost')
assert hasattr(self.layers[-1], 'errors')
# TODO standardize cost
self.mean_cost = self.layers[-1].negative_log_likelihood(self.y)
self.cost = self.layers[-1].training_cost(self.y)
#self.mean_cost = self.layers[-1].training_cost(self.y) # TODO
if debugprint:
theano.printing.debugprint(self.cost)
self.errors = self.layers[-1].errors(self.y)
def __repr__(self):
dimensions_layers_str = map(lambda x: "x".join(map(str, x)),
zip(self.layers_ins, self.layers_outs))
return "_".join(map(lambda x: "_".join((x[0].__name__, x[1])),
zip(self.layers_types, dimensions_layers_str)))
def get_SGD_trainer(self):
""" Returns a plain SGD minibatch trainer with learning rate as param.
"""
batch_x = T.fmatrix('batch_x')
batch_y = T.ivector('batch_y')
learning_rate = T.fscalar('lr') # learning rate to use
# compute the gradients with respect to the model parameters
# using mean_cost so that the learning rate is not too dependent
# on the batch size
gparams = T.grad(self.mean_cost, self.params)
# compute list of weights updates
updates = OrderedDict()
for param, gparam in zip(self.params, gparams):
if self.max_norm:
W = param - gparam * learning_rate
col_norms = W.norm(2, axis=0)
desired_norms = T.clip(col_norms, 0, self.max_norm)
updates[param] = W * (desired_norms / (1e-6 + col_norms))
else:
updates[param] = param - gparam * learning_rate
train_fn = theano.function(inputs=[theano.Param(batch_x),
theano.Param(batch_y),
theano.Param(learning_rate)],
outputs=self.mean_cost,
updates=updates,
givens={self.x: batch_x, self.y: batch_y})
return train_fn
def get_adagrad_trainer(self):
""" Returns an Adagrad (Duchi et al. 2010) trainer using a learning rate.
"""
batch_x = T.fmatrix('batch_x')
batch_y = T.ivector('batch_y')
learning_rate = T.fscalar('lr') # learning rate to use
# compute the gradients with respect to the model parameters
gparams = T.grad(self.mean_cost, self.params)
# compute list of weights updates
updates = OrderedDict()
for accugrad, param, gparam in zip(self._accugrads, self.params, gparams):
# c.f. Algorithm 1 in the Adadelta paper (Zeiler 2012)
agrad = accugrad + gparam * gparam
dx = - (learning_rate / T.sqrt(agrad + self._eps)) * gparam
if self.max_norm:
W = param + dx
col_norms = W.norm(2, axis=0)
desired_norms = T.clip(col_norms, 0, self.max_norm)
updates[param] = W * (desired_norms / (1e-6 + col_norms))
else:
updates[param] = param + dx
updates[accugrad] = agrad
train_fn = theano.function(inputs=[theano.Param(batch_x),
theano.Param(batch_y),
theano.Param(learning_rate)],
outputs=self.mean_cost,
updates=updates,
givens={self.x: batch_x, self.y: batch_y})
return train_fn
def get_adadelta_trainer(self):
""" Returns an Adadelta (Zeiler 2012) trainer using self._rho and
self._eps params.
"""
batch_x = T.fmatrix('batch_x')
batch_y = T.ivector('batch_y')
# compute the gradients with respect to the model parameters
gparams = T.grad(self.mean_cost, self.params)
# compute list of weights updates
updates = OrderedDict()
for accugrad, accudelta, param, gparam in zip(self._accugrads,
self._accudeltas, self.params, gparams):
# c.f. Algorithm 1 in the Adadelta paper (Zeiler 2012)
agrad = self._rho * accugrad + (1 - self._rho) * gparam * gparam
dx = - T.sqrt((accudelta + self._eps)
/ (agrad + self._eps)) * gparam
updates[accudelta] = (self._rho * accudelta
+ (1 - self._rho) * dx * dx)
if self.max_norm:
W = param + dx
col_norms = W.norm(2, axis=0)
desired_norms = T.clip(col_norms, 0, self.max_norm)
updates[param] = W * (desired_norms / (1e-6 + col_norms))
else:
updates[param] = param + dx
updates[accugrad] = agrad
train_fn = theano.function(inputs=[theano.Param(batch_x),
theano.Param(batch_y)],
outputs=self.mean_cost,
updates=updates,
givens={self.x: batch_x, self.y: batch_y})
return train_fn
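    # Adadelta update rule as implemented above (Zeiler 2012, Algorithm 1):
    #   E[g^2]_t  = rho * E[g^2]_{t-1}  + (1 - rho) * g_t^2
    #   dx_t      = -sqrt(E[dx^2]_{t-1} + eps) / sqrt(E[g^2]_t + eps) * g_t
    #   E[dx^2]_t = rho * E[dx^2]_{t-1} + (1 - rho) * dx_t^2
    #   x_{t+1}   = x_t + dx_t
    # No global learning rate is needed; rho and eps come from __init__.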
def score_classif(self, given_set):
""" Returns functions to get current classification errors. """
batch_x = T.fmatrix('batch_x')
batch_y = T.ivector('batch_y')
score = theano.function(inputs=[theano.Param(batch_x),
theano.Param(batch_y)],
outputs=self.errors,
givens={self.x: batch_x, self.y: batch_y})
def scoref():
""" returned function that scans the entire set given as input """
return [score(batch_x, batch_y) for batch_x, batch_y in given_set]
return scoref
class RegularizedNet(NeuralNet):
""" Neural net with L1 and L2 regularization """
def __init__(self, numpy_rng, theano_rng=None,
n_ins=100,
layers_types=[ReLU, ReLU, ReLU, LogisticRegression],
layers_sizes=[1024, 1024, 1024],
n_outs=2,
rho=0.9, eps=1.E-6,
L1_reg=0.,
L2_reg=0.,
max_norm=0.,
debugprint=False):
"""
TODO
"""
super(RegularizedNet, self).__init__(numpy_rng, theano_rng, n_ins,
layers_types, layers_sizes, n_outs, rho, eps, max_norm,
debugprint)
L1 = shared(0.)
for param in self.params:
L1 += T.sum(abs(param))
if L1_reg > 0.:
self.cost = self.cost + L1_reg * L1
L2 = shared(0.)
for param in self.params:
L2 += T.sum(param ** 2)
if L2_reg > 0.:
self.cost = self.cost + L2_reg * L2
class DropoutNet(NeuralNet):
""" Neural net with dropout (see Hinton's et al. paper) """
def __init__(self, numpy_rng, theano_rng=None,
n_ins=40*3,
layers_types=[ReLU, ReLU, ReLU, ReLU, LogisticRegression],
layers_sizes=[4000, 4000, 4000, 4000],
dropout_rates=[0.2, 0.5, 0.5, 0.5, 0.5],
n_outs=62 * 3,
rho=0.98, eps=1.E-6,
max_norm=0.,
fast_drop=True,
debugprint=False):
"""
TODO
"""
super(DropoutNet, self).__init__(numpy_rng, theano_rng, n_ins,
layers_types, layers_sizes, n_outs, rho, eps, max_norm,
debugprint)
self.dropout_rates = dropout_rates
if fast_drop:
if dropout_rates[0]:
dropout_layer_input = fast_dropout(numpy_rng, self.x)
else:
dropout_layer_input = self.x
else:
dropout_layer_input = dropout(numpy_rng, self.x, p=dropout_rates[0])
self.dropout_layers = []
for layer, layer_type, n_in, n_out, dr in zip(self.layers,
layers_types, self.layers_ins, self.layers_outs,
dropout_rates[1:] + [0]): # !!! we do not dropout anything
# from the last layer !!!
if dr:
if fast_drop:
this_layer = layer_type(rng=numpy_rng,
input=dropout_layer_input, n_in=n_in, n_out=n_out,
W=layer.W, b=layer.b, fdrop=True)
else:
this_layer = layer_type(rng=numpy_rng,
input=dropout_layer_input, n_in=n_in, n_out=n_out,
W=layer.W * 1. / (1. - dr),
b=layer.b * 1. / (1. - dr))
                # N.B. dropout() with dr == 1 drops nothing (its mask is skipped)!
this_layer.output = dropout(numpy_rng, this_layer.output, dr)
else:
this_layer = layer_type(rng=numpy_rng,
input=dropout_layer_input, n_in=n_in, n_out=n_out,
W=layer.W, b=layer.b)
assert hasattr(this_layer, 'output')
self.dropout_layers.append(this_layer)
dropout_layer_input = this_layer.output
assert hasattr(self.layers[-1], 'training_cost')
assert hasattr(self.layers[-1], 'errors')
# TODO standardize cost
# these are the dropout costs
self.mean_cost = self.dropout_layers[-1].negative_log_likelihood(self.y)
self.cost = self.dropout_layers[-1].training_cost(self.y)
        # these are the non-dropout errors
self.errors = self.layers[-1].errors(self.y)
def __repr__(self):
return super(DropoutNet, self).__repr__() + "\n"\
+ "dropout rates: " + str(self.dropout_rates)
def add_fit_and_score(class_to_chg):
""" Mutates a class to add the fit() and score() functions to a NeuralNet.
"""
from types import MethodType
def fit(self, x_train, y_train, x_dev=None, y_dev=None,
max_epochs=20, early_stopping=True, split_ratio=0.1, # TODO 100+ epochs
method='adadelta', verbose=False, plot=False):
"""
TODO
"""
import time, copy
        if x_dev is None or y_dev is None:
from sklearn.cross_validation import train_test_split
x_train, x_dev, y_train, y_dev = train_test_split(x_train, y_train,
test_size=split_ratio, random_state=42)
if method == 'sgd':
train_fn = self.get_SGD_trainer()
elif method == 'adagrad':
train_fn = self.get_adagrad_trainer()
elif method == 'adadelta':
train_fn = self.get_adadelta_trainer()
elif method == 'adadelta_rprop':
train_fn = self.get_adadelta_rprop_trainer()
train_set_iterator = DatasetMiniBatchIterator(x_train, y_train)
dev_set_iterator = DatasetMiniBatchIterator(x_dev, y_dev)
train_scoref = self.score_classif(train_set_iterator)
dev_scoref = self.score_classif(dev_set_iterator)
best_dev_loss = numpy.inf
epoch = 0
# TODO early stopping (not just cross val, also stop training)
if plot:
verbose = True
self._costs = []
self._train_errors = []
self._dev_errors = []
self._updates = []
while epoch < max_epochs:
if not verbose:
sys.stdout.write("\r%0.2f%%" % (epoch * 100./ max_epochs))
sys.stdout.flush()
avg_costs = []
timer = time.time()
for x, y in train_set_iterator:
if method == 'sgd' or 'adagrad' in method:
avg_cost = train_fn(x, y, lr=1.E-2)
elif 'adadelta' in method:
avg_cost = train_fn(x, y)
if type(avg_cost) == list:
avg_costs.append(avg_cost[0])
else:
avg_costs.append(avg_cost)
if verbose:
mean_costs = numpy.mean(avg_costs)
mean_train_errors = numpy.mean(train_scoref())
print(' epoch %i took %f seconds' %
(epoch, time.time() - timer))
print(' epoch %i, avg costs %f' %
(epoch, mean_costs))
print(' method %s, epoch %i, training error %f' %
(method, epoch, mean_train_errors))
if plot:
self._costs.append(mean_costs)
self._train_errors.append(mean_train_errors)
dev_errors = numpy.mean(dev_scoref())
if plot:
self._dev_errors.append(dev_errors)
if dev_errors < best_dev_loss:
best_dev_loss = dev_errors
best_params = copy.deepcopy(self.params)
if verbose:
print('!!! epoch %i, validation error of best model %f' %
(epoch, dev_errors))
epoch += 1
if not verbose:
print("")
for i, param in enumerate(best_params):
self.params[i] = param
def score(self, x, y):
""" error rates """
iterator = DatasetMiniBatchIterator(x, y)
scoref = self.score_classif(iterator)
return numpy.mean(scoref())
class_to_chg.fit = MethodType(fit, None, class_to_chg)
class_to_chg.score = MethodType(score, None, class_to_chg)
if __name__ == "__main__":
add_fit_and_score(DropoutNet)
add_fit_and_score(RegularizedNet)
def nudge_dataset(X, Y):
"""
This produces a dataset 5 times bigger than the original one,
by moving the 8x8 images in X around by 1px to left, right, down, up
"""
from scipy.ndimage import convolve
direction_vectors = [
[[0, 1, 0],
[0, 0, 0],
[0, 0, 0]],
[[0, 0, 0],
[1, 0, 0],
[0, 0, 0]],
[[0, 0, 0],
[0, 0, 1],
[0, 0, 0]],
[[0, 0, 0],
[0, 0, 0],
[0, 1, 0]]]
shift = lambda x, w: convolve(x.reshape((8, 8)), mode='constant',
weights=w).ravel()
X = numpy.concatenate([X] +
[numpy.apply_along_axis(shift, 1, X, vector)
for vector in direction_vectors])
Y = numpy.concatenate([Y for _ in range(5)], axis=0)
return X, Y
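    # Shape check (illustrative): for the 8x8 sklearn digits, X of shape
    # (n, 64) becomes (5n, 64) -- the original plus four copies shifted by
    # one pixel -- and Y is tiled five times to match.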
from sklearn import datasets, svm, naive_bayes
from sklearn import cross_validation, preprocessing
SPOKEN_WORDS = True
MNIST = False
DIGITS = False
NUDGE_DIGITS = True
FACES = False
TWENTYNEWSGROUPS = False
VERBOSE = True
SCALE = True
PLOT = True
def train_models(x_train, y_train, x_test, y_test, n_features, n_outs,
use_dropout=False, n_epochs=100, numpy_rng=None, # TODO 200+ epochs
svms=False, nb=False, deepnn=True, name=''):
if svms:
print("Linear SVM")
classifier = svm.SVC(gamma=0.001)
print(classifier)
classifier.fit(x_train, y_train)
print("score: %f" % classifier.score(x_test, y_test))
print("RBF-kernel SVM")
classifier = svm.SVC(kernel='rbf', class_weight='auto')
print(classifier)
classifier.fit(x_train, y_train)
print("score: %f" % classifier.score(x_test, y_test))
if nb:
print("Multinomial Naive Bayes")
classifier = naive_bayes.MultinomialNB()
print(classifier)
classifier.fit(x_train, y_train)
print("score: %f" % classifier.score(x_test, y_test))
if deepnn:
import warnings
warnings.filterwarnings("ignore") # TODO remove
        if use_dropout:
            n_epochs *= 4
def new_dnn(dropout=False):
if dropout:
print("Dropout DNN")
return DropoutNet(numpy_rng=numpy_rng, n_ins=n_features,
#layers_types=[ReLU, ReLU, ReLU, ReLU, LogisticRegression],
layers_types=[SoftPlus, SoftPlus, SoftPlus, SoftPlus, LogisticRegression],
layers_sizes=[2000, 2000, 2000, 2000],
dropout_rates=[0.2, 0.5, 0.5, 0.5, 0.5],
n_outs=n_outs,
max_norm=4.,
fast_drop=False,
debugprint=0)
else:
print("Simple (regularized) DNN")
return RegularizedNet(numpy_rng=numpy_rng, n_ins=n_features,
#layers_types=[LogisticRegression],
#layers_sizes=[],
#layers_types=[ReLU, ReLU, ReLU, LogisticRegression],
#layers_types=[SoftPlus, SoftPlus, SoftPlus, LogisticRegression],
#layers_sizes=[1000, 1000, 1000],
layers_types=[ReLU, LogisticRegression],
layers_sizes=[200],
n_outs=n_outs,
#L1_reg=0.001/x_train.shape[0],
#L2_reg=0.001/x_train.shape[0],
L1_reg=0.,
L2_reg=1./x_train.shape[0],
max_norm=0.,
debugprint=0)
import matplotlib.pyplot as plt
plt.figure()
ax1 = plt.subplot(221)
ax2 = plt.subplot(222)
ax3 = plt.subplot(223)
ax4 = plt.subplot(224) # TODO updates of the weights
methods = ['adadelta']
for method in methods:
dnn = new_dnn(use_dropout)
            print(dnn)
dnn.fit(x_train, y_train, max_epochs=n_epochs, method=method, verbose=VERBOSE, plot=PLOT)
test_error = dnn.score(x_test, y_test)
print("score: %f" % (1. - test_error))
ax1.plot(numpy.log10(dnn._costs), label=method)
#ax2.plot(numpy.log10(dnn._train_errors), label=method)
#ax3.plot(numpy.log10(dnn._dev_errors), label=method)
ax2.plot(dnn._train_errors, label=method)
ax3.plot(dnn._dev_errors, label=method)
#ax4.plot(dnn._updates, label=method) TODO
ax4.plot([test_error for _ in range(10)], label=method)
ax1.set_xlabel('epoch')
ax1.set_ylabel('cost (log10)')
ax2.set_xlabel('epoch')
ax2.set_ylabel('train error')
ax3.set_xlabel('epoch')
ax3.set_ylabel('dev error')
ax4.set_ylabel('test error')
plt.legend()
plt.savefig('training_log' + name + '.png')
if MNIST:
from sklearn.datasets import fetch_mldata
mnist = fetch_mldata('MNIST original')
X = numpy.asarray(mnist.data, dtype='float32')
if SCALE:
#X = preprocessing.scale(X)
X /= 255.
y = numpy.asarray(mnist.target, dtype='int32')
#target_names = mnist.target_names
print("Total dataset size:")
print("n samples: %d" % X.shape[0])
print("n features: %d" % X.shape[1])
print("n classes: %d" % len(set(y)))
x_train, x_test, y_train, y_test = cross_validation.train_test_split(
X, y, test_size=0.2, random_state=42)
train_models(x_train, y_train, x_test, y_test, X.shape[1],
len(set(y)), numpy_rng=numpy.random.RandomState(123),
name='MNIST')
if DIGITS:
digits = datasets.load_digits()
data = numpy.asarray(digits.data, dtype='float32')
target = numpy.asarray(digits.target, dtype='int32')
x = data
y = target
if NUDGE_DIGITS:
x, y = nudge_dataset(x, y)
if SCALE:
x = preprocessing.scale(x)
x_train, x_test, y_train, y_test = cross_validation.train_test_split(
x, y, test_size=0.2, random_state=42)
train_models(x_train, y_train, x_test, y_test, x.shape[1],
len(set(target)), numpy_rng=numpy.random.RandomState(123),
name='digits')
if FACES:
import logging
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(message)s')
lfw_people = datasets.fetch_lfw_people(min_faces_per_person=70,
resize=0.4)
X = numpy.asarray(lfw_people.data, dtype='float32')
if SCALE:
X = preprocessing.scale(X)
y = numpy.asarray(lfw_people.target, dtype='int32')
target_names = lfw_people.target_names
print("Total dataset size:")
print("n samples: %d" % X.shape[0])
print("n features: %d" % X.shape[1])
print("n classes: %d" % target_names.shape[0])
x_train, x_test, y_train, y_test = cross_validation.train_test_split(
X, y, test_size=0.2, random_state=42)
train_models(x_train, y_train, x_test, y_test, X.shape[1],
len(set(y)), numpy_rng=numpy.random.RandomState(123),
name='faces')
if TWENTYNEWSGROUPS:
from sklearn.feature_extraction.text import TfidfVectorizer
newsgroups_train = datasets.fetch_20newsgroups(subset='train')
vectorizer = TfidfVectorizer(encoding='latin-1', max_features=10000)
#vectorizer = HashingVectorizer(encoding='latin-1')
x_train = vectorizer.fit_transform(newsgroups_train.data)
x_train = numpy.asarray(x_train.todense(), dtype='float32')
y_train = numpy.asarray(newsgroups_train.target, dtype='int32')
newsgroups_test = datasets.fetch_20newsgroups(subset='test')
x_test = vectorizer.transform(newsgroups_test.data)
x_test = numpy.asarray(x_test.todense(), dtype='float32')
y_test = numpy.asarray(newsgroups_test.target, dtype='int32')
train_models(x_train, y_train, x_test, y_test, x_train.shape[1],
len(set(y_train)),
numpy_rng=numpy.random.RandomState(123),
svms=False, nb=True, deepnn=True,
name='20newsgroups')
if SPOKEN_WORDS:
# words done by "say", shapes of their filterbanks
#>>> shapes
#array([[62, 40],
# [65, 40],
# [58, 40],
# ...,
# [85, 40],
# [79, 40],
# [51, 40]])
#>>> shapes.mean(axis=0)
#array([ 70.87751196, 40. ])
#>>> shapes.std(axis=0)
#array([ 12.94580736, 0. ])
#>>> shapes.min(axis=0)
#array([39, 40])
words_fbanks = numpy.load("all_words_pascal1k.npz")
n_tokens = len([k for k in words_fbanks.keys()])
lexicon = set([w.split('_')[1] for w in words_fbanks.keys()])
lexicon = [w for w in lexicon] # we need an ordered collection
n_words = len(lexicon)
all_fbanks = numpy.concatenate([v for _, v in words_fbanks.iteritems()])
        print(all_fbanks.shape)
        mean = all_fbanks.mean(axis=0)
        print(mean.shape)
        std = all_fbanks.std(axis=0)
        print(std.shape)
# take 69 fbanks in the middle of the word and pad with 0s if needed
X = numpy.zeros((n_tokens, 40*STACKSIZE), dtype='float32')
y = numpy.zeros(n_tokens, dtype='int32')
for i, (swf, fb) in enumerate(words_fbanks.iteritems()):
spkr, word, _ = swf.split('_')
l = fb.shape[0]
m = l/2
s = max(0, m - ((STACKSIZE-1) / 2))
e = min(l-1, m + ((STACKSIZE-1) / 2))
tmp = (fb - mean) / std
tmp = tmp[s:e+1].flatten()
diff = 40*STACKSIZE - tmp.shape[0]
if not diff:
X[i] = tmp
else:
X[i][diff/2:-diff/2] = tmp
y[i] = lexicon.index(word)
# train the DNN, with the training set as test set if let in this form:
train_models(X, y, X, y, X.shape[1],
len(set(y)),
numpy_rng=numpy.random.RandomState(123),
svms=False, nb=False, deepnn=True,
name='spoken_words')
|
mit
| -3,261,284,628,923,946,000 | 39.13747 | 105 | 0.519898 | false |
freieslabor/info-display
|
info_display/screens/event_schedule/management/commands/updateevents.py
|
1
|
1290
|
from django.core.management.base import BaseCommand
from django.conf import settings
from datetime import datetime
from pytz import timezone
from dateutil.tz import tzlocal
from icalendar import Calendar
import urllib.request
from ...models import Event, CalendarFeed
class Command(BaseCommand):
help = 'Updates event schedule and cleans past events.'
def handle(self, *args, **options):
# insert new events
for ical in CalendarFeed.objects.all():
with urllib.request.urlopen(ical.url) as f:
cal = Calendar.from_ical(f.read())
for event in cal.walk('vevent'):
# create datetime object and localize it
date = event['DTSTART'].dt
date_time = date.astimezone(timezone(settings.TIME_ZONE))
                # create the event object and save it
schedule = Event(
id=event['UID'],
date=date_time,
title=event['SUMMARY']
)
schedule.save()
self.stdout.write('Successfully updated %s.' % ical.url)
# clear past events
Event.objects.filter(date__lt=datetime.now(tzlocal())).delete()
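# Typical invocation (standard Django management-command usage is assumed):
#   python manage.py updateevents
# e.g. scheduled via cron to keep the displayed event list current.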
|
mpl-2.0
| -4,256,452,507,034,026,000 | 32.947368 | 77 | 0.584496 | false |
desihub/desispec
|
py/desispec/pipeline/control.py
|
1
|
46258
|
#
# See top-level LICENSE.rst file for Copyright information
#
# -*- coding: utf-8 -*-
"""
desispec.pipeline.control
===========================
Tools for controlling pipeline production.
"""
from __future__ import absolute_import, division, print_function
import os
import sys
import re
import time
from collections import OrderedDict
import numpy as np
from desiutil.log import get_logger
from .. import io
from ..parallel import (dist_uniform, dist_discrete, dist_discrete_all,
stdouterr_redirected)
from .defs import (task_states, prod_options_name,
task_state_to_int, task_int_to_state)
from . import prod as pipeprod
from . import db as pipedb
from . import run as piperun
from . import tasks as pipetasks
from . import scriptgen as scriptgen
class clr:
HEADER = "\033[95m"
OKBLUE = "\033[94m"
OKGREEN = "\033[92m"
WARNING = "\033[93m"
FAIL = "\033[91m"
ENDC = "\033[0m"
def disable(self):
self.HEADER = ""
self.OKBLUE = ""
self.OKGREEN = ""
self.WARNING = ""
self.FAIL = ""
self.ENDC = ""
def create(root=None, data=None, redux=None, prod=None, force=False,
basis=None, calib=None, db_sqlite=False, db_sqlite_path=None,
db_postgres=False, db_postgres_host="nerscdb03.nersc.gov",
db_postgres_port=5432, db_postgres_name="desidev",
db_postgres_user="desidev_admin", db_postgres_authorized="desidev_ro",
nside=64 ):
"""Create (or re-create) a production.
Args:
root (str): value to use for DESI_ROOT.
data (str): value to use for DESI_SPECTRO_DATA.
redux (str): value to use for DESI_SPECTRO_REDUX.
prod (str): value to use for SPECPROD.
force (bool): if True, overwrite existing production DB.
basis (str): value to use for DESI_BASIS_TEMPLATES.
calib (str): value to use for DESI_SPECTRO_CALIB.
db_sqlite (bool): if True, use SQLite for the DB.
db_sqlite_path (str): override path to SQLite DB.
db_postgres (bool): if True, use PostgreSQL for the DB.
db_postgres_host (str): PostgreSQL hostname.
db_postgres_port (int): PostgreSQL connection port number.
db_postgres_name (str): PostgreSQL DB name.
db_postgres_user (str): PostgreSQL user name.
db_postgres_authorized (str): Additional PostgreSQL users to
authorize.
nside (int): HEALPix nside value used for spectral grouping.
"""
log = get_logger()
# Check desi root location
desiroot = None
if root is not None:
desiroot = os.path.abspath(root)
os.environ["DESI_ROOT"] = desiroot
elif "DESI_ROOT" in os.environ:
desiroot = os.environ["DESI_ROOT"]
else:
log.error("You must set DESI_ROOT in your environment or "
"set the root keyword argument")
raise RuntimeError("Invalid DESI_ROOT")
# Check raw data location
rawdir = None
if data is not None:
rawdir = os.path.abspath(data)
os.environ["DESI_SPECTRO_DATA"] = rawdir
elif "DESI_SPECTRO_DATA" in os.environ:
rawdir = os.environ["DESI_SPECTRO_DATA"]
else:
log.error("You must set DESI_SPECTRO_DATA in your environment or "
"set the data keyword argument")
raise RuntimeError("Invalid DESI_SPECTRO_DATA")
# Check production name
prodname = None
if prod is not None:
prodname = prod
os.environ["SPECPROD"] = prodname
elif "SPECPROD" in os.environ:
prodname = os.environ["SPECPROD"]
else:
log.error("You must set SPECPROD in your environment or "
"set the prod keyword argument")
raise RuntimeError("Invalid SPECPROD")
# Check spectro redux location
specdir = None
if redux is not None:
specdir = os.path.abspath(redux)
os.environ["DESI_SPECTRO_REDUX"] = specdir
elif "DESI_SPECTRO_REDUX" in os.environ:
specdir = os.environ["DESI_SPECTRO_REDUX"]
else:
log.error("You must set DESI_SPECTRO_REDUX in your environment or "
"set the redux keyword argument")
raise RuntimeError("Invalid DESI_SPECTRO_REDUX")
proddir = os.path.join(specdir, prodname)
if os.path.exists(proddir) and not force :
log.error("Production {} exists.\n"
"Either remove this directory if you want to start fresh\n"
"or use 'desi_pipe update' to update a production\n"
"or rerun with --force option.".format(proddir))
raise RuntimeError("production already exists")
# Check basis template location
if basis is not None:
basis = os.path.abspath(basis)
os.environ["DESI_BASIS_TEMPLATES"] = basis
elif "DESI_BASIS_TEMPLATES" in os.environ:
basis = os.environ["DESI_BASIS_TEMPLATES"]
else:
log.error("You must set DESI_BASIS_TEMPLATES in your environment or "
"set the basis keyword argument")
raise RuntimeError("Invalid DESI_BASIS_TEMPLATES")
# Check calibration location
if calib is not None:
calib = os.path.abspath(calib)
os.environ["DESI_SPECTRO_CALIB"] = calib
elif "DESI_SPECTRO_CALIB" in os.environ:
calib = os.environ["DESI_SPECTRO_CALIB"]
else:
log.error("You must set DESI_SPECTRO_CALIB in your environment "
" or set the calib keyword argument")
raise RuntimeError("Invalid DESI_SPECTRO_CALIB")
# Construct our DB connection string
dbpath = None
if db_postgres:
# We are creating a new postgres backend. Explicitly create the
# database, so that we can get the schema key.
db = pipedb.DataBasePostgres(host=db_postgres_host,
port=db_postgres_port, dbname=db_postgres_name,
user=db_postgres_user, schema=None,
authorize=db_postgres_authorized)
dbprops = [
"postgresql",
db_postgres_host,
"{}".format(db_postgres_port),
db_postgres_name,
db_postgres_user,
db.schema
]
dbpath = ":".join(dbprops)
os.environ["DESI_SPECTRO_DB"] = dbpath
elif db_sqlite:
# We are creating a new sqlite backend
if db_sqlite_path is not None:
# We are using a non-default path
dbpath = os.path.abspath(db_sqlite_path)
else:
# We are using sqlite with the default location
dbpath = os.path.join(proddir, "desi.db")
if not os.path.isdir(proddir):
os.makedirs(proddir)
# Create the database
db = pipedb.DataBaseSqlite(dbpath, "w")
os.environ["DESI_SPECTRO_DB"] = dbpath
elif "DESI_SPECTRO_DB" in os.environ:
# We are using an existing prod
dbpath = os.environ["DESI_SPECTRO_DB"]
else:
# Error- we have to get the DB info from somewhere
log.error("You must set DESI_SPECTRO_DB in your environment or "
"use the db_sqlite or db_postgres arguments")
raise RuntimeError("Invalid DESI_SPECTRO_DB")
pipeprod.update_prod(nightstr=None, hpxnside=nside)
# create setup shell snippet
setupfile = os.path.abspath(os.path.join(proddir, "setup.sh"))
with open(setupfile, "w") as s:
s.write("# Generated by desi_pipe\n")
s.write("export DESI_ROOT={}\n\n".format(desiroot))
s.write("export DESI_BASIS_TEMPLATES={}\n".format(basis))
s.write("export DESI_SPECTRO_CALIB={}\n\n".format(calib))
s.write("export DESI_SPECTRO_DATA={}\n\n".format(rawdir))
s.write("# Production originally created at\n")
s.write("# $DESI_SPECTRO_REDUX={}\n".format(specdir))
s.write("# $SPECPROD={}\n".format(prodname))
s.write("#\n")
s.write("# Support the ability to move the production\n")
s.write("# - get abspath to directory where this script is located\n")
s.write("# - unpack proddir=$DESI_SPECTRO_REDUX/$SPECPROD\n\n")
s.write('proddir=$(cd $(dirname "$BASH_SOURCE"); pwd)\n')
s.write("export DESI_SPECTRO_REDUX=$(dirname $proddir)\n")
s.write("export SPECPROD=$(basename $proddir)\n\n")
# s.write("export DESI_SPECTRO_REDUX={}\n".format(specdir))
# s.write("export SPECPROD={}\n".format(specprod))
s.write("export DESI_SPECTRO_DB={}\n".format(dbpath))
s.write("\n")
if "DESI_LOGLEVEL" in os.environ:
s.write("export DESI_LOGLEVEL=\"{}\"\n\n"\
.format(os.environ["DESI_LOGLEVEL"]))
else:
s.write("#export DESI_LOGLEVEL=\"DEBUG\"\n\n")
log.info("\n\nTo use this production, you should do:\n%> source {}\n\n"\
.format(setupfile))
return
def update(nightstr=None, nside=64, expid=None):
"""Update a production.
Args:
nightstr (str): Comma separated (YYYYMMDD) or regex pattern. Only
nights matching these patterns will be considered.
nside (int): HEALPix nside value used for spectral grouping.
expid (int): Only update the production for a single exposure ID.
"""
pipeprod.update_prod(nightstr=nightstr, hpxnside=nside, expid=expid)
return
def get_tasks_type(db, tasktype, states, nights, expid=None, spec=None):
"""Get tasks of one type that match certain criteria.
Args:
db (DataBase): the production DB.
tasktype (str): a valid task type.
states (list): list of task states to select.
nights (list): list of nights to select.
expid (int): exposure ID to select.
spec (int): spectrograph to select.
Returns:
(list): list of tasks meeting the criteria.
"""
ntlist = ",".join(nights)
if (expid is not None) and (len(nights) > 1):
raise RuntimeError("Only one night should be specified when "
"getting tasks for a single exposure.")
tasks = list()
with db.cursor() as cur:
if tasktype == "spectra" or tasktype == "redshift":
cmd = "select pixel from healpix_frame where night in ({})".format(ntlist)
cur.execute(cmd)
pixels = np.unique([ x for (x,) in cur.fetchall() ]).tolist()
pixlist = ",".join([ str(p) for p in pixels])
cmd = "select name,state from {} where pixel in ({})".format(tasktype, pixlist)
cur.execute(cmd)
tasks = [ x for (x, y) in cur.fetchall() if \
task_int_to_state[y] in states ]
else :
cmd = "select name, state from {} where night in ({})"\
.format(tasktype, ntlist)
if expid is not None:
cmd = "{} and expid = {}".format(cmd, expid)
if spec is not None:
cmd = "{} and spec = {}".format(cmd, spec)
cur.execute(cmd)
tasks = [ x for (x, y) in cur.fetchall() if \
task_int_to_state[y] in states ]
return tasks
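# For illustration, with tasktype="psf" (table names mirror task types in
# the production DB), nights=["20191001"] and spec=1, the query built above
# would be:
#   select name, state from psf where night in (20191001) and spec = 1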
def get_tasks(db, tasktypes, nights, states=None, expid=None, spec=None,
nosubmitted=False, taskfile=None):
"""Get tasks of multiple types that match certain criteria.
Args:
db (DataBase): the production DB.
tasktypes (list): list of valid task types.
states (list): list of task states to select.
nights (list): list of nights to select.
expid (int): exposure ID to select.
spec (int): spectrograph to select.
nosubmitted (bool): if True, ignore tasks that were already
submitted.
Returns:
list: all tasks of all types.
"""
all_tasks = list()
for tt in tasktypes:
tasks = get_tasks_type(db, tt, states, nights, expid=expid, spec=spec)
if nosubmitted:
if (tt != "spectra") and (tt != "redshift"):
sb = db.get_submitted(tasks)
tasks = [ x for x in tasks if not sb[x] ]
all_tasks.extend(tasks)
return all_tasks
def tasks(tasktypes, nightstr=None, states=None, expid=None, spec=None,
nosubmitted=False, db_postgres_user="desidev_ro", taskfile=None):
"""Get tasks of multiple types that match certain criteria.
Args:
tasktypes (list): list of valid task types.
        nightstr (str): comma separated (YYYYMMDD) or regex pattern.
states (list): list of task states to select.
expid (int): exposure ID to select.
spec (int): spectrograph to select.
nosubmitted (bool): if True, ignore tasks that were already
submitted.
db_postgres_user (str): If using postgres, connect as this
            user for read-only access.
taskfile (str): if set write to this file, else write to STDOUT.
"""
if states is None:
states = task_states
else:
for s in states:
if s not in task_states:
raise RuntimeError("Task state '{}' is not valid".format(s))
dbpath = io.get_pipe_database()
db = pipedb.load_db(dbpath, mode="r", user=db_postgres_user)
allnights = io.get_nights(strip_path=True)
nights = pipeprod.select_nights(allnights, nightstr)
ttypes = list()
for tt in pipedb.all_task_types():
if tt in tasktypes:
ttypes.append(tt)
all_tasks = get_tasks(db, ttypes, nights, states=states, expid=expid,
spec=spec, nosubmitted=nosubmitted)
pipeprod.task_write(taskfile, all_tasks)
return
def getready(db, nightstr=None):
"""Update forward dependencies in the database.
Update database for one or more nights to ensure that forward
dependencies know that they are ready to run.
Args:
db (DataBase): the production DB.
        nightstr (str): comma separated (YYYYMMDD) or regex pattern.
"""
allnights = io.get_nights(strip_path=True)
nights = pipeprod.select_nights(allnights, nightstr)
for nt in nights:
db.getready(night=nt)
return
def check_tasks(tasks, db=None):
"""Check the state of pipeline tasks.
If the database handle is given, use the DB for checking. Otherwise
use the filesystem.
Args:
tasks (list): list of tasks to check.
db (DataBase): the database to use.
Returns:
OrderedDict: dictionary of the state of each task.
"""
states = pipedb.check_tasks(tasks, db=db)
tskstate = OrderedDict()
for tsk in tasks:
tskstate[tsk] = states[tsk]
return tskstate
def sync(db, nightstr=None, specdone=False):
"""Synchronize DB state based on the filesystem.
This scans the filesystem for all tasks for the specified nights,
and updates the states accordingly.
Args:
db (DataBase): the production DB.
        nightstr (str): comma separated (YYYYMMDD) or regex pattern.
specdone: If true, set spectra to done if files exist.
"""
allnights = io.get_nights(strip_path=True)
nights = pipeprod.select_nights(allnights, nightstr)
for nt in nights:
db.sync(nt,specdone=specdone)
return
def cleanup(db, tasktypes, failed=False, submitted=False, expid=None):
"""Clean up stale tasks in the DB.
Args:
db (DataBase): the production DB.
tasktypes (list): list of valid task types.
failed (bool): also clear failed states.
submitted (bool): also clear submitted flag.
expid (int): only clean this exposure ID.
"""
exid = None
if expid is not None and expid >= 0:
exid = expid
db.cleanup(tasktypes=tasktypes, expid=exid, cleanfailed=failed,
cleansubmitted=submitted)
return
def dryrun(tasks, nersc=None, nersc_queue="regular", nersc_maxtime=0,
nersc_maxnodes=0, nersc_shifter=None, mpi_procs=1, mpi_run="",
procs_per_node=0, nodb=False, db_postgres_user="desidev_ro", force=False):
"""Print equivalent command line jobs.
For the specified tasks, print the equivalent stand-alone commands
that would be run on each task. A pipeline job calls the internal
desispec.scripts entrypoints directly.
Args:
tasks (list): list of tasks to run.
nersc (str): if not None, the name of the nersc machine to use
(cori-haswell | cori-knl).
nersc_queue (str): the name of the queue to use
(regular | debug | realtime).
nersc_maxtime (int): if specified, restrict the runtime to this
number of minutes.
nersc_maxnodes (int): if specified, restrict the job to use this
number of nodes.
nersc_shifter (str): the name of the shifter image to use.
mpi_run (str): if specified, and if not using NERSC, use this
command to launch MPI executables in the shell scripts. Default
is to not use MPI.
mpi_procs (int): if not using NERSC, the number of MPI processes
to use in shell scripts.
procs_per_node (int): if specified, use only this number of
processes per node. Default runs one process per core.
nodb (bool): if True, do not use the production DB.
db_postgres_user (str): If using postgres, connect as this
            user for read-only access.
force (bool): if True, print commands for all tasks, not just the ones
in a ready state.
"""
tasks_by_type = pipedb.task_sort(tasks)
(db, opts) = pipeprod.load_prod("r", user=db_postgres_user)
if nodb:
db = None
ppn = None
if procs_per_node > 0:
ppn = procs_per_node
if nersc is None:
# Not running at NERSC
if ppn is None:
ppn = mpi_procs
for tt, tlist in tasks_by_type.items():
piperun.dry_run(tt, tlist, opts, mpi_procs,
ppn, db=db, launch="mpirun -n", force=force)
else:
# Running at NERSC
hostprops = scriptgen.nersc_machine(nersc,
nersc_queue)
for tt, tlist in tasks_by_type.items():
joblist = scriptgen.nersc_job_size(tt, tlist,
nersc, nersc_queue, nersc_maxtime,
nersc_maxnodes, nodeprocs=ppn, db=db)
launch="srun -n"
for (jobnodes, jobppn, jobtime, jobworkers, jobtasks) in joblist:
jobprocs = jobnodes * jobppn
piperun.dry_run(tt, jobtasks, opts, jobprocs,
jobppn, db=db, launch=launch, force=force)
return
def gen_scripts(tasks_by_type, nersc=None, nersc_queue="regular",
nersc_maxtime=0, nersc_maxnodes=0, nersc_shifter=None, mpi_procs=1,
mpi_run="", procs_per_node=0, nodb=False, out=None, debug=False,
db_postgres_user="desidev_ro"):
"""Generate scripts to run tasks of one or more types.
If multiple task type keys are contained in the dictionary, they will
be packed into a single batch job.
Args:
tasks_by_type (dict): each key is the task type and the value is
a list of tasks.
nersc (str): if not None, the name of the nersc machine to use
(cori-haswell | cori-knl).
nersc_queue (str): the name of the queue to use
(regular | debug | realtime).
nersc_maxtime (int): if specified, restrict the runtime to this
number of minutes.
nersc_maxnodes (int): if specified, restrict the job to use this
number of nodes.
nersc_shifter (str): the name of the shifter image to use.
mpi_run (str): if specified, and if not using NERSC, use this
command to launch MPI executables in the shell scripts. Default
is to not use MPI.
mpi_procs (int): if not using NERSC, the number of MPI processes
to use in shell scripts.
procs_per_node (int): if specified, use only this number of
processes per node. Default runs one process per core.
nodb (bool): if True, do not use the production DB.
out (str): Put task scripts and logs in this directory relative to
the production 'scripts' directory. Default puts task directory
in the main scripts directory.
debug (bool): if True, enable DEBUG log level in generated scripts.
db_postgres_user (str): If using postgres, connect as this
            user for read-only access.
Returns:
list: the generated script files
"""
ttypes = list(tasks_by_type.keys())
    if len(ttypes) == 0:
return None
jobname = ttypes[0]
if len(ttypes) > 1:
jobname = "{}-{}".format(ttypes[0], ttypes[-1])
proddir = os.path.abspath(io.specprod_root())
import datetime
now = datetime.datetime.now()
outtaskdir = "{}_{:%Y%m%d-%H%M%S-%f}".format(jobname, now)
if out is None:
outdir = os.path.join(proddir, io.get_pipe_rundir(),
io.get_pipe_scriptdir(), outtaskdir)
else:
outdir = os.path.join(proddir, io.get_pipe_rundir(),
io.get_pipe_scriptdir(), out, outtaskdir)
if not os.path.isdir(outdir):
os.makedirs(outdir)
mstr = "run"
if nersc is not None:
mstr = nersc
outscript = os.path.join(outdir, mstr)
outlog = os.path.join(outdir, mstr)
(db, opts) = pipeprod.load_prod("r", user=db_postgres_user)
if nodb:
db = None
ppn = None
if procs_per_node > 0:
ppn = procs_per_node
# FIXME: Add openmp / multiproc function to task classes and
# call them here.
scripts = None
if nersc is None:
# Not running at NERSC
scripts = scriptgen.batch_shell(tasks_by_type,
outscript, outlog, mpirun=mpi_run,
mpiprocs=mpi_procs, openmp=1, db=db)
else:
# Running at NERSC
scripts = scriptgen.batch_nersc(tasks_by_type,
outscript, outlog, jobname, nersc, nersc_queue,
nersc_maxtime, nersc_maxnodes, nodeprocs=ppn,
openmp=False, multiproc=False, db=db,
shifterimg=nersc_shifter, debug=debug)
return scripts
def script(taskfile, nersc=None, nersc_queue="regular",
nersc_maxtime=0, nersc_maxnodes=0, nersc_shifter=None, mpi_procs=1,
mpi_run="", procs_per_node=0, nodb=False, out=None, debug=False,
db_postgres_user="desidev_ro"):
"""Generate pipeline scripts for a taskfile.
This gets tasks from the taskfile and sorts them by type. Then it
generates the scripts.
Args:
taskfile (str): read tasks from this file (if not specified,
read from STDIN).
nersc (str): if not None, the name of the nersc machine to use
(cori-haswell | cori-knl).
nersc_queue (str): the name of the queue to use
(regular | debug | realtime).
nersc_maxtime (int): if specified, restrict the runtime to this
number of minutes.
nersc_maxnodes (int): if specified, restrict the job to use this
number of nodes.
nersc_shifter (str): the name of the shifter image to use.
mpi_run (str): if specified, and if not using NERSC, use this
command to launch MPI executables in the shell scripts. Default
is to not use MPI.
mpi_procs (int): if not using NERSC, the number of MPI processes
to use in shell scripts.
procs_per_node (int): if specified, use only this number of
processes per node. Default runs one process per core.
nodb (bool): if True, do not use the production DB.
out (str): Put task scripts and logs in this directory relative to
the production 'scripts' directory. Default puts task directory
in the main scripts directory.
debug (bool): if True, enable DEBUG log level in generated scripts.
db_postgres_user (str): If using postgres, connect as this
            user for read-only access.
Returns:
list: the generated script files
"""
tasks = pipeprod.task_read(taskfile)
scripts = list()
if len(tasks) > 0:
tasks_by_type = pipedb.task_sort(tasks)
scripts = gen_scripts(
tasks_by_type,
nersc=nersc,
nersc_queue=nersc_queue,
nersc_maxtime=nersc_maxtime,
nersc_maxnodes=nersc_maxnodes,
nersc_shifter=nersc_shifter,
mpi_procs=mpi_procs,
mpi_run=mpi_run,
procs_per_node=procs_per_node,
nodb=nodb,
out=out,
debug=debug,
db_postgres_user=db_postgres_user)
else:
import warnings
warnings.warn("Input task list is empty", RuntimeWarning)
return scripts
def run_scripts(scripts, deps=None, slurm=False):
"""Run job scripts with optional dependecies.
This either submits the jobs to the scheduler or simply runs them
in order with subprocess.
Args:
scripts (list): list of pathnames of the scripts to run.
deps (list): optional list of job IDs which are dependencies for
these scripts.
slurm (bool): if True use slurm to submit the jobs.
Returns:
list: the job IDs returned by the scheduler.
"""
import subprocess as sp
log = get_logger()
depstr = ""
    if deps is not None and len(deps) > 0:
depstr = "-d afterok"
for d in deps:
depstr = "{}:{}".format(depstr, d)
jobids = list()
if slurm:
# submit each job and collect the job IDs
for scr in scripts:
scom = "sbatch {} {}".format(depstr, scr)
#print("RUN SCRIPTS: {}".format(scom))
log.debug(time.asctime())
log.info(scom)
sout = sp.check_output(scom, shell=True, universal_newlines=True)
log.info(sout)
p = sout.split()
jid = re.sub(r'[^\d]', '', p[3])
jobids.append(jid)
else:
# run the scripts one at a time
for scr in scripts:
rcode = sp.call(scr, shell=True)
if rcode != 0:
log.warning("script {} had return code = {}".format(scr,
rcode))
return jobids
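# Example of the command generated above (job IDs are hypothetical):
#   sbatch -d afterok:1234:5678 /path/to/script.slurm
# i.e. the submitted job starts only after jobs 1234 and 5678 finish
# successfully.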
def run(taskfile, nosubmitted=False, depjobs=None, nersc=None,
nersc_queue="regular", nersc_maxtime=0, nersc_maxnodes=0,
nersc_shifter=None, mpi_procs=1, mpi_run="", procs_per_node=0, nodb=False,
out=None, debug=False):
"""Create job scripts and run them.
This gets tasks from the taskfile and sorts them by type. Then it
generates the scripts. Finally, it runs or submits those scripts
to the scheduler.
Args:
taskfile (str): read tasks from this file (if not specified,
read from STDIN).
nosubmitted (bool): if True, do not run jobs that have already
been submitted.
depjobs (list): list of job ID dependencies.
nersc (str): if not None, the name of the nersc machine to use
(cori-haswell | cori-knl).
nersc_queue (str): the name of the queue to use
(regular | debug | realtime).
nersc_maxtime (int): if specified, restrict the runtime to this
number of minutes.
nersc_maxnodes (int): if specified, restrict the job to use this
number of nodes.
nersc_shifter (str): the name of the shifter image to use.
mpi_run (str): if specified, and if not using NERSC, use this
command to launch MPI executables in the shell scripts. Default
is to not use MPI.
mpi_procs (int): if not using NERSC, the number of MPI processes
to use in shell scripts.
procs_per_node (int): if specified, use only this number of
processes per node. Default runs one process per core.
nodb (bool): if True, do not use the production DB.
out (str): Put task scripts and logs in this directory relative to
the production 'scripts' directory. Default puts task directory
in the main scripts directory.
debug (bool): if True, enable DEBUG log level in generated scripts.
Returns:
list: the job IDs returned by the scheduler.
"""
log = get_logger()
tasks = pipeprod.task_read(taskfile)
jobids = list()
if len(tasks) > 0:
tasks_by_type = pipedb.task_sort(tasks)
tasktypes = list(tasks_by_type.keys())
# We are packing everything into one job
scripts = gen_scripts(
tasks_by_type,
nersc=nersc,
nersc_queue=nersc_queue,
nersc_maxtime=nersc_maxtime,
nersc_maxnodes=nersc_maxnodes,
nersc_shifter=nersc_shifter,
mpi_procs=mpi_procs,
mpi_run=mpi_run,
procs_per_node=procs_per_node,
nodb=nodb,
out=out,
debug=debug)
log.info("wrote scripts {}".format(scripts))
deps = None
slurm = False
if nersc is not None:
slurm = True
if depjobs is not None:
deps = depjobs
# Run the jobs
if not nodb:
# We can use the DB, mark tasks as submitted.
if slurm:
dbpath = io.get_pipe_database()
db = pipedb.load_db(dbpath, mode="w")
for tt in tasktypes:
if (tt != "spectra") and (tt != "redshift"):
db.set_submitted_type(tt, tasks_by_type[tt])
jobids = run_scripts(scripts, deps=deps, slurm=slurm)
else:
import warnings
warnings.warn("Input task list is empty", RuntimeWarning)
return jobids
def chain(tasktypes, nightstr=None, states=None, expid=None, spec=None,
pack=False, nosubmitted=False, depjobs=None, nersc=None,
nersc_queue="regular", nersc_maxtime=0, nersc_maxnodes=0,
nersc_shifter=None, mpi_procs=1, mpi_run="", procs_per_node=0, nodb=False,
out=None, debug=False, dryrun=False):
"""Run a chain of jobs for multiple pipeline steps.
For the list of task types, get all ready tasks meeting the selection
criteria. Then either pack all tasks into one job or submit
each task type as its own job. Input job dependencies can be
specified, and dependencies are tracked between jobs in the chain.
Args:
tasktypes (list): list of valid task types.
nightstr (str): Comma separated (YYYYMMDD) or regex pattern. Only
nights matching these patterns will be considered.
        states (list): list of task states to select.
        expid (int): exposure ID to select.
        spec (int): spectrograph to select.
pack (bool): if True, pack all tasks into a single job.
nosubmitted (bool): if True, do not run jobs that have already
been submitted.
depjobs (list): list of job ID dependencies.
nersc (str): if not None, the name of the nersc machine to use
(cori-haswell | cori-knl).
nersc_queue (str): the name of the queue to use
(regular | debug | realtime).
nersc_maxtime (int): if specified, restrict the runtime to this
number of minutes.
nersc_maxnodes (int): if specified, restrict the job to use this
number of nodes.
nersc_shifter (str): the name of the shifter image to use.
mpi_run (str): if specified, and if not using NERSC, use this
command to launch MPI executables in the shell scripts. Default
is to not use MPI.
mpi_procs (int): if not using NERSC, the number of MPI processes
to use in shell scripts.
procs_per_node (int): if specified, use only this number of
processes per node. Default runs one process per core.
nodb (bool): if True, do not use the production DB.
out (str): Put task scripts and logs in this directory relative to
the production 'scripts' directory. Default puts task directory
in the main scripts directory.
debug (bool): if True, enable DEBUG log level in generated scripts.
dryrun (bool): if True, do not submit the jobs.
Returns:
list: the job IDs from the final step in the chain.
"""
log = get_logger()
machprops = None
if nersc is not None:
machprops = scriptgen.nersc_machine(nersc, nersc_queue)
if states is None:
states = task_states
else:
for s in states:
if s not in task_states:
raise RuntimeError("Task state '{}' is not valid".format(s))
ttypes = list()
for tt in pipetasks.base.default_task_chain:
if tt in tasktypes:
ttypes.append(tt)
if (machprops is not None) and (not pack):
if len(ttypes) > machprops["submitlimit"]:
log.error("Queue {} on machine {} limited to {} jobs."\
.format(nersc_queue, nersc,
machprops["submitlimit"]))
log.error("Use a different queue or shorter chains of tasks.")
raise RuntimeError("Too many jobs")
slurm = False
if nersc is not None:
slurm = True
dbpath = io.get_pipe_database()
db = pipedb.load_db(dbpath, mode="w")
allnights = io.get_nights(strip_path=True)
nights = pipeprod.select_nights(allnights, nightstr)
outdeps = None
indeps = None
if depjobs is not None:
indeps = depjobs
tasks_by_type = OrderedDict()
for tt in ttypes:
# Get the tasks. We select by state and submitted status.
tasks = get_tasks_type(db, tt, states, nights, expid=expid, spec=spec)
#print("CHAIN: ", tt, tasks)
if nosubmitted:
if (tt != "spectra") and (tt != "redshift"):
sb = db.get_submitted(tasks)
tasks = [ x for x in tasks if not sb[x] ]
#print("CHAIN: nosubmitted: ", tt, tasks)
if len(tasks) == 0:
import warnings
warnings.warn("Input task list for '{}' is empty".format(tt),
RuntimeWarning)
continue # might be tasks to do in other ttype
tasks_by_type[tt] = tasks
scripts = None
tscripts = None
if pack:
# We are packing everything into one job
scripts = gen_scripts(
tasks_by_type,
nersc=nersc,
nersc_queue=nersc_queue,
nersc_maxtime=nersc_maxtime,
nersc_maxnodes=nersc_maxnodes,
nersc_shifter=nersc_shifter,
mpi_procs=mpi_procs,
mpi_run=mpi_run,
procs_per_node=procs_per_node,
nodb=nodb,
out=out,
debug=debug)
if scripts is not None and len(scripts)>0 :
log.info("wrote scripts {}".format(scripts))
else:
# Generate individual scripts
tscripts = dict()
for tt in ttypes:
onetype = OrderedDict()
onetype[tt] = tasks_by_type[tt]
tscripts[tt] = gen_scripts(
onetype,
nersc=nersc,
nersc_queue=nersc_queue,
nersc_maxtime=nersc_maxtime,
nersc_maxnodes=nersc_maxnodes,
nersc_shifter=nersc_shifter,
mpi_procs=mpi_procs,
mpi_run=mpi_run,
procs_per_node=procs_per_node,
nodb=nodb,
out=out,
debug=debug)
if tscripts[tt] is not None :
log.info("wrote script {}".format(tscripts[tt]))
if dryrun :
log.warning("dry run: do not submit the jobs")
return None
# Run the jobs
if slurm:
for tt in ttypes:
if (tt != "spectra") and (tt != "redshift"):
if tt in tasks_by_type.keys() :
db.set_submitted_type(tt, tasks_by_type[tt])
outdeps = None
if pack:
# Submit one job
if scripts is not None and len(scripts)>0 :
outdeps = run_scripts(scripts, deps=indeps, slurm=slurm)
else:
# Loop over task types submitting jobs and tracking dependencies.
for tt in ttypes:
if tscripts[tt] is not None :
outdeps = run_scripts(tscripts[tt], deps=indeps,
slurm=slurm)
if outdeps is not None and len(outdeps) > 0:
indeps = outdeps
else:
indeps = None
return outdeps
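# Illustrative call (task type and machine names are examples only):
#   chain(["preproc", "psf"], nightstr="20191001", pack=True,
#         nersc="cori-haswell", nersc_queue="debug")
# packs both steps for that night into a single submitted job and returns
# its job ID(s).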
def status_color(state):
col = clr.ENDC
if state == "done":
col = clr.OKGREEN
elif state == "running":
col = clr.WARNING
elif state == "failed":
col = clr.FAIL
elif state == "ready":
col = clr.OKBLUE
return col
def status_task(task, ttype, state, logdir):
fields = pipetasks.base.task_classes[ttype].name_split(task)
tasklog = None
if "night" in fields:
tasklogdir = os.path.join(
logdir, io.get_pipe_nightdir(),
"{:08d}".format(fields["night"])
)
tasklog = os.path.join(
tasklogdir,
"{}.log".format(task)
)
elif "pixel" in fields:
tasklogdir = os.path.join(
logdir, "healpix",
io.healpix_subdirectory(fields["nside"],fields["pixel"])
)
tasklog = os.path.join(
tasklogdir,
"{}.log".format(task)
)
col = status_color(state)
print("Task {}".format(task))
print(
"State = {}{}{}".format(
col,
state,
clr.ENDC
)
)
    if tasklog is not None and os.path.isfile(tasklog):
print("Dumping task log {}".format(tasklog))
print("=========== Begin Log =============")
print("")
with open(tasklog, "r") as f:
logdata = f.read()
print(logdata)
print("")
print("============ End Log ==============")
print("", flush=True)
else:
print("Task log {} does not exist".format(tasklog), flush=True)
return
def status_taskname(tsklist):
for tsk in tsklist:
st = tsk[1]
col = status_color(st)
print(
" {:20s}: {}{}{}".format(tsk[0], col, st, clr.ENDC),
flush=True
)
def status_night_totals(tasktypes, nights, tasks, tskstates):
# Accumulate totals for each night and type
sep = "------------------+---------+---------+---------+---------+---------+"
ntlist = list()
nighttot = OrderedDict()
for tt in tasktypes:
if tt == "spectra" or tt == "redshift":
# This function only prints nightly tasks
continue
for tsk in tasks[tt]:
fields = pipetasks.base.task_classes[tt].name_split(tsk)
nt = fields["night"]
if nt not in nighttot:
nighttot[nt] = OrderedDict()
if tt not in nighttot[nt]:
nighttot[nt][tt] = OrderedDict()
for s in task_states:
nighttot[nt][tt][s] = 0
st = tskstates[tt][tsk]
nighttot[nt][tt][st] += 1
for nt, ttstates in nighttot.items():
ntstr = "{:08d}".format(nt)
if ntstr in nights:
ntlist.append(nt)
ntlist = list(sorted(ntlist))
for nt in ntlist:
ttstates = nighttot[nt]
ntstr = "{:08d}".format(nt)
if ntstr in nights:
header = "{:18s}|".format(ntstr)
for s in task_states:
col = status_color(s)
header = "{} {}{:8s}{}|".format(
header, col, s, clr.ENDC
)
print(sep)
print(header)
print(sep)
for tt, totst in ttstates.items():
line = " {:16s}|".format(tt)
for s in task_states:
line = "{}{:9d}|".format(line, totst[s])
print(line)
print("", flush=True)
def status_pixel_totals(tasktypes, tasks, tskstates):
# Accumulate totals for each type
sep = "------------------+---------+---------+---------+---------+---------+"
pixtot = OrderedDict()
for tt in tasktypes:
if (tt != "spectra") and (tt != "redshift"):
# This function only prints pixel tasks
continue
for tsk in tasks[tt]:
if tt not in pixtot:
pixtot[tt] = OrderedDict()
for s in task_states:
pixtot[tt][s] = 0
st = tskstates[tt][tsk]
pixtot[tt][st] += 1
header = "{:18s}|".format("Pixel Tasks")
for s in task_states:
col = status_color(s)
header = "{} {}{:8s}{}|".format(
header, col, s, clr.ENDC
)
print(sep)
print(header)
print(sep)
for tt, totst in pixtot.items():
line = " {:16s}|".format(tt)
for s in task_states:
line = "{}{:9d}|".format(line, totst[s])
print(line)
print("", flush=True)
def status_night_tasks(tasktypes, nights, tasks, tskstates):
# Sort the tasks into nights
nighttasks = OrderedDict()
ntlist = list()
for tt in tasktypes:
if tt == "spectra" or tt == "redshift":
# This function only prints nightly tasks
continue
for tsk in tasks[tt]:
fields = pipetasks.base.task_classes[tt].name_split(tsk)
nt = fields["night"]
if nt not in nighttasks:
nighttasks[nt] = list()
nighttasks[nt].append((tsk, tskstates[tt][tsk]))
for nt, tsklist in nighttasks.items():
ntstr = "{:08d}".format(nt)
if ntstr in nights:
ntlist.append(nt)
ntlist = list(sorted(ntlist))
for nt in ntlist:
tsklist = nighttasks[nt]
ntstr = "{:08d}".format(nt)
if ntstr in nights:
print(nt)
status_taskname(tsklist)
def status_pixel_tasks(tasktypes, tasks, tskstates):
for tt in tasktypes:
tsklist = list()
if (tt != "spectra") and (tt != "redshift"):
# This function only prints pixel tasks
continue
for tsk in tasks[tt]:
tsklist.append((tsk, tskstates[tt][tsk]))
print(tt)
status_taskname(tsklist)
def status_summary(tasktypes, nights, tasks, tskstates):
sep = "----------------+---------+---------+---------+---------+---------+"
hline = "-----------------------------------------------"
print(sep)
header_state = "{:16s}|".format(" Task Type")
for s in task_states:
col = status_color(s)
header_state = "{} {}{:8s}{}|".format(
header_state, col, s, clr.ENDC
)
print(header_state)
print(sep)
for tt in tasktypes:
line = "{:16s}|".format(tt)
for s in task_states:
            tsum = sum(1 for y in tskstates[tt].values() if y == s)
line = "{}{:9d}|".format(line, tsum)
print(line, flush=True)
def status(task=None, tasktypes=None, nightstr=None, states=None,
expid=None, spec=None, db_postgres_user="desidev_ro"):
"""Check the status of pipeline tasks.
Args:
Returns:
None
"""
dbpath = io.get_pipe_database()
db = pipedb.load_db(dbpath, mode="r", user=db_postgres_user)
rundir = io.get_pipe_rundir()
logdir = os.path.join(rundir, io.get_pipe_logdir())
tasks = OrderedDict()
summary = False
if (tasktypes is None) and (nightstr is None):
summary = True
if task is None:
ttypes = None
if tasktypes is not None:
ttypes = list()
for tt in pipetasks.base.default_task_chain:
if tt in tasktypes:
ttypes.append(tt)
else:
ttypes = list(pipetasks.base.default_task_chain)
if states is None:
states = task_states
else:
for s in states:
if s not in task_states:
raise RuntimeError("Task state '{}' is not valid".format(s))
allnights = io.get_nights(strip_path=True)
nights = pipeprod.select_nights(allnights, nightstr)
for tt in ttypes:
tasks[tt] = get_tasks(
db, [tt], nights, states=states, expid=expid, spec=spec
)
else:
ttypes = [pipetasks.base.task_type(task)]
tasks[ttypes[0]] = [task]
tstates = OrderedDict()
for typ, tsks in tasks.items():
tstates[typ] = pipedb.check_tasks(tsks, db=db)
if len(ttypes) == 1 and len(tasks[ttypes[0]]) == 1:
# Print status of this specific task
thistype = ttypes[0]
thistask = tasks[thistype][0]
status_task(thistask, thistype, tstates[thistype][thistask], logdir)
else:
if len(ttypes) > 1 and len(nights) > 1:
# We have multiple nights and multiple task types.
# Just print totals.
if summary:
status_summary(ttypes, nights, tasks, tstates)
else:
status_night_totals(ttypes, nights, tasks, tstates)
status_pixel_totals(ttypes, tasks, tstates)
elif len(ttypes) > 1:
# Multiple task types for one night. Print the totals for each
# task type.
thisnight = nights[0]
status_night_totals(ttypes, nights, tasks, tstates)
elif len(nights) > 1:
# We have just one task type, print the state totals for each night
# OR the full task list for redshift or spectra tasks.
thistype = ttypes[0]
print("Task type {}".format(thistype))
if thistype == "spectra" or thistype == "redshift":
status_pixel_tasks(ttypes, tasks, tstates)
else:
status_night_totals(ttypes, nights, tasks, tstates)
else:
# We have one type and one night, print the full state of every
# task.
thistype = ttypes[0]
thisnight = nights[0]
print("Task type {}".format(thistype))
status_night_tasks(ttypes, nights, tasks, tstates)
status_pixel_tasks(ttypes, tasks, tstates)
return
|
bsd-3-clause
| 5,987,236,070,991,762,000 | 33.75432 | 91 | 0.581543 | false |
rwstauner/python-photo-shrinker
|
shrinkphotos.py
|
1
|
2783
|
#!/usr/bin/env python
"shrink photos so that it doesn't take 8 days to transfer them"
# Copyright (c) 2012 Randy Stauner
# Licensed under the MIT License: http://opensource.org/licenses/MIT
from PIL import Image
import os
from os.path import join, isfile, isdir, dirname, basename
import re
import sys
# configuration
DIR_SUFFIX = '-shrunk'
FILE_SUFFIX = '-shrunk'
# PIC_RE should capture the file extension;
# Later FILE_SUFFIX will be inserted before it
PIC_RE = re.compile(r'(\.jpe?g)$', re.I)
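# Illustrative example of the substitution performed later in recurse() (the
# filename is hypothetical): with the defaults above,
#   PIC_RE.sub(r'%s\1' % FILE_SUFFIX, 'IMG_0001.jpg')
# returns 'IMG_0001-shrunk.jpg', i.e. the suffix lands before the extension.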
QUALITY = 85
RESOLUTION = (1600, 1200)
def shrinkphotos(top, src, dest):
"shrink each file in top/src/* and save in top/dest/*"
os.chdir(top)
src_full, dest_full = [join(top, x) for x in [src, dest]]
print "shrinking images found in\n %s\nand saving to\n %s" % \
(src_full, dest_full)
# Just a warning; Allow the script to be re-run.
if isdir(dest_full):
print "destination %s already exists" % (dest_full)
if raw_input("\ncontinue? (y/n): ").lower() == 'y':
recurse(src_full, dest_full)
def recurse(src, dest):
"down, down, down"
# I suppose this could be allowed as long as there is a FILE_SUFFIX
if src == dest:
raise "source and destination directories should not be the same!"
# os.walk descends by itself and then it's hard to replace src with dest
# so we recurse manually
files = os.listdir(src)
files.sort()
print " - %s: %d files" % (src, len(files))
for name in files:
if isfile(join(src, name)):
# If the file name matches the re (has the right extension)
if PIC_RE.search(name):
# src/file.jpg => dest/file-shrunk.jpg
dest_file = join(dest, PIC_RE.sub(r'%s\1' % FILE_SUFFIX, name))
# skip if already exists
if not isfile(dest_file):
# Ensure destination directory exists
if not isdir(dest):
os.makedirs(dest)
thumbnail(join(src, name), dest_file)
# descend to the next directory
elif isdir(join(src, name)):
recurse(join(src, name), join(dest, name))
def thumbnail(src, dest):
"shrink src and save to dest"
img = Image.open(src)
# Ensure image is not larger than RESOLUTION.
img.thumbnail(RESOLUTION)
# Set compression level on the new image.
img.save(dest, quality=QUALITY)
def shrinkarg(arg):
"use command line arg as source dir"
if arg == '' or arg == '.':
arg = os.getcwd()
# Strip trailing slash, etc
arg = os.path.normpath(arg)
top, src = dirname(arg), basename(arg)
dest = "%s%s" % (src, DIR_SUFFIX)
shrinkphotos(top, src, dest)
if __name__ == "__main__":
# If a directory is specified on command line
if len(sys.argv) > 1:
shrinkarg(sys.argv[1])
# Else use the directory the script is in
else:
shrinkarg(dirname(sys.argv[0]))
|
mit
| -1,206,501,911,200,051,200 | 26.284314 | 74 | 0.651096 | false |
50wu/gpdb
|
contrib/unaccent/generate_unaccent_rules.py
|
7
|
12976
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# This script builds unaccent.rules on standard output when given the
# contents of UnicodeData.txt [1] and Latin-ASCII.xml [2] given as
# arguments. Optionally includes ligature expansion and Unicode CLDR
# Latin-ASCII transliterator, enabled by default, this can be disabled
# with "--no-ligatures-expansion" command line option.
#
# The approach is to use the Unicode decomposition data to identify
# precomposed codepoints that are equivalent to a ligature of several
# letters, or a base letter with any number of diacritical marks.
#
# This approach handles most letters with diacritical marks and some
# ligatures. However, several characters (notably a majority of
# ligatures) don't have decomposition. To handle all these cases, one can
# use a standard Unicode transliterator available in Common Locale Data
# Repository (CLDR): Latin-ASCII. This transliterator associates Unicode
# characters to ASCII-range equivalent. Unless "--no-ligatures-expansion"
# option is enabled, the XML file of this transliterator [2] -- given as a
# command line argument -- will be parsed and used.
#
# Ideally you should use the latest release for each data set. For
# Latin-ASCII.xml, the latest data sets released can be browsed directly
# via [3]. Note that this script is compatible with at least release 29.
#
# [1] http://unicode.org/Public/8.0.0/ucd/UnicodeData.txt
# [2] http://unicode.org/cldr/trac/export/14746/tags/release-34/common/transforms/Latin-ASCII.xml
# [3] https://unicode.org/cldr/trac/browser/tags
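#
# As a concrete illustration, the UnicodeData.txt entry for the precomposed
# letter U+00C0 carries a decomposition in its sixth field (line truncated
# with "..." for brevity):
#
#   00C0;LATIN CAPITAL LETTER A WITH GRAVE;Lu;0;L;0041 0300;...
#
# i.e. "À" = plain "A" (U+0041) + COMBINING GRAVE ACCENT (U+0300), which is
# why the generated rule maps U+00C0 to "A".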
# BEGIN: Python 2/3 compatibility - remove when Python 2 compatibility dropped
# The approach is to be Python3 compatible with Python2 "backports".
from __future__ import print_function
from __future__ import unicode_literals
import codecs
import sys
if sys.version_info[0] <= 2:
# Encode stdout as UTF-8, so we can just print to it
sys.stdout = codecs.getwriter('utf8')(sys.stdout)
# Map Python 2's chr to unichr
chr = unichr
# Python 2 and 3 compatible bytes call
def bytes(source, encoding='ascii', errors='strict'):
return source.encode(encoding=encoding, errors=errors)
# END: Python 2/3 compatibility - remove when Python 2 compatibility dropped
import re
import argparse
import xml.etree.ElementTree as ET
# The ranges of Unicode characters that we consider to be "plain letters".
# For now we are being conservative by including only Latin and Greek. This
# could be extended in future based on feedback from people with relevant
# language knowledge.
PLAIN_LETTER_RANGES = ((ord('a'), ord('z')), # Latin lower case
(ord('A'), ord('Z')), # Latin upper case
(0x03b1, 0x03c9), # GREEK SMALL LETTER ALPHA, GREEK SMALL LETTER OMEGA
(0x0391, 0x03a9)) # GREEK CAPITAL LETTER ALPHA, GREEK CAPITAL LETTER OMEGA
# Combining marks follow a "base" character, and result in a composite
# character. Example: "U&'A\0300'" produces "À". There are three types of
# combining marks: enclosing (Me), non-spacing combining (Mn), spacing
# combining (Mc). We identify the ranges of marks we feel safe removing.
# References:
# https://en.wikipedia.org/wiki/Combining_character
# https://www.unicode.org/charts/PDF/U0300.pdf
# https://www.unicode.org/charts/PDF/U20D0.pdf
COMBINING_MARK_RANGES = ((0x0300, 0x0362), # Mn: Accents, IPA
(0x20dd, 0x20E0), # Me: Symbols
(0x20e2, 0x20e4),) # Me: Screen, keycap, triangle
def print_record(codepoint, letter):
if letter:
output = chr(codepoint) + "\t" + letter
else:
output = chr(codepoint)
print(output)
class Codepoint:
def __init__(self, id, general_category, combining_ids):
self.id = id
self.general_category = general_category
self.combining_ids = combining_ids
def is_mark_to_remove(codepoint):
"""Return true if this is a combining mark to remove."""
if not is_mark(codepoint):
return False
for begin, end in COMBINING_MARK_RANGES:
if codepoint.id >= begin and codepoint.id <= end:
return True
return False
def is_plain_letter(codepoint):
"""Return true if codepoint represents a "plain letter"."""
for begin, end in PLAIN_LETTER_RANGES:
if codepoint.id >= begin and codepoint.id <= end:
return True
return False
def is_mark(codepoint):
"""Returns true for diacritical marks (combining codepoints)."""
return codepoint.general_category in ("Mn", "Me", "Mc")
def is_letter_with_marks(codepoint, table):
"""Returns true for letters combined with one or more marks."""
# See http://www.unicode.org/reports/tr44/tr44-14.html#General_Category_Values
# Letter may have no combining characters, in which case it has
# no marks.
if len(codepoint.combining_ids) == 1:
return False
    # If none of the combining codepoints after the base is a mark,
    # this is not a letter with marks.
if any(is_mark(table[i]) for i in codepoint.combining_ids[1:]) is False:
return False
# Check if the base letter of this letter has marks.
codepoint_base = codepoint.combining_ids[0]
if (is_plain_letter(table[codepoint_base]) is False and \
is_letter_with_marks(table[codepoint_base], table) is False):
return False
return True
def is_letter(codepoint, table):
"""Return true for letter with or without diacritical marks."""
return is_plain_letter(codepoint) or is_letter_with_marks(codepoint, table)
def get_plain_letter(codepoint, table):
"""Return the base codepoint without marks. If this codepoint has more
than one combining character, do a recursive lookup on the table to
find out its plain base letter."""
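    # Worked example (codepoints from the Unicode standard): U+01DE, LATIN
    # CAPITAL LETTER A WITH DIAERESIS AND MACRON, decomposes to U+00C4 plus a
    # combining mark; U+00C4 in turn decomposes to plain U+0041 ("A"), so the
    # recursion bottoms out and returns the codepoint for "A".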
if is_letter_with_marks(codepoint, table):
if len(table[codepoint.combining_ids[0]].combining_ids) > 1:
return get_plain_letter(table[codepoint.combining_ids[0]], table)
elif is_plain_letter(table[codepoint.combining_ids[0]]):
return table[codepoint.combining_ids[0]]
# Should not come here
assert(False)
elif is_plain_letter(codepoint):
return codepoint
# Should not come here
assert(False)
def is_ligature(codepoint, table):
"""Return true for letters combined with letters."""
return all(is_letter(table[i], table) for i in codepoint.combining_ids)
def get_plain_letters(codepoint, table):
"""Return a list of plain letters from a ligature."""
assert(is_ligature(codepoint, table))
return [get_plain_letter(table[id], table) for id in codepoint.combining_ids]
def parse_cldr_latin_ascii_transliterator(latinAsciiFilePath):
"""Parse the XML file and return a set of tuples (src, trg), where "src"
is the original character and "trg" the substitute."""
charactersSet = set()
# RegEx to parse rules
rulePattern = re.compile(r'^(?:(.)|(\\u[0-9a-fA-F]{4})) \u2192 (?:\'(.+)\'|(.+)) ;')
# construct tree from XML
transliterationTree = ET.parse(latinAsciiFilePath)
transliterationTreeRoot = transliterationTree.getroot()
# Fetch all the transliteration rules. Since release 29 of Latin-ASCII.xml
# all the transliteration rules are located in a single tRule block with
# all rules separated into separate lines.
blockRules = transliterationTreeRoot.findall("./transforms/transform/tRule")
assert(len(blockRules) == 1)
# Split the block of rules into one element per line.
rules = blockRules[0].text.splitlines()
# And finish the processing of each individual rule.
for rule in rules:
matches = rulePattern.search(rule)
        # The regular expression captures four groups corresponding
# to the characters.
#
# Group 1: plain "src" char. Empty if group 2 is not.
# Group 2: unicode-escaped "src" char (e.g. "\u0110"). Empty if group 1 is not.
#
# Group 3: plain "trg" char. Empty if group 4 is not.
# Group 4: plain "trg" char between quotes. Empty if group 3 is not.
if matches is not None:
src = matches.group(1) if matches.group(1) is not None else bytes(matches.group(2), 'UTF-8').decode('unicode-escape')
trg = matches.group(3) if matches.group(3) is not None else matches.group(4)
# "'" and """ are escaped
trg = trg.replace("\\'", "'").replace('\\"', '"')
# the parser of unaccent only accepts non-whitespace characters
# for "src" and "trg" (see unaccent.c)
if not src.isspace() and not trg.isspace():
charactersSet.add((ord(src), trg))
return charactersSet
def special_cases():
"""Returns the special cases which are not handled by other methods"""
charactersSet = set()
# Cyrillic
charactersSet.add((0x0401, u"\u0415")) # CYRILLIC CAPITAL LETTER IO
charactersSet.add((0x0451, u"\u0435")) # CYRILLIC SMALL LETTER IO
# Symbols of "Letterlike Symbols" Unicode Block (U+2100 to U+214F)
charactersSet.add((0x2103, u"\xb0C")) # DEGREE CELSIUS
charactersSet.add((0x2109, u"\xb0F")) # DEGREE FAHRENHEIT
charactersSet.add((0x2117, "(P)")) # SOUND RECORDING COPYRIGHT
return charactersSet
def main(args):
# http://www.unicode.org/reports/tr44/tr44-14.html#Character_Decomposition_Mappings
decomposition_type_pattern = re.compile(" *<[^>]*> *")
table = {}
all = []
# unordered set for ensure uniqueness
charactersSet = set()
# read file UnicodeData.txt
unicodeDataFile = open(args.unicodeDataFilePath, 'r')
# read everything we need into memory
for line in unicodeDataFile:
fields = line.split(";")
if len(fields) > 5:
# http://www.unicode.org/reports/tr44/tr44-14.html#UnicodeData.txt
general_category = fields[2]
decomposition = fields[5]
decomposition = re.sub(decomposition_type_pattern, ' ', decomposition)
id = int(fields[0], 16)
combining_ids = [int(s, 16) for s in decomposition.split(" ") if s != ""]
codepoint = Codepoint(id, general_category, combining_ids)
table[id] = codepoint
all.append(codepoint)
# walk through all the codepoints looking for interesting mappings
for codepoint in all:
if codepoint.general_category.startswith('L') and \
len(codepoint.combining_ids) > 1:
if is_letter_with_marks(codepoint, table):
charactersSet.add((codepoint.id,
chr(get_plain_letter(codepoint, table).id)))
elif args.noLigaturesExpansion is False and is_ligature(codepoint, table):
charactersSet.add((codepoint.id,
"".join(chr(combining_codepoint.id)
for combining_codepoint \
in get_plain_letters(codepoint, table))))
elif is_mark_to_remove(codepoint):
charactersSet.add((codepoint.id, None))
# add CLDR Latin-ASCII characters
if not args.noLigaturesExpansion:
charactersSet |= parse_cldr_latin_ascii_transliterator(args.latinAsciiFilePath)
charactersSet |= special_cases()
# sort for more convenient display
charactersList = sorted(charactersSet, key=lambda characterPair: characterPair[0])
for characterPair in charactersList:
print_record(characterPair[0], characterPair[1])
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='This script builds unaccent.rules on standard output when given the contents of UnicodeData.txt and Latin-ASCII.xml given as arguments.')
parser.add_argument("--unicode-data-file", help="Path to formatted text file corresponding to UnicodeData.txt. See <http://unicode.org/Public/8.0.0/ucd/UnicodeData.txt>.", type=str, required=True, dest='unicodeDataFilePath')
parser.add_argument("--latin-ascii-file", help="Path to XML file from Unicode Common Locale Data Repository (CLDR) corresponding to Latin-ASCII transliterator (Latin-ASCII.xml). See <http://unicode.org/cldr/trac/export/12304/tags/release-28/common/transforms/Latin-ASCII.xml>.", type=str, dest='latinAsciiFilePath')
parser.add_argument("--no-ligatures-expansion", help="Do not expand ligatures and do not use Unicode CLDR Latin-ASCII transliterator. By default, this option is not enabled and \"--latin-ascii-file\" argument is required. If this option is enabled, \"--latin-ascii-file\" argument is optional and ignored.", action="store_true", dest='noLigaturesExpansion')
args = parser.parse_args()
if args.noLigaturesExpansion is False and args.latinAsciiFilePath is None:
sys.stderr.write('You must specify the path to Latin-ASCII transliterator file with \"--latin-ascii-file\" option or use \"--no-ligatures-expansion\" option. Use \"-h\" option for help.')
sys.exit(1)
main(args)
|
apache-2.0
| 150,941,420,492,318,900 | 43.896194 | 361 | 0.677996 | false |
pkdevbox/trac
|
trac/ticket/tests/admin.py
|
1
|
13172
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
import unittest
from trac.resource import ResourceNotFound
from trac.test import EnvironmentStub, Mock, MockPerm, locale_en
from trac.ticket.admin import ComponentAdminPanel, MilestoneAdminPanel, \
PriorityAdminPanel, ResolutionAdminPanel, \
SeverityAdminPanel, TicketTypeAdminPanel, \
VersionAdminPanel
from trac.ticket.model import Component, Milestone, Priority, Resolution,\
Severity, Type, Version
from trac.util.datefmt import utc
from trac.web.api import RequestDone, _RequestArgs
class BaseTestCase(unittest.TestCase):
def setUp(self):
self.env = EnvironmentStub(default_data=True)
def tearDown(self):
self.env.reset_db()
def _create_request(self, authname='anonymous', **kwargs):
kw = {'path_info': '/', 'perm': MockPerm(), 'args': _RequestArgs(),
'href': self.env.href, 'abs_href': self.env.abs_href,
'tz': utc, 'locale': None, 'lc_time': locale_en,
'session': {}, 'authname': authname,
'chrome': {'notices': [], 'warnings': []},
'method': None, 'get_header': lambda v: None, 'is_xhr': False,
'form_token': None}
if 'args' in kwargs:
kw['args'].update(kwargs.pop('args'))
kw.update(kwargs)
def redirect(url, permanent=False):
raise RequestDone
return Mock(add_redirect_listener=lambda x: [].append(x),
redirect=redirect, **kw)
class ComponentAdminPanelTestCase(BaseTestCase):
def test_add_component(self):
cap = ComponentAdminPanel(self.env)
name, owner = 'component3', 'user3'
req = self._create_request(method='POST',
args={'name': name, 'owner': owner,
'add': True})
self.assertRaises(ResourceNotFound, Component, self.env, name)
self.assertRaises(RequestDone, cap.render_admin_panel, req,
'ticket', 'component', None)
component = Component(self.env, name)
self.assertEqual(name, component.name)
self.assertEqual(owner, component.owner)
def test_remove_component(self):
cap = ComponentAdminPanel(self.env)
name = 'component2'
req = self._create_request(method='POST',
args={'sel': name, 'remove': True})
component = Component(self.env, name)
self.assertEqual(name, component.name)
self.assertEqual('somebody', component.owner)
self.assertRaises(RequestDone, cap.render_admin_panel, req,
'ticket', 'component', None)
self.assertRaises(ResourceNotFound, Component, self.env, name)
def test_remove_multiple_components(self):
cap = ComponentAdminPanel(self.env)
names = ['component1', 'component2']
req = self._create_request(method='POST',
args={'sel': names, 'remove': True})
for name in names:
component = Component(self.env, name)
self.assertEqual(name, component.name)
self.assertEqual('somebody', component.owner)
self.assertRaises(RequestDone, cap.render_admin_panel, req,
'ticket', 'component', None)
for name in names:
self.assertRaises(ResourceNotFound, Component, self.env, name)
def test_set_default_component(self):
name = 'component2'
config_key = 'default_component'
cap = ComponentAdminPanel(self.env)
req = self._create_request(method='POST',
args={'default': name, 'apply': True})
self.assertRaises(RequestDone, cap.render_admin_panel, req,
'ticket', 'component', None)
self.assertEqual(name, self.env.config.get('ticket', config_key))
def test_remove_default_component(self):
name = 'component2'
cap = ComponentAdminPanel(self.env)
config_key = 'default_component'
self.env.config.set('ticket', config_key, name)
req = self._create_request(method='POST',
args={'sel': name, 'remove': True})
self.assertRaises(RequestDone, cap.render_admin_panel, req,
'ticket', 'component', None)
self.assertEqual('', self.env.config.get('ticket', config_key))
class MilestoneAdminPanelTestCase(BaseTestCase):
def test_add_milestone(self):
name = 'milestone5'
map = MilestoneAdminPanel(self.env)
req = self._create_request(method='POST',
args={'name': name, 'add': True})
self.assertRaises(ResourceNotFound, Milestone, self.env, name)
self.assertRaises(RequestDone, map.render_admin_panel, req,
'ticket', 'milestone', None)
milestone = Milestone(self.env, name)
self.assertEqual(name, milestone.name)
def test_set_default_milestone(self):
name = 'milestone2'
config_key = 'default_milestone'
map = MilestoneAdminPanel(self.env)
req = self._create_request(method='POST',
args={'ticket_default': name,
'apply': True})
self.assertRaises(RequestDone, map.render_admin_panel, req,
'ticket', 'milestone', None)
self.assertEqual(name, self.env.config.get('ticket', config_key))
def test_set_default_retarget_to(self):
name = 'milestone2'
config_key = 'default_retarget_to'
map = MilestoneAdminPanel(self.env)
req = self._create_request(method='POST',
args={'retarget_default': name,
'apply': True})
self.assertRaises(RequestDone, map.render_admin_panel, req,
'ticket', 'milestone', None)
self.assertEqual(name, self.env.config.get('milestone', config_key))
def test_remove_default_milestone(self):
name = 'milestone2'
map = MilestoneAdminPanel(self.env)
self.env.config.set('ticket', 'default_milestone', 'milestone2')
self.env.config.set('milestone', 'default_retarget_to', 'milestone2')
req = self._create_request(method='POST',
args={'sel': name,
'remove': True})
self.assertRaises(RequestDone, map.render_admin_panel, req,
'ticket', 'milestone', None)
self.assertEqual('', self.env.config.get('ticket',
'default_milestone'))
self.assertEqual('', self.env.config.get('milestone',
'default_retarget_to'))
class AbstractEnumTestCase(BaseTestCase):
type = None
cls = None
def _test_add(self, panel, name):
req = self._create_request(method='POST',
args={'name': name, 'add': True})
self.assertRaises(ResourceNotFound, self.cls, self.env, name)
self.assertRaises(RequestDone, panel.render_admin_panel, req,
'ticket', self.type, None)
item = self.cls(self.env, name)
self.assertEqual(name, item.name)
def _test_set_default(self, panel, name):
config_key = 'default_' + self.type
req = self._create_request(method='POST',
args={'default': name, 'apply': True})
for item in self.cls.select(self.env):
req.args.update({'value_' + str(item.value): str(item.value)})
self.assertRaises(RequestDone, panel.render_admin_panel, req,
'ticket', self.type, None)
self.assertEqual(name, self.env.config.get('ticket', config_key))
def _test_remove_default(self, panel, name):
config_key = 'default_' + self.type
self.env.config.set('ticket', config_key, name)
req = self._create_request(method='POST',
args={'sel': name, 'remove': True})
self.assertRaises(RequestDone, panel.render_admin_panel, req,
'ticket', self.type, None)
self.assertEqual('', self.env.config.get('ticket', config_key))
class PriorityAdminPanelTestCase(AbstractEnumTestCase):
type = 'priority'
cls = Priority
def test_add_priority(self):
ap = PriorityAdminPanel(self.env)
self._test_add(ap, 'priority 1')
def test_set_default_priority(self):
ap = PriorityAdminPanel(self.env)
self._test_set_default(ap, 'critical')
def test_remove_default_priority(self):
ap = PriorityAdminPanel(self.env)
self._test_remove_default(ap, 'critical')
class ResolutionAdminPanelTestCase(AbstractEnumTestCase):
type = 'resolution'
cls = Resolution
def test_add_resolution(self):
ap = ResolutionAdminPanel(self.env)
self._test_add(ap, 'resolution 1')
def test_set_default_resolution(self):
ap = ResolutionAdminPanel(self.env)
self._test_set_default(ap, 'invalid')
def test_remove_default_resolution(self):
ap = ResolutionAdminPanel(self.env)
self._test_remove_default(ap, 'invalid')
class SeverityAdminPanelTestCase(AbstractEnumTestCase):
type = 'severity'
cls = Severity
def test_add_severity(self):
ap = SeverityAdminPanel(self.env)
self._test_add(ap, 'severity 1')
def test_set_default_severity(self):
s = Severity(self.env)
s.name = 'severity 1'
s.insert()
ap = SeverityAdminPanel(self.env)
self._test_set_default(ap, 'severity 1')
def test_remove_default_severity(self):
s = Severity(self.env)
s.name = 'severity 1'
s.insert()
ap = SeverityAdminPanel(self.env)
self._test_remove_default(ap, 'severity 1')
class TicketTypeAdminPanelTestCase(AbstractEnumTestCase):
type = 'type'
cls = Type
def test_add_type(self):
ap = TicketTypeAdminPanel(self.env)
self._test_add(ap, 'improvement')
def test_set_default_type(self):
ap = TicketTypeAdminPanel(self.env)
self._test_set_default(ap, 'task')
def test_remove_default_type(self):
ap = TicketTypeAdminPanel(self.env)
self._test_remove_default(ap, 'task')
class VersionAdminPanelTestCase(BaseTestCase):
def test_add_version(self):
name = '3.0'
ap = VersionAdminPanel(self.env)
req = self._create_request(method='POST',
args={'name': name, 'add': True})
self.assertRaises(ResourceNotFound, Version, self.env, name)
self.assertRaises(RequestDone, ap.render_admin_panel, req,
'ticket', 'version', None)
version = Version(self.env, name)
self.assertEqual(name, version.name)
def test_set_default_version(self):
name = '1.0'
ap = VersionAdminPanel(self.env)
config_key = 'default_version'
req = self._create_request(method='POST',
args={'default': name, 'apply': True})
self.assertRaises(RequestDone, ap.render_admin_panel, req,
'ticket', 'version', None)
self.assertEqual(name, self.env.config.get('ticket', config_key))
def test_remove_default_version(self):
name = '1.0'
ap = VersionAdminPanel(self.env)
config_key = 'default_version'
self.env.config.set('ticket', config_key, name)
req = self._create_request(method='POST',
args={'sel': name, 'remove': True})
self.assertRaises(RequestDone, ap.render_admin_panel, req,
'ticket', 'version', None)
self.assertEqual(self.env.config.get('ticket', config_key), '')
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(ComponentAdminPanelTestCase))
suite.addTest(unittest.makeSuite(MilestoneAdminPanelTestCase))
suite.addTest(unittest.makeSuite(PriorityAdminPanelTestCase))
suite.addTest(unittest.makeSuite(ResolutionAdminPanelTestCase))
suite.addTest(unittest.makeSuite(SeverityAdminPanelTestCase))
suite.addTest(unittest.makeSuite(TicketTypeAdminPanelTestCase))
suite.addTest(unittest.makeSuite(VersionAdminPanelTestCase))
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
bsd-3-clause
| -7,359,954,567,975,773,000 | 37.17971 | 77 | 0.592621 | false |
clarkerubber/irwin
|
modules/irwin/training/Evaluation.py
|
1
|
3292
|
from default_imports import *
from conf.ConfigWrapper import ConfigWrapper
from modules.game.Player import Player
from modules.game.GameStore import GameStore
from modules.game.AnalysedGame import GameAnalysedGame
from modules.irwin.PlayerReport import PlayerReport
class Evaluation(NamedTuple('Evaluation', [
('irwin', 'Irwin'),
('config', ConfigWrapper)
])):
    def getPlayerOutcomes(self, engine: bool, batchSize: int) -> Opt[int]: # yields one outcome code per player, or None when the player has no game reports.
for player in self.irwin.env.playerDB.engineSample(engine, batchSize):
analysedGames = self.irwin.env.analysedGameDB.byPlayerId(player.id)
games = self.irwin.env.gameDB.byIds([ag.gameId for ag in analysedGames])
predictions = self.irwin.analysedGameModel.predict([GameAnalysedGame(ag, g) for ag, g in zip(analysedGames, games) if ag.gameLength() <= 60])
playerReport = PlayerReport.new(player, zip(analysedGames, predictions))
if len(playerReport.gameReports) > 0:
yield Evaluation.outcome(
playerReport.activation,
92, 64, engine)
else:
yield None
def evaluate(self):
outcomes = []
        for engine in (True, False):
            for o in self.getPlayerOutcomes(engine, self.config['irwin testing eval_size']):
                if o is not None:
                    outcomes.append(o)
                Evaluation.performance(outcomes)
@staticmethod
def performance(outcomes):
tp = len([a for a in outcomes if a == 1])
fn = len([a for a in outcomes if a == 2])
tn = len([a for a in outcomes if a == 3])
fp = len([a for a in outcomes if a == 4])
tr = len([a for a in outcomes if a == 5])
fr = len([a for a in outcomes if a == 6])
cheatsLen = max(1, tp + fn + tr)
legitsLen = max(1, fp + tn + fr)
logging.warning("True positive: " + str(tp) + " (" + str(int(100*tp/cheatsLen)) + "%)")
logging.warning("False negative: " + str(fn) + " (" + str(int(100*fn/cheatsLen)) + "%)")
logging.warning("True negative: " + str(tn) + " (" + str(int(100*tn/legitsLen)) + "%)")
logging.warning("False positive: " + str(fp) + " (" + str(int(100*fp/legitsLen)) + "%)")
logging.warning("True Report: " + str(tr) + " (" + str(int(100*tr/cheatsLen)) + "%)")
logging.warning("False Report: " + str(fr) + " (" + str(int(100*fr/legitsLen)) + "%)")
logging.warning("Cheats coverage: " + str(int(100*(tp+tr)/cheatsLen)) + "%")
logging.warning("Legits coverage: " + str(int(100*(tn)/legitsLen)) + "%")
@staticmethod
    def outcome(a: int, tm: int, tr: int, e: bool) -> int: # activation, threshold mark, threshold report, expected (True if the player really is an engine)
logging.debug(a)
true_positive = 1
false_negative = 2
true_negative = 3
false_positive = 4
true_report = 5
false_report = 6
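        # Worked examples, using the same thresholds (92, 64) that
        # getPlayerOutcomes above passes in:
        #   outcome(95, 92, 64, True)  -> 1 (true_positive)
        #   outcome(70, 92, 64, False) -> 6 (false_report)
        #   outcome(50, 92, 64, True)  -> 2 (false_negative)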
if a > tm and e:
return true_positive
if a > tm and not e:
return false_positive
if a > tr and e:
return true_report
if a > tr and not e:
return false_report
if a <= tr and e:
return false_negative
return true_negative
|
agpl-3.0
| 6,676,919,598,307,802,000 | 44.109589 | 202 | 0.593864 | false |
HybridF5/jacket
|
jacket/api/compute/openstack/compute/legacy_v2/contrib/server_diagnostics.py
|
1
|
2408
|
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import webob.exc
from jacket.api.compute.openstack import common
from jacket.api.compute.openstack import extensions
from jacket.compute import cloud
from jacket.compute import exception
from jacket.i18n import _
authorize = extensions.extension_authorizer('cloud', 'server_diagnostics')
class ServerDiagnosticsController(object):
def __init__(self):
self.compute_api = cloud.API()
def index(self, req, server_id):
context = req.environ["compute.context"]
authorize(context)
instance = common.get_instance(self.compute_api, context, server_id)
try:
return self.compute_api.get_diagnostics(context, instance)
except exception.InstanceInvalidState as state_error:
common.raise_http_conflict_for_instance_invalid_state(state_error,
'get_diagnostics', server_id)
except NotImplementedError:
msg = _("Unable to get diagnostics, functionality not implemented")
raise webob.exc.HTTPNotImplemented(explanation=msg)
class Server_diagnostics(extensions.ExtensionDescriptor):
"""Allow Admins to view server diagnostics through server action."""
name = "ServerDiagnostics"
alias = "os-server-diagnostics"
namespace = ("http://docs.openstack.org/cloud/ext/"
"server-diagnostics/api/v1.1")
updated = "2011-12-21T00:00:00Z"
def get_resources(self):
parent_def = {'member_name': 'server', 'collection_name': 'servers'}
# NOTE(bcwaldon): This should be prefixed with 'os-'
ext = extensions.ResourceExtension('diagnostics',
ServerDiagnosticsController(),
parent=parent_def)
return [ext]
|
apache-2.0
| 4,184,177,910,797,046,000 | 37.222222 | 79 | 0.675249 | false |
uclouvain/osis
|
program_management/forms/prerequisite.py
|
1
|
3264
|
##############################################################################
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2019 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
from django import forms
from django.utils.translation import gettext_lazy as _
from education_group.forms.fields import UpperCaseCharField
from program_management.ddd.domain.node import NodeLearningUnitYear
from program_management.ddd.domain.program_tree import ProgramTree
from program_management.ddd.validators.validators_by_business_action import UpdatePrerequisiteValidatorList
class PrerequisiteForm(forms.Form):
prerequisite_string = UpperCaseCharField(
label=_("Prerequisite"),
required=False,
help_text=_(
"<b>Syntax rules</b>:<ul><li>No double parentheses.</li><li>Valid operators are OU or ET.</li><li>The "
"operator must be the same inside all parentheses (groups).</li><li>The operator that linked groups must "
"be different than the one that linked LU inside groups (parentheses).</li><li>The LU code cannot include "
"spaces (ex: LDROI1001 and not LDROI 1001).</li></ul></p><p><b>Examples</b>:<ul><li>A OU B OU C: "
"valid</li><li>A ET B ET C : valid</li><li>A ET (B OU C) ET (D OU E): valid</li><li>A ET (B OU C) OU (D OU "
"E): not valid</li><li>A ET (B ET C) ET (D ET E): not valid</li><li>A ET (B OU C) ET (D ET E): not valid"
"</li></ul>"
),
)
def __init__(self, program_tree: ProgramTree, node: NodeLearningUnitYear, *args, **kwargs):
super().__init__(*args, **kwargs)
self.program_tree = program_tree
self.node = node
def clean_prerequisite_string(self):
prerequisite_string = self.cleaned_data["prerequisite_string"].upper()
validator = UpdatePrerequisiteValidatorList(prerequisite_string, self.node, self.program_tree)
if not validator.is_valid():
for error_message in validator.error_messages:
self.add_error("prerequisite_string", error_message.message)
return prerequisite_string
def save(self, commit=False):
pass
|
agpl-3.0
| 6,778,399,643,430,902,000 | 48.439394 | 120 | 0.652467 | false |
fedspendingtransparency/data-act-core
|
dataactcore/scripts/databaseSetup.py
|
1
|
1596
|
import sqlalchemy_utils
from dataactcore.config import CONFIG_DB, ALEMBIC_PATH, MIGRATION_PATH
from alembic.config import Config
from alembic import command
from sqlalchemy.exc import ProgrammingError
def createDatabase(dbName):
"""Create specified database if it doesn't exist."""
config = CONFIG_DB
connectString = "postgresql://{}:{}@{}:{}/{}".format(config["username"],
config["password"], config["host"], config["port"],
dbName)
if not sqlalchemy_utils.database_exists(connectString):
sqlalchemy_utils.create_database(connectString)
def dropDatabase(dbName):
"""Drop specified database."""
config = CONFIG_DB
connectString = "postgresql://{}:{}@{}:{}/{}".format(config["username"],
config["password"], config["host"], config["port"], dbName)
if sqlalchemy_utils.database_exists(connectString):
sqlalchemy_utils.drop_database(connectString)
def runMigrations(alembicDbName):
"""Run Alembic migrations for a specific database/model set.
Args:
alembicDbName: the database to target (must match one of the
default databases in alembic.ini.
"""
alembic_cfg = Config(ALEMBIC_PATH)
alembic_cfg.set_main_option("script_location", MIGRATION_PATH)
alembic_cfg.set_main_option("databases", alembicDbName)
try:
command.upgrade(alembic_cfg, "head")
except ProgrammingError as e:
if "relation" and "already exists" in e.message:
raise Exception("Cannot run initial db migration if tables "
"already exist. " + e.message)
|
cc0-1.0
| 4,368,196,399,254,522,400 | 36.116279 | 76 | 0.678571 | false |
alexissmirnov/donomo
|
donomo_archive/deps/paypal.jonboxall/standard/pdt/tests/pdt.py
|
1
|
5162
|
"""
run this with ./manage.py test website
see http://www.djangoproject.com/documentation/testing/ for details
"""
from django.conf import settings
from django.core.urlresolvers import reverse
from django.shortcuts import render_to_response
from django.template import Context
from django.template.loader import get_template
from django.test import TestCase
from django.test.client import Client
from paypal.standard.pdt.forms import PayPalPDTForm
from paypal.standard.pdt.models import PayPalPDT
from paypal.standard.pdt.signals import pdt_successful, pdt_failed
class DummyPayPalPDT():
def __init__(self, update_context_dict={}):
self.context_dict = {'st': 'SUCCESS', 'custom':'cb736658-3aad-4694-956f-d0aeade80194',
'txn_id':'1ED550410S3402306', 'mc_gross': '225.00',
'business': settings.PAYPAL_RECEIVER_EMAIL, 'error': 'Error code: 1234'}
self.context_dict.update(update_context_dict)
def update_with_get_params(self, get_params):
if get_params.has_key('tx'):
self.context_dict['txn_id'] = get_params.get('tx')
if get_params.has_key('amt'):
self.context_dict['mc_gross'] = get_params.get('amt')
if get_params.has_key('cm'):
self.context_dict['custom'] = get_params.get('cm')
def _postback(self, test=True):
"""
Perform a Fake PayPal PDT Postback request.
"""
t = get_template('pdt/fake_pdt_response.html')
c = Context(self.context_dict)
html = t.render(c)
return html
class PDTTest(TestCase):
def setUp(self):
# set up some dummy PDT get parameters
self.get_params = {"tx":"4WJ86550014687441", "st":"Completed", "amt":"225.00", "cc":"EUR",
"cm":"a3e192b8%2d8fea%2d4a86%2db2e8%2dd5bf502e36be", "item_number":"",
"sig":"blahblahblah"}
# monkey patch the PayPalPDT._postback function
self.dpppdt = DummyPayPalPDT()
self.dpppdt.update_with_get_params(self.get_params)
PayPalPDT._postback = self.dpppdt._postback
# Every test needs a client.
self.client = Client()
def test_parse_paypal_response(self):
dpppdt = DummyPayPalPDT()
paypal_response = dpppdt._postback()
assert('SUCCESS' in paypal_response)
self.assertEqual(len(PayPalPDT.objects.all()), 0)
pdt_obj = PayPalPDT()
pdt_obj.ipaddress = '127.0.0.1'
pdt_obj._parse_paypal_response(paypal_response)
self.assertEqual(len(PayPalPDT.objects.all()), 0)
self.assertEqual(pdt_obj.txn_id, '1ED550410S3402306')
def test_pdt(self):
self.assertEqual(len(PayPalPDT.objects.all()), 0)
self.dpppdt.update_with_get_params(self.get_params)
paypal_response = self.client.get(reverse('paypal-pdt'), self.get_params)
self.assertContains(paypal_response, 'Transaction complete', status_code=200)
self.assertEqual(len(PayPalPDT.objects.all()), 1)
def test_pdt_signals(self):
self.successful_pdt_fired = False
self.failed_pdt_fired = False
def successful_pdt(sender, **kwargs):
self.successful_pdt_fired = True
pdt_successful.connect(successful_pdt)
def failed_pdt(sender, **kwargs):
self.failed_pdt_fired = True
pdt_failed.connect(failed_pdt)
self.assertEqual(len(PayPalPDT.objects.all()), 0)
paypal_response = self.client.get(reverse('paypal-pdt'), self.get_params)
self.assertContains(paypal_response, 'Transaction complete', status_code=200)
self.assertEqual(len(PayPalPDT.objects.all()), 1)
self.assertTrue(self.successful_pdt_fired)
self.assertFalse(self.failed_pdt_fired)
pdt_obj = PayPalPDT.objects.all()[0]
self.assertEqual(pdt_obj.flag, False)
def test_double_pdt_get(self):
self.assertEqual(len(PayPalPDT.objects.all()), 0)
paypal_response = self.client.get(reverse('paypal-pdt'), self.get_params)
self.assertContains(paypal_response, 'Transaction complete', status_code=200)
self.assertEqual(len(PayPalPDT.objects.all()), 1)
pdt_obj = PayPalPDT.objects.all()[0]
self.assertEqual(pdt_obj.flag, False)
paypal_response = self.client.get(reverse('paypal-pdt'), self.get_params)
self.assertContains(paypal_response, 'Transaction complete', status_code=200)
self.assertEqual(len(PayPalPDT.objects.all()), 1) # we don't create a new pdt
pdt_obj = PayPalPDT.objects.all()[0]
self.assertEqual(pdt_obj.flag, False)
def test_no_txn_id_in_pdt(self):
self.dpppdt.context_dict.pop('txn_id')
self.get_params={}
paypal_response = self.client.get(reverse('paypal-pdt'), self.get_params)
self.assertContains(paypal_response, 'Transaction Failed', status_code=200)
self.assertEqual(len(PayPalPDT.objects.all()), 0)
|
bsd-3-clause
| -4,190,962,985,214,679,000 | 42.754237 | 101 | 0.626501 | false |
Dinnerbone/mcstatus
|
mcstatus/tests/protocol/test_connection.py
|
1
|
9117
|
import pytest
from mock import Mock, patch
from mcstatus.protocol.connection import (
Connection,
TCPSocketConnection,
UDPSocketConnection,
)
class TestConnection:
connection: Connection
def setup_method(self):
self.connection = Connection()
def test_flush(self):
self.connection.sent = bytearray.fromhex("7FAABB")
assert self.connection.flush() == bytearray.fromhex("7FAABB")
assert self.connection.sent == bytearray()
def test_receive(self):
self.connection.receive(bytearray.fromhex("7F"))
self.connection.receive(bytearray.fromhex("AABB"))
assert self.connection.received == bytearray.fromhex("7FAABB")
def test_remaining(self):
self.connection.receive(bytearray.fromhex("7F"))
self.connection.receive(bytearray.fromhex("AABB"))
assert self.connection.remaining() == 3
def test_send(self):
self.connection.write(bytearray.fromhex("7F"))
self.connection.write(bytearray.fromhex("AABB"))
assert self.connection.flush() == bytearray.fromhex("7FAABB")
def test_read(self):
self.connection.receive(bytearray.fromhex("7FAABB"))
assert self.connection.read(2) == bytearray.fromhex("7FAA")
assert self.connection.read(1) == bytearray.fromhex("BB")
def _assert_varint_read_write(self, hexstr, value):
self.connection.receive(bytearray.fromhex(hexstr))
assert self.connection.read_varint() == value
self.connection.write_varint(value)
assert self.connection.flush() == bytearray.fromhex(hexstr)
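    # Worked encoding example: these varints pack an integer into 7-bit
    # groups, least-significant group first, with the high bit set on every
    # byte except the last. 2147483647 = 0x7FFFFFFF splits into the groups
    # 7F, 7F, 7F, 7F, 07, which serialize as FF FF FF FF 07 in the cases below.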
def test_varint_cases(self):
self._assert_varint_read_write("00", 0)
self._assert_varint_read_write("01", 1)
self._assert_varint_read_write("0F", 15)
self._assert_varint_read_write("FFFFFFFF07", 2147483647)
self._assert_varint_read_write("FFFFFFFF0F", -1)
self._assert_varint_read_write("8080808008", -2147483648)
def test_readInvalidVarInt(self):
self.connection.receive(bytearray.fromhex("FFFFFFFF80"))
with pytest.raises(IOError):
self.connection.read_varint()
def test_writeInvalidVarInt(self):
with pytest.raises(ValueError):
self.connection.write_varint(2147483648)
with pytest.raises(ValueError):
self.connection.write_varint(-2147483649)
def test_readUtf(self):
self.connection.receive(bytearray.fromhex("0D48656C6C6F2C20776F726C6421"))
assert self.connection.read_utf() == "Hello, world!"
def test_writeUtf(self):
self.connection.write_utf("Hello, world!")
assert self.connection.flush() == bytearray.fromhex("0D48656C6C6F2C20776F726C6421")
def test_readEmptyUtf(self):
self.connection.write_utf("")
assert self.connection.flush() == bytearray.fromhex("00")
def test_readAscii(self):
self.connection.receive(bytearray.fromhex("48656C6C6F2C20776F726C642100"))
assert self.connection.read_ascii() == "Hello, world!"
def test_writeAscii(self):
self.connection.write_ascii("Hello, world!")
assert self.connection.flush() == bytearray.fromhex("48656C6C6F2C20776F726C642100")
def test_readEmptyAscii(self):
self.connection.write_ascii("")
assert self.connection.flush() == bytearray.fromhex("00")
def test_readShortNegative(self):
self.connection.receive(bytearray.fromhex("8000"))
assert self.connection.read_short() == -32768
def test_writeShortNegative(self):
self.connection.write_short(-32768)
assert self.connection.flush() == bytearray.fromhex("8000")
def test_readShortPositive(self):
self.connection.receive(bytearray.fromhex("7FFF"))
assert self.connection.read_short() == 32767
def test_writeShortPositive(self):
self.connection.write_short(32767)
assert self.connection.flush() == bytearray.fromhex("7FFF")
def test_readUShortPositive(self):
self.connection.receive(bytearray.fromhex("8000"))
assert self.connection.read_ushort() == 32768
def test_writeUShortPositive(self):
self.connection.write_ushort(32768)
assert self.connection.flush() == bytearray.fromhex("8000")
def test_readIntNegative(self):
self.connection.receive(bytearray.fromhex("80000000"))
assert self.connection.read_int() == -2147483648
def test_writeIntNegative(self):
self.connection.write_int(-2147483648)
assert self.connection.flush() == bytearray.fromhex("80000000")
def test_readIntPositive(self):
self.connection.receive(bytearray.fromhex("7FFFFFFF"))
assert self.connection.read_int() == 2147483647
def test_writeIntPositive(self):
self.connection.write_int(2147483647)
assert self.connection.flush() == bytearray.fromhex("7FFFFFFF")
def test_readUIntPositive(self):
self.connection.receive(bytearray.fromhex("80000000"))
assert self.connection.read_uint() == 2147483648
def test_writeUIntPositive(self):
self.connection.write_uint(2147483648)
assert self.connection.flush() == bytearray.fromhex("80000000")
def test_readLongNegative(self):
self.connection.receive(bytearray.fromhex("8000000000000000"))
assert self.connection.read_long() == -9223372036854775808
def test_writeLongNegative(self):
self.connection.write_long(-9223372036854775808)
assert self.connection.flush() == bytearray.fromhex("8000000000000000")
def test_readLongPositive(self):
self.connection.receive(bytearray.fromhex("7FFFFFFFFFFFFFFF"))
assert self.connection.read_long() == 9223372036854775807
def test_writeLongPositive(self):
self.connection.write_long(9223372036854775807)
assert self.connection.flush() == bytearray.fromhex("7FFFFFFFFFFFFFFF")
def test_readULongPositive(self):
self.connection.receive(bytearray.fromhex("8000000000000000"))
assert self.connection.read_ulong() == 9223372036854775808
def test_writeULongPositive(self):
self.connection.write_ulong(9223372036854775808)
assert self.connection.flush() == bytearray.fromhex("8000000000000000")
def test_readBuffer(self):
self.connection.receive(bytearray.fromhex("027FAA"))
buffer = self.connection.read_buffer()
assert buffer.received == bytearray.fromhex("7FAA")
assert self.connection.flush() == bytearray()
def test_writeBuffer(self):
buffer = Connection()
buffer.write(bytearray.fromhex("7FAA"))
self.connection.write_buffer(buffer)
assert self.connection.flush() == bytearray.fromhex("027FAA")
class TCPSocketConnectionTest:
def setup_method(self):
socket = Mock()
socket.recv = Mock()
socket.send = Mock()
with patch("socket.create_connection") as create_connection:
create_connection.return_value = socket
self.connection = TCPSocketConnection(("localhost", 1234))
def test_flush(self):
with pytest.raises(TypeError):
self.connection.flush()
def test_receive(self):
with pytest.raises(TypeError):
self.connection.receive("")
def test_remaining(self):
with pytest.raises(TypeError):
self.connection.remaining()
def test_read(self):
self.connection.socket.recv.return_value = bytearray.fromhex("7FAA")
assert self.connection.read(2) == bytearray.fromhex("7FAA")
def test_read_empty(self):
self.connection.socket.recv.return_value = bytearray.fromhex("")
with pytest.raises(IOError):
self.connection.read(2)
def test_write(self):
self.connection.write(bytearray.fromhex("7FAA"))
# pytype: disable=attribute-error
self.connection.socket.send.assert_called_once_with(bytearray.fromhex("7FAA"))
# pytype: enable=attribute-error
class UDPSocketConnectionTest:
def setup_method(self):
socket = Mock()
socket.recvfrom = Mock()
socket.sendto = Mock()
with patch("socket.socket") as create_socket:
create_socket.return_value = socket
self.connection = UDPSocketConnection(("localhost", 1234))
def test_flush(self):
with pytest.raises(TypeError):
self.connection.flush()
def test_receive(self):
with pytest.raises(TypeError):
self.connection.receive("")
def test_remaining(self):
assert self.connection.remaining() == 65535
def test_read(self):
self.connection.socket.recvfrom.return_value = [bytearray.fromhex("7FAA")]
assert self.connection.read(2) == bytearray.fromhex("7FAA")
def test_write(self):
self.connection.write(bytearray.fromhex("7FAA"))
# pytype: disable=attribute-error
self.connection.socket.sendto.assert_called_once_with(bytearray.fromhex("7FAA"), ("localhost", 1234))
# pytype: enable=attribute-error
|
apache-2.0
| -2,915,360,041,184,111,600 | 31.44484 | 109 | 0.670067 | false |
auto-mat/klub
|
apps/aklub/migrations/0040_auto_20170117_1325.py
|
1
|
1245
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-01-17 13:25
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('aklub', '0039_auto_20161221_1256'),
]
operations = [
migrations.AddField(
model_name='taxconfirmation',
name='user_profile',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='aklub.UserProfile'),
),
migrations.AlterField(
model_name='campaign',
name='result',
field=models.ManyToManyField(blank=True, to='aklub.Result', verbose_name='Acceptable results of communication'),
),
migrations.AlterField(
model_name='masscommunication',
name='template',
field=models.TextField(help_text='Template can contain variable substitutions like addressment, name, variable symbol etc.', max_length=50000, null=True, verbose_name='Template'),
),
migrations.AlterUniqueTogether(
name='taxconfirmation',
unique_together=set([('user_profile', 'year')]),
),
]
|
gpl-3.0
| -6,172,260,691,570,445,000 | 34.571429 | 191 | 0.625703 | false |
tejal29/pants
|
src/python/pants/goal/aggregated_timings.py
|
1
|
1733
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
from collections import defaultdict
from pants.util.dirutil import safe_mkdir_for
class AggregatedTimings(object):
"""Aggregates timings over multiple invocations of 'similar' work.
  If path is not None, stores the timings in that file. Useful for finding bottlenecks."""
def __init__(self, path=None):
# Map path -> timing in seconds (a float)
self._timings_by_path = defaultdict(float)
self._tool_labels = set()
self._path = path
safe_mkdir_for(self._path)
def add_timing(self, label, secs, is_tool=False):
"""Aggregate timings by label.
secs - a double, so fractional seconds are allowed.
is_tool - whether this label represents a tool invocation.
"""
self._timings_by_path[label] += secs
if is_tool:
self._tool_labels.add(label)
# Check existence in case we're a clean-all. We don't want to write anything in that case.
if self._path and os.path.exists(os.path.dirname(self._path)):
with open(self._path, 'w') as f:
for x in self.get_all():
f.write('%(label)s: %(timing)s\n' % x)
def get_all(self):
"""Returns all the timings, sorted in decreasing order.
    Each value is a dict: { label: <label>, timing: <timing in seconds>, is_tool: <bool> }
"""
return [{ 'label': x[0], 'timing': x[1], 'is_tool': x[0] in self._tool_labels}
for x in sorted(self._timings_by_path.items(), key=lambda x: x[1], reverse=True)]
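# Minimal usage sketch (illustrative only; the path and label are hypothetical):
#   timings = AggregatedTimings('/tmp/timings.txt')
#   timings.add_timing('compile', 1.5, is_tool=True)
#   timings.add_timing('compile', 0.5, is_tool=True)
#   timings.get_all()  # [{'label': 'compile', 'timing': 2.0, 'is_tool': True}]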
|
apache-2.0
| -8,865,574,243,113,284,000 | 36.673913 | 94 | 0.663012 | false |
vene/marseille
|
experiments/exp_rnn.py
|
1
|
5162
|
import os
import dill
import numpy as np
from sklearn.model_selection import KFold
from marseille.custom_logging import logging
from marseille.datasets import get_dataset_loader, load_embeds
from marseille.io import cache_fname
from marseille.argrnn import ArgumentLSTM
def argrnn_cv_score(dataset, dynet_weight_decay, mlp_dropout,
rnn_dropout, prop_layers, class_weight, constraints,
compat_features, second_order):
fn = cache_fname("argrnn_cv_score", (dataset, dynet_weight_decay,
mlp_dropout, rnn_dropout, prop_layers,
class_weight, constraints,
compat_features, second_order))
if os.path.exists(fn):
logging.info("Cached file already exists.")
with open(fn, "rb") as f:
return dill.load(f)
load, ids = get_dataset_loader(dataset, split="train")
embeds = load_embeds(dataset)
grandparent_layers = 1 if second_order and dataset == 'ukp' else 0
coparent_layers = 1 if second_order else 0
sibling_layers = 1 if second_order and dataset == 'cdcp' else 0
scores = []
all_Y_pred = []
score_at_iter = [10, 25, 50, 75, 100]
n_folds = 5 if dataset == 'ukp' else 3
for k, (tr, val) in enumerate(KFold(n_folds).split(ids)):
docs_train = list(load(ids[tr]))
docs_val = list(load(ids[val]))
Y_train = [doc.label for doc in docs_train]
Y_val = [doc.label for doc in docs_val]
rnn = ArgumentLSTM(lstm_dropout=rnn_dropout,
mlp_dropout=mlp_dropout,
compat_features=compat_features,
constraints=constraints,
prop_mlp_layers=prop_layers,
coparent_layers=coparent_layers,
grandparent_layers=grandparent_layers,
sibling_layers=sibling_layers,
class_weight=class_weight,
second_order_multilinear=True,
max_iter=100,
score_at_iter=score_at_iter,
n_mlp=128,
n_lstm=128,
lstm_layers=2,
link_mlp_layers=1,
embeds=embeds,
exact_inference=False,
link_bilinear=True)
rnn.fit(docs_train, Y_train, docs_val, Y_val)
Y_val_pred = rnn.predict(docs_val)
all_Y_pred.extend(Y_val_pred)
scores.append(rnn.scores_)
with open(fn, "wb") as f:
dill.dump((scores, score_at_iter, all_Y_pred), f)
return scores, score_at_iter, all_Y_pred
if __name__ == '__main__':
from docopt import docopt
usage = """
Usage:
exp_rnn (cdcp|ukp) [\
--dynet-seed N --dynet-weight-decay N --dynet-mem N --prop-layers=N \
--rnn-dropout=N --mlp-dropout=N --balanced --constraints --strict \
--compat-features --second-order]
Options:
--dynet-seed=N random number generator seed for dynet library
--dynet-weight-decay=N global weight decay amount for dynet library
--dynet-mem=N memory pool size for dynet
--prop-layers=N number of prop classifier layers. [default: 2]
--rnn-dropout=N dropout ratio in lstm. [default: 0.0]
--mlp-dropout=N dropout ratio in mlp. [default: 0.1]
--balanced whether to reweight class costs by freq
--constraints whether to constrain the decoding
--strict whether to use strict domain constraints
--compat-features whether to use features for compat factors
--second-order whether to use coparent / grandpa / siblings
"""
args = docopt(usage)
dataset = 'cdcp' if args['cdcp'] else 'ukp'
prop_layers = int(args['--prop-layers'])
rnn_dropout = float(args['--rnn-dropout'])
mlp_dropout = float(args['--mlp-dropout'])
cw = 'balanced' if args['--balanced'] else None
if args['--constraints']:
constraints = dataset
if args['--strict']:
constraints += '+strict'
else:
constraints = ""
scores, score_at_iter, _ = argrnn_cv_score(dataset,
args['--dynet-weight-decay'],
mlp_dropout,
rnn_dropout,
prop_layers,
cw,
constraints,
args['--compat-features'],
args['--second-order'])
for iter, score in zip(score_at_iter, np.mean(scores, axis=0)):
print("iter={} "
"Link: {:.3f}/{:.3f} "
"Node: {:.3f}/{:.3f} "
"accuracy {:.3f}".format(iter, *score),
)
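# Hedged example invocation (hypothetical flags, derived from the docopt
# usage string above; adjust the path to your checkout):
#   python experiments/exp_rnn.py cdcp --balanced --constraints --strict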
|
bsd-3-clause
| -241,226,134,149,875,740 | 38.106061 | 79 | 0.509299 | false |
dementrock/nbgrader
|
nbgrader/tests/nbextensions/test_assignment_list.py
|
1
|
13208
|
import pytest
import os
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import TimeoutException, NoSuchElementException
from .. import run_python_module
def _wait(browser):
return WebDriverWait(browser, 30)
def _load_assignments_list(browser, retries=5):
# go to the correct page
browser.get("http://localhost:9000/tree")
def page_loaded(browser):
return browser.execute_script(
'return typeof IPython !== "undefined" && IPython.page !== undefined;')
# wait for the page to load
try:
_wait(browser).until(page_loaded)
except TimeoutException:
if retries > 0:
print("Retrying page load...")
# page timeout, but sometimes this happens, so try refreshing?
_load_assignments_list(browser, retries=retries - 1)
else:
print("Failed to load the page too many times")
raise
# wait for the extension to load
_wait(browser).until(EC.presence_of_element_located((By.CSS_SELECTOR, "#assignments")))
# switch to the assignments list
element = browser.find_element_by_link_text("Assignments")
element.click()
# make sure released, downloaded, and submitted assignments are visible
_wait(browser).until(EC.visibility_of_element_located((By.CSS_SELECTOR, "#released_assignments_list")))
_wait(browser).until(EC.visibility_of_element_located((By.CSS_SELECTOR, "#fetched_assignments_list")))
_wait(browser).until(EC.visibility_of_element_located((By.CSS_SELECTOR, "#submitted_assignments_list")))
def _expand(browser, list_id, assignment):
browser.find_element_by_link_text(assignment).click()
rows = browser.find_elements_by_css_selector("{} .list_item".format(list_id))
for i in range(1, len(rows)):
_wait(browser).until(lambda browser: browser.find_elements_by_css_selector("{} .list_item".format(list_id))[i].is_displayed())
return rows
def _unexpand(browser, list_id, assignment):
browser.find_element_by_link_text(assignment).click()
rows = browser.find_elements_by_css_selector("{} .list_item".format(list_id))
for i in range(1, len(rows)):
_wait(browser).until(lambda browser: not browser.find_elements_by_css_selector("{} .list_item".format(list_id))[i].is_displayed())
def _wait_for_modal(browser):
_wait(browser).until(EC.presence_of_all_elements_located((By.CSS_SELECTOR, ".modal-dialog")))
def _dismiss_modal(browser):
button = browser.find_element_by_css_selector(".modal-footer .btn-primary")
button.click()
def modal_gone(browser):
try:
browser.find_element_by_css_selector(".modal-dialog")
except NoSuchElementException:
return True
return False
_wait(browser).until(modal_gone)
def _sort_rows(x):
try:
item_name = x.find_element_by_class_name("item_name").text
except NoSuchElementException:
item_name = ""
return item_name
@pytest.mark.js
def test_show_assignments_list(browser, class_files):
_load_assignments_list(browser)
    # make sure all the placeholders are initially showing
_wait(browser).until(EC.visibility_of_element_located((By.CSS_SELECTOR, "#released_assignments_list_placeholder")))
_wait(browser).until(EC.visibility_of_element_located((By.CSS_SELECTOR, "#fetched_assignments_list_placeholder")))
_wait(browser).until(EC.visibility_of_element_located((By.CSS_SELECTOR, "#submitted_assignments_list_placeholder")))
# release an assignment
run_python_module(["nbgrader", "assign", "Problem Set 1"])
run_python_module(["nbgrader", "release", "Problem Set 1", "--course", "abc101"])
# click the refresh button
browser.find_element_by_css_selector("#refresh_assignments_list").click()
# wait for the released assignments to update
_wait(browser).until(EC.invisibility_of_element_located((By.CSS_SELECTOR, "#released_assignments_list_placeholder")))
rows = browser.find_elements_by_css_selector("#released_assignments_list > .list_item")
assert len(rows) == 1
assert rows[0].find_element_by_class_name("item_name").text == "Problem Set 1"
assert rows[0].find_element_by_class_name("item_course").text == "abc101"
@pytest.mark.js
def test_multiple_released_assignments(browser, class_files):
_load_assignments_list(browser)
# release another assignment
run_python_module(["nbgrader", "assign", "ps1"])
run_python_module(["nbgrader", "release", "ps1", "--course", "xyz 200"])
# click the refresh button
browser.find_element_by_css_selector("#refresh_assignments_list").click()
# wait for the released assignments to update
_wait(browser).until(lambda browser: len(browser.find_elements_by_css_selector("#released_assignments_list > .list_item")) == 2)
rows = browser.find_elements_by_css_selector("#released_assignments_list > .list_item")
rows.sort(key=_sort_rows)
assert rows[0].find_element_by_class_name("item_name").text == "Problem Set 1"
assert rows[0].find_element_by_class_name("item_course").text == "abc101"
assert rows[1].find_element_by_class_name("item_name").text == "ps1"
assert rows[1].find_element_by_class_name("item_course").text == "xyz 200"
@pytest.mark.js
def test_fetch_assignment(browser, class_files):
_load_assignments_list(browser)
# click the "fetch" button
_wait(browser).until(EC.invisibility_of_element_located((By.CSS_SELECTOR, "#released_assignments_list_placeholder")))
rows = browser.find_elements_by_css_selector("#released_assignments_list > .list_item")
rows[1].find_element_by_css_selector(".item_status button").click()
# wait for the downloaded assignments list to update
_wait(browser).until(lambda browser: len(browser.find_elements_by_css_selector("#fetched_assignments_list > .list_item")) == 1)
rows = browser.find_elements_by_css_selector("#fetched_assignments_list > .list_item")
assert rows[0].find_element_by_class_name("item_name").text == "ps1"
assert rows[0].find_element_by_class_name("item_course").text == "xyz 200"
assert os.path.exists(os.path.join(class_files, "ps1"))
# expand the assignment to show the notebooks
rows = _expand(browser, "#xyz_200-ps1", "ps1")
rows.sort(key=_sort_rows)
assert len(rows) == 2
assert rows[1].find_element_by_class_name("item_name").text == "problem 1"
# unexpand the assignment
_unexpand(browser, "#xyz_200-ps1", "ps1")
@pytest.mark.js
def test_submit_assignment(browser, class_files):
_load_assignments_list(browser)
# submit it
_wait(browser).until(EC.invisibility_of_element_located((By.CSS_SELECTOR, "#fetched_assignments_list_placeholder")))
rows = browser.find_elements_by_css_selector("#fetched_assignments_list > .list_item")
rows[0].find_element_by_css_selector(".item_status button").click()
# wait for the submitted assignments list to update
_wait(browser).until(EC.invisibility_of_element_located((By.CSS_SELECTOR, "#submitted_assignments_list_placeholder")))
rows = browser.find_elements_by_css_selector("#submitted_assignments_list > .list_item")
assert len(rows) == 1
assert rows[0].find_element_by_class_name("item_name").text == "ps1"
assert rows[0].find_element_by_class_name("item_course").text == "xyz 200"
# submit it again
rows = browser.find_elements_by_css_selector("#fetched_assignments_list > .list_item")
rows[0].find_element_by_css_selector(".item_status button").click()
# wait for the submitted assignments list to update
_wait(browser).until(lambda browser: len(browser.find_elements_by_css_selector("#submitted_assignments_list > .list_item")) == 2)
rows = browser.find_elements_by_css_selector("#submitted_assignments_list > .list_item")
rows.sort(key=_sort_rows)
assert rows[0].find_element_by_class_name("item_name").text == "ps1"
assert rows[0].find_element_by_class_name("item_course").text == "xyz 200"
assert rows[1].find_element_by_class_name("item_name").text == "ps1"
assert rows[1].find_element_by_class_name("item_course").text == "xyz 200"
assert rows[0].find_element_by_class_name("item_status").text != rows[1].find_element_by_class_name("item_status").text
@pytest.mark.js
def test_fetch_second_assignment(browser, class_files):
_load_assignments_list(browser)
# click the "fetch" button
_wait(browser).until(EC.invisibility_of_element_located((By.CSS_SELECTOR, "#released_assignments_list_placeholder")))
rows = browser.find_elements_by_css_selector("#released_assignments_list > .list_item")
rows[0].find_element_by_css_selector(".item_status button").click()
# wait for the downloaded assignments list to update
_wait(browser).until(EC.visibility_of_element_located((By.CSS_SELECTOR, "#released_assignments_list_placeholder")))
_wait(browser).until(lambda browser: len(browser.find_elements_by_css_selector("#fetched_assignments_list > .list_item")) == 2)
rows = browser.find_elements_by_css_selector("#fetched_assignments_list > .list_item")
rows.sort(key=_sort_rows)
assert rows[0].find_element_by_class_name("item_name").text == "Problem Set 1"
assert rows[0].find_element_by_class_name("item_course").text == "abc101"
assert rows[1].find_element_by_class_name("item_name").text == "ps1"
assert rows[1].find_element_by_class_name("item_course").text == "xyz 200"
assert os.path.exists(os.path.join(class_files, "Problem Set 1"))
# expand the assignment to show the notebooks
rows = _expand(browser, "#abc101-Problem_Set_1", "Problem Set 1")
rows.sort(key=_sort_rows)
assert len(rows) == 3
assert rows[1].find_element_by_class_name("item_name").text == "Problem 1"
assert rows[2].find_element_by_class_name("item_name").text == "Problem 2"
# unexpand the assignment
    _unexpand(browser, "#abc101-Problem_Set_1", "Problem Set 1")
@pytest.mark.js
def test_submit_other_assignment(browser, class_files):
_load_assignments_list(browser)
# submit it
_wait(browser).until(EC.invisibility_of_element_located((By.CSS_SELECTOR, "#fetched_assignments_list_placeholder")))
rows = browser.find_elements_by_css_selector("#fetched_assignments_list > .list_item")
rows[0].find_element_by_css_selector(".item_status button").click()
# wait for the submitted assignments list to update
_wait(browser).until(lambda browser: len(browser.find_elements_by_css_selector("#submitted_assignments_list > .list_item")) == 3)
rows = browser.find_elements_by_css_selector("#submitted_assignments_list > .list_item")
rows.sort(key=_sort_rows)
assert rows[0].find_element_by_class_name("item_name").text == "Problem Set 1"
assert rows[0].find_element_by_class_name("item_course").text == "abc101"
assert rows[1].find_element_by_class_name("item_name").text == "ps1"
assert rows[1].find_element_by_class_name("item_course").text == "xyz 200"
assert rows[2].find_element_by_class_name("item_name").text == "ps1"
assert rows[2].find_element_by_class_name("item_course").text == "xyz 200"
assert rows[0].find_element_by_class_name("item_status").text != rows[1].find_element_by_class_name("item_status").text
assert rows[0].find_element_by_class_name("item_status").text != rows[2].find_element_by_class_name("item_status").text
@pytest.mark.js
def test_validate_ok(browser, class_files):
_load_assignments_list(browser)
# expand the assignment to show the notebooks
_wait(browser).until(EC.invisibility_of_element_located((By.CSS_SELECTOR, "#fetched_assignments_list_placeholder")))
rows = _expand(browser, "#xyz_200-ps1", "ps1")
rows.sort(key=_sort_rows)
assert len(rows) == 2
assert rows[1].find_element_by_class_name("item_name").text == "problem 1"
# click the "validate" button
rows[1].find_element_by_css_selector(".item_status button").click()
# wait for the modal dialog to appear
_wait_for_modal(browser)
# check that it succeeded
browser.find_element_by_css_selector(".modal-dialog .validation-success")
# close the modal dialog
_dismiss_modal(browser)
@pytest.mark.js
def test_validate_failure(browser, class_files):
_load_assignments_list(browser)
# expand the assignment to show the notebooks
_wait(browser).until(EC.invisibility_of_element_located((By.CSS_SELECTOR, "#fetched_assignments_list_placeholder")))
rows = _expand(browser, "#abc101-Problem_Set_1", "Problem Set 1")
rows.sort(key=_sort_rows)
assert len(rows) == 3
assert rows[1].find_element_by_class_name("item_name").text == "Problem 1"
assert rows[2].find_element_by_class_name("item_name").text == "Problem 2"
# click the "validate" button
rows[2].find_element_by_css_selector(".item_status button").click()
# wait for the modal dialog to appear
_wait_for_modal(browser)
    # check that validation failed
browser.find_element_by_css_selector(".modal-dialog .validation-failed")
# close the modal dialog
_dismiss_modal(browser)
|
bsd-3-clause
| 4,586,161,533,023,483,000 | 44.232877 | 138 | 0.696548 | false |
merc-devel/merc
|
merc/errors.py
|
1
|
2503
|
from merc import message
class BaseError(Exception, message.Reply):
pass
class Error(Exception, message.Message):
NAME = "ERROR"
FORCE_TRAILING = True
def __init__(self, reason):
self.reason = reason
def as_params(self, client):
return [self.reason]
class LinkError(Error):
NAME = "ERROR"
FORCE_TRAILING = True
def as_params(self, client):
host, *_ = client.protocol.transport.get_extra_info("peername")
return ["Closing link: {} ({})".format(host, self.reason)]
class SimpleError(BaseError):
def as_reply_params(self):
return [self.REASON]
class ParametrizedError(BaseError):
def __init__(self, *params):
self.params = params
def as_reply_params(self):
return list(self.params) + [self.REASON]
class NoSuchNick(ParametrizedError):
NAME = "401"
REASON = "No such nick/channel"
class NoSuchServer(ParametrizedError):
NAME = "402"
REASON = "No such server"
class NoSuchChannel(ParametrizedError):
NAME = "403"
REASON = "No such channel"
class CannotSendToChan(ParametrizedError):
NAME = "404"
REASON = "Cannot send to channel"
class ErroneousNickname(SimpleError):
NAME = "432"
REASON = "Erroneous nickname"
class NicknameInUse(ParametrizedError):
NAME = "433"
REASON = "Nickname in use"
class NotRegistered(SimpleError):
NAME = "451"
REASON = "You have not registered"
class NeedMoreParams(ParametrizedError):
NAME = "461"
REASON = "Not enough parameters"
class UnknownCommand(ParametrizedError):
NAME = "421"
REASON = "Unknown command"
class UnknownMode(ParametrizedError):
NAME = "472"
REASON = "is an unknown mode char to me"
class UmodeUnknownFlag(SimpleError):
NAME = "501"
REASON = "Unknown MODE flag"
class UsersDontMatch(SimpleError):
NAME = "502"
REASON = "Can't change mode for other users"
class ChanOpPrivsNeeded(ParametrizedError):
NAME = "482"
REASON = "You're not a channel operator"
class NotOnChannel(ParametrizedError):
NAME = "442"
REASON = "You're not on that channel"
class AlreadyOnChannel(ParametrizedError):
NAME = "443"
REASON = "is already on channel"
class PasswordMismatch(SimpleError):
NAME = "464"
REASON = "Password mismatch"
class NoPrivileges(SimpleError):
NAME = "481"
REASON = "You're not an IRC operator"
class BannedFromChannel(ParametrizedError):
NAME = "474"
REASON = "You are banned from the channel"
class AlreadyRegistered(SimpleError):
NAME = "462"
REASON = "You may not reregister"
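# --- Hedged usage sketch (illustrative only, not part of the original file).
# Demonstrates how the ParametrizedError pattern composes numeric-reply
# parameters: positional params first, then the class REASON string.
def _demo_error_params():
  try:
    raise NoSuchNick("nonexistent")
  except ParametrizedError as e:
    assert e.as_reply_params() == ["nonexistent", "No such nick/channel"]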
|
mit
| -4,638,788,628,289,347,000 | 17.819549 | 67 | 0.703556 | false |
ypcs/bearded-adventure
|
bearded_adventure/webvm/api.py
|
1
|
1904
|
from tastypie.resources import ModelResource, ALL, ALL_WITH_RELATIONS
from webvm.models import Slave, Snapshot, VirtualMachine, MachineImage, HwConfiguration, JobQueueItem
from bearded_adventure.common import CamelCaseJSONSerializer
from tastypie import fields
from tastypie.authentication import ApiKeyAuthentication
class SlaveResource(ModelResource):
class Meta:
queryset = Slave.objects.all()
resource_name = 'slave'
excludes = [
'id',
'ssh_public_key',
]
filtering = {
'uuid': ALL,
}
serializer = CamelCaseJSONSerializer()
authentication = ApiKeyAuthentication()
# detail_uri_name = 'uuid'
# def prepend_urls(self):
# return [url(r'^(?P<resource_name>%s)/(?P<uuid>[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})$' % self._meta.resource_name, self.wrap_view('dispatch_detail'), name='api_dispatch_detail')]
class MachineImageResource(ModelResource):
class Meta:
queryset = MachineImage.objects.all()
resource_name = 'machine-image'
serializer = CamelCaseJSONSerializer()
excludes = ['id',]
authentication = ApiKeyAuthentication()
class VirtualMachineResource(ModelResource):
machine_image = fields.ForeignKey(MachineImageResource, 'machine_image')
class Meta:
queryset = VirtualMachine.objects.all()
resource_name = 'virtual_machine'
serializer = CamelCaseJSONSerializer()
authentication = ApiKeyAuthentication()
class JobQueueResource(ModelResource):
virtual_machine = fields.ForeignKey(VirtualMachineResource, 'vm')
class Meta:
queryset = JobQueueItem.objects.all().order_by('-priority', 'created')
resource_name = 'queue'
serializer = CamelCaseJSONSerializer()
excludes = ['id',]
authentication = ApiKeyAuthentication()
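# --- Hedged wiring sketch (assumption: this registration is not part of the
# original module and would normally live in the project's urls.py). Tastypie
# resources are mounted on an Api object and exposed through its urls.
from tastypie.api import Api

v1_api = Api(api_name='v1')
v1_api.register(SlaveResource())
v1_api.register(MachineImageResource())
v1_api.register(VirtualMachineResource())
v1_api.register(JobQueueResource())
# urlpatterns would then include url(r'^api/', include(v1_api.urls)).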
|
gpl-3.0
| -1,674,742,944,250,033,200 | 37.857143 | 210 | 0.669118 | false |
zenoss/ZenPacks.community.IBMSystemxIMM
|
ZenPacks/community/IBMSystemxIMM/modeler/plugins/community/snmp/IBMIMMMemVpdMap.py
|
1
|
3309
|
# ==============================================================================
# IBMIMMMemVpdMap modeler plugin
#
# Zenoss community Zenpack for IBM SystemX Integrated Management Module
# version: 0.3
#
# (C) Copyright IBM Corp. 2011. All Rights Reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# ==============================================================================
__doc__="""IBMIMMMemVpdMap maps Memory DIMM VPD entries associated with an IMM"""
__author__ = "IBM"
__copyright__ = "(C) Copyright IBM Corp. 2011. All Rights Reserved."
__license__ = "GPL"
__version__ = "0.3.0"
from Products.DataCollector.plugins.CollectorPlugin import SnmpPlugin, GetTableMap, GetMap
from Products.DataCollector.plugins.DataMaps import ObjectMap
class IBMIMMMemVpdMap(SnmpPlugin):
relname = "IMMMEMVPD"
modname = "ZenPacks.community.IBMSystemxIMM.IMMMemVpd"
columns = {
'.1': 'memoryVpdIndex',
'.2': 'memoryVpdDescription',
'.3': 'memoryVpdPartNumber',
'.4': 'memoryVpdFRUSerialNumber',
'.5': 'memoryVpdManufactureDate',
'.6': 'memoryVpdType',
'.7': 'memoryVpdSize',
}
# snmpGetTableMaps gets tabular data
snmpGetTableMaps = (
# Memory VPD table
GetTableMap('systemMemoryVpdEntry', '.1.3.6.1.4.1.2.3.51.3.1.5.21.1', columns),
)
def process(self, device, results, log):
"""collect snmp information from this device"""
log.info('processing %s for device %s', self.name(), device.id)
# Collect the data from device
getdata, tabledata = results
# Debug: print data retrieved from device.
log.warn( "Get data = %s", getdata )
log.warn( "Table data = %s", tabledata )
VpdTable = tabledata.get("systemMemoryVpdEntry")
# If no data retrieved return nothing.
if not VpdTable:
log.warn( 'No data collected from %s for the %s plugin', device.id, self.name() )
log.warn( "Data = %s", getdata )
log.warn( "Columns = %s", self.columns )
return
rm = self.relMap()
for oid, data in VpdTable.items():
om = self.objectMap(data)
om.id = self.prepId(om.memoryVpdDescription)
# om.snmpindex = int(om.memoryVpdIndex)
om.memoryVpdIndex = int(om.memoryVpdIndex)
# Debug: print values of object map.
# for key,value in om.__dict__.items():
# log.warn("om key=value: %s = %s", key,value)
rm.append(om)
return rm
|
gpl-2.0
| -2,510,824,104,534,055,400 | 37.476744 | 93 | 0.598368 | false |
saukrIppl/seahub
|
tests/api/test_dir.py
|
1
|
1041
|
import json
import os
from django.core.urlresolvers import reverse
from seahub.test_utils import BaseTestCase
class DirTest(BaseTestCase):
def setUp(self):
self.login_as(self.user)
self.endpoint = reverse('DirView', args=[self.repo.id])
self.folder_name = os.path.basename(self.folder)
def tearDown(self):
self.remove_repo()
def test_can_list(self):
resp = self.client.get(self.endpoint)
json_resp = json.loads(resp.content)
self.assertEqual(200, resp.status_code)
assert len(json_resp) == 1
assert self.folder_name == json_resp[0]['name']
def test_can_create(self):
resp = self.client.post(self.endpoint + '?p=/new_dir', {
'operation': 'mkdir'
})
self.assertEqual(201, resp.status_code)
def test_create_with_nonexistent_parent(self):
resp = self.client.post(self.endpoint + '?p=/new_parent/new_dir', {
'operation': 'mkdir'
})
self.assertEqual(400, resp.status_code)
|
apache-2.0
| -4,933,864,733,565,598,000 | 27.135135 | 75 | 0.621518 | false |
lgarren/spack
|
var/spack/repos/builtin/packages/r-yaml/package.py
|
1
|
1687
|
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class RYaml(RPackage):
"""This package implements the libyaml YAML 1.1 parser and emitter
(http://pyyaml.org/wiki/LibYAML) for R."""
homepage = "https://cran.r-project.org/web/packages/yaml/index.html"
url = "https://cran.r-project.org/src/contrib/yaml_2.1.13.tar.gz"
list_url = homepage
version('2.1.14', '2de63248e6a122c368f8e4537426e35c')
version('2.1.13', 'f2203ea395adaff6bd09134666191d9a')
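    # Hedged usage note (assumption, not part of the original recipe): with
    # Spack installed, this package would typically be built from the CLI,
    # e.g. `spack install r-yaml`.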
|
lgpl-2.1
| -7,837,539,906,779,129,000 | 44.594595 | 78 | 0.676941 | false |
pikhovkin/instructor
|
instructor/model.py
|
1
|
2702
|
from collections import OrderedDict
from .errors import InvalidData, InvalidDataSize, InvalidModelDeclaration
from .fields import BaseFieldInstructor, DefaultByteOrder
__all__ = (
'InstructorModel',
)
class Opts(object):
pass
class MetaInstructor(type):
def __new__(cls, name, bases, attrs):
declared_fields = [(key, value) for key, value in attrs.iteritems() if isinstance(value, BaseFieldInstructor)]
_fields = OrderedDict(sorted(declared_fields, key=lambda x: x[1]._order_counter))
if _fields and not isinstance(_fields.values()[0], DefaultByteOrder):
raise InvalidModelDeclaration('First field of a class must be subclass of DefaultByteOrder')
for field_name, field in _fields.iteritems():
field.name = field_name
attrs.pop(field_name)
new_cls = type.__new__(cls, name, bases, attrs)
new_cls._meta = Opts()
new_cls._meta.fields = _fields
for field_name, field in _fields.iteritems():
setattr(new_cls._meta, field_name, field)
return new_cls
class InstructorModel(object):
__metaclass__ = MetaInstructor
def __init__(self, *args, **kwargs):
if args:
data = args[0]
offset = 0
byte_order = self._meta.fields.values()[0]
try:
for i, field in enumerate(self._meta.fields.itervalues()):
if i == 0:
continue
value, size = field._unpack(self, byte_order, data, offset=offset)
offset += size
setattr(self, field.name, value)
except Exception as e:
if e.args[0] == 'total struct size too long':
raise InvalidDataSize(e.args[0])
elif e.args[0].startswith('unpack_from requires a buffer of at least'):
raise InvalidDataSize(e.args[0])
raise e
elif kwargs:
for i, field in enumerate(self._meta.fields.itervalues()):
if i == 0:
continue
value = kwargs.get(field.name, field.get_default())
setattr(self, field.name, value)
else:
raise InvalidData
@classmethod
def unpack(cls, data):
return cls(data)
def pack(self):
fmt = ''
data = ''
byte_order = self._meta.fields.values()[0]
for i, field in enumerate(self._meta.fields.itervalues()):
if i == 0:
continue
_fmt, _data = field._pack(self, byte_order)
fmt += _fmt
data += _data
return data
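# --- Hedged declaration sketch (hypothetical: the field classes named below,
# e.g. ByteOrder/UInt8/UInt16, are assumed to exist in .fields but are not
# shown in this file, so the sketch is left commented out):
#
# class Packet(InstructorModel):
#     byte_order = ByteOrder('<')  # first field must subclass DefaultByteOrder
#     kind = UInt8()
#     length = UInt16()
#
# pkt = Packet(kind=1, length=42)   # build from keyword arguments
# raw = pkt.pack()                  # serialize to a byte string
# same = Packet.unpack(raw)         # or parse raw bytes back into a model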
|
mit
| 4,084,107,766,100,893,700 | 29.022222 | 118 | 0.551813 | false |
rzarzynski/tempest
|
tempest/scenario/test_volume_boot_pattern.py
|
1
|
8285
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest_lib.common.utils import data_utils
from tempest_lib import decorators
from tempest import config
from tempest.openstack.common import log
from tempest.scenario import manager
from tempest import test
CONF = config.CONF
LOG = log.getLogger(__name__)
class TestVolumeBootPattern(manager.ScenarioTest):
"""
This test case attempts to reproduce the following steps:
    * Create a bootable volume in Cinder by importing a Glance image
    * Boot an instance from the bootable volume
    * Write content to the volume
    * Delete the instance and boot a new instance from the volume
    * Check the written content in the instance
    * Create a volume snapshot while the instance is running
    * Boot an additional instance from the new snapshot-based volume
    * Check the written content in the instance booted from the snapshot
"""
@classmethod
def skip_checks(cls):
super(TestVolumeBootPattern, cls).skip_checks()
if not CONF.volume_feature_enabled.snapshot:
raise cls.skipException("Cinder volume snapshots are disabled")
def _create_volume_from_image(self):
img_uuid = CONF.compute.image_ref
vol_name = data_utils.rand_name('volume-origin')
return self.create_volume(name=vol_name, imageRef=img_uuid)
def _boot_instance_from_volume(self, vol_id, keypair):
# NOTE(gfidente): the syntax for block_device_mapping is
# dev_name=id:type:size:delete_on_terminate
# where type needs to be "snap" if the server is booted
# from a snapshot, size instead can be safely left empty
bd_map = [{
'device_name': 'vda',
'volume_id': vol_id,
'delete_on_termination': '0'}]
self.security_group = self._create_security_group()
security_groups = [{'name': self.security_group['name']}]
create_kwargs = {
'block_device_mapping': bd_map,
'key_name': keypair['name'],
'security_groups': security_groups
}
return self.create_server(image='', create_kwargs=create_kwargs)
def _create_snapshot_from_volume(self, vol_id):
snap_name = data_utils.rand_name('snapshot')
snap = self.snapshots_client.create_snapshot(
volume_id=vol_id,
force=True,
display_name=snap_name)
self.addCleanup_with_wait(
waiter_callable=self.snapshots_client.wait_for_resource_deletion,
thing_id=snap['id'], thing_id_param='id',
cleanup_callable=self.delete_wrapper,
cleanup_args=[self.snapshots_client.delete_snapshot, snap['id']])
self.snapshots_client.wait_for_snapshot_status(snap['id'], 'available')
self.assertEqual(snap_name, snap['display_name'])
return snap
def _create_volume_from_snapshot(self, snap_id):
vol_name = data_utils.rand_name('volume')
return self.create_volume(name=vol_name, snapshot_id=snap_id)
def _stop_instances(self, instances):
# NOTE(gfidente): two loops so we do not wait for the status twice
for i in instances:
self.servers_client.stop(i['id'])
for i in instances:
self.servers_client.wait_for_server_status(i['id'], 'SHUTOFF')
def _detach_volumes(self, volumes):
# NOTE(gfidente): two loops so we do not wait for the status twice
for v in volumes:
self.volumes_client.detach_volume(v['id'])
for v in volumes:
self.volumes_client.wait_for_volume_status(v['id'], 'available')
def _ssh_to_server(self, server, keypair):
if CONF.compute.use_floatingip_for_ssh:
floating_ip = self.floating_ips_client.create_floating_ip()
self.addCleanup(self.delete_wrapper,
self.floating_ips_client.delete_floating_ip,
floating_ip['id'])
self.floating_ips_client.associate_floating_ip_to_server(
floating_ip['ip'], server['id'])
ip = floating_ip['ip']
else:
network_name_for_ssh = CONF.compute.network_for_ssh
ip = server.networks[network_name_for_ssh][0]
return self.get_remote_client(ip, private_key=keypair['private_key'],
log_console_of_servers=[server])
def _get_content(self, ssh_client):
return ssh_client.exec_command('cat /tmp/text')
def _write_text(self, ssh_client):
text = data_utils.rand_name('text-')
ssh_client.exec_command('echo "%s" > /tmp/text; sync' % (text))
return self._get_content(ssh_client)
def _delete_server(self, server):
self.servers_client.delete_server(server['id'])
self.servers_client.wait_for_server_termination(server['id'])
def _check_content_of_written_file(self, ssh_client, expected):
actual = self._get_content(ssh_client)
self.assertEqual(expected, actual)
@decorators.skip_because(bug='1373513')
@test.idempotent_id('557cd2c2-4eb8-4dce-98be-f86765ff311b')
@test.services('compute', 'volume', 'image')
def test_volume_boot_pattern(self):
keypair = self.create_keypair()
self.security_group = self._create_security_group()
# create an instance from volume
volume_origin = self._create_volume_from_image()
instance_1st = self._boot_instance_from_volume(volume_origin['id'],
keypair)
# write content to volume on instance
ssh_client_for_instance_1st = self._ssh_to_server(instance_1st,
keypair)
text = self._write_text(ssh_client_for_instance_1st)
# delete instance
self._delete_server(instance_1st)
# create a 2nd instance from volume
instance_2nd = self._boot_instance_from_volume(volume_origin['id'],
keypair)
# check the content of written file
ssh_client_for_instance_2nd = self._ssh_to_server(instance_2nd,
keypair)
self._check_content_of_written_file(ssh_client_for_instance_2nd, text)
# snapshot a volume
snapshot = self._create_snapshot_from_volume(volume_origin['id'])
# create a 3rd instance from snapshot
volume = self._create_volume_from_snapshot(snapshot['id'])
instance_from_snapshot = self._boot_instance_from_volume(volume['id'],
keypair)
# check the content of written file
ssh_client = self._ssh_to_server(instance_from_snapshot, keypair)
self._check_content_of_written_file(ssh_client, text)
# NOTE(gfidente): ensure resources are in clean state for
# deletion operations to succeed
self._stop_instances([instance_2nd, instance_from_snapshot])
self._detach_volumes([volume_origin, volume])
class TestVolumeBootPatternV2(TestVolumeBootPattern):
def _boot_instance_from_volume(self, vol_id, keypair):
bdms = [{'uuid': vol_id, 'source_type': 'volume',
'destination_type': 'volume', 'boot_index': 0,
'delete_on_termination': False}]
self.security_group = self._create_security_group()
security_groups = [{'name': self.security_group['name']}]
create_kwargs = {
'block_device_mapping_v2': bdms,
'key_name': keypair['name'],
'security_groups': security_groups
}
return self.create_server(image='', create_kwargs=create_kwargs)
|
apache-2.0
| -8,211,220,916,780,681,000 | 41.706186 | 79 | 0.620881 | false |
soft-matter/mr
|
mr/tests/test_feature_saving.py
|
1
|
1721
|
import unittest
import nose
from numpy.testing import assert_almost_equal, assert_allclose
from numpy.testing.decorators import slow
from pandas.util.testing import (assert_series_equal, assert_frame_equal)
import os
from tempfile import NamedTemporaryFile
import pandas as pd
from pandas import DataFrame, Series
import mr
import sqlite3
path, _ = os.path.split(os.path.abspath(__file__))
class TestFeatureSaving(unittest.TestCase):
def setUp(self):
self.db_conn = sqlite3.connect(':memory:')
directory = os.path.join(path, 'video', 'image_sequence')
self.v = mr.ImageSequence(directory)
self.PARAMS = (11, 3000)
with NamedTemporaryFile() as temp:
self.expected = mr.batch(self.v[[0, 1]], *self.PARAMS,
meta=temp.name)
def test_sqlite(self):
with NamedTemporaryFile() as temp:
f = mr.batch(self.v[[0, 1]], *self.PARAMS, conn=self.db_conn,
sql_flavor='sqlite', table='features', meta=temp.name)
assert_frame_equal(f, self.expected)
def test_HDFStore(self):
STORE_NAME = 'temp_for_testing.h5'
if os.path.isfile(STORE_NAME):
os.remove(STORE_NAME)
try:
store = pd.HDFStore(STORE_NAME)
except:
nose.SkipTest('Cannot make an HDF5 file. Skipping')
else:
with NamedTemporaryFile() as temp:
f = mr.batch(self.v[[0, 1]], *self.PARAMS, store=store,
table='features', meta=temp.name)
assert_frame_equal(f.reset_index(drop=True),
self.expected.reset_index(drop=True))
os.remove(STORE_NAME)
|
gpl-3.0
| 2,259,999,253,511,794,400 | 34.122449 | 75 | 0.606043 | false |
alexryndin/ambari
|
ambari-server/src/test/python/TestAmbariServer.py
|
1
|
340374
|
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import os
from mock.mock import patch, MagicMock, create_autospec, call
import sys
with patch.object(os, "geteuid", new=MagicMock(return_value=0)):
from resource_management.core import sudo
reload(sudo)
from stacks.utils.RMFTestCase import *
import traceback
import datetime
import errno
import json
import operator
from optparse import OptionParser
import platform
import re
import shutil
import signal
import stat
import StringIO
import tempfile
import logging
import logging.handlers
import logging.config
from unittest import TestCase
os.environ["ROOT"] = ""
from only_for_platform import get_platform, not_for_platform, only_for_platform, os_distro_value, PLATFORM_LINUX, PLATFORM_WINDOWS
from ambari_commons import os_utils
if get_platform() != PLATFORM_WINDOWS:
from pwd import getpwnam
import shutil
project_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)),os.path.normpath("../../../../"))
shutil.copyfile(project_dir+"/ambari-server/conf/unix/ambari.properties", "/tmp/ambari.properties")
# We have to use this import HACK because the filename contains a dash
_search_file = os_utils.search_file
def search_file_proxy(filename, searchpatch, pathsep=os.pathsep):
global _search_file
if "ambari.properties" in filename:
return "/tmp/ambari.properties"
return _search_file(filename, searchpatch, pathsep)
os_utils.search_file = search_file_proxy
with patch.object(platform, "linux_distribution", return_value = MagicMock(return_value=('Redhat', '6.4', 'Final'))):
with patch("os.path.isdir", return_value = MagicMock(return_value=True)):
with patch("os.access", return_value = MagicMock(return_value=True)):
with patch.object(os_utils, "parse_log4j_file", return_value={'ambari.log.dir': '/var/log/ambari-server'}):
with patch("platform.linux_distribution", return_value = os_distro_value):
with patch("os.symlink"):
with patch("glob.glob", return_value = ['/etc/init.d/postgresql-9.3']):
_ambari_server_ = __import__('ambari-server')
with patch("__builtin__.open"):
from ambari_commons.firewall import Firewall
from ambari_commons.os_check import OSCheck, OSConst
from ambari_commons.os_family_impl import OsFamilyImpl, OsFamilyFuncImpl
from ambari_commons.exceptions import FatalException, NonFatalException
from ambari_commons.logging_utils import get_verbose, set_verbose, get_silent, set_silent, get_debug_mode, \
print_info_msg, print_warning_msg, print_error_msg
from ambari_commons.os_utils import run_os_command, search_file, set_file_permissions, remove_file, copy_file, \
is_valid_filepath
from ambari_server.dbConfiguration import DBMSConfigFactory, check_jdbc_drivers
from ambari_server.dbConfiguration_linux import PGConfig, LinuxDBMSConfig, OracleConfig
from ambari_server.properties import Properties
from ambari_server.resourceFilesKeeper import ResourceFilesKeeper, KeeperException
from ambari_server.serverConfiguration import configDefaults, get_java_exe_path, \
check_database_name_property, OS_FAMILY_PROPERTY, \
find_properties_file, get_ambari_properties, get_JAVA_HOME, \
parse_properties_file, read_ambari_user, update_ambari_properties, update_properties_2, write_property, find_jdk, \
get_is_active_instance, \
AMBARI_CONF_VAR, AMBARI_SERVER_LIB, JDBC_DATABASE_PROPERTY, JDBC_RCA_PASSWORD_FILE_PROPERTY, \
PERSISTENCE_TYPE_PROPERTY, JDBC_URL_PROPERTY, get_conf_dir, JDBC_USER_NAME_PROPERTY, JDBC_PASSWORD_PROPERTY, \
JDBC_DATABASE_NAME_PROPERTY, OS_TYPE_PROPERTY, validate_jdk, JDBC_POSTGRES_SCHEMA_PROPERTY, \
RESOURCES_DIR_PROPERTY, JDBC_RCA_PASSWORD_ALIAS, JDBC_RCA_SCHEMA_PROPERTY, IS_LDAP_CONFIGURED, \
SSL_API, SSL_API_PORT, CLIENT_API_PORT_PROPERTY,\
JDBC_CONNECTION_POOL_TYPE, JDBC_CONNECTION_POOL_ACQUISITION_SIZE, \
JDBC_CONNECTION_POOL_IDLE_TEST_INTERVAL, JDBC_CONNECTION_POOL_MAX_AGE, JDBC_CONNECTION_POOL_MAX_IDLE_TIME, \
JDBC_CONNECTION_POOL_MAX_IDLE_TIME_EXCESS,\
LDAP_MGR_PASSWORD_PROPERTY, LDAP_MGR_PASSWORD_ALIAS, JDBC_PASSWORD_FILENAME, NR_USER_PROPERTY, SECURITY_KEY_IS_PERSISTED, \
SSL_TRUSTSTORE_PASSWORD_PROPERTY, SECURITY_IS_ENCRYPTION_ENABLED, PID_DIR_PROPERTY, SSL_TRUSTSTORE_PASSWORD_ALIAS, \
SECURITY_MASTER_KEY_LOCATION, SECURITY_KEYS_DIR, LDAP_PRIMARY_URL_PROPERTY, store_password_file, \
get_pass_file_path, GET_FQDN_SERVICE_URL, JDBC_USE_INTEGRATED_AUTH_PROPERTY, SECURITY_KEY_ENV_VAR_NAME, \
JAVA_HOME_PROPERTY, JDK_NAME_PROPERTY, JCE_NAME_PROPERTY, STACK_LOCATION_KEY, SERVER_VERSION_FILE_PATH, \
COMMON_SERVICES_PATH_PROPERTY, WEBAPP_DIR_PROPERTY, SHARED_RESOURCES_DIR, BOOTSTRAP_SCRIPT, \
CUSTOM_ACTION_DEFINITIONS, BOOTSTRAP_SETUP_AGENT_SCRIPT, STACKADVISOR_SCRIPT, BOOTSTRAP_DIR_PROPERTY, MPACKS_STAGING_PATH_PROPERTY
from ambari_server.serverUtils import is_server_runing, refresh_stack_hash
from ambari_server.serverSetup import check_selinux, check_ambari_user, proceedJDBCProperties, SE_STATUS_DISABLED, SE_MODE_ENFORCING, configure_os_settings, \
download_and_install_jdk, prompt_db_properties, setup, \
AmbariUserChecks, AmbariUserChecksLinux, AmbariUserChecksWindows, JDKSetup, reset, setup_jce_policy, expand_jce_zip_file
from ambari_server.serverUpgrade import upgrade, upgrade_local_repo, change_objects_owner, upgrade_stack, \
run_stack_upgrade, run_metainfo_upgrade, run_schema_upgrade, move_user_custom_actions, find_and_copy_custom_services
from ambari_server.setupHttps import is_valid_https_port, setup_https, import_cert_and_key_action, get_fqdn, \
generate_random_string, get_cert_info, COMMON_NAME_ATTR, is_valid_cert_exp, NOT_AFTER_ATTR, NOT_BEFORE_ATTR, \
SSL_DATE_FORMAT, import_cert_and_key, is_valid_cert_host, setup_truststore, \
SRVR_ONE_WAY_SSL_PORT_PROPERTY, SRVR_TWO_WAY_SSL_PORT_PROPERTY, GANGLIA_HTTPS
from ambari_server.setupSecurity import adjust_directory_permissions, get_alias_string, get_ldap_event_spec_names, sync_ldap, LdapSyncOptions, \
configure_ldap_password, setup_ldap, REGEX_HOSTNAME_PORT, REGEX_TRUE_FALSE, REGEX_ANYTHING, setup_master_key, \
setup_ambari_krb5_jaas, ensure_can_start_under_current_user, generate_env
from ambari_server.userInput import get_YN_input, get_choice_string_input, get_validated_string_input, \
read_password
from ambari_server_main import get_ulimit_open_files, ULIMIT_OPEN_FILES_KEY, ULIMIT_OPEN_FILES_DEFAULT
from ambari_server.serverClassPath import ServerClassPath
from ambari_server.hostUpdate import update_host_names
from ambari_server.checkDatabase import check_database
from ambari_server import serverConfiguration
CURR_AMBARI_VERSION = "2.0.0"
@patch.object(platform, "linux_distribution", new = MagicMock(return_value=('Redhat', '6.4', 'Final')))
@patch("ambari_server.dbConfiguration_linux.get_postgre_hba_dir", new = MagicMock(return_value = "/var/lib/pgsql/data"))
@patch("ambari_server.dbConfiguration_linux.get_postgre_running_status", new = MagicMock(return_value = "running"))
class TestAmbariServer(TestCase):
def setUp(self):
out = StringIO.StringIO()
sys.stdout = out
def tearDown(self):
sys.stdout = sys.__stdout__
@not_for_platform(PLATFORM_WINDOWS)
@patch("ambari_server.dbConfiguration_linux.run_os_command")
def test_configure_pg_hba_ambaridb_users(self, run_os_command_method):
# Prepare mocks
run_os_command_method.return_value = (0, "", "")
database_username = "ffdf"
tf1 = tempfile.NamedTemporaryFile()
# Run test
PGConfig._configure_pg_hba_ambaridb_users(tf1.name, database_username)
# Check results
self.assertTrue(run_os_command_method.called)
string_expected = self.get_file_string(self.get_samples_dir("configure_pg_hba_ambaridb_users1"))
string_actual = self.get_file_string(tf1.name)
self.assertEquals(string_expected, string_actual)
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch("__builtin__.raw_input")
def test_servicename_regex(self, raw_input_method):
''' Test to make sure the service name can contain digits '''
set_silent(False)
raw_input_method.return_value = "OT100"
result = OracleConfig._get_validated_service_name("ambari", 1)
self.assertEqual("OT100", result, "Not accepting digits")
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch("__builtin__.raw_input")
def test_dbname_regex(self, raw_input_method):
    ''' Test to make sure the database name can contain digits '''
set_silent(False)
raw_input_method.return_value = "OT100"
result = LinuxDBMSConfig._get_validated_db_name("Database", "ambari")
self.assertEqual("OT100", result, "Not accepting digits")
pass
@not_for_platform(PLATFORM_WINDOWS)
def test_configure_pg_hba_postgres_user(self):
tf1 = tempfile.NamedTemporaryFile()
PGConfig.PG_HBA_CONF_FILE = tf1.name
with open(PGConfig.PG_HBA_CONF_FILE, 'w') as fout:
fout.write("\n")
fout.write("local all all md5\n")
fout.write("host all all 0.0.0.0/0 md5\n")
fout.write("host all all ::/0 md5\n")
PGConfig._configure_pg_hba_postgres_user()
expected = self.get_file_string(self.get_samples_dir(
"configure_pg_hba_ambaridb_users2"))
result = self.get_file_string(PGConfig.PG_HBA_CONF_FILE)
self.assertEqual(expected, result, "pg_hba_conf not processed")
mode = oct(os.stat(PGConfig.PG_HBA_CONF_FILE)[stat.ST_MODE])
str_mode = str(mode)[-4:]
self.assertEqual("0644", str_mode, "Wrong file permissions")
pass
@patch("__builtin__.raw_input")
def test_get_choice_string_input(self, raw_input_method):
prompt = "blablabla"
default = "default blablabla"
firstChoice = set(['yes', 'ye', 'y'])
secondChoice = set(['no', 'n'])
# test first input
raw_input_method.return_value = "Y"
result = get_choice_string_input(prompt, default,
firstChoice, secondChoice)
self.assertEquals(result, True)
raw_input_method.reset_mock()
# test second input
raw_input_method.return_value = "N"
result = get_choice_string_input(prompt, default,
firstChoice, secondChoice)
self.assertEquals(result, False)
raw_input_method.reset_mock()
# test enter pressed
raw_input_method.return_value = ""
result = get_choice_string_input(prompt, default,
firstChoice, secondChoice)
self.assertEquals(result, default)
raw_input_method.reset_mock()
# test wrong input
list_of_return_values = ['yes', 'dsad', 'fdsfds']
def side_effect(list):
return list_of_return_values.pop()
raw_input_method.side_effect = side_effect
result = get_choice_string_input(prompt, default,
firstChoice, secondChoice)
self.assertEquals(result, True)
self.assertEquals(raw_input_method.call_count, 3)
pass
@patch("re.search")
@patch("__builtin__.raw_input")
@patch("getpass.getpass")
def test_get_validated_string_input(self, get_pass_method,
raw_input_method, re_search_method):
prompt = "blabla"
default = "default_pass"
pattern = "pattern_pp"
description = "blabla2"
# check password input
self.assertFalse(False, get_silent())
is_pass = True
get_pass_method.return_value = "dfdsfdsfds"
result = get_validated_string_input(prompt, default,
pattern, description, is_pass)
self.assertEquals(get_pass_method.return_value, result)
get_pass_method.assure_called_once(prompt)
self.assertFalse(raw_input_method.called)
# check raw input
get_pass_method.reset_mock()
raw_input_method.reset_mock()
is_pass = False
raw_input_method.return_value = "dkf90ewuf0"
result = get_validated_string_input(prompt, default,
pattern, description, is_pass)
self.assertEquals(raw_input_method.return_value, result)
self.assertFalse(get_pass_method.called)
raw_input_method.assure_called_once(prompt)
pass
@not_for_platform(PLATFORM_WINDOWS)
def test_get_pass_file_path(self):
result = get_pass_file_path("/etc/ambari/conf_file", JDBC_PASSWORD_FILENAME)
self.assertEquals("/etc/ambari/password.dat", result)
pass
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch.object(_ambari_server_, "setup_security")
@patch("optparse.OptionParser")
@patch.object(_ambari_server_, "logger")
@patch("ambari_server.serverConfiguration.get_ambari_properties")
@patch.object(_ambari_server_, "setup_logging")
@patch.object(_ambari_server_, "init_logging")
def test_main_test_setup_security(self, init_logging_mock, setup_logging_mock, get_ambari_properties_mock,
logger_mock, OptionParserMock,
setup_security_method):
opm = OptionParserMock.return_value
options = MagicMock()
args = ["setup-security"]
opm.parse_args.return_value = (options, args)
options.dbms = None
options.security_option = "setup-security"
options.sid_or_sname = "sid"
setup_security_method.return_value = None
_ambari_server_.mainBody()
_ambari_server_.mainBody()
self.assertTrue(setup_security_method.called)
self.assertFalse(False, get_verbose())
self.assertFalse(False, get_silent())
pass
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch.object(_ambari_server_, "setup_ambari_krb5_jaas")
@patch.object(_ambari_server_, "setup_master_key")
@patch.object(_ambari_server_, "setup_truststore")
@patch.object(_ambari_server_, "setup_https")
@patch.object(_ambari_server_, "get_validated_string_input")
@patch.object(_ambari_server_, "logger")
def test_setup_security(self, logger_mock, get_validated_string_input_mock, setup_https_mock,
setup_truststore_mock, setup_master_key_mock,
setup_ambari_krb5_jaas_mock):
args = self._create_empty_options_mock()
get_validated_string_input_mock.return_value = '1'
_ambari_server_.setup_security(args)
self.assertTrue(setup_https_mock.called)
get_validated_string_input_mock.return_value = '2'
_ambari_server_.setup_security(args)
self.assertTrue(setup_master_key_mock.called)
get_validated_string_input_mock.return_value = '3'
_ambari_server_.setup_security(args)
self.assertTrue(setup_ambari_krb5_jaas_mock.called)
get_validated_string_input_mock.return_value = '4'
_ambari_server_.setup_security(args)
self.assertTrue(setup_truststore_mock.called)
get_validated_string_input_mock.return_value = '5'
_ambari_server_.setup_security(args)
self.assertTrue(setup_truststore_mock.called)
pass
@patch("re.sub")
@patch("fileinput.FileInput")
@patch("ambari_server.setupSecurity.get_validated_string_input")
@patch("ambari_server.setupSecurity.search_file")
@patch("os.path.exists")
def test_setup_ambari_krb5_jaas(self, exists_mock, search_mock,
get_validated_string_input_mock,
fileinput_mock, re_sub_mock):
search_mock.return_value = 'filepath'
exists_mock.return_value = False
# Negative case
try:
setup_ambari_krb5_jaas(self._create_empty_options_mock())
self.fail("Should throw exception")
except NonFatalException as fe:
# Expected
self.assertTrue("No jaas config file found at location" in fe.reason)
pass
# Positive case
exists_mock.reset_mock()
exists_mock.return_value = True
get_validated_string_input_mock.side_effect = ['aaa@aaa.cnn',
'pathtokeytab']
fileinput_mock.return_value = [ 'keyTab=xyz', 'principal=xyz' ]
setup_ambari_krb5_jaas(self._create_empty_options_mock())
self.assertTrue(fileinput_mock.called)
self.assertTrue(re_sub_mock.called)
self.assertTrue(re_sub_mock.call_args_list, [('aaa@aaa.cnn'),
('pathtokeytab')])
pass
@patch("sys.exit")
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch.object(_ambari_server_, "setup")
@patch.object(_ambari_server_, "start")
@patch.object(_ambari_server_, "stop")
@patch.object(_ambari_server_, "reset")
@patch("optparse.OptionParser")
@patch.object(_ambari_server_, "logger")
@patch("ambari_server.serverConfiguration.get_ambari_properties")
@patch.object(_ambari_server_, "setup_logging")
@patch.object(_ambari_server_, "init_logging")
def test_main_test_setup(self, init_logging_mock, setup_logging_mock, get_ambari_properties_mock,
logger_mock, OptionParserMock, reset_method, stop_method,
start_method, setup_method, exit_mock):
opm = OptionParserMock.return_value
options = self._create_empty_options_mock()
args = ["setup"]
opm.parse_args.return_value = (options, args)
options.dbms = None
options.sid_or_sname = "sid"
_ambari_server_.mainBody()
self.assertTrue(setup_method.called)
self.assertFalse(start_method.called)
self.assertFalse(stop_method.called)
self.assertFalse(reset_method.called)
self.assertFalse(False, get_verbose())
self.assertFalse(False, get_silent())
setup_method.reset_mock()
start_method.reset_mock()
stop_method.reset_mock()
reset_method.reset_mock()
exit_mock.reset_mock()
args = ["setup", "-v"]
options = self._create_empty_options_mock()
opm.parse_args.return_value = (options, args)
options.dbms = None
options.sid_or_sname = "sid"
setup_method.side_effect = Exception("Unexpected error")
try:
_ambari_server_.mainBody()
except Exception:
self.assertTrue(True)
self.assertTrue(setup_method.called)
self.assertFalse(start_method.called)
self.assertFalse(stop_method.called)
self.assertFalse(reset_method.called)
self.assertTrue(get_verbose())
setup_method.reset_mock()
start_method.reset_mock()
stop_method.reset_mock()
reset_method.reset_mock()
exit_mock.reset_mock()
args = ["setup"]
options = self._create_empty_options_mock()
opm.parse_args.return_value = (options, args)
options.dbms = None
options.sid_or_sname = "sid"
options.verbose = False
setup_method.side_effect = Exception("Unexpected error")
_ambari_server_.mainBody()
self.assertTrue(exit_mock.called)
self.assertTrue(setup_method.called)
self.assertFalse(start_method.called)
self.assertFalse(stop_method.called)
self.assertFalse(reset_method.called)
self.assertFalse(get_verbose())
pass
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch.object(_ambari_server_, "setup")
@patch("optparse.OptionParser")
@patch.object(_ambari_server_, "logger")
@patch("ambari_server.serverConfiguration.get_ambari_properties")
@patch.object(_ambari_server_, "setup_logging")
@patch.object(_ambari_server_, "init_logging")
def test_main_with_preset_dbms(self, init_logging_mock, setup_logging_mock, get_ambari_properties_mock,
logger_mock, optionParserMock, setup_method):
opm = optionParserMock.return_value
options = self._create_empty_options_mock()
args = ["setup"]
opm.parse_args.return_value = (options, args)
options.dbms = "sqlanywhere"
options.sid_or_sname = "sname"
_ambari_server_.mainBody()
self.assertTrue(setup_method.called)
self.assertEquals(options.database_index, 5)
pass
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch.object(_ambari_server_, "setup")
@patch.object(_ambari_server_, "fix_database_options")
@patch("optparse.OptionParser")
@patch.object(_ambari_server_, "logger")
@patch("ambari_server.serverConfiguration.get_ambari_properties")
@patch.object(_ambari_server_, "setup_logging")
@patch.object(_ambari_server_, "init_logging")
def test_fix_database_options_called(self, init_logging_mock, setup_logging_mock, get_ambari_properties_mock, logger_mock, optionParserMock,
fixDBOptionsMock, setup_method):
opm = optionParserMock.return_value
options = self._create_empty_options_mock()
args = ["setup"]
opm.parse_args.return_value = (options, args)
_ambari_server_.mainBody()
self.assertTrue(setup_method.called)
self.assertTrue(fixDBOptionsMock.called)
set_silent(False)
pass
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch.object(_ambari_server_, "setup")
@patch.object(_ambari_server_, "start")
@patch.object(_ambari_server_, "stop")
@patch.object(_ambari_server_, "reset")
@patch("optparse.OptionParser")
@patch.object(_ambari_server_, "logger")
@patch("ambari_server.serverConfiguration.get_ambari_properties")
@patch.object(_ambari_server_, "setup_logging")
@patch.object(_ambari_server_, "init_logging")
def test_main_test_start(self, init_logging_mock, setup_logging_mock, get_ambari_properties_mock, logger_mock,
optionParserMock, reset_method, stop_method,
start_method, setup_method):
opm = optionParserMock.return_value
options = self._create_empty_options_mock()
args = ["setup"]
opm.parse_args.return_value = (options, args)
options.dbms = None
options.sid_or_sname = "sname"
_ambari_server_.mainBody()
self.assertTrue(setup_method.called)
self.assertFalse(start_method.called)
self.assertFalse(stop_method.called)
self.assertFalse(reset_method.called)
self.assertFalse(get_verbose())
self.assertFalse(get_silent())
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch.object(_ambari_server_, "setup")
@patch.object(_ambari_server_, "start")
@patch.object(_ambari_server_, "stop")
@patch.object(_ambari_server_, "reset")
def test_main_test_start_debug_short(self, reset_method, stop_method,
start_method, setup_method):
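# "start -g" should run the start action with debug mode enabled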
temp_args = sys.argv
try:
sys.argv = ["ambari-server", "start", "-g"]
_ambari_server_.mainBody()
self.assertFalse(setup_method.called)
self.assertTrue(start_method.called)
self.assertFalse(stop_method.called)
self.assertFalse(reset_method.called)
self.assertTrue(get_debug_mode())
finally:
sys.argv = temp_args
pass
@only_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch.object(_ambari_server_, "setup")
@patch.object(_ambari_server_, "start")
@patch.object(_ambari_server_, "stop")
@patch.object(_ambari_server_, "reset")
def test_main_test_start_debug_short(self, reset_method, stop_method,
start_method, setup_method):
temp_args = sys.argv
try:
sys.argv = ["ambari-server", "pstart", "-g"]
_ambari_server_.mainBody()
self.assertFalse(setup_method.called)
self.assertTrue(start_method.called)
self.assertFalse(stop_method.called)
self.assertFalse(reset_method.called)
self.assertTrue(get_debug_mode())
finally:
sys.argv = temp_args
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch.object(_ambari_server_, "setup")
@patch.object(_ambari_server_, "start")
@patch.object(_ambari_server_, "stop")
@patch.object(_ambari_server_, "reset")
def test_main_test_start_debug_long(self, reset_method, stop_method,
start_method, setup_method):
temp_args = sys.argv
try:
sys.argv = ["ambari-server", "start", "--debug"]
_ambari_server_.mainBody()
self.assertFalse(setup_method.called)
self.assertTrue(start_method.called)
self.assertFalse(stop_method.called)
self.assertFalse(reset_method.called)
self.assertTrue(get_debug_mode())
finally:
sys.argv = temp_args
pass
@only_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch.object(_ambari_server_, "setup")
@patch.object(_ambari_server_, "start")
@patch.object(_ambari_server_, "stop")
@patch.object(_ambari_server_, "reset")
def test_main_test_start_debug_long(self, reset_method, stop_method,
start_method, setup_method):
temp_args = sys.argv
try:
sys.argv = ["ambari-server", "pstart", "--debug"]
_ambari_server_.mainBody()
self.assertFalse(setup_method.called)
self.assertTrue(start_method.called)
self.assertFalse(stop_method.called)
self.assertFalse(reset_method.called)
self.assertTrue(get_debug_mode())
finally:
sys.argv = temp_args
pass
# Backup is not yet supported on Windows
@not_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch.object(_ambari_server_, "setup")
@patch.object(_ambari_server_, "start")
@patch.object(_ambari_server_, "stop")
@patch.object(_ambari_server_, "reset")
@patch.object(_ambari_server_, "backup")
@patch.object(_ambari_server_, "restore")
@patch("optparse.OptionParser")
@patch.object(_ambari_server_, "logger")
@patch("ambari_server.serverConfiguration.get_ambari_properties")
@patch.object(_ambari_server_, "setup_logging")
@patch.object(_ambari_server_, "init_logging")
def test_main_test_backup(self, init_logging_mock, setup_logging_mock, get_ambari_properties_mock, logger_mock,
optionParserMock, restore_mock, backup_mock, reset_method, stop_method,
start_method, setup_method):
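# The "backup" action should call backup() only; no other action may run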
opm = optionParserMock.return_value
options = self._create_empty_options_mock()
args = ["backup"]
opm.parse_args.return_value = (options, args)
options.dbms = None
options.sid_or_sname = "sname"
_ambari_server_.mainBody()
self.assertTrue(backup_mock.called)
self.assertFalse(restore_mock.called)
self.assertFalse(setup_method.called)
self.assertFalse(start_method.called)
self.assertFalse(stop_method.called)
self.assertFalse(reset_method.called)
self.assertFalse(get_verbose())
self.assertFalse(get_silent())
pass
# Restore is not yet supported on Windows
@not_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch.object(_ambari_server_, "setup")
@patch.object(_ambari_server_, "start")
@patch.object(_ambari_server_, "stop")
@patch.object(_ambari_server_, "reset")
@patch.object(_ambari_server_, "backup")
@patch.object(_ambari_server_, "restore")
@patch("optparse.OptionParser")
@patch.object(_ambari_server_, "logger")
@patch("ambari_server.serverConfiguration.get_ambari_properties")
@patch.object(_ambari_server_, "setup_logging")
@patch.object(_ambari_server_, "init_logging")
def test_main_test_restore(self, init_logging_mock, setup_logging_mock, get_ambari_properties_mock, logger_mock,
optionParserMock, restore_mock, backup_mock, reset_method, stop_method,
start_method, setup_method):
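# The "restore" action should call restore() only; no other action may run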
opm = optionParserMock.return_value
options = self._create_empty_options_mock()
args = ["restore"]
opm.parse_args.return_value = (options, args)
options.dbms = None
options.sid_or_sname = "sname"
_ambari_server_.mainBody()
self.assertTrue(restore_mock.called)
self.assertFalse(backup_mock.called)
self.assertFalse(setup_method.called)
self.assertFalse(start_method.called)
self.assertFalse(stop_method.called)
self.assertFalse(reset_method.called)
self.assertFalse(get_verbose())
self.assertFalse(get_silent())
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch.object(_ambari_server_, "setup")
@patch.object(_ambari_server_, "start")
@patch.object(_ambari_server_, "is_server_runing")
@patch.object(_ambari_server_, "reset")
@patch("optparse.OptionParser")
def test_main_test_stop(self, optionParserMock, reset_method, is_server_runing_method,
start_method, setup_method):
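# "stop" should check the running state and clear exit_message when the server is not running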
opm = optionParserMock.return_value
options = self._create_empty_options_mock()
del options.exit_message
args = ["stop"]
opm.parse_args.return_value = (options, args)
is_server_runing_method.return_value = (False, None)
options.dbms = None
options.sid_or_sname = "sid"
_ambari_server_.mainBody()
self.assertFalse(setup_method.called)
self.assertFalse(start_method.called)
self.assertTrue(is_server_runing_method.called)
self.assertFalse(reset_method.called)
self.assertFalse(get_verbose())
self.assertFalse(get_silent())
self.assertTrue(options.exit_message is None)
pass
@only_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch.object(_ambari_server_, "setup")
@patch.object(_ambari_server_, "start")
@patch("os_windows.win32serviceutil.WaitForServiceStatus")
@patch("os_windows.win32serviceutil.StopService")
@patch("os_windows.win32serviceutil.StopServiceWithDeps")
@patch.object(_ambari_server_, "reset")
def test_main_test_stop(self, reset_method, service_stop_w_deps_method,
service_stop_method, service_status_wait_method,
start_method, setup_method):
temp_args = sys.argv
try:
sys.argv = ["ambari-server", "stop"]
_ambari_server_.mainBody()
self.assertFalse(setup_method.called)
self.assertFalse(start_method.called)
self.assertTrue(service_stop_w_deps_method.called)
self.assertTrue(service_status_wait_method.called)
self.assertFalse(reset_method.called)
self.assertFalse(get_verbose())
self.assertFalse(get_silent())
finally:
sys.argv = temp_args
pass
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch.object(_ambari_server_, "setup")
@patch.object(_ambari_server_, "start")
@patch.object(_ambari_server_, "stop")
@patch.object(_ambari_server_, "reset")
@patch("optparse.OptionParser")
@patch.object(_ambari_server_, "logger")
@patch("ambari_server.serverConfiguration.get_ambari_properties")
@patch.object(_ambari_server_, "setup_logging")
@patch.object(_ambari_server_, "init_logging")
def test_main_test_reset(self, init_logging_mock, setup_logging_mock, get_ambari_properties_mock,
logger_mock, optionParserMock, reset_method, stop_method,
start_method, setup_method):
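# The "reset" action should call reset() only; no other action may run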
opm = optionParserMock.return_value
options = self._create_empty_options_mock()
args = ["reset"]
opm.parse_args.return_value = (options, args)
options.dbms = None
options.sid_or_sname = "sid"
_ambari_server_.mainBody()
self.assertFalse(setup_method.called)
self.assertFalse(start_method.called)
self.assertFalse(stop_method.called)
self.assertTrue(reset_method.called)
self.assertFalse(get_verbose())
self.assertFalse(get_silent())
pass
@not_for_platform(PLATFORM_WINDOWS)
def test_configure_postgresql_conf(self):
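# _configure_postgresql_conf() should rewrite listen_addresses and leave the file with 0644 permissions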
tf1 = tempfile.NamedTemporaryFile()
PGConfig.POSTGRESQL_CONF_FILE = tf1.name
with open(PGConfig.POSTGRESQL_CONF_FILE, 'w') as f:
f.write("#listen_addresses = '127.0.0.1' #\n")
f.write("#listen_addresses = '127.0.0.1'")
PGConfig._configure_postgresql_conf()
expected = self.get_file_string(self.get_samples_dir(
"configure_postgresql_conf1"))
result = self.get_file_string(PGConfig.POSTGRESQL_CONF_FILE)
self.assertEqual(expected, result, "postgresql.conf not updated")
mode = oct(os.stat(PGConfig.POSTGRESQL_CONF_FILE)[stat.ST_MODE])
str_mode = str(mode)[-4:]
self.assertEqual("0644", str_mode, "Wrong file permissions")
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch.object(PGConfig, "_restart_postgres")
@patch.object(PGConfig, "_get_postgre_status")
@patch.object(PGConfig, "_configure_postgresql_conf")
@patch("ambari_server.dbConfiguration_linux.run_os_command")
def test_configure_postgres(self,
run_os_command_mock,
configure_postgresql_conf_mock,
get_postgre_status_mock,
restart_postgres_mock):
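# _configure_postgres(): an existing pg_hba backup skips reconfiguration; otherwise
# pg_hba.conf is backed up and set to 0644, and postgres is restarted when not running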
args = MagicMock()
properties = Properties()
args.database_index = 0
del args.dbms
del args.database_host
del args.database_port
del args.database_name
del args.database_username
del args.database_password
del args.silent
factory = DBMSConfigFactory()
dbConfig = factory.create(args, properties)
self.assertEqual(dbConfig.dbms, "postgres")
self.assertEqual(dbConfig.persistence_type, "local")
tf1 = tempfile.NamedTemporaryFile()
tf2 = tempfile.NamedTemporaryFile()
PGConfig.PG_HBA_CONF_FILE = tf1.name
PGConfig.PG_HBA_CONF_FILE_BACKUP = tf2.name
out = StringIO.StringIO()
sys.stdout = out
retcode, out1, err = dbConfig._configure_postgres()
sys.stdout = sys.__stdout__
self.assertEqual(0, retcode)
self.assertEqual("Backup for pg_hba found, reconfiguration not required\n",
out.getvalue())
tf2.close()
get_postgre_status_mock.return_value = PGConfig.PG_STATUS_RUNNING, 0, "", ""
run_os_command_mock.return_value = 0, "", ""
restart_postgres_mock.return_value = 0, "", ""
rcode, out, err = dbConfig._configure_postgres()
self.assertTrue(os.path.isfile(PGConfig.PG_HBA_CONF_FILE_BACKUP),
"postgresql.conf backup not created")
self.assertTrue(run_os_command_mock.called)
mode = oct(os.stat(PGConfig.PG_HBA_CONF_FILE)[stat.ST_MODE])
str_mode = str(mode)[-4:]
self.assertEqual("0644", str_mode, "Wrong file permissions")
self.assertTrue(configure_postgresql_conf_mock.called)
self.assertEqual(0, rcode)
os.unlink(PGConfig.PG_HBA_CONF_FILE_BACKUP)
get_postgre_status_mock.return_value = "stopped", 0, "", ""
rcode, out, err = dbConfig._configure_postgres()
self.assertEqual(0, rcode)
os.unlink(PGConfig.PG_HBA_CONF_FILE_BACKUP)
sys.stdout = sys.__stdout__
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch("time.sleep")
@patch("subprocess.Popen")
@patch("ambari_server.dbConfiguration_linux.run_os_command")
@patch.object(PGConfig, "_get_postgre_status")
@patch("ambari_server.dbConfiguration_linux.print_info_msg")
def test_restart_postgres(self, printInfoMsg_mock, get_postgre_status_mock,
run_os_command_mock, popenMock, sleepMock):
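# _restart_postgres() returns 0 when the child process exits cleanly and propagates the failure code otherwise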
p = MagicMock()
p.poll.return_value = 0
popenMock.return_value = p
retcode, out, err = PGConfig._restart_postgres()
self.assertEqual(0, retcode)
p.poll.return_value = None
get_postgre_status_mock.return_value = "stopped", 0, "", ""
run_os_command_mock.return_value = (1, None, None)
retcode, out, err = PGConfig._restart_postgres()
self.assertEqual(1, retcode)
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch("shlex.split")
@patch("subprocess.Popen")
@patch("ambari_commons.os_linux.print_info_msg")
def test_run_os_command(self, printInfoMsg_mock, popenMock, splitMock):
p = MagicMock()
p.communicate.return_value = (None, None)
p.returncode = 3
popenMock.return_value = p
# with list arg
cmd = ["exec", "arg"]
run_os_command(cmd)
self.assertFalse(splitMock.called)
# with str arg
resp = run_os_command("runme")
self.assertEqual(3, resp[0])
self.assertTrue(splitMock.called)
pass
@only_for_platform(PLATFORM_WINDOWS)
@patch("shlex.split")
@patch("subprocess.Popen")
@patch("ambari_commons.os_windows.print_info_msg")
def test_run_os_command(self, printInfoMsg_mock, popenMock, splitMock):
p = MagicMock()
p.communicate.return_value = (None, None)
p.returncode = 3
popenMock.return_value = p
# with list arg
cmd = ["exec", "arg"]
run_os_command(cmd)
self.assertFalse(splitMock.called)
# with str arg
resp = run_os_command("runme")
self.assertEqual(3, resp[0])
self.assertTrue(splitMock.called)
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch("ambari_server.serverConfiguration.get_conf_dir")
@patch("ambari_server.serverConfiguration.search_file")
def test_write_property(self, search_file_mock, get_conf_dir_mock):
expected_content = "key1=val1\n"
tf1 = tempfile.NamedTemporaryFile()
search_file_mock.return_value = tf1.name
write_property("key1", "val1")
result = tf1.read()
self.assertTrue(expected_content in result)
pass
@only_for_platform(PLATFORM_WINDOWS)
@patch("ambari_server.serverConfiguration.get_conf_dir")
@patch("ambari_server.serverConfiguration.search_file")
def test_write_property(self, search_file_mock, get_conf_dir_mock):
expected_content = "key1=val1\n"
tf1 = tempfile.NamedTemporaryFile("r+b", delete=False)
search_file_mock.return_value = tf1.name
tf1.close()
write_property("key1", "val1")
hf1 = open(tf1.name, "r")
try:
result = hf1.read()
self.assertTrue(expected_content in result)
finally:
hf1.close()
os.unlink(tf1.name)
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch("ambari_server.dbConfiguration.decrypt_password_for_alias")
@patch("ambari_server.dbConfiguration_linux.run_os_command")
def test_setup_db(self, run_os_command_mock,
decrypt_password_for_alias_mock):
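# _setup_db() should decrypt the aliased JDBC password and issue two shell commands on success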
args = MagicMock()
del args.database_index
del args.dbms
del args.database_host
del args.database_port
del args.database_name
del args.database_username
del args.database_password
del args.init_script_file
del args.drop_script_file
properties = Properties()
properties.process_pair(JDBC_PASSWORD_PROPERTY, get_alias_string("mypwdalias"))
decrypt_password_for_alias_mock.return_value = "password"
dbms = PGConfig(args, properties, "local")
self.assertTrue(decrypt_password_for_alias_mock.called)
run_os_command_mock.return_value = (0, None, None)
result = dbms._setup_db()
self.assertTrue(run_os_command_mock.called)
self.assertEqual(run_os_command_mock.call_count, 2)
self.assertEqual((0, None, None), result)
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch("ambari_server.dbConfiguration.decrypt_password_for_alias")
@patch("time.sleep")
@patch("ambari_server.dbConfiguration_linux.run_os_command")
def test_setup_db_connect_attempts_fail(self, run_os_command_mock,
sleep_mock, decrypt_password_for_alias_mock):
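# When every connection attempt fails, _setup_db() should sleep between the retries and return the last error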
args = MagicMock()
del args.database_index
del args.dbms
del args.database_host
del args.database_port
del args.database_name
del args.database_username
del args.database_password
del args.init_script_file
del args.drop_script_file
properties = Properties()
decrypt_password_for_alias_mock.return_value = "password"
dbms = PGConfig(args, properties, "local")
run_os_command_mock.side_effect = [(1, "error", "error"), (1, "error", "error"),
(1, "error", "error")]
result = dbms._setup_db()
self.assertTrue(run_os_command_mock.called)
self.assertEqual((1, 'error', 'error'), result)
self.assertEqual(2, sleep_mock.call_count)
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch("ambari_server.dbConfiguration.decrypt_password_for_alias")
@patch("time.sleep")
@patch("ambari_server.dbConfiguration_linux.run_os_command")
def test_setup_db_connect_attempts_success(self, run_os_command_mock,
sleep_mock, decrypt_password_for_alias_mock):
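# When a retry succeeds, _setup_db() should stop retrying; only one sleep is expected here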
args = MagicMock()
del args.database_index
del args.dbms
del args.database_host
del args.database_port
del args.database_name
del args.database_username
del args.database_password
del args.init_script_file
del args.drop_script_file
properties = Properties()
decrypt_password_for_alias_mock.return_value = "password"
dbms = PGConfig(args, properties, "local")
run_os_command_mock.side_effect = [(1, "error", "error"), (0, None, None),
(0, None, None)]
result = dbms._setup_db()
self.assertTrue(run_os_command_mock.called)
self.assertEqual((0, None, None), result)
self.assertEqual(1, sleep_mock.call_count)
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch("ambari_server.serverSetup.get_YN_input")
@patch("ambari_server.serverSetup.run_os_command")
def test_check_selinux(self, run_os_command_mock, getYNInput_mock):
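# check_selinux() passes when SELinux is disabled and, when enforcing, asks the user for confirmation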
run_os_command_mock.return_value = (0, SE_STATUS_DISABLED,
None)
rcode = check_selinux()
self.assertEqual(0, rcode)
getYNInput_mock.return_value = True
run_os_command_mock.return_value = (0, "enabled "
+ SE_MODE_ENFORCING,
None)
rcode = check_selinux()
self.assertEqual(0, rcode)
self.assertTrue(run_os_command_mock.called)
self.assertTrue(getYNInput_mock.called)
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch("ambari_server.serverConfiguration.print_info_msg")
def test_get_ambari_jars(self, printInfoMsg_mock):
env = "/ambari/jars"
os.environ[AMBARI_SERVER_LIB] = env
result = get_ambari_jars()
self.assertEqual(env, result)
del os.environ[AMBARI_SERVER_LIB]
result = get_ambari_jars()
self.assertEqual("/usr/lib/ambari-server", result)
self.assertTrue(printInfoMsg_mock.called)
pass
@only_for_platform(PLATFORM_WINDOWS)
@patch("ambari_server.serverConfiguration.print_info_msg")
def test_get_ambari_jars(self, printInfoMsg_mock):
env = "\\ambari\\jars"
os.environ[AMBARI_SERVER_LIB] = env
result = get_ambari_jars()
self.assertEqual(env, result)
del os.environ[AMBARI_SERVER_LIB]
result = get_ambari_jars()
self.assertEqual("lib", result)
self.assertTrue(printInfoMsg_mock.called)
pass
@patch("ambari_server.serverConfiguration.print_info_msg")
def test_get_conf_dir(self, printInfoMsg_mock):
env = "/dummy/ambari/conf"
os.environ[AMBARI_CONF_VAR] = env
result = get_conf_dir()
self.assertEqual(env, result)
del os.environ[AMBARI_CONF_VAR]
result = get_conf_dir()
self.assertEqual("/etc/ambari-server/conf", result)
pass
@only_for_platform(PLATFORM_WINDOWS)
@patch("ambari_server.serverConfiguration.print_info_msg")
def test_get_conf_dir(self, printInfoMsg_mock):
env = "\\dummy\\ambari\\conf"
os.environ[AMBARI_CONF_VAR] = env
result = get_conf_dir()
self.assertEqual(env, result)
del os.environ[AMBARI_CONF_VAR]
result = get_conf_dir()
self.assertEqual("conf", result)
pass
def _test_search_file(self):
path = os.path.dirname(__file__)
result = search_file(__file__, path)
expected = os.path.abspath(__file__)
self.assertEqual(expected, result)
result = search_file("non_existent_file", path)
self.assertEqual(None, result)
pass
@patch("ambari_server.serverConfiguration.search_file")
def test_find_properties_file(self, search_file_mock):
# Testing case when file is not found
search_file_mock.return_value = None
try:
find_properties_file()
self.fail("File not found'")
except FatalException:
# Expected
pass
self.assertTrue(search_file_mock.called)
# Testing case when file is found
value = MagicMock()
search_file_mock.return_value = value
result = find_properties_file()
self.assertTrue(result is value)
pass
@patch("ambari_server.serverConfiguration.get_ambari_properties")
@patch("ambari_server.serverConfiguration.Properties")
def test_read_ambari_user(self, properties_mock, get_ambari_properties_mock):
# Testing with defined user
properties_mock.__getitem__.return_value = "dummy_user"
get_ambari_properties_mock.return_value = properties_mock
user = read_ambari_user()
self.assertEquals(user, "dummy_user")
# Testing with undefined user
properties_mock.__getitem__.return_value = None
user = read_ambari_user()
self.assertEquals(user, None)
pass
@patch("ambari_server.serverConfiguration.get_ambari_properties")
@patch("ambari_server.serverConfiguration.Properties")
def test_read_active_instance(self, properties_mock, get_ambari_properties_mock):
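# get_is_active_instance() should be True only for "true" or when the property is absent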
# Set up the mock
properties_mock.propertyNames = MagicMock(return_value=['active.instance'])
get_ambari_properties_mock.return_value = properties_mock
# Test with explicitly set value of "false" (should return False)
properties_mock.__getitem__.return_value = "false"
is_active_instance = get_is_active_instance()
self.assertFalse(is_active_instance)
# Test with empty string (should return False)
properties_mock.__getitem__.return_value = ""
is_active_instance = get_is_active_instance()
self.assertFalse(is_active_instance)
# Test with a random string (should return False)
properties_mock.__getitem__.return_value = "xyz"
is_active_instance = get_is_active_instance()
self.assertFalse(is_active_instance)
# Test with an explicit "false" string again (should return False)
properties_mock.__getitem__.return_value = "false"
is_active_instance = get_is_active_instance()
self.assertFalse(is_active_instance)
# Test with explicitly set value of "true" (should return True)
properties_mock.__getitem__.return_value = "true"
is_active_instance = get_is_active_instance()
self.assertTrue(is_active_instance)
# Test with missing active.instance entry (should return True)
properties_mock.propertyNames = MagicMock(return_value=[])
is_active_instance = get_is_active_instance()
self.assertTrue(is_active_instance)
pass
@patch("ambari_server.setupSecurity.get_file_owner")
@patch("ambari_server.setupSecurity.get_ambari_repo_file_full_name")
@patch("os.path.exists")
@patch("ambari_server.setupSecurity.set_file_permissions")
@patch("ambari_server.setupSecurity.get_ambari_properties")
@patch("ambari_server.setupSecurity.get_resources_location")
@patch("ambari_server.setupSecurity.get_value_from_properties")
@patch("os.mkdir")
@patch("shutil.rmtree")
@patch("ambari_commons.os_utils.print_info_msg")
@patch("ambari_server.setupSecurity.change_owner")
def test_adjust_directory_permissions(self, change_owner_mock, print_info_msg_mock, rmtree_mock, mkdir_mock,
get_value_from_properties_mock, get_resources_location_mock,
get_ambari_properties_mock, set_file_permissions_mock, exists_mock,
get_ambari_repo_file_full_name_mock, get_file_owner_mock):
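# Covers bootstrap dir recreation, recursive vs. non-recursive ownership entries
# and ambari repo file permission handling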
# Testing bootstrap dir wipe
properties_mock = Properties()
properties_mock.process_pair(JDK_NAME_PROPERTY, "dummy_jdk")
properties_mock.process_pair(JCE_NAME_PROPERTY, "dummy_jce")
properties_mock.process_pair(JAVA_HOME_PROPERTY, "dummy_java_home")
get_ambari_properties_mock.return_value = properties_mock
get_value_from_properties_mock.return_value = "dummy_bootstrap_dir"
get_resources_location_mock.return_value = "dummy_resources_dir"
exists_mock.return_value = False
adjust_directory_permissions("user")
self.assertTrue(mkdir_mock.called)
set_file_permissions_mock.reset_mock()
change_owner_mock.reset_mock()
# Test recursive calls
old_adjust_owner_list = configDefaults.NR_ADJUST_OWNERSHIP_LIST
old_change_owner_list = configDefaults.NR_CHANGE_OWNERSHIP_LIST
try:
configDefaults.NR_ADJUST_OWNERSHIP_LIST = [
( "/etc/ambari-server/conf", "755", "{0}", True ),
( "/etc/ambari-server/conf/ambari.properties", "644", "{0}", False )
]
configDefaults.NR_CHANGE_OWNERSHIP_LIST = [
( "/etc/ambari-server", "{0}", True )
]
adjust_directory_permissions("user")
self.assertTrue(len(set_file_permissions_mock.call_args_list) ==
len(configDefaults.NR_ADJUST_OWNERSHIP_LIST))
self.assertEquals(set_file_permissions_mock.call_args_list[0][0][3], True)
self.assertEquals(set_file_permissions_mock.call_args_list[1][0][3], False)
self.assertTrue(len(change_owner_mock.call_args_list) ==
len(configDefaults.NR_CHANGE_OWNERSHIP_LIST))
self.assertEquals(change_owner_mock.call_args_list[0][0][2], True)
finally:
configDefaults.NR_ADJUST_OWNERSHIP_LIST = old_adjust_owner_list
configDefaults.NR_CHANGE_OWNERSHIP_LIST = old_change_owner_list
pass
#
# Test ambari repo file permission change call
#
# Test the case when ambari repo file is available
# Reset the set_file_permissions() mock function
set_file_permissions_mock.reset_mock()
# Save the existing permissions list
old_adjust_owner_list = configDefaults.NR_ADJUST_OWNERSHIP_LIST
# Set up the mock function for os_utils.get_ambari_repo_file_full_name()
get_ambari_repo_file_full_name_mock.return_value = "ambari.dummy.repo"
# Set up the mock function for os_utils.get_file_owner()
get_file_owner_mock.return_value = "dummy.root"
# Set os.path.exists to return true when the input file is an ambari repo file
def file_exists_side_effect(*args, **kwargs):
if args[0] == get_ambari_repo_file_full_name_mock():
return True
else:
return False
exists_mock.side_effect = file_exists_side_effect
exists_mock.return_value = None
try:
# Clear the list of files whose permissions are to be changed
configDefaults.NR_ADJUST_OWNERSHIP_LIST = [
]
# Call the function to be tested.
adjust_directory_permissions("dummy_user")
# Assert that set_file_permissions() was called
self.assertTrue(set_file_permissions_mock.called)
# One of the entries in NR_ADJUST_OWNERSHIP_LIST should be the full path to the ambari repo file.
# These are the expected values:
ambari_repo_file_entry = (
get_ambari_repo_file_full_name_mock(),
'644',
get_file_owner_mock(),
False
)
# Assert the arguments to the call set_file_permissions() - got from NR_ADJUST_OWNERSHIP_LIST
# Flag to ensure we found our entry in the set_file_permissions() call
entry_found = False
for args_entry in set_file_permissions_mock.call_args_list:
if args_entry[0][0] == ambari_repo_file_entry[0]: # File name
# ambari repo file name matched; assert the rest of the entries
self.assertEquals(args_entry[0][1], ambari_repo_file_entry[1]) # Permissions
self.assertEquals(args_entry[0][2], ambari_repo_file_entry[2]) # File owner
self.assertEquals(args_entry[0][3], ambari_repo_file_entry[3]) # Non-recursive
entry_found = True
break
# Ensure that the ambari repo file entry was found
self.assertTrue(entry_found)
finally:
# Restore the permissions list
configDefaults.NR_ADJUST_OWNERSHIP_LIST = old_adjust_owner_list
pass
# Test the case when the ambari repo file is unavailable
# Reset the set_file_permissions() mock function
set_file_permissions_mock.reset_mock()
# Save the existing permissions list
old_adjust_owner_list = configDefaults.NR_ADJUST_OWNERSHIP_LIST
# Set up the mock function for os_utils.get_ambari_repo_file_full_name()
get_ambari_repo_file_full_name_mock.return_value = "ambari.dummy.repo"
# Set up the mock function for os_utils.get_file_owner()
get_file_owner_mock.return_value = "dummy.root"
# Set os.path.exists to return false always
exists_mock.side_effect = None
exists_mock.return_value = False
try:
# Clear the list of files whose permissions are to be changed
configDefaults.NR_ADJUST_OWNERSHIP_LIST = [
]
# Call the function to be tested.
adjust_directory_permissions("dummy_user")
# One of the entries in NR_ADJUST_OWNERSHIP_LIST should be the full path to the ambari repo file.
# These are the expected values:
ambari_repo_file_entry = (
get_ambari_repo_file_full_name_mock(),
'644',
get_file_owner_mock(),
False
)
# Assert the arguments to the call set_file_permissions() - got from NR_ADJUST_OWNERSHIP_LIST
# Flag to ensure we found our entry in the set_file_permissions() call
entry_found = False
for args_entry in set_file_permissions_mock.call_args_list:
if args_entry[0][0] == ambari_repo_file_entry[0]: # File name
entry_found = True
break
# Ensure that the ambari repo file entry was not found
self.assertFalse(entry_found)
finally:
# Restore the permissions list
configDefaults.NR_ADJUST_OWNERSHIP_LIST = old_adjust_owner_list
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch("os.path.exists")
@patch("ambari_commons.os_linux.os_run_os_command")
@patch("ambari_commons.os_linux.print_warning_msg")
@patch("ambari_commons.os_utils.print_info_msg")
def test_set_file_permissions(self, print_info_msg_mock, print_warning_msg_mock,
run_os_command_mock, exists_mock):
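# set_file_permissions() runs chmod and chown: a missing file skips both,
# a failing command only warns, and the recursive flag adds "-R"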
# Testing the non-existent file scenario
exists_mock.return_value = False
set_file_permissions("dummy-file", "dummy-mod",
"dummy-user", False)
self.assertFalse(run_os_command_mock.called)
self.assertTrue(print_info_msg_mock.called)
run_os_command_mock.reset_mock()
print_warning_msg_mock.reset_mock()
# Testing OK scenario
exists_mock.return_value = True
run_os_command_mock.side_effect = [(0, "", ""), (0, "", "")]
set_file_permissions("dummy-file", "dummy-mod",
"dummy-user", False)
self.assertTrue(len(run_os_command_mock.call_args_list) == 2)
self.assertFalse(print_warning_msg_mock.called)
run_os_command_mock.reset_mock()
print_warning_msg_mock.reset_mock()
# Testing first command fail
run_os_command_mock.side_effect = [(1, "", ""), (0, "", "")]
set_file_permissions("dummy-file", "dummy-mod",
"dummy-user", False)
self.assertTrue(len(run_os_command_mock.call_args_list) == 2)
self.assertTrue(print_warning_msg_mock.called)
run_os_command_mock.reset_mock()
print_warning_msg_mock.reset_mock()
# Testing second command fail
run_os_command_mock.side_effect = [(0, "", ""), (1, "", "")]
set_file_permissions("dummy-file", "dummy-mod",
"dummy-user", False)
self.assertTrue(len(run_os_command_mock.call_args_list) == 2)
self.assertTrue(print_warning_msg_mock.called)
run_os_command_mock.reset_mock()
print_warning_msg_mock.reset_mock()
# Testing recursive operation
exists_mock.return_value = True
run_os_command_mock.side_effect = [(0, "", ""), (0, "", "")]
set_file_permissions("dummy-file", "dummy-mod",
"dummy-user", True)
self.assertTrue(len(run_os_command_mock.call_args_list) == 2)
self.assertTrue("-R" in run_os_command_mock.call_args_list[0][0][0])
self.assertTrue("-R" in run_os_command_mock.call_args_list[1][0][0])
self.assertFalse(print_warning_msg_mock.called)
run_os_command_mock.reset_mock()
print_warning_msg_mock.reset_mock()
# Testing non-recursive operation
exists_mock.return_value = True
run_os_command_mock.side_effect = [(0, "", ""), (0, "", "")]
set_file_permissions("dummy-file", "dummy-mod",
"dummy-user", False)
self.assertTrue(len(run_os_command_mock.call_args_list) == 2)
self.assertFalse("-R" in run_os_command_mock.call_args_list[0][0][0])
self.assertFalse("-R" in run_os_command_mock.call_args_list[1][0][0])
self.assertFalse(print_warning_msg_mock.called)
run_os_command_mock.reset_mock()
print_warning_msg_mock.reset_mock()
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch("ambari_server.serverSetup.get_validated_string_input")
@patch("ambari_server.serverSetup.print_info_msg")
@patch("ambari_server.serverSetup.print_warning_msg")
@patch("ambari_server.serverSetup.run_os_command")
def test_create_custom_user(self, run_os_command_mock, print_warning_msg_mock,
print_info_msg_mock, get_validated_string_input_mock):
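# _create_custom_user(): a new user is created, exit code 9 (user exists) counts as success,
# any other error fails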
options = self._create_empty_options_mock()
user = "dummy-user"
get_validated_string_input_mock.return_value = user
userChecks = AmbariUserChecks(options)
# Testing scenario: absent user
run_os_command_mock.side_effect = [(0, "", "")]
result = userChecks._create_custom_user()
self.assertFalse(print_warning_msg_mock.called)
self.assertEquals(result, 0)
self.assertEquals(userChecks.user, user)
print_info_msg_mock.reset_mock()
print_warning_msg_mock.reset_mock()
run_os_command_mock.reset_mock()
# Testing scenario: existing user
run_os_command_mock.side_effect = [(9, "", "")]
result = userChecks._create_custom_user()
self.assertTrue("User dummy-user already exists" in str(print_info_msg_mock.call_args_list[1][0]))
self.assertEquals(result, 0)
self.assertEquals(userChecks.user, user)
print_info_msg_mock.reset_mock()
print_warning_msg_mock.reset_mock()
run_os_command_mock.reset_mock()
# Testing scenario: os command fail
run_os_command_mock.side_effect = [(1, "", "")]
result = userChecks._create_custom_user()
self.assertTrue(print_warning_msg_mock.called)
self.assertEquals(result, 1)
pass
@only_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch("win32security.LsaAddAccountRights")
@patch("win32security.LookupAccountName")
@patch("win32net.NetUserAdd")
@patch("win32net.NetUserGetInfo")
@patch("win32security.LsaOpenPolicy")
@patch("win32net.NetGetDCName")
@patch("ambari_server.serverSetup.get_validated_string_input")
@patch("ambari_server.serverSetup.print_info_msg")
@patch("ambari_server.serverSetup.print_warning_msg")
def test_create_custom_user(self, print_warning_msg_mock,
print_info_msg_mock, get_validated_string_input_mock,
net_get_dc_name_mock, lsa_open_policy_mock,
net_user_get_info_mock, net_user_add_mock,
lookup_account_name_mock, lsa_add_account_rights_mock):
def _reset_mocks():
get_validated_string_input_mock.reset_mock()
print_info_msg_mock.reset_mock()
print_warning_msg_mock.reset_mock()
net_get_dc_name_mock.reset_mock()
net_user_get_info_mock.reset_mock()
net_user_add_mock.reset_mock()
lookup_account_name_mock.reset_mock()
lsa_add_account_rights_mock.reset_mock()
pass
options = MagicMock()
user = "dummy-user"
get_validated_string_input_mock.return_value = user
userChecks = AmbariUserChecks(options)
# Testing scenario: absent user
def user_not_found(*args, **keywargs):
import pywintypes
raise pywintypes.error(2221)
net_user_get_info_mock.side_effect = user_not_found
result = userChecks._create_custom_user()
self.assertTrue(print_warning_msg_mock.called)
self.assertTrue(net_user_add_mock.called)
self.assertEqual(str(net_user_add_mock.call_args_list[0][0]), str((None, 1, {'comment': 'Ambari user', 'password': 'dummy-user', 'flags': 513, 'name': 'dummy-user', 'priv': 1})))
self.assertEquals(result, 0)
self.assertEquals(userChecks.user, ".\\" + user)
_reset_mocks()
# Testing scenario: existing user
net_user_get_info_mock.side_effect = None
net_user_get_info_mock.return_value = { "name":"dummy_user" }
result = userChecks._create_custom_user()
self.assertTrue("User dummy-user already exists" in print_info_msg_mock.call_args_list[0][0][0])
self.assertEquals(result, 0)
self.assertEquals(userChecks.user, ".\\" + user)
self.assertFalse(net_user_add_mock.called)
_reset_mocks()
# Testing scenario: new domain user
get_validated_string_input_mock.side_effect = ["dummy_domain\\dummy_user", "newpassword"]
net_get_dc_name_mock.return_value = "dummy_dc"
net_user_get_info_mock.side_effect = user_not_found
result = userChecks._create_custom_user()
self.assertTrue(net_get_dc_name_mock.called)
self.assertEqual(str(net_get_dc_name_mock.call_args_list[0][0]), str((None, "dummy_domain")))
self.assertTrue(net_user_add_mock.called)
self.assertEqual(str(net_user_add_mock.call_args_list[0][0]), str(('dummy_dc', 1, {'comment': 'Ambari user', 'password': 'newpassword', 'flags': 513, 'name': 'dummy_user', 'priv': 1})))
self.assertEquals(result, 0)
self.assertEquals(userChecks.user, "dummy_domain\\dummy_user")
_reset_mocks()
# Testing scenario: existing domain user
get_validated_string_input_mock.side_effect = ["dummy_domain\\dummy_user", "newpassword"]
net_user_get_info_mock.side_effect = None
net_user_get_info_mock.return_value = { "name":"dummy_domain\\dummy_user" }
result = userChecks._create_custom_user()
self.assertTrue("User dummy_domain\\dummy_user already exists" in print_info_msg_mock.call_args_list[0][0][0])
self.assertTrue(net_get_dc_name_mock.called)
self.assertEqual(str(net_get_dc_name_mock.call_args_list[0][0]), str((None, "dummy_domain")))
self.assertFalse(net_user_add_mock.called)
self.assertEquals(result, 0)
self.assertEquals(userChecks.user, "dummy_domain\\dummy_user")
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch("ambari_server.serverSetup.read_ambari_user")
@patch("ambari_server.serverSetup.get_YN_input")
@patch("ambari_server.serverSetup.get_validated_string_input")
@patch("ambari_server.serverSetup.adjust_directory_permissions")
@patch("ambari_server.serverSetup.run_os_command")
@patch("ambari_server.serverSetup.print_error_msg")
@patch("ambari_server.serverSetup.print_warning_msg")
@patch("ambari_server.serverSetup.print_info_msg")
def test_check_ambari_user(self, print_info_msg_mock, print_warning_msg_mock, print_error_msg_mock,
run_os_command_mock, adjust_directory_permissions_mock,
get_validated_string_input_mock, get_YN_input_mock, read_ambari_user_mock):
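# Walks check_ambari_user() through the keep/reconfigure/create-failure/root flows;
# result[0] is the return code and result[2] the effective user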
def _reset_mocks():
get_YN_input_mock.reset_mock()
get_validated_string_input_mock.reset_mock()
run_os_command_mock.reset_mock()
adjust_directory_permissions_mock.reset_mock()
pass
options = self._create_empty_options_mock()
run_os_command_mock.return_value = (0, "", "")
# Scenario: user is already defined, user does not want to reconfigure it
read_ambari_user_mock.return_value = "dummy-user"
get_YN_input_mock.return_value = False
result = check_ambari_user(options)
self.assertTrue(get_YN_input_mock.called)
self.assertFalse(get_validated_string_input_mock.called)
self.assertFalse(run_os_command_mock.called)
self.assertTrue(adjust_directory_permissions_mock.called)
self.assertEqual(result[0], 0)
_reset_mocks()
# Scenario: user is already defined, but user wants to reconfigure it
read_ambari_user_mock.return_value = "dummy-user"
get_validated_string_input_mock.return_value = "new-dummy-user"
get_YN_input_mock.return_value = True
result = check_ambari_user(options)
self.assertTrue(get_YN_input_mock.called)
self.assertTrue(result[2] == "new-dummy-user")
self.assertTrue(get_validated_string_input_mock.called)
self.assertTrue(adjust_directory_permissions_mock.called)
self.assertEqual(result[0], 0)
_reset_mocks()
# Negative scenario: user is already defined, but user wants
# to reconfigure it, user creation failed
read_ambari_user_mock.return_value = "dummy-user"
run_os_command_mock.return_value = (1, "", "")
get_YN_input_mock.return_value = True
result = check_ambari_user(options)
self.assertTrue(get_YN_input_mock.called)
self.assertTrue(get_validated_string_input_mock.called)
self.assertTrue(run_os_command_mock.called)
self.assertFalse(adjust_directory_permissions_mock.called)
self.assertEqual(result[0], 1)
_reset_mocks()
# Scenario: user is not defined (setup process)
read_ambari_user_mock.return_value = None
get_YN_input_mock.return_value = True
get_validated_string_input_mock.return_value = "dummy-user"
run_os_command_mock.return_value = (0, "", "")
result = check_ambari_user(options)
self.assertTrue(get_YN_input_mock.called)
self.assertTrue(get_validated_string_input_mock.called)
self.assertTrue(run_os_command_mock.called)
self.assertTrue(result[2] == "dummy-user")
self.assertTrue(adjust_directory_permissions_mock.called)
self.assertEqual(result[0], 0)
_reset_mocks()
# Scenario: user is not defined (setup process), user creation failed
read_ambari_user_mock.return_value = None
get_YN_input_mock.return_value = True
run_os_command_mock.return_value = (1, "", "")
result = check_ambari_user(options)
self.assertTrue(get_YN_input_mock.called)
self.assertTrue(get_validated_string_input_mock.called)
self.assertTrue(run_os_command_mock.called)
self.assertFalse(adjust_directory_permissions_mock.called)
self.assertEqual(result[0], 1)
_reset_mocks()
# Scenario: user is not defined and left to be root
read_ambari_user_mock.return_value = None
get_YN_input_mock.return_value = False
result = check_ambari_user(options)
self.assertTrue(get_YN_input_mock.called)
self.assertFalse(get_validated_string_input_mock.called)
self.assertFalse(run_os_command_mock.called)
self.assertTrue(result[2] == "root")
self.assertTrue(adjust_directory_permissions_mock.called)
self.assertEqual(result[0], 0)
pass
@only_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch("ambari_commons.os_windows.UserHelper.add_user_privilege")
@patch("ambari_commons.os_windows.UserHelper.create_user")
@patch("ambari_commons.os_windows.UserHelper.find_user")
@patch("ambari_server.serverSetup.read_ambari_user")
@patch("ambari_server.serverSetup.get_YN_input")
@patch("ambari_server.serverSetup.get_validated_string_input")
@patch("ambari_server.serverSetup.adjust_directory_permissions")
@patch("ambari_server.serverSetup.run_os_command")
@patch("ambari_server.serverSetup.print_error_msg")
@patch("ambari_server.serverSetup.print_warning_msg")
@patch("ambari_server.serverSetup.print_info_msg")
def test_check_ambari_user(self, print_info_msg_mock, print_warning_msg_mock, print_error_msg_mock,
run_os_command_mock, adjust_directory_permissions_mock,
get_validated_string_input_mock, get_YN_input_mock, read_ambari_user_mock,
find_user_mock, create_user_mock, add_user_privilege_mock):
def _reset_mocks():
get_YN_input_mock.reset_mock()
get_validated_string_input_mock.reset_mock()
find_user_mock.reset_mock()
create_user_mock.reset_mock()
adjust_directory_permissions_mock.reset_mock()
pass
options = MagicMock()
options.svc_user = None
options.svc_password = None
run_os_command_mock.return_value = (0, "", "")
# Scenario: user is already defined, user does not want to reconfigure it
read_ambari_user_mock.return_value = "dummy-user"
get_YN_input_mock.return_value = False
result = check_ambari_user(options)
self.assertTrue(get_YN_input_mock.called)
self.assertFalse(get_validated_string_input_mock.called)
self.assertFalse(find_user_mock.called)
self.assertFalse(create_user_mock.called)
self.assertTrue(adjust_directory_permissions_mock.called)
self.assertEqual(result[0], 0)
_reset_mocks()
# Scenario: user is already defined, but user wants to reconfigure it
read_ambari_user_mock.return_value = "dummy-user"
get_validated_string_input_mock.side_effect = ["new-dummy-user", "new_password"]
get_YN_input_mock.return_value = True
find_user_mock.return_value = False
create_user_mock.return_value = (0, "User created")
add_user_privilege_mock.return_value = (0, "Privilege added")
result = check_ambari_user(options)
self.assertTrue(get_YN_input_mock.called)
self.assertEqual(result[2], ".\\new-dummy-user")
self.assertTrue(get_validated_string_input_mock.called)
self.assertTrue(adjust_directory_permissions_mock.called)
self.assertTrue(find_user_mock.called)
self.assertTrue(create_user_mock.called)
self.assertTrue(add_user_privilege_mock.called)
self.assertEqual(result[0], 0)
_reset_mocks()
# Negative scenario: user is already defined, but user wants
# to reconfigure it, user creation failed
read_ambari_user_mock.return_value = "dummy-user"
get_validated_string_input_mock.side_effect = ["new-dummy-user", "new_password"]
find_user_mock.return_value = False
create_user_mock.return_value = (-1, "Failed")
get_YN_input_mock.return_value = True
result = check_ambari_user(options)
self.assertTrue(get_YN_input_mock.called)
self.assertTrue(get_validated_string_input_mock.called)
self.assertTrue(create_user_mock.called)
self.assertFalse(adjust_directory_permissions_mock.called)
self.assertEqual(result[0], -1)
_reset_mocks()
# Scenario: user is not defined (setup process)
read_ambari_user_mock.return_value = None
get_YN_input_mock.return_value = True
get_validated_string_input_mock.side_effect = ["dummy-user", "new_password"]
create_user_mock.return_value = (0, "User created")
result = check_ambari_user(options)
self.assertTrue(get_YN_input_mock.called)
self.assertTrue(get_validated_string_input_mock.called)
self.assertTrue(create_user_mock.called)
self.assertTrue(result[2] == ".\\dummy-user")
self.assertTrue(adjust_directory_permissions_mock.called)
self.assertEqual(result[0], 0)
_reset_mocks()
# Scenario: user is not defined, use system account (setup process)
read_ambari_user_mock.return_value = None
get_YN_input_mock.return_value = True
get_validated_string_input_mock.side_effect = ["NT AUTHORITY\\SYSTEM"]
create_user_mock.return_value = (0, "User created")
result = check_ambari_user(options)
self.assertTrue(get_YN_input_mock.called)
self.assertTrue(get_validated_string_input_mock.called)
self.assertEqual(get_validated_string_input_mock.call_count, 1)
self.assertFalse(find_user_mock.called)
self.assertFalse(create_user_mock.called)
self.assertTrue(result[2] == "NT AUTHORITY\\SYSTEM")
self.assertTrue(adjust_directory_permissions_mock.called)
self.assertEqual(result[0], 0)
_reset_mocks()
# Scenario: user is not defined (setup process), user creation failed
read_ambari_user_mock.return_value = None
get_YN_input_mock.return_value = True
get_validated_string_input_mock.side_effect = ["new-dummy-user", "new_password"]
find_user_mock.return_value = False
create_user_mock.return_value = (-1, "Failed")
result = check_ambari_user(options)
self.assertTrue(get_YN_input_mock.called)
self.assertTrue(get_validated_string_input_mock.called)
self.assertTrue(create_user_mock.called)
self.assertFalse(adjust_directory_permissions_mock.called)
self.assertEqual(result[0], -1)
_reset_mocks()
# Scenario: user is not defined and left to be the default
read_ambari_user_mock.return_value = None
get_YN_input_mock.return_value = False
result = check_ambari_user(options)
self.assertTrue(get_YN_input_mock.called)
self.assertFalse(get_validated_string_input_mock.called)
self.assertFalse(run_os_command_mock.called)
self.assertTrue(result[2] == "NT AUTHORITY\\SYSTEM")
self.assertTrue(adjust_directory_permissions_mock.called)
self.assertEqual(result[0], 0)
pass
@patch("ambari_server.serverConfiguration.search_file")
@patch("__builtin__.open")
@patch("ambari_server.serverConfiguration.read_ambari_user")
@patch("ambari_server.serverConfiguration.set_file_permissions")
def test_store_password_file(self, set_file_permissions_mock,
read_ambari_user_mock, open_mock, search_file_mock):
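# store_password_file() should write the password file and tighten its permissions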
search_file_mock.return_value = "/etc/ambari-server/conf/ambari.properties"
open_mock.return_value = MagicMock()
store_password_file("password", "passfile")
self.assertTrue(set_file_permissions_mock.called)
pass
@patch("resource_management.core.shell.call")
@patch.object(OSCheck, "get_os_family")
@patch.object(OSCheck, "get_os_type")
@patch.object(OSCheck, "get_os_major_version")
def test_check_firewall_is_running(self, get_os_major_version_mock, get_os_type_mock, get_os_family_mock, shell_call_mock):
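# Each OS family should resolve to its own firewall checker class and report its state from the shell output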
get_os_major_version_mock.return_value = 18
get_os_type_mock.return_value = OSConst.OS_FEDORA
get_os_family_mock.return_value = OSConst.REDHAT_FAMILY
firewall_obj = Firewall().getFirewallObject()
shell_call_mock.return_value = (0, "active", "err")
self.assertEqual("Fedora18FirewallChecks", firewall_obj.__class__.__name__)
self.assertTrue(firewall_obj.check_firewall())
shell_call_mock.return_value = (3, "", "err")
self.assertFalse(firewall_obj.check_firewall())
self.assertEqual("err", firewall_obj.stderrdata)
get_os_type_mock.return_value = OSConst.OS_UBUNTU
get_os_family_mock.return_value = OSConst.UBUNTU_FAMILY
firewall_obj = Firewall().getFirewallObject()
shell_call_mock.return_value = (0, "Status: active", "err")
self.assertEqual("UbuntuFirewallChecks", firewall_obj.__class__.__name__)
self.assertTrue(firewall_obj.check_firewall())
shell_call_mock.return_value = (0, "Status: inactive", "err")
self.assertFalse(firewall_obj.check_firewall())
self.assertEqual("err", firewall_obj.stderrdata)
get_os_type_mock.return_value = ""
get_os_family_mock.return_value = OSConst.SUSE_FAMILY
firewall_obj = Firewall().getFirewallObject()
shell_call_mock.return_value = (0, "running", "err")
self.assertEqual("SuseFirewallChecks", firewall_obj.__class__.__name__)
self.assertTrue(firewall_obj.check_firewall())
shell_call_mock.return_value = (0, "unused", "err")
self.assertFalse(firewall_obj.check_firewall())
self.assertEqual("err", firewall_obj.stderrdata)
get_os_type_mock.return_value = ""
get_os_family_mock.return_value = OSConst.REDHAT_FAMILY
get_os_major_version_mock.return_value = 6
firewall_obj = Firewall().getFirewallObject()
shell_call_mock.return_value = (0, "Table: filter", "err")
self.assertEqual("FirewallChecks", firewall_obj.__class__.__name__)
self.assertTrue(firewall_obj.check_firewall())
shell_call_mock.return_value = (3, "", "err")
self.assertFalse(firewall_obj.check_firewall())
self.assertEqual("err", firewall_obj.stderrdata)
get_os_major_version_mock.return_value = 7
get_os_type_mock.return_value = ""
get_os_family_mock.return_value = OSConst.REDHAT_FAMILY
firewall_obj = Firewall().getFirewallObject()
shell_call_mock.return_value = (0, "active\nactive", "err")
self.assertEqual("RedHat7FirewallChecks", firewall_obj.__class__.__name__)
self.assertTrue(firewall_obj.check_firewall())
shell_call_mock.return_value = (3, "inactive\nactive", "err")
self.assertTrue(firewall_obj.check_firewall())
shell_call_mock.return_value = (3, "active\ninactive", "err")
self.assertTrue(firewall_obj.check_firewall())
shell_call_mock.return_value = (3, "inactive\ninactive", "err")
self.assertFalse(firewall_obj.check_firewall())
self.assertEqual("err", firewall_obj.stderrdata)
pass
@patch("ambari_server.setupHttps.get_validated_filepath_input")
@patch("ambari_server.setupHttps.get_validated_string_input")
@patch("ambari_server.setupHttps.run_os_command")
@patch("ambari_server.setupHttps.get_and_persist_truststore_type")
@patch("__builtin__.open")
@patch("ambari_server.setupHttps.find_properties_file")
@patch("ambari_server.setupHttps.run_component_https_cmd")
@patch("ambari_server.setupHttps.get_delete_cert_command")
@patch("ambari_server.setupHttps.get_and_persist_truststore_password")
@patch("ambari_server.setupHttps.get_and_persist_truststore_path")
@patch("ambari_server.setupHttps.get_YN_input")
@patch("ambari_server.setupHttps.get_ambari_properties")
@patch("ambari_server.setupHttps.find_jdk")
def test_setup_truststore(self, find_jdk_mock, get_ambari_properties_mock, get_YN_input_mock,
get_and_persist_truststore_path_mock, get_and_persist_truststore_password_mock,
get_delete_cert_command_mock, run_component_https_cmd_mock,
find_properties_file_mock, open_mock,
get_and_persist_truststore_type_mock, run_os_command_mock,
get_validated_string_input_mock,
get_validated_filepath_input_mock):
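# setup_truststore(): refused in silent mode, fails without a JDK, and otherwise
# persists the truststore settings and runs the https command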
out = StringIO.StringIO()
sys.stdout = out
component = "component"
command = "command"
property = "use_ssl"
alias = "alias"
options = self._create_empty_options_mock()
# Silent mode
set_silent(True)
setup_truststore(options)
self.assertEqual('setup-security is not enabled in silent mode.\n', out.getvalue())
sys.stdout = sys.__stdout__
# Verbose mode and jdk_path is None
set_silent(False)
p = get_ambari_properties_mock.return_value
# Don't disable SSL
get_YN_input_mock.side_effect = [False]
get_validated_string_input_mock.return_value = "alias"
setup_truststore(options)
self.assertTrue(get_YN_input_mock.called)
p.get_property.reset_mock()
get_YN_input_mock.reset_mock()
# Can't find JDK
find_jdk_mock.return_value = None
try:
setup_truststore(options)
self.fail("Should throw exception")
except FatalException as fe:
# Expected
self.assertTrue('No JDK found, please run the "ambari-server setup" command to install a' +
' JDK automatically or install any JDK manually to ' in fe.reason)
pass
# Verbose mode and jdk_path is not None (use_https = true)
find_jdk_mock.return_value = "/jdk_path"
p.get_property.side_effect = ["true"]
get_YN_input_mock.side_effect = [True,True]
get_and_persist_truststore_path_mock.return_value = "/truststore_path"
get_and_persist_truststore_password_mock.return_value = "/truststore_password"
get_delete_cert_command_mock.return_value = "rm -f"
setup_truststore(options, True)
self.assertTrue(get_and_persist_truststore_path_mock.called)
self.assertTrue(get_and_persist_truststore_password_mock.called)
self.assertTrue(get_delete_cert_command_mock.called)
self.assertTrue(find_properties_file_mock.called)
self.assertTrue(open_mock.called)
self.assertTrue(p.store.called)
self.assertTrue(run_component_https_cmd_mock.called)
p.process_pair.reset_mock()
get_and_persist_truststore_path_mock.reset_mock()
get_and_persist_truststore_password_mock.reset_mock()
get_delete_cert_command_mock.reset_mock()
find_properties_file_mock.reset_mock()
open_mock.reset_mock()
p.store.reset_mock()
# Verbose mode and jdk_path is not None (use_https = false) and import cert
p.get_property.side_effect = ["false"]
get_YN_input_mock.side_effect = [True,True]
setup_truststore(options, True)
self.assertTrue(get_and_persist_truststore_type_mock.called)
self.assertTrue(get_and_persist_truststore_path_mock.called)
self.assertTrue(get_and_persist_truststore_password_mock.called)
self.assertTrue(get_delete_cert_command_mock.called)
self.assertTrue(find_properties_file_mock.called)
self.assertTrue(open_mock.called)
self.assertTrue(p.store.called)
self.assertTrue(run_component_https_cmd_mock.called)
self.assertTrue(run_os_command_mock.called)
self.assertTrue(get_validated_filepath_input_mock.called)
p.process_pair.reset_mock()
get_and_persist_truststore_type_mock.reset_mock()
get_and_persist_truststore_path_mock.reset_mock()
get_and_persist_truststore_password_mock.reset_mock()
get_delete_cert_command_mock.reset_mock()
find_properties_file_mock.reset_mock()
open_mock.reset_mock()
p.store.reset_mock()
run_os_command_mock.reset_mock()
get_validated_filepath_input_mock.reset_mock()
pass
@patch("__builtin__.open")
@patch("ambari_commons.logging_utils.get_silent")
@patch("ambari_server.setupHttps.find_jdk")
@patch("ambari_server.setupHttps.get_ambari_properties")
@patch("ambari_server.setupHttps.get_YN_input")
@patch("ambari_server.setupHttps.get_and_persist_truststore_type")
@patch("ambari_server.setupHttps.get_and_persist_truststore_path")
@patch("ambari_server.setupHttps.get_and_persist_truststore_password")
@patch("ambari_server.setupHttps.find_properties_file")
@patch("ambari_server.setupHttps.get_validated_string_input")
@patch("ambari_server.setupHttps.run_os_command")
@patch("ambari_server.setupHttps.get_validated_filepath_input")
@patch("ambari_server.setupHttps.get_import_cert_command")
@patch("ambari_server.setupHttps.run_component_https_cmd")
def test_reconfigure_truststore(self, run_component_https_cmd_mock,
get_import_cert_command_mock,
get_validated_filepath_input_mock, run_os_command_mock,
get_validated_string_input_mock, find_properties_file_mock,
get_and_persist_truststore_password_mock, get_and_persist_truststore_path_mock,
get_and_persist_truststore_type_mock, get_YN_input_mock,
get_ambari_properties_mock, find_jdk_mock, get_silent_mock,
open_mock):
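# setup_truststore() reconfiguration prompts; with import_cert=True the certificate
# import command is also issued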
def reset_mocks():
open_mock.reset_mock()
find_jdk_mock.reset_mock()
get_ambari_properties_mock.reset_mock()
get_YN_input_mock.reset_mock()
get_and_persist_truststore_type_mock.reset_mock()
get_and_persist_truststore_path_mock.reset_mock()
get_and_persist_truststore_password_mock.reset_mock()
find_properties_file_mock.reset_mock()
get_validated_string_input_mock.reset_mock()
run_os_command_mock.reset_mock()
get_validated_filepath_input_mock.reset_mock()
get_import_cert_command_mock.reset_mock()
run_component_https_cmd_mock.reset_mock()
# Test preconditions
get_silent_mock.return_value = False
find_jdk_mock.return_value = "/path"
options = self._create_empty_options_mock()
# Reconfiguration allowed by the user
reset_mocks()
get_YN_input_mock.side_effect = [True, True, True]
setup_truststore(options)
self.assertTrue(get_and_persist_truststore_type_mock.called)
self.assertTrue(get_and_persist_truststore_path_mock.called)
self.assertTrue(get_and_persist_truststore_password_mock.called)
# Reconfiguration disallowed by the user
reset_mocks()
get_YN_input_mock.side_effect = [True, False]
setup_truststore(options)
self.assertTrue(get_and_persist_truststore_type_mock.called)
self.assertTrue(get_and_persist_truststore_path_mock.called)
self.assertTrue(get_and_persist_truststore_password_mock.called)
# Reconfiguration should be disabled when the 'import_cert' flag is True
reset_mocks()
get_YN_input_mock.side_effect = [True, True]
setup_truststore(options, True)
self.assertTrue(get_and_persist_truststore_type_mock.called)
self.assertTrue(get_and_persist_truststore_path_mock.called)
self.assertTrue(get_and_persist_truststore_password_mock.called)
self.assertTrue(get_import_cert_command_mock.called)
pass
@patch("ambari_server.setupHttps.adjust_directory_permissions")
@patch("ambari_server.setupHttps.read_ambari_user")
@patch("ambari_server.setupHttps.get_validated_string_input")
@patch("ambari_server.setupHttps.find_properties_file")
@patch("ambari_server.setupHttps.get_ambari_properties")
@patch("ambari_server.setupHttps.import_cert_and_key_action")
@patch("ambari_server.setupHttps.get_YN_input")
@patch("__builtin__.open")
@patch("ambari_server.setupHttps.is_root")
@patch("ambari_server.setupHttps.is_valid_cert_host")
@patch("ambari_server.setupHttps.is_valid_cert_exp")
def test_setup_https(self, is_valid_cert_exp_mock, is_valid_cert_host_mock, \
is_root_mock, open_Mock, get_YN_input_mock, \
import_cert_and_key_action_mock,
get_ambari_properties_mock, \
find_properties_file_mock, \
get_validated_string_input_mock, read_ambari_user_method, \
adjust_directory_permissions_mock):
is_valid_cert_exp_mock.return_value = True
is_valid_cert_host_mock.return_value = True
open_Mock.return_value = file
p = get_ambari_properties_mock.return_value
args = MagicMock()
args.api_ssl_port = None
args.api_ssl = None
args.import_cert_path = None
args.import_key_path = None
args.pem_password = None
# Testing call under root
is_root_mock.return_value = True
read_ambari_user_method.return_value = "user"
#Case #1: client ssl is on; the user doesn't choose the
#disable-ssl option and chooses to import certs and keys
p.get_property.side_effect = ["key_dir", "5555", "6666", "true", "5555", "true", "true", "5555"]
get_YN_input_mock.side_effect = [False, True]
get_validated_string_input_mock.side_effect = ["4444"]
get_property_expected = "[call('security.server.keys_dir'),\n" + \
" call('client.api.ssl.port'),\n" + \
" call('client.api.ssl.port'),\n call('api.ssl'),\n" + \
" call('client.api.ssl.port'),\n call('api.ssl'),\n" + \
" call('api.ssl'),\n call('client.api.ssl.port')]"
process_pair_expected = "[call('client.api.ssl.port', '4444')]"
set_silent(False)
setup_https(args)
self.assertTrue(p.process_pair.called)
self.assertTrue(p.get_property.call_count == 8)
self.assertEqual(str(p.get_property.call_args_list), get_property_expected)
self.assertEqual(str(p.process_pair.call_args_list), process_pair_expected)
self.assertTrue(p.store.called)
self.assertTrue(import_cert_and_key_action_mock.called)
p.process_pair.reset_mock()
p.get_property.reset_mock()
p.store.reset_mock()
import_cert_and_key_action_mock.reset_mock()
#Case #2: client ssl is on and the user chooses to disable the ssl option
p.get_property.side_effect = ["key_dir", "", "true", "", "true", "false", ""]
get_YN_input_mock.side_effect = [True]
get_validated_string_input_mock.side_effect = ["4444"]
get_property_expected = "[call('security.server.keys_dir'),\n" + \
" call('client.api.ssl.port'),\n call('api.ssl'),\n" + \
" call('client.api.ssl.port'),\n call('api.ssl'),\n" + \
" call('api.ssl')]"
process_pair_expected = "[call('api.ssl', 'false')]"
setup_https(args)
self.assertTrue(p.process_pair.called)
self.assertTrue(p.get_property.call_count == 6)
self.assertEqual(str(p.get_property.call_args_list), get_property_expected)
self.assertEqual(str(p.process_pair.call_args_list), process_pair_expected)
self.assertTrue(p.store.called)
self.assertFalse(import_cert_and_key_action_mock.called)
p.process_pair.reset_mock()
p.get_property.reset_mock()
p.store.reset_mock()
import_cert_and_key_action_mock.reset_mock()
#Case #3: client ssl is off and the user chooses the option
#to import cert and keys
p.get_property.side_effect = ["key_dir", "", None, "", None, None, ""]
get_YN_input_mock.side_effect = [True, True]
get_validated_string_input_mock.side_effect = ["4444"]
get_property_expected = "[call('security.server.keys_dir'),\n" + \
" call('client.api.ssl.port'),\n call('api.ssl'),\n" + \
" call('client.api.ssl.port'),\n call('api.ssl'),\n" + \
" call('api.ssl'),\n call('client.api.ssl.port')]"
process_pair_expected = "[call('client.api.ssl.port', '4444')]"
setup_https(args)
self.assertTrue(p.process_pair.called)
self.assertTrue(p.get_property.call_count == 7)
self.assertEqual(str(p.get_property.call_args_list), get_property_expected)
self.assertEqual(str(p.process_pair.call_args_list), process_pair_expected)
self.assertTrue(p.store.called)
self.assertTrue(import_cert_and_key_action_mock.called)
p.process_pair.reset_mock()
p.get_property.reset_mock()
p.store.reset_mock()
import_cert_and_key_action_mock.reset_mock()
#Case #4: client ssl is off and
#the user does not choose the option to import cert and keys
p.get_property.side_effect = ["key_dir", "", None, "", None]
get_YN_input_mock.side_effect = [False]
get_validated_string_input_mock.side_effect = ["4444"]
get_property_expected = "[call('security.server.keys_dir'),\n" + \
" call('client.api.ssl.port'),\n call('api.ssl'),\n" + \
" call('client.api.ssl.port'),\n call('api.ssl')]"
process_pair_expected = "[]"
setup_https(args)
self.assertFalse(p.process_pair.called)
self.assertTrue(p.get_property.call_count == 5)
self.assertEqual(str(p.get_property.call_args_list), get_property_expected)
self.assertEqual(str(p.process_pair.call_args_list), process_pair_expected)
self.assertFalse(p.store.called)
self.assertFalse(import_cert_and_key_action_mock.called)
p.process_pair.reset_mock()
p.get_property.reset_mock()
p.store.reset_mock()
import_cert_and_key_action_mock.reset_mock()
#Case #5: the cert must be imported but the import fails
p.get_property.side_effect = ["key_dir", "", "false", "", "false"]
get_YN_input_mock.side_effect = [True]
import_cert_and_key_action_mock.side_effect = [False]
get_validated_string_input_mock.side_effect = ["4444"]
get_property_expected = "[call('security.server.keys_dir'),\n" + \
" call('client.api.ssl.port'),\n call('api.ssl'),\n" + \
" call('client.api.ssl.port'),\n call('api.ssl')]"
process_pair_expected = "[call('client.api.ssl.port', '4444')]"
self.assertFalse(setup_https(args))
self.assertTrue(p.process_pair.called)
self.assertTrue(p.get_property.call_count == 5)
self.assertEqual(str(p.get_property.call_args_list), get_property_expected)
self.assertEqual(str(p.process_pair.call_args_list), process_pair_expected)
self.assertFalse(p.store.called)
self.assertTrue(import_cert_and_key_action_mock.called)
p.process_pair.reset_mock()
p.get_property.reset_mock()
p.store.reset_mock()
import_cert_and_key_action_mock.reset_mock()
#Case #6: if silent mode is enabled
set_silent(True)
try:
setup_https(args)
self.fail("Should throw exception")
except NonFatalException as fe:
self.assertTrue("setup-https is not enabled in silent mode" in fe.reason)
p.process_pair.reset_mock()
p.get_property.reset_mock()
p.store.reset_mock()
import_cert_and_key_action_mock.reset_mock()
#Case #7: read property throw exception
set_silent(False)
find_properties_file_mock.return_value = "propertyFile"
p.get_property.side_effect = KeyError("Failed to read property")
try:
setup_https(args)
self.fail("Should throw exception")
except FatalException as fe:
self.assertTrue("Failed to read property" in fe.reason)
pass
@patch("ambari_server.setupHttps.import_cert_and_key")
def test_import_cert_and_key_action(self, import_cert_and_key_mock):
import_cert_and_key_mock.return_value = True
properties = MagicMock()
properties.get_property.side_effect = ["key_dir", "5555", "6666", "true"]
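# Values consumed in order by get_property(); presumably the keys_dir,
# the ssl ports and the api.ssl flag read inside import_cert_and_key_action.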
properties.process_pair = MagicMock()
expect_process_pair = "[call('client.api.ssl.cert_name', 'https.crt'),\n" + \
" call('client.api.ssl.key_name', 'https.key'),\n" + \
" call('api.ssl', 'true')]"
options = self._create_empty_options_mock()
import_cert_and_key_action("key_dir", properties, options)
self.assertEqual(str(properties.process_pair.call_args_list), \
expect_process_pair)
pass
@patch("ambari_server.setupHttps.remove_file")
@patch("ambari_server.setupHttps.copy_file")
@patch("ambari_server.setupHttps.read_ambari_user")
@patch("ambari_server.setupHttps.set_file_permissions")
@patch("ambari_server.setupHttps.import_file_to_keystore")
@patch("__builtin__.open")
@patch("ambari_server.setupHttps.run_os_command")
@patch("os.path.join")
@patch("os.path.isfile")
@patch("__builtin__.raw_input")
@patch("ambari_server.setupHttps.get_validated_string_input")
@patch("ambari_server.setupHttps.is_valid_cert_host")
@patch("ambari_server.setupHttps.is_valid_cert_exp")
def test_ambariServerSetupWithCustomDbName(self, is_valid_cert_exp_mock, \
is_valid_cert_host_mock, \
get_validated_string_input_mock, \
raw_input_mock, \
os_path_isfile_mock, \
os_path_join_mock, run_os_command_mock, \
open_mock, import_file_to_keystore_mock, \
set_file_permissions_mock, read_ambari_user_mock, copy_file_mock, \
remove_file_mock):
is_valid_cert_exp_mock.return_value = True
is_valid_cert_host_mock.return_value = True
os_path_isfile_mock.return_value = True
get_validated_string_input_mock.return_value = "password"
raw_input_mock.side_effect = \
["cert_file_path", "key_file_path"]
os_path_join_mock.side_effect = ["keystore_file_path", "keystore_file_path_tmp", \
"pass_file_path", "pass_file_path_tmp", \
"passin_file_path", "password_file_path", \
"keystore_cert_file_path", \
"keystore_cert_key_file_path", ]
run_os_command_mock.return_value = (0, "", "")
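# Every shelled-out command is mocked to succeed: (exit_code, stdout, stderr).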
om = open_mock.return_value
expect_import_file_to_keystore = "[call('keystore_file_path_tmp'," + \
" 'keystore_file_path'),\n" + \
" call('pass_file_path_tmp'," + \
" 'pass_file_path'),\n" + \
" call('cert_file_path'," + \
" 'keystore_cert_file_path'),\n" + \
" call('key_file_path'," + \
" 'keystore_cert_key_file_path')]"
options = self._create_empty_options_mock()
import_cert_and_key("key_dir", options)
self.assertTrue(raw_input_mock.call_count == 2)
self.assertTrue(get_validated_string_input_mock.called)
self.assertEqual(os_path_join_mock.call_count, 8)
self.assertTrue(set_file_permissions_mock.call_count == 1)
self.assertEqual(str(import_file_to_keystore_mock.call_args_list), \
expect_import_file_to_keystore)
pass
@patch("ambari_server.setupHttps.remove_file")
@patch("ambari_server.setupHttps.copy_file")
@patch("ambari_server.setupHttps.generate_random_string")
@patch("ambari_server.setupHttps.read_ambari_user")
@patch("ambari_server.setupHttps.set_file_permissions")
@patch("ambari_server.setupHttps.import_file_to_keystore")
@patch("__builtin__.open")
@patch("ambari_server.setupHttps.run_os_command")
@patch("os.path.join")
@patch("ambari_server.setupHttps.get_validated_filepath_input")
@patch("ambari_server.setupHttps.get_validated_string_input")
@patch("ambari_server.setupHttps.is_valid_cert_host")
@patch("ambari_server.setupHttps.is_valid_cert_exp")
def test_import_cert_and_key_with_empty_password(self, \
is_valid_cert_exp_mock, is_valid_cert_host_mock,
get_validated_string_input_mock, get_validated_filepath_input_mock, \
os_path_join_mock, run_os_command_mock, open_mock, \
import_file_to_keystore_mock, set_file_permissions_mock,
read_ambari_user_mock, generate_random_string_mock, copy_file_mock, \
remove_file_mock):
is_valid_cert_exp_mock.return_value = True
is_valid_cert_host_mock.return_value = True
get_validated_string_input_mock.return_value = ""
get_validated_filepath_input_mock.side_effect = \
["cert_file_path", "key_file_path"]
os_path_join_mock.side_effect = ["keystore_file_path", "keystore_file_path_tmp", \
"pass_file_path", "pass_file_path_tmp", \
"passin_file_path", "password_file_path", \
"keystore_cert_file_path", \
"keystore_cert_key_file_path", ]
run_os_command_mock.return_value = (0, "", "")
expect_import_file_to_keystore = "[call('keystore_file_path_tmp'," + \
" 'keystore_file_path'),\n" + \
" call('pass_file_path_tmp'," + \
" 'pass_file_path'),\n" + \
" call('cert_file_path'," + \
" 'keystore_cert_file_path'),\n" + \
" call('key_file_path.secured'," + \
" 'keystore_cert_key_file_path')]"
options = self._create_empty_options_mock()
import_cert_and_key("key_dir", options)
self.assertEqual(get_validated_filepath_input_mock.call_count, 2)
self.assertTrue(get_validated_string_input_mock.called)
self.assertEqual(os_path_join_mock.call_count, 8)
self.assertEqual(set_file_permissions_mock.call_count, 1)
self.assertEqual(str(import_file_to_keystore_mock.call_args_list), \
expect_import_file_to_keystore)
self.assertTrue(generate_random_string_mock.called)
pass
@patch("__builtin__.open")
@patch("ambari_server.setupHttps.copy_file")
@patch("ambari_server.setupHttps.is_root")
@patch("ambari_server.setupHttps.read_ambari_user")
@patch("ambari_server.setupHttps.set_file_permissions")
@patch("ambari_server.setupHttps.import_file_to_keystore")
@patch("ambari_server.setupHttps.run_os_command")
@patch("os.path.join")
@patch("ambari_server.setupHttps.get_validated_filepath_input")
@patch("ambari_server.setupHttps.get_validated_string_input")
def test_import_cert_and_key_with_incorrect_password(self,
get_validated_string_input_mock, \
get_validated_filepath_input_mock, \
os_path_join_mock, \
run_os_command_mock, \
import_file_to_keystore_mock, \
set_file_permissions_mock, \
read_ambari_user_mock, \
is_root_mock, \
copy_file_mock, \
open_mock):
get_validated_string_input_mock.return_value = "incorrect_password"
get_validated_filepath_input_mock.return_value = 'filename'
open_mock.return_value = MagicMock()
os_path_join_mock.return_value = ''
is_root_mock.return_value = True
options = self._create_empty_options_mock()
#provided password doesn't match, openssl command returns an error
run_os_command_mock.return_value = (1, "", "Some error message")
self.assertFalse(import_cert_and_key_action("key_dir", None, options))
self.assertFalse(import_cert_and_key("key_dir", options))
pass
def test_is_valid_cert_exp(self):
#No data in certInfo
certInfo = {}
is_valid = is_valid_cert_exp(certInfo)
self.assertFalse(is_valid)
#Issued in future
issuedOn = (datetime.datetime.now() + datetime.timedelta(hours=1000)).strftime(SSL_DATE_FORMAT)
expiresOn = (datetime.datetime.now() + datetime.timedelta(hours=2000)).strftime(SSL_DATE_FORMAT)
certInfo = {NOT_BEFORE_ATTR: issuedOn,
NOT_AFTER_ATTR: expiresOn}
is_valid = is_valid_cert_exp(certInfo)
self.assertFalse(is_valid)
#Already expired
issuedOn = (datetime.datetime.now() - datetime.timedelta(hours=2000)).strftime(SSL_DATE_FORMAT)
expiresOn = (datetime.datetime.now() - datetime.timedelta(hours=1000)).strftime(SSL_DATE_FORMAT)
certInfo = {NOT_BEFORE_ATTR: issuedOn,
NOT_AFTER_ATTR: expiresOn}
is_valid = is_valid_cert_exp(certInfo)
self.assertFalse(is_valid)
#Valid
issuedOn = (datetime.datetime.now() - datetime.timedelta(hours=2000)).strftime(SSL_DATE_FORMAT)
expiresOn = (datetime.datetime.now() + datetime.timedelta(hours=1000)).strftime(SSL_DATE_FORMAT)
certInfo = {NOT_BEFORE_ATTR: issuedOn,
NOT_AFTER_ATTR: expiresOn}
is_valid = is_valid_cert_exp(certInfo)
self.assertTrue(is_valid)
pass
@patch("ambari_server.setupHttps.get_fqdn")
def test_is_valid_cert_host(self, get_fqdn_mock):
#No data in certInfo
certInfo = {}
is_valid = is_valid_cert_host(certInfo)
self.assertFalse(is_valid)
#Failed to get FQDN
get_fqdn_mock.return_value = None
is_valid = is_valid_cert_host(certInfo)
self.assertFalse(is_valid)
#FQDN and Common Name in the certificate don't correspond
get_fqdn_mock.return_value = 'host1'
certInfo = {COMMON_NAME_ATTR: 'host2'}
is_valid = is_valid_cert_host(certInfo)
self.assertFalse(is_valid)
#FQDN and Common Name in the certificate correspond
get_fqdn_mock.return_value = 'host1'
certInfo = {COMMON_NAME_ATTR: 'host1'}
is_valid = is_valid_cert_host(certInfo)
self.assertTrue(is_valid)
pass
@patch("ambari_server.setupHttps.get_ambari_properties")
def test_is_valid_https_port(self, get_ambari_properties_mock):
#No ambari.properties
get_ambari_properties_mock.return_value = -1
is_valid = is_valid_https_port(1111)
self.assertEqual(is_valid, False)
#User entered port used by one way auth
portOneWay = "1111"
portTwoWay = "2222"
validPort = "3333"
get_ambari_properties_mock.return_value = {SRVR_ONE_WAY_SSL_PORT_PROPERTY: portOneWay,
SRVR_TWO_WAY_SSL_PORT_PROPERTY: portTwoWay}
is_valid = is_valid_https_port(portOneWay)
self.assertEqual(is_valid, False)
#User entered port used by two way auth
is_valid = is_valid_https_port(portTwoWay)
self.assertEqual(is_valid, False)
#User entered valid port
get_ambari_properties_mock.return_value = {SRVR_ONE_WAY_SSL_PORT_PROPERTY: portOneWay,
SRVR_TWO_WAY_SSL_PORT_PROPERTY: portTwoWay}
is_valid = is_valid_https_port(validPort)
self.assertEqual(is_valid, True)
pass
@patch("socket.getfqdn")
@patch("urllib2.urlopen")
@patch("ambari_server.setupHttps.get_ambari_properties")
def test_get_fqdn(self, get_ambari_properties_mock, url_open_mock, getfqdn_mock):
#No ambari.properties
get_ambari_properties_mock.return_value = -1
fqdn = get_fqdn()
self.assertEqual(fqdn, None)
#Check ambari_server.GET_FQDN_SERVICE_URL property name (AMBARI-2612)
#property name should be server.fqdn.service.url
self.assertEqual(GET_FQDN_SERVICE_URL, "server.fqdn.service.url")
#Read FQDN from service
p = MagicMock()
p[GET_FQDN_SERVICE_URL] = 'someurl'
get_ambari_properties_mock.return_value = p
u = MagicMock()
host = 'host1.domain.com'
u.read.return_value = host
url_open_mock.return_value = u
fqdn = get_fqdn()
self.assertEqual(fqdn, host)
#Failed to read FQDN from service, getting from socket
u.reset_mock()
u.read.side_effect = Exception("Failed to read FQDN from service")
getfqdn_mock.return_value = host
fqdn = get_fqdn()
self.assertEqual(fqdn, host)
pass
def test_get_ulimit_open_files(self):
# 1 - No ambari.properties
p = Properties()
open_files = get_ulimit_open_files(p)
self.assertEqual(open_files, ULIMIT_OPEN_FILES_DEFAULT)
# 2 - With ambari.properties - ok
prop_value = 65000
p.process_pair(ULIMIT_OPEN_FILES_KEY, str(prop_value))
open_files = get_ulimit_open_files(p)
self.assertEqual(open_files, 65000)
# 3 - With ambari.properties - invalid value falls back to default
tf1 = tempfile.NamedTemporaryFile()
prop_value = 0
p.process_pair(ULIMIT_OPEN_FILES_KEY, str(prop_value))
open_files = get_ulimit_open_files(p)
self.assertEqual(open_files, ULIMIT_OPEN_FILES_DEFAULT)
pass
@patch("ambari_server.setupHttps.run_os_command")
def test_get_cert_info(self, run_os_command_mock):
# Error running openssl command
path = 'path/to/certificate'
run_os_command_mock.return_value = -1, None, None
cert_info = get_cert_info(path)
self.assertEqual(cert_info, None)
#Empty result of openssl command
run_os_command_mock.return_value = 0, None, None
cert_info = get_cert_info(path)
self.assertEqual(cert_info, None)
#Positive scenario
notAfter = 'Jul 3 14:12:57 2014 GMT'
notBefore = 'Jul 3 14:12:57 2013 GMT'
attr1_key = 'A'
attr1_value = 'foo'
attr2_key = 'B'
attr2_value = 'bar'
attr3_key = 'CN'
attr3_value = 'host.domain.com'
subject_pattern = '/{attr1_key}={attr1_value}/{attr2_key}={attr2_value}/{attr3_key}={attr3_value}'
subject = subject_pattern.format(attr1_key=attr1_key, attr1_value=attr1_value,
attr2_key=attr2_key, attr2_value=attr2_value,
attr3_key=attr3_key, attr3_value=attr3_value)
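# Yields subject = '/A=foo/B=bar/CN=host.domain.com', mirroring the
# '/key=value' subject format printed by openssl.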
out_pattern = \
"notAfter={notAfter}" + os.linesep + \
"notBefore={notBefore}" + os.linesep + \
"subject={subject}" + os.linesep + \
"-----BEGIN CERTIFICATE-----" + os.linesep + \
"MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV" + os.linesep + \
"..." + os.linesep + \
"5lqd8XxOGSYoMOf+70BLN2sB" + os.linesep + \
"-----END CERTIFICATE-----" + os.linesep + \
""
out = out_pattern.format(notAfter=notAfter, notBefore=notBefore, subject=subject)
run_os_command_mock.return_value = 0, out, None
cert_info = get_cert_info(path)
self.assertEqual(cert_info['notAfter'], notAfter)
self.assertEqual(cert_info['notBefore'], notBefore)
self.assertEqual(cert_info['subject'], subject)
self.assertEqual(cert_info[attr1_key], attr1_value)
self.assertEqual(cert_info[attr2_key], attr2_value)
self.assertEqual(cert_info[attr3_key], attr3_value)
pass
@patch("__builtin__.raw_input")
def test_get_validated_string_input(self, raw_input_mock):
prompt = 'prompt'
default_value = 'default'
description = 'desc'
validator = MagicMock()
validator.return_value = True
input_value1 = 'val1'
input_value2 = 'val2'
raw_input_mock.return_value = input_value1
result = get_validated_string_input(prompt, default_value, None,
description, False, False, validator)
self.assertTrue(validator.called)
self.assertEqual(input_value1, result)
validator.side_effect = [False, True]
raw_input_mock.side_effect = [input_value1, input_value2]
result = get_validated_string_input(prompt, default_value, None,
description, False, False, validator)
self.assertEqual(input_value2, result)
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch("ambari_server.serverUtils.run_os_command")
@patch("__builtin__.open")
@patch("os.path.exists")
def test_is_server_runing(self, os_path_exists_mock, open_mock, \
run_os_command_mock):
os_path_exists_mock.return_value = True
f = open_mock.return_value
f.readline.return_value = "111"
run_os_command_mock.return_value = 0, "", ""
status, pid = is_server_runing()
self.assertTrue(status)
self.assertEqual(111, pid)
os_path_exists_mock.return_value = False
status, pid = is_server_runing()
self.assertFalse(status)
pass
@only_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch("os_windows.win32serviceutil.QueryServiceStatus")
def test_is_server_runing(self, query_service_status_mock):
query_service_status_mock.return_value = ("", 4)
status, desc = is_server_runing()
self.assertTrue(status)
self.assertEqual("", desc)
query_service_status_mock.return_value = ("", 1)
status, desc = is_server_runing()
self.assertFalse(status)
self.assertEqual("stopped", desc)
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch("ambari_server.serverUtils.run_os_command")
@patch("__builtin__.open")
@patch("os.path.exists")
def test_is_server_runing_bad_file(self, os_path_exists_mock, open_mock, \
run_os_command_mock):
os_path_exists_mock.return_value = True
f = open_mock.return_value
f.readline.return_value = "" # empty file content
run_os_command_mock.return_value = 0, "", ""
self.assertRaises(NonFatalException, is_server_runing)
open_mock.side_effect = IOError('[Errno 13] Permission denied: /var/run/ambari-server/ambari-server.pid')
self.assertRaises(FatalException, is_server_runing)
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch("os.path.exists")
@patch("os.makedirs")
@patch("os.chdir")
@patch("ambari_server.serverSetup.run_os_command")
def test_install_jdk(self, run_os_command_mock, os_chdir_mock, os_makedirs_mock, os_path_exists_mock):
run_os_command_mock.return_value = 1, "", ""
os_path_exists_mock.return_value = False
failed = False
try:
jdkSetup = JDKSetup()
jdkSetup._install_jdk(MagicMock(), MagicMock())
self.fail("Exception was not rised!")
except FatalException:
failed = True
self.assertTrue(failed)
pass
@only_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch("os.path.exists")
@patch("os.makedirs")
@patch("os.chdir")
@patch("ambari_server.serverSetup.run_os_command")
def test_install_jdk(self, run_os_command_mock, os_chdir_mock, os_makedirs_mock, os_path_exists_mock):
jdk_cfg = MagicMock()
jdk_cfg.inst_dir = "java_home_dir"
run_os_command_mock.return_value = 1, "", ""
os_path_exists_mock.return_value = False
failed = False
try:
jdkSetup = JDKSetup()
jdkSetup._install_jdk("jdk.exe", jdk_cfg)
self.fail("Exception was not rised!")
except FatalException:
failed = True
self.assertTrue(failed)
pass
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch("ambari_server.serverSetup.read_ambari_user")
@patch("os.stat")
@patch("os.path.isfile")
@patch("os.path.exists")
@patch("os.chdir")
@patch("os.makedirs")
@patch("ambari_server.serverSetup.JDKSetupLinux.adjust_jce_permissions")
@patch("ambari_server.serverSetup.expand_jce_zip_file")
@patch("ambari_server.serverSetup.force_download_file")
@patch("ambari_server.serverSetup.get_YN_input")
@patch("ambari_server.serverSetup.run_os_command")
@patch("ambari_server.serverSetup.update_properties")
@patch("ambari_server.serverSetup.get_validated_string_input")
@patch("ambari_server.serverSetup.print_info_msg")
@patch("ambari_server.serverSetup.validate_jdk")
@patch("ambari_server.serverSetup.get_JAVA_HOME")
@patch("ambari_server.serverSetup.get_resources_location")
@patch("ambari_server.serverSetup.get_ambari_properties")
@patch("shutil.copyfile")
@patch("sys.exit")
def test_download_jdk(self, exit_mock, copyfile_mock, get_ambari_properties_mock, get_resources_location_mock, get_JAVA_HOME_mock, \
validate_jdk_mock, print_info_msg_mock, get_validated_string_input_mock, update_properties_mock, \
run_os_command_mock, get_YN_input_mock, force_download_file_mock, expand_jce_zip_file_mock,
adjust_jce_permissions_mock, os_makedirs_mock,
os_chdir_mock, path_existsMock, path_isfileMock, statMock, read_ambari_user_mock):
@OsFamilyFuncImpl(OSConst.WINSRV_FAMILY)
def _init_test_jdk_mocks():
jdk1_url = "http://somewhere/myjdk.exe"
res_location = "resources"
p = Properties()
p.process_pair("java.releases", "jdk1")
p.process_pair("jdk1.desc", "JDK name")
p.process_pair("jdk1.url", "http://somewhere/myjdk.exe")
p.process_pair("jdk1.dest-file", "myjdk.exe")
p.process_pair("jdk1.jcpol-url", "http://somewhere/some-jcpol.zip")
p.process_pair("jdk1.jcpol-file", "some-jcpol.zip")
p.process_pair("jdk1.home", "C:\\jdk1")
p.process_pair("jdk1.re", "(jdk.*)/jre")
p.process_pair("jdk.download.supported", "true")
p.process_pair("jce.download.supported", "true")
pem_side_effect1 = [False, True, False]
return p, jdk1_url, res_location, pem_side_effect1
@OsFamilyFuncImpl(OsFamilyImpl.DEFAULT)
def _init_test_jdk_mocks():
jdk1_url = "http://somewhere/somewhere.tar.gz"
res_location = MagicMock()
p = Properties()
p.process_pair("java.releases", "jdk1")
p.process_pair("jdk1.desc", "JDK name")
p.process_pair("jdk1.url", jdk1_url)
p.process_pair("jdk1.dest-file", "somewhere.tar.gz")
p.process_pair("jdk1.jcpol-url", "http://somewhere/some-jcpol.tar.gz")
p.process_pair("jdk1.jcpol-file", "some-jcpol.tar.gz")
p.process_pair("jdk1.home", "/jdk1")
p.process_pair("jdk1.re", "(jdk.*)/jre")
p.process_pair("jdk.download.supported", "true")
p.process_pair("jce.download.supported", "true")
pem_side_effect1 = [True, False, True, False]
return p, jdk1_url, res_location, pem_side_effect1
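# pem_side_effect1 is reused further down as path_existsMock.side_effect,
# i.e. the answers successive os.path.exists() probes will return.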
args = MagicMock()
args.java_home = "somewhere"
args.silent = False
p, jdk1_url, res_location, pem_side_effect1 = _init_test_jdk_mocks()
validate_jdk_mock.return_value = False
path_existsMock.return_value = False
get_resources_location_mock.return_value = res_location
get_JAVA_HOME_mock.return_value = False
read_ambari_user_mock.return_value = "ambari"
get_ambari_properties_mock.return_value = p
# Test case: ambari.properties not found
try:
download_and_install_jdk(args)
self.fail("Should throw exception because of not found ambari.properties")
except FatalException:
# Expected
self.assertTrue(get_ambari_properties_mock.called)
pass
# Test case: JDK already exists
args.java_home = None
args.jdk_location = None
get_JAVA_HOME_mock.return_value = "some_jdk"
validate_jdk_mock.return_value = True
get_YN_input_mock.return_value = False
path_existsMock.return_value = False
run_os_command_mock.return_value = 0, "", ""
rcode = download_and_install_jdk(args)
self.assertEqual(0, rcode)
# Test case: java home setup
args.java_home = "somewhere"
validate_jdk_mock.return_value = True
path_existsMock.return_value = False
get_JAVA_HOME_mock.return_value = None
rcode = download_and_install_jdk(args)
self.assertEqual(0, rcode)
self.assertTrue(update_properties_mock.called)
# Test case: JDK file does not exist, property not defined
validate_jdk_mock.return_value = False
path_existsMock.return_value = False
get_ambari_properties_mock.return_value = p
p.removeProp("jdk1.url")
try:
download_and_install_jdk(args)
self.fail("Should throw exception")
except FatalException:
# Expected
pass
# Test case: JDK file does not exist, HTTP response does not
# contain Content-Length
p.process_pair("jdk1.url", jdk1_url)
validate_jdk_mock.return_value = False
path_existsMock.return_value = False
get_YN_input_mock.return_value = True
get_validated_string_input_mock.return_value = "1"
run_os_command_mock.return_value = (0, "Wrong out", None)
try:
download_and_install_jdk(args)
self.fail("Should throw exception")
except FatalException:
# Expected
pass
# Successful JDK download
args.java_home = None
validate_jdk_mock.return_value = False
path_existsMock.reset_mock()
path_existsMock.side_effect = [False, False, False]
path_isfileMock.return_value = False
args.jdk_location = None
run_os_command_mock.return_value = (0, "Creating jdk1/jre", None)
statResult = MagicMock()
statResult.st_size = 32000
statMock.return_value = statResult
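# A non-zero st_size presumably satisfies the post-download size check.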
rcode = download_and_install_jdk(args)
self.assertEqual(0, rcode)
# Test case: not accept the license"
get_YN_input_mock.return_value = False
path_existsMock.reset_mock()
path_existsMock.side_effect = [False, False, True, False, True, False]
download_and_install_jdk(args)
self.assertTrue(exit_mock.called)
# Test case: jdk is already installed, ensure that JCE check is skipped if -j option is not supplied.
args.jdk_location = None
get_JAVA_HOME_mock.return_value = "some_jdk"
validate_jdk_mock.return_value = True
get_YN_input_mock.return_value = False
path_existsMock.reset_mock()
path_existsMock.side_effect = pem_side_effect1
force_download_file_mock.reset_mock()
with patch("ambari_server.serverSetup.JDKSetup._download_jce_policy") as download_jce_policy_mock:
rcode = download_and_install_jdk(args)
self.assertFalse(download_jce_policy_mock.called)
self.assertFalse(force_download_file_mock.called)
# Test case: Update JAVA_HOME location using command: ambari-server setup -j %NEW_LOCATION%
update_properties_mock.reset_mock()
args.java_home = "somewhere"
validate_jdk_mock.return_value = True
path_existsMock.reset_mock()
path_existsMock.side_effect = pem_side_effect1
get_JAVA_HOME_mock.return_value = "some_jdk"
path_isfileMock.return_value = True
download_and_install_jdk(args)
self.assertTrue(update_properties_mock.call_count == 1)
# Test case: Negative test case JAVA_HOME location should not be updated if -j option is supplied and
# jce_policy file already exists in resources dir.
#write_property_mock.reset_mock()
#args.java_home = "somewhere"
#path_existsMock.side_effect = None
#path_existsMock.return_value = True
#get_JAVA_HOME_mock.return_value = "some_jdk"
#try:
# download_and_install_jdk(args)
# self.fail("Should throw exception")
#except FatalException as fe:
# Expected
# self.assertFalse(write_property_mock.called)
# Test case: Setup ambari-server first time, Custom JDK selected, JDK exists
args.java_home = None
args.jdk_location = None
validate_jdk_mock.return_value = False
update_properties_mock.reset_mock()
path_existsMock.reset_mock()
path_existsMock.side_effect = [True, True, True, True]
get_validated_string_input_mock.return_value = "2"
get_JAVA_HOME_mock.return_value = None
rcode = download_and_install_jdk(args)
self.assertEqual(0, rcode)
self.assertTrue(update_properties_mock.called)
# Test case: Setup ambari-server first time, Custom JDK selected, JDK not exists
update_properties_mock.reset_mock()
validate_jdk_mock.return_value = False
path_existsMock.reset_mock()
path_existsMock.side_effect = pem_side_effect1
get_validated_string_input_mock.return_value = "2"
get_JAVA_HOME_mock.return_value = None
try:
download_and_install_jdk(args)
self.fail("Should throw exception")
except FatalException as fe:
# Expected
pass
# Test when custom java home exists but java binary file doesn't exist
args.java_home = None
validate_jdk_mock.return_value = False
path_isfileMock.return_value = False
update_properties_mock.reset_mock()
path_existsMock.reset_mock()
path_existsMock.side_effect = pem_side_effect1
get_validated_string_input_mock.return_value = "2"
get_JAVA_HOME_mock.return_value = None
flag = False
try:
download_and_install_jdk(args)
self.fail("Should throw exception")
except FatalException as fe:
# Expected
flag = True
pass
self.assertTrue(flag)
#Test case: Setup ambari-server with java home passed. Path to java home doesn't exist
args.java_home = "somewhere"
validate_jdk_mock.return_value = False
path_existsMock.reset_mock()
path_existsMock.side_effect = pem_side_effect1
try:
download_and_install_jdk(args)
self.fail("Should throw exception")
except FatalException as fe:
self.assertTrue("Path to java home somewhere or java binary file does not exists" in fe.reason)
pass
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch("ambari_server.dbConfiguration_linux.run_os_command")
def test_get_postgre_status(self, run_os_command_mock):
run_os_command_mock.return_value = (0, "running", None)
pg_status, retcode, out, err = PGConfig._get_postgre_status()
self.assertEqual("running", pg_status)
run_os_command_mock.return_value = (1, "wrong", None)
pg_status, retcode, out, err = PGConfig._get_postgre_status()
self.assertEqual(None, pg_status)
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch("time.sleep")
@patch("subprocess.Popen")
@patch("ambari_server.dbConfiguration_linux.run_os_command")
@patch.object(PGConfig, "_get_postgre_status")
def test_check_postgre_up(self, get_postgre_status_mock, run_os_command_mock,
popen_mock, sleep_mock):
from ambari_server import serverConfiguration
p = MagicMock()
p.communicate.return_value = (None, None)
p.returncode = 0
popen_mock.return_value = p
get_postgre_status_mock.return_value = "running", 0, "", ""
serverConfiguration.OS_TYPE = OSConst.OS_REDHAT
p.poll.return_value = 0
run_os_command_mock.return_value = (0, None, None)
pg_status, retcode, out, err = PGConfig._check_postgre_up()
self.assertEqual(0, retcode)
serverConfiguration.OS_TYPE = OSConst.OS_SUSE
run_os_command_mock.return_value = (0, None, None)
p.poll.return_value = 0
get_postgre_status_mock.return_value = "stopped", 0, "", ""
pg_status, retcode, out, err = PGConfig._check_postgre_up()
self.assertEqual(0, retcode)
pass
@patch("platform.linux_distribution")
@patch("platform.system")
@patch("ambari_commons.logging_utils.print_info_msg")
@patch("ambari_commons.logging_utils.print_error_msg")
@patch("ambari_server.serverSetup.get_ambari_properties")
@patch("ambari_server.serverSetup.write_property")
@patch("ambari_server.serverConfiguration.get_conf_dir")
def test_configure_os_settings(self, get_conf_dir_mock, write_property_mock, get_ambari_properties_mock,
print_error_msg_mock, print_info_msg_mock,
systemMock, distMock):
get_ambari_properties_mock.return_value = -1
rcode = configure_os_settings()
self.assertEqual(-1, rcode)
p = MagicMock()
p[OS_TYPE_PROPERTY] = 'somevalue'
get_ambari_properties_mock.return_value = p
rcode = configure_os_settings()
self.assertEqual(0, rcode)
p.__getitem__.return_value = ""
rcode = configure_os_settings()
self.assertEqual(0, rcode)
self.assertTrue(write_property_mock.called)
self.assertEqual(2, write_property_mock.call_count)
self.assertEqual(write_property_mock.call_args_list[0][0][0], "server.os_family")
self.assertEqual(write_property_mock.call_args_list[1][0][0], "server.os_type")
pass
@patch("__builtin__.open")
@patch("ambari_server.serverConfiguration.Properties")
@patch("ambari_server.serverConfiguration.search_file")
@patch("ambari_server.serverConfiguration.get_conf_dir")
def test_get_JAVA_HOME(self, get_conf_dir_mock, search_file_mock,
Properties_mock, openMock):
openMock.side_effect = Exception("exception")
result = get_JAVA_HOME()
self.assertEqual(None, result)
expected = os.path.dirname(__file__)
p = MagicMock()
p.__getitem__.return_value = expected
openMock.side_effect = None
Properties_mock.return_value = p
result = get_JAVA_HOME()
self.assertEqual(expected, result)
pass
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch("ambari_server.dbConfiguration.get_ambari_properties")
def test_prompt_db_properties_default(self, get_ambari_properties_mock):
args = MagicMock()
args.must_set_database_options = False
del args.database_index
del args.dbms
del args.database_host
del args.database_port
del args.database_name
del args.database_username
del args.database_password
del args.persistence_type
get_ambari_properties_mock.return_value = Properties()
prompt_db_properties(args)
self.assertEqual(args.database_index, 0)
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch.object(LinuxDBMSConfig, "_setup_remote_server")
@patch("ambari_server.dbConfiguration_linux.print_info_msg")
@patch("ambari_server.dbConfiguration_linux.read_password")
@patch("ambari_server.dbConfiguration_linux.get_validated_string_input")
@patch("ambari_server.dbConfiguration.get_validated_string_input")
@patch("ambari_server.serverSetup.get_YN_input")
def test_prompt_db_properties_oracle_sname(self, gyni_mock, gvsi_mock, gvsi_2_mock, rp_mock, print_info_msg_mock, srs_mock):
gyni_mock.return_value = True
list_of_return_values = ["ambari-server", "ambari", "1", "1521", "localhost", "2"]
def side_effect(*args, **kwargs):
return list_of_return_values.pop()
gvsi_mock.side_effect = side_effect
gvsi_2_mock.side_effect = side_effect
rp_mock.return_value = "password"
args = MagicMock()
args.must_set_database_options = True
del args.database_index
del args.dbms
del args.database_host
del args.database_port
del args.database_name
del args.database_username
del args.database_password
del args.persistence_type
del args.sid_or_sname
del args.jdbc_url
set_silent(False)
prompt_db_properties(args)
self.assertEqual(args.database_index, 1)
props = Properties()
factory = DBMSConfigFactory()
dbmsConfig = factory.create(args, props)
self.assertEqual(dbmsConfig.dbms, "oracle")
self.assertEqual(dbmsConfig.database_port, "1521")
self.assertEqual(dbmsConfig.database_host, "localhost")
self.assertEqual(dbmsConfig.database_name, "ambari")
self.assertEqual(dbmsConfig.database_username, "ambari")
self.assertEqual(dbmsConfig.database_password, "bigdata")
self.assertEqual(dbmsConfig.sid_or_sname, "sid")
dbmsConfig.configure_database(props, args)
self.assertEqual(dbmsConfig.database_username, "ambari-server")
self.assertEqual(dbmsConfig.sid_or_sname, "sname")
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch("os.path.isdir")
@patch("os.mkdir")
@patch("os.chown")
@patch("pwd.getpwnam")
@patch.object(OSCheck, "get_os_family")
@patch.object(LinuxDBMSConfig, "_setup_remote_server")
@patch("ambari_server.dbConfiguration_linux.print_info_msg")
@patch("ambari_server.dbConfiguration_linux.read_password")
@patch("ambari_server.dbConfiguration_linux.get_validated_string_input")
@patch("ambari_server.dbConfiguration.get_validated_string_input")
@patch("ambari_server.serverSetup.get_YN_input")
def test_prompt_db_properties_oracle_adv(self, gyni_mock, gvsi_mock, gvsi_2_mock, rp_mock, print_info_msg_mock, sls_mock,
get_os_family_mock, get_pw_nam_mock, chown_mock, mkdir_mock, isdir_mock):
gyni_mock.return_value = True
list_of_return_values = ["ambari-server", "ambari", "2", "1521", "localhost", "2"]
get_os_family_mock.return_value = OSConst.SUSE_FAMILY
pw = MagicMock()
pw.pw_uid = 0
pw.pw_gid = 0
get_pw_nam_mock.return_value = pw
def side_effect(*args, **kwargs):
return list_of_return_values.pop()
gvsi_mock.side_effect = side_effect
gvsi_2_mock.side_effect = side_effect
rp_mock.return_value = "password"
args = MagicMock()
args.must_set_database_options = True
del args.database_index
del args.dbms
del args.database_host
del args.database_port
del args.database_name
del args.database_username
del args.database_password
del args.persistence_type
del args.sid_or_sname
del args.jdbc_url
set_silent(False)
prompt_db_properties(args)
self.assertEqual(args.database_index, 1)
props = Properties()
factory = DBMSConfigFactory()
dbmsConfig = factory.create(args, props)
self.assertEqual(dbmsConfig.dbms, "oracle")
self.assertEqual(dbmsConfig.database_port, "1521")
self.assertEqual(dbmsConfig.database_host, "localhost")
self.assertEqual(dbmsConfig.database_name, "ambari")
self.assertEqual(dbmsConfig.database_username, "ambari")
self.assertEqual(dbmsConfig.database_password, "bigdata")
isdir_mock.return_value = False
dbmsConfig.configure_database(props, args)
self.assertEqual(dbmsConfig.database_username, "ambari-server")
self.assertEqual(dbmsConfig.database_password, "password")
self.assertEqual(dbmsConfig.sid_or_sname, "sid")
self.assertTrue(chown_mock.called)
self.assertTrue(mkdir_mock.called)
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch("os.path.isdir")
@patch("os.mkdir")
@patch("os.chown")
@patch("pwd.getpwnam")
@patch.object(OSCheck, "get_os_family")
@patch.object(PGConfig, "_setup_local_server")
@patch("ambari_server.dbConfiguration_linux.print_info_msg")
@patch("ambari_server.dbConfiguration_linux.read_password")
@patch("ambari_server.dbConfiguration_linux.get_validated_string_input")
@patch("ambari_server.dbConfiguration.get_validated_string_input")
@patch("ambari_server.serverSetup.get_YN_input")
def test_prompt_db_properties_postgre_adv(self, gyni_mock, gvsi_mock, gvsi_2_mock, rp_mock, print_info_msg_mock, sls_mock,
get_os_family_mock, get_pw_nam_mock, chown_mock, mkdir_mock, isdir_mock):
gyni_mock.return_value = True
list_of_return_values = ["ambari-server", "postgres", "ambari", "ambari", "1"]
get_os_family_mock.return_value = OSConst.SUSE_FAMILY
pw = MagicMock()
pw.pw_uid = 0
pw.pw_gid = 0
get_pw_nam_mock.return_value = pw
def side_effect(*args, **kwargs):
return list_of_return_values.pop()
gvsi_mock.side_effect = side_effect
gvsi_2_mock.side_effect = side_effect
rp_mock.return_value = "password"
args = MagicMock()
args.must_set_database_options = True
del args.database_index
del args.dbms
del args.database_host
del args.database_port
del args.database_name
del args.database_username
del args.database_password
del args.persistence_type
set_silent(False)
prompt_db_properties(args)
self.assertEqual(args.database_index, 0)
props = Properties()
factory = DBMSConfigFactory()
dbmsConfig = factory.create(args, props)
self.assertEqual(dbmsConfig.dbms, "postgres")
self.assertEqual(dbmsConfig.database_port, "5432")
self.assertEqual(dbmsConfig.database_host, "localhost")
self.assertEqual(dbmsConfig.database_name, "ambari")
self.assertEqual(dbmsConfig.database_username, "ambari")
self.assertEqual(dbmsConfig.database_password, "bigdata")
dbmsConfig.configure_database(props, args)
self.assertEqual(dbmsConfig.database_username, "ambari-server")
self.assertEqual(dbmsConfig.database_password, "password")
self.assertEqual(dbmsConfig.sid_or_sname, "sid")
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch("ambari_server.dbConfiguration_linux.store_password_file")
@patch("ambari_server.dbConfiguration_linux.read_password")
@patch("ambari_server.dbConfiguration_linux.get_validated_string_input")
@patch("ambari_server.dbConfiguration_linux.get_YN_input")
def test_prompt_db_properties_for_each_database_type(self, gyni_mock, gvsi_mock, rp_mock, spf_mock):
"""
:return: Validates that installation for each database type correctly stores the database type, database name,
and optionally the postgres schema name.
"""
from ambari_server import serverConfiguration
gyni_mock.return_value = True
rp_mock.return_value = "password"
spf_mock.return_value = "encrypted password"
# Values to use while installing several database types
hostname = "localhost"
db_name = "db_ambari"
postgres_schema = "sc_ambari"
port = "1234"
local_admin_user = "postgres"
oracle_service = "1"
oracle_service_name = "ambari"
user_name = "ambari"
# Input values
postgres_embedded_values = [local_admin_user, db_name, postgres_schema, hostname]
oracle_values = [hostname, port, oracle_service, oracle_service_name, user_name]
mysql_values = [hostname, port, db_name, user_name]
postgres_external_values = [hostname, port, db_name, postgres_schema, user_name]
mssql_values = [hostname, port, db_name, user_name]
list_of_return_values = postgres_embedded_values + oracle_values + mysql_values + postgres_external_values + mssql_values
list_of_return_values = list_of_return_values[::-1] # Reverse the list since the input will be popped
def side_effect(*args, **kwargs):
return list_of_return_values.pop()
gvsi_mock.side_effect = side_effect
if AMBARI_CONF_VAR in os.environ:
del os.environ[AMBARI_CONF_VAR]
tempdir = tempfile.gettempdir()
os.environ[AMBARI_CONF_VAR] = tempdir
prop_file = os.path.join(tempdir, "ambari.properties")
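# i = 0..4 walks the factory's database ordering checked in the if/elif
# chain below: embedded Postgres, Oracle, MySQL, external Postgres, MSSQL.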
for i in range(0, 5):
# Use the expected path of the ambari.properties file to delete it if it exists, and then create a new one
# during each use case.
if os.path.exists(prop_file):
os.remove(prop_file)
with open(prop_file, "w") as f:
f.write("server.jdbc.database_name=oldDBName")
f.close()
serverConfiguration.AMBARI_PROPERTIES_FILE = prop_file
args = MagicMock()
properties = Properties()
args.database_index = i
args.silent = False
del args.dbms
del args.database_host
del args.local_admin_user
del args.database_port
del args.database_name
del args.database_username
del args.database_password
del args.sid_or_sname
del args.jdbc_url
factory = DBMSConfigFactory()
dbConfig = factory.create(args, properties)
dbConfig._prompt_db_properties()
if dbConfig._is_local_database():
dbConfig._setup_local_server(properties, None)
else:
dbConfig._setup_remote_server(properties, None)
if i == 0:
# Postgres Embedded
self.assertEqual(properties[JDBC_DATABASE_PROPERTY], "postgres")
self.assertEqual(properties[JDBC_DATABASE_NAME_PROPERTY], db_name)
self.assertEqual(properties[JDBC_POSTGRES_SCHEMA_PROPERTY], postgres_schema)
self.assertEqual(properties[PERSISTENCE_TYPE_PROPERTY], "local")
elif i == 1:
# Oracle
self.assertEqual(properties[JDBC_DATABASE_PROPERTY], "oracle")
self.assertFalse(JDBC_POSTGRES_SCHEMA_PROPERTY in properties.propertyNames())
self.assertEqual(properties[PERSISTENCE_TYPE_PROPERTY], "remote")
elif i == 2:
# MySQL
self.assertEqual(properties[JDBC_DATABASE_PROPERTY], "mysql")
self.assertFalse(JDBC_POSTGRES_SCHEMA_PROPERTY in properties.propertyNames())
self.assertEqual(properties[PERSISTENCE_TYPE_PROPERTY], "remote")
elif i == 3:
# Postgres External
self.assertEqual(properties[JDBC_DATABASE_PROPERTY], "postgres")
self.assertEqual(properties[JDBC_DATABASE_NAME_PROPERTY], db_name)
self.assertEqual(properties[JDBC_POSTGRES_SCHEMA_PROPERTY], postgres_schema)
self.assertEqual(properties[PERSISTENCE_TYPE_PROPERTY], "remote")
elif i == 4:
# MSSQL
self.assertEqual(properties[JDBC_DATABASE_PROPERTY], "mssql")
self.assertFalse(JDBC_POSTGRES_SCHEMA_PROPERTY in properties.propertyNames())
self.assertEqual(properties[PERSISTENCE_TYPE_PROPERTY], "remote")
pass
@patch.object(os.path, "exists")
@patch.object(os.path, "isfile")
def test_validate_jdk(self, isfile_mock, exists_mock):
exists_mock.side_effect = [False]
result = validate_jdk("path")
self.assertFalse(result)
exists_mock.side_effect = [True, False]
result = validate_jdk("path")
self.assertFalse(result)
exists_mock.side_effect = [True, True]
isfile_mock.return_value = False
result = validate_jdk("path")
self.assertFalse(result)
exists_mock.side_effect = [True, True]
isfile_mock.return_value = True
result = validate_jdk("path")
self.assertTrue(result)
pass
@patch("glob.glob")
@patch("ambari_server.serverConfiguration.get_JAVA_HOME")
@patch("ambari_server.serverConfiguration.validate_jdk")
def test_find_jdk(self, validate_jdk_mock, get_JAVA_HOME_mock, globMock):
get_JAVA_HOME_mock.return_value = "somewhere"
validate_jdk_mock.return_value = True
result = find_jdk()
self.assertEqual("somewhere", result)
get_JAVA_HOME_mock.return_value = None
globMock.return_value = []
result = find_jdk()
self.assertEqual(None, result)
globMock.return_value = ["one", "two"]
result = find_jdk()
self.assertNotEqual(None, result)
globMock.return_value = ["one", "two"]
validate_jdk_mock.side_effect = [False, True]
result = find_jdk()
self.assertEqual(result, "one")
pass
@patch("os.path.exists")
@patch("zipfile.ZipFile")
@patch("os.path.split")
@patch("os.listdir")
@patch("ambari_server.serverSetup.copy_files")
@patch("shutil.rmtree")
def test_unpack_jce_policy(self, rmtree_mock, copy_files_mock, os_listdir_mock, os_path_split_mock, zipfile_mock, exists_mock):
# Testing the case when the zip file doesn't contain any folder
exists_mock.return_value = True
zipfile = MagicMock()
zipfile_mock.return_value = zipfile
zip_members = ["US_export_policy.jar", "local_policy.jar", "README.txt"]
zipfile.namelist.return_value = zip_members
os_path_split_mock.return_value = [""]
expand_jce_zip_file("", "")
self.assertTrue(exists_mock.called)
self.assertTrue(zipfile_mock.called)
self.assertTrue(os_path_split_mock.called)
# Testing the case when the zip file contains a folder
unziped_jce_path = "jce"
os_path_split_mock.return_value = unziped_jce_path
expand_jce_zip_file("", "")
self.assertTrue(exists_mock.called)
self.assertTrue(zipfile_mock.called)
self.assertTrue(os_listdir_mock.called)
self.assertTrue(copy_files_mock.called)
self.assertTrue(rmtree_mock.called)
# Testing when the jdk_security_path or jce_zip_path doesn't exist
exists_mock.return_value = False
try:
expand_jce_zip_file("", "")
self.fail("Should throw exception")
except FatalException:
# Expected
pass
exists_mock.return_value = True
# Testing when zipfile fails with an error
zipfile_mock.side_effect = FatalException(1,"Extract error")
try:
expand_jce_zip_file("", "")
self.fail("Should throw exception")
except FatalException:
# Expected
pass
@patch("os.path.exists")
@patch("shutil.copy")
@patch("os.path.split")
@patch("ambari_server.serverSetup.update_properties")
@patch.object(JDKSetup, "unpack_jce_policy")
@patch("ambari_server.serverSetup.get_ambari_properties")
@patch("ambari_commons.os_utils.search_file")
@patch("__builtin__.open")
@patch("ambari_server.serverSetup.logger")
def test_setup_jce_policy(self, logger_mock, open_mock, search_file_mock, get_ambari_properties_mock, unpack_jce_policy_mock,
update_properties_mock, path_split_mock, shutil_copy_mock, exists_mock):
exists_mock.return_value = True
properties = Properties()
properties.process_pair(JAVA_HOME_PROPERTY, "/java_home")
unpack_jce_policy_mock.return_value = 0
get_ambari_properties_mock.return_value = properties
conf_file = 'etc/ambari-server/conf/ambari.properties'
search_file_mock.return_value = conf_file
path_split_mock.return_value = ["/path/to", "JCEPolicy.zip"]
args = ['setup-jce', '/path/to/JCEPolicy.zip']
setup_jce_policy(args)
shutil_copy_mock.assert_called_with(args[1], configDefaults.SERVER_RESOURCES_DIR)
self.assertTrue(unpack_jce_policy_mock.called)
self.assertTrue(get_ambari_properties_mock.called)
self.assertTrue(update_properties_mock.called)
# Testing that if the source and the destination are the same it will not try to copy the file
path_split_mock.return_value = [configDefaults.SERVER_RESOURCES_DIR, "JCEPolicy.zip"]
shutil_copy_mock.reset_mock()
setup_jce_policy(args)
self.assertFalse(shutil_copy_mock.called)
self.assertTrue(unpack_jce_policy_mock.called)
self.assertTrue(get_ambari_properties_mock.called)
self.assertTrue(update_properties_mock.called)
path_split_mock.return_value = ["/path/to", "JCEPolicy.zip"]
# Testing with bad path
exists_mock.return_value = False
try:
setup_jce_policy(args)
self.fail("Should throw exception")
except FatalException:
# Expected
pass
exists_mock.return_value = True
# Testing with an error produced by shutil.copy
shutil_copy_mock.reset_mock()
shutil_copy_mock.side_effect = FatalException(1, "Error trying to copy the file.")
try:
setup_jce_policy(args)
self.fail("Should throw exception")
except FatalException:
# Expected
pass
# Testing with an error produced by Properties.store function
update_properties_mock.side_effect = Exception("Invalid file.")
self.assertRaises(Exception, setup_jce_policy, args)
update_properties_mock.reset_mock()
# Testing with an error produced by unpack_jce_policy
unpack_jce_policy_mock.side_effect = FatalException(1, "Can not install JCE policy")
try:
setup_jce_policy(args)
self.fail("Should throw exception")
except FatalException:
# Expected
pass
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch("pwd.getpwnam")
@patch("resource_management.core.shell.call")
@patch("os.path.exists")
@patch("os.path.isfile")
@patch("ambari_commons.os_utils.remove_file")
@patch("ambari_server.dbConfiguration_linux.LinuxDBMSConfig.ensure_jdbc_driver_installed")
@patch("ambari_server.dbConfiguration_linux.get_YN_input")
@patch("ambari_server.serverSetup.update_properties")
@patch("ambari_server.dbConfiguration_linux.get_ambari_properties")
@patch("ambari_server.dbConfiguration_linux.store_password_file")
@patch("ambari_server.dbConfiguration_linux.run_os_command")
@patch("ambari_server.dbConfiguration_linux.PGConfig._configure_postgres")
@patch("ambari_server.dbConfiguration_linux.PGConfig._check_postgre_up")
@patch("ambari_server.dbConfiguration_linux.PGConfig._is_jdbc_user_changed")
@patch("ambari_server.serverSetup.verify_setup_allowed")
@patch("ambari_server.dbConfiguration_linux.read_password")
@patch("ambari_server.dbConfiguration_linux.get_validated_string_input")
@patch("ambari_server.dbConfiguration.get_validated_string_input")
@patch("ambari_server.serverSetup.get_YN_input")
@patch("ambari_server.serverSetup.get_ambari_properties")
@patch("ambari_server.serverSetup.configure_os_settings")
@patch("ambari_server.serverSetup.download_and_install_jdk")
@patch("ambari_server.serverSetup.check_ambari_user")
@patch("ambari_server.serverSetup.check_jdbc_drivers")
@patch("ambari_server.serverSetup.disable_security_enhancements")
@patch("ambari_server.serverSetup.is_root")
@patch("ambari_server.serverSetup.proceedJDBCProperties")
@patch("ambari_server.serverSetup.extract_views")
@patch("ambari_server.serverSetup.adjust_directory_permissions")
@patch("ambari_server.serverSetup.service_setup")
@patch("ambari_server.serverSetup.read_ambari_user")
@patch("ambari_server.serverSetup.expand_jce_zip_file")
def test_setup_linux(self, expand_jce_zip_file_mock, read_ambari_user_mock,
service_setup_mock, adjust_dirs_mock, extract_views_mock, proceedJDBCProperties_mock, is_root_mock,
disable_security_enhancements_mock, check_jdbc_drivers_mock, check_ambari_user_mock,
download_jdk_mock, configure_os_settings_mock, get_ambari_properties_mock,
get_YN_input_mock, gvsi_mock, gvsi_1_mock,
read_password_mock, verify_setup_allowed_method, is_jdbc_user_changed_mock, check_postgre_up_mock,
configure_postgres_mock, run_os_command_1_mock,
store_password_file_mock, get_ambari_properties_1_mock, update_properties_mock,
get_YN_input_1_mock, ensure_jdbc_driver_installed_mock,
remove_file_mock, isfile_mock, exists_mock,
run_os_command_mock, get_pw_nam_mock):
hostname = "localhost"
db_admin_user = 'postgres'
db_name = "db_ambari"
postgres_schema = "sc_ambari"
db_username = 'u_ambari'
port = "1234"
oracle_service = "1"
oracle_service_name = "ambari"
user_name = "ambari"
args = MagicMock()
del args.dbms
del args.database_index
del args.database_host
del args.database_port
del args.database_name
del args.database_username
del args.database_password
args.silent = False
failed = False
properties = Properties()
def side_effect(username):
raise KeyError("")
get_pw_nam_mock.side_effect = side_effect
get_YN_input_mock.return_value = False
isfile_mock.return_value = False
verify_setup_allowed_method.return_value = 0
exists_mock.return_value = False
remove_file_mock.return_value = 0
run_os_command_mock.return_value = 3,"",""
extract_views_mock.return_value = 0
read_ambari_user_mock.return_value = "ambari"
read_password_mock.return_value = "bigdata2"
get_ambari_properties_mock.return_value = properties
get_ambari_properties_1_mock.return_value = properties
store_password_file_mock.return_value = "encrypted_bigdata2"
ensure_jdbc_driver_installed_mock.return_value = True
check_postgre_up_mock.return_value = (PGConfig.PG_STATUS_RUNNING, 0, "", "")
configure_postgres_mock.return_value = (0, "", "")
run_os_command_1_mock.return_value = (0, "", "")
expand_jce_zip_file_mock.return_value = 0
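# Happy-path defaults: local postgres reported running, the DDL and shell
# commands succeed, and the JCE zip expands cleanly.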
def reset_mocks():
is_jdbc_user_changed_mock.reset_mock()
is_root_mock.reset_mock()
disable_security_enhancements_mock.reset_mock()
check_jdbc_drivers_mock.reset_mock()
check_ambari_user_mock.reset_mock()
run_os_command_mock.reset_mock()
configure_os_settings_mock.reset_mock()
run_os_command_1_mock.reset_mock()
get_YN_input_1_mock.reset_mock()
update_properties_mock.reset_mock()
extract_views_mock.reset_mock()
args = MagicMock()
del args.dbms
del args.database_index
del args.database_host
del args.database_port
del args.database_name
del args.database_username
del args.database_password
del args.persistence_type
del args.sid_or_sname
del args.jdbc_url
del args.init_script_file
del args.drop_script_file
args.jdbc_driver= None
args.jdbc_db = None
args.silent = False
args.skip_view_extraction = False
return args
# Testing call under non-root
is_root_mock.return_value = False
try:
setup(args)
except FatalException as fe:
self.fail("Should not throw exception, only print warning") # see AMBARI-15245
args = reset_mocks()
# Testing calls under root
# remote case
is_root_mock.return_value = True
disable_security_enhancements_mock.return_value = (0, "")
check_ambari_user_mock.return_value = (0, False, 'user', None)
check_jdbc_drivers_mock.return_value = 0
download_jdk_mock.return_value = 0
configure_os_settings_mock.return_value = 0
result = setup(args)
self.assertEqual(None, result)
self.assertTrue(check_ambari_user_mock.called)
self.assertEqual(1, run_os_command_mock.call_count)
self.assertTrue(extract_views_mock.called)
# test that view extraction is skipped when requested (skip_view_extraction)
args = reset_mocks()
args.skip_view_extraction = True
setup(args)
self.assertFalse(extract_views_mock.called)
# Local case
args = reset_mocks()
# Input values
db_selection_values = ["1"]
postgres_values = [db_admin_user, db_name, postgres_schema, db_username]
postgres_values = postgres_values[::-1] # Reverse the list since the input will be popped
def side_effect(*args, **kwargs):
return db_selection_values.pop()
gvsi_mock.side_effect = side_effect
def side_effect_1(*args, **kwargs):
return postgres_values.pop()
gvsi_1_mock.side_effect = side_effect_1
get_YN_input_mock.return_value = True
is_jdbc_user_changed_mock.return_value = False
try:
result = setup(args)
except FatalException:
self.fail("Setup should be successful")
self.assertEqual(None, result)
self.assertTrue(is_jdbc_user_changed_mock.called)
self.assertTrue(update_properties_mock.called)
self.assertTrue(run_os_command_1_mock.called)
self.assertFalse(remove_file_mock.called)
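# the local-DB flow should run the Postgres CREATE DDL script as the configured DB user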
self.assertTrue("Ambari-DDL-Postgres-CREATE.sql" in run_os_command_1_mock.call_args[0][0][3])
self.assertTrue("-U {0}".format(db_username) in run_os_command_1_mock.call_args[0][0][3])
# if DB user name was changed
args = reset_mocks()
is_jdbc_user_changed_mock.return_value = True
db_selection_values = ["1"]
postgres_values = [db_admin_user, db_name, postgres_schema, db_username]
postgres_values = postgres_values[::-1] # Reverse the list since the input will be popped
try:
result = setup(args)
except FatalException:
self.fail("Setup should be successful")
self.assertEqual(None, result)
self.assertTrue(is_jdbc_user_changed_mock.called)
self.assertTrue(update_properties_mock.called)
self.assertTrue(run_os_command_1_mock.called)
self.assertFalse(remove_file_mock.called)
# negative case
args = reset_mocks()
# Use remote database
get_YN_input_1_mock.return_value = False
db_selection_values = ["4"]
postgres_values = [hostname, port, db_name, postgres_schema, user_name]
postgres_values = postgres_values[::-1] # Reverse the list since the input will be popped
try:
result = setup(args)
self.fail("Should throw exception")
except NonFatalException as fe:
self.assertTrue("Remote database setup aborted." in fe.reason)
self.assertFalse(run_os_command_1_mock.called)
# test that setup() skips the normal interactive flow when JDBC driver/db properties are supplied
args = reset_mocks()
args.jdbc_driver= "path/to/driver"
args.jdbc_db = "test_db_name"
setup(args)
self.assertTrue(proceedJDBCProperties_mock.called)
self.assertFalse(disable_security_enhancements_mock.called)
self.assertFalse(check_ambari_user_mock.called)
pass
@only_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch("resource_management.core.shell.call")
@patch("os.path.exists")
@patch("os.path.isfile")
@patch("ambari_commons.os_utils.remove_file")
@patch("ambari_server.dbConfiguration_windows.MSSQLConfig.ensure_jdbc_driver_installed")
@patch("ambari_server.serverSetup.update_properties")
@patch("ambari_server.dbConfiguration_windows.store_password_file")
@patch("ambari_server.dbConfiguration_windows.run_os_command")
@patch("ambari_server.serverSetup.verify_setup_allowed")
@patch("ambari_server.dbConfiguration_windows.get_validated_string_input")
@patch("ambari_server.dbConfiguration.get_validated_string_input")
@patch("ambari_server.serverSetup.get_YN_input")
@patch("ambari_server.serverSetup.get_ambari_properties")
@patch("ambari_server.serverSetup.configure_os_settings")
@patch("ambari_server.serverSetup.download_and_install_jdk")
@patch("ambari_server.serverSetup.check_firewall")
@patch("ambari_server.serverSetup.check_ambari_user")
@patch("ambari_server.serverSetup.check_jdbc_drivers")
@patch("ambari_server.serverSetup.disable_security_enhancements")
@patch("ambari_server.serverSetup.is_root")
@patch("ambari_server.serverSetup.proceedJDBCProperties")
@patch("ambari_server.serverSetup.extract_views")
@patch("ambari_server.serverSetup.adjust_directory_permissions")
@patch("ambari_server.serverSetup.service_setup")
@patch("ambari_server.serverSetup.read_ambari_user")
@patch("ambari_server.serverSetup.expand_jce_zip_file")
def test_setup_windows(self, expand_jce_zip_file_mock, read_ambari_user_mock,
service_setup_mock, adjust_dirs_mock, extract_views_mock, proceedJDBCProperties_mock, is_root_mock,
disable_security_enhancements_mock, check_jdbc_drivers_mock, check_ambari_user_mock, check_firewall_mock,
download_jdk_mock, configure_os_settings_mock, get_ambari_properties_mock,
get_YN_input_mock, gvsi_mock, gvsi_1_mock,
verify_setup_allowed_method, run_os_command_1_mock,
store_password_file_mock, update_properties_mock,
ensure_jdbc_driver_installed_mock,
remove_file_mock, isfile_mock, exists_mock,
run_os_command_mock):
hostname = "localhost"
db_name = "db_ambari"
port = "1433"
user_name = "ambari"
password = "bigdata2"
failed = False
properties = Properties()
get_YN_input_mock.return_value = False
isfile_mock.return_value = False
verify_setup_allowed_method.return_value = 0
exists_mock.return_value = False
remove_file_mock.return_value = 0
run_os_command_mock.return_value = (3, "", "")
extract_views_mock.return_value = 0
read_ambari_user_mock.return_value = "ambari"
get_ambari_properties_mock.return_value = properties
store_password_file_mock.return_value = "encrypted_bigdata2"
ensure_jdbc_driver_installed_mock.return_value = True
run_os_command_1_mock.return_value = (0, "", "")
expand_jce_zip_file_mock.return_value = 0
def reset_mocks():
is_root_mock.reset_mock()
disable_security_enhancements_mock.reset_mock()
check_jdbc_drivers_mock.reset_mock()
check_ambari_user_mock.reset_mock()
run_os_command_mock.reset_mock()
configure_os_settings_mock.reset_mock()
run_os_command_1_mock.reset_mock()
update_properties_mock.reset_mock()
args = MagicMock()
del args.dbms
del args.database_index
del args.database_host
del args.database_port
del args.database_name
del args.database_username
del args.database_password
del args.default_database_host
del args.persistence_type
del args.init_db_script_file
del args.cleanup_db_script_file
del args.sid_or_sname
del args.jdbc_url
args.jdbc_driver = None
args.jdbc_db = None
args.silent = False
args.must_set_database_options = True
return args
args = reset_mocks()
# Testing call under non-root
is_root_mock.return_value = False
try:
setup(args)
self.fail("Should throw exception")
except FatalException as fe:
# Expected
self.assertTrue("administrator-level" in fe.reason)
pass
args = reset_mocks()
# Testing calls under root
is_root_mock.return_value = True
disable_security_enhancements_mock.return_value = (0, "")
check_ambari_user_mock.return_value = (0, False, 'user', None)
check_jdbc_drivers_mock.return_value = 0
download_jdk_mock.return_value = 0
configure_os_settings_mock.return_value = 0
result = setup(args)
self.assertEqual(None, result)
self.assertTrue(check_ambari_user_mock.called)
self.assertEqual(2, run_os_command_1_mock.call_count)
# negative case
args = reset_mocks()
# Use Windows authentication
get_YN_input_mock.return_value = True
gvsi_1_mock.side_effect = [hostname, "1"]
try:
result = setup(args)
except Exception:
self.fail("Shouldn't throw exception")
self.assertTrue(run_os_command_1_mock.called)
# Use SQL Server authentication
get_YN_input_mock.return_value = True
gvsi_1_mock.side_effect = [hostname, "2", user_name, password]
try:
result = setup(args)
except Exception:
self.fail("Shouldn't throw exception")
self.assertTrue(run_os_command_1_mock.called)
# test that setup() skips the normal interactive flow when JDBC driver/db properties are supplied
args = reset_mocks()
args.jdbc_driver= "path/to/driver"
args.jdbc_db = "test_db_name"
setup(args)
self.assertTrue(proceedJDBCProperties_mock.called)
self.assertFalse(disable_security_enhancements_mock.called)
self.assertFalse(check_ambari_user_mock.called)
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch.object(OracleConfig, "_get_remote_script_line")
@patch("ambari_server.serverSetup.is_server_runing")
@patch("ambari_server.dbConfiguration_linux.get_YN_input")
@patch("ambari_server.serverSetup.get_YN_input")
@patch.object(PGConfig, "_setup_db")
@patch("ambari_server.dbConfiguration_linux.print_warning_msg")
@patch("ambari_server.dbConfiguration_linux.print_info_msg")
@patch("ambari_server.dbConfiguration_linux.run_os_command")
@patch("ambari_server.dbConfiguration.decrypt_password_for_alias")
@patch("ambari_server.serverSetup.get_ambari_properties")
@patch("ambari_server.serverSetup.is_root")
def test_reset(self, is_root_mock, get_ambari_properties_mock, decrypt_password_for_alias_mock,
run_os_command_mock, print_info_msg_mock, print_warning_msg_mock,
setup_db_mock, get_YN_input_mock, get_YN_input_2_mock, is_server_running_mock,
get_remote_script_line_mock):
def reset_mocks():
args = MagicMock()
del args.dbms
del args.database_index
del args.database_host
del args.database_port
del args.database_name
del args.database_username
del args.database_password
del args.persistence_type
del args.init_script_file
del args.drop_script_file
del args.sid_or_sname
del args.jdbc_url
return args
properties = Properties()
get_ambari_properties_mock.return_value = properties
args = reset_mocks()
args.persistence_type = "local"
get_YN_input_mock.return_value = False
decrypt_password_for_alias_mock.return_value = "password"
is_server_running_mock.return_value = (False, 0)
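# successive _setup_db outcomes: success, then "database in use", then "user already exists" errors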
setup_db_mock.side_effect = [(0,None, None),(0,None, "ERROR: database 'ambari' is being accessed by other users"), (0, None, "ERROR: user 'mapred' already exist")]
# Testing call under non-root
is_root_mock.return_value = False
try:
reset(args)
self.fail("Should throw exception")
except FatalException as fe:
# Expected
self.assertTrue("root-level" in fe.reason)
pass
# Testing calls under root
is_root_mock.return_value = True
try:
reset(args)
self.fail("Should throw exception")
except FatalException as fe:
# Expected
self.assertFalse("root-level" in fe.reason)
pass
get_YN_input_mock.return_value = True
get_YN_input_2_mock.return_value = True
run_os_command_mock.return_value = (1, None, None)
try:
reset(args)
self.fail("Should throw exception")
except FatalException:
# Expected
pass
run_os_command_mock.return_value = (0, None, None)
reset(args)
self.assertTrue(setup_db_mock.called)
# Database errors cases
is_server_running_mock.side_effect = [(True, 123), (False, 0), (False, 0), (False, 0), (False, 0)]
try:
reset(args)
self.fail("Should throw exception")
except FatalException:
# Expected
pass
try:
reset(args)
self.fail("Should throw exception")
except NonFatalException:
# Expected
pass
args = reset_mocks()
args.dbms = "postgres"
try:
# remote db case
reset(args)
self.fail("Should throw exception")
except NonFatalException:
# Expected
pass
args = reset_mocks()
args.dbms = "oracle"
print_warning_msg_mock.reset_mock()
get_remote_script_line_mock.reset_mock()
get_remote_script_line_mock.side_effect = ["drop", "create"]
try:
# remote db case (not Postgres)
rcode = reset(args)
self.fail("Should throw exception")
except NonFatalException:
# Expected
self.assertTrue(get_remote_script_line_mock.called)
self.assertTrue(print_warning_msg_mock.called)
pass
pass
@only_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch("ambari_server.serverSetup.is_server_runing")
@patch("ambari_server.serverSetup.get_YN_input")
@patch("ambari_server.dbConfiguration_windows.print_warning_msg")
@patch("ambari_server.dbConfiguration_windows.print_info_msg")
@patch("ambari_server.dbConfiguration_windows.run_os_command")
@patch("ambari_server.dbConfiguration.decrypt_password_for_alias")
@patch("ambari_server.serverSetup.get_ambari_properties")
@patch("ambari_server.serverSetup.is_root")
def test_reset(self, is_root_mock, get_ambari_properties_mock, decrypt_password_for_alias_mock,
run_os_command_mock, print_info_msg_mock, print_warning_msg_mock,
get_YN_input_mock, is_server_running_mock):
def reset_mocks():
args = MagicMock()
del args.dbms
del args.database_index
del args.database_host
del args.database_port
del args.database_name
del args.database_username
del args.database_password
del args.default_database_host
del args.persistence_type
del args.init_db_script_file
del args.cleanup_db_script_file
del args.sid_or_sname
del args.jdbc_url
return args
properties = Properties()
get_ambari_properties_mock.return_value = properties
args = reset_mocks()
args.persistence_type = "local"
get_YN_input_mock.return_value = False
decrypt_password_for_alias_mock.return_value = "password"
is_server_running_mock.return_value = (False, 0)
# Testing call under non-root
is_root_mock.return_value = False
try:
reset(args)
self.fail("Should throw exception")
except FatalException as fe:
# Expected
self.assertTrue("administrator-level" in fe.reason)
pass
# Testing calls under root
is_root_mock.return_value = True
try:
reset(args)
self.fail("Should throw exception")
except FatalException as fe:
# Expected
self.assertFalse("administrator-level" in fe.reason)
pass
get_YN_input_mock.return_value = True
run_os_command_mock.return_value = (1, None, None)
try:
reset(args)
self.fail("Should throw exception")
except FatalException:
# Expected
pass
run_os_command_mock.reset_mock()
run_os_command_mock.return_value = (0, None, None)
reset(args)
self.assertTrue(run_os_command_mock.called)
self.assertEqual(run_os_command_mock.call_count, 2)
# Database errors cases
is_server_running_mock.side_effect = [(True, 123), (False, 0)]
try:
reset(args)
self.fail("Should throw exception")
except FatalException:
# Expected
pass
try:
reset(args)
except NonFatalException:
self.fail("Shouldn't throw exception")
pass
pass
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch("ambari_server.serverSetup.get_YN_input")
@patch("__builtin__.raw_input")
@patch("ambari_server.serverSetup.is_root")
@patch("ambari_server.serverSetup.logger")
def test_reset_default(self, logger_mock, is_root_mock, raw_input_mock, get_YN_inputMock):
is_root_mock.return_value = True
get_YN_inputMock.return_value = False
raw_input_mock.return_value=""
args = MagicMock()
try:
reset(args)
self.fail("Should throw exception")
except FatalException as fe:
# Expected
self.assertTrue(fe.code == 1)
pass
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch.object(PGConfig, "_setup_db")
@patch("ambari_server.dbConfiguration_linux.print_info_msg")
@patch("ambari_server.dbConfiguration_linux.run_os_command")
@patch("ambari_server.serverSetup.is_root")
@patch("ambari_server.serverSetup.is_server_runing")
def test_silent_reset(self, is_server_runing_mock,
is_root_mock,
run_os_command_mock, print_info_msg_mock,
setup_db_mock):
is_root_mock.return_value = True
args = MagicMock()
del args.dbms
del args.database_index
del args.database_host
del args.database_port
del args.database_name
del args.database_username
del args.database_password
del args.persistence_type
del args.init_script_file
del args.drop_script_file
set_silent(True)
self.assertTrue(get_silent())
setup_db_mock.return_value = (0, None, None)
run_os_command_mock.return_value = (0, None, None)
is_server_runing_mock.return_value = (False, 0)
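# guard the call with SIGALRM so a regression that blocks on interactive input fails fast instead of hanging the suite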
def signal_handler(signum, frame):
self.fail("Timed out!")
signal.signal(signal.SIGALRM, signal_handler)
try:
signal.alarm(5)
rcode = reset(args)
signal.alarm(0)
self.assertEqual(None, rcode)
self.assertTrue(setup_db_mock.called)
finally:
signal.signal(signal.SIGALRM, signal.SIG_IGN)
pass
@only_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch("ambari_server.dbConfiguration_windows.MSSQLConfig._execute_db_script")
@patch("ambari_server.serverSetup.get_ambari_properties")
@patch("ambari_server.serverSetup.is_root")
@patch("ambari_server.serverSetup.is_server_runing")
def test_silent_reset(self, is_server_runing_mock,
is_root_mock, get_ambari_properties_mock,
execute_db_script_mock):
is_root_mock.return_value = True
args = MagicMock()
del args.dbms
del args.database_index
del args.database_host
del args.database_port
del args.database_name
del args.database_username
del args.database_password
del args.default_database_host
del args.persistence_type
del args.init_db_script_file
del args.cleanup_db_script_file
set_silent(True)
self.assertTrue(get_silent())
properties = Properties()
get_ambari_properties_mock.return_value = properties
is_server_runing_mock.return_value = (False, 0)
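# silent reset is expected to execute two DB scripts (assumed: cleanup followed by init) without prompting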
rcode = reset(args)
self.assertEqual(None, rcode)
self.assertEqual(execute_db_script_mock.call_count, 2)
@not_for_platform(PLATFORM_WINDOWS)
@patch("os.path.isdir", new = MagicMock(return_value=True))
@patch("os.access", new = MagicMock(return_value=True))
@patch.object(ServerClassPath, "get_full_ambari_classpath_escaped_for_shell",
new = MagicMock(return_value = '/etc/conf' + os.pathsep + 'test' + os.pathsep + 'path12'))
@patch("ambari_server_main.get_is_active_instance")
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch("sys.stdout.flush")
@patch("sys.stdout.write")
@patch("ambari_server_main.looking_for_pid")
@patch("ambari_server_main.wait_for_ui_start")
@patch("ambari_server_main.save_main_pid_ex")
@patch("ambari_server_main.check_exitcode")
@patch("os.makedirs")
@patch("ambari_server_main.locate_file")
@patch.object(_ambari_server_, "is_server_runing")
@patch("os.chown")
@patch("ambari_server.setupSecurity.get_master_key_location")
@patch("ambari_server.setupSecurity.save_master_key")
@patch("ambari_server.setupSecurity.get_is_persisted")
@patch("ambari_server.setupSecurity.get_is_secure")
@patch('os.chmod', autospec=True)
@patch("ambari_server.serverConfiguration.write_property")
@patch("ambari_server.serverConfiguration.get_validated_string_input")
@patch("os.environ")
@patch("ambari_server.dbConfiguration.get_ambari_properties")
@patch("ambari_server.setupSecurity.get_ambari_properties")
@patch("ambari_server.serverSetup.get_ambari_properties")
@patch("ambari_server.serverConfiguration.get_ambari_properties")
@patch("ambari_server_main.get_ambari_properties")
@patch("os.path.exists")
@patch("__builtin__.open")
@patch("subprocess.Popen")
@patch("ambari_server.serverConfiguration.search_file")
@patch("ambari_server_main.check_database_name_property")
@patch("ambari_server_main.find_jdk")
@patch("ambari_server_main.print_warning_msg")
@patch("ambari_server_main.print_info_msg")
@patch.object(PGConfig, "_check_postgre_up")
@patch("ambari_server_main.read_ambari_user")
@patch("ambari_server.setupSecurity.is_root")
@patch("ambari_server.dbConfiguration_linux.is_root")
@patch("ambari_server_main.is_root")
@patch.object(LinuxDBMSConfig, "_find_jdbc_driver")
@patch("getpass.getuser")
@patch("os.chdir")
@patch.object(ResourceFilesKeeper, "perform_housekeeping")
@patch.object(_ambari_server_, "logger")
def test_start(self, logger_mock, perform_housekeeping_mock, chdir_mock, getuser_mock, find_jdbc_driver_mock,
is_root_mock, is_root_2_mock, is_root_3_mock, read_ambari_user_mock,
check_postgre_up_mock, print_info_msg_mock, print_warning_msg_mock,
find_jdk_mock, check_database_name_property_mock, search_file_mock,
popenMock, openMock, pexistsMock,
get_ambari_properties_mock, get_ambari_properties_2_mock, get_ambari_properties_3_mock,
get_ambari_properties_4_mock, get_ambari_properties_5_mock, os_environ_mock,
get_validated_string_input_method, write_property_method,
os_chmod_method, get_is_secure_mock, get_is_persisted_mock,
save_master_key_method, get_master_key_location_method,
os_chown_mock, is_server_running_mock, locate_file_mock,
os_makedirs_mock, check_exitcode_mock, save_main_pid_ex_mock,
wait_for_ui_start_mock, looking_for_pid_mock, stdout_write_mock, stdout_flush_mock,
get_is_active_instance_mock):
def reset_mocks():
pexistsMock.reset_mock()
get_is_active_instance_mock.reset_mock()
get_is_active_instance_mock.return_value = True
args = MagicMock()
del args.dbms
del args.database_index
del args.database_host
del args.database_port
del args.database_name
del args.database_username
del args.database_password
del args.persistence_type
del args.sid_or_sname
del args.jdbc_url
del args.debug
del args.suspend_start
args.skip_properties_validation = False
return args
args = reset_mocks()
locate_file_mock.side_effect = lambda *args: '/bin/su' if args[0] == 'su' else '/bin/sh'
f = MagicMock()
f.readline.return_value = '42'
openMock.return_value = f
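# pretend a matching server process was found so start() can verify the JVM launched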
looking_for_pid_mock.return_value = [{
"pid": "777",
"exe": "/test",
"cmd": "test arg"
}]
wait_for_ui_start_mock.return_value = True
check_exitcode_mock.return_value = 0
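# minimal property set required by start(); the final scenario removes JAVA_HOME to trigger the 'Required properties are not found' failure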
p = Properties()
p.process_pair(PID_DIR_PROPERTY, '/var/run/ambari-server')
p.process_pair(SECURITY_IS_ENCRYPTION_ENABLED, 'False')
p.process_pair(JDBC_DATABASE_NAME_PROPERTY, 'some_value')
p.process_pair(NR_USER_PROPERTY, 'some_value')
p.process_pair(STACK_LOCATION_KEY, 'some_value')
p.process_pair(SERVER_VERSION_FILE_PATH, 'some_value')
p.process_pair(OS_TYPE_PROPERTY, 'some_value')
p.process_pair(JAVA_HOME_PROPERTY, 'some_value')
p.process_pair(JDK_NAME_PROPERTY, 'some_value')
p.process_pair(JCE_NAME_PROPERTY, 'some_value')
p.process_pair(COMMON_SERVICES_PATH_PROPERTY, 'some_value')
p.process_pair(JDBC_PASSWORD_PROPERTY, 'some_value')
p.process_pair(WEBAPP_DIR_PROPERTY, 'some_value')
p.process_pair(SHARED_RESOURCES_DIR, 'some_value')
p.process_pair(SECURITY_KEYS_DIR, 'some_value')
p.process_pair(JDBC_USER_NAME_PROPERTY, 'some_value')
p.process_pair(BOOTSTRAP_SCRIPT, 'some_value')
p.process_pair(OS_FAMILY_PROPERTY, 'some_value')
p.process_pair(RESOURCES_DIR_PROPERTY, 'some_value')
p.process_pair(CUSTOM_ACTION_DEFINITIONS, 'some_value')
p.process_pair(BOOTSTRAP_SETUP_AGENT_SCRIPT, 'some_value')
p.process_pair(STACKADVISOR_SCRIPT, 'some_value')
p.process_pair(BOOTSTRAP_DIR_PROPERTY, 'some_value')
p.process_pair(MPACKS_STAGING_PATH_PROPERTY, 'some_value')
get_ambari_properties_5_mock.return_value = get_ambari_properties_4_mock.return_value = \
get_ambari_properties_3_mock.return_value = get_ambari_properties_2_mock.return_value = \
get_ambari_properties_mock.return_value = p
get_is_secure_mock.return_value = False
get_is_persisted_mock.return_value = (False, None)
search_file_mock.return_value = None
is_server_running_mock.return_value = (True, 123)
os_chown_mock.return_value = None
# Checking "server is running"
pexistsMock.return_value = True
if get_platform() != PLATFORM_WINDOWS:
with patch("pwd.getpwnam") as getpwnam_mock:
pw = MagicMock()
# set the attributes directly; MagicMock records setattr(...) as a call instead of setting an attribute
pw.pw_uid = 0
pw.pw_gid = 0
getpwnam_mock.return_value = pw
try:
_ambari_server_.start(args)
self.fail("Should fail with 'Server is running'")
except FatalException as e:
# Expected
self.assertTrue('Ambari Server is already running.' in e.reason)
args = reset_mocks()
is_server_running_mock.return_value = (False, 0)
pexistsMock.return_value = False
# Checking situation when ambari user is not set up
read_ambari_user_mock.return_value = None
try:
_ambari_server_.start(args)
self.fail("Should fail with 'Can not detect a system user for Ambari'")
except FatalException as e:
# Expected
self.assertTrue('Unable to detect a system user for Ambari Server.' in e.reason)
# Checking start from non-root when current user is not the same as a
# custom user
args = reset_mocks()
read_ambari_user_mock.return_value = "dummy-user"
getuser_mock.return_value = "non_custom_user"
is_root_3_mock.return_value = \
is_root_2_mock.return_value = \
is_root_mock.return_value = False
try:
_ambari_server_.start(args)
self.fail("Should fail with 'Can not start ambari-server as user...'")
except FatalException as e:
# Expected
self.assertTrue('Unable to start Ambari Server as user' in e.reason)
# If not active instance, exception should be thrown
args = reset_mocks()
get_is_active_instance_mock.return_value = False
try:
_ambari_server_.start(args)
self.fail("Should fail with 'This is not an active instance. Shutting down...'")
except FatalException as e:
# Expected
self.assertTrue('This is not an active instance' in e.reason)
pass
# Checking "jdk not found"
args = reset_mocks()
is_root_3_mock.return_value = \
is_root_2_mock.return_value = \
is_root_mock.return_value = True
find_jdk_mock.return_value = None
try:
_ambari_server_.start(args)
self.fail("Should fail with 'No JDK found'")
except FatalException as e:
# Expected
self.assertTrue('No JDK found' in e.reason)
args = reset_mocks()
find_jdk_mock.return_value = "somewhere"
## Testing workflow under root
is_root_3_mock.return_value = \
is_root_2_mock.return_value = \
is_root_mock.return_value = True
# Remote DB
p.process_pair(JDBC_DATABASE_PROPERTY, 'oracle')
p.process_pair(PERSISTENCE_TYPE_PROPERTY, 'remote')
# Case when jdbc driver is not used
find_jdbc_driver_mock.return_value = -1
try:
_ambari_server_.start(args)
self.fail("Should fail with exception")
except FatalException as e:
self.assertTrue('Before starting Ambari Server' in e.reason)
args = reset_mocks()
# Remote DB
p.process_pair(JDBC_DATABASE_PROPERTY, 'oracle')
p.process_pair(PERSISTENCE_TYPE_PROPERTY, 'remote')
find_jdbc_driver_mock.reset_mock()
find_jdbc_driver_mock.return_value = -1
try:
_ambari_server_.start(args)
except FatalException as e:
# Ignored
pass
args = reset_mocks()
# Remote DB
p.process_pair(JDBC_DATABASE_PROPERTY, 'oracle')
p.process_pair(PERSISTENCE_TYPE_PROPERTY, 'remote')
find_jdbc_driver_mock.reset_mock()
find_jdbc_driver_mock.return_value = 0
# Test exception handling on resource files housekeeping
perform_housekeeping_mock.reset_mock()
perform_housekeeping_mock.side_effect = KeeperException("some_reason")
pexistsMock.return_value = True
try:
_ambari_server_.start(args)
self.fail("Should fail with exception")
except FatalException as e:
self.assertTrue('some_reason' in e.reason)
self.assertTrue(perform_housekeeping_mock.called)
perform_housekeeping_mock.side_effect = lambda *v, **kv: None
perform_housekeeping_mock.reset_mock()
self.assertFalse('Unable to start PostgreSQL server' in e.reason)
self.assertFalse(check_postgre_up_mock.called)
args = reset_mocks()
# Local DB
p.process_pair(JDBC_DATABASE_PROPERTY, 'postgres')
p.process_pair(PERSISTENCE_TYPE_PROPERTY, 'local')
check_postgre_up_mock.reset_mock()
# case: postgres failed to start
check_postgre_up_mock.return_value = (None, 1, "Unable to start PostgreSQL serv", "error")
try:
_ambari_server_.start(args)
self.fail("Should fail with 'Unable to start PostgreSQL server'")
except FatalException as e:
# Expected
self.assertTrue('Unable to start PostgreSQL server' in e.reason)
self.assertTrue(check_postgre_up_mock.called)
args = reset_mocks()
# Local DB
p.process_pair(JDBC_DATABASE_PROPERTY, 'postgres')
p.process_pair(PERSISTENCE_TYPE_PROPERTY, 'local')
check_postgre_up_mock.return_value = "running", 0, "success", ""
# Case: custom user is "root"
read_ambari_user_mock.return_value = "root"
# Java failed to start
proc = MagicMock()
proc.pid = -186
popenMock.return_value = proc
try:
_ambari_server_.start(args)
except FatalException as e:
# Expected
self.assertTrue(popenMock.called)
self.assertTrue('Ambari Server java process died' in e.reason)
self.assertTrue(perform_housekeeping_mock.called)
args = reset_mocks()
# Java OK
proc.pid = 186
popenMock.reset_mock()
_ambari_server_.start(args)
self.assertTrue(popenMock.called)
popen_arg = popenMock.call_args[0][0]
self.assertTrue(popen_arg[0] == "/bin/sh")
self.assertTrue(perform_housekeeping_mock.called)
args = reset_mocks()
# Local DB
p.process_pair(JDBC_DATABASE_PROPERTY, 'postgres')
p.process_pair(PERSISTENCE_TYPE_PROPERTY, 'local')
perform_housekeeping_mock.reset_mock()
popenMock.reset_mock()
# Case: custom user is not "root"
read_ambari_user_mock.return_value = "not-root-user"
_ambari_server_.start(args)
self.assertTrue(chdir_mock.called)
self.assertTrue(popenMock.called)
popen_arg = popenMock.call_args_list[0][0][0]
self.assertTrue("; /bin/su" in popen_arg[2])
self.assertTrue(perform_housekeeping_mock.called)
args = reset_mocks()
# Local DB
p.process_pair(JDBC_DATABASE_PROPERTY, 'postgres')
p.process_pair(PERSISTENCE_TYPE_PROPERTY, 'local')
check_postgre_up_mock.reset_mock()
popenMock.reset_mock()
## Testing workflow under non-root
is_root_3_mock.return_value = \
is_root_2_mock.return_value = \
is_root_mock.return_value = False
read_ambari_user_mock.return_value = "not-root-user"
getuser_mock.return_value = read_ambari_user_mock.return_value
_ambari_server_.start(args)
self.assertFalse(check_postgre_up_mock.called)
args = reset_mocks()
# Remote DB
p.process_pair(JDBC_DATABASE_PROPERTY, 'postgres')
p.process_pair(PERSISTENCE_TYPE_PROPERTY, 'remote')
_ambari_server_.start(args)
self.assertFalse(check_postgre_up_mock.called)
args = reset_mocks()
# Remote DB
p.process_pair(JDBC_DATABASE_PROPERTY, 'postgres')
p.process_pair(PERSISTENCE_TYPE_PROPERTY, 'remote')
# Checking call
_ambari_server_.start(args)
self.assertTrue(popenMock.called)
popen_arg = popenMock.call_args[0][0]
self.assertTrue(popen_arg[0] == "/bin/sh")
args = reset_mocks()
# Remote DB
p.process_pair(JDBC_DATABASE_PROPERTY, 'postgres')
p.process_pair(PERSISTENCE_TYPE_PROPERTY, 'remote')
# Test start under wrong user
read_ambari_user_mock.return_value = "not-root-user"
getuser_mock.return_value = "non_custom_user"
try:
_ambari_server_.start(args)
self.fail("Can not start ambari-server as user non_custom_user.")
except FatalException as e:
# Expected
self.assertTrue('Unable to start Ambari Server as user' in e.reason)
args = reset_mocks()
# Check environ master key is set
popenMock.reset_mock()
os_environ_mock.copy.return_value = {"a": "b",
SECURITY_KEY_ENV_VAR_NAME: "masterkey"}
p.process_pair(JDBC_DATABASE_PROPERTY, 'postgres')
p.process_pair(PERSISTENCE_TYPE_PROPERTY, 'local')
read_ambari_user_mock.return_value = "root"
is_root_3_mock.return_value = \
is_root_2_mock.return_value = \
is_root_mock.return_value = True
_ambari_server_.start(args)
self.assertFalse(get_validated_string_input_method.called)
self.assertFalse(save_master_key_method.called)
popen_arg = popenMock.call_args[1]['env']
self.assertEquals(os_environ_mock.copy.return_value, popen_arg)
args = reset_mocks()
# Check environ master key is not set
popenMock.reset_mock()
os_environ_mock.reset_mock()
p.process_pair(SECURITY_IS_ENCRYPTION_ENABLED, 'True')
os_environ_mock.copy.return_value = {"a": "b"}
p.process_pair(JDBC_DATABASE_PROPERTY, 'postgres')
p.process_pair(PERSISTENCE_TYPE_PROPERTY, 'local')
read_ambari_user_mock.return_value = "root"
is_root_3_mock.return_value = \
is_root_2_mock.return_value = \
is_root_mock.return_value = True
get_validated_string_input_method.return_value = "masterkey"
os_chmod_method.return_value = None
get_is_secure_mock.return_value = True
_ambari_server_.start(args)
self.assertTrue(get_validated_string_input_method.called)
self.assertTrue(save_master_key_method.called)
popen_arg = popenMock.call_args[1]['env']
self.assertEquals(os_environ_mock.copy.return_value, popen_arg)
# Checking situation when required properties not set up
args = reset_mocks()
p.removeProp(JAVA_HOME_PROPERTY)
get_ambari_properties_mock.return_value = p
try:
_ambari_server_.start(args)
self.fail("Should fail with 'Required properties are not found:'")
except FatalException as e:
# Expected
self.assertTrue('Required properties are not found:' in e.reason)
@not_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch.object(_ambari_server_, "is_server_runing")
@patch("os.remove")
@patch("os.killpg")
@patch("os.getpgid")
@patch.object(_ambari_server_, "print_info_msg")
def test_stop(self, print_info_msg_mock, gpidMock, removeMock,
killMock, isServerRuningMock):
isServerRuningMock.return_value = (True, 123)
_ambari_server_.stop(None)
self.assertTrue(killMock.called)
self.assertTrue(removeMock.called)
pass
@only_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch("win32serviceutil.StopServiceWithDeps")
@patch("win32serviceutil.StopService")
@patch("win32serviceutil.WaitForServiceStatus")
def test_stop(self, WaitForServiceStatusMock, StopServiceMock, StopServiceWithDepsMock):
_ambari_server_.stop()
self.assertTrue(StopServiceWithDepsMock.called)
self.assertFalse(StopServiceMock.called)
self.assertTrue(WaitForServiceStatusMock.called)
pass
@patch.object(_ambari_server_, "BackupRestore_main")
def test_backup(self, bkrestore_mock):
args = ["", "/some/path/file.zip"]
_ambari_server_.backup(args)
self.assertTrue(bkrestore_mock.called)
pass
@patch.object(_ambari_server_, "BackupRestore_main")
def test_backup_no_path(self, bkrestore_mock):
args = [""]
_ambari_server_.backup(args)
self.assertTrue(bkrestore_mock.called)
pass
@patch.object(_ambari_server_, "BackupRestore_main")
@patch.object(_ambari_server_, "logger")
def test_restore(self, logger_mock, bkrestore_mock):
args = ["", "/some/path/file.zip"]
_ambari_server_.restore(args)
self.assertTrue(bkrestore_mock.called)
pass
@patch.object(_ambari_server_, "BackupRestore_main")
@patch.object(_ambari_server_, "logger")
def test_restore_no_path(self, logger_mock, bkrestore_mock):
args = [""]
_ambari_server_.restore(args)
self.assertTrue(bkrestore_mock.called)
pass
@patch("ambari_server.serverUpgrade.get_ambari_properties")
@patch("os.listdir")
@patch("os.path.isfile")
@patch("shutil.move")
def test_move_user_custom_actions(self, shutil_move_mock, os_path_isfile_mock, os_listdir_mock, get_ambari_properties_mock):
properties = Properties()
properties.process_pair(RESOURCES_DIR_PROPERTY, 'some/test/fake/resources/dir/path')
get_ambari_properties_mock.return_value = properties
os_listdir_mock.return_value = ['sometestdir', 'sometestfile.md', 'sometestfile.py', 'sometestfile2.java', 'sometestfile2.py', 'sometestdir2.py']
os_path_isfile_mock.side_effect = [False, True, True, True, True, False]
move_user_custom_actions()
custom_actions_scripts_dir = os.path.join('some/test/fake/resources/dir/path', 'custom_actions', 'scripts')
shutil_move_mock.assert_has_calls([call(os.path.join('some/test/fake/resources/dir/path', 'custom_actions', 'sometestfile.py'), custom_actions_scripts_dir),
call(os.path.join('some/test/fake/resources/dir/path', 'custom_actions', 'sometestfile2.py'), custom_actions_scripts_dir)])
self.assertEqual(shutil_move_mock.call_count, 2)
pass
@patch("os.path.isdir", new = MagicMock(return_value=True))
@patch("os.access", new = MagicMock(return_value=True))
@patch("ambari_server.serverConfiguration.get_conf_dir")
@patch("ambari_server.serverUpgrade.run_os_command")
@patch("ambari_server.serverUpgrade.get_java_exe_path")
def test_run_stack_upgrade(self, java_exe_path_mock, run_os_command_mock,
get_conf_dir_mock):
java_exe_path_mock.return_value = "/usr/lib/java/bin/java"
run_os_command_mock.return_value = (0, None, None)
get_conf_dir_mock.return_value = '/etc/conf'
stackIdMap = {'HDP' : '2.0', 'repo_url' : 'http://test.com'}
run_stack_upgrade(None, 'HDP', '2.0', 'http://test.com', None)
self.assertTrue(java_exe_path_mock.called)
self.assertTrue(get_conf_dir_mock.called)
self.assertTrue(run_os_command_mock.called)
run_os_command_mock.assert_called_with('/usr/lib/java/bin/java -cp \'/etc/conf:/usr/lib/ambari-server/*\' '
'org.apache.ambari.server.upgrade.StackUpgradeHelper '
'updateStackId ' + "'" + json.dumps(stackIdMap) + "'" +
' > ' + os.sep + 'var' + os.sep + 'log' + os.sep + 'ambari-server' + os.sep +
'ambari-server.out 2>&1')
pass
@patch("os.path.isdir", new = MagicMock(return_value=True))
@patch("os.access", new = MagicMock(return_value=True))
@patch.object(ServerClassPath, "get_full_ambari_classpath_escaped_for_shell",
new = MagicMock(return_value = '/etc/conf' + os.pathsep + 'test' + os.pathsep + 'path12'))
@patch("ambari_server.serverConfiguration.get_conf_dir")
@patch("ambari_server.serverUpgrade.run_os_command")
@patch("ambari_server.serverUpgrade.get_java_exe_path")
def test_run_stack_upgrade_with_url_os(self, java_exe_path_mock, run_os_command_mock,
get_conf_dir_mock):
java_exe_path_mock.return_value = "/usr/lib/java/bin/java"
run_os_command_mock.return_value = (0, None, None)
get_conf_dir_mock.return_value = '/etc/conf'
stackIdMap = {'HDP' : '2.0', 'repo_url': 'http://test.com', 'repo_url_os': 'centos5,centos6'}
run_stack_upgrade(None, 'HDP', '2.0', 'http://test.com', 'centos5,centos6')
self.assertTrue(java_exe_path_mock.called)
self.assertTrue(get_conf_dir_mock.called)
self.assertTrue(run_os_command_mock.called)
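# note: ':' between 'test' and 'path12' is hardcoded below, so this expected command assumes a POSIX os.pathsep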
run_os_command_mock.assert_called_with('/usr/lib/java/bin/java -cp /etc/conf' + os.pathsep + 'test:path12 '
'org.apache.ambari.server.upgrade.StackUpgradeHelper '
'updateStackId ' + "'" + json.dumps(stackIdMap) + "'" +
' > ' + os.sep + 'var' + os.sep + 'log' + os.sep + 'ambari-server' + os.sep +
'ambari-server.out 2>&1')
pass
@patch("os.path.isdir", new = MagicMock(return_value=True))
@patch("os.access", new = MagicMock(return_value=True))
@patch.object(ServerClassPath, "get_full_ambari_classpath_escaped_for_shell",
new = MagicMock(return_value = '/etc/conf' + os.pathsep + 'test' +
os.pathsep + 'path12' + os.pathsep + '/path/to/jdbc.jar'))
@patch("ambari_server.serverUpgrade.ensure_jdbc_driver_is_installed")
@patch("ambari_server.serverUpgrade.get_jdbc_driver_path")
@patch("ambari_server.serverUpgrade.ensure_can_start_under_current_user")
@patch("ambari_server.serverUpgrade.generate_env")
@patch("ambari_server.serverUpgrade.read_ambari_user")
@patch("ambari_server.serverConfiguration.get_conf_dir")
@patch("ambari_server.serverUpgrade.run_os_command")
@patch("ambari_server.serverUpgrade.get_java_exe_path")
@patch("ambari_server.serverUpgrade.get_ambari_properties")
@patch("ambari_server.serverUpgrade.get_YN_input")
def test_run_schema_upgrade(self, get_YN_input_mock, get_ambari_properties_mock, java_exe_path_mock, run_os_command_mock,
get_conf_dir_mock,
read_ambari_user_mock, generate_env_mock,
ensure_can_start_under_current_user_mock, get_jdbc_mock,
ensure_jdbc_driver_is_installed_mock):
java_exe_path_mock.return_value = "/usr/lib/java/bin/java"
run_os_command_mock.return_value = (0, None, None)
get_conf_dir_mock.return_value = '/etc/conf'
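# expected schema-upgrade command, assembled from the mocked java path and classpath; output is redirected to ambari-server.out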
command = '/usr/lib/java/bin/java -cp /etc/conf' + os.pathsep + 'test' + os.pathsep + 'path12' + \
os.pathsep + '/path/to/jdbc.jar ' \
'org.apache.ambari.server.upgrade.SchemaUpgradeHelper ' \
'> ' + os.sep + 'var' + os.sep + 'log' + os.sep + 'ambari-server' + os.sep + 'ambari-server.out 2>&1'
environ = {}
generate_env_mock.return_value = environ
ensure_can_start_under_current_user_mock.return_value = "root"
read_ambari_user_mock.return_value = "ambari"
properties = Properties()
properties.process_pair(PERSISTENCE_TYPE_PROPERTY, "local")
get_ambari_properties_mock.return_value = properties
get_YN_input_mock.return_value = True
get_jdbc_mock.return_value = '/path/to/jdbc.jar'
run_schema_upgrade(None)
self.assertTrue(java_exe_path_mock.called)
self.assertTrue(ensure_can_start_under_current_user_mock.called)
self.assertTrue(generate_env_mock.called)
self.assertTrue(read_ambari_user_mock.called)
self.assertTrue(run_os_command_mock.called)
run_os_command_mock.assert_called_with(command, env=environ)
@patch("os.path.isdir", new = MagicMock(return_value=True))
@patch("os.access", new = MagicMock(return_value=True))
@patch("ambari_server.serverConfiguration.get_conf_dir")
@patch.object(ServerClassPath, "get_full_ambari_classpath_escaped_for_shell", new = MagicMock(return_value = 'test' + os.pathsep + 'path12'))
@patch("ambari_server.serverUpgrade.run_os_command")
@patch("ambari_server.serverUpgrade.get_java_exe_path")
def test_run_metainfo_upgrade(self, java_exe_path_mock, run_os_command_mock,
get_conf_dir_mock):
java_exe_path_mock.return_value = "/usr/lib/java/bin/java"
run_os_command_mock.return_value = (0, None, None)
get_conf_dir_mock.return_value = '/etc/conf'
json_map = {'a': 'http://newurl'}
run_metainfo_upgrade(None, json_map)
self.assertTrue(java_exe_path_mock.called)
self.assertTrue(run_os_command_mock.called)
run_os_command_mock.assert_called_with('/usr/lib/java/bin/java '
'-cp test' + os.pathsep + 'path12 '
'org.apache.ambari.server.upgrade.StackUpgradeHelper updateMetaInfo ' +
"'" + json.dumps(json_map) + "'" +
' > ' + os.sep + 'var' + os.sep + 'log' + os.sep + 'ambari-server' +
os.sep + 'ambari-server.out 2>&1')
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch("os.path.isfile")
@patch("ambari_server.serverSetup.get_ambari_properties")
@patch("os.path.exists")
@patch("os.path.lexists")
@patch("os.remove")
@patch("os.symlink")
@patch("shutil.copy")
def test_proceedJDBCProperties(self, copy_mock, os_symlink_mock, os_remove_mock, lexists_mock, exists_mock,
get_ambari_properties_mock, isfile_mock):
args = MagicMock()
# test incorrect path to jdbc-driver
isfile_mock.return_value = False
args.jdbc_driver = "test jdbc"
fail = False
try:
proceedJDBCProperties(args)
except FatalException as e:
self.assertEquals("File test jdbc does not exist!", e.reason)
fail = True
self.assertTrue(fail)
# test incorrect jdbc-db
isfile_mock.return_value = True
args.jdbc_db = "incorrect db"
fail = False
try:
proceedJDBCProperties(args)
except FatalException as e:
self.assertEquals("Unsupported database name incorrect db. Please see help for more information.", e.reason)
fail = True
self.assertTrue(fail)
# test getAmbariProperties failed
args.jdbc_db = "mysql"
get_ambari_properties_mock.return_value = -1
fail = False
try:
proceedJDBCProperties(args)
except FatalException as e:
self.assertEquals("Error getting ambari properties", e.reason)
fail = True
self.assertTrue(fail)
# test getAmbariProperties failed
args.jdbc_db = "mssql"
get_ambari_properties_mock.return_value = -1
fail = False
try:
proceedJDBCProperties(args)
except FatalException as e:
self.assertEquals("Error getting ambari properties", e.reason)
fail = True
self.assertTrue(fail)
# test get resource dir param failed
args.jdbc_db = "oracle"
p = MagicMock()
get_ambari_properties_mock.return_value = p
p.__getitem__.side_effect = KeyError("test exception")
exists_mock.return_value = False
fail = False
try:
proceedJDBCProperties(args)
except FatalException as e:
fail = True
self.assertTrue(fail)
# test copy jdbc failed and symlink exists
lexists_mock.return_value = True
args.jdbc_db = "postgres"
get_ambari_properties_mock.return_value = MagicMock()
isfile_mock.side_effect = [True, False]
exists_mock.return_value = True
fail = False
# shutil.copy is invoked with (src, dst), so the side effect must accept arbitrary arguments
def side_effect(*args, **kwargs):
raise Exception(-1, "Failed to copy!")
copy_mock.side_effect = side_effect
try:
proceedJDBCProperties(args)
except FatalException as e:
fail = True
self.assertTrue(fail)
self.assertTrue(os_remove_mock.called)
# test success symlink creation
get_ambari_properties_mock.reset_mock()
os_remove_mock.reset_mock()
p = MagicMock()
get_ambari_properties_mock.return_value = p
p.__getitem__.side_effect = None
p.__getitem__.return_value = "somewhere"
copy_mock.reset_mock()
copy_mock.side_effect = None
isfile_mock.side_effect = [True, False]
proceedJDBCProperties(args)
self.assertTrue(os_remove_mock.called)
self.assertTrue(os_symlink_mock.called)
self.assertTrue(copy_mock.called)
self.assertEquals(os_symlink_mock.call_args_list[0][0][0], os.path.join("somewhere","test jdbc"))
self.assertEquals(os_symlink_mock.call_args_list[0][0][1], os.path.join("somewhere","postgres-jdbc-driver.jar"))
pass
@only_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch("os.path.isfile")
@patch("ambari_server.serverSetup.get_ambari_properties")
@patch("os.path.exists")
@patch("os.path.lexists")
@patch("os.remove")
@patch("os.symlink")
@patch("shutil.copy")
def test_proceedJDBCProperties(self, copy_mock, os_symlink_mock, os_remove_mock, lexists_mock, exists_mock,
get_ambari_properties_mock, isfile_mock):
args = MagicMock()
# test incorrect path to jdbc-driver
isfile_mock.return_value = False
args.jdbc_driver = "test jdbc"
fail = False
try:
proceedJDBCProperties(args)
except FatalException as e:
self.assertEquals("File test jdbc does not exist!", e.reason)
fail = True
self.assertTrue(fail)
# test incorrect jdbc-db
isfile_mock.return_value = True
args.jdbc_db = "incorrect db"
fail = False
try:
proceedJDBCProperties(args)
except FatalException as e:
self.assertEquals("Unsupported database name incorrect db. Please see help for more information.", e.reason)
fail = True
self.assertTrue(fail)
# test jdbc-db handling when get_ambari_properties fails: the Windows mssql flow is expected not to raise
args.jdbc_db = "mssql"
get_ambari_properties_mock.return_value = -1
fail = False
try:
proceedJDBCProperties(args)
except FatalException as e:
self.assertEquals("Error getting ambari properties", e.reason)
fail = True
self.assertFalse(fail)
pass
@patch("shutil.copytree")
@patch("os.makedirs")
@patch("os.path.islink")
@patch("os.path.exists")
@patch("os.path.getctime")
@patch("re.compile")
@patch("os.path.join")
@patch("os.path.basename")
@patch("os.path.isdir")
@patch("glob.glob")
def test_find_and_copy_custom_services(self, glob_mock, isdir_mock, basename_mock, join_mock, re_compile_mock,
getctime_mock, exists_mock, islink_mock, makedirs_mock, copytree_mock):
# service/version dir is not link
glob_mock.return_value = [""]
isdir_mock.side_effect = [False, True, True]
islink_mock.return_value = False
exists_mock.side_effect = [True, False]
find_and_copy_custom_services("", "", "", "", "", "/common-services/")
self.assertTrue(makedirs_mock.called)
self.assertTrue(copytree_mock.called)
# service/version dir is link
makedirs_mock.reset_mock()
copytree_mock.reset_mock()
islink_mock.side_effect = [False, True]
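# note: find_and_copy_custom_services is not invoked again here, so the negative assertions below
# hold vacuously; exercising the symlink branch would require re-priming isdir/exists and calling once more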
self.assertFalse(makedirs_mock.called)
self.assertFalse(copytree_mock.called)
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch("__builtin__.open")
@patch("os.path.isfile")
@patch("os.path.lexists")
@patch("os.path.exists")
@patch("os.remove")
@patch("os.symlink")
@patch.object(Properties, "store")
@patch("ambari_server.serverUpgrade.adjust_directory_permissions")
@patch("ambari_server.serverUpgrade.print_warning_msg")
@patch("ambari_server.serverUpgrade.read_ambari_user")
@patch("ambari_server.serverUpgrade.run_schema_upgrade")
@patch("ambari_server.dbConfiguration_linux.run_os_command")
@patch("ambari_server.serverConfiguration.find_properties_file")
@patch("ambari_server.serverUpgrade.update_ambari_properties")
@patch("ambari_server.serverUpgrade.is_root")
@patch("ambari_server.serverConfiguration.write_property")
@patch("ambari_server.serverConfiguration.get_ambari_version")
@patch("ambari_server.dbConfiguration.get_ambari_properties")
@patch("ambari_server.serverConfiguration.get_ambari_properties")
@patch("ambari_server.serverUpgrade.get_ambari_properties")
@patch("ambari_server.serverUpgrade.upgrade_local_repo")
@patch("ambari_server.serverUpgrade.move_user_custom_actions")
@patch("ambari_server.serverUpgrade.update_krb_jaas_login_properties")
@patch("ambari_server.serverUpgrade.update_ambari_env")
@patch("ambari_server.setupMpacks.get_replay_log_file")
@patch("ambari_server.serverUpgrade.logger")
@patch.object(PGConfig, "_change_db_files_owner", return_value=0)
def test_upgrade_from_161(self, change_db_files_owner_mock, logger_mock, get_replay_log_file_mock, update_ambari_env_mock, update_krb_jaas_login_properties_mock, move_user_custom_actions_mock, upgrade_local_repo_mock, get_ambari_properties_mock,
get_ambari_properties_2_mock, get_ambari_properties_3_mock, get_ambari_version_mock, write_property_mock,
is_root_mock, update_ambari_properties_mock, find_properties_file_mock, run_os_command_mock,
run_schema_upgrade_mock, read_ambari_user_mock, print_warning_msg_mock,
adjust_directory_permissions_mock, properties_store_mock,
os_symlink_mock, os_remove_mock, exists_mock, lexists_mock, isfile_mock, open_mock):
def reset_mocks():
run_os_command_mock.reset_mock()
write_property_mock.reset_mock()
isfile_mock.reset_mock()
lexists_mock.reeset_mock()
os_symlink_mock.reset_mock()
lexists_mock.return_value = False
args = MagicMock()
del args.dbms
del args.database_index
del args.database_host
del args.database_port
del args.database_name
del args.database_username
del args.database_password
del args.database_windows_auth
del args.default_database_host
del args.init_db_script_file
del args.cleanup_db_script_file
del args.must_set_database_options
del args.sid_or_sname
del args.jdbc_url
args.jdbc_driver = None
args.jdbc_db = None
args.silent = False
return args
args = reset_mocks()
args.dbms = "postgres"
is_root_mock.return_value = True
update_ambari_properties_mock.return_value = 0
update_ambari_env_mock.return_value = 0
get_ambari_version_mock.return_value = "1.7.0"
move_user_custom_actions_mock.return_value = None
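# -2 from update_krb_jaas_login_properties is treated as a non-fatal outcome here (exact meaning assumed from usage)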
update_krb_jaas_login_properties_mock.return_value = -2
# Local Postgres
# In Ambari 1.6.1 for an embedded postgres database, the "server.jdbc.database" property stored the DB name,
# and the DB type was assumed to be "postgres" if the "server.persistence.type" property was "local"
properties = Properties()
properties.process_pair(PERSISTENCE_TYPE_PROPERTY, "local")
properties.process_pair(JDBC_DATABASE_PROPERTY, "ambari")
properties.process_pair(RESOURCES_DIR_PROPERTY, "/tmp")
get_ambari_properties_mock.return_value = properties
properties2 = Properties()
properties2.process_pair(PERSISTENCE_TYPE_PROPERTY, "local")
properties2.process_pair(JDBC_DATABASE_NAME_PROPERTY, "ambari")
properties2.process_pair(JDBC_DATABASE_PROPERTY, "postgres")
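# upgrade() re-reads ambari.properties several times; the first read returns the pre-upgrade
# 1.6.1-style properties, later reads return the migrated ones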
get_ambari_properties_3_mock.side_effect = get_ambari_properties_2_mock.side_effect = [properties, properties2, properties2]
get_replay_log_file_mock.return_value = "/invalid_path/mpacks_replay.log"
run_schema_upgrade_mock.return_value = 0
read_ambari_user_mock.return_value = "custom_user"
run_os_command_mock.return_value = (0, "", "")
isfile_mock.return_value = False
try:
upgrade(args)
except FatalException as fe:
self.fail("Did not expect failure: " + str(fe))
else:
self.assertTrue(write_property_mock.called)
self.assertEquals(write_property_mock.call_args_list[0][0][0], JDBC_DATABASE_NAME_PROPERTY)
self.assertEquals(write_property_mock.call_args_list[0][0][1], "ambari")
self.assertEquals(write_property_mock.call_args_list[1][0][0], JDBC_DATABASE_PROPERTY)
self.assertEquals(write_property_mock.call_args_list[1][0][1], "postgres")
self.assertFalse(move_user_custom_actions_mock.called)
args = reset_mocks()
# External Postgres
# In Ambari 1.6.1 for an external postgres database, the "server.jdbc.database" property stored the
# DB type ("postgres"), and the "server.jdbc.schema" property stored the DB name.
properties = Properties()
properties.process_pair(PERSISTENCE_TYPE_PROPERTY, "remote")
properties.process_pair(JDBC_DATABASE_PROPERTY, "postgres")
properties.process_pair(JDBC_RCA_SCHEMA_PROPERTY, "ambari")
properties.process_pair(JDBC_URL_PROPERTY, "jdbc:postgresql://c6410.ambari.apache.org:5432/ambari")
properties2 = Properties()
properties2.process_pair(PERSISTENCE_TYPE_PROPERTY, "remote")
properties2.process_pair(JDBC_DATABASE_NAME_PROPERTY, "ambari")
properties2.process_pair(JDBC_DATABASE_PROPERTY, "postgres")
properties2.process_pair(JDBC_RCA_SCHEMA_PROPERTY, "ambari")
properties2.process_pair(JDBC_URL_PROPERTY, "jdbc:postgresql://c6410.ambari.apache.org:5432/ambari")
get_ambari_properties_mock.return_value = properties
get_ambari_properties_3_mock.side_effect = get_ambari_properties_2_mock.side_effect = [properties, properties2, properties2]
exists_mock.return_value = True
try:
upgrade(args)
except FatalException as fe:
self.fail("Did not expect failure: " + str(fe))
else:
self.assertTrue(write_property_mock.called)
self.assertFalse(run_os_command_mock.called)
self.assertFalse(move_user_custom_actions_mock.called)
args = reset_mocks()
# External Postgres missing DB type, so it should be set based on the JDBC URL.
properties = Properties()
properties.process_pair(PERSISTENCE_TYPE_PROPERTY, "remote")
properties.process_pair(JDBC_RCA_SCHEMA_PROPERTY, "ambari")
properties.process_pair(JDBC_URL_PROPERTY, "jdbc:postgresql://c6410.ambari.apache.org:5432/ambari")
get_ambari_properties_mock.return_value = properties
get_ambari_properties_3_mock.side_effect = get_ambari_properties_2_mock.side_effect = [properties, properties2, properties2]
try:
upgrade(args)
except FatalException as fe:
self.fail("Did not expect failure: " + str(fe))
else:
self.assertTrue(write_property_mock.call_count == 2)
self.assertFalse(move_user_custom_actions_mock.called)
args = reset_mocks()
# External MySQL
# In Ambari 1.6.1 for an external MySQL database, the "server.jdbc.database" property stored the DB type ("mysql"),
# And the "server.jdbc.schema" property stored the DB name.
properties = Properties()
properties.process_pair(PERSISTENCE_TYPE_PROPERTY, "remote")
properties.process_pair(JDBC_DATABASE_PROPERTY, "mysql")
properties.process_pair(JDBC_RCA_SCHEMA_PROPERTY, "ambari")
properties.process_pair(JDBC_URL_PROPERTY, "jdbc:mysql://c6409.ambari.apache.org:3306/ambari")
properties2 = Properties()
properties2.process_pair(PERSISTENCE_TYPE_PROPERTY, "remote")
properties2.process_pair(JDBC_DATABASE_PROPERTY, "mysql")
properties2.process_pair(JDBC_DATABASE_NAME_PROPERTY, "ambari")
properties2.process_pair(JDBC_RCA_SCHEMA_PROPERTY, "ambari")
properties2.process_pair(JDBC_URL_PROPERTY, "jdbc:mysql://c6409.ambari.apache.org:3306/ambari")
get_ambari_properties_mock.return_value = properties
get_ambari_properties_3_mock.side_effect = get_ambari_properties_2_mock.side_effect = [properties, properties2, properties2]
isfile_mock.side_effect = [False, True, False, False, False]
try:
upgrade(args)
except FatalException as fe:
self.fail("Did not expect failure: " + str(fe))
else:
self.assertTrue(write_property_mock.called)
self.assertFalse(move_user_custom_actions_mock.called)
self.assertTrue(os_symlink_mock.called)
self.assertTrue(os_symlink_mock.call_args_list[0][0][0] == "/var/lib/ambari-server/resources/mysql-connector-java.jar")
self.assertTrue(os_symlink_mock.call_args_list[0][0][1] == "/var/lib/ambari-server/resources/mysql-jdbc-driver.jar")
args = reset_mocks()
# External MySQL missing DB type, so it should be set based on the JDBC URL.
properties = Properties()
properties.process_pair(PERSISTENCE_TYPE_PROPERTY, "remote")
properties.process_pair(JDBC_RCA_SCHEMA_PROPERTY, "ambari")
properties.process_pair(JDBC_URL_PROPERTY, "jdbc:mysql://c6409.ambari.apache.org:3306/ambari")
get_ambari_properties_mock.return_value = properties
get_ambari_properties_3_mock.side_effect = get_ambari_properties_2_mock.side_effect = [properties, properties2, properties2]
isfile_mock.side_effect = None
try:
upgrade(args)
except FatalException as fe:
self.fail("Did not expect failure: " + str(fe))
else:
self.assertTrue(write_property_mock.call_count == 2)
self.assertFalse(move_user_custom_actions_mock.called)
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch("__builtin__.open")
@patch("os.path.isfile")
@patch("os.path.exists")
@patch("os.path.lexists")
@patch("os.remove")
@patch("os.symlink")
@patch.object(Properties, "store")
@patch.object(PGConfig, "_change_db_files_owner")
@patch("ambari_server.serverConfiguration.find_properties_file")
@patch("ambari_server.serverUpgrade.adjust_directory_permissions")
@patch("ambari_server.serverUpgrade.print_warning_msg")
@patch("ambari_server.serverUpgrade.read_ambari_user")
@patch("ambari_server.serverUpgrade.run_schema_upgrade")
@patch("ambari_server.serverUpgrade.update_ambari_properties")
@patch("ambari_server.serverUpgrade.parse_properties_file")
@patch("ambari_server.serverUpgrade.get_ambari_version")
@patch("ambari_server.serverConfiguration.get_ambari_version")
@patch("ambari_server.serverUpgrade.is_root")
@patch("ambari_server.dbConfiguration.get_ambari_properties")
@patch("ambari_server.serverConfiguration.get_ambari_properties")
@patch("ambari_server.serverUpgrade.get_ambari_properties")
@patch("ambari_server.serverUpgrade.upgrade_local_repo")
@patch("ambari_server.serverUpgrade.move_user_custom_actions")
@patch("ambari_server.serverUpgrade.update_krb_jaas_login_properties")
@patch("ambari_server.serverUpgrade.update_ambari_env")
def test_upgrade(self, update_ambari_env_mock, update_krb_jaas_login_properties_mock, move_user_custom_actions, upgrade_local_repo_mock,
get_ambari_properties_mock, get_ambari_properties_2_mock, get_ambari_properties_3_mock,
is_root_mock, get_ambari_version_mock, get_ambari_version_2_mock,
parse_properties_file_mock,
update_ambari_properties_mock, run_schema_upgrade_mock,
read_ambari_user_mock, print_warning_msg_mock,
adjust_directory_permissions_mock,
find_properties_file_mock, change_db_files_owner_mock, properties_store_mock,
os_symlink_mock, os_remove_mock, lexists_mock, exists_mock, isfile_mock, open_mock):
def reset_mocks():
isfile_mock.reset_mock()
args = MagicMock()
del args.database_index
del args.dbms
del args.database_host
del args.database_port
del args.database_name
del args.database_username
del args.database_password
del args.persistence_type
del args.sid_or_sname
del args.jdbc_url
args.must_set_database_options = True
return args
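# Note on the del statements above: MagicMock fabricates attributes on
# first access, so deleting them makes hasattr(args, ...) return False and
# steers the code under test down its "option not supplied" branch.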
args = reset_mocks()
properties = Properties()
get_ambari_properties_3_mock.return_value = get_ambari_properties_2_mock.return_value = \
get_ambari_properties_mock.return_value = properties
update_ambari_properties_mock.return_value = 0
update_ambari_env_mock.return_value = 0
run_schema_upgrade_mock.return_value = 0
isfile_mock.return_value = False
get_ambari_version_2_mock.return_value = get_ambari_version_mock.return_value = CURR_AMBARI_VERSION
move_user_custom_actions.return_value = None
update_krb_jaas_login_properties_mock.return_value = -2
# Testing call under non-root
is_root_mock.return_value = False
try:
upgrade(args)
self.fail("Should throw exception")
except FatalException as fe:
# Expected
self.assertTrue("root-level" in fe.reason)
pass
args = reset_mocks()
# Testing calls under root
is_root_mock.return_value = True
# Testing with undefined custom user
read_ambari_user_mock.return_value = None
run_schema_upgrade_mock.return_value = 0
change_db_files_owner_mock.return_value = 0
exists_mock.return_value = True
upgrade(args)
self.assertTrue(print_warning_msg_mock.called)
warning_args = print_warning_msg_mock.call_args[0][0]
self.assertTrue("custom ambari user" in warning_args)
self.assertTrue(upgrade_local_repo_mock.called)
self.assertTrue(move_user_custom_actions.called)
args = reset_mocks()
# Testing with defined custom user
read_ambari_user_mock.return_value = "ambari-custom-user"
upgrade(args)
self.assertTrue(adjust_directory_permissions_mock.called)
args = reset_mocks()
run_schema_upgrade_mock.return_value = 0
parse_properties_file_mock.called = False
move_user_custom_actions.called = False
retcode = upgrade(args)
self.assertTrue(get_ambari_properties_mock.called)
self.assertTrue(get_ambari_properties_2_mock.called)
self.assertNotEqual(-1, retcode)
self.assertTrue(parse_properties_file_mock.called)
self.assertTrue(run_schema_upgrade_mock.called)
self.assertTrue(move_user_custom_actions.called)
# Assert that move_user_custom_actions is called on upgrade to Ambari == 2.0.0
get_ambari_version_2_mock.return_value = get_ambari_version_mock.return_value = '2.0.0'
move_user_custom_actions.called = False
upgrade(args)
self.assertTrue(move_user_custom_actions.called)
# Assert that move_user_custom_actions is not called on upgrade to Ambari < 2.0.0
get_ambari_version_2_mock.return_value = get_ambari_version_mock.return_value = '1.6.0'
move_user_custom_actions.called = False
upgrade(args)
self.assertFalse(move_user_custom_actions.called)
get_ambari_version_2_mock.return_value = get_ambari_version_mock.return_value = CURR_AMBARI_VERSION
# Test the case where get_ambari_properties fails
args = reset_mocks()
get_ambari_properties_3_mock.return_value = get_ambari_properties_2_mock.return_value = \
get_ambari_properties_mock.return_value = -1
fail = False
try:
upgrade(args)
except FatalException as e:
self.assertEquals("Error getting ambari properties", e.reason)
fail = True
self.assertTrue(fail)
# Test the case where reading the resources dir property fails
args = reset_mocks()
p = MagicMock()
get_ambari_properties_mock.reset_mock()
get_ambari_properties_2_mock.reset_mock()
get_ambari_properties_3_mock.reset_mock()
get_ambari_properties_3_mock.return_value = get_ambari_properties_2_mock.return_value = \
get_ambari_properties_mock.return_value = p
p.__getitem__.side_effect = ["something", "something", "something", "something", KeyError("test exception")]
exists_mock.return_value = False
fail = False
try:
upgrade(args)
except FatalException as e:
fail = True
self.assertTrue(fail)
# Test the case where a JDBC driver is present in resources and a symlink already exists
args = reset_mocks()
props = Properties()
props.process_pair(JDBC_DATABASE_NAME_PROPERTY, "something")
props.process_pair(RESOURCES_DIR_PROPERTY, "resources")
get_ambari_properties_3_mock.return_value = get_ambari_properties_2_mock.return_value = \
get_ambari_properties_mock.return_value = props
exists_mock.return_value = True
lexists_mock.return_value = True
isfile_mock.side_effect = [True, False, False]
upgrade(args)
self.assertTrue(os_remove_mock.called)
self.assertEquals(os_remove_mock.call_count, 1)
self.assertEquals(os_remove_mock.call_args[0][0], os.path.join("resources", "oracle-jdbc-driver.jar"))
self.assertEquals(os_symlink_mock.call_count, 1)
self.assertEquals(os_symlink_mock.call_args[0][0], os.path.join("resources", "ojdbc6.jar"))
self.assertEquals(os_symlink_mock.call_args[0][1], os.path.join("resources", "oracle-jdbc-driver.jar"))
pass
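# A sketch of the version gate the three upgrade() runs above exercise,
# assuming move_user_custom_actions is wanted only from Ambari 2.0.0
# onwards (hypothetical helper; the real check lives in serverUpgrade):
@staticmethod
def _sketch_needs_custom_actions_move(target_version):
  from distutils.version import LooseVersion  # py2-era version comparison
  return LooseVersion(target_version) >= LooseVersion("2.0.0")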
@only_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch("__builtin__.open")
@patch("os.path.isfile")
@patch("os.path.exists")
@patch("os.path.lexists")
@patch("os.remove")
@patch("os.symlink")
@patch.object(Properties, "store")
@patch("ambari_server.serverConfiguration.find_properties_file")
@patch("ambari_server.serverUpgrade.adjust_directory_permissions")
@patch("ambari_server.serverUpgrade.print_warning_msg")
@patch("ambari_server.serverUpgrade.read_ambari_user")
@patch("ambari_server.serverUpgrade.run_schema_upgrade")
@patch("ambari_server.serverUpgrade.update_ambari_properties")
@patch("ambari_server.serverUpgrade.parse_properties_file")
@patch("ambari_server.serverUpgrade.get_ambari_version")
@patch("ambari_server.serverConfiguration.get_ambari_version")
@patch("ambari_server.serverUpgrade.is_root")
@patch("ambari_server.dbConfiguration.get_ambari_properties")
@patch("ambari_server.serverConfiguration.get_ambari_properties")
@patch("ambari_server.serverUpgrade.get_ambari_properties")
@patch("ambari_server.serverUpgrade.upgrade_local_repo")
@patch("ambari_server.serverUpgrade.move_user_custom_actions")
@patch("ambari_server.serverUpgrade.update_krb_jaas_login_properties")
def test_upgrade(self, update_krb_jaas_login_properties_mock, move_user_custom_actions, upgrade_local_repo_mock,
get_ambari_properties_mock, get_ambari_properties_2_mock, get_ambari_properties_3_mock,
is_root_mock, get_ambari_version_mock, get_ambari_version_2_mock,
parse_properties_file_mock,
update_ambari_properties_mock, run_schema_upgrade_mock,
read_ambari_user_mock, print_warning_msg_mock,
adjust_directory_permissions_mock,
find_properties_file_mock, properties_store_mock,
os_symlink_mock, os_remove_mock, lexists_mock, exists_mock, isfile_mock, open_mock):
def reset_mocks():
isfile_mock.reset_mock()
args = MagicMock()
del args.dbms
del args.database_index
del args.database_host
del args.database_port
del args.database_name
del args.database_username
del args.database_password
del args.default_database_host
del args.persistence_type
del args.init_db_script_file
del args.cleanup_db_script_file
del args.sid_or_sname
del args.jdbc_url
args.must_set_database_options = True
return args
args = reset_mocks()
properties = Properties()
get_ambari_properties_3_mock.return_value = get_ambari_properties_2_mock.return_value = \
get_ambari_properties_mock.return_value = properties
update_ambari_properties_mock.return_value = 0
run_schema_upgrade_mock.return_value = 0
isfile_mock.return_value = False
get_ambari_version_2_mock.return_value = get_ambari_version_mock.return_value = CURR_AMBARI_VERSION
move_user_custom_actions.return_value = None
update_krb_jaas_login_properties_mock.return_value = -2
# Testing call under non-root
is_root_mock.return_value = False
try:
upgrade(args)
self.fail("Should throw exception")
except FatalException as fe:
# Expected
self.assertTrue("administrator-level" in fe.reason)
pass
args = reset_mocks()
# Testing calls under root
is_root_mock.return_value = True
# Testing with undefined custom user
read_ambari_user_mock.return_value = None
run_schema_upgrade_mock.return_value = 0
exists_mock.return_value = True
upgrade(args)
self.assertTrue(print_warning_msg_mock.called)
warning_args = print_warning_msg_mock.call_args[0][0]
self.assertTrue("custom ambari user" in warning_args)
self.assertTrue(upgrade_local_repo_mock.called)
self.assertTrue(move_user_custom_actions.called)
args = reset_mocks()
# Testing with defined custom user
read_ambari_user_mock.return_value = "ambari-custom-user"
upgrade(args)
self.assertTrue(adjust_directory_permissions_mock.called)
args = reset_mocks()
run_schema_upgrade_mock.return_value = 0
parse_properties_file_mock.called = False
move_user_custom_actions.called = False
retcode = upgrade(args)
self.assertTrue(get_ambari_properties_mock.called)
self.assertTrue(get_ambari_properties_2_mock.called)
self.assertNotEqual(-1, retcode)
self.assertTrue(parse_properties_file_mock.called)
self.assertTrue(run_schema_upgrade_mock.called)
self.assertTrue(move_user_custom_actions.called)
# Assert that move_user_custom_actions is called on upgrade to Ambari == 2.0.0
get_ambari_version_2_mock.return_value = get_ambari_version_mock.return_value = '2.0.0'
move_user_custom_actions.called = False
upgrade(args)
self.assertTrue(move_user_custom_actions.called)
# Assert that move_user_custom_actions is not called on upgrade to Ambari < 2.0.0
get_ambari_version_2_mock.return_value = get_ambari_version_mock.return_value = '1.6.0'
move_user_custom_actions.called = False
upgrade(args)
self.assertFalse(move_user_custom_actions.called)
get_ambari_version_2_mock.return_value = get_ambari_version_mock.return_value = CURR_AMBARI_VERSION
# Test the case where get_ambari_properties fails
args = reset_mocks()
get_ambari_properties_3_mock.return_value = get_ambari_properties_2_mock.return_value = \
get_ambari_properties_mock.return_value = -1
fail = False
try:
upgrade(args)
except FatalException as e:
self.assertEquals("Error getting ambari properties", e.reason)
fail = True
self.assertTrue(fail)
# Test the case where reading the resources dir property fails
args = reset_mocks()
p = MagicMock()
get_ambari_properties_mock.reset_mock()
get_ambari_properties_2_mock.reset_mock()
get_ambari_properties_3_mock.reset_mock()
get_ambari_properties_3_mock.return_value = get_ambari_properties_2_mock.return_value = \
get_ambari_properties_mock.return_value = p
p.__getitem__.side_effect = ["something", "something", "something", "something", KeyError("test exception")]
exists_mock.return_value = False
fail = False
try:
upgrade(args)
except FatalException as e:
fail = True
self.assertTrue(fail)
# Test the case where a JDBC driver is present in resources and a symlink already exists
args = reset_mocks()
props = Properties()
props.process_pair(JDBC_DATABASE_NAME_PROPERTY, "something")
props.process_pair(RESOURCES_DIR_PROPERTY, "resources")
get_ambari_properties_3_mock.return_value = get_ambari_properties_2_mock.return_value = \
get_ambari_properties_mock.return_value = props
exists_mock.return_value = True
lexists_mock.return_value = True
isfile_mock.side_effect = [True, False, False]
pass
def test_print_info_msg(self):
out = StringIO.StringIO()
sys.stdout = out
set_verbose(True)
print_info_msg("msg")
self.assertNotEqual("", out.getvalue())
sys.stdout = sys.__stdout__
pass
def test_print_error_msg(self):
out = StringIO.StringIO()
sys.stdout = out
set_verbose(True)
print_error_msg("msg")
self.assertNotEqual("", out.getvalue())
sys.stdout = sys.__stdout__
pass
def test_print_warning_msg(self):
out = StringIO.StringIO()
sys.stdout = out
set_verbose(True)
print_warning_msg("msg")
self.assertNotEqual("", out.getvalue())
sys.stdout = sys.__stdout__
pass
@patch("ambari_server.userInput.get_choice_string_input")
def test_get_YN_input(self, get_choice_string_input_mock):
get_YN_input("prompt", "default")
self.assertTrue(get_choice_string_input_mock.called)
self.assertEqual(5, len(get_choice_string_input_mock.call_args_list[0][0]))
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch.object(_ambari_server_, "setup")
def test_main_db_options(self, setup_mock):
base_args = ["ambari-server.py", "setup"]
db_args = ["--database", "postgres", "--databasehost", "somehost.net", "--databaseport", "12345",
"--databasename", "ambari", "--databaseusername", "ambari", "--databasepassword", "bigdata"]
#test no args
failed = False
sys.argv = list(base_args)
try:
_ambari_server_.mainBody()
except SystemExit:
failed = True
pass
self.assertFalse(failed)
self.assertTrue(setup_mock.called)
self.assertTrue(setup_mock.call_args_list[0][0][0].must_set_database_options)
setup_mock.reset_mock()
# test embedded option
failed = False
sys.argv = list(base_args)
sys.argv.extend(db_args[-10:])
sys.argv.extend(["--database", "embedded"])
try:
_ambari_server_.mainBody()
except SystemExit:
failed = True
pass
self.assertFalse(failed)
self.assertTrue(setup_mock.called)
setup_mock.reset_mock()
#test full args
sys.argv = list(base_args)
sys.argv.extend(db_args)
try:
_ambari_server_.mainBody()
except SystemExit:
failed = True
pass
self.assertFalse(failed)
self.assertTrue(setup_mock.called)
self.assertFalse(setup_mock.call_args_list[0][0][0].must_set_database_options)
setup_mock.reset_mock()
#test not full args
sys.argv = list(base_args)
sys.argv.extend(["--database", "postgres"])
try:
_ambari_server_.mainBody()
except SystemExit:
failed = True
pass
self.assertFalse(setup_mock.called)
self.assertTrue(failed)
setup_mock.reset_mock()
#test wrong database
failed = False
sys.argv = list(base_args)
sys.argv.extend(["--database", "unknown"])
sys.argv.extend(db_args[2:])
try:
_ambari_server_.mainBody()
except SystemExit:
failed = True
pass
self.assertTrue(failed)
self.assertFalse(setup_mock.called)
setup_mock.reset_mock()
#test wrong port check
failed = False
sys.argv = list(base_args)
sys.argv.extend(["--databaseport", "unknown"])
sys.argv.extend(db_args[:4])
sys.argv.extend(db_args[6:])
try:
_ambari_server_.mainBody()
except SystemExit:
failed = True
pass
self.assertTrue(failed)
self.assertFalse(setup_mock.called)
pass
@only_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch.object(_ambari_server_, "setup")
def test_main_db_options(self, setup_mock):
base_args = ["ambari-server.py", "setup"]
db_args = ["--databasehost", "somehost.net", "--databaseport", "12345",
"--databasename", "ambari", "--databaseusername", "ambari", "--databasepassword", "bigdata"]
#test no args
failed = False
sys.argv = list(base_args)
try:
_ambari_server_.mainBody()
except SystemExit:
failed = True
pass
self.assertFalse(failed)
self.assertTrue(setup_mock.called)
self.assertTrue(setup_mock.call_args_list[0][0][0].must_set_database_options)
setup_mock.reset_mock()
#test full args
sys.argv = list(base_args)
sys.argv.extend(db_args)
try:
_ambari_server_.mainBody()
except SystemExit:
failed = True
pass
self.assertFalse(failed)
self.assertTrue(setup_mock.called)
self.assertFalse(setup_mock.call_args_list[0][0][0].must_set_database_options)
setup_mock.reset_mock()
#test not full args
sys.argv = list(base_args)
sys.argv.extend(["--databasehost", "somehost.net"])
try:
_ambari_server_.mainBody()
except SystemExit:
failed = True
pass
self.assertFalse(setup_mock.called)
self.assertTrue(failed)
setup_mock.reset_mock()
#test wrong port check
failed = False
sys.argv = list(base_args)
sys.argv.extend(["--databaseport", "unknown"])
sys.argv.extend(db_args[:2])
sys.argv.extend(db_args[6:])
try:
_ambari_server_.mainBody()
except SystemExit:
failed = True
pass
self.assertTrue(failed)
self.assertFalse(setup_mock.called)
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch("ambari_server.serverSetup.get_YN_input")
@patch("ambari_server.dbConfiguration.get_validated_string_input")
@patch("ambari_server.dbConfiguration_linux.print_info_msg")
@patch("ambari_server.dbConfiguration.get_ambari_properties")
def test_prompt_db_properties(self, get_ambari_properties_mock, print_info_msg_mock,
get_validated_string_input_mock, get_YN_input_mock):
def reset_mocks():
get_validated_string_input_mock.reset_mock()
get_YN_input_mock.reset_mock()
args = MagicMock()
del args.database_index
del args.dbms
del args.database_host
del args.database_port
del args.database_name
del args.database_username
del args.database_password
del args.persistence_type
return args
args = reset_mocks()
get_ambari_properties_mock.return_value = Properties()
set_silent(False)
#test not prompt
args.must_set_database_options = False
prompt_db_properties(args)
self.assertFalse(get_validated_string_input_mock.called)
self.assertFalse(get_YN_input_mock.called)
args = reset_mocks()
#test prompt
args.must_set_database_options = True
get_YN_input_mock.return_value = False
prompt_db_properties(args)
self.assertTrue(get_YN_input_mock.called)
self.assertFalse(get_validated_string_input_mock.called)
args = reset_mocks()
#test prompt advanced
args.must_set_database_options = True
get_YN_input_mock.return_value = True
get_validated_string_input_mock.return_value = "4"
prompt_db_properties(args)
self.assertTrue(get_YN_input_mock.called)
self.assertTrue(get_validated_string_input_mock.called)
self.assertEquals(args.database_index, 3)
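# (The DBMS menu prompt is 1-based while args.database_index is 0-based,
# so entering "4" above is expected to select index 3.)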
pass
@patch("ambari_server.serverConfiguration.get_conf_dir")
def _test_update_ambari_properties(self, get_conf_dir_mock):
from ambari_server import serverConfiguration # need to modify constants inside the module
properties = ["server.jdbc.user.name=ambari-server\n",
"server.jdbc.user.passwd=/etc/ambari-server/conf/password.dat\n",
"java.home=/usr/jdk64/jdk1.6.0_31\n",
"server.jdbc.database_name=ambari\n",
"ambari-server.user=ambari\n",
"agent.fqdn.service.url=URL\n",
"java.releases=jdk1.7,jdk1.6\n"]
NEW_PROPERTY = 'some_new_property=some_value\n'
JAVA_RELEASES_NEW_PROPERTY = 'java.releases=jdk1.8,jdk1.7\n'
CHANGED_VALUE_PROPERTY = 'server.jdbc.database_name=should_not_overwrite_value\n'
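# Merge semantics asserted further down (as exercised by these fixtures):
# brand-new keys in the current file survive the merge; for keys that also
# exist in the backup, the backup's value wins (CHANGED_VALUE_PROPERTY must
# not leak through); java.releases is the one key whose current value is kept.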
get_conf_dir_mock.return_value = '/etc/ambari-server/conf'
(tf1, fn1) = tempfile.mkstemp()
(tf2, fn2) = tempfile.mkstemp()
configDefaults.AMBARI_PROPERTIES_BACKUP_FILE = fn1
os.close(tf1)
serverConfiguration.AMBARI_PROPERTIES_FILE = fn2
os.close(tf2)
with open(serverConfiguration.AMBARI_PROPERTIES_FILE, "w") as f:
f.write(NEW_PROPERTY)
f.write(CHANGED_VALUE_PROPERTY)
f.write(JAVA_RELEASES_NEW_PROPERTY)
with open(configDefaults.AMBARI_PROPERTIES_BACKUP_FILE, 'w') as f:
for line in properties:
f.write(line)
#Call tested method
update_ambari_properties()
timestamp = datetime.datetime.now()
# The original backup file itself should no longer exist
self.assertFalse(os.path.exists(configDefaults.AMBARI_PROPERTIES_BACKUP_FILE))
# A timestamp-suffixed rename of the backup file should exist instead
self.assertTrue(os.path.exists(configDefaults.AMBARI_PROPERTIES_BACKUP_FILE
+ '.' + timestamp.strftime('%Y%m%d%H%M%S')))
with open(serverConfiguration.AMBARI_PROPERTIES_FILE, 'r') as f:
ambari_properties_content = f.readlines()
for line in properties:
if (line == "agent.fqdn.service.url=URL\n"):
if (not GET_FQDN_SERVICE_URL + "=URL\n" in ambari_properties_content) and (
line in ambari_properties_content):
self.fail()
elif line == "java.releases=jdk1.7,jdk1.6\n":
if not "java.releases=jdk1.8,jdk1.7\n" in ambari_properties_content:
self.fail()
else:
if not line in ambari_properties_content:
self.fail()
if not NEW_PROPERTY in ambari_properties_content:
self.fail()
if CHANGED_VALUE_PROPERTY in ambari_properties_content:
self.fail()
# Command should not fail if *.rpmsave file is missing
result = update_ambari_properties()
self.assertEquals(result, 0)
os.unlink(fn2)
# If the ambari.properties file is absent, "ambari-server upgrade" should fail
(tf, fn) = tempfile.mkstemp()
configDefaults.AMBARI_PROPERTIES_BACKUP_FILE = fn
result = update_ambari_properties()
self.assertNotEquals(result, 0)
pass
@patch("ambari_server.properties.Properties.__init__")
@patch("ambari_server.serverConfiguration.search_file")
def test_update_ambari_properties_negative_case(self, search_file_mock, properties_mock):
search_file_mock.return_value = None
#Call tested method
self.assertEquals(0, update_ambari_properties())
self.assertFalse(properties_mock.called)
search_file_mock.return_value = False
#Call tested method
self.assertEquals(0, update_ambari_properties())
self.assertFalse(properties_mock.called)
search_file_mock.return_value = ''
#Call tested method
self.assertEquals(0, update_ambari_properties())
self.assertFalse(properties_mock.called)
pass
@patch("ambari_server.serverConfiguration.get_conf_dir")
def _test_update_ambari_properties_without_some_properties(self, get_conf_dir_mock):
'''
Checks: update_ambari_properties call should add ambari-server.user property if
it's absent
'''
from ambari_server import serverConfiguration # need to modify constants inside the module
properties = ["server.jdbc.user.name=ambari-server\n",
"server.jdbc.user.passwd=/etc/ambari-server/conf/password.dat\n",
"java.home=/usr/jdk64/jdk1.6.0_31\n",
"server.os_type=redhat6\n"]
get_conf_dir_mock.return_value = '/etc/ambari-server/conf'
(tf1, fn1) = tempfile.mkstemp()
os.close(tf1)
(tf2, fn2) = tempfile.mkstemp()
os.close(tf2)
serverConfiguration.AMBARI_PROPERTIES_RPMSAVE_FILE = fn1
serverConfiguration.AMBARI_PROPERTIES_FILE = fn2
with open(serverConfiguration.AMBARI_PROPERTIES_RPMSAVE_FILE, 'w') as f:
for line in properties:
f.write(line)
#Call tested method
update_ambari_properties()
ambari_properties = Properties()
ambari_properties.load(open(fn2))
self.assertTrue(NR_USER_PROPERTY in ambari_properties.keys())
value = ambari_properties[NR_USER_PROPERTY]
self.assertEqual(value, "root")
self.assertTrue(OS_FAMILY_PROPERTY in ambari_properties.keys())
os.unlink(fn2)
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch("resource_management.core.shell.call")
@patch("ambari_server.serverSetup.verify_setup_allowed")
@patch("sys.exit")
@patch("ambari_server.serverSetup.get_YN_input")
@patch("ambari_server.dbConfiguration.get_validated_string_input")
@patch("ambari_server.dbConfiguration_linux.get_YN_input")
@patch("ambari_server.dbConfiguration_linux.get_validated_string_input")
@patch("ambari_server.dbConfiguration_linux.PGConfig._store_remote_properties")
@patch("ambari_server.dbConfiguration_linux.LinuxDBMSConfig.ensure_jdbc_driver_installed")
@patch("ambari_server.dbConfiguration_linux.read_password")
@patch("ambari_server.serverSetup.check_jdbc_drivers")
@patch("ambari_server.serverSetup.is_root")
@patch("ambari_server.serverSetup.check_ambari_user")
@patch("ambari_server.serverSetup.download_and_install_jdk")
@patch("ambari_server.serverSetup.configure_os_settings")
@patch('__builtin__.raw_input')
@patch("ambari_server.serverSetup.disable_security_enhancements")
@patch("ambari_server.serverSetup.expand_jce_zip_file")
@patch("ambari_server.serverSetup.logger")
def test_setup_remote_db_wo_client(self, logger_mock, expand_jce_zip_file_mock, disable_security_enhancements_mock, raw_input, configure_os_settings_mock,
download_jdk_mock, check_ambari_user_mock, is_root_mock, check_jdbc_drivers_mock,
read_password_mock, ensure_jdbc_driver_installed_mock, store_remote_properties_mock,
get_validated_string_input_0_mock, get_YN_input_0_mock,
get_validated_string_input_mock, get_YN_input,
exit_mock, verify_setup_allowed_method,
run_os_command_mock):
args = MagicMock()
args.jdbc_driver = None
args.jdbc_db = None
args.silent = False
del args.dbms
del args.database_index
del args.database_host
del args.database_port
del args.database_name
del args.database_username
del args.database_password
del args.persistence_type
raw_input.return_value = ""
is_root_mock.return_value = True
disable_security_enhancements_mock.return_value = (0, "")
run_os_command_mock.return_value = (3, "", "")
store_remote_properties_mock.return_value = 0
get_YN_input.return_value = True
get_validated_string_input_mock.side_effect = ["4"]
get_validated_string_input_0_mock.side_effect = ["localhost", "5432", "ambari", "ambari", "admin"]
get_YN_input_0_mock.return_value = False
read_password_mock.return_value = "encrypted_bigdata"
ensure_jdbc_driver_installed_mock.return_value = True
check_jdbc_drivers_mock.return_value = 0
check_ambari_user_mock.return_value = (0, False, 'user', None)
download_jdk_mock.return_value = 0
configure_os_settings_mock.return_value = 0
verify_setup_allowed_method.return_value = 0
expand_jce_zip_file_mock.return_value = 0
try:
setup(args)
self.fail("Should throw exception")
except NonFatalException as fe:
# Expected
self.assertTrue("Remote database setup aborted." in fe.reason)
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch("resource_management.core.shell.call")
@patch("sys.exit")
@patch("ambari_server.userInput.get_YN_input")
@patch("ambari_commons.os_utils.is_root")
@patch("ambari_server.dbConfiguration_linux.store_password_file")
@patch("__builtin__.raw_input")
def test_store_remote_properties(self, raw_input_mock, store_password_file_mock,
is_root_mock, get_YN_input, exit_mock,
run_os_command_mock
):
raw_input_mock.return_value = ""
is_root_mock.return_value = True
get_YN_input.return_value = False
run_os_command_mock.return_value = (3, "", "")
store_password_file_mock.return_value = "encrypted_bigdata"
import optparse
args = optparse.Values()
args.dbms = "oracle"
args.database_host = "localhost"
args.database_port = "1234"
args.database_name = "ambari"
args.postgres_schema = "ambari"
args.sid_or_sname = "foo"
args.database_username = "foo"
args.database_password = "foo"
properties0 = Properties()
properties = Properties()
factory = DBMSConfigFactory()
dbConfig = factory.create(args, properties0)
dbConfig._store_remote_properties(properties, None)
found = False
for n in properties.propertyNames():
if not found and n.startswith("server.jdbc.properties"):
found = True
self.assertTrue(found)
# verify that some properties exist
self.assertEquals("internal", properties.get_property(JDBC_CONNECTION_POOL_TYPE))
# now try with MySQL instead of Oracle to verify that the properties are different
args.dbms = "mysql"
args.database_index = 2
properties0 = Properties()
properties = Properties()
factory = DBMSConfigFactory()
dbConfig = factory.create(args, properties0)
dbConfig._store_remote_properties(properties, args)
# verify MySQL properties
self.assertEquals("c3p0", properties.get_property(JDBC_CONNECTION_POOL_TYPE))
@patch("os.path.exists")
@patch("os.remove")
@patch("ambari_commons.os_utils.print_warning_msg")
def test_remove_file(self, printWarningMsgMock, removeMock, pathExistsMock):
def side_effect():
raise Exception(-1, "Failed to delete!")
removeMock.side_effect = side_effect
pathExistsMock.return_value = 1
res = remove_file("/someNonExsistantDir/filename")
self.assertEquals(res, 1)
removeMock.side_effect = None
res = remove_file("/someExsistantDir/filename")
self.assertEquals(res, 0)
@patch("shutil.copyfile")
def test_copy_file(self, shutilCopyfileMock):
def side_effect():
raise Exception(-1, "Failed to copy!")
shutilCopyfileMock.side_effect = side_effect
try:
copy_file("/tmp/psswd", "/someNonExsistantDir/filename")
self.fail("Exception on file not copied has not been thrown!")
except FatalException:
# Expected
pass
self.assertTrue(shutilCopyfileMock.called)
shutilCopyfileMock.side_effect = None
try:
copy_file("/tmp/psswd", "/root/psswd")
except FatalException:
self.fail("Exception on file copied should not be thrown!")
self.assertTrue(shutilCopyfileMock.called)
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch("ambari_server.dbConfiguration.get_ambari_properties")
@patch("ambari_server.dbConfiguration_linux.get_ambari_properties")
@patch("ambari_server.dbConfiguration_linux.print_error_msg")
@patch("ambari_server.dbConfiguration.print_error_msg")
@patch("ambari_server.dbConfiguration_linux.print_warning_msg")
@patch("__builtin__.raw_input")
@patch("glob.glob")
@patch("os.path.isdir")
@patch("os.path.lexists")
@patch("os.remove")
def test_ensure_jdbc_drivers_installed(self, os_remove_mock, lexists_mock, isdir_mock, glob_mock,
raw_input_mock, print_warning_msg, print_error_msg_mock, print_error_msg_2_mock,
get_ambari_properties_mock, get_ambari_properties_2_mock):
out = StringIO.StringIO()
sys.stdout = out
def reset_mocks():
get_ambari_properties_mock.reset_mock()
get_ambari_properties_2_mock.reset_mock()
print_error_msg_mock.reset_mock()
print_warning_msg.reset_mock()
raw_input_mock.reset_mock()
args = MagicMock()
del args.database_index
del args.persistence_type
del args.silent
del args.sid_or_sname
del args.jdbc_url
args.dbms = "oracle"
return args
# Check positive scenario
drivers_list = [os.path.join(os.sep,'usr','share','java','ojdbc6.jar')]
resources_dir = os.sep + 'tmp'
props = Properties()
props.process_pair(RESOURCES_DIR_PROPERTY, resources_dir)
get_ambari_properties_2_mock.return_value = get_ambari_properties_mock.return_value = props
factory = DBMSConfigFactory()
args = reset_mocks()
glob_mock.return_value = drivers_list
isdir_mock.return_value = True
lexists_mock.return_value = True
dbms = factory.create(args, props)
rcode = dbms.ensure_jdbc_driver_installed(props)
self.assertTrue(rcode)
# Check negative scenarios
# Silent option, no drivers
set_silent(True)
args = reset_mocks()
glob_mock.return_value = []
failed = False
try:
dbms = factory.create(args, props)
rcode = dbms.ensure_jdbc_driver_installed(props)
except FatalException:
failed = True
self.assertTrue(print_error_msg_mock.called)
self.assertTrue(failed)
# Non-Silent option, no drivers
set_silent(False)
args = reset_mocks()
glob_mock.return_value = []
failed = False
try:
dbms = factory.create(args, props)
rcode = dbms.ensure_jdbc_driver_installed(props)
except FatalException:
failed = True
self.assertTrue(failed)
self.assertTrue(print_error_msg_mock.called)
# Non-Silent option, no drivers at first ask, present drivers after that
args = reset_mocks()
glob_mock.side_effect = [[], drivers_list, drivers_list]
dbms = factory.create(args, props)
rcode = dbms.ensure_jdbc_driver_installed(props)
self.assertTrue(rcode)
# Non-Silent option, no drivers at first ask, no drivers after that
args = reset_mocks()
glob_mock.side_effect = [[], []]
failed = False
try:
dbms = factory.create(args, props)
rcode = dbms.ensure_jdbc_driver_installed(props)
except FatalException:
failed = True
self.assertTrue(failed)
self.assertTrue(print_error_msg_mock.called)
# Failed to copy_files
args = reset_mocks()
glob_mock.side_effect = [[], drivers_list, drivers_list]
try:
dbms = factory.create(args, props)
rcode = dbms.ensure_jdbc_driver_installed(props)
except FatalException:
failed = True
self.assertTrue(failed)
sys.stdout = sys.__stdout__
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch("ambari_server.dbConfiguration.get_ambari_properties")
@patch("os.path.isdir")
@patch("os.path.isfile")
@patch("os.path.lexists")
@patch("os.remove")
@patch("os.symlink")
def test_check_jdbc_drivers(self, os_symlink_mock, os_remove_mock, lexists_mock, isfile_mock, isdir_mock,
get_ambari_properties_mock):
args = MagicMock()
# Check positive scenario
drivers_list = [os.path.join(os.sep,'usr','share','java','ojdbc6.jar')]
resources_dir = os.sep + 'tmp'
props = Properties()
props.process_pair(RESOURCES_DIR_PROPERTY, resources_dir)
get_ambari_properties_mock.return_value = props
isdir_mock.return_value = True
isfile_mock.side_effect = [True, False, False, False, False]
del args.database_index
del args.persistence_type
del args.silent
del args.sid_or_sname
del args.jdbc_url
lexists_mock.return_value = True
check_jdbc_drivers(args)
self.assertEquals(os_symlink_mock.call_count, 1)
self.assertEquals(os_symlink_mock.call_args_list[0][0][0], os.path.join(os.sep,'tmp','ojdbc6.jar'))
self.assertEquals(os_symlink_mock.call_args_list[0][0][1], os.path.join(os.sep,'tmp','oracle-jdbc-driver.jar'))
# Check negative scenarios
# No drivers deployed
get_ambari_properties_mock.reset_mock()
os_symlink_mock.reset_mock()
isfile_mock.side_effect = [False, False, False, False, False]
check_jdbc_drivers(args)
self.assertFalse(os_symlink_mock.called)
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch("ambari_server.serverConfiguration.find_properties_file")
def test_get_ambari_properties(self, find_properties_file_mock):
find_properties_file_mock.return_value = None
rcode = get_ambari_properties()
self.assertEqual(rcode, -1)
tf1 = tempfile.NamedTemporaryFile()
find_properties_file_mock.return_value = tf1.name
prop_name = 'name'
prop_value = 'val'
with open(tf1.name, 'w') as fout:
fout.write(prop_name + '=' + prop_value)
properties = get_ambari_properties()
self.assertEqual(properties[prop_name], prop_value)
self.assertEqual(properties.fileName, os.path.abspath(tf1.name))
sys.stdout = sys.__stdout__
pass
@only_for_platform(PLATFORM_WINDOWS)
@patch("ambari_server.serverConfiguration.find_properties_file")
def test_get_ambari_properties(self, find_properties_file_mock):
find_properties_file_mock.return_value = None
rcode = get_ambari_properties()
self.assertEqual(rcode, -1)
tf1 = tempfile.NamedTemporaryFile(delete=False)
find_properties_file_mock.return_value = tf1.name
prop_name = 'name'
prop_value = 'val'
tf1.close()
with open(tf1.name, 'w') as fout:
fout.write(prop_name + '=' + prop_value)
properties = get_ambari_properties()
self.assertEqual(properties[prop_name], prop_value)
self.assertEqual(properties.fileName, os.path.abspath(tf1.name))
sys.stdout = sys.__stdout__
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch("ambari_server.serverConfiguration.check_database_name_property")
@patch("ambari_server.serverConfiguration.find_properties_file")
def test_parse_properties_file(self, find_properties_file_mock, check_database_name_property_mock):
check_database_name_property_mock.return_value = 1
tf1 = tempfile.NamedTemporaryFile(mode='r')
find_properties_file_mock.return_value = tf1.name
args = MagicMock()
parse_properties_file(args)
self.assertEquals(args.persistence_type, "local")
with open(tf1.name, 'w') as fout:
fout.write("\n")
fout.write(PERSISTENCE_TYPE_PROPERTY + "=remote")
args = MagicMock()
parse_properties_file(args)
self.assertEquals(args.persistence_type, "remote")
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch("os.path.isabs")
@patch("ambari_server.dbConfiguration.decrypt_password_for_alias")
@patch("ambari_server.dbConfiguration_linux.get_ambari_properties")
def test_configure_database_username_password_masterkey_persisted(self,
get_ambari_properties_method,
decrypt_password_for_alias_method,
path_isabs_method):
out = StringIO.StringIO()
sys.stdout = out
properties = Properties()
properties.process_pair(JDBC_USER_NAME_PROPERTY, "fakeuser")
properties.process_pair(JDBC_PASSWORD_PROPERTY, "${alias=somealias}")
properties.process_pair(JDBC_DATABASE_NAME_PROPERTY, "fakedbname")
properties.process_pair(SECURITY_KEY_IS_PERSISTED, "True")
get_ambari_properties_method.return_value = properties
decrypt_password_for_alias_method.return_value = "fakepasswd"
args = MagicMock()
args.master_key = None
del args.database_index
del args.dbms
del args.database_host
del args.database_port
del args.database_name
del args.database_username
del args.database_password
del args.sid_or_sname
del args.jdbc_url
dbms = OracleConfig(args, properties, "local")
self.assertTrue(decrypt_password_for_alias_method.called)
self.assertEquals("fakeuser", dbms.database_username)
self.assertEquals("falepasswd", dbms.database_password)
sys.stdout = sys.__stdout__
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch("ambari_server.dbConfiguration_linux.read_password")
def test_configure_database_password(self, read_password_method):
out = StringIO.StringIO()
sys.stdout = out
read_password_method.return_value = "fakepasswd"
result = LinuxDBMSConfig._configure_database_password(True)
self.assertTrue(read_password_method.called)
self.assertEquals("fakepasswd", result)
result = LinuxDBMSConfig._configure_database_password(True)
self.assertEquals("fakepasswd", result)
result = LinuxDBMSConfig._configure_database_password(True)
self.assertEquals("fakepasswd", result)
sys.stdout = sys.__stdout__
pass
@not_for_platform(PLATFORM_WINDOWS)
def test_configure_database_password_silent(self):
out = StringIO.StringIO()
sys.stdout = out
set_silent(True)
result = LinuxDBMSConfig._configure_database_password(True, "CustomDefaultPasswd")
self.assertEquals("CustomDefaultPasswd", result)
sys.stdout = sys.__stdout__
pass
@patch("os.path.exists")
@patch("ambari_server.setupSecurity.get_is_secure")
@patch("ambari_server.setupSecurity.get_is_persisted")
@patch("ambari_server.setupSecurity.remove_password_file")
@patch("ambari_server.setupSecurity.save_passwd_for_alias")
@patch("ambari_server.setupSecurity.read_master_key")
@patch("ambari_server.setupSecurity.read_ambari_user")
@patch("ambari_server.setupSecurity.get_master_key_location")
@patch("ambari_server.setupSecurity.update_properties_2")
@patch("ambari_server.setupSecurity.save_master_key")
@patch("ambari_server.setupSecurity.get_YN_input")
@patch("ambari_server.setupSecurity.search_file")
@patch("ambari_server.setupSecurity.get_ambari_properties")
@patch("ambari_server.setupSecurity.is_root")
def test_setup_master_key_not_persist(self, is_root_method,
get_ambari_properties_method, search_file_message,
get_YN_input_method, save_master_key_method,
update_properties_method, get_master_key_location_method,
read_ambari_user_method, read_master_key_method,
save_passwd_for_alias_method, remove_password_file_method,
get_is_persisted_method, get_is_secure_method, exists_mock):
is_root_method.return_value = True
p = Properties()
FAKE_PWD_STRING = "fakepasswd"
p.process_pair(JDBC_PASSWORD_PROPERTY, FAKE_PWD_STRING)
p.process_pair(LDAP_MGR_PASSWORD_PROPERTY, FAKE_PWD_STRING)
p.process_pair(SSL_TRUSTSTORE_PASSWORD_PROPERTY, FAKE_PWD_STRING)
p.process_pair(JDBC_RCA_PASSWORD_FILE_PROPERTY, FAKE_PWD_STRING)
get_ambari_properties_method.return_value = p
read_master_key_method.return_value = "aaa"
get_YN_input_method.return_value = False
read_ambari_user_method.return_value = None
save_passwd_for_alias_method.return_value = 0
get_is_persisted_method.return_value = (True, "filepath")
get_is_secure_method.return_value = False
exists_mock.return_value = False
options = self._create_empty_options_mock()
setup_master_key(options)
self.assertTrue(get_YN_input_method.called)
self.assertTrue(read_master_key_method.called)
self.assertTrue(read_ambari_user_method.called)
self.assertTrue(update_properties_method.called)
self.assertFalse(save_master_key_method.called)
self.assertTrue(save_passwd_for_alias_method.called)
self.assertEquals(3, save_passwd_for_alias_method.call_count)
self.assertTrue(remove_password_file_method.called)
result_expected = {JDBC_PASSWORD_PROPERTY:
get_alias_string(JDBC_RCA_PASSWORD_ALIAS),
JDBC_RCA_PASSWORD_FILE_PROPERTY:
get_alias_string(JDBC_RCA_PASSWORD_ALIAS),
LDAP_MGR_PASSWORD_PROPERTY:
get_alias_string(LDAP_MGR_PASSWORD_ALIAS),
SSL_TRUSTSTORE_PASSWORD_PROPERTY:
get_alias_string(SSL_TRUSTSTORE_PASSWORD_ALIAS),
SECURITY_IS_ENCRYPTION_ENABLED: 'true'}
sorted_x = sorted(result_expected.iteritems(), key=operator.itemgetter(0))
sorted_y = sorted(update_properties_method.call_args[0][1].iteritems(),
key=operator.itemgetter(0))
self.assertEquals(sorted_x, sorted_y)
pass
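# The expected maps above are built with get_alias_string; a minimal sketch
# of the wrapper it is assumed to produce (hypothetical reference helper,
# mirroring the '${alias=...}' literals used as FAKE_PWD_STRING in the
# reset tests below):
@staticmethod
def _sketch_alias_string(alias_name):
  return "${alias=%s}" % alias_name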
@patch("ambari_server.setupSecurity.save_passwd_for_alias")
@patch("os.path.exists")
@patch("ambari_server.setupSecurity.get_is_secure")
@patch("ambari_server.setupSecurity.get_is_persisted")
@patch("ambari_server.setupSecurity.read_master_key")
@patch("ambari_server.setupSecurity.read_ambari_user")
@patch("ambari_server.setupSecurity.get_master_key_location")
@patch("ambari_server.setupSecurity.update_properties_2")
@patch("ambari_server.setupSecurity.save_master_key")
@patch("ambari_server.setupSecurity.get_YN_input")
@patch("ambari_server.serverConfiguration.search_file")
@patch("ambari_server.setupSecurity.get_ambari_properties")
@patch("ambari_server.setupSecurity.is_root")
def test_setup_master_key_persist(self, is_root_method,
get_ambari_properties_method, search_file_message,
get_YN_input_method, save_master_key_method,
update_properties_method, get_master_key_location_method,
read_ambari_user_method, read_master_key_method,
get_is_persisted_method, get_is_secure_method, exists_mock,
save_passwd_for_alias_method):
is_root_method.return_value = True
p = Properties()
FAKE_PWD_STRING = "fakepasswd"
p.process_pair(JDBC_PASSWORD_PROPERTY, FAKE_PWD_STRING)
get_ambari_properties_method.return_value = p
search_file_message.return_value = "propertiesfile"
read_master_key_method.return_value = "aaa"
get_YN_input_method.side_effect = [True, False]
read_ambari_user_method.return_value = None
get_is_persisted_method.return_value = (True, "filepath")
get_is_secure_method.return_value = False
exists_mock.return_value = False
save_passwd_for_alias_method.return_value = 0
options = self._create_empty_options_mock()
setup_master_key(options)
self.assertTrue(get_YN_input_method.called)
self.assertTrue(read_master_key_method.called)
self.assertTrue(read_ambari_user_method.called)
self.assertTrue(update_properties_method.called)
self.assertTrue(save_master_key_method.called)
result_expected = {JDBC_PASSWORD_PROPERTY:
get_alias_string(JDBC_RCA_PASSWORD_ALIAS),
SECURITY_IS_ENCRYPTION_ENABLED: 'true'}
sorted_x = sorted(result_expected.iteritems(), key=operator.itemgetter(0))
sorted_y = sorted(update_properties_method.call_args[0][1].iteritems(),
key=operator.itemgetter(0))
self.assertEquals(sorted_x, sorted_y)
pass
@patch("ambari_server.setupSecurity.read_master_key")
@patch("ambari_server.setupSecurity.remove_password_file")
@patch("os.path.exists")
@patch("ambari_server.setupSecurity.read_ambari_user")
@patch("ambari_server.setupSecurity.get_master_key_location")
@patch("ambari_server.setupSecurity.save_passwd_for_alias")
@patch("ambari_server.setupSecurity.read_passwd_for_alias")
@patch("ambari_server.setupSecurity.update_properties_2")
@patch("ambari_server.setupSecurity.save_master_key")
@patch("ambari_server.setupSecurity.get_validated_string_input")
@patch("ambari_server.setupSecurity.get_YN_input")
@patch("ambari_server.setupSecurity.search_file")
@patch("ambari_server.setupSecurity.get_ambari_properties")
@patch("ambari_server.setupSecurity.is_root")
def test_reset_master_key_persisted(self, is_root_method,
get_ambari_properties_method, search_file_message,
get_YN_input_method, get_validated_string_input_method,
save_master_key_method, update_properties_method,
read_passwd_for_alias_method, save_passwd_for_alias_method,
get_master_key_location_method,
read_ambari_user_method, exists_mock,
remove_password_file_method, read_master_key_method):
# Testing call under root
is_root_method.return_value = True
search_file_message.return_value = "filepath"
read_ambari_user_method.return_value = None
p = Properties()
FAKE_PWD_STRING = '${alias=fakealias}'
p.process_pair(JDBC_PASSWORD_PROPERTY, FAKE_PWD_STRING)
p.process_pair(LDAP_MGR_PASSWORD_PROPERTY, FAKE_PWD_STRING)
p.process_pair(SSL_TRUSTSTORE_PASSWORD_PROPERTY, FAKE_PWD_STRING)
p.process_pair(JDBC_RCA_PASSWORD_FILE_PROPERTY, FAKE_PWD_STRING)
get_ambari_properties_method.return_value = p
get_YN_input_method.side_effect = [True, True]
read_master_key_method.return_value = "aaa"
read_passwd_for_alias_method.return_value = "fakepassword"
save_passwd_for_alias_method.return_value = 0
exists_mock.return_value = False
options = self._create_empty_options_mock()
setup_master_key(options)
self.assertTrue(save_master_key_method.called)
self.assertTrue(get_YN_input_method.called)
self.assertTrue(read_master_key_method.called)
self.assertTrue(update_properties_method.called)
self.assertTrue(read_passwd_for_alias_method.called)
self.assertEquals(3, read_passwd_for_alias_method.call_count)
self.assertEquals(3, save_passwd_for_alias_method.call_count)
result_expected = {JDBC_PASSWORD_PROPERTY:
get_alias_string(JDBC_RCA_PASSWORD_ALIAS),
JDBC_RCA_PASSWORD_FILE_PROPERTY:
get_alias_string(JDBC_RCA_PASSWORD_ALIAS),
LDAP_MGR_PASSWORD_PROPERTY:
get_alias_string(LDAP_MGR_PASSWORD_ALIAS),
SSL_TRUSTSTORE_PASSWORD_PROPERTY:
get_alias_string(SSL_TRUSTSTORE_PASSWORD_ALIAS),
SECURITY_IS_ENCRYPTION_ENABLED: 'true'}
sorted_x = sorted(result_expected.iteritems(), key=operator.itemgetter(0))
sorted_y = sorted(update_properties_method.call_args[0][1].iteritems(),
key=operator.itemgetter(0))
self.assertEquals(sorted_x, sorted_y)
pass
@patch("os.path.isdir", new = MagicMock(return_value=True))
@patch("os.access", new = MagicMock(return_value=True))
@patch.object(ServerClassPath, "get_full_ambari_classpath_escaped_for_shell", new = MagicMock(return_value = 'test' + os.pathsep + 'path12'))
@patch("ambari_server.serverUtils.is_server_runing")
@patch("ambari_commons.os_utils.run_os_command")
@patch("ambari_server.setupSecurity.generate_env")
@patch("ambari_server.setupSecurity.ensure_can_start_under_current_user")
@patch("ambari_server.serverConfiguration.read_ambari_user")
@patch("ambari_server.dbConfiguration.ensure_jdbc_driver_is_installed")
@patch("ambari_server.serverConfiguration.parse_properties_file")
@patch("ambari_server.serverConfiguration.get_ambari_properties")
@patch("ambari_server.serverConfiguration.get_java_exe_path")
@patch("sys.exit")
def test_check_database(self, exitMock, getJavaExePathMock,
getAmbariPropertiesMock, parsePropertiesFileMock, ensureDriverInstalledMock, readAmbariUserMock,
ensureCanStartUnderCurrentUserMock, generateEnvMock, runOSCommandMock, isServerRunningMock):
properties = Properties()
properties.process_pair("server.jdbc.database", "embedded")
getJavaExePathMock.return_value = "/path/to/java"
getAmbariPropertiesMock.return_value = properties
readAmbariUserMock.return_value = "test_user"
ensureCanStartUnderCurrentUserMock.return_value = "test_user"
generateEnvMock.return_value = {}
runOSCommandMock.return_value = (0, "", "")
isServerRunningMock.return_value = (False, 1)
check_database(properties)
self.assertTrue(getJavaExePathMock.called)
self.assertTrue(readAmbariUserMock.called)
self.assertTrue(ensureCanStartUnderCurrentUserMock.called)
self.assertTrue(generateEnvMock.called)
self.assertEquals(runOSCommandMock.call_args[0][0], '/path/to/java -cp test' + os.pathsep + 'path12 org.apache.ambari.server.checks.DatabaseConsistencyChecker')
pass
@patch("ambari_server.setupSecurity.get_is_persisted")
@patch("ambari_server.setupSecurity.get_is_secure")
@patch("ambari_server.setupSecurity.remove_password_file")
@patch("os.path.exists")
@patch("ambari_server.setupSecurity.read_ambari_user")
@patch("ambari_server.setupSecurity.get_master_key_location")
@patch("ambari_server.setupSecurity.save_passwd_for_alias")
@patch("ambari_server.setupSecurity.read_passwd_for_alias")
@patch("ambari_server.setupSecurity.update_properties_2")
@patch("ambari_server.setupSecurity.save_master_key")
@patch("ambari_server.setupSecurity.get_validated_string_input")
@patch("ambari_server.setupSecurity.get_YN_input")
@patch("ambari_server.setupSecurity.search_file")
@patch("ambari_server.setupSecurity.get_ambari_properties")
@patch("ambari_server.setupSecurity.is_root")
def test_reset_master_key_not_persisted(self, is_root_method,
get_ambari_properties_method,
search_file_message, get_YN_input_method,
get_validated_string_input_method, save_master_key_method,
update_properties_method, read_passwd_for_alias_method,
save_passwd_for_alias_method,
get_master_key_location_method, read_ambari_user_method,
exists_mock, remove_password_file_method, get_is_secure_method,
get_is_persisted_method):
is_root_method.return_value = True
search_file_message.return_value = False
read_ambari_user_method.return_value = None
p = Properties()
FAKE_PWD_STRING = '${alias=fakealias}'
p.process_pair(JDBC_PASSWORD_PROPERTY, FAKE_PWD_STRING)
p.process_pair(LDAP_MGR_PASSWORD_PROPERTY, FAKE_PWD_STRING)
p.process_pair(SSL_TRUSTSTORE_PASSWORD_PROPERTY, FAKE_PWD_STRING)
p.process_pair(JDBC_RCA_PASSWORD_FILE_PROPERTY, FAKE_PWD_STRING)
get_ambari_properties_method.return_value = p
get_YN_input_method.side_effect = [True, False]
get_validated_string_input_method.return_value = "aaa"
read_passwd_for_alias_method.return_value = "fakepassword"
save_passwd_for_alias_method.return_value = 0
exists_mock.return_value = False
get_is_secure_method.return_value = True
get_is_persisted_method.return_value = (True, "filePath")
options = self._create_empty_options_mock()
setup_master_key(options)
self.assertFalse(save_master_key_method.called)
self.assertTrue(get_YN_input_method.called)
self.assertTrue(get_validated_string_input_method.called)
self.assertTrue(update_properties_method.called)
self.assertTrue(read_passwd_for_alias_method.called)
self.assertEquals(3, read_passwd_for_alias_method.call_count)
self.assertEquals(3, save_passwd_for_alias_method.call_count)
self.assertFalse(save_master_key_method.called)
result_expected = {JDBC_PASSWORD_PROPERTY:
get_alias_string(JDBC_RCA_PASSWORD_ALIAS),
JDBC_RCA_PASSWORD_FILE_PROPERTY:
get_alias_string(JDBC_RCA_PASSWORD_ALIAS),
LDAP_MGR_PASSWORD_PROPERTY:
get_alias_string(LDAP_MGR_PASSWORD_ALIAS),
SSL_TRUSTSTORE_PASSWORD_PROPERTY:
get_alias_string(SSL_TRUSTSTORE_PASSWORD_ALIAS),
SECURITY_IS_ENCRYPTION_ENABLED: 'true'}
sorted_x = sorted(result_expected.iteritems(), key=operator.itemgetter(0))
sorted_y = sorted(update_properties_method.call_args[0][1].iteritems(),
key=operator.itemgetter(0))
self.assertEquals(sorted_x, sorted_y)
pass
@staticmethod
@OsFamilyFuncImpl(OSConst.WINSRV_FAMILY)
def _init_test_ldap_properties_map_invalid_input_1():
ldap_properties_map = \
{
LDAP_PRIMARY_URL_PROPERTY: "a:3",
"authentication.ldap.secondaryUrl": "b:2",
"authentication.ldap.useSSL": "false",
"authentication.ldap.usernameAttribute": "user",
"authentication.ldap.baseDn": "uid",
"authentication.ldap.bindAnonymously": "true",
"ldap.sync.username.collision.behavior": "skip",
"authentication.ldap.referral": "follow",
"client.security": "ldap",
"ambari.ldap.isConfigured": "true"
}
return ldap_properties_map
@staticmethod
@OsFamilyFuncImpl(OsFamilyImpl.DEFAULT)
def _init_test_ldap_properties_map_invalid_input_1():
ldap_properties_map = \
{
LDAP_PRIMARY_URL_PROPERTY: "a:3",
"authentication.ldap.secondaryUrl": "b:2",
"authentication.ldap.useSSL": "false",
"authentication.ldap.userObjectClass": "user",
"authentication.ldap.usernameAttribute": "uid",
"authentication.ldap.groupObjectClass": "group",
"authentication.ldap.groupNamingAttr": "cn",
"authentication.ldap.groupMembershipAttr": "member",
"authentication.ldap.dnAttribute": "dn",
"authentication.ldap.baseDn": "base",
"authentication.ldap.referral": "follow",
"authentication.ldap.bindAnonymously": "true",
"ldap.sync.username.collision.behavior": "skip",
"client.security": "ldap",
"ambari.ldap.isConfigured": "true"
}
return ldap_properties_map
@staticmethod
@OsFamilyFuncImpl(OSConst.WINSRV_FAMILY)
def _init_test_ldap_properties_map_invalid_input_2():
ldap_properties_map = \
{
LDAP_PRIMARY_URL_PROPERTY: "a:3",
"authentication.ldap.useSSL": "false",
"authentication.ldap.usernameAttribute": "user",
"authentication.ldap.baseDn": "uid",
"authentication.ldap.bindAnonymously": "true",
"authentication.ldap.referral": "follow",
"client.security": "ldap",
"ambari.ldap.isConfigured": "true"
}
return ldap_properties_map
@staticmethod
@OsFamilyFuncImpl(OsFamilyImpl.DEFAULT)
def _init_test_ldap_properties_map_invalid_input_2():
ldap_properties_map = \
{
LDAP_PRIMARY_URL_PROPERTY: "a:3",
"authentication.ldap.useSSL": "false",
"authentication.ldap.userObjectClass": "user",
"authentication.ldap.usernameAttribute": "uid",
"authentication.ldap.groupObjectClass": "group",
"authentication.ldap.groupNamingAttr": "cn",
"authentication.ldap.groupMembershipAttr": "member",
"authentication.ldap.dnAttribute": "dn",
"authentication.ldap.baseDn": "base",
"authentication.ldap.referral": "follow",
"authentication.ldap.bindAnonymously": "true",
"ldap.sync.username.collision.behavior": "skip",
"client.security": "ldap",
"ambari.ldap.isConfigured": "true"
}
return ldap_properties_map
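# Note: the same-named builders above are not accidental duplicates; the
# @OsFamilyFuncImpl decorator registers one implementation per OS family
# and dispatches to the variant matching the host at runtime, so each
# platform is checked against its own expected LDAP property map.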
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch("__builtin__.raw_input")
@patch("ambari_server.setupSecurity.get_is_secure")
@patch("ambari_server.setupSecurity.get_YN_input")
@patch("ambari_server.setupSecurity.update_properties_2")
@patch("ambari_server.setupSecurity.search_file")
@patch("ambari_server.setupSecurity.get_ambari_properties")
@patch("ambari_server.setupSecurity.is_root")
@patch("ambari_server.setupSecurity.logger")
def test_setup_ldap_invalid_input(self, logger_mock, is_root_method, get_ambari_properties_method,
search_file_message,
update_properties_method,
get_YN_input_method,
get_is_secure_method,
raw_input_mock):
out = StringIO.StringIO()
sys.stdout = out
is_root_method.return_value = True
search_file_message.return_value = "filepath"
configs = {SECURITY_MASTER_KEY_LOCATION: "filepath",
SECURITY_KEYS_DIR: tempfile.gettempdir(),
SECURITY_IS_ENCRYPTION_ENABLED: "true"
}
get_ambari_properties_method.return_value = configs
raw_input_mock.side_effect = ['a:3', 'b:b', 'hody', 'b:2', 'false', 'user', 'uid', 'group', 'cn', 'member', 'dn', 'base', 'follow', 'true', 'skip']
set_silent(False)
get_YN_input_method.return_value = True
options = self._create_empty_options_mock()
setup_ldap(options)
ldap_properties_map = TestAmbariServer._init_test_ldap_properties_map_invalid_input_1()
sorted_x = sorted(ldap_properties_map.iteritems(), key=operator.itemgetter(0))
sorted_y = sorted(update_properties_method.call_args[0][1].iteritems(),
key=operator.itemgetter(0))
self.assertEquals(sorted_x, sorted_y)
self.assertTrue(get_YN_input_method.called)
self.assertEquals(15, raw_input_mock.call_count)
raw_input_mock.reset_mock()
raw_input_mock.side_effect = ['a:3', '', 'b:2', 'false', 'user', 'uid', 'group', 'cn', 'member', 'dn', 'base', 'follow', 'true', 'skip']
setup_ldap(options)
ldap_properties_map = TestAmbariServer._init_test_ldap_properties_map_invalid_input_2()
sorted_x = sorted(ldap_properties_map.iteritems(), key=operator.itemgetter(0))
sorted_y = sorted(update_properties_method.call_args[0][1].iteritems(),
key=operator.itemgetter(0))
self.assertEquals(sorted_x, sorted_y)
self.assertEquals(14, raw_input_mock.call_count)
sys.stdout = sys.__stdout__
pass
@staticmethod
@OsFamilyFuncImpl(OSConst.WINSRV_FAMILY)
def _init_test_ldap_properties_map():
ldap_properties_map = \
{
"authentication.ldap.primaryUrl": "test",
"authentication.ldap.secondaryUrl": "test",
"authentication.ldap.useSSL": "false",
"authentication.ldap.usernameAttribute": "test",
"authentication.ldap.baseDn": "test",
"authentication.ldap.bindAnonymously": "false",
"ldap.sync.username.collision.behavior": "skip",
"authentication.ldap.managerDn": "test",
"authentication.ldap.referral": "test",
"client.security": "ldap",
LDAP_MGR_PASSWORD_PROPERTY: "ldap-password.dat",
"ambari.ldap.isConfigured": "true"
}
return ldap_properties_map
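# A DEFAULT-family variant of the same initializer follows; @OsFamilyFuncImpl
# dispatches by OS family at runtime, so the duplicated method name is intentional.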
@staticmethod
@OsFamilyFuncImpl(OsFamilyImpl.DEFAULT)
def _init_test_ldap_properties_map():
ldap_properties_map = \
{
"authentication.ldap.primaryUrl": "test",
"authentication.ldap.secondaryUrl": "test",
"authentication.ldap.useSSL": "false",
"authentication.ldap.userObjectClass": "test",
"authentication.ldap.usernameAttribute": "test",
"authentication.ldap.baseDn": "test",
"authentication.ldap.bindAnonymously": "false",
"ldap.sync.username.collision.behavior": "skip",
"authentication.ldap.managerDn": "test",
"authentication.ldap.groupObjectClass": "test",
"authentication.ldap.groupMembershipAttr": "test",
"authentication.ldap.groupNamingAttr": "test",
"authentication.ldap.dnAttribute": "test",
"authentication.ldap.referral": "test",
"client.security": "ldap",
LDAP_MGR_PASSWORD_PROPERTY: "ldap-password.dat",
"ambari.ldap.isConfigured": "true"
}
return ldap_properties_map
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch("ambari_server.setupSecurity.get_is_secure")
@patch("ambari_server.setupSecurity.encrypt_password")
@patch("ambari_server.setupSecurity.save_passwd_for_alias")
@patch("ambari_server.setupSecurity.get_YN_input")
@patch("ambari_server.setupSecurity.update_properties_2")
@patch("ambari_server.setupSecurity.configure_ldap_password")
@patch("ambari_server.setupSecurity.get_validated_string_input")
@patch("ambari_server.serverConfiguration.search_file")
@patch("ambari_server.setupSecurity.get_ambari_properties")
@patch("ambari_server.setupSecurity.is_root")
@patch("ambari_server.setupSecurity.read_password")
@patch("os.path.exists")
@patch("ambari_server.setupSecurity.logger")
def test_setup_ldap(self, logger_mock, exists_method, read_password_method, is_root_method, get_ambari_properties_method,
search_file_message,
get_validated_string_input_method,
configure_ldap_password_method, update_properties_method,
get_YN_input_method, save_passwd_for_alias_method,
encrypt_password_method, get_is_secure_method):
out = StringIO.StringIO()
sys.stdout = out
options = self._create_empty_options_mock()
# Testing call under non-root
is_root_method.return_value = False
try:
setup_ldap(options)
self.fail("Should throw exception")
except FatalException as fe:
# Expected
self.assertTrue("root-level" in fe.reason)
pass
# Testing call under root
is_root_method.return_value = True
search_file_message.return_value = "filepath"
configs = {SECURITY_MASTER_KEY_LOCATION: "filepath",
SECURITY_KEYS_DIR: tempfile.gettempdir(),
SECURITY_IS_ENCRYPTION_ENABLED: "true"
}
get_ambari_properties_method.return_value = configs
configure_ldap_password_method.return_value = "password"
save_passwd_for_alias_method.return_value = 0
encrypt_password_method.return_value = get_alias_string(LDAP_MGR_PASSWORD_ALIAS)
def yn_input_side_effect(*args, **kwargs):
if 'TrustStore' in args[0]:
return False
else:
return True
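# NOTE: the explicit side_effect list below overrides yn_input_side_effect,
# which is defined but never wired up to the mock.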
get_YN_input_method.side_effect = [True, ]
def valid_input_side_effect(*args, **kwargs):
if 'Bind anonymously' in args[0]:
return 'false'
if 'username collisions' in args[0]:
return 'skip'
if args[1] == "true" or args[1] == "false":
return args[1]
else:
return "test"
get_validated_string_input_method.side_effect = valid_input_side_effect
setup_ldap(options)
ldap_properties_map = TestAmbariServer._init_test_ldap_properties_map()
sorted_x = sorted(ldap_properties_map.iteritems(), key=operator.itemgetter(0))
sorted_y = sorted(update_properties_method.call_args[0][1].iteritems(),
key=operator.itemgetter(0))
self.assertEquals(sorted_x, sorted_y)
self.assertTrue(update_properties_method.called)
self.assertTrue(configure_ldap_password_method.called)
self.assertTrue(get_validated_string_input_method.called)
self.assertTrue(get_YN_input_method.called)
# truststore not found case
def os_path_exists(*args, **kwargs):
if "bogus" in args[0]:
return False
else:
return True
pass
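# os_path_exists rejects any 'bogus' TrustStore path, and path_counter below makes
# the first two TrustStore prompts return 'bogus' before 'valid', exercising the
# re-prompt loop for a missing truststore file.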
def input_enable_ssl(*args, **kwargs):
if 'Bind anonymously' in args[0]:
return 'false'
if "SSL" in args[0]:
return "true"
if "Path to TrustStore file" in args[0]:
if input_enable_ssl.path_counter < 2:
input_enable_ssl.path_counter += 1
return "bogus"
else:
return "valid"
if args[1] == "true" or args[1] == "false":
return args[1]
else:
return "test"
pass
input_enable_ssl.path_counter = 0
exists_method.side_effect = os_path_exists
get_validated_string_input_method.side_effect = input_enable_ssl
read_password_method.return_value = "password"
get_YN_input_method.reset_mock()
get_YN_input_method.side_effect = [True, True]
update_properties_method.reset_mock()
options.ldap_url = None
options.ldap_member_attr = None
setup_ldap(options)
self.assertTrue(read_password_method.called)
ldap_properties_map = \
{
"authentication.ldap.primaryUrl": "test",
"authentication.ldap.secondaryUrl": "test",
"authentication.ldap.useSSL": "true",
"authentication.ldap.usernameAttribute": "test",
"authentication.ldap.baseDn": "test",
"authentication.ldap.dnAttribute": "test",
"authentication.ldap.bindAnonymously": "false",
"ldap.sync.username.collision.behavior": "skip",
"authentication.ldap.managerDn": "test",
"client.security": "ldap",
"ssl.trustStore.type": "test",
"ssl.trustStore.path": "valid",
"ssl.trustStore.password": "password",
LDAP_MGR_PASSWORD_PROPERTY: get_alias_string(LDAP_MGR_PASSWORD_ALIAS)
}
sorted_x = sorted(ldap_properties_map.iteritems(), key=operator.itemgetter(0))
sorted_y = sorted(update_properties_method.call_args[0][1].iteritems(),
key=operator.itemgetter(0))
self.assertEquals(sorted_x, sorted_y)
sys.stdout = sys.__stdout__
pass
@patch("urllib2.urlopen")
@patch("ambari_server.setupSecurity.get_validated_string_input")
@patch("ambari_server.setupSecurity.get_ambari_properties")
@patch("ambari_server.setupSecurity.is_server_runing")
@patch("ambari_server.setupSecurity.is_root")
@patch("ambari_server.setupSecurity.logger")
def test_ldap_sync_all(self, logger_mock, is_root_method, is_server_runing_mock, get_ambari_properties_mock,
get_validated_string_input_mock, urlopen_mock):
is_root_method.return_value = True
is_server_runing_mock.return_value = (True, 0)
properties = Properties()
properties.process_pair(IS_LDAP_CONFIGURED, 'true')
properties.process_pair(CLIENT_API_PORT_PROPERTY, '8080')
get_ambari_properties_mock.return_value = properties
get_validated_string_input_mock.side_effect = ['admin', 'admin']
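# Scripted REST exchange: 201 for the POST that creates the sync event, then two
# 200 GETs whose bodies report RUNNING and finally COMPLETE.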
response = MagicMock()
response.getcode.side_effect = [201, 200, 200]
response.read.side_effect = ['{"resources" : [{"href" : "http://c6401.ambari.apache.org:8080/api/v1/ldap_sync_events/16","Event" : {"id" : 16}}]}',
'{"Event":{"status" : "RUNNING","summary" : {"groups" : {"created" : 0,"removed" : 0,"updated" : 0},"memberships" : {"created" : 0,"removed" : 0},"users" : {"created" : 0,"removed" : 0,"updated" : 0}}}}',
'{"Event":{"status" : "COMPLETE","summary" : {"groups" : {"created" : 1,"removed" : 0,"updated" : 0},"memberships" : {"created" : 5,"removed" : 0},"users" : {"created" : 5,"removed" : 0,"updated" : 0}}}}']
urlopen_mock.return_value = response
options = self._create_empty_options_mock()
options.ldap_sync_all = True
options.ldap_sync_existing = False
sync_ldap(options)
url = '{0}://{1}:{2!s}{3}'.format('http', '127.0.0.1', '8080', '/api/v1/ldap_sync_events')
request = urlopen_mock.call_args_list[0][0][0]
self.assertEquals(url, str(request.get_full_url()))
self.assertEquals('[{"Event": {"specs": [{"principal_type": "users", "sync_type": "all"}, {"principal_type": "groups", "sync_type": "all"}]}}]', request.data)
self.assertTrue(response.getcode.called)
self.assertTrue(response.read.called)
pass
@patch("__builtin__.open")
@patch("os.path.exists")
@patch("urllib2.urlopen")
@patch("ambari_server.setupSecurity.get_validated_string_input")
@patch("ambari_server.setupSecurity.get_ambari_properties")
@patch("ambari_server.setupSecurity.is_server_runing")
@patch("ambari_server.setupSecurity.is_root")
@patch("ambari_server.setupSecurity.logger")
def test_ldap_sync_users(self, logger_mock, is_root_method, is_server_runing_mock, get_ambari_properties_mock,
get_validated_string_input_mock, urlopen_mock, os_path_exists_mock, open_mock):
os_path_exists_mock.return_value = 1
f = MagicMock()
f.__enter__().read.return_value = "bob, tom"
open_mock.return_value = f
is_root_method.return_value = True
is_server_runing_mock.return_value = (True, 0)
properties = Properties()
properties.process_pair(IS_LDAP_CONFIGURED, 'true')
get_ambari_properties_mock.return_value = properties
get_validated_string_input_mock.side_effect = ['admin', 'admin']
response = MagicMock()
response.getcode.side_effect = [201, 200, 200]
response.read.side_effect = ['{"resources" : [{"href" : "http://c6401.ambari.apache.org:8080/api/v1/ldap_sync_events/16","Event" : {"id" : 16}}]}',
'{"Event":{"status" : "RUNNING","summary" : {"groups" : {"created" : 0,"removed" : 0,"updated" : 0},"memberships" : {"created" : 0,"removed" : 0},"users" : {"created" : 0,"removed" : 0,"updated" : 0}}}}',
'{"Event":{"status" : "COMPLETE","summary" : {"groups" : {"created" : 1,"removed" : 0,"updated" : 0},"memberships" : {"created" : 5,"removed" : 0},"users" : {"created" : 5,"removed" : 0,"updated" : 0}}}}']
urlopen_mock.return_value = response
options = self._create_empty_options_mock()
options.ldap_sync_all = False
options.ldap_sync_existing = False
options.ldap_sync_users = 'users.txt'
options.ldap_sync_groups = None
sync_ldap(options)
request = urlopen_mock.call_args_list[0][0][0]
self.assertEquals('[{"Event": {"specs": [{"principal_type": "users", "sync_type": "specific", "names": "bob, tom"}]}}]', request.data)
self.assertTrue(response.getcode.called)
self.assertTrue(response.read.called)
pass
@patch("__builtin__.open")
@patch("os.path.exists")
@patch("urllib2.urlopen")
@patch("ambari_server.setupSecurity.get_validated_string_input")
@patch("ambari_server.setupSecurity.get_ambari_properties")
@patch("ambari_server.setupSecurity.is_server_runing")
@patch("ambari_server.setupSecurity.is_root")
@patch("ambari_server.setupSecurity.logger")
def test_ldap_sync_groups(self, logger_mock, is_root_method, is_server_runing_mock, get_ambari_properties_mock,
get_validated_string_input_mock, urlopen_mock, os_path_exists_mock, open_mock):
os_path_exists_mock.return_value = 1
f = MagicMock()
f.__enter__().read.return_value = "group1, group2"
open_mock.return_value = f
is_root_method.return_value = True
is_server_runing_mock.return_value = (True, 0)
properties = Properties()
properties.process_pair(IS_LDAP_CONFIGURED, 'true')
get_ambari_properties_mock.return_value = properties
get_validated_string_input_mock.side_effect = ['admin', 'admin']
response = MagicMock()
response.getcode.side_effect = [201, 200, 200]
response.read.side_effect = ['{"resources" : [{"href" : "http://c6401.ambari.apache.org:8080/api/v1/ldap_sync_events/16","Event" : {"id" : 16}}]}',
'{"Event":{"status" : "RUNNING","summary" : {"groups" : {"created" : 0,"removed" : 0,"updated" : 0},"memberships" : {"created" : 0,"removed" : 0},"users" : {"created" : 0,"removed" : 0,"updated" : 0}}}}',
'{"Event":{"status" : "COMPLETE","summary" : {"groups" : {"created" : 1,"removed" : 0,"updated" : 0},"memberships" : {"created" : 5,"removed" : 0},"users" : {"created" : 5,"removed" : 0,"updated" : 0}}}}']
urlopen_mock.return_value = response
options = self._create_empty_options_mock()
options.ldap_sync_all = False
options.ldap_sync_existing = False
options.ldap_sync_users = None
options.ldap_sync_groups = 'groups.txt'
sync_ldap(options)
request = urlopen_mock.call_args_list[0][0][0]
self.assertEquals('[{"Event": {"specs": [{"principal_type": "groups", "sync_type": "specific", "names": "group1, group2"}]}}]', request.data)
self.assertTrue(response.getcode.called)
self.assertTrue(response.read.called)
pass
@patch("urllib2.urlopen")
@patch("ambari_server.setupSecurity.get_validated_string_input")
@patch("ambari_server.setupSecurity.get_ambari_properties")
@patch("ambari_server.setupSecurity.is_server_runing")
@patch("ambari_server.setupSecurity.is_root")
@patch("ambari_server.setupSecurity.logger")
def test_ldap_sync_ssl(self, logger_mock, is_root_method, is_server_runing_mock, get_ambari_properties_mock,
get_validated_string_input_mock, urlopen_mock):
is_root_method.return_value = True
is_server_runing_mock.return_value = (True, 0)
properties = Properties()
properties.process_pair(IS_LDAP_CONFIGURED, 'true')
properties.process_pair(SSL_API, 'true')
properties.process_pair(SSL_API_PORT, '8443')
get_ambari_properties_mock.return_value = properties
get_validated_string_input_mock.side_effect = ['admin', 'admin']
response = MagicMock()
response.getcode.side_effect = [201, 200, 200]
response.read.side_effect = ['{"resources" : [{"href" : "https://c6401.ambari.apache.org:8443/api/v1/ldap_sync_events/16","Event" : {"id" : 16}}]}',
'{"Event":{"status" : "RUNNING","summary" : {"groups" : {"created" : 0,"removed" : 0,"updated" : 0},"memberships" : {"created" : 0,"removed" : 0},"users" : {"created" : 0,"removed" : 0,"updated" : 0}}}}',
'{"Event":{"status" : "COMPLETE","summary" : {"groups" : {"created" : 1,"removed" : 0,"updated" : 0},"memberships" : {"created" : 5,"removed" : 0},"users" : {"created" : 5,"removed" : 0,"updated" : 0}}}}']
urlopen_mock.return_value = response
options = self._create_empty_options_mock()
options.ldap_sync_all = True
options.ldap_sync_existing = False
options.ldap_sync_users = None
options.ldap_sync_groups = None
sync_ldap(options)
url = '{0}://{1}:{2!s}{3}'.format('https', '127.0.0.1', '8443', '/api/v1/ldap_sync_events')
request = urlopen_mock.call_args_list[0][0][0]
self.assertEquals(url, str(request.get_full_url()))
self.assertTrue(response.getcode.called)
self.assertTrue(response.read.called)
pass
@patch("urllib2.urlopen")
@patch("ambari_server.setupSecurity.get_validated_string_input")
@patch("ambari_server.setupSecurity.get_ambari_properties")
@patch("ambari_server.setupSecurity.is_server_runing")
@patch("ambari_server.setupSecurity.is_root")
@patch("ambari_server.setupSecurity.logger")
def test_ldap_sync_existing(self, logger_mock, is_root_method, is_server_runing_mock, get_ambari_properties_mock,
get_validated_string_input_mock, urlopen_mock):
is_root_method.return_value = True
is_server_runing_mock.return_value = (True, 0)
properties = Properties()
properties.process_pair(IS_LDAP_CONFIGURED, 'true')
get_ambari_properties_mock.return_value = properties
get_validated_string_input_mock.side_effect = ['admin', 'admin']
response = MagicMock()
response.getcode.side_effect = [201, 200, 200]
response.read.side_effect = ['{"resources" : [{"href" : "http://c6401.ambari.apache.org:8080/api/v1/ldap_sync_events/16","Event" : {"id" : 16}}]}',
'{"Event":{"status" : "RUNNING","summary" : {"groups" : {"created" : 0,"removed" : 0,"updated" : 0},"memberships" : {"created" : 0,"removed" : 0},"users" : {"created" : 0,"removed" : 0,"updated" : 0}}}}',
'{"Event":{"status" : "COMPLETE","summary" : {"groups" : {"created" : 1,"removed" : 0,"updated" : 0},"memberships" : {"created" : 5,"removed" : 0},"users" : {"created" : 5,"removed" : 0,"updated" : 0}}}}']
urlopen_mock.return_value = response
options = self._create_empty_options_mock()
options.ldap_sync_all = False
options.ldap_sync_existing = True
options.ldap_sync_users = None
options.ldap_sync_groups = None
sync_ldap(options)
self.assertTrue(response.getcode.called)
self.assertTrue(response.read.called)
pass
@patch("urllib2.urlopen")
@patch("ambari_server.setupSecurity.get_validated_string_input")
@patch("ambari_server.setupSecurity.get_ambari_properties")
@patch("ambari_server.setupSecurity.is_server_runing")
@patch("ambari_server.setupSecurity.is_root")
@patch("ambari_server.setupSecurity.logger")
def test_ldap_sync_no_sync_mode(self, logger_mock, is_root_method, is_server_runing_mock, get_ambari_properties_mock,
get_validated_string_input_mock, urlopen_mock):
is_root_method.return_value = True
is_server_runing_mock.return_value = (True, 0)
properties = Properties()
properties.process_pair(IS_LDAP_CONFIGURED, 'true')
get_ambari_properties_mock.return_value = properties
get_validated_string_input_mock.side_effect = ['admin', 'admin']
response = MagicMock()
response.getcode.side_effect = [201, 200, 200]
response.read.side_effect = ['{"resources" : [{"href" : "http://c6401.ambari.apache.org:8080/api/v1/ldap_sync_events/16","Event" : {"id" : 16}}]}',
'{"Event":{"status" : "RUNNING","summary" : {"groups" : {"created" : 0,"removed" : 0,"updated" : 0},"memberships" : {"created" : 0,"removed" : 0},"users" : {"created" : 0,"removed" : 0,"updated" : 0}}}}',
'{"Event":{"status" : "COMPLETE","summary" : {"groups" : {"created" : 1,"removed" : 0,"updated" : 0},"memberships" : {"created" : 5,"removed" : 0},"users" : {"created" : 5,"removed" : 0,"updated" : 0}}}}']
urlopen_mock.return_value = response
options = self._create_empty_options_mock()
del options.ldap_sync_all
del options.ldap_sync_existing
del options.ldap_sync_users
del options.ldap_sync_groups
try:
sync_ldap(options)
self.fail("Should fail with exception")
except FatalException as e:
pass
pass
@patch("urllib2.urlopen")
@patch("ambari_server.setupSecurity.get_validated_string_input")
@patch("ambari_server.setupSecurity.get_ambari_properties")
@patch("ambari_server.setupSecurity.is_server_runing")
@patch("ambari_server.setupSecurity.is_root")
@patch("ambari_server.setupSecurity.logger")
def test_ldap_sync_error_status(self, logger_mock, is_root_method, is_server_runing_mock, get_ambari_properties_mock,
get_validated_string_input_mock, urlopen_mock):
is_root_method.return_value = True
is_server_runing_mock.return_value = (True, 0)
properties = Properties()
properties.process_pair(IS_LDAP_CONFIGURED, 'true')
get_ambari_properties_mock.return_value = properties
get_validated_string_input_mock.side_effect = ['admin', 'admin']
response = MagicMock()
response.getcode.side_effect = [201, 200]
response.read.side_effect = ['{"resources" : [{"href" : "http://c6401.ambari.apache.org:8080/api/v1/ldap_sync_events/16","Event" : {"id" : 16}}]}',
'{"Event":{"status" : "ERROR","status_detail" : "Error!!","summary" : {"groups" : {"created" : 0,"removed" : 0,"updated" : 0},"memberships" : {"created" : 0,"removed" : 0},"users" : {"created" : 0,"removed" : 0,"updated" : 0}}}}']
urlopen_mock.return_value = response
options = self._create_empty_options_mock()
options.ldap_sync_all = False
options.ldap_sync_existing = False
options.ldap_sync_users = None
options.ldap_sync_groups = None
try:
sync_ldap(options)
self.fail("Should fail with exception")
except FatalException as e:
pass
pass
@patch("urllib2.urlopen")
@patch("urllib2.Request")
@patch("base64.encodestring")
@patch("ambari_server.setupSecurity.is_server_runing")
@patch("ambari_server.setupSecurity.get_ambari_properties")
@patch("ambari_server.setupSecurity.get_validated_string_input")
@patch("ambari_server.setupSecurity.logger")
def test_sync_ldap_forbidden(self, logger_mock, get_validated_string_input_method, get_ambari_properties_method,
is_server_runing_method,
encodestring_method, request_constructor, urlopen_method):
options = self._create_empty_options_mock()
options.ldap_sync_all = True
options.ldap_sync_existing = False
options.ldap_sync_users = None
options.ldap_sync_groups = None
is_server_runing_method.return_value = (None, None)
try:
sync_ldap(options)
self.fail("Should throw exception if ambari is stopped")
except FatalException as fe:
# Expected
self.assertTrue("not running" in fe.reason)
pass
is_server_runing_method.return_value = (True, None)
configs = MagicMock()
configs.get_property.return_value = None
get_ambari_properties_method.return_value = configs
try:
sync_ldap(options)
self.fail("Should throw exception if ldap is not configured")
except FatalException as fe:
# Expected
self.assertTrue("not configured" in fe.reason)
pass
configs.get_property.return_value = 'true'
get_validated_string_input_method.return_value = 'admin'
encodestring_method.return_value = 'qwe123'
requestMocks = [MagicMock()]
request_constructor.side_effect = requestMocks
response = MagicMock()
response.getcode.return_value = 403
urlopen_method.return_value = response
try:
sync_ldap(options)
self.fail("Should throw exception if return code != 200")
except FatalException as fe:
# Expected
self.assertTrue("status code" in fe.reason)
pass
pass
@patch("ambari_server.setupSecurity.is_root")
def test_sync_ldap_not_root(self, is_root_method):
is_root_method.return_value = False
options = self._create_empty_options_mock()
options.ldap_sync_all = True
options.ldap_sync_existing = False
options.ldap_sync_users = None
options.ldap_sync_groups = None
try:
sync_ldap(options)
self.fail("Should throw exception if not root")
except FatalException as fe:
# Expected
self.assertTrue("root-level" in fe.reason)
pass
pass
@patch("ambari_server.setupSecurity.is_root")
@patch("ambari_server.setupSecurity.is_server_runing")
@patch("ambari_server.setupSecurity.logger")
def test_sync_ldap_ambari_stopped(self, logger_mock, is_server_runing_method, is_root_method):
is_root_method.return_value = True
is_server_runing_method.return_value = (None, None)
options = self._create_empty_options_mock()
options.ldap_sync_all = True
options.ldap_sync_existing = False
options.ldap_sync_users = None
options.ldap_sync_groups = None
try:
sync_ldap(options)
self.fail("Should throw exception if ambari is stopped")
except FatalException as fe:
# Expected
self.assertTrue("not running" in fe.reason)
pass
pass
@patch("ambari_server.setupSecurity.is_root")
@patch("ambari_server.setupSecurity.is_server_runing")
@patch("ambari_server.setupSecurity.get_ambari_properties")
@patch("ambari_server.setupSecurity.logger")
def test_sync_ldap_not_configured(self, logger_mock, get_ambari_properties_method,
is_server_runing_method, is_root_method):
is_root_method.return_value = True
is_server_runing_method.return_value = (True, None)
configs = MagicMock()
configs.get_property.return_value = None
get_ambari_properties_method.return_value = configs
options = self._create_empty_options_mock()
options.ldap_sync_all = True
del options.ldap_sync_existing
del options.ldap_sync_users
del options.ldap_sync_groups
try:
sync_ldap(options)
self.fail("Should throw exception if ldap is not configured")
except FatalException as fe:
# Expected
self.assertTrue("not configured" in fe.reason)
pass
pass
@patch("__builtin__.open")
@patch("os.path.exists")
def test_get_ldap_event_spec_names(self, os_path_exists_mock, open_mock):
os_path_exists_mock.return_value = 1
f = MagicMock()
f.__enter__().read.return_value = "\n\n\t some group, \tanother group, \n\t\tgrp, \ngroup*\n\n\n\n"
open_mock.return_value = f
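# The helper is expected to strip the surrounding whitespace and newlines and emit
# one comma-separated names string into the event spec, as asserted below.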
bodies = [{"Event":{"specs":[]}}]
body = bodies[0]
events = body['Event']
specs = events['specs']
new_specs = [{"principal_type":"groups","sync_type":"specific","names":""}]
get_ldap_event_spec_names("groups.txt", specs, new_specs)
self.assertEquals("[{'Event': {'specs': [{'principal_type': 'groups', 'sync_type': 'specific', 'names': ' some group, another group, grp, group*'}]}}]", str(bodies))
pass
@patch("ambari_server.setupSecurity.read_password")
def test_configure_ldap_password(self, read_password_method):
out = StringIO.StringIO()
sys.stdout = out
read_password_method.return_value = "blah"
options = self._create_empty_options_mock()
configure_ldap_password(options)
self.assertTrue(read_password_method.called)
sys.stdout = sys.__stdout__
pass
@patch("ambari_server.userInput.get_validated_string_input")
def test_read_password(self, get_validated_string_input_method):
out = StringIO.StringIO()
sys.stdout = out
passwordDefault = ""
passwordPrompt = 'Enter Manager Password* : '
passwordPattern = ".*"
passwordDescr = "Invalid characters in password."
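# Three scenarios follow: an empty first entry forces a re-prompt (3 inputs),
# a matching pair succeeds in 2 inputs, and a pre-set default is confirmed
# with a single input.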
get_validated_string_input_method.side_effect = ['', 'aaa', 'aaa']
password = read_password(passwordDefault, passwordPattern,
passwordPrompt, passwordDescr)
self.assertEquals(3, get_validated_string_input_method.call_count)
self.assertEquals('aaa', password)
get_validated_string_input_method.reset_mock()
get_validated_string_input_method.side_effect = ['aaa', 'aaa']
password = read_password(passwordDefault, passwordPattern,
passwordPrompt, passwordDescr)
self.assertEquals(2, get_validated_string_input_method.call_count)
self.assertEquals('aaa', password)
get_validated_string_input_method.reset_mock()
get_validated_string_input_method.side_effect = ['aaa']
password = read_password('aaa', passwordPattern,
passwordPrompt, passwordDescr)
self.assertEquals(1, get_validated_string_input_method.call_count)
self.assertEquals('aaa', password)
sys.stdout = sys.__stdout__
pass
def test_generate_random_string(self):
random_str_len = 100
str1 = generate_random_string(random_str_len)
self.assertTrue(len(str1) == random_str_len)
str2 = generate_random_string(random_str_len)
self.assertTrue(str1 != str2)
pass
@patch("__builtin__.open")
@patch("ambari_server.serverConfiguration.search_file")
@patch("ambari_server.serverConfiguration.backup_file_in_temp")
def test_update_properties_2(self, backup_file_in_temp_mock, search_file_mock, open_mock):
conf_file = "ambari.properties"
propertyMap = {"1": "1", "2": "2"}
properties = MagicMock()
f = MagicMock(name="file")
search_file_mock.return_value = conf_file
open_mock.return_value = f
update_properties_2(properties, propertyMap)
properties.store_ordered.assert_called_with(f.__enter__.return_value)
backup_file_in_temp_mock.assert_called_with(conf_file)
self.assertEquals(2, properties.removeOldProp.call_count)
self.assertEquals(2, properties.process_pair.call_count)
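# Second pass: a None property map must still back up and rewrite the file,
# but must not touch any individual property.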
properties = MagicMock()
backup_file_in_temp_mock.reset_mock()
open_mock.reset_mock()
update_properties_2(properties, None)
properties.store_ordered.assert_called_with(f.__enter__.return_value)
backup_file_in_temp_mock.assert_called_with(conf_file)
self.assertFalse(properties.removeOldProp.called)
self.assertFalse(properties.process_pair.called)
pass
def test_regexps(self):
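# Behavior pinned down below: REGEX_HOSTNAME_PORT accepts host:port with a short
# numeric port; REGEX_TRUE_FALSE accepts only '', 'true' or 'false';
# REGEX_ANYTHING matches any input including the empty string.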
res = re.search(REGEX_HOSTNAME_PORT, "")
self.assertTrue(res is None)
res = re.search(REGEX_HOSTNAME_PORT, "ddd")
self.assertTrue(res is None)
res = re.search(REGEX_HOSTNAME_PORT, "gg:ff")
self.assertTrue(res is None)
res = re.search(REGEX_HOSTNAME_PORT, "gg:55444325")
self.assertTrue(res is None)
res = re.search(REGEX_HOSTNAME_PORT, "gg:555")
self.assertTrue(res is not None)
res = re.search(REGEX_TRUE_FALSE, "")
self.assertTrue(res is not None)
res = re.search(REGEX_TRUE_FALSE, "t")
self.assertTrue(res is None)
res = re.search(REGEX_TRUE_FALSE, "trrrr")
self.assertTrue(res is None)
res = re.search(REGEX_TRUE_FALSE, "true|false")
self.assertTrue(res is None)
res = re.search(REGEX_TRUE_FALSE, "true")
self.assertTrue(res is not None)
res = re.search(REGEX_TRUE_FALSE, "false")
self.assertTrue(res is not None)
res = re.search(REGEX_ANYTHING, "")
self.assertTrue(res is not None)
res = re.search(REGEX_ANYTHING, "t")
self.assertTrue(res is not None)
res = re.search(REGEX_ANYTHING, "trrrr")
self.assertTrue(res is not None)
pass
def get_sample(self, sample):
"""
Returns sample file content as string with normalized line endings
"""
path = self.get_samples_dir(sample)
return self.get_file_string(path)
def get_file_string(self, file):
"""
Returns file content as string with normalized line endings
"""
string = open(file, 'r').read()
return self.normalize(string)
def normalize(self, string):
"""
Normalizes line ending in string according to platform-default encoding
"""
return string.replace("\n", os.linesep)
def get_samples_dir(self, sample):
"""
Returns full file path by sample name
"""
testdir = os.path.dirname(__file__)
return os.path.dirname(testdir) + os.sep + "resources" + os.sep \
+ 'TestAmbaryServer.samples/' + sample
@not_for_platform(PLATFORM_WINDOWS)
@patch("ambari_server.dbConfiguration_linux.get_ambari_properties")
def test_is_jdbc_user_changed(self, get_ambari_properties_mock):
previous_user = "previous_user"
new_user = "new_user"
props = Properties()
props.process_pair(JDBC_USER_NAME_PROPERTY, previous_user)
get_ambari_properties_mock.return_value = props
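# _is_jdbc_user_changed compares the candidate against the persisted JDBC user
# name: a different name returns True, the same name False, and None yields None.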
#check if users are different
result = PGConfig._is_jdbc_user_changed(new_user)
self.assertTrue(result)
#check if users are equal
result = PGConfig._is_jdbc_user_changed(previous_user)
self.assertFalse(result)
#check if one of users is None
result = PGConfig._is_jdbc_user_changed(None)
self.assertEqual(None, result)
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch("ambari_server.serverConfiguration.write_property")
@patch("ambari_server.serverConfiguration.get_ambari_properties")
@patch("ambari_server.serverConfiguration.get_ambari_version")
def test_check_database_name_property(self, get_ambari_version_mock, get_ambari_properties_mock, write_property_mock):
parser = OptionParser()
parser.add_option('--database', default=None, help="Database to use embedded|oracle|mysql|mssql|postgres", dest="dbms")
args = parser.parse_args()
# negative case
get_ambari_properties_mock.return_value = {JDBC_DATABASE_NAME_PROPERTY: ""}
try:
result = check_database_name_property()
self.fail("Should fail with exception")
except FatalException as e:
self.assertTrue('DB Name property not set in config file.' in e.reason)
# positive case
dbname = "ambari"
get_ambari_properties_mock.reset_mock()
get_ambari_properties_mock.return_value = {JDBC_DATABASE_NAME_PROPERTY: dbname}
try:
result = check_database_name_property()
except FatalException:
self.fail("Setup should be successful")
# Check upgrade. In Ambari < 1.7.1 "database" property contained db name for local db
dbname = "ambari"
database = "ambari"
persistence = "local"
get_ambari_properties_mock.reset_mock()
get_ambari_properties_mock.return_value = {JDBC_DATABASE_NAME_PROPERTY: dbname,
JDBC_DATABASE_PROPERTY: database,
PERSISTENCE_TYPE_PROPERTY: persistence}
try:
result = check_database_name_property(upgrade=True)
except FatalException:
self.fail("Setup should be successful")
self.assertTrue(write_property_mock.called)
@not_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch("resource_management.core.shell.call")
@patch("ambari_server.dbConfiguration_linux.PGConfig._is_jdbc_user_changed")
@patch("ambari_server.serverSetup.verify_setup_allowed")
@patch("ambari_server.serverSetup.get_YN_input")
@patch("ambari_server.serverSetup.configure_os_settings")
@patch("ambari_server.serverSetup.download_and_install_jdk")
@patch.object(PGConfig, "_configure_postgres")
@patch.object(PGConfig, "_check_postgre_up")
@patch("ambari_server.serverSetup.check_ambari_user")
@patch("ambari_server.serverSetup.check_jdbc_drivers")
@patch("ambari_server.serverSetup.check_selinux")
@patch("ambari_server.serverSetup.is_root")
@patch.object(PGConfig, "_setup_db")
@patch("ambari_server.serverSetup.get_is_secure")
@patch("ambari_server.dbConfiguration_linux.store_password_file")
@patch("ambari_server.serverSetup.extract_views")
@patch("ambari_server.serverSetup.adjust_directory_permissions")
@patch("sys.exit")
@patch("__builtin__.raw_input")
@patch("ambari_server.serverSetup.expand_jce_zip_file")
def test_ambariServerSetupWithCustomDbName(self, expand_jce_zip_file_mock, raw_input, exit_mock, adjust_dirs_mock,
extract_views_mock, store_password_file_mock,
get_is_secure_mock, setup_db_mock, is_root_mock, #is_local_database_mock,
check_selinux_mock, check_jdbc_drivers_mock, check_ambari_user_mock,
check_postgre_up_mock, configure_postgres_mock,
download_jdk_mock, configure_os_settings_mock, get_YN_input,
verify_setup_allowed_method, is_jdbc_user_changed_mock,
run_os_command_mock):
args = MagicMock()
raw_input.return_value = ""
get_YN_input.return_value = False
verify_setup_allowed_method.return_value = 0
is_root_mock.return_value = True
check_selinux_mock.return_value = 0
check_ambari_user_mock.return_value = (0, False, 'user', None)
check_jdbc_drivers_mock.return_value = 0
check_postgre_up_mock.return_value = "running", 0, "", ""
configure_postgres_mock.return_value = 0, "", ""
download_jdk_mock.return_value = 0
configure_os_settings_mock.return_value = 0
is_jdbc_user_changed_mock.return_value = False
setup_db_mock.return_value = (0, None, None)
get_is_secure_mock.return_value = False
store_password_file_mock.return_value = "password"
extract_views_mock.return_value = 0
run_os_command_mock.return_value = 3,"",""
new_db = "newDBName"
args.dbms = "postgres"
args.database_name = new_db
args.postgres_schema = new_db
args.database_username = "user"
args.database_password = "password"
args.jdbc_driver= None
args.jdbc_db = None
args.must_set_database_options = True
del args.database_index
del args.persistence_type
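# Seed a temporary ambari.properties holding the old DB name and point
# AMBARI_CONF_VAR at it, so setup() must overwrite the value with new_db.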
tempdir = tempfile.gettempdir()
prop_file = os.path.join(tempdir, "ambari.properties")
with open(prop_file, "w") as f:
f.write("server.jdbc.database_name=oldDBName")
f.close()
os.environ[AMBARI_CONF_VAR] = tempdir
try:
result = setup(args)
except FatalException as ex:
self.fail("Setup should be successful")
properties = get_ambari_properties()
self.assertTrue(JDBC_DATABASE_NAME_PROPERTY in properties.keys())
value = properties[JDBC_DATABASE_NAME_PROPERTY]
self.assertEqual(value, new_db)
del os.environ[AMBARI_CONF_VAR]
os.remove(prop_file)
pass
@only_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch("ambari_server.serverSetup.service_setup")
@patch("ambari_server.dbConfiguration_windows.MSSQLConfig._execute_db_script")
@patch("ambari_server.dbConfiguration_windows.store_password_file")
@patch("ambari_server.dbConfiguration_windows.MSSQLConfig._is_jdbc_driver_installed")
@patch("ambari_server.serverSetup.verify_setup_allowed")
@patch("ambari_server.serverSetup.get_YN_input")
@patch("ambari_server.serverSetup.configure_os_settings")
@patch("ambari_server.serverSetup.download_and_install_jdk")
@patch("ambari_server.serverSetup.check_firewall")
@patch("ambari_server.serverSetup.check_ambari_user")
@patch("ambari_server.serverSetup.check_jdbc_drivers")
@patch("ambari_server.serverSetup.is_root")
@patch("ambari_server.serverSetup.extract_views")
@patch("ambari_server.serverSetup.adjust_directory_permissions")
def test_ambariServerSetupWithCustomDbName(self,
adjust_dirs_mock,
extract_views_mock,
is_root_mock,
check_jdbc_drivers_mock,
check_ambari_user_mock,
check_firewall_mock,
download_jdk_mock,
configure_os_settings_mock,
get_YN_input,
verify_setup_allowed_method,
is_jdbc_driver_installed_mock,
store_password_file_mock,
execute_db_script_mock,
service_setup_mock):
args = MagicMock()
get_YN_input.return_value = False
verify_setup_allowed_method.return_value = 0
is_root_mock.return_value = True
check_ambari_user_mock.return_value = (0, False, 'user', None)
check_jdbc_drivers_mock.return_value = 0
download_jdk_mock.return_value = 0
configure_os_settings_mock.return_value = 0
is_jdbc_driver_installed_mock.return_value = True
store_password_file_mock.return_value = "password.dat"
extract_views_mock.return_value = 0
new_db = "newDBName"
del args.dbms
del args.database_index
del args.database_host
del args.database_port
args.database_name = new_db
args.database_username = "user"
args.database_password = "password"
del args.database_windows_auth
args.jdbc_driver= None
args.jdbc_db = None
args.must_set_database_options = True
del args.default_database_host
del args.persistence_type
del args.init_db_script_file
del args.cleanup_db_script_file
tempdir = tempfile.gettempdir()
prop_file = os.path.join(tempdir, "ambari.properties")
with open(prop_file, "w") as f:
f.write("server.jdbc.database_name=oldDBName")
f.close()
os.environ[AMBARI_CONF_VAR] = tempdir
try:
result = setup(args)
except FatalException as ex:
self.fail("Setup should be successful")
properties = get_ambari_properties()
self.assertTrue(JDBC_DATABASE_NAME_PROPERTY in properties.keys())
value = properties[JDBC_DATABASE_NAME_PROPERTY]
self.assertEqual(value, new_db)
self.assertEqual(store_password_file_mock.call_count, 2)
self.assertEqual(execute_db_script_mock.call_count, 2)
del os.environ[AMBARI_CONF_VAR]
os.remove(prop_file)
pass
def test_is_valid_filepath(self):
temp_dir = tempfile.gettempdir()
temp_file = tempfile.NamedTemporaryFile(mode='r')
# Correct path to an existing file
self.assertTrue(is_valid_filepath(temp_file.name))
# Correct path to an existing directory
self.assertFalse(is_valid_filepath(temp_dir), \
'is_valid_filepath(path) should return False if path is a directory')
# Incorrect path
self.assertFalse(is_valid_filepath(''))
pass
@patch("ambari_server.setupSecurity.search_file")
@patch("ambari_server.setupSecurity.get_validated_string_input")
def test_setup_ambari_krb5_jaas_with_options(self, get_validated_string_input_mock,
search_file_mock):
options = self._create_empty_options_mock()
options.jaas_keytab = '/kerberos/admin.keytab'
temp_file = tempfile.NamedTemporaryFile(mode='r')
search_file_mock.return_value = temp_file.name
get_validated_string_input_mock.side_effect = ['adm@EXAMPLE.COM', temp_file]
self.assertEqual(None, setup_ambari_krb5_jaas(options))
self.assertTrue(get_validated_string_input_mock.called)
self.assertEqual(get_validated_string_input_mock.call_count, 2)
get_validated_string_input_mock.assert_called_with("Enter keytab path for ambari server's kerberos principal: ",
'/etc/security/keytabs/ambari.keytab', '.*', False, False,
validatorFunction = is_valid_filepath, answer='/kerberos/admin.keytab')
pass
@patch("os.listdir")
@patch("os.path.exists")
@patch("ambari_server.serverUpgrade.load_stack_values")
@patch("ambari_server.serverUpgrade.get_ambari_properties")
@patch("ambari_server.serverUpgrade.run_metainfo_upgrade")
def test_upgrade_local_repo(self,
run_metainfo_upgrade_mock,
get_ambari_properties_mock,
load_stack_values_mock,
os_path_exists_mock,
os_listdir_mock):
from mock.mock import call
args = MagicMock()
args.persistence_type = "local"
def load_values_side_effect(*args, **kwargs):
res = {}
res['a'] = 'http://oldurl'
if -1 != args[1].find("HDPLocal"):
res['a'] = 'http://newurl'
return res
load_stack_values_mock.side_effect = load_values_side_effect
properties = Properties()
get_ambari_properties_mock.return_value = properties
os_path_exists_mock.return_value = 1
os_listdir_mock.return_value = ['1.1']
upgrade_local_repo(args)
self.assertTrue(get_ambari_properties_mock.called)
self.assertTrue(load_stack_values_mock.called)
self.assertTrue(run_metainfo_upgrade_mock.called)
run_metainfo_upgrade_mock.assert_called_with(args, {'a': 'http://newurl'})
pass
@patch("os.listdir")
@patch("os.path.exists")
@patch("ambari_server.serverUpgrade.load_stack_values")
@patch("ambari_server.serverUpgrade.get_ambari_properties")
@patch("ambari_server.serverUpgrade.run_metainfo_upgrade")
def test_upgrade_local_repo_nochange(self,
run_metainfo_upgrade_mock,
get_ambari_properties_mock,
load_stack_values_mock,
os_path_exists_mock,
os_listdir_mock):
from mock.mock import call
args = MagicMock()
args.persistence_type = "local"
def load_values_side_effect(*args, **kwargs):
res = {}
res['a'] = 'http://oldurl'
return res
load_stack_values_mock.side_effect = load_values_side_effect
properties = Properties()
get_ambari_properties_mock.return_value = properties
os_path_exists_mock.return_value = 1
os_listdir_mock.return_value = ['1.1']
upgrade_local_repo(args)
self.assertTrue(get_ambari_properties_mock.called)
self.assertTrue(load_stack_values_mock.called)
self.assertTrue(run_metainfo_upgrade_mock.called)
run_metainfo_upgrade_mock.assert_called_with(args, {})
pass
@patch("os.path.exists")
@patch.object(ResourceFilesKeeper, "perform_housekeeping")
def test_refresh_stack_hash(self,
perform_housekeeping_mock, path_exists_mock):
path_exists_mock.return_value = True
properties = Properties()
refresh_stack_hash(properties)
self.assertTrue(perform_housekeeping_mock.called)
pass
@patch("ambari_server.dbConfiguration.decrypt_password_for_alias")
@patch("ambari_server.dbConfiguration_linux.run_os_command")
@patch("ambari_server.dbConfiguration_linux.print_error_msg")
def test_change_tables_owner_no_tables(self, print_error_msg_mock, run_os_command_mock,
decrypt_password_for_alias_mock):
args = MagicMock()
del args.database_index
del args.dbms
del args.database_host
del args.database_port
del args.database_name
del args.database_username
del args.database_password
del args.init_script_file
del args.drop_script_file
properties = Properties()
properties.process_pair(JDBC_PASSWORD_PROPERTY, get_alias_string("mypwdalias"))
decrypt_password_for_alias_mock.return_value = "password"
run_os_command_mock.return_value = 0, "", ""
dbms = PGConfig(args, properties, "local")
result = dbms._change_tables_owner()
self.assertFalse(result)
self.assertEquals(print_error_msg_mock.call_args_list[0][0][0], 'Failed to get list of ambari tables')
@patch("ambari_server.dbConfiguration.decrypt_password_for_alias")
@patch("ambari_server.dbConfiguration_linux.run_os_command")
@patch("ambari_server.dbConfiguration_linux.print_error_msg")
def test_change_tables_owner_fatal_psql(self, print_error_msg_mock, run_os_command_mock,
decrypt_password_for_alias_mock):
args = MagicMock()
del args.database_index
del args.dbms
del args.database_host
del args.database_port
del args.database_name
del args.database_username
del args.database_password
del args.init_script_file
del args.drop_script_file
properties = Properties()
properties.process_pair(JDBC_PASSWORD_PROPERTY, get_alias_string("mypwdalias"))
decrypt_password_for_alias_mock.return_value = "password"
run_os_command_mock.return_value = 0, "", "psql: could not connect to server: No such file or directory"
dbms = PGConfig(args, properties, "local")
result = dbms._change_tables_owner()
self.assertFalse(result)
self.assertEquals(print_error_msg_mock.call_args_list[0][0][0], """Failed to get list of ambari tables. Message from psql:
stdout:
stderr:psql: could not connect to server: No such file or directory
""")
@patch("ambari_server.dbConfiguration.decrypt_password_for_alias")
@patch("ambari_server.dbConfiguration_linux.run_os_command")
@patch("ambari_server.dbConfiguration_linux.print_error_msg")
def test_change_tables_owner(self, print_error_msg_mock, run_os_command_mock,
decrypt_password_for_alias_mock):
args = MagicMock()
del args.database_index
del args.dbms
del args.database_host
del args.database_port
del args.database_name
del args.database_username
del args.database_password
del args.init_script_file
del args.drop_script_file
properties = Properties()
properties.process_pair(JDBC_PASSWORD_PROPERTY, get_alias_string("mypwdalias"))
decrypt_password_for_alias_mock.return_value = "password"
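# Scripted psql results: the table list first, then a sequence of empty and
# 'postgres'/'ALTER TABLE' outputs. The exact mapping of calls to owner lookups
# and ALTER statements follows PGConfig._change_tables_owner's internals
# (assumed here: one owner query plus one ALTER per listed table).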
run_os_command_mock.side_effect = [(0, "tbl1\n,tbl2", ""),
(0, "", ""),
(0, "", ""),
(0, "postgres", ""),
(0, "ALTER TABLE", ""),
(0, "postgres", ""),
(0, "ALTER TABLE", "")]
dbms = PGConfig(args, properties, "local")
result = dbms._change_tables_owner()
self.assertTrue(result)
self.assertEquals(run_os_command_mock.call_count, 7)
@patch("os.path.isdir", new = MagicMock(return_value=True))
@patch("os.access", new = MagicMock(return_value=True))
@patch.object(ServerClassPath, "get_full_ambari_classpath_escaped_for_shell", new = MagicMock(return_value = 'test' + os.pathsep + 'path12'))
@patch("ambari_server.serverUtils.is_server_runing")
@patch("ambari_commons.os_utils.run_os_command")
@patch("ambari_server.setupSecurity.generate_env")
@patch("ambari_server.setupSecurity.ensure_can_start_under_current_user")
@patch("ambari_server.serverConfiguration.read_ambari_user")
@patch("ambari_server.dbConfiguration.ensure_jdbc_driver_is_installed")
@patch("ambari_server.serverConfiguration.parse_properties_file")
@patch("ambari_server.serverConfiguration.get_ambari_properties")
@patch("ambari_server.serverConfiguration.get_java_exe_path")
@patch("os.path.isfile")
@patch("sys.exit")
@patch("ambari_server.userInput.get_YN_input")
@patch("ambari_server.hostUpdate.logger")
def test_update_host_names(self, logger_mock, getYNInput_mock, sysExitMock, isFileMock, getJavaExePathMock,
getAmbariPropertiesMock, parsePropertiesFileMock, ensureDriverInstalledMock, readAmbariUserMock,
ensureCanStartUnderCurrentUserMock, generateEnvMock, runOSCommandMock, isServerRunningMock):
properties = Properties()
properties.process_pair("server.jdbc.database", "embedded")
getYNInput_mock.return_value = False
isFileMock.return_value = True
getJavaExePathMock.return_value = "/path/to/java"
getAmbariPropertiesMock.return_value = properties
readAmbariUserMock.return_value = "test_user"
ensureCanStartUnderCurrentUserMock.return_value = "test_user"
generateEnvMock.return_value = {}
runOSCommandMock.return_value = (0, "", "")
isServerRunningMock.return_value = (False, 1)
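# The java invocation asserted below must use the escaped classpath and redirect
# output to ambari-server.out; sys.exit is mocked so the command's exit paths can
# be observed without terminating the test run.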
update_host_names(["update-host-names", "/testFileWithChanges"], properties)
self.assertEquals(len(sysExitMock.call_args_list), 3)
self.assertTrue(isFileMock.called)
self.assertTrue(getJavaExePathMock.called)
self.assertTrue(readAmbariUserMock.called)
self.assertTrue(ensureCanStartUnderCurrentUserMock.called)
self.assertTrue(generateEnvMock.called)
self.assertEquals(runOSCommandMock.call_args[0][0], '/path/to/java -cp test:path12 '
'org.apache.ambari.server.update.HostUpdateHelper /testFileWithChanges > '
'/var/log/ambari-server/ambari-server.out 2>&1')
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch.object(_ambari_server_, "is_server_runing")
@patch("optparse.OptionParser")
@patch.object(_ambari_server_, "logger")
@patch("ambari_server.serverConfiguration.get_ambari_properties")
@patch.object(_ambari_server_, "setup_logging")
@patch.object(_ambari_server_, "init_logging")
def test_main_test_status_running(self, init_logging_mock, setup_logging_mock, get_ambari_properties_mock,
logger_mock, optionParserMock, is_server_runing_method):
opm = optionParserMock.return_value
options = self._create_empty_options_mock()
del options.exit_message
args = ["status"]
opm.parse_args.return_value = (options, args)
is_server_runing_method.return_value = (True, 100)
options.dbms = None
options.sid_or_sname = "sid"
try:
_ambari_server_.mainBody()
except SystemExit as e:
self.assertTrue(e.code == 0)
self.assertTrue(is_server_runing_method.called)
pass
@not_for_platform(PLATFORM_WINDOWS)
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
@patch.object(_ambari_server_, "is_server_runing")
@patch("optparse.OptionParser")
@patch.object(_ambari_server_, "logger")
@patch("ambari_server.serverConfiguration.get_ambari_properties")
@patch.object(_ambari_server_, "setup_logging")
@patch.object(_ambari_server_, "init_logging")
def test_main_test_status_not_running(self, init_logging_mock, setup_logging_mock, get_ambari_properties_mock,
logger_mock, optionParserMock, is_server_runing_method):
opm = optionParserMock.return_value
options = self._create_empty_options_mock()
del options.exit_message
args = ["status"]
opm.parse_args.return_value = (options, args)
is_server_runing_method.return_value = (False, None)
options.dbms = None
options.sid_or_sname = "sid"
try:
_ambari_server_.mainBody()
except SystemExit as e:
self.assertTrue(e.code == 3)
self.assertTrue(is_server_runing_method.called)
pass
def test_web_server_startup_timeout(self):
from ambari_server.serverConfiguration import get_web_server_startup_timeout
from ambari_server.serverConfiguration import WEB_SERVER_STARTUP_TIMEOUT
properties = Properties()
timeout = get_web_server_startup_timeout(properties)
self.assertEquals(50, timeout)
properties.process_pair(WEB_SERVER_STARTUP_TIMEOUT, "")
timeout = get_web_server_startup_timeout(properties)
self.assertEquals(50, timeout)
properties.process_pair(WEB_SERVER_STARTUP_TIMEOUT, "120")
timeout = get_web_server_startup_timeout(properties)
self.assertEquals(120, timeout)
properties.process_pair(WEB_SERVER_STARTUP_TIMEOUT, "120 ")
timeout = get_web_server_startup_timeout(properties)
self.assertEquals(120, timeout)
def _create_empty_options_mock(self):
options = MagicMock()
options.ldap_url = None
options.ldap_secondary_url = None
options.ldap_ssl = None
options.ldap_user_class = None
options.ldap_user_attr = None
options.ldap_group_class = None
options.ldap_group_attr = None
options.ldap_member_attr = None
options.ldap_dn = None
options.ldap_base_dn = None
options.ldap_manager_dn = None
options.ldap_manager_password = None
options.ldap_save_settings = None
options.ldap_referral = None
options.ldap_bind_anonym = None
options.ldap_sync_admin_name = None
options.ldap_sync_username_collisions_behavior = None
options.ldap_sync_admin_password = None
options.custom_trust_store = None
options.trust_store_type = None
options.trust_store_path = None
options.trust_store_password = None
options.security_option = None
options.api_ssl = None
options.api_ssl_port = None
options.import_cert_path = None
options.import_cert_alias = None
options.pem_password = None
options.import_key_path = None
options.master_key = None
options.master_key_persist = None
options.jaas_principal = None
options.jaas_keytab = None
return options
|
apache-2.0
| 1,978,595,072,674,674,700 | 38.286011 | 256 | 0.669998 | false |
gena/qgis-earthengine-plugin
|
ee_plugin.py
|
1
|
5587
|
# -*- coding: utf-8 -*-
"""
Main plugin file.
"""
from __future__ import absolute_import
import configparser
import requests
import webbrowser
from builtins import object
import os.path
import json
from qgis.PyQt.QtCore import QSettings, QTranslator, qVersion, QCoreApplication
from qgis.PyQt.QtWidgets import QAction
from qgis.PyQt.QtGui import QIcon
from qgis.core import QgsProject
from ee_plugin import provider
from ee_plugin.icons import resources
# read the plugin version from metadata
cfg = configparser.ConfigParser()
cfg.read(os.path.join(os.path.dirname(__file__), 'metadata.txt'))
VERSION = cfg.get('general', 'version')
version_checked = False
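# module-level guard: check_version() sets this in its finally block, so the
# remote version lookup runs at most once per interpreter session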
class GoogleEarthEnginePlugin(object):
"""QGIS Plugin Implementation."""
def __init__(self, iface):
"""Constructor.
:param iface: An interface instance that will be passed to this class
which provides the hook by which you can manipulate the QGIS
application at run time.
:type iface: QgsInterface
"""
# Save reference to the QGIS interface
self.iface = iface
# initialize plugin directory
self.plugin_dir = os.path.dirname(__file__)
# initialize locale
locale = QSettings().value('locale/userLocale')[0:2]
locale_path = os.path.join(
self.plugin_dir,
'i18n',
'GoogleEarthEnginePlugin_{}.qm'.format(locale))
if os.path.exists(locale_path):
self.translator = QTranslator()
self.translator.load(locale_path)
if qVersion() > '4.3.3':
QCoreApplication.installTranslator(self.translator)
self.menu_name_plugin = self.tr("Google Earth Engine Plugin")
# Create and register the EE data providers
provider.register_data_provider()
# noinspection PyMethodMayBeStatic
def tr(self, message):
"""Get the translation for a string using Qt translation API.
We implement this ourselves since we do not inherit QObject.
:param message: String for translation.
:type message: str, QString
:returns: Translated version of message.
:rtype: QString
"""
# noinspection PyTypeChecker,PyArgumentList,PyCallByClass
return QCoreApplication.translate('GoogleEarthEngine', message)
def initGui(self):
### Main dockwidget menu
# Create action that will start plugin configuration
icon_path = ':/plugins/ee_plugin/icons/earth_engine.svg'
self.dockable_action = QAction(
QIcon(icon_path), "User Guide", self.iface.mainWindow())
# connect the action to the run method
self.dockable_action.triggered.connect(self.run)
# Add menu item
self.iface.addPluginToMenu(self.menu_name_plugin, self.dockable_action)
# Register signal to initialize EE layers on project load
self.iface.projectRead.connect(self.updateLayers)
def run(self):
# open user guide in external web browser
webbrowser.open_new(
"http://qgis-ee-plugin.appspot.com/user-guide")
def check_version(self):
global version_checked
if version_checked:
return
try:
latest_version = requests.get('https://qgis-ee-plugin.appspot.com/get_latest_version').text
if VERSION < latest_version:
self.iface.messageBar().pushMessage('Earth Engine plugin:',
'There is a more recent version of the ee_plugin available {0} and you have {1}, please upgrade!'.format(latest_version, VERSION), duration=15)
except Exception:
print('Error occurred when checking for recent plugin version, skipping ...')
finally:
version_checked = True
def unload(self):
# Remove the plugin menu item and icon
self.iface.removePluginMenu(
self.menu_name_plugin, self.dockable_action)
def updateLayers(self):
import ee
from ee_plugin.utils import add_or_update_ee_layer
layers = QgsProject.instance().mapLayers().values()
for l in filter(lambda layer: layer.customProperty('ee-layer'), layers):
ee_object = l.customProperty('ee-object')
ee_object_vis = l.customProperty('ee-object-vis')
# check for backward-compatibility, older file formats (before 0.0.3) store ee-objects in ee-script property an no ee-object-vis is stored
# also, it seems that JSON representation of persistent object has been changed, making it difficult to read older EE JSON
if ee_object is None:
print('\nWARNING:\n Map layer saved with older version of EE plugin is detected, backward-compatibility for versions before 0.0.3 is not supported due to changes in EE library, please re-create EE layer by re-running the Python script\n')
return
ee_object = ee.deserializer.fromJSON(ee_object)
if ee_object_vis is not None:
ee_object_vis = json.loads(ee_object_vis)
# update loaded EE layer
# get existing values for name, visibility, and opacity
# TODO: this should not be needed, refactor add_or_update_ee_layer to update_ee_layer
name = l.name()
shown = QgsProject.instance().layerTreeRoot().findLayer(l.id()).itemVisibilityChecked()
opacity = l.renderer().opacity()
add_or_update_ee_layer(ee_object, ee_object_vis, name, shown, opacity)
|
mit
| 1,137,429,394,335,913,300 | 35.756579 | 254 | 0.646501 | false |
1905410/Misago
|
misago/users/management/commands/synchronizeusers.py
|
1
|
1305
|
import time
from django.contrib.auth import get_user_model
from django.core.management.base import BaseCommand
from misago.core.management.progressbar import show_progress
from misago.core.pgutils import batch_update
class Command(BaseCommand):
help = 'Synchronizes users'
def handle(self, *args, **options):
users_to_sync = get_user_model().objects.count()
if not users_to_sync:
self.stdout.write('\n\nNo users were found')
else:
self.sync_users(users_to_sync)
def sync_users(self, users_to_sync):
message = 'Synchronizing %s users...\n'
self.stdout.write(message % users_to_sync)
message = '\n\nSynchronized %s users'
synchronized_count = 0
show_progress(self, synchronized_count, users_to_sync)
start_time = time.time()
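# batch_update (assumed to iterate the queryset in primary-key batches) keeps
# memory bounded while every profile counter is recomputed from related rows.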
for user in batch_update(get_user_model().objects.all()):
user.threads = user.thread_set.count()
user.posts = user.post_set.count()
user.followers = user.followed_by.count()
user.following = user.follows.count()
user.save()
synchronized_count += 1
show_progress(self, synchronized_count, users_to_sync, start_time)
self.stdout.write(message % synchronized_count)
|
gpl-2.0
| 5,006,594,123,180,027,000 | 31.625 | 78 | 0.638314 | false |
ybalgir/Quantop
|
Lec7.py
|
1
|
1822
|
import numpy as np
import pandas as pd
from statsmodels import regression
import statsmodels.api as sm
import matplotlib.pyplot as plt
import math
import pandas_datareader.data as web
from datetime import datetime
def Starter_Lec7():
start = datetime(2014, 1, 1)
end = datetime(2015, 1, 1)
asset = web.DataReader("TSLA","yahoo",start,end)
asset_closingPrice = asset['Close']
benchmark = web.DataReader("SPY","yahoo",start,end)
benchmark_closingPrice = benchmark['Close']
r_a = asset_closingPrice.pct_change()[1:]
r_b = benchmark_closingPrice.pct_change()[1:]
modelSummary = linreg(r_a,r_b)
print("{0} {1} \n\n".format(modelSummary,type(modelSummary)))
def linreg(X,Y):
#running linear regression
X = sm.add_constant(X)
model = regression.linear_model.OLS(Y,X).fit()
a = model.params[0]
b = model.params[1]
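# params[0] is the intercept (alpha) added by sm.add_constant; params[1] is the
# slope (beta) of the asset returns regressed on the benchmark returns.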
X = pd.DataFrame(X, columns=['Close']) #Y_CMT Neat trick to extract columns from a pandas dataframe
# Return summary of the regression and plot results
X2 = np.linspace(float(X.min()), float(X.max()), 100)
Y_hat = X2 * b + a
plt.scatter(X, Y, alpha=0.3) # Plot the raw data
plt.plot(X2, Y_hat, 'r', alpha=0.9) # Add the regression line, colored in red
plt.xlabel('X Value')
plt.ylabel('Y Value')
plt.show()
return model.summary()
def TestPlotting():
N = 8
y = np.zeros(N)
x1 = np.linspace(0, 10, N, endpoint=True)
x2 = np.linspace(0, 10, N, endpoint=False)
plt.plot(x1, y, 'o')
plt.plot(x2, y + 0.5, 'o')
plt.ylim([-0.5, 1])
plt.show()
def NumpyMatrix():
array1 = np.matrix([[1,2,3],[4,5,6],[7,8,9]])
print("{0} {1} \n\n".format(array1[:,2],type(array1)))
array1 = array1[:,2]
print("{0} {1} \n\n".format(array1,type(array1)))
|
gpl-3.0
| 5,670,257,389,628,504,000 | 25.405797 | 107 | 0.623491 | false |
othelarian/arcns
|
mainscene/mainscene.py
|
1
|
75804
|
# -*- coding: utf-8 -*-
from direct.showbase.DirectObject import DirectObject
from direct.fsm.FSM import FSM
from direct.gui.OnscreenText import OnscreenText
from direct.gui.DirectGui import DirectFrame, DGG
from direct.stdpy.file import *
from direct.actor.Actor import Actor
from panda3d.core import Point3, TextNode, CardMaker, BitMask32, Multifile, VirtualFileSystem, Filename, Patchfile
from direct.interval.IntervalGlobal import Sequence, Parallel
import Tkinter, tkFileDialog, json, sys, lang, os, urllib, shutil, time, scenebuilder
class mainScene(FSM,DirectObject):
""" ****************************
Initialization methods
**************************** """
def __init__(self,app):
FSM.__init__(self,"mainScene"); self.defaultTransitions = {"Init":["MainMenu"],"MainMenu":["SubMenu"],"SubMenu":["MainMenu"]}
camera.setPos(0,-62,12); camera.setHpr(0,-10,0); self.accept("escape",sys.exit,[0])
self.app = app; self.version = "v0.0"; self.nomove = False
if exists("arcns_config.json"):
self.app.main_config = json.loads("".join([line.rstrip().lstrip() for line in file("arcns_config.json","rb")]))
else:
self.app.main_config = {"fullscreen": [False], "lang_chx": 1,"music":True,"sounds":True,"music_vol":1,"sounds_vol":1}
try:
mcf = open("arcns_config.json","w"); mcf.write(json.dumps(self.app.main_config)); mcf.close()
except Exception,e: print e
if self.app.main_config["lang_chx"] == 0: self.app.speak = lang.fr.fr_lang
elif self.app.main_config["lang_chx"] == 1: self.app.speak = lang.en.en_lang
self.states = {"main_chx":0,"main_lst":["campaign","mission","credits","options","quit"],"camp_sel":0,"saves_lst":[]}; self.options = {}
for key in self.app.main_config:
if key == "fullscreen": self.options[key] = [self.app.main_config[key][0]]
else: self.options[key] = self.app.main_config[key]
if exists("arcns_saves"):
for fsav in os.listdir("arcns_saves"): self.states["saves_lst"].append( json.loads("".join([line.rstrip().lstrip() for line in file("arcns_saves/"+fsav,"rb")])))
self.actscene = scenebuilder.mainscene_builder
self.dic_statics, self.dic_dynamics, self.dic_lights = self.app.arcstools.parse_scene(self.actscene)
self.dic_sounds = {}; self.loadSfx(); guibuild = self.structureGUI(); self.dic_gui = self.app.arcstools.parse_gui(guibuild)
self.dic_arrows= {}; self.loadmodels(); self.dic_anims = {}; self.activeAnim()
self.vers_txt = OnscreenText(text=self.version,font=self.app.arcFont,pos=(1.15,-0.95),fg=(0,0,0,1),bg=(1,1,1,0.8))
self.dic_musics = {}; self.loadMusics(); self.dic_musics["mainscene_music"].setLoop(True)
if self.app.main_config["music"]: self.dic_musics["mainscene_music"].play()
self.mouse_task = taskMgr.add(self.mouseTask,"mainscene mouse task")
def loadSfx(self):
self.dic_sounds["main_menu_sel"] = base.loader.loadSfx("mainscene/sounds/son_main_menu_sel.wav")
self.dic_sounds["main_menu_switch"] = base.loader.loadSfx("mainscene/sounds/son_main_menu_main.wav")
self.dic_sounds["main_menu_escape"] = base.loader.loadSfx("mainscene/sounds/son_main_menu_aux.wav")
for key in self.dic_sounds: self.dic_sounds[key].setVolume(self.app.main_config["sounds_vol"])
def loadMusics(self):
self.dic_musics["mainscene_music"] = base.loader.loadMusic("mainscene/musics/main_music.wav")
self.dic_musics["mainscene_music"].setVolume(self.app.main_config["music_vol"])
def structureGUI(self):
opt_lang_txt = ""
if self.options["lang_chx"] == 0: opt_lang_txt = "Français"
elif self.options["lang_chx"] == 1: opt_lang_txt = "English"
mainscene_gui = {
"main_menu":{
"frame":{
"hide":True,"parent":None,"elts":{
"campaign":{"type":"button","pos":(-0.15,0,-0.2),"cmd":self.actionMainMenu,"scale":0.12,"algn":TextNode.ALeft,"extra":[],"sound":None,"hide":False},
"mission":{"type":"button","pos":(-0.19,0,-0.34),"cmd":self.actionMainMenu,"scale":0.1,"algn":TextNode.ALeft,"extra":[],"sound":None,"hide":False},
"credits":{"type":"button","pos":(-0.26,0,-0.47),"cmd":self.actionMainMenu,"scale":0.09,"algn":TextNode.ALeft,"extra":[],"sound":None,"hide":False},
"options":{"type":"button","pos":(-0.35,0,-0.58),"cmd":self.actionMainMenu,"scale":0.07,"algn":TextNode.ALeft,"extra":[],"sound":None,"hide":False},
"quit":{"type":"button","pos":(-0.41,0,-0.66),"cmd":self.actionMainMenu,"scale":0.05,"algn":TextNode.ALeft,"extra":[],"sound":None,"hide":False}
}
}
},
"camp_menu":{
#
#"frame":{
# "hide":True,"parent":None,"elts":{
# #"stitre":{"type":"label"}
# }
#}
#
},
"mission_menu":{
"frame":{
"hide":True,"parent":None,"elts":{
"stitre":{"type":"label","pos":(-0.8,0,0.7),"scale":0.15,"algn":TextNode.ALeft,"hide":False},
#
# TODO: build the "Missions" sub-menu elements here
#
# DEBUG: temporary "W.I.P." label for the "Missions" sub-menu
"wip":{"type":"label","pos":(0,0,0),"scale":0.2,"algn":TextNode.ALeft,"hide":False,"text":"W.I.P."}
###
#
}
}
#
# TODO: if other frames are needed for the "Missions" sub-menu, they go here
#
},
"credits_menu":{
"frame":{
"hide":True,"parent":None,"elts":{
"stitre":{"type":"label","pos":(-0.8,0,0.7),"scale":0.14,"algn":TextNode.ACenter,"hide":False},
"graph_lab":{"type":"label","pos":(-0.5,0,0.4),"scale":0.1,"algn":TextNode.ACenter,"hide":False},
"graph_name":{"type":"label","pos":(-0.5,0,0.3),"scale":0.08,"algn":TextNode.ACenter,"hide":False},
"dev_lab":{"type":"label","pos":(0.5,0,0.4),"scale":0.1,"algn":TextNode.ACenter,"hide":False},
"dev_name":{"type":"label","pos":(0.5,0,0.3),"scale":0.08,"algn":TextNode.ACenter,"hide":False},
"trad_lab":{"type":"label","pos":(-0.5,0,-0.1),"scale":0.1,"algn":TextNode.ACenter,"hide":False},
"trad_name":{"type":"label","pos":(-0.5,0,-0.2),"scale":0.08,"algn":TextNode.ACenter,"hide":False},
"music_lab":{"type":"label","pos":(0.5,0,-0.1),"scale":0.1,"algn":TextNode.ACenter,"hide":False},
"music_name":{"type":"label","pos":(0.5,0,-0.2),"scale":0.08,"algn":TextNode.ACenter,"hide":False}
}
}
},
"option_menu":{
"frame":{
"hide":True,"parent":None,"elts":{
"stitre":{"type":"label","pos":(-0.8,0,0.7),"scale":0.15,"algn":TextNode.ALeft,"hide":False},
"lst_radio":{"type":"radio","scale":0.08,"algn":TextNode.ALeft,
"elts":[
["windowed",self.options["fullscreen"],[False],self.actionSubMenu,["","change_opt","win"],(-1,0,0.4)],
["fullscreen",self.options["fullscreen"],[True],self.actionSubMenu,["","change_opt","win"],(-1,0,0.3)]
]
},
"lang_chx":{"type":"label","pos":(-1.05,0,0.15),"scale":0.08,"algn":TextNode.ALeft,"hide":False},
"opt_optmenu":{"type":"optmenu","pos":(-0.45,0,0.15),"items":["Français","English"],"init":self.options["lang_chx"],"cmd":self.actionSubMenu,
"scale":0.08,"change":1,"algn":TextNode.ALeft,"extra":["change_opt","lang"],"hide":False,"text":opt_lang_txt},
"music_vol":{"type":"label","pos":(-1.05,0,-0.2),"scale":0.08,"algn":TextNode.ALeft,"hide":False},
"music_mute":{"type":"checkbox","pos":(0.3,0,-0.2),"cmd":self.actionSubMenu,"val":(1 if self.options["music"] else 0),"scale":0.08,
"box":"left","algn":TextNode.ALeft,"extra":["change_opt","music_mute"],"hide":False},
"music_slider":{"type":"slider","pos":(-0.3,0,-0.3),"scale":1,"inter":(0,1),"init":self.options["music_vol"],"pas":0.1,
"cmd":self.actionSubMenu,"extra":[None,"change_opt","music_vol"],"orient":DGG.HORIZONTAL,"hide":False},
"sound_vol":{"type":"label","pos":(-1.05,0,-0.5),"scale":0.08,"algn":TextNode.ALeft,"hide":False},
"sound_mute":{"type":"checkbox","pos":(0.3,0,-0.5),"cmd":self.actionSubMenu,"val":(1 if self.options["sounds"] else 0),"scale":0.08,
"box":"left","algn":TextNode.ALeft,"extra":["change_opt","sound_mute"],"hide":False},
"sound_slider":{"type":"slider","pos":(-0.3,0,-0.6),"scale":1,"inter":(0,1),"init":self.options["sounds_vol"],"pas":0.1,
"cmd":self.actionSubMenu,"extra":[None,"change_opt","sounds_vol"],"orient":DGG.HORIZONTAL,"hide":False},
"maj_verify":{"type":"button","pos":(0.5,0,0.4),"cmd":self.checkMajStarter,"scale":0.08,"algn":TextNode.ALeft,"extra":[],"sound":None,"hide":False,"disabled":False},
"btn_valid":{"type":"button","pos":(-0.9,0,-0.8),"cmd":self.actionSubMenu,"scale":0.08,"algn":TextNode.ALeft,"extra":["valid_opt"],"sound":None,"hide":False},
"btn_reset":{"type":"button","pos":(-0.5,0,-0.8),"cmd":self.actionSubMenu,"scale":0.08,"algn":TextNode.ALeft,"extra":["cancel_opt"],"sound":None,"hide":False}
}
},
"maj_frame":{
"hide":True,"parent":self.app.voile,"elts":{
"maj_stitre":{"type":"label","pos":(0,0,0.4),"scale":0.15,"algn":TextNode.ACenter,"hide":False},
"maj_progress":{"type":"waitbar","pos":(0,0,0),"scale":0.8,"range":4,"val":0,"hide":False},
"maj_err0":{"type":"label","pos":(0,0,0.1),"scale":0.1,"algn":TextNode.ACenter,"hide":True},
"maj_retry":{"type":"button","pos":(-0.3,0,-0.1),"cmd":self.checkMajStarter,"scale":0.08,"algn":TextNode.ACenter,"extra":[],"sound":None,"hide":True}#,
#
#"maj_cancel":{"type":"button","pos":(
#
#
}
}
},
"aux_menu":{
"frame":{
"hide":True,"parent":None,"elts":{
"return_btn":{"type":"button","pos":(0,0,-0.8),"cmd":self.actionSubMenu,"scale":0.08,"algn":TextNode.ALeft,"extra":["quit"],"sound":None,"hide":False}
}
}
}
}
#
"""
#update form
tmp_gui = self.app.arcButton(self.app.speak["option_menu"]["maj_cancel"],(0.3,0,-0.1),self.cancelMaj,txtalgn=TextNode.ACenter)
tmp_gui.hide(); tmp_gui.reparentTo(tmp_frame2); self.dic_gui["option_menu"]["maj_cancel"] = tmp_gui
tmp_gui = self.app.arcLabel(self.app.speak["option_menu"]["maj_err1"],(0,0,0.1),0.1,TextNode.ACenter)
tmp_gui.hide(); tmp_gui.reparentTo(tmp_frame2); self.dic_gui["option_menu"]["maj_err1"] = tmp_gui
tmp_gui = self.app.arcLabel(self.app.speak["option_menu"]["maj_nomaj"],(0,0,0.1),0.1,TextNode.ACenter)
tmp_gui.hide(); tmp_gui.reparentTo(tmp_frame2); self.dic_gui["option_menu"]["maj_nomaj"] = tmp_gui
tmp_gui = self.app.arcLabel(self.app.speak["option_menu"]["maj_update"],(0,0,0.1),0.1,TextNode.ACenter)
tmp_gui.hide(); tmp_gui.reparentTo(tmp_frame2); self.dic_gui["option_menu"]["maj_update"] = tmp_gui
tmp_gui = self.app.arcButton(self.app.speak["option_menu"]["maj_doit"],(-0.3,0,-0.1),self.doMajStarter,txtalgn=TextNode.ACenter)
tmp_gui.hide(); tmp_gui.reparentTo(tmp_frame2); self.dic_gui["option_menu"]["maj_doit"] = tmp_gui
tmp_gui = self.app.arcWaitBar((0,0,0),0.8,4,0,self.app.speak["option_menu"]["maj_upgrade"])
tmp_gui.hide(); tmp_gui.reparentTo(tmp_frame2); self.dic_gui["option_menu"]["maj_upgrade"] = tmp_gui
tmp_gui = self.app.arcLabel(self.app.speak["option_menu"]["maj_success"],(0,0,0),0.1,TextNode.ACenter)
tmp_gui.hide(); tmp_gui.reparentTo(tmp_frame2); self.dic_gui["option_menu"]["maj_success"] = tmp_gui
tmp_gui = self.app.arcButton(self.app.speak["option_menu"]["maj_quit"],(0,0,-0.4),self.endingMaj,0.11,TextNode.ACenter)
tmp_gui.hide(); tmp_gui.reparentTo(tmp_frame2); self.dic_gui["option_menu"]["maj_quit"] = tmp_gui
#camp_menu
tmp_frame = DirectFrame(); tmp_frame.hide(); self.dic_gui["camp_menu"]["frame"] = tmp_frame
tmp_gui = self.app.arcLabel(self.app.speak["camp_menu"]["stitre"],(-0.8,0,0.7),0.15); tmp_gui.reparentTo(tmp_frame)
self.dic_gui["camp_menu"]["stitre"] = tmp_gui
tmp_gui = self.app.arcLabel(self.app.speak["camp_menu"]["sel_lab"],(-0.9,0,0.4)); tmp_gui.reparentTo(tmp_frame)
self.dic_gui["camp_menu"]["sel_lab"] = tmp_gui
tmp_gui = self.app.arcLabel(self.app.speak["camp_menu"]["new_unit"],(0.1,0,0.25)); tmp_gui.reparentTo(tmp_frame)
self.dic_gui["camp_menu"]["new_unit"] = tmp_gui
tmp_gui = self.app.arcEntry((0.2,0,0.1)); tmp_gui.reparentTo(tmp_frame); self.dic_gui["camp_menu"]["entry_unit"] = tmp_gui
tmp_gui = self.app.arcButton(self.app.speak["camp_menu"]["crea_unit"],(0.9,0,-0.05),self.actionSubMenu,
txtalgn=TextNode.ACenter,extraArgs=["launch_game","crea_game"])
tmp_gui.reparentTo(tmp_frame); tmp_gui["state"] = DGG.DISABLED; self.dic_gui["camp_menu"]["crea_unit"] = tmp_gui
tmp_gui = self.app.arcLabel(self.app.speak["camp_menu"]["used_name"],(0.15,0,-0.2)); tmp_gui.reparentTo(tmp_frame)
self.dic_gui["camp_menu"]["used_name"] = tmp_gui; tmp_gui.hide()
it = 1; lst_pos = [(0.4,0,-0.3),(0.6,0,-0.45),(0.8,0,-0.6)]; lst_scale = [0.07,0.06,0.04]
for elt in self.states["saves_lst"]:
pos = None; scale = None
if it > 3:
pos = lst_pos[2]; scale = lst_scale[2]
else:
pos = lst_pos[it-1]; scale = lst_scale[it-1]
tmp_gui = self.app.arcLabel(elt["name"],pos,scale); tmp_gui.reparentTo(tmp_frame)
self.dic_gui["camp_menu"]["sav_name_"+str(it)] = tmp_gui
if it > 3: tmp_gui.hide()
timed = ""
if elt["time"] < 60: timed = str(elt["time"])+"s"
elif elt["time"] < 3600: timed = str((elt["time"] - elt["time"]%60) / 60)+":"+("0" if elt["time"]%60 < 10 else "")+str(elt["time"]%60)
elif elt["time"] < 86400:
timed = str((elt["time"] - elt["time"]%3600) /3600)+":"+("0" if (elt["time"]%3600) < 600 else "")+str((elt["time"]%3600 - elt["time"]%60)/60)
timed += ":"+("0" if elt["time"]%60 < 10 else "")+str(elt["time"]%60)
else:
days = ("days" if self.app.main_config["lang_chx"] == 1 else "jours")
timed = str((elt["time"] - elt["time"]%86400)/86400)+" "+days+" "+str((elt["time"]%86400 - elt["time"]%3600)/3600)+":"+("0" if (elt["time"]%3600) < 600 else "")
timed += str((elt["time"]%3600 - elt["time"]%60)/60)+":"+("0" if elt["time"]%60 < 10 else "")+str(elt["time"]%60)
tmp_gui = self.app.arcLabel(timed,(0.9,0,0.1),txtalgn=TextNode.ARight); tmp_gui.reparentTo(tmp_frame); tmp_gui.hide()
self.dic_gui["camp_menu"]["sav_time_"+str(it)] = tmp_gui
it += 1
tmp_gui = self.app.arcButton(self.app.speak["camp_menu"]["save_import"],(-0.8,0,0.2),self.actionSubMenu,extraArgs=["import_game"])
tmp_gui.reparentTo(tmp_frame); tmp_gui["state"] = DGG.DISABLED; self.dic_gui["camp_menu"]["save_import"] = tmp_gui
tmp_gui = self.app.arcButton(self.app.speak["camp_menu"]["save_export"],(0.9,0,-0.05),self.actionSubMenu,
extraArgs=["export_game"],txtalgn=TextNode.ACenter);
tmp_gui.hide(); tmp_gui.reparentTo(tmp_frame); tmp_gui["state"] = DGG.DISABLED; self.dic_gui["camp_menu"]["save_export"] = tmp_gui
tmp_gui = self.app.arcButton(self.app.speak["camp_menu"]["supp_unit"],(0.3,0,-0.05),self.actionSubMenu,
extraArgs=["supp_game"],txtalgn=TextNode.ACenter)
tmp_gui.reparentTo(tmp_frame); tmp_gui["state"] = DGG.DISABLED;
self.dic_gui["camp_menu"]["supp_unit"] = tmp_gui; tmp_gui.hide()
tmp_gui = self.app.arcButton(self.app.speak["camp_menu"]["launch"],(-0.3,0,0.2),self.actionSubMenu,extraArgs=["launch_game"])
tmp_gui.reparentTo(tmp_frame); tmp_gui["state"] = DGG.DISABLED; self.dic_gui["camp_menu"]["launch"] = tmp_gui
#export frame
tmp_frame = DirectFrame(); self.dic_gui["camp_menu"]["export_frame"] = tmp_frame
tmp_frame.reparentTo(self.app.voile); tmp_frame.hide()
tmp_gui = self.app.arcLabel(self.app.speak["camp_menu"]["export_titre"],(0,0,0.5),0.15,TextNode.ACenter)
tmp_gui.reparentTo(tmp_frame); self.dic_gui["camp_menu"]["export_titre"] = tmp_gui
tmp_gui = self.app.arcLabel("",(0,0,0.3),0.15,TextNode.ACenter); tmp_gui.reparentTo(tmp_frame);
self.dic_gui["camp_menu"]["export_name"] = tmp_gui
tmp_gui = self.app.arcLabel(self.app.speak["camp_menu"]["export_progress"],(0,0,0),0.1,TextNode.ACenter)
tmp_gui.reparentTo(tmp_frame); self.dic_gui["camp_menu"]["export_progress"] = tmp_gui
tmp_gui = self.app.arcLabel(self.app.speak["camp_menu"]["export_dupli"],(0,0,0),0.1,TextNode.ACenter)
tmp_gui.reparentTo(tmp_frame); tmp_gui.hide(); self.dic_gui["camp_menu"]["export_dupli"] = tmp_gui
tmp_gui = self.app.arcLabel(self.app.speak["camp_menu"]["export_nowrite"],(0,0,0),0.1,TextNode.ACenter)
tmp_gui.reparentTo(tmp_frame); tmp_gui.hide(); self.dic_gui["camp_menu"]["export_nowrite"] = tmp_gui
tmp_gui = self.app.arcLabel(self.app.speak["camp_menu"]["export_success"],(0,0,0),0.1,TextNode.ACenter)
tmp_gui.reparentTo(tmp_frame); tmp_gui.hide(); self.dic_gui["camp_menu"]["export_success"] = tmp_gui
tmp_gui = self.app.arcButton(self.app.speak["camp_menu"]["export_return"],(0,0,-.4),self.campaignVoile,txtalgn=TextNode.ACenter)
tmp_gui.reparentTo(tmp_frame); tmp_gui["state"] = DGG.DISABLED; tmp_gui.hide(); self.dic_gui["camp_menu"]["export_return"] = tmp_gui
#import frame
tmp_frame = DirectFrame(); self.dic_gui["camp_menu"]["import_frame"] = tmp_frame
tmp_frame.reparentTo(self.app.voile); tmp_frame.hide()
tmp_gui = self.app.arcLabel(self.app.speak["camp_menu"]["import_titre"],(0,0,0.5),0.15,TextNode.ACenter)
tmp_gui.reparentTo(tmp_frame); self.dic_gui["camp_menu"]["import_titre"] = tmp_gui
tmp_gui = self.app.arcLabel(self.app.speak["camp_menu"]["import_progress"],(0,0,0),0.1,TextNode.ACenter)
tmp_gui.reparentTo(tmp_frame); tmp_gui.hide(); self.dic_gui["camp_menu"]["import_progress"] = tmp_gui
tmp_gui = self.app.arcLabel(self.app.speak["camp_menu"]["import_fail"],(0,0,0),0.1,TextNode.ACenter)
tmp_gui.reparentTo(tmp_frame); tmp_gui.hide(); self.dic_gui["camp_menu"]["import_fail"] = tmp_gui
tmp_gui = self.app.arcLabel(self.app.speak["camp_menu"]["import_dupli"],(0,0,0),0.1,TextNode.ACenter)
tmp_gui.reparentTo(tmp_frame); tmp_gui.hide(); self.dic_gui["camp_menu"]["import_dupli"] = tmp_gui
tmp_gui = self.app.arcLabel(self.app.speak["camp_menu"]["import_success"],(0,0,0),0.1,TextNode.ACenter)
tmp_gui.reparentTo(tmp_frame); tmp_gui.hide(); self.dic_gui["camp_menu"]["import_success"] = tmp_gui
tmp_gui = self.app.arcButton(self.app.speak["camp_menu"]["import_return"],(0,0,-0.4),self.campaignVoile,txtalgn=TextNode.ACenter)
tmp_gui.reparentTo(tmp_frame); tmp_gui["state"] = DGG.DISABLED; tmp_gui.hide(); self.dic_gui["camp_menu"]["import_return"] = tmp_gui
#deletion frame
tmp_frame = DirectFrame(); self.dic_gui["camp_menu"]["supp_frame"] = tmp_frame
tmp_frame.reparentTo(self.app.voile); tmp_frame.hide()
tmp_gui = self.app.arcLabel(self.app.speak["camp_menu"]["supp_titre"],(0,0,0.5),0.15,TextNode.ACenter)
tmp_gui.reparentTo(tmp_frame); self.dic_gui["camp_menu"]["supp_titre"] = tmp_gui
tmp_gui = self.app.arcLabel("test_unit",(0,0,0.3),0.15,TextNode.ACenter); tmp_gui.reparentTo(tmp_frame)
self.dic_gui["camp_menu"]["supp_name"] = tmp_gui
tmp_gui = self.app.arcLabel(self.app.speak["camp_menu"]["supp_question"],(0,0,0),0.1,TextNode.ACenter)
tmp_gui.reparentTo(tmp_frame); self.dic_gui["camp_menu"]["supp_question"] = tmp_gui
tmp_gui = self.app.arcLabel(self.app.speak["camp_menu"]["supp_progress"],(0,0,0),0.1,TextNode.ACenter)
tmp_gui.reparentTo(tmp_frame); tmp_gui.hide(); self.dic_gui["camp_menu"]["supp_progress"] = tmp_gui
tmp_gui = self.app.arcLabel(self.app.speak["camp_menu"]["supp_finish"],(0,0,0),0.1,TextNode.ACenter)
tmp_gui.reparentTo(tmp_frame); tmp_gui.hide(); self.dic_gui["camp_menu"]["supp_finish"] = tmp_gui
tmp_gui = self.app.arcButton(self.app.speak["camp_menu"]["supp_cancel"],(-0.4,0,-0.4),self.campaignVoile,txtalgn=TextNode.ACenter)
tmp_gui.reparentTo(tmp_frame); tmp_gui["state"] = DGG.DISABLED; self.dic_gui["camp_menu"]["supp_cancel"] = tmp_gui
tmp_gui = self.app.arcButton(self.app.speak["camp_menu"]["supp_valid"],(0.4,0,-0.4),self.suppUnit,txtalgn=TextNode.ACenter)
tmp_gui.reparentTo(tmp_frame); tmp_gui["state"] = DGG.DISABLED; self.dic_gui["camp_menu"]["supp_valid"] = tmp_gui
tmp_gui = self.app.arcButton(self.app.speak["camp_menu"]["supp_return"],(0,0,-0.4),self.campaignVoile,txtalgn=TextNode.ACenter)
tmp_gui.reparentTo(tmp_frame); tmp_gui.hide(); tmp_gui["state"] = DGG.DISABLED; self.dic_gui["camp_menu"]["supp_return"] = tmp_gui
"""
#
return mainscene_gui
def loadmodels(self):
#arrows & cards
arr_up = render.attachNewNode("main arrow up"); arr_up.setHpr(0,90,0); arr_up.setPos(6.2,1.5,7.3); arr_up.hide()
self.app.arrow_mod.instanceTo(arr_up); arr_up.reparentTo(render)
arr_up_crd = render.attachNewNode(self.app.card_arrow.generate()); arr_up_crd.node().setIntoCollideMask(BitMask32.bit(1)); arr_up_crd.hide()
arr_up_crd.node().setTag("arrow","mainup"); arr_up_crd.reparentTo(self.app.pickly_node); arr_up_crd.setPos(6.2,1.7,7)
self.dic_arrows["arrow_up"] = {"node":arr_up,"card":arr_up_crd,"status":0,"posn":[6.2,1.5,7.3],"posh":[6.2,1.7,7.5]}
arr_dn = render.attachNewNode("main arrow down"); arr_dn.setHpr(180,-90,0); arr_dn.setPos(6.2,1.5,5); arr_dn.hide()
self.app.arrow_mod.instanceTo(arr_dn); arr_dn.reparentTo(render)
arr_dn_crd = render.attachNewNode(self.app.card_arrow.generate()); arr_dn_crd.node().setIntoCollideMask(BitMask32.bit(1)); arr_dn_crd.hide()
arr_dn_crd.node().setTag("arrow","maindn"); arr_dn_crd.reparentTo(self.app.pickly_node); arr_dn_crd.setPos(6.2,1.7,5.2)
self.dic_arrows["arrow_dn"] = {"node":arr_dn,"card":arr_dn_crd,"status":0,"posn":[6.2,1.5,5],"posh":[6.2,1.7,4.8]}
arr_camp_up = render.attachNewNode("sub camp arrow up"); arr_camp_up.setHpr(0,90,-90); arr_camp_up.setPos(12,-2.1,7.5)
self.app.arrow_mod.instanceTo(arr_camp_up); arr_camp_up.reparentTo(render); arr_camp_up.hide()
arr_camp_up_crd = render.attachNewNode(self.app.card_arrow.generate()); arr_camp_up_crd.node().setIntoCollideMask(BitMask32.bit(1))
arr_camp_up_crd.node().setTag("arrow","campup"); arr_camp_up_crd.reparentTo(self.app.pickly_node)
arr_camp_up_crd.setPos(12.2,-2.1,7.3); arr_camp_up_crd.setHpr(-90,0,0); arr_camp_up_crd.hide()
self.dic_arrows["arrow_camp_up"] = {"node":arr_camp_up,"card":arr_camp_up_crd,"status":0,"posn":[12,-2.1,7.5],"posh":[12,-2.1,7.7]}
arr_camp_dn = render.attachNewNode("sub camp arrow down"); arr_camp_dn.setHpr(0,-90,-90); arr_camp_dn.setPos(12,-2.2,1.5)
self.app.arrow_mod.instanceTo(arr_camp_dn); arr_camp_dn.reparentTo(render); arr_camp_dn.hide()
arr_camp_dn_crd = render.attachNewNode(self.app.card_arrow.generate()); arr_camp_dn_crd.node().setIntoCollideMask(BitMask32.bit(1))
arr_camp_dn_crd.node().setTag("arrow","campdn"); arr_camp_dn_crd.reparentTo(self.app.pickly_node)
arr_camp_dn_crd.setPos(12.2,-2.2,1.6); arr_camp_dn_crd.setHpr(-90,0,0); arr_camp_dn_crd.hide()
self.dic_arrows["arrow_camp_dn"] = {"node":arr_camp_dn,"card":arr_camp_dn_crd,"status":0,"posn":[12,-2.2,1.5],"posh":[12,-2.2,1.3]}
#
# TODO: build the arrows for the "Missions" sub-menu here
#
# NOTE: everything between the ### markers is temporary, pending a working parsing module
###
#gates and moving arcs
tmp_mod = Actor("mainscene/models/dynamics/main_gates.bam"); tmp_mod.reparentTo(render)
tmp_mod.setPos(0,-48.2,9.5); tmp_mod.setHpr(0,80,0); self.dic_dynamics["gates"] = tmp_mod
tmp_mod = Actor("mainscene/models/dynamics/main_m_menu.bam"); tmp_mod.reparentTo(render)
tmp_mod.pose("load",1); self.dic_dynamics["arcs_main_menu"] = tmp_mod
tmp_mod = Actor("mainscene/models/dynamics/main_a_menu.bam"); tmp_mod.reparentTo(render)
tmp_mod.pose("load",1); self.dic_dynamics["arcs_aux_menu"] = tmp_mod
#
#additional scenery (temporary)
#
tmp_mod = base.loader.loadModel("mainscene/models/statics/main_roofs.bam"); tmp_mod.reparentTo(render)
tmp_mod.setPos(0,0,0); self.dic_statics["roofs"] = tmp_mod
tmp_mod = base.loader.loadModel("mainscene/models/statics/main_arcs_show.bam"); tmp_mod.reparentTo(render)
tmp_mod.setPos(0,7.3,3); self.dic_statics["arcs_shower"] = tmp_mod
#
tmp_mod = base.loader.loadModel("mainscene/models/statics/main_title.bam"); tmp_mod.reparentTo(render)
self.dic_statics["arc_title"] = tmp_mod
###
#
def activeAnim(self):
tmp_anim = self.dic_statics["arcs_shower"].hprInterval(5,Point3(360,0,0),startHpr=Point3(0,0,0))
self.dic_anims["arcs_shower_pace"] = Sequence(tmp_anim,name="arcs_shower_pace")
self.dic_anims["cam_move_init"] = camera.posInterval(4,Point3(0,-25,12))
self.dic_anims["move_texts"] = None; self.dic_anims["move_saves"] = None
self.dic_anims["cam_move_maintosub"] = Parallel(name="main to sub")
self.dic_anims["cam_move_maintosub"].append(camera.posInterval(2,Point3(-4,-1,7)))
self.dic_anims["cam_move_maintosub"].append(camera.hprInterval(2,Point3(-90,-10,0)))
self.dic_anims["cam_move_subtomain"] = Parallel(name="sub to main")
self.dic_anims["cam_move_subtomain"].append(camera.posInterval(2,Point3(0,-25,12)))
self.dic_anims["cam_move_subtomain"].append(camera.hprInterval(2,Point3(0,-10,0)))
self.dic_anims["cam_move_launch"] = Parallel(name="launch the game")
self.dic_anims["cam_move_launch"].append(camera.posInterval(4,Point3(0,-62,12)))
self.dic_anims["cam_move_launch"].append(camera.hprInterval(2,Point3(0,-10,0)))
def mouseTask(self,task):
if base.mouseWatcherNode.hasMouse():
mpos = base.mouseWatcherNode.getMouse()
self.app.pickerRay.setFromLens(base.camNode,mpos.getX(),mpos.getY())
self.app.mouse_trav.traverse(self.app.pickly_node)
if self.app.mouse_hand.getNumEntries() > 0 and self.nomove:
tag = self.app.mouse_hand.getEntry(0).getIntoNode().getTag("arrow"); nod = None
if self.state == "MainMenu":
if tag == "mainup": nod = self.dic_arrows["arrow_up"]
elif tag == "maindn": nod = self.dic_arrows["arrow_dn"]
elif self.state == "SubMenu":
if self.states["main_chx"] == 0:
if tag == "campup": nod = self.dic_arrows["arrow_camp_up"]
elif tag == "campdn": nod = self.dic_arrows["arrow_camp_dn"]
elif self.states["main_chx"] == 1:
#
# TODO: pick-handling for the "Missions" sub-menu arrows
#
pass
if not nod == None:
nod["status"] = 2; nod["node"].setPos(nod["posh"][0],nod["posh"][1],nod["posh"][2])
elif self.nomove:
for key in self.dic_arrows:
if self.dic_arrows[key]["status"] == 2:
self.dic_arrows[key]["status"] = 1
self.dic_arrows[key]["node"].setPos(self.dic_arrows[key]["posn"][0],self.dic_arrows[key]["posn"][1],self.dic_arrows[key]["posn"][2])
return task.cont
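# The picking objects used above (pickerRay, mouse_trav, mouse_hand) are
# assumed to be created app-side with the usual Panda3D mouse-picking setup,
# roughly (a sketch, not this project's actual initialization code):
# from panda3d.core import CollisionTraverser, CollisionHandlerQueue, CollisionNode, CollisionRay
# picker_node = CollisionNode("mouseRay"); picker_node.setFromCollideMask(BitMask32.bit(1))
# self.pickerRay = CollisionRay(); picker_node.addSolid(self.pickerRay)
# self.mouse_trav = CollisionTraverser(); self.mouse_hand = CollisionHandlerQueue()
# self.mouse_trav.addCollider(camera.attachNewNode(picker_node), self.mouse_hand)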
""" ****************************
Méthodes pour l'état "Init"
**************************** """
def enterInit(self):
self.dic_anims["arcs_shower_pace"].loop(); self.dic_dynamics["gates"].play("open_gates")
self.task_chx = 0; taskMgr.doMethodLater(6.5,self.initTasks,"cam movement")
taskMgr.doMethodLater(9,self.initTasks,"play arcs_main_menu load anim")
taskMgr.doMethodLater(11,self.initTasks,"request for the next state")
def exitInit(self):
pass
def initTasks(self,task):
if self.task_chx == 0: #moving camera
self.dic_anims["cam_move_init"].start(); self.task_chx += 1
elif self.task_chx == 1: #launch arcs_m_menu animation
self.dic_dynamics["arcs_main_menu"].play("load"); self.task_chx += 1
elif self.task_chx == 2: self.request("MainMenu")
return task.done
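# Panda3D's FSM maps self.request("MainMenu") onto exitInit()/enterMainMenu(),
# so the doMethodLater chain above walks the intro sequence straight into the menu state.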
""" ****************************
Méthodes pour l'état "MainMenu"
**************************** """
def enterMainMenu(self):
self.app.change_cursor("main"); self.dic_gui["main_menu"]["frame"].show(); taskMgr.add(self.reactiveMainMenu,"reactive MainMenu")
self.dic_arrows["arrow_up"]["status"] = 1; self.dic_arrows["arrow_dn"]["status"] = 1
self.dic_gui["main_menu"][self.states["main_lst"][self.states["main_chx"]]]["state"] = DGG.NORMAL
def exitMainMenu(self):
pass
def actionMainMenu(self,value="valid"):
if not self.nomove: return
if value == "click":
if self.dic_arrows["arrow_up"]["status"] == 2: value = "up"
elif self.dic_arrows["arrow_dn"]["status"] == 2: value = "down"
else: return
self.dic_gui["main_menu"][self.states["main_lst"][self.states["main_chx"]]]["state"] = DGG.DISABLED
if value == "down" or value == "up":
sens = None
if value == "down" and self.states["main_chx"] > 0: sens = True
elif value == "up" and self.states["main_chx"] < 4: sens = False
if sens != None:
self.dic_arrows["arrow_up"]["status"] = 1
self.dic_arrows["arrow_up"]["node"].setPos(self.dic_arrows["arrow_up"]["posn"][0]
,self.dic_arrows["arrow_up"]["posn"][1],self.dic_arrows["arrow_up"]["posn"][2])
self.dic_arrows["arrow_dn"]["status"] = 1
self.dic_arrows["arrow_dn"]["node"].setPos(self.dic_arrows["arrow_dn"]["posn"][0]
,self.dic_arrows["arrow_dn"]["posn"][1],self.dic_arrows["arrow_dn"]["posn"][2])
if self.app.main_config["sounds"]: self.dic_sounds["main_menu_switch"].play()
self.states["main_chx"] += (-1 if sens else 1)
self.dic_arrows["arrow_up"]["node"].hide(); self.dic_arrows["arrow_dn"]["node"].hide()
pos_texts = [(-0.41,0,0.31),(-0.35,0,0.22),(-0.26,0,0.1),(-0.19,0,-0.04),(-0.15,0,-0.2),
(-0.19,0,-0.34),(-0.26,0,-0.47),(-0.35,0,-0.58),(-0.41,0,-0.66)]
scale_texts = [0.05,0.07,0.09,0.1,0.12,0.1,0.09,0.07,0.05]
try: self.dic_anims["move_texts"].finish()
except: pass
self.dic_anims["move_texts"] = None; self.dic_anims["move_texts"] = Parallel(name="MainMenu movement")
for it in range(5):
tmp_anim = self.dic_gui["main_menu"][self.states["main_lst"][it]].posInterval(0.4,Point3(pos_texts[4-self.states["main_chx"]+it]))
self.dic_anims["move_texts"].append(tmp_anim)
tmp_anim = self.dic_gui["main_menu"][self.states["main_lst"][it]].scaleInterval(0.4,scale_texts[4-self.states["main_chx"]+it])
self.dic_anims["move_texts"].append(tmp_anim)
self.dic_dynamics["arcs_main_menu"].play("state_"+str(self.states["main_chx"]+(1 if sens else -1))+"_"+str(self.states["main_chx"]))
self.nomove = False; self.dic_anims["move_texts"].start(); taskMgr.doMethodLater(0.4,self.reactiveMainMenu,"reactive MainMenu")
else:
self.dic_gui["main_menu"][self.states["main_lst"][self.states["main_chx"]]]["state"] = DGG.NORMAL
elif value == "valid":
if self.states["main_chx"] == 1 and len(self.states["saves_lst"]) == 0: return
if self.app.main_config["sounds"]: self.dic_sounds["main_menu_sel"].play()
self.ignoreAll(); self.accept("escape",sys.exit,[0]); self.nomove = False
if self.states["main_chx"] == 4: sys.exit(0)
else: self.launchSubMenu()
def reactiveMainMenu(self,task):
self.nomove = True
if self.states["main_chx"] < 4: self.dic_arrows["arrow_up"]["node"].show()
if self.states["main_chx"] > 0: self.dic_arrows["arrow_dn"]["node"].show()
if self.states["main_chx"] == 1:
if len(self.states["saves_lst"]) > 0: self.dic_gui["main_menu"]["mission"]["state"] = DGG.NORMAL
else: self.dic_gui["main_menu"][self.states["main_lst"][self.states["main_chx"]]]["state"] = DGG.NORMAL
self.accept("mouse1",self. actionMainMenu,["click"]); self.accept("wheel_up",self.actionMainMenu,["up"])
self.accept("wheel_down",self.actionMainMenu,["down"])
self.accept("arrow_up",self.actionMainMenu,["up"]); self.accept("arrow_down",self.actionMainMenu,["down"])
self.accept("enter",self.actionMainMenu,["valid"])
return task.done
def launchSubMenu(self):
self.app.change_cursor("blank"); self.dic_gui["main_menu"]["frame"].hide()
self.dic_arrows["arrow_up"]["status"] = 0; self.dic_arrows["arrow_dn"]["status"] = 0
self.dic_arrows["arrow_up"]["node"].hide(); self.dic_arrows["arrow_dn"]["node"].hide()
self.dic_anims["cam_move_maintosub"].start()
taskMgr.doMethodLater(1,self.subArcsTask,"anim aux arcs task")
taskMgr.doMethodLater(2.5,self.goSubMenuTask,"aff sub menu task")
def goSubMenuTask(self,task):
self.request("SubMenu"); return task.done
""" ****************************
Méthodes pour l'état "SubMenu"
**************************** """
def enterSubMenu(self):
self.app.change_cursor("main"); frame = None; self.accept("escape",self.actionSubMenu,["quit"])
if self.states["main_chx"] == 0:
self.dic_arrows["arrow_camp_up"]["status"] = 1; self.dic_arrows["arrow_camp_dn"]["status"] = 1
if self.states["camp_sel"] > 0: self.dic_arrows["arrow_camp_dn"]["node"].show()
self.accept("enter",self.actionSubMenu,["launch_game"])
self.accept("arrow_up",self.actionSubMenu,["camp_move","up"]); self.accept("arrow_down",self.actionSubMenu,["camp_move","down"])
self.accept("mouse1",self.actionSubMenu,["camp_move","click"])
self.accept("wheel_up",self.actionSubMenu,["camp_move","up"]); self.accept("wheel_down",self.actionSubMenu,["camp_move","down"])
if len(self.states["saves_lst"]) > 0 and self.states["camp_sel"] != len(self.states["saves_lst"]): self.dic_arrows["arrow_camp_up"]["node"].show()
if self.states["camp_sel"] == 0: self.dic_gui["camp_menu"]["crea_unit"]["state"] = DGG.NORMAL
else:
self.dic_gui["camp_menu"]["save_export"]["state"] = DGG.NORMAL
self.dic_gui["camp_menu"]["supp_unit"]["state"] = DGG.NORMAL
self.dic_gui["camp_menu"]["save_import"]["state"] = DGG.NORMAL
self.dic_gui["camp_menu"]["launch"]["state"] = DGG.NORMAL
frame = "camp_menu"; self.nomove = True
elif self.states["main_chx"] == 1:
#
# TODO: self.accept bindings for the arrow keys and Enter in the "Missions" sub-menu
#
frame = "mission_menu"; self.nomove = True
elif self.states["main_chx"] == 2: frame = "credits_menu"
elif self.states["main_chx"] == 3:
frame = "option_menu"; self.accept("enter",self.actionSubMenu,["valid_opt"])
self.dic_gui[frame]["frame"].show(); self.dic_gui["aux_menu"]["frame"].show()
self.dic_gui["aux_menu"]["return_btn"]["state"] = DGG.NORMAL
def exitSubMenu(self):
pass
def actionSubMenu(self,val1,val2=None,val3=None):
if val1 == "quit":
if self.app.main_config["sounds"]: self.dic_sounds["main_menu_escape"].play()
self.app.change_cursor("blank"); frame = None
if self.states["main_chx"] == 0:
self.dic_arrows["arrow_camp_up"]["status"] = 0; self.dic_arrows["arrow_camp_dn"]["status"] = 0
self.dic_arrows["arrow_camp_up"]["node"].hide(); self.dic_arrows["arrow_camp_dn"]["node"].hide()
self.dic_gui["camp_menu"]["crea_unit"]["state"] = DGG.DISABLED; self.dic_gui["camp_menu"]["save_export"]["state"] = DGG.DISABLED
self.dic_gui["camp_menu"]["supp_unit"]["state"] = DGG.DISABLED; self.dic_gui["camp_menu"]["save_import"]["state"] = DGG.DISABLED
self.dic_gui["camp_menu"]["launch"]["state"] = DGG.DISABLED
frame = "camp_menu"; self.nomove = False
elif self.states["main_chx"] == 1:
#
# TODO: check there is nothing left to do before leaving the "Missions" sub-menu
#
frame = "mission_menu"; self.nomove = False
elif self.states["main_chx"] == 2: frame = "credits_menu"
elif self.states["main_chx"] == 3:
self.actionSubMenu("cancel_opt"); frame = "option_menu"
self.dic_gui[frame]["frame"].hide(); self.dic_gui["aux_menu"]["frame"].hide()
self.ignoreAll(); self.accept("escape",sys.exit,[0])
self.dic_anims["cam_move_subtomain"].start()
taskMgr.doMethodLater(1,self.subArcsTask,"anim aux arcs task")
taskMgr.doMethodLater(2.5,self.goMainMenuTask,"aff main menu task")
self.dic_gui["aux_menu"]["return_btn"]["state"] = DGG.DISABLED
elif val1 == "cancel_opt":
if self.app.main_config["sounds"]: self.dic_sounds["main_menu_escape"].play()
for key in self.options:
if key == "fullscreen":
if self.options[key][0] != self.app.main_config[key][0]:
self.dic_gui["option_menu"]["windowed"]["indicatorValue"] = (0 if self.app.main_config["fullscreen"][0] else 1)
self.dic_gui["option_menu"]["windowed"].setIndicatorValue()
self.dic_gui["option_menu"]["fullscreen"]["indicatorValue"] = (1 if self.app.main_config["fullscreen"][0] else 0)
self.dic_gui["option_menu"]["fullscreen"].setIndicatorValue()
else:
if self.options[key] != self.app.main_config[key]:
if key == "lang_chx": self.dic_gui["option_menu"]["lang_opt"].set(self.app.main_config[key])
elif key == "music":
self.dic_gui["option_menu"]["music_mute"]["indicatorValue"] = self.app.main_config[key]
self.dic_gui["option_menu"]["music_mute"].setIndicatorValue()
elif key == "sounds":
self.dic_gui["option_menu"]["sound_mute"]["indicatorValue"] = self.app.main_config[key]
self.dic_gui["option_menu"]["sound_mute"].setIndicatorValue()
elif key == "music_vol": self.dic_gui["option_menu"]["music_slider"]["value"] = self.app.main_config[key]
elif key == "sounds_vol": self.dic_gui["option_menu"]["sound_slider"]["value"] = self.app.main_config[key]
for key in self.app.main_config:
if key == "fullscreen": self.options[key][0] = self.app.main_config[key][0]
else: self.options[key] = self.app.main_config[key]
self.dic_gui["option_menu"]["btn_valid"]["state"] = DGG.DISABLED
self.dic_gui["option_menu"]["btn_reset"]["state"] = DGG.DISABLED
elif val1 == "valid_opt":
if self.app.main_config["sounds"]: self.dic_sounds["main_menu_sel"].play()
if self.options["lang_chx"] != self.app.main_config["lang_chx"]:
old_text = ""; new_text = ""
if self.options["lang_chx"] == 0:
old_text = "days"; new_text = "jours"; self.app.speak = lang.fr.fr_lang
elif self.options["lang_chx"] == 1:
old_text = "jours"; new_text = "days"; self.app.speak = lang.en.en_lang
for key1 in self.app.speak:
for key2 in self.app.speak[key1]: self.dic_gui[key1][key2]["text"] = self.app.speak[key1][key2]
for it in range(1,len(self.states["saves_lst"])+1):
self.dic_gui["camp_menu"]["sav_time_"+str(it)]["text"] = self.dic_gui["camp_menu"]["sav_time_"+str(it)]["text"].replace(old_text,new_text)
#
# TODO: add the translation entries most likely present in the "Missions" menu
#
if self.options["music"] != self.app.main_config["music"]:
if self.options["music"]: self.dic_music["mainscene_music"].play()
else: self.dic_music["mainscene_music"].stop()
if self.options["sounds_vol"] != self.app.main_config["sounds_vol"]:
for key in self.dic_sounds: self.dic_sounds[key].setVolume(self.options["sounds_vol"])
if self.options["music_vol"] != self.app.main_config["music_vol"]: self.dic_music["mainscene_music"].setVolume(self.options["music_vol"])
for key in self.app.main_config:
if key == "fullscreen": self.app.main_config[key][0] = self.options[key][0]
else: self.app.main_config[key] = self.options[key]
mcf = open("arcns_config.json","w"); mcf.write(json.dumps(self.options)); mcf.close()
self.dic_gui["option_menu"]["btn_valid"]["state"] = DGG.DISABLED
self.dic_gui["option_menu"]["btn_reset"]["state"] = DGG.DISABLED
elif val2 == "change_opt":
if val3 == "win": pass
elif val3 == "lang":
if val1 == "Français": self.options["lang_chx"] = 0
elif val1 == "English": self.options["lang_chx"] = 1
elif val3 == "music_mute": self.options["music"] = bool(val1)
elif val3 == "sound_mute": self.options["sounds"] = bool(val1)
elif val3 == "music_vol": self.options["music_vol"] = int(self.dic_gui["option_menu"]["music_slider"]["value"]*100)/100.0
elif val3 == "sounds_vol": self.options["sounds_vol"] = int(self.dic_gui["option_menu"]["sound_slider"]["value"]*100)/100.0
self.dic_gui["option_menu"]["btn_valid"]["state"] = DGG.DISABLED
self.dic_gui["option_menu"]["btn_reset"]["state"] = DGG.DISABLED
for key in self.options:
if not self.options[key] == self.app.main_config[key]:
self.dic_gui["option_menu"]["btn_valid"]["state"] = DGG.NORMAL
self.dic_gui["option_menu"]["btn_reset"]["state"] = DGG.NORMAL
break
elif val1 == "launch_game":
sav = None; self.dic_gui["camp_menu"]["used_name"].hide()
if val2 == "crea_game" or self.states["camp_sel"] == 0:
name = self.dic_gui["camp_menu"]["entry_unit"].get()
if name == "": return
if not exists("arcns_saves"): os.mkdir("arcns_saves")
elif len(self.states["saves_lst"]) > 0:
for elt in self.states["saves_lst"]:
if elt["name"] == name:
self.dic_gui["camp_menu"]["used_name"].show(); return
dte = time.strftime("%d-%m-%Y_%H%M%S",time.localtime())
sav = {"name":name,"crea_date":dte,"time":0,"saved_place":"firstbase","init":0}
fsav = open("arcns_saves/"+dte+".sav","w"); fsav.write(json.dumps(sav)); fsav.close()
else: sav = self.states["saves_lst"][self.states["camp_sel"]-1]
self.dic_arrows["arrow_camp_up"]["node"].hide(); self.dic_arrows["arrow_camp_dn"]["node"].hide()
self.dic_gui["aux_menu"]["frame"].hide()
self.ignoreAll(); self.nomove = False; self.dic_gui["camp_menu"]["frame"].hide(); self.launchGame(sav)
elif val1 == "camp_move":
if not self.nomove: return
if val2 == "click":
if self.dic_arrows["arrow_camp_up"]["status"] == 2: val2 = "up"
elif self.dic_arrows["arrow_camp_dn"]["status"] == 2: val2 = "down"
else: return
if val2 == "up":
if len(self.states["saves_lst"]) == self.states["camp_sel"]: return
if self.states["camp_sel"] == 0:
self.dic_gui["camp_menu"]["new_unit"].hide(); self.dic_gui["camp_menu"]["entry_unit"].hide()
self.dic_gui["camp_menu"]["crea_unit"]["state"] = DGG.DISABLED
self.dic_gui["camp_menu"]["crea_unit"].hide(); self.dic_gui["camp_menu"]["used_name"].hide()
else:
self.dic_gui["camp_menu"]["sav_name_"+str(self.states["camp_sel"])].hide()
self.dic_gui["camp_menu"]["sav_time_"+str(self.states["camp_sel"])].hide()
self.dic_gui["camp_menu"]["save_export"].hide(); self.dic_gui["camp_menu"]["supp_unit"].hide()
self.dic_gui["camp_menu"]["save_export"]["state"] = DGG.DISABLED
self.dic_gui["camp_menu"]["supp_unit"]["state"] = DGG.DISABLED
self.dic_anims["move_saves"] = None; self.dic_anims["move_saves"] = Parallel(name="saves movement")
lst_pos = [(0.1,0,0.25),(0.4,0,-0.3),(0.6,0,-0.45)]; lst_scale = [0.08,0.07,0.06]
it = 0; self.states["camp_sel"] += 1
while (self.states["camp_sel"]+it) <= len(self.states["saves_lst"]) and it < 3:
tmp_anim = self.dic_gui["camp_menu"]["sav_name_"+str(self.states["camp_sel"]+it)].posInterval(0.2,Point3(lst_pos[it]))
self.dic_anims["move_saves"].append(tmp_anim)
tmp_anim = self.dic_gui["camp_menu"]["sav_name_"+str(self.states["camp_sel"]+it)].scaleInterval(0.2,lst_scale[it])
self.dic_anims["move_saves"].append(tmp_anim)
it += 1
if self.states["camp_sel"]+3 <= len(self.states["saves_lst"]): self.dic_gui["camp_menu"]["sav_name_"+str(self.states["camp_sel"]+3)].show()
elif val2 == "down":
if self.states["camp_sel"] == 0: return
self.dic_gui["camp_menu"]["sav_time_"+str(self.states["camp_sel"])].hide()
self.dic_gui["camp_menu"]["save_export"].hide(); self.dic_gui["camp_menu"]["supp_unit"].hide()
self.dic_gui["camp_menu"]["save_export"]["state"] = DGG.DISABLED
self.dic_gui["camp_menu"]["supp_unit"]["state"] = DGG.DISABLED
self.dic_anims["move_saves"] = None; self.dic_anims["move_saves"] = Parallel(name="saves movement")
lst_pos = [(0.4,0,-0.3),(0.6,0,-0.45),(0.8,0,-0.6)]; lst_scale = [0.07,0.06,0.04]; it = 0
while (self.states["camp_sel"]+it) <= len(self.states["saves_lst"]) and it < 3:
tmp_anim = self.dic_gui["camp_menu"]["sav_name_"+str(self.states["camp_sel"]+it)].posInterval(0.2,Point3(lst_pos[it]))
self.dic_anims["move_saves"].append(tmp_anim)
tmp_anim = self.dic_gui["camp_menu"]["sav_name_"+str(self.states["camp_sel"]+it)].scaleInterval(0.2,lst_scale[it])
self.dic_anims["move_saves"].append(tmp_anim)
it += 1
self.states["camp_sel"] -= 1
if self.states["camp_sel"]+4 <= len(self.states["saves_lst"]): self.dic_gui["camp_menu"]["sav_name_"+str(self.states["camp_sel"]+4)].hide()
self.dic_gui["camp_menu"]["save_import"]["state"] = DGG.DISABLED
self.dic_gui["camp_menu"]["launch"]["state"] = DGG.DISABLED
self.ignore("enter"); self.dic_arrows["arrow_camp_up"]["node"].hide(); self.dic_arrows["arrow_camp_dn"]["node"].hide()
self.dic_arrows["arrow_camp_up"]["status"] = 1; self.dic_arrows["arrow_camp_dn"]["status"] = 1
self.dic_arrows["arrow_camp_up"]["node"].setPos(self.dic_arrows["arrow_camp_up"]["posn"][0],
self.dic_arrows["arrow_camp_up"]["posn"][1],self.dic_arrows["arrow_camp_up"]["posn"][2])
self.dic_arrows["arrow_camp_dn"]["node"].setPos(self.dic_arrows["arrow_camp_dn"]["posn"][0],
self.dic_arrows["arrow_camp_dn"]["posn"][1],self.dic_arrows["arrow_camp_dn"]["posn"][2])
if self.app.main_config["sounds"]: self.dic_sounds["main_menu_switch"].play()
self.nomove = False; self.dic_anims["move_saves"].start()
#
# TODO: scenery animation (only relevant with the new scenery)
#
taskMgr.doMethodLater(0.2,self.reactiveCampaign,"reactive campaign interactions")
elif val1 == "export_game":
root = Tkinter.Tk(); root.withdraw(); path = tkFileDialog.askdirectory()
if path != "":
exp_save = self.states["saves_lst"][self.states["camp_sel"]-1]
fln = "/arcns_export_("+exp_save["name"]+").sav"
self.ignoreAll(); self.app.voile.show(); self.dic_gui["camp_menu"]["frame"].hide(); self.nomove = False
self.dic_gui["aux_menu"]["frame"].hide(); self.dic_gui["camp_menu"]["export_frame"].show()
self.dic_gui["camp_menu"]["export_name"]["text"] = exp_save["name"]
if exists(path+fln):
self.accept("enter",self.campaignVoile); self.dic_gui["camp_menu"]["export_progress"].hide()
self.dic_gui["camp_menu"]["export_dupli"].show()
self.dic_gui["camp_menu"]["export_return"]["state"] = DGG.NORMAL; self.dic_gui["camp_menu"]["export_return"].show()
return
try:
fexp = open(path+fln,"w"); fexp.write(json.dumps(exp_save)); fexp.close()
self.accept("enter",self.campaignVoile); self.dic_gui["camp_menu"]["export_progress"].hide()
self.dic_gui["camp_menu"]["export_success"].show()
self.dic_gui["camp_menu"]["export_return"]["state"] = DGG.NORMAL; self.dic_gui["camp_menu"]["export_return"].show()
except Exception,e:
print e; self.accept("enter",self.campaignVoile); self.dic_gui["camp_menu"]["export_progress"].hide()
self.dic_gui["camp_menu"]["export_nowrite"].show()
self.dic_gui["camp_menu"]["export_return"]["state"] = DGG.NORMAL; self.dic_gui["camp_menu"]["export_return"].show()
elif val1 == "import_game":
root = Tkinter.Tk(); root.withdraw(); path = tkFileDialog.askopenfilename(filetypes=[("Saves","*.sav"),("All","*")])
if path != "":
self.ignoreAll(); self.app.voile.show(); self.dic_gui["camp_menu"]["frame"].hide(); self.nomove = False
self.dic_gui["aux_menu"]["frame"].hide(); self.dic_gui["camp_menu"]["import_frame"].show()
try:
import_save = json.loads("".join([line.rstrip().lstrip() for line in file(path,"rb")]))
for elt in self.states["saves_lst"]:
if elt["name"] == import_save["name"]:
self.dic_gui["camp_menu"]["import_progress"].hide(); self.dic_gui["camp_menu"]["import_dupli"].show()
self.dic_gui["camp_menu"]["import_return"]["state"] = DGG.NORMAL; self.dic_gui["camp_menu"]["import_return"].show()
self.accept("enter",self.campaignVoile); return
lst_pos = [(0.4,0,-0.3),(0.6,0,-0.45),(0.8,0,-0.6)]; lst_scale = [0.07,0.06,0.04]; act_pos = None; act_scale = None
if self.states["camp_sel"] == len(self.states["saves_lst"]):
act_pos = lst_pos[0]; act_scale = lst_scale[0]
elif self.states["camp_sel"]+1 == len(self.states["saves_lst"]):
act_pos = lst_pos[1]; act_scale = lst_scale[1]
else:
act_pos = lst_pos[2]; act_scale = lst_scale[2]
tmp_gui = self.app.arcLabel(import_save["name"],act_pos,act_scale); tmp_gui.reparentTo(self.dic_gui["camp_menu"]["frame"])
self.dic_gui["camp_menu"]["sav_name_"+str(len(self.states["saves_lst"])+1)] = tmp_gui
if self.states["camp_sel"]+2 < len(self.states["saves_lst"]): tmp_gui.hide()
timed = ""
if import_save["time"] < 60: timed = str(import_save["time"])+"s"
elif import_save["time"] < 3600:
timed = str((import_save["time"] - import_save["time"]%60) / 60)+":"
timed += ("0" if import_save["time"]%60 < 10 else "")+str(import_save["time"]%60)
elif import_save["time"] < 86400:
timed = str((import_save["time"] - import_save["time"]%3600) /3600)+":"+("0" if (import_save["time"]%3600) < 600 else "")
timed += str((import_save["time"]%3600 - import_save["time"]%60)/60)
timed +":"+("0" if import_save["time"]%60 < 10 else "")+str(import_save["time"]%60)
else:
days = ("days" if self.app.main_config["lang_chx"] == 1 else "jours")
timed = str((import_save["time"] - import_save["time"]%86400)/86400)+" "+days+" "
timed += str((import_save["time"]%86400 - import_save["time"]%3600)/3600)+":"+("0" if (import_save["time"]%3600) < 600 else "")
timed += str((import_save["time"]%3600 - import_save["time"]%60)/60)+":"
timed += ("0" if import_save["time"]%60 < 10 else "")+str(import_save["time"]%60)
tmp_gui = self.app.arcLabel(timed,(0.9,0,0.1),txtalgn=TextNode.ARight); tmp_gui.reparentTo(self.dic_gui["camp_menu"]["frame"])
tmp_gui.hide(); self.dic_gui["camp_menu"]["sav_time_"+str(len(self.states["saves_lst"])+1)] = tmp_gui
if not import_save.has_key("ori_date"): import_save["ori_date"] = import_save["crea_date"]
import_save["crea_date"] = time.strftime("%d-%m-%Y_%H%M%S",time.localtime())
fli = open("arcns_saves/"+import_save["crea_date"]+".sav","w"); fli.write(json.dumps(import_save)); fli.close()
self.states["saves_lst"].append(import_save); self.accept("enter",self.campaignVoile)
self.dic_gui["camp_menu"]["import_progress"].hide(); self.dic_gui["camp_menu"]["import_success"].show()
self.dic_gui["camp_menu"]["import_return"]["state"] = DGG.NORMAL; self.dic_gui["camp_menu"]["import_return"].show()
except Exception,e:
print e; self.accept("enter",self.campaignVoile)
self.dic_gui["camp_menu"]["import_progress"].hide(); self.dic_gui["camp_menu"]["import_fail"].show()
self.dic_gui["camp_menu"]["import_return"]["state"] = DGG.NORMAL; self.dic_gui["camp_menu"]["import_return"].show()
elif val1 == "supp_game":
self.ignoreAll(); self.app.voile.show(); self.dic_gui["camp_menu"]["supp_frame"].show()
self.dic_gui["camp_menu"]["frame"].hide(); self.dic_gui["aux_menu"]["frame"].hide()
self.dic_gui["camp_menu"]["supp_cancel"]["state"] = DGG.NORMAL
self.dic_gui["camp_menu"]["supp_valid"]["state"] = DGG.NORMAL
self.accept("enter",self.suppUnit); self.accept("escape",self.campaignVoile)
self.nomove = False
#
# TODO: define the remaining actions for the "Missions" sub-menu here
#
print "val1 : "+str(val1)
print "val2 : "+str(val2)
print "val3 : "+str(val3)
#
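# NOTE: the 'timed' formatting in the import branch above duplicates the logic
# used when building the save list in structureGUI; a shared helper could look
# like this (hypothetical sketch, not called anywhere in this file):
# def format_play_time(seconds, days_word="days"):
#     if seconds < 60: return "%ds" % seconds
#     m, s = divmod(seconds, 60)
#     if seconds < 3600: return "%d:%02d" % (m, s)
#     h, m = divmod(m, 60)
#     if seconds < 86400: return "%d:%02d:%02d" % (h, m, s)
#     d, h = divmod(h, 24)
#     return "%d %s %d:%02d:%02d" % (d, days_word, h, m, s)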
def reactiveCampaign(self,task):
if self.states["camp_sel"] > 0: self.dic_arrows["arrow_camp_dn"]["node"].show()
if self.states["camp_sel"] < len(self.states["saves_lst"]): self.dic_arrows["arrow_camp_up"]["node"].show()
self.accept("enter",self.actionSubMenu,["launch_game"])
if self.states["camp_sel"] == 0:
self.dic_gui["camp_menu"]["new_unit"].show(); self.dic_gui["camp_menu"]["entry_unit"].show()
self.dic_gui["camp_menu"]["crea_unit"].show(); self.dic_gui["camp_menu"]["crea_unit"]["state"] = DGG.NORMAL
else:
self.dic_gui["camp_menu"]["sav_name_"+str(self.states["camp_sel"])].show()
self.dic_gui["camp_menu"]["sav_time_"+str(self.states["camp_sel"])].show()
self.dic_gui["camp_menu"]["save_export"].show(); self.dic_gui["camp_menu"]["supp_unit"].show()
self.dic_gui["camp_menu"]["save_export"]["state"] = DGG.NORMAL; self.dic_gui["camp_menu"]["supp_unit"]["state"] = DGG.NORMAL
self.dic_gui["camp_menu"]["save_import"].show(); self.dic_gui["camp_menu"]["launch"].show()
self.dic_gui["camp_menu"]["save_import"]["state"] = DGG.NORMAL; self.dic_gui["camp_menu"]["launch"]["state"] = DGG.NORMAL
self.nomove = True; return task.done
def campaignVoile(self):
self.app.voile.hide()
#clean up the export frame
self.dic_gui["camp_menu"]["export_frame"].hide(); self.dic_gui["camp_menu"]["export_name"]["text"] = ""
self.dic_gui["camp_menu"]["export_progress"].show(); self.dic_gui["camp_menu"]["export_dupli"].hide()
self.dic_gui["camp_menu"]["export_nowrite"].hide(); self.dic_gui["camp_menu"]["export_success"].hide()
self.dic_gui["camp_menu"]["export_return"]["state"] = DGG.DISABLED; self.dic_gui["camp_menu"]["export_return"].hide()
#clean up the import frame
self.dic_gui["camp_menu"]["import_frame"].hide(); self.dic_gui["camp_menu"]["import_progress"].show()
self.dic_gui["camp_menu"]["import_fail"].hide(); self.dic_gui["camp_menu"]["import_dupli"].hide()
self.dic_gui["camp_menu"]["import_success"].hide()
self.dic_gui["camp_menu"]["import_return"]["state"] = DGG.DISABLED; self.dic_gui["camp_menu"]["import_return"].hide()
#clean up the deletion frame
self.dic_gui["camp_menu"]["supp_frame"].hide(); self.dic_gui["camp_menu"]["supp_name"]["text"] = ""
self.dic_gui["camp_menu"]["supp_question"].show()
self.dic_gui["camp_menu"]["supp_progress"].hide(); self.dic_gui["camp_menu"]["supp_finish"].hide()
self.dic_gui["camp_menu"]["supp_return"].hide(); self.dic_gui["camp_menu"]["supp_return"]["state"] = DGG.DISABLED
self.dic_gui["camp_menu"]["supp_cancel"].show(); self.dic_gui["camp_menu"]["supp_cancel"]["state"] = DGG.DISABLED
self.dic_gui["camp_menu"]["supp_valid"].show(); self.dic_gui["camp_menu"]["supp_valid"]["state"] = DGG.DISABLED
#restore the "Campaign" menu
self.dic_gui["camp_menu"]["frame"].show(); self.dic_gui["aux_menu"]["frame"].show()
self.accept("escape",self.actionSubMenu,["quit"]); self.accept("enter",self.actionSubMenu,["launch_game"])
self.accept("arrow_up",self.actionSubMenu,["camp_move","up"]); self.accept("arrow_down",self.actionSubMenu,["camp_move","down"])
self.accept("mouse1",self.actionSubMenu,["camp_move","click"])
self.accept("wheel_up",self.actionSubMenu,["camp_move","up"]); self.accept("wheel_down",self.actionSubMenu,["camp_move","down"])
self.nomove = True
def suppUnit(self):
self.ignoreAll()
self.dic_gui["camp_menu"]["supp_cancel"].hide(); self.dic_gui["camp_menu"]["supp_cancel"]["state"] = DGG.DISABLED
self.dic_gui["camp_menu"]["supp_valid"].hide(); self.dic_gui["camp_menu"]["supp_valid"]["state"] = DGG.DISABLED
self.dic_gui["camp_menu"]["supp_question"].hide(); self.dic_gui["camp_menu"]["supp_progress"].show()
os.unlink("arcns_saves/"+self.states["saves_lst"][self.states["camp_sel"]-1]["crea_date"]+".sav")
self.dic_gui["camp_menu"]["sav_name_"+str(self.states["camp_sel"])].removeNode()
self.dic_gui["camp_menu"]["sav_time_"+str(self.states["camp_sel"])].removeNode()
if len(self.states["saves_lst"]) == 1:
self.dic_gui["camp_menu"]["new_unit"].show(); self.dic_gui["camp_menu"]["entry_unit"].show()
self.dic_gui["camp_menu"]["save_export"].hide(); self.dic_gui["camp_menu"]["save_export"]["state"] = DGG.DISABLED
self.dic_gui["camp_menu"]["supp_unit"].hide(); self.dic_gui["camp_menu"]["supp_unit"]["state"] = DGG.DISABLED
self.dic_gui["camp_menu"]["crea_unit"].show(); self.dic_gui["camp_menu"]["crea_unit"]["state"] = DGG.NORMAL
self.states["camp_sel"] = 0
elif self.states["camp_sel"] == len(self.states["saves_lst"]):
self.states["camp_sel"] -= 1
self.dic_gui["camp_menu"]["sav_name_"+str(self.states["camp_sel"])].show()
self.dic_gui["camp_menu"]["sav_time_"+str(self.states["camp_sel"])].show()
else:
it = 0; lst_pos = [(0.1,0,0.25),(0.4,0,-0.3),(0.6,0,-0.45)]; lst_scale = [0.08,0.07,0.06]
while (self.states["camp_sel"]+it) < len(self.states["saves_lst"]):
self.dic_gui["camp_menu"]["sav_name_"+str(self.states["camp_sel"]+it)] = self.dic_gui["camp_menu"]["sav_name_"+str(self.states["camp_sel"]+it+1)]
if it < 3:
self.dic_gui["camp_menu"]["sav_name_"+str(self.states["camp_sel"]+it)].setPos(lst_pos[it][0],lst_pos[it][1],lst_pos[it][2])
self.dic_gui["camp_menu"]["sav_name_"+str(self.states["camp_sel"]+it)].setScale(lst_scale[it])
elif it == 3: self.dic_gui["camp_menu"]["sav_name_"+str(self.states["camp_sel"]+it)].show()
self.dic_gui["camp_menu"]["sav_time_"+str(self.states["camp_sel"]+it)] = self.dic_gui["camp_menu"]["sav_time_"+str(self.states["camp_sel"]+it+1)]
it += 1
self.dic_gui["camp_menu"]["sav_time_"+str(self.states["camp_sel"])].show()
del self.dic_gui["camp_menu"]["sav_name_"+str(len(self.states["saves_lst"]))]
del self.dic_gui["camp_menu"]["sav_time_"+str(len(self.states["saves_lst"]))]
del self.states["saves_lst"][self.states["camp_sel"]-1]
self.dic_gui["camp_menu"]["supp_progress"].hide(); self.dic_gui["camp_menu"]["supp_finish"].show()
self.dic_gui["camp_menu"]["supp_return"].show(); self.dic_gui["camp_menu"]["supp_return"]["state"] = DGG.NORMAL
self.accept("enter",self.campaignVoile)
def checkMajStarter(self):
self.dic_gui["option_menu"]["maj_success"].hide(); self.dic_gui["option_menu"]["maj_quit"].hide()
self.dic_gui["option_menu"]["maj_retry"].hide(); self.dic_gui["option_menu"]["maj_retry"]["command"] = self.checkMajStarter
self.dic_gui["option_menu"]["maj_err0"].hide(); self.dic_gui["option_menu"]["maj_err1"].hide()
self.dic_gui["option_menu"]["maj_nomaj"].hide(); self.dic_gui["option_menu"]["maj_update"].hide()
self.dic_gui["option_menu"]["maj_doit"].hide(); self.dic_gui["option_menu"]["maj_upgrade"].hide()
self.dic_gui["option_menu"]["maj_cancel"].hide(); self.dic_gui["option_menu"]["maj_progress"].show()
self.dic_gui["option_menu"]["frame"].hide(); self.dic_gui["aux_menu"]["frame"].hide()
self.app.voile.show(); self.ignoreAll(); self.dic_gui["option_menu"]["maj_frame"].show()
self.app.change_cursor("blank"); self.dic_gui["option_menu"]["maj_progress"]["value"] = 0
taskMgr.doMethodLater(0.1,self.majTask,"check maj task")
def doMajStarter(self):
self.dic_gui["option_menu"]["maj_cancel"].hide()
self.dic_gui["option_menu"]["maj_retry"].hide(); self.dic_gui["option_menu"]["maj_retry"]["command"] = self.doMajStarter
self.dic_gui["option_menu"]["maj_update"].hide(); self.dic_gui["option_menu"]["maj_doit"].hide()
self.dic_gui["option_menu"]["maj_err0"].hide(); self.dic_gui["option_menu"]["maj_err1"].hide()
lst = listdir("arcns_tmp")
for elt in lst:
if not elt == "arcns_multifiles.json": os.unlink("arcns_tmp/"+elt)
self.dic_gui["option_menu"]["maj_progress"]["value"] = 5; self.dic_gui["option_menu"]["maj_upgrade"].show()
self.app.change_cursor("blank"); self.ignoreAll(); self.dic_gui["option_menu"]["maj_upgrade"]["value"] = 0
taskMgr.doMethodLater(0.1,self.majTask,"do maj task")
def majTask(self,task):
if self.dic_gui["option_menu"]["maj_progress"]["value"] == 0:
if not exists("arcns_tmp"):
try: os.mkdir("arcns_tmp")
except Exception,e:
print e; self.labelMaj(); return task.done
self.dic_gui["option_menu"]["maj_progress"]["value"] = 1; return task.again
elif self.dic_gui["option_menu"]["maj_progress"]["value"] == 1:
try:
urllib.urlretrieve("http://www.arcns.net/arcns_multifiles.json","arcns_tmp/arcns_multifiles.json")
except Exception,e:
print e; self.labelMaj(); return task.done
self.dic_gui["option_menu"]["maj_progress"]["value"] = 2; return task.again
elif self.dic_gui["option_menu"]["maj_progress"]["value"] == 2:
try:
self.tmp_multifiles = json.loads("".join([line.rstrip().lstrip() for line in file("arcns_tmp/arcns_multifiles.json","rb")]))
except Exception,e:
print e; self.labelMaj(); return task.done
self.dic_gui["option_menu"]["maj_progress"]["value"] = 3; return task.again
elif self.dic_gui["option_menu"]["maj_progress"]["value"] == 3:
for key in self.app.arcns_multifiles:
if self.app.arcns_multifiles[key] < self.tmp_multifiles[key]: self.dic_gui["option_menu"]["maj_progress"]["value"] = 4
else: del self.tmp_multifiles[key]
self.labelMaj(); return task.done
elif self.dic_gui["option_menu"]["maj_upgrade"]["value"] == 0:
try:
for key in self.tmp_multifiles:
fln = "patch_"+key+"_r"+str(self.app.arcns_multifiles[key])+"_r"+str(self.tmp_multifiles[key])+".mf"
urllib.urlretrieve("http://www.arcns.net/patchs/"+fln,"arcns_tmp/"+fln)
self.dic_gui["option_menu"]["maj_upgrade"]["value"] = 1; return task.again
except Exception,e:
print e; self.labelMaj(); return task.done
elif self.dic_gui["option_menu"]["maj_upgrade"]["value"] == 1:
try:
for key in self.tmp_multifiles: shutil.copy("arcns_mfs/"+key+"_r"+str(self.app.arcns_multifiles[key])+".mf","arcns_tmp")
self.dic_gui["option_menu"]["maj_upgrade"]["value"] = 2; return task.again
except Exception,e:
print e; self.labelMaj(); return task.done
elif self.dic_gui["option_menu"]["maj_upgrade"]["value"] == 2:
try:
p = Patchfile(); m = Multifile()
for key in self.tmp_multifiles:
fln = "patch_"+key+"_r"+str(self.app.arcns_multifiles[key])+"_r"+str(self.tmp_multifiles[key])
m.openRead("arcns_tmp/"+fln+".mf"); m.extractSubfile(0,"arcns_tmp/"+fln+".patch"); m.close()
rtn = p.apply(Filename("arcns_tmp/"+fln+".patch"),Filename("arcns_tmp/"+key+"_r"+str(self.app.arcns_multifiles[key])+".mf"))
if not rtn:
self.labelMaj(); return task.done
else: os.rename("arcns_tmp/"+key+"_r"+str(self.app.arcns_multifiles[key])+".mf","arcns_tmp/"+key+"_r"+str(self.tmp_multifiles[key])+".mf")
self.dic_gui["option_menu"]["maj_upgrade"]["value"] = 3; return task.again
except Exception,e:
print e; self.labelMaj(); return task.done
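# The branch above applies a binary delta with Panda3D's Patchfile/Multifile:
# each downloaded patch_<name>_r<old>_r<new>.mf holds a single .patch subfile,
# which is extracted and applied to the local <name>_r<old>.mf; on success the
# patched file is renamed to the new revision number.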
elif self.dic_gui["option_menu"]["maj_upgrade"]["value"] == 3:
try:
for key in self.tmp_multifiles:
shutil.copy("arcns_tmp/"+key+"_r"+str(self.tmp_multifiles[key])+".mf","arcns_mfs")
self.dic_gui["option_menu"]["maj_upgrade"]["value"] = 4; return task.again
except Exception,e:
print e; self.labelMaj(); return task.done
elif self.dic_gui["option_menu"]["maj_upgrade"]["value"] == 4:
for key in self.tmp_multifiles:
oldnb = self.app.arcns_multifiles[key]; self.app.arcns_multifiles[key] = self.tmp_multifiles[key]
mcm = open("arcns_multifiles.json","w"); mcm.write(json.dumps(self.app.arcns_multifiles)); mcm.close()
self.tmp_multifiles[key] = oldnb; os.unlink("arcns_mfs/"+key+"_r"+str(oldnb)+".mf")
self.dic_gui["option_menu"]["maj_success"].show(); self.dic_gui["option_menu"]["maj_quit"].show()
self.dic_gui["option_menu"]["maj_upgrade"].hide(); self.accept("enter",self.endingMaj)
self.app.change_cursor("main"); return task.done
def labelMaj(self):
self.app.change_cursor("main"); self.accept("escape",self.cancelMaj); val_btn = "retry"
self.accept("enter",(self.checkMajStarter if self.dic_gui["option_menu"]["maj_progress"] == 5 else self.doMajStarter))
self.dic_gui["option_menu"]["maj_progress"].hide(); self.dic_gui["option_menu"]["maj_upgrade"].hide()
if self.dic_gui["option_menu"]["maj_progress"]["value"] == 0: self.dic_gui["option_menu"]["maj_err0"].show()
elif self.dic_gui["option_menu"]["maj_progress"]["value"] == 1: self.dic_gui["option_menu"]["maj_err1"].show()
elif self.dic_gui["option_menu"]["maj_progress"]["value"] == 2: self.dic_gui["option_menu"]["maj_err1"].show()
elif self.dic_gui["option_menu"]["maj_progress"]["value"] == 3: self.dic_gui["option_menu"]["maj_nomaj"].show()
elif self.dic_gui["option_menu"]["maj_progress"]["value"] == 4:
val_btn = "doit"; self.dic_gui["option_menu"]["maj_update"].show()
elif self.dic_gui["option_menu"]["maj_upgrade"]["value"] == 0: self.dic_gui["option_menu"]["maj_err1"].show()
elif self.dic_gui["option_menu"]["maj_upgrade"]["value"] == 1: self.dic_gui["option_menu"]["maj_err0"].show()
elif self.dic_gui["option_menu"]["maj_upgrade"]["value"] == 2: self.dic_gui["option_menu"]["maj_err0"].show()
elif self.dic_gui["option_menu"]["maj_upgrade"]["value"] == 3: self.dic_gui["option_menu"]["maj_err0"].show()
self.dic_gui["option_menu"]["maj_cancel"].show(); self.dic_gui["option_menu"]["maj_"+val_btn].show()
def cancelMaj(self):
if exists("arcns_tmp"):
lst = listdir("arcns_tmp")
for elt in lst: os.unlink("arcns_tmp/"+elt)
os.rmdir("arcns_tmp")
self.app.voile.hide(); self.ignoreAll(); self.dic_gui["option_menu"]["maj_frame"].hide()
self.dic_gui["option_menu"]["frame"].show(); self.dic_gui["aux_menu"]["frame"].show()
self.accept("enter",self.actionSubMenu,["valid_opt"]); self.accept("escape",self.actionSubMenu,["quit"])
def endingMaj(self):
executable = sys.executable; args = sys.argv[:]; args.insert(0, sys.executable); os.execvp(executable, args)
def goMainMenuTask(self,task):
self.request("MainMenu"); return task.done
def subArcsTask(self,task):
if self.state == "MainMenu": self.dic_dynamics["arcs_aux_menu"].play("load")
elif self.state == "SubMenu": self.dic_dynamics["arcs_aux_menu"].play("unload")
return task.done
def initGameTask(self,task):
self.dic_dynamics["gates"].play("close_gates"); return task.done
""" ****************************
Méthodes pour la sortie du menu principal
**************************** """
def launchGame(self,sav,options=None):
self.app.change_cursor("blank"); self.ignoreAll(); self.accept("escape",sys.exit,[0])
self.app.transit = {}; self.app.transit["save"] = sav; self.app.transit["place"] = sav["saved_place"]
#
# TODO: handle the "Missions" sub-menu case
#
self.dic_dynamics["arcs_aux_menu"].play("unload"); self.dic_anims["cam_move_launch"].start()
taskMgr.doMethodLater(3.5,self.initGameTask,"close gates"); taskMgr.doMethodLater(10,self.app.game_screen,"launching the game")
def close(self):
self.ignoreAll(); taskMgr.remove(self.mouse_task); self.mouse_task = None
self.states = None
for key in self.dic_anims:
try: self.dic_anims[key].finish()
except: pass
self.dic_anims[key] = None
for key in self.dic_lights:
render.clearLight(self.dic_lights[key]); self.dic_lights[key].removeNode()
for key1 in self.dic_gui:
for key2 in self.dic_gui[key1]:
for t in self.dic_gui[key1][key2].options():
if t[0] == "command":
self.dic_gui[key1][key2]["command"] = None; break
self.dic_gui[key1][key2].removeNode()
for key in self.dic_arrows:
self.dic_arrows[key]["node"].removeNode(); self.dic_arrows[key]["card"].removeNode()
for key in self.dic_statics: self.dic_statics[key].removeNode()
for key in self.dic_dynamics: self.dic_dynamics[key].delete()
for key in self.dic_sounds:
self.dic_sounds[key].stop(); self.dic_sounds[key] = None
for key in self.dic_musics:
self.dic_musics[key].stop(); self.dic_musics[key] = None
self.dic_statics = None; self.dic_dynamics = None; self.dic_anims = None
self.dic_sounds = None; self.dic_musics = None
self.vers_txt.removeNode()
# DEBUG: this function will no longer be needed once the scene code is finished
def __del__(self):
print "delete mainscene"
###
|
lgpl-3.0
| -9,076,149,160,606,977,000 | 76.313265 | 189 | 0.566619 | false |
studywolf/pydmps
|
pydmps/dmp_rhythmic.py
|
1
|
5004
|
"""
Copyright (C) 2013 Travis DeWolf
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from pydmps.dmp import DMPs
import numpy as np
class DMPs_rhythmic(DMPs):
"""An implementation of discrete DMPs"""
def __init__(self, **kwargs):
"""
"""
# call super class constructor
super(DMPs_rhythmic, self).__init__(pattern="rhythmic", **kwargs)
self.gen_centers()
# set variance of Gaussian basis functions
# trial and error to find this spacing
self.h = np.ones(self.n_bfs) * self.n_bfs # 1.75
self.check_offset()
def gen_centers(self):
"""Set the centre of the Gaussian basis
functions be spaced evenly throughout run time"""
c = np.linspace(0, 2 * np.pi, self.n_bfs + 1)
c = c[0:-1]
self.c = c
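# Illustration (hypothetical values): with n_bfs = 4 this gives
# c = [0, pi/2, pi, 3*pi/2]; linspace covers [0, 2*pi] with n_bfs + 1 points
# and the endpoint is dropped because 0 and 2*pi coincide on the circle.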
def gen_front_term(self, x, dmp_num):
"""Generates the front term on the forcing term.
For rhythmic DMPs it's non-diminishing, so this
function is just a placeholder to return 1.
x float: the current value of the canonical system
dmp_num int: the index of the current dmp
"""
if isinstance(x, np.ndarray):
return np.ones(x.shape)
return 1
def gen_goal(self, y_des):
"""Generate the goal for path imitation.
For rhythmic DMPs the goal is the average of the
desired trajectory.
y_des np.array: the desired trajectory to follow
"""
goal = np.zeros(self.n_dmps)
for n in range(self.n_dmps):
num_idx = ~np.isnan(y_des[n]) # ignore nan's when calculating goal
goal[n] = 0.5 * (y_des[n, num_idx].min() + y_des[n, num_idx].max())
return goal
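# Example (hypothetical input): y_des = np.array([[0.0, 1.0, 2.0, np.nan]])
# yields goal = [1.0], the midpoint of the finite minimum (0.0) and maximum (2.0).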
def gen_psi(self, x):
"""Generates the activity of the basis functions for a given
canonical system state or path.
x float, array: the canonical system state or path
"""
if isinstance(x, np.ndarray):
x = x[:, None]
return np.exp(self.h * (np.cos(x - self.c) - 1))
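# Minimal usage sketch (assuming matplotlib, as in the test code below):
# x = np.linspace(0, 2 * np.pi, 200)
# psi = DMPs_rhythmic(n_dmps=1, n_bfs=5).gen_psi(x)  # shape (200, 5)
# plt.plot(x, psi)  # five von Mises-like bumps, each peaking where x == c
# Unlike a Gaussian basis, exp(h * (cos(x - c) - 1)) is 2*pi-periodic.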
def gen_weights(self, f_target):
"""Generate a set of weights over the basis functions such
that the target forcing term trajectory is matched.
f_target np.array: the desired forcing term trajectory
"""
# calculate x and psi
x_track = self.cs.rollout()
psi_track = self.gen_psi(x_track)
# efficiently calculate BF weights using weighted linear regression
for d in range(self.n_dmps):
for b in range(self.n_bfs):
self.w[d, b] = np.dot(psi_track[:, b], f_target[:, d]) / (
np.sum(psi_track[:, b]) + 1e-10
)
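# Each weight is a weighted regression of the target forcing term onto one
# basis function: w[d, b] = sum_t(psi[t, b] * f_target[t, d]) /
# (sum_t(psi[t, b]) + 1e-10). The general formulation also weights by the
# front term, which gen_front_term fixes at 1 for rhythmic DMPs, so it
# drops out here.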
# ==============================
# Test code
# ==============================
if __name__ == "__main__":
import matplotlib.pyplot as plt
# test normal run
dmp = DMPs_rhythmic(n_dmps=1, n_bfs=10, w=np.zeros((1, 10)))
y_track, dy_track, ddy_track = dmp.rollout()
plt.figure(1, figsize=(6, 3))
plt.plot(np.ones(len(y_track)) * dmp.goal, "r--", lw=2)
plt.plot(y_track, lw=2)
plt.title("DMP system - no forcing term")
plt.xlabel("time (ms)")
plt.ylabel("system trajectory")
plt.legend(["goal", "system state"], loc="lower right")
plt.tight_layout()
# test imitation of path run
plt.figure(2, figsize=(6, 4))
n_bfs = [10, 30, 50, 100, 10000]
# a periodic path to imitate (a sine wave)
path1 = np.sin(np.arange(0, 2 * np.pi, 0.01) * 5)
# a step-function path to imitate
path2 = np.zeros(path1.shape)
path2[int(len(path2) / 2.0) :] = 0.5
for ii, bfs in enumerate(n_bfs):
dmp = DMPs_rhythmic(n_dmps=2, n_bfs=bfs)
dmp.imitate_path(y_des=np.array([path1, path2]))
y_track, dy_track, ddy_track = dmp.rollout()
plt.figure(2)
plt.subplot(211)
plt.plot(y_track[:, 0], lw=2)
plt.subplot(212)
plt.plot(y_track[:, 1], lw=2)
plt.subplot(211)
a = plt.plot(path1, "r--", lw=2)
plt.title("DMP imitate path")
plt.xlabel("time (ms)")
plt.ylabel("system trajectory")
plt.legend([a[0]], ["desired path"], loc="lower right")
plt.subplot(212)
b = plt.plot(path2, "r--", lw=2)
plt.title("DMP imitate path")
plt.xlabel("time (ms)")
plt.ylabel("system trajectory")
plt.legend(["%i BFs" % i for i in n_bfs], loc="lower right")
plt.tight_layout()
plt.show()
|
gpl-3.0
| -4,765,107,268,492,839,000 | 29.888889 | 79 | 0.592126 | false |
idrogeno/enigma2
|
lib/python/Components/NimManager.py
|
1
|
85000
|
from boxbranding import getBoxType
from time import localtime, mktime
from datetime import datetime
import xml.etree.cElementTree
from os import path
from enigma import eDVBSatelliteEquipmentControl as secClass, \
eDVBSatelliteLNBParameters as lnbParam, \
eDVBSatelliteDiseqcParameters as diseqcParam, \
eDVBSatelliteSwitchParameters as switchParam, \
eDVBSatelliteRotorParameters as rotorParam, \
eDVBResourceManager, eDVBDB, eEnv, iDVBFrontend
from Tools.HardwareInfo import HardwareInfo
from Tools.BoundFunction import boundFunction
from Components.About import about
from config import config, ConfigSubsection, ConfigSelection, ConfigFloat, ConfigSatlist, ConfigYesNo, ConfigInteger, ConfigSubList, ConfigNothing, ConfigSubDict, ConfigOnOff, ConfigDateTime, ConfigText
maxFixedLnbPositions = 0
# LNB65 3601 All satellites 1 (USALS)
# LNB66 3602 All satellites 2 (USALS)
# LNB67 3603 All satellites 3 (USALS)
# LNB68 3604 All satellites 4 (USALS)
# LNB69 3605 Selecting satellites 1 (USALS)
# LNB70 3606 Selecting satellites 2 (USALS)
MAX_LNB_WILDCARDS = 6
MAX_ORBITPOSITION_WILDCARDS = 6
#magic numbers
ORBITPOSITION_LIMIT = 3600
def getConfigSatlist(orbpos, satlist):
default_orbpos = None
for x in satlist:
if x[0] == orbpos:
default_orbpos = orbpos
break
return ConfigSatlist(satlist, default_orbpos)
class SecConfigure:
def getConfiguredSats(self):
return self.configuredSatellites
def addSatellite(self, sec, orbpos):
sec.addSatellite(orbpos)
self.configuredSatellites.add(orbpos)
def addLNBSimple(self, sec, slotid, diseqcmode, toneburstmode = diseqcParam.NO, diseqcpos = diseqcParam.SENDNO, orbpos = 0, longitude = 0, latitude = 0, loDirection = 0, laDirection = 0, turningSpeed = rotorParam.FAST, useInputPower=True, inputPowerDelta=50, fastDiSEqC = False, setVoltageTone = True, diseqc13V = False, CircularLNB = False):
if orbpos is None or orbpos == 3600 or orbpos == 3601:
return
#simple defaults
if sec.addLNB():
print "No space left on m_lnbs (mac No. 144 LNBs exceeded)"
return
tunermask = 1 << slotid
if self.equal.has_key(slotid):
for slot in self.equal[slotid]:
tunermask |= (1 << slot)
if self.linked.has_key(slotid):
for slot in self.linked[slotid]:
tunermask |= (1 << slot)
sec.setLNBSatCR(-1)
sec.setLNBSatCRTuningAlgo(0)
sec.setLNBSatCRpositionnumber(1)
sec.setLNBLOFL(CircularLNB and 10750000 or 9750000)
sec.setLNBLOFH(CircularLNB and 10750000 or 10600000)
sec.setLNBThreshold(CircularLNB and 10750000 or 11700000)
sec.setLNBIncreasedVoltage(False)
sec.setRepeats(0)
sec.setFastDiSEqC(fastDiSEqC)
sec.setSeqRepeat(False)
sec.setCommandOrder(0)
#user values
sec.setDiSEqCMode(3 if diseqcmode == 4 else diseqcmode)
sec.setToneburst(toneburstmode)
sec.setCommittedCommand(diseqcpos)
sec.setUncommittedCommand(0) # SENDNO
if 0 <= diseqcmode < 3:
self.addSatellite(sec, orbpos)
if setVoltageTone:
if diseqc13V:
sec.setVoltageMode(switchParam.HV_13)
else:
sec.setVoltageMode(switchParam.HV)
sec.setToneMode(switchParam.HILO)
else:
# noinspection PyProtectedMember
sec.setVoltageMode(switchParam._14V)
sec.setToneMode(switchParam.OFF)
elif 3 <= diseqcmode < 5: # diseqc 1.2
if self.satposdepends.has_key(slotid):
for slot in self.satposdepends[slotid]:
tunermask |= (1 << slot)
sec.setLatitude(latitude)
sec.setLaDirection(laDirection)
sec.setLongitude(longitude)
sec.setLoDirection(loDirection)
sec.setUseInputpower(useInputPower)
sec.setInputpowerDelta(inputPowerDelta)
sec.setRotorTurningSpeed(turningSpeed)
user_satList = self.NimManager.satList
if diseqcmode == 4:
user_satList = []
if orbpos and isinstance(orbpos, str):
for user_sat in self.NimManager.satList:
if str(user_sat[0]) in orbpos:
user_satList.append(user_sat)
for x in user_satList:
print "Add sat " + str(x[0])
self.addSatellite(sec, int(x[0]))
if diseqc13V:
sec.setVoltageMode(switchParam.HV_13)
else:
sec.setVoltageMode(switchParam.HV)
sec.setToneMode(switchParam.HILO)
sec.setRotorPosNum(0) # USALS
sec.setLNBSlotMask(tunermask)
def setSatposDepends(self, sec, nim1, nim2):
print "tuner", nim1, "depends on satpos of", nim2
sec.setTunerDepends(nim1, nim2)
def linkInternally(self, slotid):
nim = self.NimManager.getNim(slotid)
if nim.internallyConnectableTo is not None:
nim.setInternalLink()
def linkNIMs(self, sec, nim1, nim2):
print "link tuner", nim1, "to tuner", nim2
# for internally connect tuner A to B
if getBoxType() == 'vusolo2' or nim2 == (nim1 - 1):
self.linkInternally(nim1)
sec.setTunerLinked(nim1, nim2)
def getRoot(self, slotid, connto):
visited = []
while self.NimManager.getNimConfig(connto).configMode.value in ("satposdepends", "equal", "loopthrough"):
connto = int(self.NimManager.getNimConfig(connto).connectedTo.value)
if connto in visited: # prevent endless loop
return slotid
visited.append(connto)
return connto
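# Example (hypothetical slots): if tuner C is "loopthrough" to tuner B and
# tuner B is "loopthrough" to tuner A, getRoot follows connectedTo down the
# chain and returns A's slot id; a cycle in the chain falls back to slotid.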
def update(self):
sec = secClass.getInstance()
self.configuredSatellites = set()
for slotid in self.NimManager.getNimListOfType("DVB-S"):
if self.NimManager.nimInternallyConnectableTo(slotid) is not None:
self.NimManager.nimRemoveInternalLink(slotid)
sec.clear() ## this also unlinks NIMs !!
print "sec config cleared"
self.linked = { }
self.satposdepends = { }
self.equal = { }
nim_slots = self.NimManager.nim_slots
used_nim_slots = [ ]
for slot in nim_slots:
if slot.type is not None:
used_nim_slots.append((slot.slot, slot.description, slot.config.configMode.value != "nothing" and True or False, slot.isCompatible("DVB-S2"), slot.frontend_id is None and -1 or slot.frontend_id))
eDVBResourceManager.getInstance().setFrontendSlotInformations(used_nim_slots)
try:
for slot in nim_slots:
if slot.frontend_id is not None:
types = [type for type in ["DVB-C", "DVB-T", "DVB-T2", "DVB-S", "DVB-S2", "ATSC"] if eDVBResourceManager.getInstance().frontendIsCompatible(slot.frontend_id, type)]
if "DVB-T2" in types:
# DVB-T2 implies DVB-T support
types.remove("DVB-T")
if "DVB-S2" in types:
# DVB-S2 implies DVB-S support
types.remove("DVB-S")
if len(types) > 1:
slot.multi_type = {}
for type in types:
slot.multi_type[str(types.index(type))] = type
except:
pass
for slot in nim_slots:
x = slot.slot
nim = slot.config
if slot.isCompatible("DVB-S"):
# save what nim we link to/are equal to/satposdepends to.
# this is stored in the *value* (not index!) of the config list
if nim.configMode.value == "equal":
connto = self.getRoot(x, int(nim.connectedTo.value))
if not self.equal.has_key(connto):
self.equal[connto] = []
self.equal[connto].append(x)
elif nim.configMode.value == "loopthrough":
self.linkNIMs(sec, x, int(nim.connectedTo.value))
connto = self.getRoot(x, int(nim.connectedTo.value))
if not self.linked.has_key(connto):
self.linked[connto] = []
self.linked[connto].append(x)
elif nim.configMode.value == "satposdepends":
self.setSatposDepends(sec, x, int(nim.connectedTo.value))
connto = self.getRoot(x, int(nim.connectedTo.value))
if not self.satposdepends.has_key(connto):
self.satposdepends[connto] = []
self.satposdepends[connto].append(x)
for slot in nim_slots:
x = slot.slot
nim = slot.config
hw = HardwareInfo()
if slot.isCompatible("DVB-S"):
print "slot: " + str(x) + " configmode: " + str(nim.configMode.value)
if nim.configMode.value in ( "loopthrough", "satposdepends", "nothing" ):
pass
else:
sec.setSlotNotLinked(x)
if nim.configMode.value == "equal":
pass
elif nim.configMode.value == "simple": #simple config
print "diseqcmode: ", nim.diseqcMode.value
if nim.diseqcMode.value == "single": #single
currentCircular = False
if nim.diseqcA.value in ("360", "560"):
currentCircular = nim.simpleDiSEqCSetCircularLNB.value
if nim.simpleSingleSendDiSEqC.value:
self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcA.orbital_position, toneburstmode = diseqcParam.NO, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.AA, diseqc13V = nim.diseqc13V.value, CircularLNB = currentCircular)
else:
self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcA.orbital_position, toneburstmode = diseqcParam.NO, diseqcmode = diseqcParam.NONE, diseqcpos = diseqcParam.SENDNO, diseqc13V = nim.diseqc13V.value, CircularLNB = currentCircular)
elif nim.diseqcMode.value == "toneburst_a_b": #Toneburst A/B
self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcA.orbital_position, toneburstmode = diseqcParam.A, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.SENDNO, diseqc13V = nim.diseqc13V.value)
self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcB.orbital_position, toneburstmode = diseqcParam.B, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.SENDNO, diseqc13V = nim.diseqc13V.value)
elif nim.diseqcMode.value == "diseqc_a_b": #DiSEqC A/B
fastDiSEqC = nim.simpleDiSEqCOnlyOnSatChange.value
setVoltageTone = nim.simpleDiSEqCSetVoltageTone.value
self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcA.orbital_position, toneburstmode = diseqcParam.NO, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.AA, fastDiSEqC = fastDiSEqC, setVoltageTone = setVoltageTone, diseqc13V = nim.diseqc13V.value)
self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcB.orbital_position, toneburstmode = diseqcParam.NO, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.AB, fastDiSEqC = fastDiSEqC, setVoltageTone = setVoltageTone, diseqc13V = nim.diseqc13V.value)
elif nim.diseqcMode.value == "diseqc_a_b_c_d": #DiSEqC A/B/C/D
fastDiSEqC = nim.simpleDiSEqCOnlyOnSatChange.value
setVoltageTone = nim.simpleDiSEqCSetVoltageTone.value
self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcA.orbital_position, toneburstmode = diseqcParam.NO, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.AA, fastDiSEqC = fastDiSEqC, setVoltageTone = setVoltageTone, diseqc13V = nim.diseqc13V.value)
self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcB.orbital_position, toneburstmode = diseqcParam.NO, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.AB, fastDiSEqC = fastDiSEqC, setVoltageTone = setVoltageTone, diseqc13V = nim.diseqc13V.value)
self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcC.orbital_position, toneburstmode = diseqcParam.NO, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.BA, fastDiSEqC = fastDiSEqC, setVoltageTone = setVoltageTone, diseqc13V = nim.diseqc13V.value)
self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcD.orbital_position, toneburstmode = diseqcParam.NO, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.BB, fastDiSEqC = fastDiSEqC, setVoltageTone = setVoltageTone, diseqc13V = nim.diseqc13V.value)
elif nim.diseqcMode.value in ("positioner", "positioner_select"): #Positioner
current_mode = 3
sat = 0
if nim.diseqcMode.value == "positioner_select":
current_mode = 4
sat = nim.userSatellitesList.value
if nim.latitudeOrientation.value == "north":
laValue = rotorParam.NORTH
else:
laValue = rotorParam.SOUTH
if nim.longitudeOrientation.value == "east":
loValue = rotorParam.EAST
else:
loValue = rotorParam.WEST
inputPowerDelta=nim.powerThreshold.value
useInputPower=False
turning_speed=0
if nim.powerMeasurement.value:
useInputPower=True
turn_speed_dict = { "fast": rotorParam.FAST, "slow": rotorParam.SLOW }
if turn_speed_dict.has_key(nim.turningSpeed.value):
turning_speed = turn_speed_dict[nim.turningSpeed.value]
else:
beg_time = localtime(nim.fastTurningBegin.value)
end_time = localtime(nim.fastTurningEnd.value)
turning_speed = ((beg_time.tm_hour+1) * 60 + beg_time.tm_min + 1) << 16
turning_speed |= (end_time.tm_hour+1) * 60 + end_time.tm_min + 1
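# The fast-turning window is packed into a single integer: the begin time in
# the upper 16 bits and the end time in the lower 16 bits, each encoded as
# (hour + 1) * 60 + minute + 1. For example (hypothetical values), a window
# of 22:00-06:00 encodes as (23 * 60 + 1) << 16 | (7 * 60 + 1).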
self.addLNBSimple(sec, slotid = x, diseqcmode = current_mode,
orbpos = sat,
longitude = nim.longitude.float,
loDirection = loValue,
latitude = nim.latitude.float,
laDirection = laValue,
turningSpeed = turning_speed,
useInputPower = useInputPower,
inputPowerDelta = inputPowerDelta,
diseqc13V = nim.diseqc13V.value)
elif nim.configMode.value == "advanced": #advanced config
self.updateAdvanced(sec, x)
print "sec config completed"
def updateAdvanced(self, sec, slotid):
try:
if config.Nims[slotid].advanced.unicableconnected is not None:
if config.Nims[slotid].advanced.unicableconnected.value:
config.Nims[slotid].advanced.unicableconnectedTo.save_forced = True
self.linkNIMs(sec, slotid, int(config.Nims[slotid].advanced.unicableconnectedTo.value))
connto = self.getRoot(slotid, int(config.Nims[slotid].advanced.unicableconnectedTo.value))
if not self.linked.has_key(connto):
self.linked[connto] = []
self.linked[connto].append(slotid)
else:
config.Nims[slotid].advanced.unicableconnectedTo.save_forced = False
except:
pass
lnbSat = {}
for x in range(1, 71):
lnbSat[x] = []
#wildcard for all satellites ( for rotor )
for x in range(3601, 3605):
lnb = int(config.Nims[slotid].advanced.sat[x].lnb.value)
if lnb != 0:
for x in self.NimManager.satList:
print "add", x[0], "to", lnb
lnbSat[lnb].append(x[0])
#wildcard for user satellites ( for rotor )
for x in range(3605, 3607):
lnb = int(config.Nims[slotid].advanced.sat[x].lnb.value)
if lnb != 0:
for user_sat in self.NimManager.satList:
if str(user_sat[0]) in config.Nims[slotid].advanced.sat[x].userSatellitesList.value:
print "add", user_sat[0], "to", lnb
lnbSat[lnb].append(user_sat[0])
for x in self.NimManager.satList:
lnb = int(config.Nims[slotid].advanced.sat[x[0]].lnb.value)
if lnb != 0:
print "add", x[0], "to", lnb
lnbSat[lnb].append(x[0])
for x in range(1, 71):
if len(lnbSat[x]) > 0:
currLnb = config.Nims[slotid].advanced.lnb[x]
if sec.addLNB():
print "No space left on m_lnbs (max No. 144 LNBs exceeded)"
return
posnum = 1 #default if LNB is movable
if x <= maxFixedLnbPositions:
posnum = x
sec.setLNBSatCRpositionnumber(x) # LNB has fixed Position
else:
sec.setLNBSatCRpositionnumber(0) # or not (movable LNB)
tunermask = 1 << slotid
if self.equal.has_key(slotid):
for slot in self.equal[slotid]:
tunermask |= (1 << slot)
if self.linked.has_key(slotid):
for slot in self.linked[slotid]:
tunermask |= (1 << slot)
if currLnb.lof.value != "unicable":
sec.setLNBSatCR(-1)
sec.setLNBSatCRTuningAlgo(0)
if currLnb.lof.value == "universal_lnb":
sec.setLNBLOFL(9750000)
sec.setLNBLOFH(10600000)
sec.setLNBThreshold(11700000)
elif currLnb.lof.value == "unicable":
def setupUnicable(configManufacturer, ProductDict):
manufacturer_name = configManufacturer.value
manufacturer = ProductDict[manufacturer_name]
product_name = manufacturer.product.value
if product_name == "None" and manufacturer.product.saved_value != "None":
product_name = manufacturer.product.value = manufacturer.product.saved_value
manufacturer_scr = manufacturer.scr
manufacturer_positions_value = manufacturer.positions[product_name][0].value
position_idx = (posnum - 1) % manufacturer_positions_value
if product_name in manufacturer_scr:
diction = manufacturer.diction[product_name].value
positionsoffset = manufacturer.positionsoffset[product_name][0].value
if diction !="EN50607" or ((posnum <= (positionsoffset + manufacturer_positions_value) and (posnum > positionsoffset) and x <= maxFixedLnbPositions)): #for every allowed position
sec.setLNBSatCRformat(diction =="EN50607" and 1 or 0)
sec.setLNBSatCR(manufacturer_scr[product_name].index)
sec.setLNBSatCRvco(manufacturer.vco[product_name][manufacturer_scr[product_name].index].value*1000)
sec.setLNBSatCRpositions(manufacturer_positions_value)
sec.setLNBLOFL(manufacturer.lofl[product_name][position_idx].value * 1000)
sec.setLNBLOFH(manufacturer.lofh[product_name][position_idx].value * 1000)
sec.setLNBThreshold(manufacturer.loft[product_name][position_idx].value * 1000)
sec.setLNBSatCRTuningAlgo(currLnb.unicableTuningAlgo.value == "reliable" and 1 or 0)
configManufacturer.save_forced = True
manufacturer.product.save_forced = True
manufacturer.vco[product_name][manufacturer_scr[product_name].index].save_forced = True
else: #position number out of range
print "position number out of range"
else:
print "no product in list"
if currLnb.unicable.value == "unicable_user":
#TODO satpositions for satcruser
if currLnb.dictionuser.value == "EN50607":
sec.setLNBSatCRformat(1)
sec.setLNBSatCR(currLnb.satcruserEN50607.index)
sec.setLNBSatCRvco(currLnb.satcrvcouserEN50607[currLnb.satcruserEN50607.index].value*1000)
else:
sec.setLNBSatCRformat(0)
sec.setLNBSatCR(currLnb.satcruserEN50494.index)
sec.setLNBSatCRvco(currLnb.satcrvcouserEN50494[currLnb.satcruserEN50494.index].value*1000)
sec.setLNBLOFL(currLnb.lofl.value * 1000)
sec.setLNBLOFH(currLnb.lofh.value * 1000)
sec.setLNBThreshold(currLnb.threshold.value * 1000)
sec.setLNBSatCRpositions(64)
elif currLnb.unicable.value == "unicable_matrix":
self.reconstructUnicableDate(currLnb.unicableMatrixManufacturer, currLnb.unicableMatrix, currLnb)
setupUnicable(currLnb.unicableMatrixManufacturer, currLnb.unicableMatrix)
elif currLnb.unicable.value == "unicable_lnb":
self.reconstructUnicableDate(currLnb.unicableLnbManufacturer, currLnb.unicableLnb, currLnb)
setupUnicable(currLnb.unicableLnbManufacturer, currLnb.unicableLnb)
elif currLnb.lof.value == "c_band":
sec.setLNBLOFL(5150000)
sec.setLNBLOFH(5150000)
sec.setLNBThreshold(5150000)
elif currLnb.lof.value == "user_defined":
sec.setLNBLOFL(currLnb.lofl.value * 1000)
sec.setLNBLOFH(currLnb.lofh.value * 1000)
sec.setLNBThreshold(currLnb.threshold.value * 1000)
elif currLnb.lof.value == "circular_lnb":
sec.setLNBLOFL(10750000)
sec.setLNBLOFH(10750000)
sec.setLNBThreshold(10750000)
if currLnb.increased_voltage.value:
sec.setLNBIncreasedVoltage(True)
else:
sec.setLNBIncreasedVoltage(False)
dm = currLnb.diseqcMode.value
if dm == "none":
sec.setDiSEqCMode(diseqcParam.NONE)
elif dm == "1_0":
sec.setDiSEqCMode(diseqcParam.V1_0)
elif dm == "1_1":
sec.setDiSEqCMode(diseqcParam.V1_1)
elif dm == "1_2":
sec.setDiSEqCMode(diseqcParam.V1_2)
if self.satposdepends.has_key(slotid):
for slot in self.satposdepends[slotid]:
tunermask |= (1 << slot)
if dm != "none":
if currLnb.toneburst.value == "none":
sec.setToneburst(diseqcParam.NO)
elif currLnb.toneburst.value == "A":
sec.setToneburst(diseqcParam.A)
elif currLnb.toneburst.value == "B":
sec.setToneburst(diseqcParam.B)
# Committed Diseqc Command
cdc = currLnb.commitedDiseqcCommand.value
c = { "none": diseqcParam.SENDNO,
"AA": diseqcParam.AA,
"AB": diseqcParam.AB,
"BA": diseqcParam.BA,
"BB": diseqcParam.BB }
if c.has_key(cdc):
sec.setCommittedCommand(c[cdc])
else:
sec.setCommittedCommand(long(cdc))
sec.setFastDiSEqC(currLnb.fastDiseqc.value)
sec.setSeqRepeat(currLnb.sequenceRepeat.value)
if currLnb.diseqcMode.value == "1_0":
currCO = currLnb.commandOrder1_0.value
sec.setRepeats(0)
else:
currCO = currLnb.commandOrder.value
udc = int(currLnb.uncommittedDiseqcCommand.value)
if udc > 0:
sec.setUncommittedCommand(0xF0|(udc-1))
else:
sec.setUncommittedCommand(0) # SENDNO
sec.setRepeats({"none": 0, "one": 1, "two": 2, "three": 3}[currLnb.diseqcRepeats.value])
setCommandOrder = False
# 0 "committed, toneburst",
# 1 "toneburst, committed",
# 2 "committed, uncommitted, toneburst",
# 3 "toneburst, committed, uncommitted",
# 4 "uncommitted, committed, toneburst"
# 5 "toneburst, uncommitted, commmitted"
order_map = {"ct": 0, "tc": 1, "cut": 2, "tcu": 3, "uct": 4, "tuc": 5}
sec.setCommandOrder(order_map[currCO])
if dm == "1_2":
latitude = currLnb.latitude.float
sec.setLatitude(latitude)
longitude = currLnb.longitude.float
sec.setLongitude(longitude)
if currLnb.latitudeOrientation.value == "north":
sec.setLaDirection(rotorParam.NORTH)
else:
sec.setLaDirection(rotorParam.SOUTH)
if currLnb.longitudeOrientation.value == "east":
sec.setLoDirection(rotorParam.EAST)
else:
sec.setLoDirection(rotorParam.WEST)
if currLnb.powerMeasurement.value:
sec.setUseInputpower(True)
sec.setInputpowerDelta(currLnb.powerThreshold.value)
turn_speed_dict = { "fast": rotorParam.FAST, "slow": rotorParam.SLOW }
if turn_speed_dict.has_key(currLnb.turningSpeed.value):
turning_speed = turn_speed_dict[currLnb.turningSpeed.value]
else:
beg_time = localtime(currLnb.fastTurningBegin.value)
end_time = localtime(currLnb.fastTurningEnd.value)
turning_speed = ((beg_time.tm_hour + 1) * 60 + beg_time.tm_min + 1) << 16
turning_speed |= (end_time.tm_hour + 1) * 60 + end_time.tm_min + 1
sec.setRotorTurningSpeed(turning_speed)
else:
sec.setUseInputpower(False)
sec.setLNBSlotMask(tunermask)
sec.setLNBPrio(int(currLnb.prio.value))
# finally add the orbital positions
for y in lnbSat[x]:
self.addSatellite(sec, y)
if x > maxFixedLnbPositions:
satpos = x > maxFixedLnbPositions and (3606-(70 - x)) or y
else:
satpos = y
currSat = config.Nims[slotid].advanced.sat[satpos]
if currSat.voltage.value == "polarization":
if config.Nims[slotid].diseqc13V.value:
sec.setVoltageMode(switchParam.HV_13)
else:
sec.setVoltageMode(switchParam.HV)
elif currSat.voltage.value == "13V":
# noinspection PyProtectedMember
sec.setVoltageMode(switchParam._14V)
elif currSat.voltage.value == "18V":
# noinspection PyProtectedMember
sec.setVoltageMode(switchParam._18V)
if currSat.tonemode.value == "band":
sec.setToneMode(switchParam.HILO)
elif currSat.tonemode.value == "on":
sec.setToneMode(switchParam.ON)
elif currSat.tonemode.value == "off":
sec.setToneMode(switchParam.OFF)
if not currSat.usals.value and x <= maxFixedLnbPositions:
sec.setRotorPosNum(currSat.rotorposition.value)
else:
sec.setRotorPosNum(0) #USALS
def reconstructUnicableDate(self, configManufacturer, ProductDict, currLnb):
val = currLnb.content.stored_values
if currLnb.unicable.value == "unicable_lnb":
ManufacturerName = val.get('unicableLnbManufacturer', 'none')
SDict = val.get('unicableLnb', None)
elif currLnb.unicable.value == "unicable_matrix":
ManufacturerName = val.get('unicableMatrixManufacturer', 'none')
SDict = val.get('unicableMatrix', None)
else:
return
# print "[reconstructUnicableDate] SDict %s" % SDict
if SDict is None:
return
print "ManufacturerName %s" % ManufacturerName
PDict = SDict.get(ManufacturerName, None) #dict contained last stored device data
if PDict is None:
return
PN = PDict.get('product', None) #product name
if PN is None:
return
if ManufacturerName in ProductDict.keys(): # manufacturer is listed, use its ConfigSubsection
tmp = ProductDict[ManufacturerName]
if PN in tmp.product.choices.choices:
return
else: #if the manufacturer is not in the list, generate a new ConfigSubsection
print "[reconstructUnicableDate] Manufacturer %s not in unicable.xml" % ManufacturerName
tmp = ConfigSubsection()
tmp.scr = ConfigSubDict()
tmp.vco = ConfigSubDict()
tmp.lofl = ConfigSubDict()
tmp.lofh = ConfigSubDict()
tmp.loft = ConfigSubDict()
tmp.diction = ConfigSubDict()
tmp.product = ConfigSelection(choices = [], default = None)
if PN not in tmp.product.choices.choices:
print "[reconstructUnicableDate] Product %s not in unicable.xml" % PN
scrlist = []
SatCR = int(PDict.get('scr', {PN: 1}).get(PN, 1)) - 1
vco = int(PDict.get('vco', {PN: {}}).get(PN, {}).get(str(SatCR), 1))
positionslist=[1,(9750, 10600, 11700)] ##adenin_todo
positions = int(positionslist[0])
tmp.positions = ConfigSubDict()
tmp.positions[PN] = ConfigSubList()
tmp.positions[PN].append(ConfigInteger(default=positions, limits = (positions, positions)))
tmp.vco[PN] = ConfigSubList()
for cnt in range(0,SatCR + 1):
vcofreq = (cnt == SatCR) and vco or 0 # equivalent to: vcofreq = vco if cnt == SatCR else 0
if vcofreq == 0 :
scrlist.append(("%d" %(cnt+1),"SCR %d " %(cnt+1) +_("not used")))
else:
scrlist.append(("%d" %(cnt+1),"SCR %d" %(cnt+1)))
print "vcofreq %d" % vcofreq
tmp.vco[PN].append(ConfigInteger(default=vcofreq, limits = (vcofreq, vcofreq)))
tmp.scr[PN] = ConfigSelection(choices = scrlist, default = scrlist[SatCR][0])
tmp.lofl[PN] = ConfigSubList()
tmp.lofh[PN] = ConfigSubList()
tmp.loft[PN] = ConfigSubList()
for cnt in range(1,positions+1):
lofl = int(positionslist[cnt][0])
lofh = int(positionslist[cnt][1])
loft = int(positionslist[cnt][2])
tmp.lofl[PN].append(ConfigInteger(default=lofl, limits = (lofl, lofl)))
tmp.lofh[PN].append(ConfigInteger(default=lofh, limits = (lofh, lofh)))
tmp.loft[PN].append(ConfigInteger(default=loft, limits = (loft, loft)))
dictionlist = [("EN50494", "Unicable(EN50494)")] ##adenin_todo
tmp.diction[PN] = ConfigSelection(choices = dictionlist, default = dictionlist[0][0])
tmp.product.choices.choices.append(PN)
tmp.product.choices.default = PN
tmp.scr[PN].save_forced = True
tmp.scr.save_forced = True
tmp.vco.save_forced = True
tmp.product.save_forced = True
ProductDict[ManufacturerName] = tmp
if ManufacturerName not in configManufacturer.choices.choices: #check if name is in the choices list
configManufacturer.choices.choices.append(ManufacturerName) #add name to the choices list
def __init__(self, nimmgr):
self.NimManager = nimmgr
self.configuredSatellites = set()
self.update()
class NIM(object):
def __init__(self, slot, type, description, has_outputs=True, internally_connectable=None, multi_type=None, frontend_id=None, i2c=None, is_empty=False, input_name = None):
if not multi_type: multi_type = {}
self.slot = slot
if type not in ("DVB-S", "DVB-C", "DVB-T", "DVB-S2", "DVB-T2", "DVB-C2", "ATSC", None):
print "warning: unknown NIM type %s, not using." % type
type = None
self.type = type
self.description = description
self.has_outputs = has_outputs
self.internally_connectable = internally_connectable
self.multi_type = multi_type
self.i2c = i2c
self.frontend_id = frontend_id
self.__is_empty = is_empty
self.input_name = input_name
self.compatible = {
None: (None,),
"DVB-S": ("DVB-S", None),
"DVB-C": ("DVB-C", None),
"DVB-T": ("DVB-T", None),
"DVB-S2": ("DVB-S", "DVB-S2", None),
"DVB-C2": ("DVB-C", "DVB-C2", None),
"DVB-T2": ("DVB-T", "DVB-T2", None),
"ATSC": ("ATSC", None),
}
def isCompatible(self, what):
if not self.isSupported():
return False
return what in self.compatible[self.getType()]
def canBeCompatible(self, what):
if not self.isSupported():
return False
if self.isCompatible(what):
return True
for type in self.multi_type.values():
if what in self.compatible[type]:
return True
return False
def getType(self):
try:
if self.isMultiType():
return self.multi_type[self.config.multiType.value]
except:
pass
return self.type
def connectableTo(self):
connectable = {
"DVB-S": ("DVB-S", "DVB-S2"),
"DVB-C": ("DVB-C", "DVB-C2"),
"DVB-T": ("DVB-T","DVB-T2"),
"DVB-S2": ("DVB-S", "DVB-S2"),
"DVB-C2": ("DVB-C", "DVB-C2"),
"DVB-T2": ("DVB-T", "DVB-T2"),
"ATSC": "ATSC",
}
return connectable[self.getType()]
def getSlotInputName(self):
name = self.input_name
if name is None:
name = chr(ord('A') + self.slot)
return name
slot_input_name = property(getSlotInputName)
def getSlotName(self):
# get a friendly description for a slot name.
# we name them "Tuner A/B/C/...", because that's what's usually written on the back
# of the device.
# for DM7080HD "Tuner A1/A2/B/C/..."
descr = _("Tuner ")
return descr + self.getSlotInputName()
slot_name = property(getSlotName)
def getSlotID(self):
return chr(ord('A') + self.slot)
def getI2C(self):
return self.i2c
def hasOutputs(self):
return self.has_outputs
def internallyConnectableTo(self):
return self.internally_connectable
def setInternalLink(self):
if self.internally_connectable is not None:
print "setting internal link on frontend id", self.frontend_id
f = open("/proc/stb/frontend/%d/rf_switch" % self.frontend_id, "w")
f.write("internal")
f.close()
def removeInternalLink(self):
if self.internally_connectable is not None:
print "removing internal link on frontend id", self.frontend_id
f = open("/proc/stb/frontend/%d/rf_switch" % self.frontend_id, "w")
f.write("external")
f.close()
def isMultiType(self):
return len(self.multi_type) > 0
def isEmpty(self):
return self.__is_empty
# empty tuners are supported!
def isSupported(self):
return (self.frontend_id is not None) or self.__is_empty
def isMultistream(self):
multistream = self.frontend_id and eDVBResourceManager.getInstance().frontendIsMultistream(self.frontend_id) or False
# HACK due to poor support for VTUNER_SET_FE_INFO
# When vtuner does not accept fe_info we have to fallback to detection using tuner name
# More tuner names will be added when confirmed as multistream (FE_CAN_MULTISTREAM)
if not multistream and "TBS" in self.description:
multistream = True
return multistream
# returns dict {<slotid>: <type>}
def getMultiTypeList(self):
return self.multi_type
slot_id = property(getSlotID)
def getFriendlyType(self):
return {
"DVB-S": "DVB-S",
"DVB-T": "DVB-T",
"DVB-C": "DVB-C",
"DVB-S2": "DVB-S2",
"DVB-T2": "DVB-T2",
"DVB-C2": "DVB-C2",
"ATSC": "ATSC",
None: _("empty")
}[self.getType()]
friendly_type = property(getFriendlyType)
def getFriendlyFullDescription(self):
nim_text = self.slot_name + ": "
if self.empty:
nim_text += _("(empty)")
elif not self.isSupported():
nim_text += self.description + " (" + _("not supported") + ")"
else:
nim_text += self.description + " (" + self.friendly_type + ")"
return nim_text
friendly_full_description = property(getFriendlyFullDescription)
config_mode = property(lambda self: config.Nims[self.slot].configMode.value)
config = property(lambda self: config.Nims[self.slot])
empty = property(lambda self: self.getType() is None)
class NimManager:
def getConfiguredSats(self):
return self.sec.getConfiguredSats()
def getTransponders(self, pos):
if self.transponders.has_key(pos):
return self.transponders[pos]
else:
return []
def getTranspondersCable(self, nim):
nimConfig = config.Nims[nim]
if nimConfig.configMode.value != "nothing" and nimConfig.cable.scan_type.value == "provider":
return self.transponderscable[self.cablesList[nimConfig.cable.scan_provider.index][0]]
return [ ]
def getTranspondersTerrestrial(self, region):
return self.transpondersterrestrial[region]
def getCableDescription(self, nim):
return self.cablesList[config.Nims[nim].scan_provider.index][0]
def getCableFlags(self, nim):
return self.cablesList[config.Nims[nim].scan_provider.index][1]
def getTerrestrialDescription(self, nim):
return self.terrestrialsList[config.Nims[nim].terrestrial.index][0]
def getTerrestrialFlags(self, nim):
return self.terrestrialsList[config.Nims[nim].terrestrial.index][1]
def getSatDescription(self, pos):
return self.satellites[pos]
def sortFunc(self, x):
orbpos = x[0]
if orbpos > 1800:
return orbpos - 3600
else:
return orbpos + 1800
def readTransponders(self):
self.satellites = { }
self.transponders = { }
self.transponderscable = { }
self.transpondersterrestrial = { }
self.transpondersatsc = { }
db = eDVBDB.getInstance()
if self.hasNimType("DVB-S"):
print "Reading satellites.xml"
if db.readSatellites(self.satList, self.satellites, self.transponders):
self.satList.sort() # sort by orbpos
else: #satellites.xml not found or corrupted
from Tools.Notifications import AddPopup
from Screens.MessageBox import MessageBox
def emergencyAid():
if not path.exists("/etc/enigma2/lamedb"):
print "/etc/enigma2/lamedb not found"
return None
f = file("/etc/enigma2/lamedb","r")
lamedb = f.readlines()
f.close()
if lamedb[0].find("/3/") != -1:
version = 3
elif lamedb[0].find("/4/") != -1:
version = 4
else:
print "unknown lamedb version: ",lamedb[0]
return False
print "import version %d" % version
collect = False
transponders = []
tp = []
for line in lamedb:
if line == "transponders\n":
collect = True
continue
if line == "end\n":
break
if collect:
data = line.strip().split(":")
if data[0] == "/":
transponders.append(tp)
tp = []
else:
tp.append(data)
t1 = ("namespace","tsid","onid")
t2_sv3 = ("frequency",
"symbol_rate",
"polarization",
"fec_inner",
"position",
"inversion",
"system",
"modulation",
"rolloff",
"pilot",
)
t2_sv4 = ("frequency",
"symbol_rate",
"polarization",
"fec_inner",
"position",
"inversion",
"flags",
"system",
"modulation",
"rolloff",
"pilot"
)
tplist = []
for x in transponders:
tp = {}
if len(x[0]) > len(t1):
continue
freq = x[1][0].split()
if len(freq) != 2:
continue
x[1][0] = freq[1]
if freq[0] == "s" or freq[0] == "S":
if ((version == 3) and len(x[1]) > len(t2_sv3)) or ((version == 4) and len(x[1]) > len(t2_sv4)):
continue
for y in range(0, len(x[0])):
tp.update({t1[y]:x[0][y]})
for y in range(0, len(x[1])):
if version == 3:
tp.update({t2_sv3[y]:x[1][y]})
elif version == 4:
tp.update({t2_sv4[y]:x[1][y]})
if ((int(tp.get("namespace"),16) >> 16) & 0xFFF) != int(tp.get("position")):
print "Namespace %s and Position %s are not identical"% (tp.get("namespace"), tp.get("position"))
continue
if version >= 4:
tp.update({"supposition":((int(tp.get("namespace","0"),16) >> 24) & 0x0F)})
elif freq[0] == "c" or freq[0] == "C":
print "DVB-C"
continue
elif freq[0] == "t" or freq[0] == "T":
print "DVB-T"
continue
tplist.append(tp)
satDict = {}
for tp in tplist:
freq = int(tp.get("frequency",0))
if freq:
tmp_sat = satDict.get(int(tp.get("position")),{})
tmp_tp = self.transponders.get(int(tp.get("position")),[])
sat_pos = int(tp.get("position"))
fake_sat_pos = int(tp.get("position"))
if sat_pos > 1800:
sat_pos -= 1800
dir = 'W'
else:
dir = 'E'
if freq >= 10000000 and freq <= 13000000:
fake_sat_pos = sat_pos
tmp_sat.update({'name':'%3.1f%c Ku-band satellite' %(sat_pos/10.0, dir)})
#tmp_sat.update({"band":"Ku"})
if freq >= 3000000 and freq <= 4000000:
fake_sat_pos = sat_pos + 1
tmp_sat.update({'name':'%3.1f%c C-band satellite' %(sat_pos/10.0, dir)})
#tmp_sat.update({"band":"C"})
if freq >= 17000000 and freq <= 23000000:
fake_sat_pos = sat_pos + 2
tmp_sat.update({'name':'%3.1f%c Ka-band satellite' %(sat_pos/10.0, dir)})
#tmp_sat.update({"band":"Ka"})
tmp_tp.append((
0, #???
int(tp.get("frequency",0)),
int(tp.get("symbol_rate",0)),
int(tp.get("polarization",0)),
int(tp.get("fec_inner",0)),
int(tp.get("system",0)),
int(tp.get("modulation",0)),
int(tp.get("inversion",0)),
int(tp.get("rolloff",0)),
int(tp.get("pilot",0)),
-1, #tsid -1 -> any tsid is valid
-1 #onid -1 -> any onid is valid
))
tmp_sat.update({'flags':int(tp.get("flags"))})
satDict.update({fake_sat_pos:tmp_sat})
self.transponders.update({fake_sat_pos:tmp_tp})
for sat_pos in satDict:
self.satellites.update({sat_pos: satDict.get(sat_pos).get('name')})
self.satList.append((sat_pos, satDict.get(sat_pos).get('name'), satDict.get(sat_pos).get('flags')))
return True
AddPopup(_("satellites.xml not found or corrupted!\nIt is possible to watch TV,\nbut it's not possible to search for new TV channels\nor to configure tuner settings"), type = MessageBox.TYPE_ERROR, timeout = 0, id = "SatellitesLoadFailed")
if not emergencyAid():
AddPopup(_("resoring satellites.xml not posibel!"), type = MessageBox.TYPE_ERROR, timeout = 0, id = "SatellitesLoadFailed")
return
if self.hasNimType("DVB-C") or self.hasNimType("DVB-T") or self.hasNimType("DVB-T2"):
print "Reading cables.xml"
db.readCables(self.cablesList, self.transponderscable)
print "Reading terrestrial.xml"
db.readTerrestrials(self.terrestrialsList, self.transpondersterrestrial)
def enumerateNIMs(self):
# enum available NIMs. This is currently very dreambox-centric and uses the /proc/bus/nim_sockets interface.
# the result will be stored into nim_slots.
# the content of /proc/bus/nim_sockets looks like:
# NIM Socket 0:
# Type: DVB-S
# Name: BCM4501 DVB-S2 NIM (internal)
# NIM Socket 1:
# Type: DVB-S
# Name: BCM4501 DVB-S2 NIM (internal)
# NIM Socket 2:
# Type: DVB-T
# Name: Philips TU1216
# NIM Socket 3:
# Type: DVB-S
# Name: Alps BSBE1 702A
#
# Type will be either "DVB-S", "DVB-S2", "DVB-T", "DVB-C" or None.
# nim_slots is an array which has exactly one entry for each slot, even for empty ones.
self.nim_slots = [ ]
try:
nimfile = open("/proc/bus/nim_sockets")
except IOError:
return
current_slot = None
entries = {}
for line in nimfile:
if not line:
break
line = line.strip()
if line.startswith("NIM Socket"):
parts = line.split(" ")
current_slot = int(parts[2][:-1])
entries[current_slot] = {}
elif line.startswith("Type:"):
entries[current_slot]["type"] = str(line[6:])
entries[current_slot]["isempty"] = False
elif line.strip().startswith("Input_Name:"):
entries[current_slot]["input_name"] = str(line.strip()[12:])
elif line.startswith("Name:"):
entries[current_slot]["name"] = str(line[6:])
entries[current_slot]["isempty"] = False
elif line.startswith("Has_Outputs:"):
input = str(line[len("Has_Outputs:") + 1:])
entries[current_slot]["has_outputs"] = (input == "yes")
elif line.startswith("Internally_Connectable:"):
input = int(line[len("Internally_Connectable:") + 1:])
entries[current_slot]["internally_connectable"] = input
elif line.startswith("Frontend_Device:"):
input = int(line[len("Frontend_Device:") + 1:])
entries[current_slot]["frontend_device"] = input
elif line.startswith("Mode"):
# Mode 0: DVB-C
# Mode 1: DVB-T
# "Mode 1: DVB-T" -> ["Mode 1", "DVB-T"]
split = line.split(":")
split[1] = split[1].replace(' ','')
split2 = split[0].split(" ")
modes = entries[current_slot].get("multi_type", {})
modes[split2[1]] = split[1]
entries[current_slot]["multi_type"] = modes
elif line.startswith("I2C_Device:"):
input = int(line[len("I2C_Device:") + 1:])
entries[current_slot]["i2c"] = input
elif line.startswith("empty"):
entries[current_slot]["type"] = None
entries[current_slot]["name"] = _("N/A")
entries[current_slot]["isempty"] = True
nimfile.close()
for id, entry in entries.items():
if not (entry.has_key("name") and entry.has_key("type")):
entry["name"] = _("N/A")
entry["type"] = None
if not (entry.has_key("i2c")):
entry["i2c"] = None
if not (entry.has_key("has_outputs")):
entry["has_outputs"] = True
if entry.has_key("frontend_device"): # check if internally connectable
if path.exists("/proc/stb/frontend/%d/rf_switch" % entry["frontend_device"]) and ((id > 0) or (getBoxType() == 'vusolo2')):
entry["internally_connectable"] = entry["frontend_device"] - 1
else:
entry["internally_connectable"] = None
else:
entry["frontend_device"] = entry["internally_connectable"] = None
if not (entry.has_key("multi_type")):
if entry["name"] == "DVB-T2/C USB-Stick": # workaround dvbsky hybrit usb stick
entry["multi_type"] = {'0': 'DVB-T'}
entry["multi_type"] = {'1': 'DVB-C'}
else:
entry["multi_type"] = {}
if not (entry.has_key("input_name")):
entry["input_name"] = chr(ord('A') + id)
self.nim_slots.append(NIM(slot = id, description = entry["name"], type = entry["type"], has_outputs = entry["has_outputs"], internally_connectable = entry["internally_connectable"], multi_type = entry["multi_type"], frontend_id = entry["frontend_device"], i2c = entry["i2c"], is_empty = entry["isempty"], input_name = entry.get("input_name", None)))
def hasNimType(self, chktype):
for slot in self.nim_slots:
if slot.isCompatible(chktype):
return True
for type in slot.getMultiTypeList().values():
if chktype == type:
return True
return False
def getNimType(self, slotid):
return self.nim_slots[slotid].type
def getNimDescription(self, slotid):
return self.nim_slots[slotid].friendly_full_description
def getNimName(self, slotid):
return self.nim_slots[slotid].description
def getNimSlotInputName(self, slotid):
# returns just "A", "B", ...
return self.nim_slots[slotid].slot_input_name
def getNim(self, slotid):
return self.nim_slots[slotid]
def getI2CDevice(self, slotid):
return self.nim_slots[slotid].getI2C()
def getNimListOfType(self, type, exception = -1):
# returns a list of indexes for NIMs compatible to the given type, except for 'exception'
list = []
for x in self.nim_slots:
if x.isCompatible(type) and x.slot != exception:
list.append(x.slot)
return list
def __init__(self):
sec = secClass.getInstance()
global maxFixedLnbPositions
maxFixedLnbPositions = sec.getMaxFixedLnbPositions()
self.satList = [ ]
self.cablesList = []
self.terrestrialsList = []
self.atscList = []
self.enumerateNIMs()
self.readTransponders()
InitNimManager(self) #init config stuff
# get a list with the friendly full description
def nimList(self):
list = [ ]
for slot in self.nim_slots:
list.append(slot.friendly_full_description)
return list
def getSlotCount(self):
return len(self.nim_slots)
def hasOutputs(self, slotid):
return self.nim_slots[slotid].hasOutputs()
def nimInternallyConnectableTo(self, slotid):
return self.nim_slots[slotid].internallyConnectableTo()
def nimRemoveInternalLink(self, slotid):
self.nim_slots[slotid].removeInternalLink()
def canConnectTo(self, slotid):
slots = []
if self.nim_slots[slotid].internallyConnectableTo() is not None:
slots.append(self.nim_slots[slotid].internallyConnectableTo())
for type in self.nim_slots[slotid].connectableTo():
for slot in self.getNimListOfType(type, exception = slotid):
if self.hasOutputs(slot) and slot not in slots:
slots.append(slot)
# remove NIMs that have a connectedTo reference on them
for testnim in slots[:]:
for nim in self.getNimListOfType("DVB-S", slotid):
nimConfig = self.getNimConfig(nim)
if nimConfig.content.items.has_key("configMode") and nimConfig.configMode.value == "loopthrough" and int(nimConfig.connectedTo.value) == testnim:
slots.remove(testnim)
break
slots.sort()
return slots
def canEqualTo(self, slotid):
type = self.getNimType(slotid)
type = type[:5] # DVB-S2 --> DVB-S, DVB-T2 --> DVB-T, DVB-C2 --> DVB-C
nimList = self.getNimListOfType(type, slotid)
for nim in nimList[:]:
mode = self.getNimConfig(nim)
if mode.configMode.value == "loopthrough" or mode.configMode.value == "satposdepends":
nimList.remove(nim)
return nimList
def canDependOn(self, slotid):
type = self.getNimType(slotid)
type = type[:5] # DVB-S2 --> DVB-S, DVB-T2 --> DVB-T, DVB-C2 --> DVB-C
nimList = self.getNimListOfType(type, slotid)
positionerList = []
for nim in nimList[:]:
mode = self.getNimConfig(nim)
nimHaveRotor = mode.configMode.value == "simple" and mode.diseqcMode.value in ("positioner", "positioner_select")
if not nimHaveRotor and mode.configMode.value == "advanced":
for x in range(3601, 3607):
lnb = int(mode.advanced.sat[x].lnb.value)
if lnb != 0:
nimHaveRotor = True
break
if not nimHaveRotor:
for sat in mode.advanced.sat.values():
lnb_num = int(sat.lnb.value)
diseqcmode = lnb_num and mode.advanced.lnb[lnb_num].diseqcMode.value or ""
if diseqcmode == "1_2":
nimHaveRotor = True
break
if nimHaveRotor:
alreadyConnected = False
for testnim in nimList:
testmode = self.getNimConfig(testnim)
if testmode.configMode.value == "satposdepends" and int(testmode.connectedTo.value) == int(nim):
alreadyConnected = True
break
if not alreadyConnected:
positionerList.append(nim)
return positionerList
def getNimConfig(self, slotid):
return config.Nims[slotid]
def getSatName(self, pos):
for sat in self.satList:
if sat[0] == pos:
return sat[1]
return _("N/A")
def getSatList(self):
return self.satList
# returns True if something is configured to be connected to this nim
# if slotid == -1, returns if something is connected to ANY nim
def somethingConnected(self, slotid = -1):
if slotid == -1:
connected = False
for id in range(self.getSlotCount()):
if self.somethingConnected(id):
connected = True
return connected
else:
nim = config.Nims[slotid]
configMode = nim.configMode.value
if self.nim_slots[slotid].isCompatible("DVB-S") or self.nim_slots[slotid].isCompatible("DVB-T") or self.nim_slots[slotid].isCompatible("DVB-C"):
return not (configMode == "nothing")
def getSatListForNim(self, slotid):
list = []
if self.nim_slots[slotid].isCompatible("DVB-S"):
nim = config.Nims[slotid]
#print "slotid:", slotid
#print "self.satellites:", self.satList[config.Nims[slotid].diseqcA.index]
#print "diseqcA:", config.Nims[slotid].diseqcA.value
configMode = nim.configMode.value
if configMode == "equal":
slotid = int(nim.connectedTo.value)
nim = config.Nims[slotid]
configMode = nim.configMode.value
elif configMode == "loopthrough":
slotid = self.sec.getRoot(slotid, int(nim.connectedTo.value))
nim = config.Nims[slotid]
configMode = nim.configMode.value
if configMode == "simple":
dm = nim.diseqcMode.value
if dm in ("single", "toneburst_a_b", "diseqc_a_b", "diseqc_a_b_c_d"):
if nim.diseqcA.orbital_position < 3600:
list.append(self.satList[nim.diseqcA.index - 2])
if dm in ("toneburst_a_b", "diseqc_a_b", "diseqc_a_b_c_d"):
if nim.diseqcB.orbital_position < 3600:
list.append(self.satList[nim.diseqcB.index - 2])
if dm == "diseqc_a_b_c_d":
if nim.diseqcC.orbital_position < 3600:
list.append(self.satList[nim.diseqcC.index - 2])
if nim.diseqcD.orbital_position < 3600:
list.append(self.satList[nim.diseqcD.index - 2])
if dm == "positioner":
for x in self.satList:
list.append(x)
if dm == "positioner_select":
for x in self.satList:
if str(x[0]) in nim.userSatellitesList.value:
list.append(x)
elif configMode == "advanced":
for x in range(3601, 3605):
if int(nim.advanced.sat[x].lnb.value) != 0:
for x in self.satList:
list.append(x)
if not list:
for x in self.satList:
if int(nim.advanced.sat[x[0]].lnb.value) != 0:
list.append(x)
for x in range(3605, 3607):
if int(nim.advanced.sat[x].lnb.value) != 0:
for user_sat in self.satList:
if str(user_sat[0]) in nim.advanced.sat[x].userSatellitesList.value and user_sat not in list:
list.append(user_sat)
return list
def getRotorSatListForNim(self, slotid):
list = []
if self.nim_slots[slotid].isCompatible("DVB-S"):
nim = config.Nims[slotid]
configMode = nim.configMode.value
if configMode == "simple":
if nim.diseqcMode.value == "positioner":
for x in self.satList:
list.append(x)
elif nim.diseqcMode.value == "positioner_select":
for x in self.satList:
if str(x[0]) in nim.userSatellitesList.value:
list.append(x)
elif configMode == "advanced":
for x in range(3601, 3605):
if int(nim.advanced.sat[x].lnb.value) != 0:
for x in self.satList:
list.append(x)
if not list:
for x in self.satList:
lnbnum = int(nim.advanced.sat[x[0]].lnb.value)
if lnbnum != 0:
lnb = nim.advanced.lnb[lnbnum]
if lnb.diseqcMode.value == "1_2":
list.append(x)
for x in range(3605, 3607):
if int(nim.advanced.sat[x].lnb.value) != 0:
for user_sat in self.satList:
if str(user_sat[0]) in nim.advanced.sat[x].userSatellitesList.value and user_sat not in list:
list.append(user_sat)
return list
def InitSecParams():
config.sec = ConfigSubsection()
x = ConfigInteger(default=25, limits = (0, 9999))
x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_CONT_TONE_DISABLE_BEFORE_DISEQC, configElement.value))
config.sec.delay_after_continuous_tone_disable_before_diseqc = x
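# Pattern used for every parameter below: the ConfigInteger's notifier pushes
# the value into the SEC driver via secClass.setParam, once when registered
# and again whenever the value changes, e.g. (hypothetically) assigning
# config.sec.delay_after_toneburst.value = 100 re-invokes its notifier.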
x = ConfigInteger(default=10, limits = (0, 9999))
x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_FINAL_CONT_TONE_CHANGE, configElement.value))
config.sec.delay_after_final_continuous_tone_change = x
x = ConfigInteger(default=10, limits = (0, 9999))
x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_FINAL_VOLTAGE_CHANGE, configElement.value))
config.sec.delay_after_final_voltage_change = x
x = ConfigInteger(default=120, limits = (0, 9999))
x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_BETWEEN_DISEQC_REPEATS, configElement.value))
config.sec.delay_between_diseqc_repeats = x
x = ConfigInteger(default=100, limits = (0, 9999))
x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_LAST_DISEQC_CMD, configElement.value))
config.sec.delay_after_last_diseqc_command = x
x = ConfigInteger(default=50, limits = (0, 9999))
x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_TONEBURST, configElement.value))
config.sec.delay_after_toneburst = x
x = ConfigInteger(default=75, limits = (0, 9999))
x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_VOLTAGE_CHANGE_BEFORE_SWITCH_CMDS, configElement.value))
config.sec.delay_after_change_voltage_before_switch_command = x
x = ConfigInteger(default=200, limits = (0, 9999))
x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_ENABLE_VOLTAGE_BEFORE_SWITCH_CMDS, configElement.value))
config.sec.delay_after_enable_voltage_before_switch_command = x
x = ConfigInteger(default=700, limits = (0, 9999))
x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_BETWEEN_SWITCH_AND_MOTOR_CMD, configElement.value))
config.sec.delay_between_switch_and_motor_command = x
x = ConfigInteger(default=500, limits = (0, 9999))
x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_VOLTAGE_CHANGE_BEFORE_MEASURE_IDLE_INPUTPOWER, configElement.value))
config.sec.delay_after_voltage_change_before_measure_idle_inputpower = x
x = ConfigInteger(default=900, limits = (0, 9999))
x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_ENABLE_VOLTAGE_BEFORE_MOTOR_CMD, configElement.value))
config.sec.delay_after_enable_voltage_before_motor_command = x
x = ConfigInteger(default=500, limits = (0, 9999))
x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_MOTOR_STOP_CMD, configElement.value))
config.sec.delay_after_motor_stop_command = x
x = ConfigInteger(default=500, limits = (0, 9999))
x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_VOLTAGE_CHANGE_BEFORE_MOTOR_CMD, configElement.value))
config.sec.delay_after_voltage_change_before_motor_command = x
x = ConfigInteger(default=70, limits = (0, 9999))
x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_BEFORE_SEQUENCE_REPEAT, configElement.value))
config.sec.delay_before_sequence_repeat = x
x = ConfigInteger(default=360, limits = (0, 9999))
x.addNotifier(lambda configElement: secClass.setParam(secClass.MOTOR_RUNNING_TIMEOUT, configElement.value))
config.sec.motor_running_timeout = x
x = ConfigInteger(default=1, limits = (0, 5))
x.addNotifier(lambda configElement: secClass.setParam(secClass.MOTOR_COMMAND_RETRIES, configElement.value))
config.sec.motor_command_retries = x
x = ConfigInteger(default=50, limits = (0, 9999))
x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_DISEQC_RESET_CMD, configElement.value))
config.sec.delay_after_diseqc_reset_cmd = x
x = ConfigInteger(default=150, limits = (0, 9999))
x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_DISEQC_PERIPHERIAL_POWERON_CMD, configElement.value))
config.sec.delay_after_diseqc_peripherial_poweron_cmd = x
	# TODO add support for satpos-depending NIMs to the advanced NIM configuration
	# so a second/third/fourth cable from a motorized LNB can be used behind a
	# DiSEqC 1.0 / DiSEqC 1.1 / toneburst switch
	# the C(++) part should be able to handle this
	# the configElement should only be visible when DiSEqC 1.2 is disabled
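# Aliases under which JESS (EN 50607) hardware may be declared in unicable.xml,
# and the 32 possible SCR attribute names ("scr1".."scr32") to probe per product.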
jess_alias = ("JESS","UNICABLE2","SCD2","EN50607","EN 50607")
lscr = [("scr%d" % i) for i in range(1,33)]
def InitNimManager(nimmgr, update_slots = []):
hw = HardwareInfo()
addNimConfig = False
try:
config.Nims
except:
addNimConfig = True
if addNimConfig:
InitSecParams()
config.Nims = ConfigSubList()
for x in range(len(nimmgr.nim_slots)):
config.Nims.append(ConfigSubsection())
lnb_choices = {
"universal_lnb": _("Universal LNB"),
"unicable": _("Unicable / JESS"),
"c_band": _("C-Band"),
"circular_lnb": _("Circular LNB"),
"user_defined": _("User defined")}
lnb_choices_default = "universal_lnb"
unicablelnbproducts = {}
unicablematrixproducts = {}
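	# Known unicable hardware is described in unicable.xml: one <lnb> and one
	# <matrix> section, each holding manufacturers with their products.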
file = open(eEnv.resolve("${datadir}/enigma2/unicable.xml"), 'r')
doc = xml.etree.cElementTree.parse(file)
file.close()
root = doc.getroot()
entry = root.find("lnb")
for manufacturer in entry.getchildren():
m={}
m_update = m.update
for product in manufacturer.getchildren():
p={} #new dict empty for new product
p_update = p.update
scr=[]
scr_append = scr.append
scr_pop = scr.pop
for i in range(len(lscr)):
scr_append(product.get(lscr[i],"0"))
for i in range(len(lscr)):
if scr[len(lscr)-i-1] == "0":
scr_pop()
else:
					break
p_update({"frequencies":tuple(scr)}) #add scr frequencies to dict product
diction = product.get("format","EN50494").upper()
if diction in jess_alias:
diction = "EN50607"
else:
diction = "EN50494"
p_update({"diction":tuple([diction])}) #add diction to dict product
positionsoffset = product.get("positionsoffset",0)
p_update({"positionsoffset":tuple([positionsoffset])}) #add positionsoffset to dict product
positions=[]
positions_append = positions.append
positions_append(int(product.get("positions",1)))
for cnt in range(positions[0]):
lof=[]
lof.append(int(product.get("lofl",9750)))
lof.append(int(product.get("lofh",10600)))
lof.append(int(product.get("threshold",11700)))
positions_append(tuple(lof))
p_update({"positions":tuple(positions)}) #add positons to dict product
m_update({product.get("name"):p}) #add dict product to dict manufacturer
unicablelnbproducts.update({manufacturer.get("name"):m})
entry = root.find("matrix")
for manufacturer in entry.getchildren():
m={}
m_update = m.update
for product in manufacturer.getchildren():
p={} #new dict empty for new product
p_update = p.update
scr=[]
scr_append = scr.append
scr_pop = scr.pop
for i in range(len(lscr)):
scr_append(product.get(lscr[i],"0"))
for i in range(len(lscr)):
if scr[len(lscr)-i-1] == "0":
scr_pop()
else:
					break
p_update({"frequencies":tuple(scr)}) #add scr frequencies to dict product
diction = product.get("format","EN50494").upper()
if diction in jess_alias:
diction = "EN50607"
else:
diction = "EN50494"
p_update({"diction":tuple([diction])}) #add diction to dict product
positionsoffset = product.get("positionsoffset",0)
p_update({"positionsoffset":tuple([positionsoffset])}) #add positionsoffset to dict product
positions=[]
positions_append = positions.append
positions_append(int(product.get("positions",1)))
for cnt in range(positions[0]):
lof=[]
lof.append(int(product.get("lofl",9750)))
lof.append(int(product.get("lofh",10600)))
lof.append(int(product.get("threshold",11700)))
positions_append(tuple(lof))
p_update({"positions":tuple(positions)}) #add positons to dict product
m_update({product.get("name"):p}) #add dict product to dict manufacturer
unicablematrixproducts.update({manufacturer.get("name"):m}) #add dict manufacturer to dict unicablematrixproducts
UnicableLnbManufacturers = unicablelnbproducts.keys()
UnicableLnbManufacturers.sort()
UnicableMatrixManufacturers = unicablematrixproducts.keys()
UnicableMatrixManufacturers.sort()
unicable_choices = {
"unicable_lnb": _("Unicable LNB"),
"unicable_matrix": _("Unicable Matrix"),
"unicable_user": "Unicable "+_("User defined")}
unicable_choices_default = "unicable_lnb"
advanced_lnb_satcr_user_choicesEN50494 = [("%d" % i, "SatCR %d" % i) for i in range(1,9)]
advanced_lnb_satcr_user_choicesEN50607 = [("%d" % i, "SatCR %d" % i) for i in range(1,33)]
advanced_lnb_diction_user_choices = [("EN50494", "Unicable(EN50494)"), ("EN50607", "JESS(EN50607)")]
prio_list = [ ("-1", _("Auto")) ]
for prio in range(65)+range(14000,14065)+range(19000,19065):
description = ""
if prio == 0:
description = _(" (disabled)")
elif 0 < prio < 65:
description = _(" (lower than any auto)")
elif 13999 < prio < 14066:
description = _(" (higher than rotor any auto)")
elif 18999 < prio < 19066:
description = _(" (higher than any auto)")
prio_list.append((str(prio), str(prio) + description))
advanced_lnb_csw_choices = [("none", _("None")), ("AA", _("Port A")), ("AB", _("Port B")), ("BA", _("Port C")), ("BB", _("Port D"))]
advanced_lnb_ucsw_choices = [("0", _("None"))] + [(str(y), "Input " + str(y)) for y in range(1, 17)]
diseqc_mode_choices = [
("single", _("Single")), ("toneburst_a_b", _("Toneburst A/B")),
("diseqc_a_b", "DiSEqC A/B"), ("diseqc_a_b_c_d", "DiSEqC A/B/C/D"),
("positioner", _("Positioner")), ("positioner_select", _("Positioner (selecting satellites)"))]
positioner_mode_choices = [("usals", _("USALS")), ("manual", _("manual"))]
diseqc_satlist_choices = [(3600, _('automatic'), 1), (3601, _('nothing connected'), 1)] + nimmgr.satList
longitude_orientation_choices = [("east", _("East")), ("west", _("West"))]
latitude_orientation_choices = [("north", _("North")), ("south", _("South"))]
turning_speed_choices = [("fast", _("Fast")), ("slow", _("Slow")), ("fast epoch", _("Fast epoch"))]
advanced_satlist_choices = nimmgr.satList + [
(3601, _('All satellites 1 (USALS)'), 1), (3602, _('All satellites 2 (USALS)'), 1),
(3603, _('All satellites 3 (USALS)'), 1), (3604, _('All satellites 4 (USALS)'), 1), (3605, _('Selecting satellites 1 (USALS)'), 1), (3606, _('Selecting satellites 2 (USALS)'), 1)]
advanced_lnb_choices = [("0", _("not configured"))] + [(str(y), "LNB " + str(y)) for y in range(1, (maxFixedLnbPositions+1))]
advanced_voltage_choices = [("polarization", _("Polarization")), ("13V", _("13 V")), ("18V", _("18 V"))]
advanced_tonemode_choices = [("band", _("Band")), ("on", _("On")), ("off", _("Off"))]
advanced_lnb_toneburst_choices = [("none", _("None")), ("A", _("A")), ("B", _("B"))]
advanced_lnb_allsat_diseqcmode_choices = [("1_2", _("1.2"))]
advanced_lnb_diseqcmode_choices = [("none", _("None")), ("1_0", _("1.0")), ("1_1", _("1.1")), ("1_2", _("1.2"))]
advanced_lnb_commandOrder1_0_choices = [("ct", "DiSEqC 1.0, toneburst"), ("tc", "toneburst, DiSEqC 1.0")]
advanced_lnb_commandOrder_choices = [
("ct", "DiSEqC 1.0, toneburst"), ("tc", "toneburst, DiSEqC 1.0"),
("cut", "DiSEqC 1.0, DiSEqC 1.1, toneburst"), ("tcu", "toneburst, DiSEqC 1.0, DiSEqC 1.1"),
("uct", "DiSEqC 1.1, DiSEqC 1.0, toneburst"), ("tuc", "toneburst, DiSEqC 1.1, DiSEqC 1.0")]
advanced_lnb_diseqc_repeat_choices = [("none", _("None")), ("one", _("One")), ("two", _("Two")), ("three", _("Three"))]
advanced_lnb_fast_turning_btime = mktime(datetime(1970, 1, 1, 7, 0).timetuple())
advanced_lnb_fast_turning_etime = mktime(datetime(1970, 1, 1, 19, 0).timetuple())
def configLOFChanged(configElement):
if configElement.value == "unicable":
x = configElement.slot_id
lnb = configElement.lnb_id
nim = config.Nims[x]
lnbs = nim.advanced.lnb
section = lnbs[lnb]
if isinstance(section.unicable, ConfigNothing):
if lnb == 1 or lnb > maxFixedLnbPositions:
section.unicable = ConfigSelection(unicable_choices, unicable_choices_default)
else:
section.unicable = ConfigSelection(choices = {"unicable_matrix": _("Unicable Matrix"),"unicable_user": "Unicable "+_("User defined")}, default = "unicable_matrix")
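			# fillUnicableConf is nested so it can see the enclosing lnb number.
			# It builds one ConfigSubsection per manufacturer; product values are
			# frozen by giving each ConfigInteger identical lower and upper limits.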
def fillUnicableConf(sectionDict, unicableproducts, vco_null_check):
for manufacturer in unicableproducts:
products = unicableproducts[manufacturer].keys()
products.sort()
products_valide = []
products_valide_append = products_valide.append
tmp = ConfigSubsection()
tmp.scr = ConfigSubDict()
tmp.vco = ConfigSubDict()
tmp.lofl = ConfigSubDict()
tmp.lofh = ConfigSubDict()
tmp.loft = ConfigSubDict()
tmp.positionsoffset = ConfigSubDict()
tmp.positions = ConfigSubDict()
tmp.diction = ConfigSubDict()
for article in products:
positionslist = unicableproducts[manufacturer][article].get("positions")
positionsoffsetlist = unicableproducts[manufacturer][article].get("positionsoffset")
positionsoffset = int(positionsoffsetlist[0])
positions = int(positionslist[0])
dictionlist = [unicableproducts[manufacturer][article].get("diction")]
if dictionlist[0][0] !="EN50607" or ((lnb > positionsoffset) and (lnb <= (positions + positionsoffset))):
tmp.positionsoffset[article] = ConfigSubList()
tmp.positionsoffset[article].append(ConfigInteger(default=positionsoffset, limits = (positionsoffset, positionsoffset)))
tmp.positions[article] = ConfigSubList()
tmp.positions[article].append(ConfigInteger(default=positions, limits = (positions, positions)))
tmp.diction[article] = ConfigSelection(choices = dictionlist, default = dictionlist[0][0])
scrlist = []
scrlist_append = scrlist.append
vcolist=unicableproducts[manufacturer][article].get("frequencies")
tmp.vco[article] = ConfigSubList()
for cnt in range(1,len(vcolist)+1):
vcofreq = int(vcolist[cnt-1])
if vcofreq == 0 and vco_null_check:
scrlist_append(("%d" %cnt,"SCR %d " %cnt +_("not used")))
else:
scrlist_append(("%d" %cnt,"SCR %d" %cnt))
tmp.vco[article].append(ConfigInteger(default=vcofreq, limits = (vcofreq, vcofreq)))
tmp.scr[article] = ConfigSelection(choices = scrlist, default = scrlist[0][0])
tmp.lofl[article] = ConfigSubList()
tmp.lofh[article] = ConfigSubList()
tmp.loft[article] = ConfigSubList()
tmp_lofl_article_append = tmp.lofl[article].append
tmp_lofh_article_append = tmp.lofh[article].append
tmp_loft_article_append = tmp.loft[article].append
for cnt in range(1,positions+1):
lofl = int(positionslist[cnt][0])
lofh = int(positionslist[cnt][1])
loft = int(positionslist[cnt][2])
tmp_lofl_article_append(ConfigInteger(default=lofl, limits = (lofl, lofl)))
tmp_lofh_article_append(ConfigInteger(default=lofh, limits = (lofh, lofh)))
tmp_loft_article_append(ConfigInteger(default=loft, limits = (loft, loft)))
products_valide_append(article)
if len(products_valide)==0:
products_valide_append("None")
tmp.product = ConfigSelection(choices = products_valide, default = products_valide[0])
sectionDict[manufacturer] = tmp
print "MATRIX"
section.unicableMatrix = ConfigSubDict()
section.unicableMatrixManufacturer = ConfigSelection(UnicableMatrixManufacturers, UnicableMatrixManufacturers[0])
fillUnicableConf(section.unicableMatrix, unicablematrixproducts, True)
print "LNB"
section.unicableLnb = ConfigSubDict()
section.unicableLnbManufacturer = ConfigSelection(UnicableLnbManufacturers, UnicableLnbManufacturers[0])
fillUnicableConf(section.unicableLnb, unicablelnbproducts, False)
#TODO satpositions for satcruser
section.dictionuser = ConfigSelection(advanced_lnb_diction_user_choices, default="EN50494")
section.satcruserEN50494 = ConfigSelection(advanced_lnb_satcr_user_choicesEN50494, default="1")
section.satcruserEN50607 = ConfigSelection(advanced_lnb_satcr_user_choicesEN50607, default="1")
tmpEN50494 = ConfigSubList()
for i in (1284, 1400, 1516, 1632, 1748, 1864, 1980, 2096):
tmpEN50494.append(ConfigInteger(default=i, limits = (950, 2150)))
section.satcrvcouserEN50494 = tmpEN50494
tmpEN50607 = ConfigSubList()
for i in (1210, 1420, 1680, 2040, 984, 1020, 1056, 1092, 1128, 1164, 1256, 1292, 1328, 1364, 1458, 1494, 1530, 1566, 1602, 1638, 1716, 1752, 1788, 1824, 1860, 1896, 1932, 1968, 2004, 2076, 2112, 2148):
tmpEN50607.append(ConfigInteger(default=i, limits = (950, 2150)))
section.satcrvcouserEN50607 = tmpEN50607
nim.advanced.unicableconnected = ConfigYesNo(default=False)
nim.advanced.unicableconnectedTo = ConfigSelection([(str(id), nimmgr.getNimDescription(id)) for id in nimmgr.getNimListOfType("DVB-S") if id != x])
if nim.advanced.unicableconnected.value == True and nim.advanced.unicableconnectedTo.value != nim.advanced.unicableconnectedTo.saved_value:
from Tools.Notifications import AddPopup
from Screens.MessageBox import MessageBox
nim.advanced.unicableconnected.value = False
nim.advanced.unicableconnected.save()
txt = _("Misconfigured unicable connection from tuner %s to tuner %s!\nTuner %s option \"connected to\" are disabled now") % (chr(int(x) + ord('A')), chr(int(nim.advanced.unicableconnectedTo.saved_value) + ord('A')), chr(int(x) + ord('A')),)
AddPopup(txt, type = MessageBox.TYPE_ERROR, timeout = 0, id = "UnicableConnectionFailed")
section.unicableTuningAlgo = ConfigSelection([("reliable", _("reliable")),("traditional", _("traditional (fast)"))], default="reliable")
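	# Lazily attach the rotor-related options (site coordinates, turning speeds,
	# power measurement) the first time DiSEqC 1.2 is selected for an LNB.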
def configDiSEqCModeChanged(configElement):
section = configElement.section
if configElement.value == "1_2" and isinstance(section.longitude, ConfigNothing):
section.longitude = ConfigFloat(default = [5,100], limits = [(0,359),(0,999)])
section.longitudeOrientation = ConfigSelection(longitude_orientation_choices, "east")
section.latitude = ConfigFloat(default = [50,767], limits = [(0,359),(0,999)])
section.latitudeOrientation = ConfigSelection(latitude_orientation_choices, "north")
section.tuningstepsize = ConfigFloat(default = [0,360], limits = [(0,9),(0,999)])
section.rotorPositions = ConfigInteger(default = 99, limits = [1,999])
section.turningspeedH = ConfigFloat(default = [2,3], limits = [(0,9),(0,9)])
section.turningspeedV = ConfigFloat(default = [1,7], limits = [(0,9),(0,9)])
section.powerMeasurement = ConfigYesNo(default=True)
section.powerThreshold = ConfigInteger(default=hw.get_device_name() == "dm7025" and 50 or 15, limits=(0, 100))
section.turningSpeed = ConfigSelection(turning_speed_choices, "fast")
section.fastTurningBegin = ConfigDateTime(default=advanced_lnb_fast_turning_btime, formatstring = _("%H:%M"), increment = 600)
section.fastTurningEnd = ConfigDateTime(default=advanced_lnb_fast_turning_etime, formatstring = _("%H:%M"), increment = 600)
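	# Create the per-LNB config section on demand when a satellite is assigned
	# to a previously unused LNB number.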
def configLNBChanged(configElement):
x = configElement.slot_id
nim = config.Nims[x]
if isinstance(configElement.value, tuple):
lnb = int(configElement.value[0])
else:
lnb = int(configElement.value)
lnbs = nim.advanced.lnb
if lnb and lnb not in lnbs:
section = lnbs[lnb] = ConfigSubsection()
section.lofl = ConfigInteger(default=9750, limits = (0, 99999))
section.lofh = ConfigInteger(default=10600, limits = (0, 99999))
section.threshold = ConfigInteger(default=11700, limits = (0, 99999))
section.increased_voltage = ConfigYesNo(False)
section.toneburst = ConfigSelection(advanced_lnb_toneburst_choices, "none")
section.longitude = ConfigNothing()
if lnb > maxFixedLnbPositions:
tmp = ConfigSelection(advanced_lnb_allsat_diseqcmode_choices, "1_2")
tmp.section = section
configDiSEqCModeChanged(tmp)
else:
tmp = ConfigSelection(advanced_lnb_diseqcmode_choices, "none")
tmp.section = section
tmp.addNotifier(configDiSEqCModeChanged)
section.diseqcMode = tmp
section.commitedDiseqcCommand = ConfigSelection(advanced_lnb_csw_choices)
section.fastDiseqc = ConfigYesNo(False)
section.sequenceRepeat = ConfigYesNo(False)
section.commandOrder1_0 = ConfigSelection(advanced_lnb_commandOrder1_0_choices, "ct")
section.commandOrder = ConfigSelection(advanced_lnb_commandOrder_choices, "ct")
section.uncommittedDiseqcCommand = ConfigSelection(advanced_lnb_ucsw_choices)
section.diseqcRepeats = ConfigSelection(advanced_lnb_diseqc_repeat_choices, "none")
section.prio = ConfigSelection(prio_list, "-1")
section.unicable = ConfigNothing()
tmp = ConfigSelection(lnb_choices, lnb_choices_default)
tmp.slot_id = x
tmp.lnb_id = lnb
tmp.addNotifier(configLOFChanged, initial_call = False)
section.lof = tmp
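	# Build the whole "advanced" tree (per-satellite and per-LNB sub-sections)
	# the first time a tuner is switched to advanced configuration mode.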
def configModeChanged(configMode):
slot_id = configMode.slot_id
nim = config.Nims[slot_id]
if configMode.value == "advanced" and isinstance(nim.advanced, ConfigNothing):
# advanced config:
nim.advanced = ConfigSubsection()
nim.advanced.sat = ConfigSubDict()
nim.advanced.sats = getConfigSatlist(192, advanced_satlist_choices)
nim.advanced.lnb = ConfigSubDict()
nim.advanced.lnb[0] = ConfigNothing()
for x in nimmgr.satList:
tmp = ConfigSubsection()
tmp.voltage = ConfigSelection(advanced_voltage_choices, "polarization")
tmp.tonemode = ConfigSelection(advanced_tonemode_choices, "band")
tmp.usals = ConfigYesNo(True)
tmp.rotorposition = ConfigInteger(default=1, limits=(1, 255))
lnb = ConfigSelection(advanced_lnb_choices, "0")
lnb.slot_id = slot_id
lnb.addNotifier(configLNBChanged, initial_call = False)
tmp.lnb = lnb
nim.advanced.sat[x[0]] = tmp
for x in range(3601, 3607):
tmp = ConfigSubsection()
tmp.voltage = ConfigSelection(advanced_voltage_choices, "polarization")
tmp.tonemode = ConfigSelection(advanced_tonemode_choices, "band")
tmp.usals = ConfigYesNo(default=True)
tmp.userSatellitesList = ConfigText('[]')
tmp.rotorposition = ConfigInteger(default=1, limits=(1, 255))
lnbnum = maxFixedLnbPositions + x - 3600
lnb = ConfigSelection([("0", _("not configured")), (str(lnbnum), "LNB %d"%(lnbnum))], "0")
lnb.slot_id = slot_id
lnb.addNotifier(configLNBChanged, initial_call = False)
tmp.lnb = lnb
nim.advanced.sat[x] = tmp
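	# The following notifiers mirror config values into /proc/stb nodes;
	# they write only when the corresponding node exists on this box.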
def scpcSearchRangeChanged(configElement):
fe_id = configElement.fe_id
slot_id = configElement.slot_id
name = nimmgr.nim_slots[slot_id].description
if path.exists("/proc/stb/frontend/%d/use_scpc_optimized_search_range" % fe_id):
f = open("/proc/stb/frontend/%d/use_scpc_optimized_search_range" % fe_id, "w")
f.write(configElement.value)
f.close()
def ForceLNBPowerChanged(configElement):
if path.exists("/proc/stb/frontend/fbc/force_lnbon"):
f = open("/proc/stb/frontend/fbc/force_lnbon", "w")
f.write(configElement.value)
f.close()
def ForceToneBurstChanged(configElement):
if path.exists("/proc/stb/frontend/fbc/force_toneburst"):
f = open("/proc/stb/frontend/fbc/force_toneburst", "w")
f.write(configElement.value)
f.close()
def toneAmplitudeChanged(configElement):
fe_id = configElement.fe_id
slot_id = configElement.slot_id
if path.exists("/proc/stb/frontend/%d/tone_amplitude" % fe_id):
f = open("/proc/stb/frontend/%d/tone_amplitude" % fe_id, "w")
f.write(configElement.value)
f.close()
def connectedToChanged(slot_id, nimmgr, configElement):
configMode = nimmgr.getNimConfig(slot_id).configMode
if configMode.value == 'loopthrough':
internally_connectable = nimmgr.nimInternallyConnectableTo(slot_id)
dest_slot = configElement.value
if internally_connectable is not None and int(internally_connectable) == int(dest_slot):
configMode.choices.updateItemDescription(configMode.index, _("internally loopthrough to"))
else:
configMode.choices.updateItemDescription(configMode.index, _("externally loopthrough to"))
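	# Per-tuner-type config factories; each guards with try/except so repeated
	# InitNimManager calls do not recreate existing config elements.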
def createSatConfig(nim, x, empty_slots):
try:
nim.toneAmplitude
except:
nim.toneAmplitude = ConfigSelection([("11", "340mV"), ("10", "360mV"), ("9", "600mV"), ("8", "700mV"), ("7", "800mV"), ("6", "900mV"), ("5", "1100mV")], "7")
nim.toneAmplitude.fe_id = x - empty_slots
nim.toneAmplitude.slot_id = x
nim.toneAmplitude.addNotifier(toneAmplitudeChanged)
nim.scpcSearchRange = ConfigSelection([("0", _("no")), ("1", _("yes"))], "0")
nim.scpcSearchRange.fe_id = x - empty_slots
nim.scpcSearchRange.slot_id = x
nim.scpcSearchRange.addNotifier(scpcSearchRangeChanged)
nim.forceLnbPower = ConfigSelection(default = "off", choices = [ ("on", _("Yes")), ("off", _("No"))] )
nim.forceLnbPower.addNotifier(ForceLNBPowerChanged)
nim.forceToneBurst = ConfigSelection(default = "disable", choices = [ ("enable", _("Yes")), ("disable", _("No"))] )
nim.forceToneBurst.addNotifier(ForceToneBurstChanged)
nim.diseqc13V = ConfigYesNo(False)
nim.diseqcMode = ConfigSelection(diseqc_mode_choices, "single")
nim.connectedTo = ConfigSelection([(str(id), nimmgr.getNimDescription(id)) for id in nimmgr.getNimListOfType("DVB-S") if id != x])
nim.simpleSingleSendDiSEqC = ConfigYesNo(False)
nim.simpleDiSEqCSetVoltageTone = ConfigYesNo(True)
nim.simpleDiSEqCOnlyOnSatChange = ConfigYesNo(False)
nim.simpleDiSEqCSetCircularLNB = ConfigYesNo(True)
nim.diseqcA = ConfigSatlist(list = diseqc_satlist_choices)
nim.diseqcB = ConfigSatlist(list = diseqc_satlist_choices)
nim.diseqcC = ConfigSatlist(list = diseqc_satlist_choices)
nim.diseqcD = ConfigSatlist(list = diseqc_satlist_choices)
nim.positionerMode = ConfigSelection(positioner_mode_choices, "usals")
nim.userSatellitesList = ConfigText('[]')
nim.pressOKtoList = ConfigNothing()
nim.longitude = ConfigFloat(default=[5,100], limits=[(0,359),(0,999)])
nim.longitudeOrientation = ConfigSelection(longitude_orientation_choices, "east")
nim.latitude = ConfigFloat(default=[50,767], limits=[(0,359),(0,999)])
nim.latitudeOrientation = ConfigSelection(latitude_orientation_choices, "north")
nim.tuningstepsize = ConfigFloat(default = [0,360], limits = [(0,9),(0,999)])
nim.rotorPositions = ConfigInteger(default = 99, limits = [1,999])
nim.turningspeedH = ConfigFloat(default = [2,3], limits = [(0,9),(0,9)])
nim.turningspeedV = ConfigFloat(default = [1,7], limits = [(0,9),(0,9)])
nim.powerMeasurement = ConfigYesNo(False)
nim.powerThreshold = ConfigInteger(default=hw.get_device_name() == "dm8000" and 15 or 50, limits=(0, 100))
nim.turningSpeed = ConfigSelection(turning_speed_choices, "fast")
btime = datetime(1970, 1, 1, 7, 0)
nim.fastTurningBegin = ConfigDateTime(default = mktime(btime.timetuple()), formatstring = _("%H:%M"), increment = 900)
etime = datetime(1970, 1, 1, 19, 0)
nim.fastTurningEnd = ConfigDateTime(default = mktime(etime.timetuple()), formatstring = _("%H:%M"), increment = 900)
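	# DVB-C: scan bands, symbol rates and modulations, plus provider presets
	# taken from nimmgr.cablesList.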
def createCableConfig(nim, x):
try:
nim.cable
except:
list = [ ]
n = 0
for x in nimmgr.cablesList:
list.append((str(n), x[0]))
n += 1
nim.cable = ConfigSubsection()
nim.cable.scan_networkid = ConfigInteger(default = 0, limits = (0, 99999))
possible_scan_types = [("bands", _("Frequency bands")), ("steps", _("Frequency steps"))]
if n:
possible_scan_types.append(("provider", _("Provider")))
nim.cable.scan_provider = ConfigSelection(default = "0", choices = list)
nim.cable.scan_type = ConfigSelection(default = "provider", choices = possible_scan_types)
nim.cable.scan_band_EU_VHF_I = ConfigYesNo(default = True)
nim.cable.scan_band_EU_MID = ConfigYesNo(default = True)
nim.cable.scan_band_EU_VHF_III = ConfigYesNo(default = True)
nim.cable.scan_band_EU_UHF_IV = ConfigYesNo(default = True)
nim.cable.scan_band_EU_UHF_V = ConfigYesNo(default = True)
nim.cable.scan_band_EU_SUPER = ConfigYesNo(default = True)
nim.cable.scan_band_EU_HYPER = ConfigYesNo(default = True)
nim.cable.scan_band_US_LOW = ConfigYesNo(default = False)
nim.cable.scan_band_US_MID = ConfigYesNo(default = False)
nim.cable.scan_band_US_HIGH = ConfigYesNo(default = False)
nim.cable.scan_band_US_SUPER = ConfigYesNo(default = False)
nim.cable.scan_band_US_HYPER = ConfigYesNo(default = False)
nim.cable.scan_frequency_steps = ConfigInteger(default = 1000, limits = (1000, 10000))
nim.cable.scan_mod_qam16 = ConfigYesNo(default = False)
nim.cable.scan_mod_qam32 = ConfigYesNo(default = False)
nim.cable.scan_mod_qam64 = ConfigYesNo(default = True)
nim.cable.scan_mod_qam128 = ConfigYesNo(default = False)
nim.cable.scan_mod_qam256 = ConfigYesNo(default = True)
nim.cable.scan_sr_6900 = ConfigYesNo(default = True)
nim.cable.scan_sr_6875 = ConfigYesNo(default = True)
nim.cable.scan_sr_ext1 = ConfigInteger(default = 0, limits = (0, 7230))
nim.cable.scan_sr_ext2 = ConfigInteger(default = 0, limits = (0, 7230))
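	# DVB-T: pick a terrestrial region from nimmgr.terrestrialsList and whether
	# the tuner should supply 5V antenna power.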
def createTerrestrialConfig(nim, x):
try:
nim.terrestrial
except:
list = []
n = 0
for x in nimmgr.terrestrialsList:
list.append((str(n), x[0]))
n += 1
nim.terrestrial = ConfigSelection(choices = list)
nim.terrestrial_5V = ConfigOnOff()
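	# First pass over the slots: create the configMode selection and the
	# type-specific config for every frontend.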
empty_slots = 0
for slot in nimmgr.nim_slots:
x = slot.slot
nim = config.Nims[x]
if slot.isCompatible("DVB-S"):
createSatConfig(nim, x, empty_slots)
config_mode_choices = [("nothing", _("nothing connected")),
("simple", _("simple")), ("advanced", _("advanced"))]
if len(nimmgr.getNimListOfType(slot.type, exception = x)) > 0:
config_mode_choices.append(("equal", _("equal to")))
config_mode_choices.append(("satposdepends", _("second cable of motorized LNB")))
if len(nimmgr.canConnectTo(x)) > 0:
config_mode_choices.append(("loopthrough", _("loopthrough to")))
nim.advanced = ConfigNothing()
tmp = ConfigSelection(config_mode_choices, "simple")
tmp.slot_id = x
tmp.addNotifier(configModeChanged, initial_call = False)
nim.configMode = tmp
nim.configMode.connectedToChanged = boundFunction(connectedToChanged, x, nimmgr)
nim.connectedTo.addNotifier(boundFunction(connectedToChanged, x, nimmgr), initial_call = False)
elif slot.isCompatible("DVB-C"):
nim.configMode = ConfigSelection(
choices = {
"enabled": _("enabled"),
"nothing": _("nothing connected"),
},
default = "enabled")
createCableConfig(nim, x)
elif slot.isCompatible("DVB-T"):
nim.configMode = ConfigSelection(
choices = {
"enabled": _("enabled"),
"nothing": _("nothing connected"),
},
default = "enabled")
createTerrestrialConfig(nim, x)
else:
empty_slots += 1
nim.configMode = ConfigSelection(choices = { "nothing": _("disabled") }, default="nothing")
if slot.type is not None:
print "pls add support for this frontend type!", slot.type
nimmgr.sec = SecConfigure(nimmgr)
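	# Switch a multi-type frontend between delivery systems, either through the
	# DVB API (>= 5) or through the legacy /proc/stb/frontend/<n>/mode node.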
def tunerTypeChanged(nimmgr, configElement):
print "dvb_api_version ",iDVBFrontend.dvb_api_version
fe_id = configElement.fe_id
eDVBResourceManager.getInstance().setFrontendType(nimmgr.nim_slots[fe_id].frontend_id, nimmgr.nim_slots[fe_id].getType())
frontend = eDVBResourceManager.getInstance().allocateRawChannel(fe_id).getFrontend()
if not path.exists("/proc/stb/frontend/%d/mode" % fe_id) and iDVBFrontend.dvb_api_version >= 5:
print "api >=5 and new style tuner driver"
if frontend:
system = configElement.getText()
if system == 'DVB-C':
ret = frontend.changeType(iDVBFrontend.feCable)
elif system in ('DVB-T','DVB-T2'):
ret = frontend.changeType(iDVBFrontend.feTerrestrial)
elif system in ('DVB-S','DVB-S2'):
ret = frontend.changeType(iDVBFrontend.feSatellite)
elif system == 'ATSC':
ret = frontend.changeType(iDVBFrontend.feATSC)
else:
ret = False
if not ret:
print "%d: tunerTypeChange to '%s' failed" %(fe_id, system)
else:
print "%d: tunerTypeChange to '%s' failed (BUSY)" %(fe_id, configElement.getText())
else:
print "api <5 or old style tuner driver"
if path.exists("/proc/stb/frontend/%d/mode" % fe_id):
cur_type = int(open("/proc/stb/frontend/%d/mode" % fe_id, "r").read())
if cur_type != int(configElement.value):
print "tunerTypeChanged feid %d from %d to mode %d" % (fe_id, cur_type, int(configElement.value))
try:
oldvalue = open("/sys/module/dvb_core/parameters/dvb_shutdown_timeout", "r").readline()
f = open("/sys/module/dvb_core/parameters/dvb_shutdown_timeout", "w")
f.write("0")
f.close()
except:
print "[info] no /sys/module/dvb_core/parameters/dvb_shutdown_timeout available"
frontend.closeFrontend()
f = open("/proc/stb/frontend/%d/mode" % fe_id, "w")
f.write(configElement.value)
f.close()
frontend.reopenFrontend()
try:
f = open("/sys/module/dvb_core/parameters/dvb_shutdown_timeout", "w")
f.write(oldvalue)
f.close()
except:
print "[info] no /sys/module/dvb_core/parameters/dvb_shutdown_timeout available"
nimmgr.enumerateNIMs()
else:
print "tuner type is already already %d" %cur_type
empty_slots = 0
for slot in nimmgr.nim_slots:
x = slot.slot
nim = config.Nims[x]
addMultiType = False
try:
nim.multiType
except:
if slot.description.find("Sundtek SkyTV Ultimate III") > -1:
print"[NimManager] Sundtek SkyTV Ultimate III detected, multiType = False"
addMultiType = False
else:
addMultiType = True
if slot.isMultiType() and addMultiType:
typeList = []
for id in slot.getMultiTypeList().keys():
type = slot.getMultiTypeList()[id]
typeList.append((id, type))
nim.multiType = ConfigSelection(typeList, "0")
nim.multiType.fe_id = x - empty_slots
nim.multiType.addNotifier(boundFunction(tunerTypeChanged, nimmgr))
print"[NimManager] slotname = %s, slotdescription = %s, multitype = %s, current type = %s" % (slot.input_name, slot.description,(slot.isMultiType() and addMultiType),slot.getType())
empty_slots = 0
for slot in nimmgr.nim_slots:
x = slot.slot
nim = config.Nims[x]
empty = True
if update_slots and (x not in update_slots):
continue
if slot.canBeCompatible("DVB-S"):
createSatConfig(nim, x, empty_slots)
empty = False
if slot.canBeCompatible("DVB-C"):
createCableConfig(nim, x)
empty = False
if slot.canBeCompatible("DVB-T"):
createTerrestrialConfig(nim, x)
empty = False
if empty:
empty_slots += 1
nimmanager = NimManager()
|
gpl-2.0
| 4,065,950,864,747,252,000 | 39.018832 | 352 | 0.680294 | false |
openstack/sahara-dashboard
|
sahara_dashboard/content/data_processing/clusters/cluster_templates/workflows/create.py
|
1
|
16094
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from django import urls
from django.utils.translation import ugettext_lazy as _
from saharaclient.api import base as api_base
from horizon import exceptions
from horizon import forms
from horizon import workflows
from sahara_dashboard.api import designate as designateclient
from sahara_dashboard.api import manila as manilaclient
from sahara_dashboard.api import sahara as saharaclient
from sahara_dashboard.content.data_processing.utils import helpers
from sahara_dashboard.content.data_processing. \
utils import anti_affinity as aa
from sahara_dashboard.content.data_processing.utils \
import acl as acl_utils
import sahara_dashboard.content.data_processing. \
utils.workflow_helpers as whelpers
from sahara_dashboard import utils
class SelectPluginAction(workflows.Action,
whelpers.PluginAndVersionMixin):
hidden_create_field = forms.CharField(
required=False,
widget=forms.HiddenInput(attrs={"class": "hidden_create_field"}))
def __init__(self, request, *args, **kwargs):
super(SelectPluginAction, self).__init__(request, *args, **kwargs)
sahara = saharaclient.client(request)
self._generate_plugin_version_fields(sahara)
class Meta(object):
name = _("Select plugin and hadoop version for cluster template")
help_text_template = ("cluster_templates/"
"_create_general_help.html")
class SelectPlugin(workflows.Step):
action_class = SelectPluginAction
class CreateClusterTemplate(workflows.Workflow):
slug = "create_cluster_template"
name = _("Create Cluster Template")
finalize_button_name = _("Next")
success_message = _("Created")
failure_message = _("Could not create")
success_url = "horizon:project:data_processing.clusters:clusters-tab"
default_steps = (SelectPlugin, )
def get_success_url(self):
url = urls.reverse(self.success_url)
return url
class GeneralConfigAction(workflows.Action):
hidden_configure_field = forms.CharField(
required=False,
widget=forms.HiddenInput(attrs={"class": "hidden_configure_field"}))
hidden_to_delete_field = forms.CharField(
required=False,
widget=forms.HiddenInput(attrs={"class": "hidden_to_delete_field"}))
cluster_template_name = forms.CharField(label=_("Template Name"))
description = forms.CharField(label=_("Description"),
required=False,
widget=forms.Textarea(attrs={'rows': 4}))
use_autoconfig = forms.BooleanField(
label=_("Auto-configure"),
help_text=_("If selected, instances of a cluster will be "
"automatically configured during creation. Otherwise you "
"should manually specify configuration values"),
required=False,
widget=forms.CheckboxInput(),
initial=True,
)
is_public = acl_utils.get_is_public_form(_("cluster template"))
is_protected = acl_utils.get_is_protected_form(_("cluster template"))
anti_affinity = aa.anti_affinity_field()
def __init__(self, request, *args, **kwargs):
super(GeneralConfigAction, self).__init__(request, *args, **kwargs)
plugin, hadoop_version = whelpers.\
get_plugin_and_hadoop_version(request)
self.fields["plugin_name"] = forms.CharField(
widget=forms.HiddenInput(),
initial=plugin
)
self.fields["hadoop_version"] = forms.CharField(
widget=forms.HiddenInput(),
initial=hadoop_version
)
populate_anti_affinity_choices = aa.populate_anti_affinity_choices
def get_help_text(self):
extra = dict()
plugin_name, hadoop_version = whelpers\
.get_plugin_and_hadoop_version(self.request)
extra["plugin_name"] = plugin_name
extra["hadoop_version"] = hadoop_version
plugin = saharaclient.plugin_get_version_details(
self.request, plugin_name, hadoop_version)
extra["deprecated"] = whelpers.is_version_of_plugin_deprecated(
plugin, hadoop_version)
return super(GeneralConfigAction, self).get_help_text(extra)
def clean(self):
cleaned_data = super(GeneralConfigAction, self).clean()
if cleaned_data.get("hidden_configure_field", None) \
== "create_nodegroup":
self._errors = dict()
return cleaned_data
class Meta(object):
name = _("Details")
help_text_template = ("cluster_templates/_configure_general_help.html")
class GeneralConfig(workflows.Step):
action_class = GeneralConfigAction
contributes = ("hidden_configure_field", )
def contribute(self, data, context):
for k, v in data.items():
context["general_" + k] = v
post = self.workflow.request.POST
context['anti_affinity_info'] = post.getlist("anti_affinity")
return context
class ConfigureNodegroupsAction(workflows.Action):
hidden_nodegroups_field = forms.CharField(
required=False,
widget=forms.HiddenInput(attrs={"class": "hidden_nodegroups_field"}))
forms_ids = forms.CharField(
required=False,
widget=forms.HiddenInput())
def __init__(self, request, *args, **kwargs):
super(ConfigureNodegroupsAction, self). \
__init__(request, *args, **kwargs)
        # when we copy or edit a cluster template the request contains
        # valuable info in both GET and POST methods
req = request.GET.copy()
req.update(request.POST)
plugin = req.get("plugin_name")
version = req.get("hadoop_version", None) or req["plugin_version"]
if plugin and not version:
version_name = plugin + "_version"
version = req.get(version_name)
if not plugin or not version:
self.templates = saharaclient.nodegroup_template_find(request)
else:
self.templates = saharaclient.nodegroup_template_find(
request, plugin_name=plugin, hadoop_version=version)
deletable = req.get("deletable", dict())
if 'forms_ids' in req:
self.groups = []
for id in json.loads(req['forms_ids']):
group_name = "group_name_" + str(id)
template_id = "template_id_" + str(id)
count = "count_" + str(id)
serialized = "serialized_" + str(id)
self.groups.append({"name": req[group_name],
"template_id": req[template_id],
"count": req[count],
"id": id,
"deletable": deletable.get(
req[group_name], "true"),
"serialized": req[serialized]})
whelpers.build_node_group_fields(self,
group_name,
template_id,
count,
serialized)
def clean(self):
cleaned_data = super(ConfigureNodegroupsAction, self).clean()
if cleaned_data.get("hidden_nodegroups_field", None) \
== "create_nodegroup":
self._errors = dict()
return cleaned_data
class Meta(object):
name = _("Node Groups")
class ConfigureNodegroups(workflows.Step):
action_class = ConfigureNodegroupsAction
contributes = ("hidden_nodegroups_field", )
template_name = ("cluster_templates/cluster_node_groups_template.html")
def contribute(self, data, context):
for k, v in data.items():
context["ng_" + k] = v
return context
class SelectClusterSharesAction(workflows.Action):
def __init__(self, request, *args, **kwargs):
super(SelectClusterSharesAction, self).__init__(
request, *args, **kwargs)
possible_shares = self.get_possible_shares(request)
self.fields["shares"] = whelpers.MultipleShareChoiceField(
label=_("Select Shares"),
widget=whelpers.ShareWidget(choices=possible_shares),
required=False,
choices=possible_shares
)
def get_possible_shares(self, request):
try:
shares = manilaclient.share_list(request)
choices = [(s.id, s.name) for s in shares]
except Exception:
exceptions.handle(request, _("Failed to get list of shares"))
choices = []
return choices
def clean(self):
cleaned_data = super(SelectClusterSharesAction, self).clean()
self._errors = dict()
return cleaned_data
class Meta(object):
name = _("Shares")
help_text = _("Select the manila shares for this cluster")
class SelectClusterShares(workflows.Step):
action_class = SelectClusterSharesAction
def contribute(self, data, context):
post = self.workflow.request.POST
shares_details = []
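        # The ShareWidget renders three POST fields per share choice
        # (id, path, permissions), hence the stride-3 index arithmetic below.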
for index in range(0, len(self.action.fields['shares'].choices) * 3):
if index % 3 == 0:
share = post.get("shares_{0}".format(index))
if share:
path = post.get("shares_{0}".format(index + 1))
permissions = post.get("shares_{0}".format(index + 2))
shares_details.append({
"id": share,
"path": path,
"access_level": permissions
})
context['ct_shares'] = shares_details
return context
class SelectDnsDomainsAction(workflows.Action):
domain_name = forms.DynamicChoiceField(
label=_("Domain Name"),
required=False
)
def __init__(self, request, *args, **kwargs):
super(SelectDnsDomainsAction, self).__init__(request, *args, **kwargs)
def _get_domain_choices(self, request):
domains = designateclient.get_domain_names(request)
choices = [(None, _('No domain is specified'))]
choices.extend(
[(domain.get('name'), domain.get('name')) for domain in domains])
return choices
def populate_domain_name_choices(self, request, context):
return self._get_domain_choices(request)
class Meta(object):
name = _("DNS Domain Names")
help_text_template = (
"cluster_templates/_config_domain_names_help.html")
class SelectDnsDomains(workflows.Step):
action_class = SelectDnsDomainsAction
def contribute(self, data, context):
for k, v in data.items():
context["dns_" + k] = v
return context
class ConfigureClusterTemplate(whelpers.ServiceParametersWorkflow,
whelpers.StatusFormatMixin):
slug = "configure_cluster_template"
name = _("Create Cluster Template")
finalize_button_name = _("Create")
success_message = _("Created Cluster Template %s")
name_property = "general_cluster_template_name"
success_url = ("horizon:project:data_processing.clusters:"
"cluster-templates-tab")
default_steps = (GeneralConfig,
ConfigureNodegroups)
def __init__(self, request, context_seed, entry_point, *args, **kwargs):
ConfigureClusterTemplate._cls_registry = []
hlps = helpers.Helpers(request)
plugin, hadoop_version = whelpers.\
get_plugin_and_hadoop_version(request)
general_parameters = hlps.get_cluster_general_configs(
plugin,
hadoop_version)
service_parameters = hlps.get_targeted_cluster_configs(
plugin,
hadoop_version)
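        # Optional steps: only offer the shares / DNS pages when the manila
        # ('share') or designate ('dns') service is actually deployed.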
if saharaclient.base.is_service_enabled(request, 'share'):
ConfigureClusterTemplate._register_step(self, SelectClusterShares)
if saharaclient.base.is_service_enabled(request, 'dns'):
ConfigureClusterTemplate._register_step(self, SelectDnsDomains)
self._populate_tabs(general_parameters, service_parameters)
super(ConfigureClusterTemplate, self).__init__(request,
context_seed,
entry_point,
*args, **kwargs)
def is_valid(self):
steps_valid = True
for step in self.steps:
if not step.action.is_valid():
steps_valid = False
step.has_errors = True
errors_fields = list(step.action.errors.keys())
step.action.errors_fields = errors_fields
if not steps_valid:
return steps_valid
return self.validate(self.context)
def handle(self, request, context):
try:
node_groups = []
configs_dict = whelpers.parse_configs_from_context(context,
self.defaults)
ids = json.loads(context['ng_forms_ids'])
for id in ids:
name = context['ng_group_name_' + str(id)]
template_id = context['ng_template_id_' + str(id)]
count = context['ng_count_' + str(id)]
raw_ng = context.get("ng_serialized_" + str(id))
if raw_ng and raw_ng != 'null':
ng = json.loads(utils.deserialize(str(raw_ng)))
else:
ng = dict()
ng["name"] = name
ng["count"] = count
if template_id and template_id != u'None':
ng["node_group_template_id"] = template_id
node_groups.append(ng)
plugin, hadoop_version = whelpers.\
get_plugin_and_hadoop_version(request)
ct_shares = []
if "ct_shares" in context:
ct_shares = context["ct_shares"]
domain = context.get('dns_domain_name', None)
if domain == 'None':
domain = None
# TODO(nkonovalov): Fix client to support default_image_id
saharaclient.cluster_template_create(
request,
context["general_cluster_template_name"],
plugin,
hadoop_version,
context["general_description"],
configs_dict,
node_groups,
context["anti_affinity_info"],
use_autoconfig=context['general_use_autoconfig'],
shares=ct_shares,
is_public=context['general_is_public'],
is_protected=context['general_is_protected'],
domain_name=domain
)
hlps = helpers.Helpers(request)
if hlps.is_from_guide():
request.session["guide_cluster_template_name"] = (
context["general_cluster_template_name"])
self.success_url = (
"horizon:project:data_processing.clusters:cluster_guide")
return True
except api_base.APIException as e:
self.error_description = str(e)
return False
except Exception:
exceptions.handle(request,
_("Cluster template creation failed"))
return False
|
apache-2.0
| -5,240,183,144,820,533,000 | 36.168591 | 79 | 0.581832 | false |
SahilTikale/haas
|
hil/cli/project.py
|
2
|
1764
|
"""Commands related to projects are in this module"""
import click
import sys
from hil.cli.client_setup import client
@click.group()
def project():
"""Commands related to project"""
@project.command(name='create')
@click.argument('project')
def project_create(project):
"""Create a new project"""
client.project.create(project)
@project.command(name='delete')
@click.argument('project')
def project_delete(project):
"""Delete a project"""
client.project.delete(project)
@project.command(name='list')
def project_list():
"""List all projects"""
q = client.project.list()
sys.stdout.write('%s Projects : ' % len(q) + " ".join(q) + '\n')
@project.command(name='list-networks')
@click.argument('project')
def project_list_networks(project):
"""List all networks attached to a <project>"""
q = client.project.networks_in(project)
sys.stdout.write(
"Networks allocated to %s\t: %s\n" % (project, " ".join(q))
)
@project.group(name='node')
def project_node():
"""Project and node related operations"""
@project_node.command(name='list')
@click.argument('project')
def project_node_list(project):
"""List all nodes attached to a <project>"""
q = client.project.nodes_in(project)
sys.stdout.write('Nodes allocated to %s: ' % project + " ".join(q) + '\n')
@project_node.command(name='add')
@click.argument('project')
@click.argument('node')
def project_connect_node(project, node):
"""Add <node> to <project>"""
client.project.connect(project, node)
@project_node.command(name='remove')
@click.argument('project')
@click.argument('node')
def project_detach_node(project, node):
"""Remove <node> from <project>"""
client.project.detach(project, node)
|
apache-2.0
| -3,346,851,995,693,047,000 | 24.565217 | 79 | 0.665533 | false |
greenapes/python-blitline
|
tools/scrape.py
|
1
|
1910
|
import re
import string
import urllib.request, urllib.error, urllib.parse
from datetime import datetime
from lxml import etree
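# Binding the compiled regex as a default argument compiles it once at import
# time and keeps it local to the function.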
def function_name(text, rex=re.compile(r'"name"\s*:\s*"([^"]+)"')):
match = rex.search(text)
if match:
return match.group(1)
return None
parser = etree.HTMLParser()
tree = etree.parse(urllib.request.urlopen(
"http://www.blitline.com/docs/functions"), parser)
functions_data = []
functions = tree.xpath(
"""id("accordion")/div[contains(@class, "panel")]""")
for f in functions:
code_nodes = f.xpath("./div[contains(@class, 'panel-collapse')]//pre")
if not code_nodes:
continue
elif len(code_nodes) > 1:
raise ValueError("HTML mismatch, too many codes")
else:
code = code_nodes[0].text
fname = function_name(code)
if not fname:
raise ValueError("HTML mismatch, function name not found")
doc_nodes = f.xpath(".//h4//div[contains(@class, 'pull-left')][last()]/p")
if not doc_nodes:
doc = ''
elif len(doc_nodes) > 1:
raise ValueError("HTML mismatch, too many descriptions")
else:
doc = doc_nodes[0].text.strip()
functions_data.append((fname, doc))
# some functions not listed in the online page
functions_data.extend([
('vintage', 'Vintage Filter'),
('lomo', 'Lomo Filter'),
('photograph', 'Photograph Filter'),
('savannah', 'Savannah Filter'),
('xpro', 'Xpro Filter'),
('celsius', 'Celsius Filter'),
('stackhouse', 'Stackhouse Filter'),
])
fragments = [
"#autogenerated on %s" % datetime.now(),
"from blitline import Function",
]
tpl = '''
class {cname}(Function):
"""
{doc}
"""
function_name = "{fname}"
'''
for fname, doc in functions_data:
cname = string.capwords(fname, '_').replace('_', '')
fragments.append(tpl.format(cname=cname, fname=fname, doc=doc))
print('\n'.join(fragments))
|
mit
| -1,458,529,492,265,877,500 | 25.901408 | 78 | 0.624607 | false |
python-gitlab/python-gitlab
|
gitlab/v4/objects/hooks.py
|
1
|
3071
|
from gitlab.base import RequiredOptional, RESTManager, RESTObject
from gitlab.mixins import CRUDMixin, NoUpdateMixin, ObjectDeleteMixin, SaveMixin
__all__ = [
"Hook",
"HookManager",
"ProjectHook",
"ProjectHookManager",
"GroupHook",
"GroupHookManager",
]
class Hook(ObjectDeleteMixin, RESTObject):
_url = "/hooks"
_short_print_attr = "url"
class HookManager(NoUpdateMixin, RESTManager):
_path = "/hooks"
_obj_cls = Hook
_create_attrs = RequiredOptional(required=("url",))
class ProjectHook(SaveMixin, ObjectDeleteMixin, RESTObject):
_short_print_attr = "url"
class ProjectHookManager(CRUDMixin, RESTManager):
_path = "/projects/%(project_id)s/hooks"
_obj_cls = ProjectHook
_from_parent_attrs = {"project_id": "id"}
_create_attrs = RequiredOptional(
required=("url",),
optional=(
"push_events",
"issues_events",
"confidential_issues_events",
"merge_requests_events",
"tag_push_events",
"note_events",
"job_events",
"pipeline_events",
"wiki_page_events",
"enable_ssl_verification",
"token",
),
)
_update_attrs = RequiredOptional(
required=("url",),
optional=(
"push_events",
"issues_events",
"confidential_issues_events",
"merge_requests_events",
"tag_push_events",
"note_events",
"job_events",
"pipeline_events",
"wiki_events",
"enable_ssl_verification",
"token",
),
)
class GroupHook(SaveMixin, ObjectDeleteMixin, RESTObject):
_short_print_attr = "url"
class GroupHookManager(CRUDMixin, RESTManager):
_path = "/groups/%(group_id)s/hooks"
_obj_cls = GroupHook
_from_parent_attrs = {"group_id": "id"}
_create_attrs = RequiredOptional(
required=("url",),
optional=(
"push_events",
"issues_events",
"confidential_issues_events",
"merge_requests_events",
"tag_push_events",
"note_events",
"confidential_note_events",
"job_events",
"pipeline_events",
"wiki_page_events",
"deployment_events",
"releases_events",
"subgroup_events",
"enable_ssl_verification",
"token",
),
)
_update_attrs = RequiredOptional(
required=("url",),
optional=(
"push_events",
"issues_events",
"confidential_issues_events",
"merge_requests_events",
"tag_push_events",
"note_events",
"confidential_note_events",
"job_events",
"pipeline_events",
"wiki_page_events",
"deployment_events",
"releases_events",
"subgroup_events",
"enable_ssl_verification",
"token",
),
)
|
lgpl-3.0
| 785,224,633,121,416,800 | 25.938596 | 80 | 0.529144 | false |
DistrictDataLabs/topicmaps
|
topics/models.py
|
1
|
3031
|
# topics.models
# Topic modeling for data survey analysis
#
# Author: Benjamin Bengfort <bbengfort@districtdatalabs.com>
# Created: Tue Sep 08 19:43:58 2015 -0400
#
# Copyright (C) 2015 District Data Labs
# For license information, see LICENSE.txt
#
# ID: models.py [] benjamin@bengfort.com $
"""
Topic modeling for data survey analysis
"""
##########################################################################
## Imports
##########################################################################
from django.db import models
from model_utils import Choices
from autoslug import AutoSlugField
from model_utils.models import TimeStampedModel
from topics.managers import TopicManager, VotingManager
##########################################################################
## Topic Models
##########################################################################
class Topic(TimeStampedModel):
"""
Stores a topic, basically a string like a tag and manages it.
"""
# Topic fields
title = models.CharField(max_length=128)
slug = AutoSlugField(populate_from='title', unique=True)
link = models.URLField(null=True, blank=True, default=None)
refers_to = models.ForeignKey('self', related_name='references', null=True, blank=True, default=None)
is_canonical = models.BooleanField(default=True)
# Custom topic manager
objects = TopicManager()
# Topic meta class
class Meta:
db_table = 'topics'
ordering = ('title', )
def __unicode__(self):
return self.title
def vote_total(self):
"""
Accumulates the votes via aggregation
"""
        # Sum() returns None when a topic has no votes yet, so default to 0
        votes = self.votes.aggregate(
            total=models.Sum('vote')
        )['total'] or 0
if self.is_canonical:
for ref in self.references.all():
votes += ref.vote_total()
return votes
##########################################################################
## Topic Voting
##########################################################################
class Vote(TimeStampedModel):
"""
Simple voting model that stores an up or down vote for a particular topic
associated with a particular IP address (and time of day).
"""
DATEFMT = "%a %b %d, %Y at %H:%M"
BALLOT = Choices((-1, 'downvote', 'downvote'), (1, 'upvote', 'upvote'), (0, 'novote', 'novote'))
# Vote fields
vote = models.SmallIntegerField(choices=BALLOT, default=BALLOT.upvote)
topic = models.ForeignKey(Topic, related_name='votes')
ipaddr = models.GenericIPAddressField()
# Custom voting manager
objects = VotingManager()
# Vote meta class
class Meta:
db_table = 'voting'
ordering = ('-created',)
def __unicode__(self):
action = {
-1: "-1",
0: "--",
1: "+1",
}[self.vote]
return "{} for \"{}\" ({} on {})".format(
action, self.topic, self.ipaddr, self.modified.strftime(self.DATEFMT)
)
|
mit
| 8,940,932,464,342,906,000 | 28.715686 | 105 | 0.527549 | false |