| text (stringlengths 6-947k) | repo_name (stringlengths 5-100) | path (stringlengths 4-231) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int64 6-947k) | score (float64 0-0.34) |
|---|---|---|---|---|---|---|
"""
mbed SDK
Copyright (c) 2011-2016 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from exporters import Exporter
from os.path import splitext, basename
class CodeRed(Exporter):
NAME = 'CodeRed'
TOOLCHAIN = 'GCC_CR'
MBED_CONFIG_HEADER_SUPPORTED = True
TARGETS = [
'LPC1768',
'LPC4088',
'LPC4088_DM',
'LPC4330_M4',
'LPC1114',
'LPC11U35_401',
'LPC11U35_501',
'UBLOX_C027',
'ARCH_PRO',
'LPC1549',
'LPC11U68',
'LPCCAPPUCCINO',
'LPC824',
'LPC11U37H_401',
]
def generate(self):
libraries = []
for lib in self.resources.libraries:
l, _ = splitext(basename(lib))
libraries.append(l[3:])
ctx = {
'name': self.project_name,
'include_paths': self.resources.inc_dirs,
'linker_script': self.resources.linker_script,
'object_files': self.resources.objects,
'libraries': libraries,
'symbols': self.toolchain.get_symbols()
}
ctx.update(self.flags)
self.gen_file('codered_%s_project.tmpl' % self.target.lower(), ctx, '.project')
self.gen_file('codered_%s_cproject.tmpl' % self.target.lower(), ctx, '.cproject')
| svastm/mbed | tools/export/codered.py | Python | apache-2.0 | 1,779 | 0.001124 |
#!/usr/bin/python
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example creates a click-to-download ad in a given ad group.
This type of ad is also known as an app promotion ad. To list ad groups, run
get_ad_groups.py.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
Tags: AdGroupAdService.mutate
Api: AdWordsOnly
"""
__author__ = 'Joseph DiLallo'
from googleads import adwords
AD_GROUP_ID = 'INSERT_AD_GROUP_ID_HERE'
def main(client, ad_group_id):
# Initialize appropriate service.
ad_group_ad_service = client.GetService('AdGroupAdService', version='v201506')
# Create the template elements for the ad. You can refer to
# https://developers.google.com/adwords/api/docs/appendix/templateads
# for the list of available template fields.
ad_data = {
'uniqueName': 'adData',
'fields': [
{
'name': 'headline',
'fieldText': 'Enjoy your drive in Mars',
'type': 'TEXT'
},
{
'name': 'description1',
'fieldText': 'Realistic physics simulation',
'type': 'TEXT'
},
{
'name': 'description2',
'fieldText': 'Race against players online',
'type': 'TEXT'
},
{
'name': 'appId',
'fieldText': 'com.example.demogame',
'type': 'TEXT'
},
{
'name': 'appStore',
'fieldText': '2',
'type': 'ENUM'
}
]
}
# Create click to download ad.
click_to_download_app_ad = {
'xsi_type': 'TemplateAd',
'name': 'Ad for demo game',
'templateId': '353',
'finalUrls': [
'http://play.google.com/store/apps/details?id=com.example.demogame'
],
'displayUrl': 'play.google.com',
'templateElements': [ad_data]
}
# Create ad group ad.
ad_group_ad = {
'adGroupId': ad_group_id,
'ad': click_to_download_app_ad,
# Optional.
'status': 'PAUSED'
}
# Add ad.
ads = ad_group_ad_service.mutate([
{'operator': 'ADD', 'operand': ad_group_ad}
])
# Display results.
if 'value' in ads:
for ad in ads['value']:
print ('Added new click-to-download ad to ad group ID \'%s\' '
'with URL \'%s\'.' % (ad['ad']['id'], ad['ad']['finalUrls'][0]))
else:
print 'No ads were added.'
if __name__ == '__main__':
# Initialize client object.
adwords_client = adwords.AdWordsClient.LoadFromStorage()
main(adwords_client, AD_GROUP_ID)
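# --- Illustrative note (not part of the original example) -------------------
# LoadFromStorage() reads OAuth2 credentials from "googleads.yaml" in the home
# directory by default. A minimal file has an "adwords:" section containing
# developer_token, client_id, client_secret and refresh_token entries; the
# values are placeholders you fill in, never working credentials. If the file
# lives elsewhere, the path can be passed explicitly (assuming the installed
# googleads release accepts a path argument), e.g.:
#
#     adwords_client = adwords.AdWordsClient.LoadFromStorage(
#         '/path/to/googleads.yaml')  # hypothetical path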
| coxmediagroup/googleads-python-lib | examples/adwords/v201506/advanced_operations/add_click_to_download_ad.py | Python | apache-2.0 | 3,318 | 0.006329 |
""" Launcher functionality for the Google Compute Engine (GCE)
"""
import json
import logging
import os
from dcos_launch import onprem, util
from dcos_launch.platforms import gcp
from dcos_test_utils.helpers import Host
from googleapiclient.errors import HttpError
log = logging.getLogger(__name__)
def get_credentials(env=None) -> tuple:
path = None
if env is None:
env = os.environ.copy()
if 'GCE_CREDENTIALS' in env:
json_credentials = env['GCE_CREDENTIALS']
elif 'GOOGLE_APPLICATION_CREDENTIALS' in env:
path = env['GOOGLE_APPLICATION_CREDENTIALS']
json_credentials = util.read_file(path)
else:
raise util.LauncherError(
'MissingParameter', 'Either GCE_CREDENTIALS or GOOGLE_APPLICATION_CREDENTIALS must be set in env')
return json_credentials, path
class OnPremLauncher(onprem.AbstractOnpremLauncher):
# Launches a homogeneous cluster of plain GMIs intended for onprem DC/OS
def __init__(self, config: dict, env=None):
creds_string, _ = get_credentials(env)
self.gcp_wrapper = gcp.GcpWrapper(json.loads(creds_string))
self.config = config
@property
def deployment(self):
""" Builds a BareClusterDeployment instance with self.config, but only returns it successfully if the
corresponding real deployment (active machines) exists and doesn't contain any errors.
"""
try:
deployment = gcp.BareClusterDeployment(self.gcp_wrapper, self.config['deployment_name'],
self.config['gce_zone'])
info = deployment.get_info()
errors = info['operation'].get('error')
if errors:
raise util.LauncherError('DeploymentContainsErrors', str(errors))
return deployment
except HttpError as e:
if e.resp.status == 404:
raise util.LauncherError('DeploymentNotFound',
"The deployment you are trying to access doesn't exist") from e
raise e
def create(self) -> dict:
self.key_helper()
node_count = 1 + (self.config['num_masters'] + self.config['num_public_agents']
+ self.config['num_private_agents'])
gcp.BareClusterDeployment.create(
self.gcp_wrapper,
self.config['deployment_name'],
self.config['gce_zone'],
node_count,
self.config['disk_size'],
self.config['disk_type'],
self.config['source_image'],
self.config['machine_type'],
self.config['image_project'],
self.config['ssh_user'],
self.config['ssh_public_key'],
self.config['disable_updates'],
self.config['use_preemptible_vms'],
tags=self.config.get('tags'))
return self.config
def key_helper(self):
""" Generates a public key and a private key and stores them in the config. The public key will be applied to
all the instances in the deployment later on when wait() is called.
"""
if self.config['key_helper']:
private_key, public_key = util.generate_rsa_keypair()
self.config['ssh_private_key'] = private_key.decode()
self.config['ssh_public_key'] = public_key.decode()
def get_cluster_hosts(self) -> [Host]:
return list(self.deployment.hosts)[1:]
def get_bootstrap_host(self) -> Host:
return list(self.deployment.hosts)[0]
def wait(self):
""" Waits for the deployment to complete: first, the network that will contain the cluster is deployed. Once
the network is deployed, a firewall for the network and an instance template are deployed. Finally,
once the instance template is deployed, an instance group manager and all its instances are deployed.
"""
self.deployment.wait_for_completion()
def delete(self):
""" Deletes all the resources associated with the deployment (instance template, network, firewall, instance
group manager and all its instances).
"""
self.deployment.delete()
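# --- Illustrative usage sketch (not part of this module) --------------------
# A minimal driver for the launcher above, assuming GCE_CREDENTIALS (or
# GOOGLE_APPLICATION_CREDENTIALS) is set in the environment and that the
# config keys match the ones read in create(). Every value below is a
# hypothetical placeholder.
if __name__ == '__main__':
    example_config = {
        'deployment_name': 'example-onprem-cluster',
        'gce_zone': 'us-central1-a',
        'num_masters': 1,
        'num_public_agents': 1,
        'num_private_agents': 2,
        'disk_size': 40,
        'disk_type': 'pd-ssd',
        'source_image': 'projects/centos-cloud/global/images/family/centos-7',
        'machine_type': 'n1-standard-4',
        'image_project': 'centos-cloud',
        'ssh_user': 'dcos',
        'ssh_public_key': '',   # filled in by key_helper() below
        'disable_updates': False,
        'use_preemptible_vms': False,
        'key_helper': True,
    }
    launcher = OnPremLauncher(example_config)
    launcher.create()                     # deploys network, template and instances
    launcher.wait()                       # blocks until the deployment completes
    print(launcher.get_bootstrap_host())  # first host is the bootstrap node
    launcher.delete()                     # tears the deployment down again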
| dcos/dcos-launch | dcos_launch/gcp.py | Python | apache-2.0 | 4,208 | 0.002852 |
# Copyright (c) 2010-2013 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" In-Memory Object Server for Swift """
import os
from swift import gettext_ as _
from eventlet import Timeout
from swift.common.bufferedhttp import http_connect
from swift.common.exceptions import ConnectionTimeout
from swift.common.http import is_success
from swift.obj.mem_diskfile import InMemoryFileSystem
from swift.obj import server
class ObjectController(server.ObjectController):
"""
Implements the WSGI application for the Swift In-Memory Object Server.
"""
def setup(self, conf):
"""
Nothing specific to do for the in-memory version.
:param conf: WSGI configuration parameter
"""
self._filesystem = InMemoryFileSystem()
def get_diskfile(self, device, partition, account, container, obj,
**kwargs):
"""
Utility method for instantiating a DiskFile object supporting a given
REST API.
An implementation of the object server that wants to use a different
DiskFile class would simply over-ride this method to provide that
behavior.
"""
return self._filesystem.get_diskfile(account, container, obj, **kwargs)
def async_update(self, op, account, container, obj, host, partition,
contdevice, headers_out, objdevice, policy_idx):
"""
Sends or saves an async update.
:param op: operation performed (ex: 'PUT', or 'DELETE')
:param account: account name for the object
:param container: container name for the object
:param obj: object name
:param host: host that the container is on
:param partition: partition that the container is on
:param contdevice: device name that the container is on
:param headers_out: dictionary of headers to send in the container
request
:param objdevice: device name that the object is in
:param policy_idx: the associated storage policy index
"""
headers_out['user-agent'] = 'obj-server %s' % os.getpid()
full_path = '/%s/%s/%s' % (account, container, obj)
if all([host, partition, contdevice]):
try:
with ConnectionTimeout(self.conn_timeout):
ip, port = host.rsplit(':', 1)
conn = http_connect(ip, port, contdevice, partition, op,
full_path, headers_out)
with Timeout(self.node_timeout):
response = conn.getresponse()
response.read()
if is_success(response.status):
return
else:
self.logger.error(_(
'ERROR Container update failed: %(status)d '
'response from %(ip)s:%(port)s/%(dev)s'),
{'status': response.status, 'ip': ip, 'port': port,
'dev': contdevice})
except (Exception, Timeout):
self.logger.exception(_(
'ERROR container update failed with '
'%(ip)s:%(port)s/%(dev)s'),
{'ip': ip, 'port': port, 'dev': contdevice})
# FIXME: For now don't handle async updates
def REPLICATE(self, request):
"""
Handle REPLICATE requests for the Swift Object Server. This is used
by the object replicator to get hashes for directories.
"""
pass
def app_factory(global_conf, **local_conf):
"""paste.deploy app factory for creating WSGI object server apps"""
conf = global_conf.copy()
conf.update(local_conf)
return ObjectController(conf)
| Khushbu27/Tutorial | swift/obj/mem_server.py | Python | apache-2.0 | 4,314 | 0 |
#!/usr/bin/env python
# Copyright (c) 2008-14 Qtrac Ltd. All rights reserved.
# This program or module is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 2 of the License, or
# version 3 of the License, or (at your option) any later version. It is
# provided for educational purposes and is distributed in the hope that
# it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
# the GNU General Public License for more details.
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future_builtins import *
from PyQt4.QtCore import (QRegExp, Qt)
from PyQt4.QtCore import pyqtSignal as Signal
from PyQt4.QtGui import (QCheckBox, QDialog, QDialogButtonBox,
QGridLayout, QLabel, QLineEdit, QMessageBox, QRegExpValidator,
QSpinBox)
class NumberFormatDlg(QDialog):
changed = Signal()
def __init__(self, format, parent=None):
super(NumberFormatDlg, self).__init__(parent)
self.setAttribute(Qt.WA_DeleteOnClose)
self.format = format
self.create_widgets()
self.layout_widgets()
self.create_connections()
self.setWindowTitle("Set Number Format (Modeless)")
def create_widgets(self):
punctuationRe = QRegExp(r"[ ,;:.]")
self.thousandsLabel = QLabel("&Thousands separator")
self.thousandsEdit = QLineEdit(self.format["thousandsseparator"])
self.thousandsLabel.setBuddy(self.thousandsEdit)
self.thousandsEdit.setMaxLength(1)
self.thousandsEdit.setValidator(
QRegExpValidator(punctuationRe, self))
self.decimalMarkerLabel = QLabel("Decimal &marker")
self.decimalMarkerEdit = QLineEdit(self.format["decimalmarker"])
self.decimalMarkerLabel.setBuddy(self.decimalMarkerEdit)
self.decimalMarkerEdit.setMaxLength(1)
self.decimalMarkerEdit.setValidator(
QRegExpValidator(punctuationRe, self))
self.decimalMarkerEdit.setInputMask("X")
self.decimalPlacesLabel = QLabel("&Decimal places")
self.decimalPlacesSpinBox = QSpinBox()
self.decimalPlacesLabel.setBuddy(self.decimalPlacesSpinBox)
self.decimalPlacesSpinBox.setRange(0, 6)
self.decimalPlacesSpinBox.setValue(self.format["decimalplaces"])
self.redNegativesCheckBox = QCheckBox("&Red negative numbers")
self.redNegativesCheckBox.setChecked(self.format["rednegatives"])
self.buttonBox = QDialogButtonBox(QDialogButtonBox.Apply|
QDialogButtonBox.Close)
def layout_widgets(self):
grid = QGridLayout()
grid.addWidget(self.thousandsLabel, 0, 0)
grid.addWidget(self.thousandsEdit, 0, 1)
grid.addWidget(self.decimalMarkerLabel, 1, 0)
grid.addWidget(self.decimalMarkerEdit, 1, 1)
grid.addWidget(self.decimalPlacesLabel, 2, 0)
grid.addWidget(self.decimalPlacesSpinBox, 2, 1)
grid.addWidget(self.redNegativesCheckBox, 3, 0, 1, 2)
grid.addWidget(self.buttonBox, 4, 0, 1, 2)
self.setLayout(grid)
def create_connections(self):
self.buttonBox.button(QDialogButtonBox.Apply).clicked.connect(
self.apply)
self.buttonBox.rejected.connect(self.reject)
def apply(self):
thousands = unicode(self.thousandsEdit.text())
decimal = unicode(self.decimalMarkerEdit.text())
if thousands == decimal:
QMessageBox.warning(self, "Format Error",
"The thousands separator and the decimal marker "
"must be different.")
self.thousandsEdit.selectAll()
self.thousandsEdit.setFocus()
return
if len(decimal) == 0:
QMessageBox.warning(self, "Format Error",
"The decimal marker may not be empty.")
self.decimalMarkerEdit.selectAll()
self.decimalMarkerEdit.setFocus()
return
self.format["thousandsseparator"] = thousands
self.format["decimalmarker"] = decimal
self.format["decimalplaces"] = (
self.decimalPlacesSpinBox.value())
self.format["rednegatives"] = (
self.redNegativesCheckBox.isChecked())
self.changed.emit()
| manashmndl/LearningPyQt | pyqt/chap05/numberformatdlg2.py | Python | mit | 4,502 | 0.002221 |
"""Test loading historical builds and jobs."""
from __future__ import absolute_import, unicode_literals
from travis_log_fetch.config import _get_travispy
from travis_log_fetch._target import Target
from travis_log_fetch.get import (
get_travis_repo,
get_historical_builds,
get_historical_build,
get_historical_job,
)
import pytest
class TestHistorical(object):
def test_latest(self):
_travis = _get_travispy()
repo = get_travis_repo(_travis, 'travispy/on_pypy')
builds = get_historical_builds(_travis, repo)
build = next(builds)
assert build.repository_id == 2598880
assert build.id == repo.last_build_id
def test_after(self):
_travis = _get_travispy()
repo = get_travis_repo(_travis, 'travispy/on_pypy')
builds = get_historical_builds(_travis, repo,
_after=3, _load_jobs=False)
build = next(builds)
assert build.repository_id == 2598880
assert build.number == '2'
build = next(builds)
assert build.repository_id == 2598880
assert build.number == '1'
def test_all_small(self):
_travis = _get_travispy()
repo = get_travis_repo(_travis, 'travispy/on_pypy')
builds = get_historical_builds(_travis, repo)
ids = []
for build in builds:
assert build.repository_id == 2598880
ids.append(build.id)
assert ids == [53686685, 37521698, 28881355]
def test_multiple_batches_menegazzo(self):
"""Test using a repository that has greater than 2*25 builds."""
# Ideally each build has one or two jobs, so that it doesn't slow down the test,
# and the logs are small in case the log is fetched with the job.
_travis = _get_travispy()
repo = get_travis_repo(_travis, 'menegazzo/travispy')
builds = get_historical_builds(_travis, repo, _load_jobs=False)
ids = []
prev_number = None
for build in builds:
assert build.repository_id == 2419489
if int(build.number) in [80]:
# There are two '80'
# See https://github.com/travis-ci/travis-ci/issues/2582
print('duplicate build number {0}: {1}'.format(
build.number, build.id))
assert build.id in [45019395, 45019396]
if build.id == 45019395:
assert prev_number == int(build.number)
else:
assert prev_number == int(build.number) + 1
elif prev_number:
# All other build numbers decrease rather orderly
assert prev_number == int(build.number) + 1
prev_number = int(build.number)
if ids:
assert build.id < ids[-1]
ids.append(build.id)
if len(ids) > 100:
break
assert len(ids) == len(set(ids))
def test_multiple_batches_bootstrap(self):
"""Test using a repository that has lots of builds, esp. PRs."""
_travis = _get_travispy()
repo = get_travis_repo(_travis, 'twbs/bootstrap')
builds = get_historical_builds(_travis, repo,
_after=12071,
_load_jobs=False)
ids = []
prev_number = None
for build in builds:
assert build.repository_id == 12962
if int(build.number) in [12069, 12062, 12061, 12054, 12049,
12048, 12041, 12038, 12037, 12033]:
# Many duplicates
# See https://github.com/travis-ci/travis-ci/issues/2582
print('duplicate build number {0}: {1}'.format(
build.number, build.id))
if build.id in [53437234, 53350534, 53350026,
53263731, 53263730, # two extra 12054
53180440, 53179846, 53062896, 53019568,
53004896, 52960766]:
assert prev_number == int(build.number)
else:
assert prev_number == int(build.number) + 1
elif prev_number:
# All other build numbers decrease rather orderly
assert prev_number == int(build.number) + 1
prev_number = int(build.number)
if ids:
assert build.id < ids[-1]
ids.append(build.id)
# There are many more duplicates, so we stop here.
if int(build.number) == 12033:
break
assert len(ids) == len(set(ids))
def test_logical_single_job_build(self):
target = Target.from_extended_slug('travispy/on_pypy#1.1')
_travis = _get_travispy()
job = get_historical_job(_travis, target)
assert job.repository_id == 2598880
assert job.number == '1.1'
assert job.id == 28881356
def test_logical_multiple_job_build(self):
target = Target.from_extended_slug('menegazzo/travispy#101.3')
_travis = _get_travispy()
job = get_historical_job(_travis, target)
assert job.repository_id == 2419489
assert job.number == '101.3'
assert job.id == 82131391
def test_logical_duplicate_build(self):
target = Target.from_extended_slug('menegazzo/travispy#80.3')
_travis = _get_travispy()
pytest.raises(AssertionError, get_historical_build, _travis, target)
| jayvdb/travis_log_fetch | tests/test_historical.py | Python | mit | 5,569 | 0 |
# ale_python_interface.py
# Author: Ben Goodrich
# This directly implements a python version of the arcade learning
# environment interface.
from ctypes import *
import numpy as np
from numpy.ctypeslib import as_ctypes
import os
ale_lib = cdll.LoadLibrary(os.path.join(os.path.dirname(__file__),
'libale_c.so'))
ale_lib.ALE_new.argtypes = None
ale_lib.ALE_new.restype = c_void_p
ale_lib.ALE_del.argtypes = [c_void_p]
ale_lib.ALE_del.restype = None
ale_lib.getString.argtypes = [c_void_p, c_char_p]
ale_lib.getString.restype = c_char_p
ale_lib.getInt.argtypes = [c_void_p, c_char_p]
ale_lib.getInt.restype = c_int
ale_lib.getBool.argtypes = [c_void_p, c_char_p]
ale_lib.getBool.restype = c_bool
ale_lib.getFloat.argtypes = [c_void_p, c_char_p]
ale_lib.getFloat.restype = c_float
ale_lib.setString.argtypes = [c_void_p, c_char_p, c_char_p]
ale_lib.setString.restype = None
ale_lib.setInt.argtypes = [c_void_p, c_char_p, c_int]
ale_lib.setInt.restype = None
ale_lib.setBool.argtypes = [c_void_p, c_char_p, c_bool]
ale_lib.setBool.restype = None
ale_lib.setFloat.argtypes = [c_void_p, c_char_p, c_float]
ale_lib.setFloat.restype = None
ale_lib.loadROM.argtypes = [c_void_p, c_char_p]
ale_lib.loadROM.restype = None
ale_lib.act.argtypes = [c_void_p, c_int]
ale_lib.act.restype = c_int
ale_lib.game_over.argtypes = [c_void_p]
ale_lib.game_over.restype = c_bool
ale_lib.reset_game.argtypes = [c_void_p]
ale_lib.reset_game.restype = None
ale_lib.getLegalActionSet.argtypes = [c_void_p, c_void_p]
ale_lib.getLegalActionSet.restype = None
ale_lib.getLegalActionSize.argtypes = [c_void_p]
ale_lib.getLegalActionSize.restype = c_int
ale_lib.getMinimalActionSet.argtypes = [c_void_p, c_void_p]
ale_lib.getMinimalActionSet.restype = None
ale_lib.getMinimalActionSize.argtypes = [c_void_p]
ale_lib.getMinimalActionSize.restype = c_int
ale_lib.getFrameNumber.argtypes = [c_void_p]
ale_lib.getFrameNumber.restype = c_int
ale_lib.lives.argtypes = [c_void_p]
ale_lib.lives.restype = c_int
ale_lib.getEpisodeFrameNumber.argtypes = [c_void_p]
ale_lib.getEpisodeFrameNumber.restype = c_int
ale_lib.getScreen.argtypes = [c_void_p, c_void_p]
ale_lib.getScreen.restype = None
ale_lib.getRAM.argtypes = [c_void_p, c_void_p]
ale_lib.getRAM.restype = None
ale_lib.getRAMSize.argtypes = [c_void_p]
ale_lib.getRAMSize.restype = c_int
ale_lib.getScreenWidth.argtypes = [c_void_p]
ale_lib.getScreenWidth.restype = c_int
ale_lib.getScreenHeight.argtypes = [c_void_p]
ale_lib.getScreenHeight.restype = c_int
ale_lib.getScreenRGB.argtypes = [c_void_p, c_void_p]
ale_lib.getScreenRGB.restype = None
ale_lib.getScreenGrayscale.argtypes = [c_void_p, c_void_p]
ale_lib.getScreenGrayscale.restype = None
ale_lib.saveState.argtypes = [c_void_p]
ale_lib.saveState.restype = None
ale_lib.loadState.argtypes = [c_void_p]
ale_lib.loadState.restype = None
ale_lib.cloneState.argtypes = [c_void_p]
ale_lib.cloneState.restype = c_void_p
ale_lib.restoreState.argtypes = [c_void_p, c_void_p]
ale_lib.restoreState.restype = None
ale_lib.cloneSystemState.argtypes = [c_void_p]
ale_lib.cloneSystemState.restype = c_void_p
ale_lib.restoreSystemState.argtypes = [c_void_p, c_void_p]
ale_lib.restoreSystemState.restype = None
ale_lib.deleteState.argtypes = [c_void_p]
ale_lib.deleteState.restype = None
ale_lib.saveScreenPNG.argtypes = [c_void_p, c_char_p]
ale_lib.saveScreenPNG.restype = None
ale_lib.encodeState.argtypes = [c_void_p, c_void_p, c_int]
ale_lib.encodeState.restype = None
ale_lib.encodeStateLen.argtypes = [c_void_p]
ale_lib.encodeStateLen.restype = c_int
ale_lib.decodeState.argtypes = [c_void_p, c_int]
ale_lib.decodeState.restype = c_void_p
class ALEInterface(object):
def __init__(self):
self.obj = ale_lib.ALE_new()
def getString(self, key):
return ale_lib.getString(self.obj, key)
def getInt(self, key):
return ale_lib.getInt(self.obj, key)
def getBool(self, key):
return ale_lib.getBool(self.obj, key)
def getFloat(self, key):
return ale_lib.getFloat(self.obj, key)
def setString(self, key, value):
ale_lib.setString(self.obj, key, value)
def setInt(self, key, value):
ale_lib.setInt(self.obj, key, value)
def setBool(self, key, value):
ale_lib.setBool(self.obj, key, value)
def setFloat(self, key, value):
ale_lib.setFloat(self.obj, key, value)
def loadROM(self, rom_file):
ale_lib.loadROM(self.obj, rom_file)
def act(self, action):
return ale_lib.act(self.obj, int(action))
def game_over(self):
return ale_lib.game_over(self.obj)
def reset_game(self):
ale_lib.reset_game(self.obj)
def getLegalActionSet(self):
act_size = ale_lib.getLegalActionSize(self.obj)
act = np.zeros((act_size), dtype=np.intc)
ale_lib.getLegalActionSet(self.obj, as_ctypes(act))
return act
def getMinimalActionSet(self):
act_size = ale_lib.getMinimalActionSize(self.obj)
act = np.zeros((act_size), dtype=np.intc)
ale_lib.getMinimalActionSet(self.obj, as_ctypes(act))
return act
def getFrameNumber(self):
return ale_lib.getFrameNumber(self.obj)
def lives(self):
return ale_lib.lives(self.obj)
def getEpisodeFrameNumber(self):
return ale_lib.getEpisodeFrameNumber(self.obj)
def getScreenDims(self):
"""returns a tuple that contains (screen_width, screen_height)
"""
width = ale_lib.getScreenWidth(self.obj)
height = ale_lib.getScreenHeight(self.obj)
return (width, height)
def getScreen(self, screen_data=None):
"""This function fills screen_data with the RAW Pixel data
screen_data MUST be a numpy array of uint8/int8. This could be initialized like so:
screen_data = np.empty(w*h, dtype=np.uint8)
Notice, it must be width*height in size also
If it is None, then this function will initialize it
Note: This is the raw pixel values from the atari, before any RGB palette transformation takes place
"""
if(screen_data is None):
width = ale_lib.getScreenWidth(self.obj)
height = ale_lib.getScreenHeight(self.obj)
screen_data = np.zeros(width*height, dtype=np.uint8)
ale_lib.getScreen(self.obj, as_ctypes(screen_data))
return screen_data
def getScreenRGB(self, screen_data=None):
"""This function fills screen_data with the data in RGB format
screen_data MUST be a numpy array of uint8. This can be initialized like so:
screen_data = np.empty((height,width,3), dtype=np.uint8)
If it is None, then this function will initialize it.
"""
if(screen_data is None):
width = ale_lib.getScreenWidth(self.obj)
height = ale_lib.getScreenHeight(self.obj)
screen_data = np.empty((height, width,3), dtype=np.uint8)
ale_lib.getScreenRGB(self.obj, as_ctypes(screen_data[:]))
return screen_data
def getScreenGrayscale(self, screen_data=None):
"""This function fills screen_data with the data in grayscale
screen_data MUST be a numpy array of uint8. This can be initialized like so:
screen_data = np.empty((height,width,1), dtype=np.uint8)
If it is None, then this function will initialize it.
"""
if(screen_data is None):
width = ale_lib.getScreenWidth(self.obj)
height = ale_lib.getScreenHeight(self.obj)
screen_data = np.empty((height, width,1), dtype=np.uint8)
ale_lib.getScreenGrayscale(self.obj, as_ctypes(screen_data[:]))
return screen_data
def getRAMSize(self):
return ale_lib.getRAMSize(self.obj)
def getRAM(self, ram=None):
"""This function grabs the atari RAM.
ram MUST be a numpy array of uint8/int8. This can be initialized like so:
ram = np.array(ram_size, dtype=uint8)
Notice: It must be ram_size where ram_size can be retrieved via the getRAMSize function.
If it is None, then this function will initialize it.
"""
if(ram is None):
ram_size = ale_lib.getRAMSize(self.obj)
ram = np.zeros(ram_size, dtype=np.uint8)
ale_lib.getRAM(self.obj, as_ctypes(ram))
return ram
def saveScreenPNG(self, filename):
"""Save the current screen as a png file"""
return ale_lib.saveScreenPNG(self.obj, filename)
def saveState(self):
"""Saves the state of the system"""
return ale_lib.saveState(self.obj)
def loadState(self):
"""Loads the state of the system"""
return ale_lib.loadState(self.obj)
def cloneState(self):
"""This makes a copy of the environment state. This copy does *not*
include pseudorandomness, making it suitable for planning
purposes. By contrast, see cloneSystemState.
"""
return ale_lib.cloneState(self.obj)
def restoreState(self, state):
"""Reverse operation of cloneState(). This does not restore
pseudorandomness, so that repeated calls to restoreState() in
the stochastic controls setting will not lead to the same
outcomes. By contrast, see restoreSystemState.
"""
ale_lib.restoreState(self.obj, state)
def cloneSystemState(self):
"""This makes a copy of the system & environment state, suitable for
serialization. This includes pseudorandomness and so is *not*
suitable for planning purposes.
"""
return ale_lib.cloneSystemState(self.obj)
def restoreSystemState(self, state):
"""Reverse operation of cloneSystemState."""
ale_lib.restoreSystemState(self.obj, state)
def deleteState(self, state):
""" Deallocates the ALEState """
ale_lib.deleteState(state)
def encodeStateLen(self, state):
return ale_lib.encodeStateLen(state)
def encodeState(self, state, buf=None):
if buf is None:
length = ale_lib.encodeStateLen(state)
buf = np.zeros(length, dtype=np.uint8)
ale_lib.encodeState(state, as_ctypes(buf), c_int(len(buf)))
return buf
def decodeState(self, serialized):
return ale_lib.decodeState(as_ctypes(serialized), len(serialized))
def __del__(self):
ale_lib.ALE_del(self.obj)
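# --- Illustrative usage sketch (not part of the original interface) ---------
# A minimal episode loop built only from the methods defined above. The ROM
# path and the random-action policy are hypothetical; the preallocated frame
# buffer follows the shape documented in getScreenRGB().
if __name__ == '__main__':
    ale = ALEInterface()
    ale.setInt(b'random_seed', 123)
    ale.loadROM(b'/path/to/rom.bin')          # hypothetical ROM path
    actions = ale.getMinimalActionSet()
    (width, height) = ale.getScreenDims()
    frame = np.empty((height, width, 3), dtype=np.uint8)  # reused RGB buffer
    total_reward = 0
    while not ale.game_over():
        a = actions[np.random.randint(len(actions))]
        total_reward += ale.act(a)
        ale.getScreenRGB(frame)               # fills the preallocated buffer
    print('Episode finished: reward %d over %d frames'
          % (total_reward, ale.getEpisodeFrameNumber()))
    ale.reset_game()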
| CyCraig/AtariRL | ale_python_interface/ale_python_interface.py | Python | gpl-2.0 | 10,369 | 0.001929 |
# This code will not run; it was written to understand the logic.
from gittigidiyor.applicationservice import *
from gittigidiyor.auth import *
if __name__ == "__main__":
# HTTP Basic authentication credentials.. It blows up for the wrong credentials..
auth = Auth("testuser", "testpassword", None, None)
api = ApplicationService(auth)
result = api.createApplication("testdeveloper", "Test Application", "This is the test application",
"C", "W", "", "xml", "xml", "tr")
print result
| Annelutfen/gittigidiyor-python | examples/applicationservice.py | Python | mit | 526 | 0.003802 |
from django.db.models.expressions import F, Func
from rest_framework import serializers
from .models import PdfStorage
class PdfStorageListSerializer(serializers.ModelSerializer):
author = serializers.SerializerMethodField("full_name")
class Meta:
model = PdfStorage
fields = [
"id",
"name",
"topic",
"author",
"created",
]
def full_name(self, pdf):
return pdf.author.person_name()
class PaidPdfDownloadLinkSerializer(serializers.ModelSerializer):
download_url = serializers.SerializerMethodField()
class Meta:
model = PdfStorage
fields = ["download_url"]
def get_download_url(self, obj):
return obj.pdf_file.url
class AllRelatedIdsSerializer(serializers.Serializer):
ids = serializers.SerializerMethodField()
class Meta:
fields = ["ids"]
def get_ids(self, obj):
all_ids = (
PdfStorage.objects.annotate(ids=Func(F("related_ids"), function="unnest"))
.values_list("ids", flat=True)
.distinct()
)
return all_ids
| auto-mat/klub | apps/pdf_storage/serializers.py | Python | gpl-3.0 | 1,144 | 0.000874 |
# -*- coding: utf-8 -*-
# This file is part of beets.
# Copyright 2016, Bruno Cauet
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
from __future__ import division, absolute_import, print_function
import os.path
from mock import Mock, patch, call
from tempfile import mkdtemp
from shutil import rmtree
import unittest
from test.helper import TestHelper
from beets.util import bytestring_path
from beetsplug.thumbnails import (ThumbnailsPlugin, NORMAL_DIR, LARGE_DIR,
write_metadata_im, write_metadata_pil,
PathlibURI, GioURI)
class ThumbnailsTest(unittest.TestCase, TestHelper):
def setUp(self):
self.setup_beets()
def tearDown(self):
self.teardown_beets()
@patch('beetsplug.thumbnails.util')
def test_write_metadata_im(self, mock_util):
metadata = {"a": u"A", "b": u"B"}
write_metadata_im("foo", metadata)
try:
command = u"convert foo -set a A -set b B foo".split(' ')
mock_util.command_output.assert_called_once_with(command)
except AssertionError:
command = u"convert foo -set b B -set a A foo".split(' ')
mock_util.command_output.assert_called_once_with(command)
@patch('beetsplug.thumbnails.ThumbnailsPlugin._check_local_ok')
@patch('beetsplug.thumbnails.os.stat')
def test_add_tags(self, mock_stat, _):
plugin = ThumbnailsPlugin()
plugin.write_metadata = Mock()
plugin.get_uri = Mock(side_effect={b"/path/to/cover":
"COVER_URI"}.__getitem__)
album = Mock(artpath=b"/path/to/cover")
mock_stat.return_value.st_mtime = 12345
plugin.add_tags(album, b"/path/to/thumbnail")
metadata = {"Thumb::URI": "COVER_URI",
"Thumb::MTime": u"12345"}
plugin.write_metadata.assert_called_once_with(b"/path/to/thumbnail",
metadata)
mock_stat.assert_called_once_with(album.artpath)
@patch('beetsplug.thumbnails.os')
@patch('beetsplug.thumbnails.ArtResizer')
@patch('beetsplug.thumbnails.get_im_version')
@patch('beetsplug.thumbnails.get_pil_version')
@patch('beetsplug.thumbnails.GioURI')
def test_check_local_ok(self, mock_giouri, mock_pil, mock_im,
mock_artresizer, mock_os):
# test local resizing capability
mock_artresizer.shared.local = False
plugin = ThumbnailsPlugin()
self.assertFalse(plugin._check_local_ok())
# test dirs creation
mock_artresizer.shared.local = True
def exists(path):
if path == NORMAL_DIR:
return False
if path == LARGE_DIR:
return True
raise ValueError(u"unexpected path {0!r}".format(path))
mock_os.path.exists = exists
plugin = ThumbnailsPlugin()
mock_os.makedirs.assert_called_once_with(NORMAL_DIR)
self.assertTrue(plugin._check_local_ok())
# test metadata writer function
mock_os.path.exists = lambda _: True
mock_pil.return_value = False
mock_im.return_value = False
with self.assertRaises(AssertionError):
ThumbnailsPlugin()
mock_pil.return_value = True
self.assertEqual(ThumbnailsPlugin().write_metadata, write_metadata_pil)
mock_im.return_value = True
self.assertEqual(ThumbnailsPlugin().write_metadata, write_metadata_im)
mock_pil.return_value = False
self.assertEqual(ThumbnailsPlugin().write_metadata, write_metadata_im)
self.assertTrue(ThumbnailsPlugin()._check_local_ok())
# test URI getter function
giouri_inst = mock_giouri.return_value
giouri_inst.available = True
self.assertEqual(ThumbnailsPlugin().get_uri, giouri_inst.uri)
giouri_inst.available = False
self.assertEqual(ThumbnailsPlugin().get_uri.__self__.__class__,
PathlibURI)
@patch('beetsplug.thumbnails.ThumbnailsPlugin._check_local_ok')
@patch('beetsplug.thumbnails.ArtResizer')
@patch('beetsplug.thumbnails.util')
@patch('beetsplug.thumbnails.os')
@patch('beetsplug.thumbnails.shutil')
def test_make_cover_thumbnail(self, mock_shutils, mock_os, mock_util,
mock_artresizer, _):
thumbnail_dir = os.path.normpath(b"/thumbnail/dir")
md5_file = os.path.join(thumbnail_dir, b"md5")
path_to_art = os.path.normpath(b"/path/to/art")
mock_os.path.join = os.path.join # don't mock that function
plugin = ThumbnailsPlugin()
plugin.add_tags = Mock()
album = Mock(artpath=path_to_art)
mock_util.syspath.side_effect = lambda x: x
plugin.thumbnail_file_name = Mock(return_value=b'md5')
mock_os.path.exists.return_value = False
def os_stat(target):
if target == md5_file:
return Mock(st_mtime=1)
elif target == path_to_art:
return Mock(st_mtime=2)
else:
raise ValueError(u"invalid target {0}".format(target))
mock_os.stat.side_effect = os_stat
plugin.make_cover_thumbnail(album, 12345, thumbnail_dir)
mock_os.path.exists.assert_called_once_with(md5_file)
mock_os.stat.has_calls([call(md5_file), call(path_to_art)],
any_order=True)
resize = mock_artresizer.shared.resize
resize.assert_called_once_with(12345, path_to_art, md5_file)
plugin.add_tags.assert_called_once_with(album, resize.return_value)
mock_shutils.move.assert_called_once_with(resize.return_value,
md5_file)
# now test with recent thumbnail & with force
mock_os.path.exists.return_value = True
plugin.force = False
resize.reset_mock()
def os_stat(target):
if target == md5_file:
return Mock(st_mtime=3)
elif target == path_to_art:
return Mock(st_mtime=2)
else:
raise ValueError(u"invalid target {0}".format(target))
mock_os.stat.side_effect = os_stat
plugin.make_cover_thumbnail(album, 12345, thumbnail_dir)
self.assertEqual(resize.call_count, 0)
# and with force
plugin.config['force'] = True
plugin.make_cover_thumbnail(album, 12345, thumbnail_dir)
resize.assert_called_once_with(12345, path_to_art, md5_file)
@patch('beetsplug.thumbnails.ThumbnailsPlugin._check_local_ok')
def test_make_dolphin_cover_thumbnail(self, _):
plugin = ThumbnailsPlugin()
tmp = bytestring_path(mkdtemp())
album = Mock(path=tmp,
artpath=os.path.join(tmp, b"cover.jpg"))
plugin.make_dolphin_cover_thumbnail(album)
with open(os.path.join(tmp, b".directory"), "rb") as f:
self.assertEqual(
f.read().splitlines(),
[b"[Desktop Entry]", b"Icon=./cover.jpg"]
)
# not rewritten when it already exists (yup that's a big limitation)
album.artpath = b"/my/awesome/art.tiff"
plugin.make_dolphin_cover_thumbnail(album)
with open(os.path.join(tmp, b".directory"), "rb") as f:
self.assertEqual(
f.read().splitlines(),
[b"[Desktop Entry]", b"Icon=./cover.jpg"]
)
rmtree(tmp)
@patch('beetsplug.thumbnails.ThumbnailsPlugin._check_local_ok')
@patch('beetsplug.thumbnails.ArtResizer')
def test_process_album(self, mock_artresizer, _):
get_size = mock_artresizer.shared.get_size
plugin = ThumbnailsPlugin()
make_cover = plugin.make_cover_thumbnail = Mock(return_value=True)
make_dolphin = plugin.make_dolphin_cover_thumbnail = Mock()
# no art
album = Mock(artpath=None)
plugin.process_album(album)
self.assertEqual(get_size.call_count, 0)
self.assertEqual(make_dolphin.call_count, 0)
# cannot get art size
album.artpath = b"/path/to/art"
get_size.return_value = None
plugin.process_album(album)
get_size.assert_called_once_with(b"/path/to/art")
self.assertEqual(make_cover.call_count, 0)
# dolphin tests
plugin.config['dolphin'] = False
plugin.process_album(album)
self.assertEqual(make_dolphin.call_count, 0)
plugin.config['dolphin'] = True
plugin.process_album(album)
make_dolphin.assert_called_once_with(album)
# small art
get_size.return_value = 200, 200
plugin.process_album(album)
make_cover.assert_called_once_with(album, 128, NORMAL_DIR)
# big art
make_cover.reset_mock()
get_size.return_value = 500, 500
plugin.process_album(album)
make_cover.has_calls([call(album, 128, NORMAL_DIR),
call(album, 256, LARGE_DIR)], any_order=True)
@patch('beetsplug.thumbnails.ThumbnailsPlugin._check_local_ok')
@patch('beetsplug.thumbnails.decargs')
def test_invokations(self, mock_decargs, _):
plugin = ThumbnailsPlugin()
plugin.process_album = Mock()
album = Mock()
plugin.process_album.reset_mock()
lib = Mock()
album2 = Mock()
lib.albums.return_value = [album, album2]
plugin.process_query(lib, Mock(), None)
lib.albums.assert_called_once_with(mock_decargs.return_value)
plugin.process_album.has_calls([call(album), call(album2)],
any_order=True)
@patch('beetsplug.thumbnails.BaseDirectory')
def test_thumbnail_file_name(self, mock_basedir):
plug = ThumbnailsPlugin()
plug.get_uri = Mock(return_value=u"file:///my/uri")
self.assertEqual(plug.thumbnail_file_name(b'idontcare'),
b"9488f5797fbe12ffb316d607dfd93d04.png")
def test_uri(self):
gio = GioURI()
if not gio.available:
self.skipTest(u"GIO library not found")
self.assertEqual(gio.uri(u"/foo"), u"file:///") # silent fail
self.assertEqual(gio.uri(b"/foo"), u"file:///foo")
self.assertEqual(gio.uri(b"/foo!"), u"file:///foo!")
self.assertEqual(
gio.uri(b'/music/\xec\x8b\xb8\xec\x9d\xb4'),
u'file:///music/%EC%8B%B8%EC%9D%B4')
class TestPathlibURI():
"""Test PathlibURI class"""
def test_uri(self):
test_uri = PathlibURI()
# test it won't break if we pass it bytes for a path
test_uri.uri(b'/')
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| shamangeorge/beets | test/test_thumbnails.py | Python | mit | 11,414 | 0.000088 |
# This file is part of Indico.
# Copyright (C) 2002 - 2020 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
from flask import request
from indico.modules.admin.views import WPAdmin
from indico.modules.users import User
from indico.util.i18n import _
from indico.web.breadcrumbs import render_breadcrumbs
from indico.web.views import WPDecorated, WPJinjaMixin
class WPUser(WPJinjaMixin, WPDecorated):
"""Base WP for user profile pages.
Whenever you use this, you MUST include `user` in the params passed to
`render_template`. Any RH using this should inherit from `RHUserBase`
which already handles user/admin access. In this case, simply add
``user=self.user`` to your `render_template` call.
"""
template_prefix = 'users/'
def __init__(self, rh, active_menu_item, **kwargs):
kwargs['active_menu_item'] = active_menu_item
WPDecorated.__init__(self, rh, **kwargs)
def _get_breadcrumbs(self):
if 'user_id' in request.view_args:
user = User.get(request.view_args['user_id'])
profile_breadcrumb = _('Profile of {name}').format(name=user.full_name)
else:
profile_breadcrumb = _('My Profile')
return render_breadcrumbs(profile_breadcrumb)
def _get_body(self, params):
return self._get_page_content(params)
class WPUserDashboard(WPUser):
bundles = ('module_users.dashboard.js',)
class WPUsersAdmin(WPAdmin):
template_prefix = 'users/'
bundles = ('module_users.js',)
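# --- Illustrative sketch (not part of this module) ---------------------------
# How a request handler is expected to use WPUser, per the docstring above:
# inherit from RHUserBase (which resolves self.user and enforces access) and
# always pass ``user`` to render_template. The handler name, template and the
# RHUserBase import path are assumptions for illustration only.
#
#     from indico.modules.users.controllers import RHUserBase
#
#     class RHUserExample(RHUserBase):
#         def _process(self):
#             return WPUser.render_template('example.html', 'example',
#                                           user=self.user)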
| mic4ael/indico | indico/modules/users/views.py | Python | mit | 1,654 | 0.000605 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-06-02 07:44
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('distances', '0010_auto_20170519_1604'),
]
operations = [
migrations.AlterField(
model_name='dates',
name='startDate',
field=models.DateField(default=datetime.datetime(2017, 5, 26, 10, 44, 7, 194576)),
),
]
| tkettu/rokego | distances/migrations/0011_auto_20170602_1044.py | Python | mit | 513 | 0.001949 |
import re
import shutil
import time
import os
import os.path
from pyquery.pyquery import PyQuery
import requests
import requests.utils
from WhatManager2.settings import MEDIA_ROOT
from bibliotik import manage_bibliotik
from bibliotik.models import BibliotikTorrent, BibliotikFulltext
from bibliotik.settings import BIBLIOTIK_UPLOAD_URL, BIBLIOTIK_DOWNLOAD_TORRENT_URL
from home.models import DownloadLocation
def extract_torrents(html):
result = []
pq = PyQuery(html)
for row in pq('#torrents_table tbody tr.torrent').items():
data = {
'id': row.attr('id')[len('torrent-'):],
'type': row('td:eq(0) img').attr('title'),
'title': row('td:eq(1) span.title').text(),
'publishers': [],
'authors': [],
'year': row('td:eq(1) span.torYear').text()[1:-1],
'format': row('td:eq(1) span.torFormat').text()[1:-1],
'retail': bool(row('td:eq(1) span.torRetail')),
'tags': []
}
for dlink in row('td:eq(1) > a').items():
href = dlink.attr('href')
if '/creators/' in href:
data['authors'].append({
'id': href[href.rfind('/') + 1:],
'name': dlink.text()
})
elif '/publishers/' in href:
data['publishers'].append({
'id': href[href.rfind('/') + 1:],
'name': dlink.text()
})
for tag in row('td:eq(1) > span.taglist > a').items():
href = tag.attr('href')
data['tags'].append({
'id': href[href.rfind('/') + 1:],
'name': tag.text()
})
result.append(data)
return result
class BibliotikClient(object):
def __init__(self, session_id):
self.session_id = session_id
self.session = requests.Session()
requests.utils.add_dict_to_cookiejar(self.session.cookies, {
'id': session_id
})
self.auth_key = None
def get_auth_key(self):
if self.auth_key:
return self.auth_key
for i in xrange(3):
try:
response = self.session.get('https://bibliotik.me/upload/ebooks')
response.raise_for_status()
break
except Exception:
pass
response.raise_for_status()
pq = PyQuery(response.content)
self.auth_key = pq('input[name="authkey"]').val()
if not self.auth_key:
raise Exception('Could not get the authkey')
return self.auth_key
def send_upload(self, payload, payload_files):
return self.session.post(BIBLIOTIK_UPLOAD_URL, data=payload, files=payload_files,
allow_redirects=False)
def download_torrent(self, torrent_id):
torrent_page = BIBLIOTIK_DOWNLOAD_TORRENT_URL.format(torrent_id)
for i in xrange(3):
try:
r = self.session.get(torrent_page, allow_redirects=False)
r.raise_for_status()
if r.status_code == 200 and 'application/x-bittorrent' in r.headers['content-type']:
filename = re.search('filename="(.*)"',
r.headers['content-disposition']).group(1)
return filename, r.content
else:
raise Exception('Wrong status_code or content-type')
except Exception as ex:
print u'Error while downloading bibliotik torrent. Will retry: {0}'.format(ex)
time.sleep(3)
download_exception = ex
raise download_exception
def search(self, query):
url = 'https://bibliotik.me/torrents/'
response = self._search_request(url, query)
if not response.url.startswith(url):
raise Exception(u'Search redirected to {0}. Probably invalid id. Was {1}.'.format(
response.url, self.session_id
))
return {
'results': extract_torrents(response.content),
}
def _search_request(self, url, query):
for i in xrange(3):
try:
response = self.session.get(url, params={
'search': query
})
response.raise_for_status()
return response
except Exception as ex:
time.sleep(3)
exception = ex
raise exception
def upload_book_to_bibliotik(bibliotik_client, book_upload):
print 'Sending request for upload to bibliotik.me'
payload_files = dict()
payload_files['TorrentFileField'] = ('torrent.torrent', book_upload.bibliotik_torrent_file)
payload = dict()
payload['upload'] = ''
payload['authkey'] = bibliotik_client.get_auth_key()
payload['AuthorsField'] = book_upload.author
payload['TitleField'] = book_upload.title
payload['IsbnField'] = book_upload.isbn or ''
payload['PublishersField'] = book_upload.publisher
payload['PagesField'] = book_upload.pages or ''
payload['YearField'] = book_upload.year
payload['FormatField'] = {
'AZW3': '21',
'EPUB': '15',
'PDF': '2',
}[book_upload.format]
payload['LanguageField'] = '1' # English
if book_upload.retail:
payload['RetailField'] = '1'
payload['TagsField'] = ','.join(book_upload.tag_list)
payload['ImageField'] = book_upload.cover_url
payload['DescriptionField'] = book_upload.description
response = bibliotik_client.send_upload(payload, payload_files)
response.raise_for_status()
if response.status_code == requests.codes.ok:
with open(os.path.join(MEDIA_ROOT, 'bibliotik_upload.html'), 'wb') as f:
f.write(response.content)
raise Exception('Bibliotik does not want this. Written to media/')
redirect_match = re.match('^https://bibliotik.me/torrents/(?P<id>\d+)$',
response.headers['location'])
if not redirect_match:
raise Exception('Could not get new torrent ID.')
torrent_id = redirect_match.groupdict()['id']
book_upload.bibliotik_torrent = BibliotikTorrent.get_or_create(bibliotik_client, torrent_id)
book_upload.save()
# Add the torrent to the client
location = DownloadLocation.get_bibliotik_preferred()
download_dir = os.path.join(location.path, unicode(book_upload.bibliotik_torrent.id))
book_path = os.path.join(download_dir, book_upload.target_filename)
if not os.path.exists(download_dir):
os.mkdir(download_dir)
os.chmod(download_dir, 0777)
shutil.copyfile(
book_upload.book_data.storage.path(book_upload.book_data),
book_path
)
os.chmod(book_path, 0777)
manage_bibliotik.add_bibliotik_torrent(
book_upload.bibliotik_torrent.id, location=location, bibliotik_client=bibliotik_client
)
return book_upload
def search_torrents(query):
b_fulltext = BibliotikFulltext.objects.only('id').all()
b_fulltext = b_fulltext.extra(where=['MATCH(`info`, `more_info`) AGAINST (%s IN BOOLEAN MODE)'],
params=[query])
b_fulltext = b_fulltext.extra(select={'score': 'MATCH (`info`) AGAINST (%s)'},
select_params=[query])
b_fulltext = b_fulltext.extra(order_by=['-score'])
b_torrents_dict = BibliotikTorrent.objects.in_bulk([b.id for b in b_fulltext])
b_torrents = list()
for i in b_fulltext:
b_torrent = b_torrents_dict[i.id]
coef = 1.0
if b_torrent.retail:
coef *= 1.2
if b_torrent.format == 'EPUB':
coef *= 1.1
elif b_torrent.format == 'PDF':
coef *= 0.9
b_torrent.score = i.score * coef
b_torrents.append(b_torrent)
return b_torrents
| MADindustries/WhatManager2 | bibliotik/utils.py | Python | mit | 7,895 | 0.002027 |
# Copyright (C) 2007, 2009, 2010 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import sys
from bzrlib.builtins import cmd_cat
from bzrlib.tests import StringIOWrapper
from bzrlib.tests.transport_util import TestCaseWithConnectionHookedTransport
class TestCat(TestCaseWithConnectionHookedTransport):
def setUp(self):
super(TestCat, self).setUp()
# Redirect sys.stdout as this is what cat uses
self.outf = StringIOWrapper()
self.overrideAttr(sys, 'stdout', self.outf)
def test_cat(self):
# FIXME: sftp raises ReadError instead of NoSuchFile when probing for
# branch/foo/.bzr/branch-format when used with the paramiko test
# server.
from bzrlib.tests import TestSkipped
raise TestSkipped('SFTPTransport raises incorrect exception'
' when reading from paramiko server')
wt1 = self.make_branch_and_tree('branch')
self.build_tree_contents([('branch/foo', 'foo')])
wt1.add('foo')
wt1.commit('add foo')
self.start_logging_connections()
cmd = cmd_cat()
cmd.run(self.get_url('branch/foo'))
self.assertEquals(1, len(self.connections))
self.assertEquals('foo', self.outf.getvalue())
| stewartsmith/bzr | bzrlib/tests/commands/test_cat.py | Python | gpl-2.0 | 1,932 | 0.000518 |
#! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import os
all_modifs={}
def fixdir(dir):
global all_modifs
for k in all_modifs:
for v in all_modifs[k]:
modif(os.path.join(dir,'waflib'),k,v)
def modif(dir,name,fun):
if name=='*':
lst=[]
for y in'. Tools extras'.split():
for x in os.listdir(os.path.join(dir,y)):
if x.endswith('.py'):
lst.append(y+os.sep+x)
for x in lst:
modif(dir,x,fun)
return
filename=os.path.join(dir,name)
f=open(filename,'r')
try:
txt=f.read()
finally:
f.close()
txt=fun(txt)
f=open(filename,'w')
try:
f.write(txt)
finally:
f.close()
def subst(*k):
def do_subst(fun):
global all_modifs
for x in k:
try:
all_modifs[x].append(fun)
except KeyError:
all_modifs[x]=[fun]
return fun
return do_subst
@subst('*')
def r1(code):
code=code.replace(',e:',',e:')
code=code.replace("",'')
code=code.replace('','')
return code
@subst('Runner.py')
def r4(code):
code=code.replace('next(self.biter)','self.biter.next()')
return code
| automatthias/aubio | waflib/fixpy2.py | Python | gpl-3.0 | 1,110 | 0.067568 |
import time
from torba.server import util
def sessions_lines(data):
"""A generator returning lines for a list of sessions.
data is the return value of rpc_sessions()."""
fmt = ('{:<6} {:<5} {:>17} {:>5} {:>5} {:>5} '
'{:>7} {:>7} {:>7} {:>7} {:>7} {:>9} {:>21}')
yield fmt.format('ID', 'Flags', 'Client', 'Proto',
'Reqs', 'Txs', 'Subs',
'Recv', 'Recv KB', 'Sent', 'Sent KB', 'Time', 'Peer')
for (id_, flags, peer, client, proto, reqs, txs_sent, subs,
recv_count, recv_size, send_count, send_size, time) in data:
yield fmt.format(id_, flags, client, proto,
'{:,d}'.format(reqs),
'{:,d}'.format(txs_sent),
'{:,d}'.format(subs),
'{:,d}'.format(recv_count),
'{:,d}'.format(recv_size // 1024),
'{:,d}'.format(send_count),
'{:,d}'.format(send_size // 1024),
util.formatted_time(time, sep=''), peer)
def groups_lines(data):
"""A generator returning lines for a list of groups.
data is the return value of rpc_groups()."""
fmt = ('{:<6} {:>9} {:>9} {:>6} {:>6} {:>8}'
'{:>7} {:>9} {:>7} {:>9}')
yield fmt.format('ID', 'Sessions', 'Bwidth KB', 'Reqs', 'Txs', 'Subs',
'Recv', 'Recv KB', 'Sent', 'Sent KB')
for (id_, session_count, bandwidth, reqs, txs_sent, subs,
recv_count, recv_size, send_count, send_size) in data:
yield fmt.format(id_,
'{:,d}'.format(session_count),
'{:,d}'.format(bandwidth // 1024),
'{:,d}'.format(reqs),
'{:,d}'.format(txs_sent),
'{:,d}'.format(subs),
'{:,d}'.format(recv_count),
'{:,d}'.format(recv_size // 1024),
'{:,d}'.format(send_count),
'{:,d}'.format(send_size // 1024))
def peers_lines(data):
"""A generator returning lines for a list of peers.
data is the return value of rpc_peers()."""
def time_fmt(t):
if not t:
return 'Never'
return util.formatted_time(now - t)
now = time.time()
fmt = ('{:<30} {:<6} {:>5} {:>5} {:<17} {:>4} '
'{:>4} {:>8} {:>11} {:>11} {:>5} {:>20} {:<15}')
yield fmt.format('Host', 'Status', 'TCP', 'SSL', 'Server', 'Min',
'Max', 'Pruning', 'Last Good', 'Last Try',
'Tries', 'Source', 'IP Address')
for item in data:
features = item['features']
hostname = item['host']
host = features['hosts'][hostname]
yield fmt.format(hostname[:30],
item['status'],
host.get('tcp_port') or '',
host.get('ssl_port') or '',
features['server_version'] or 'unknown',
features['protocol_min'],
features['protocol_max'],
features['pruning'] or '',
time_fmt(item['last_good']),
time_fmt(item['last_try']),
item['try_count'],
item['source'][:20],
item['ip_addr'] or '')
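# --- Illustrative usage sketch (not part of this module) --------------------
# Each helper above is a generator of pre-formatted text lines; a caller that
# already holds the matching RPC result just iterates and prints, e.g.:
#
#     for line in sessions_lines(data):   # data: return value of rpc_sessions()
#         print(line)
#
# The same pattern applies to groups_lines() and peers_lines().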
| lbryio/lbry | torba/torba/server/text.py | Python | mit | 3,433 | 0 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vispy: gallery 30
# -----------------------------------------------------------------------------
# Copyright (c) 2015, California Institute of Technology.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# -----------------------------------------------------------------------------
"""
Simple use of SceneCanvas to display an Image.
"""
from __future__ import print_function
import sys
import os
from vispy import scene, app, visuals, gloo
from vispy.visuals.transforms import STTransform, TransformSystem
import numpy as np
import astropy.io.fits as fits
from astropy.table import Table
from astropy.wcs import WCS
from scipy.misc import bytescale
from time import sleep
from vispy.geometry import Rect
from vispy.scene import PanZoomCamera
from vispy.util import keys
from vispy.app import Timer
import warnings
is_pacs = False
warnings.filterwarnings('ignore')
class SourceInspectCamera(PanZoomCamera):
"""
"""
_state_props = PanZoomCamera._state_props + ('index', )
def __init__(self, image, img_data, sources, poslist, index=0, **kwargs):
PanZoomCamera.__init__(self, **kwargs)
self.index = index
self.image = image
self.img_data = img_data
self.sources = sources
self.poslist = poslist
#self.smin = 0.9*np.nanmin(self.img_data)
#self.smax = 1.02*np.nanmax(self.img_data)
pcts = np.nanpercentile(self.img_data, [5.0, 99.0])
if np.all(np.isfinite(pcts)):
self.smin = pcts[0]
self.smax = pcts[1]
self.accelerator = 5.0
self.nsrc = len(poslist)
self._keymap = {
keys.UP: +1,
keys.DOWN: -1,
keys.LEFT: -1,
keys.RIGHT: +1,
keys.SPACE: +1
}
self._timer = Timer(0.2, start=False, connect=self.on_timer)
@property
def keymap(self):
"""
"""
return self._keymap
def update_index(self, val):
self.index += val
if (self.index > self.nsrc-1):
self.index = 0
if (self.index < 0):
self.index = self.nsrc - 1
def update_pan(self):
newX = self.poslist[self.index][0]
newY = self.poslist[self.index][1]
curX = self.rect.left + self.rect.width/2.0
curY = self.rect.bottom + self.rect.height/2.0
self.pan((newX-curX,newY-curY))
# update image data
imsect = self.img_data[int(self.rect.bottom):int(self.rect.top),
int(self.rect.left):int(self.rect.right)]
pcts = np.nanpercentile(imsect, [5.0, 99.0])
if np.all(np.isfinite(pcts)):
self.smin = pcts[0]
#cmin = -0.01 + 1.2*self.sources['background'][self.sources.index==self.index].values[0]
if (is_pacs):
self.smax = 1.2*self.sources['susflux']\
[self.sources.index==self.index].values[0]/1000.0/10.0 + self.smin
else:
self.smax = 1.2*self.sources['fluxtml']\
[self.sources.index==self.index].values[0]/1000.0/0.95 + self.smin
self.update_scale()
def update_scale(self):
self.image.set_data(bytescale(self.img_data, cmin=self.smin, cmax=self.smax))
self.view_changed()
def on_timer(self, event):
"""Timer event handler
Parameters
----------
event : instance of Event
The event.
"""
self.update_index(1)
self.update_pan()
self.view_changed()
def viewbox_key_event(self, event):
"""ViewBox key event handler
Parameters
----------
event : instance of Event
The event.
"""
PanZoomCamera.viewbox_key_event(self, event)
if event.handled or not self.interactive:
return
if event.type == 'key_press':
if event.key in self._keymap:
val = self._keymap[event.key]
self.update_index(val)
self.update_pan()
self.view_changed()
elif event.key == 'M':
self._timer.start()
elif event.key == 'S':
self._timer.stop()
#elif event.key == 'X':
# ind = np.argsort(self.poslist[:,0])
# self.poslist = self.poslist[ind]
#elif event.key == 'Y':
# ind = np.argsort(self.poslist[:,1])
# self.poslist = self.poslist[ind]
elif event.key == 'L':
print(self.sources[self.sources.sourceid==self.sources['sourceid'][self.index]])
elif event.key == 'T':
sdiff = self.accelerator*(self.smax - self.smin)/255.0
self.smax += sdiff
self.smin += sdiff
self.update_scale()
elif event.key == 'B':
sdiff = self.accelerator*(self.smax - self.smin)/255.0
self.smax -= sdiff
self.smin -= sdiff
self.update_scale()
elif event.key == 'N':
sdiff = self.accelerator*(self.smax - self.smin)/255.0
self.smax -= sdiff
self.smin += sdiff
self.update_scale()
elif event.key == 'W':
sdiff = self.accelerator*(self.smax - self.smin)/255.0
self.smax += sdiff
self.smin -= sdiff
self.update_scale()
elif event.key == 'U':
print("Current stretch limits: %10.4g, %10.4g"%(self.smin, self.smax))
self.smin = float(input("New lower value?"))
self.smax = float(input("New upper value?"))
self.update_scale()
def find_map(obsid, band, mapdir, template="{}{}_map.fits.zip"):
""" Walk the map directory and return the map data and marker size
Parameters:
-----------
obsid (int): observation id (10-digit integer)
band (string) : blue, green, red, PSW, PMW or PLW
mapdir (string) : top-level of map directory
template (string) : how to format obsid and filter into a map name
Returns:
--------
img_data : numpy array of image data
    filter : 'blue', 'green', 'red' for PACS, 'PSW', 'PMW', 'PLW' for SPIRE
mrkr_size : size of markers in pixels
wcs : astropy.wcs object for the image
"""
fname = template.format(obsid, band)
fullname = fname
for root, dir, files in os.walk(os.path.expanduser(mapdir), followlinks=True):
for name in files:
if name.endswith(fname):
fullname = os.path.join(root, fname)
break
elif name.endswith(fname.replace('map','pmd')):
fullname = os.path.join(root, fname.replace('map','pmd'))
break
elif name.endswith(fname.replace('L25','L3')):
fullname = os.path.join(root, fname.replace('L25','L3'))
break
elif name.endswith(fname.replace('L25','L2').replace('JSMAP','PMAP')):
fullname = os.path.join(root, fname.replace('L25','L2').replace('JSMAP','PMAP'))
break
# Get the data
hdu = fits.open(fullname)
img_data = hdu[1].data
filter = band
if (band == 'B'):
if (hdu[0].header['WAVELNTH'] == 100.0):
filter = 'green'
else:
filter = 'blue'
elif (band == 'R'):
filter = 'red'
# Handle illegal CUNITn in PACS SPG12 and earlier maps
for key in ['CUNIT1', 'CUNIT2']:
if key in hdu[1].header.keys():
del hdu[1].header[key]
img_wcs = WCS(hdu[1].header)
deg_per_pix = np.sqrt(np.abs(np.linalg.det(img_wcs.pixel_scale_matrix)))
beams = {'blue':5.5, 'green':7.0, 'red':11.5, 'PSW':17.0, 'PMW':32.0, 'PLW':42.0}
beam_size = beams[filter]/3600.
mrkr_size = beam_size/deg_per_pix
return(img_data, filter, mrkr_size, img_wcs)
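# --- Illustration (not part of the original script) ---
# A minimal sketch of a find_map() call, assuming a SPIRE observation whose map
# follows the default "{obsid}{band}_map.fits.zip" naming somewhere under ~/maps
# (the obsid and directory below are made up):
#
# img_data, filter, mrkr_size, wcs = find_map(1342183475, 'PSW', '~/maps')
# print(filter, mrkr_size)   # 'PSW' and the 17" beam converted to pixels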
def sourcelist_pscdb(obsid, filter, sql_statement, dbname, username,
hostname, port=5432):
""" Return dataframe from source table
Parameters:
-----------
obsid (int): observation id (10-digit integer)
filter (string) : red, green, blue, PSW, PMW or PLW
sql_statement (string) : Query to database
dbname (string) : database name
username (string) : user name
hostname (string) : host name
port (int) : port for connecting to server, defaults to 5432
Returns:
--------
sources : Pandas dataframe of the sources
"""
import psycopg2 as pg
import pandas.io.sql as psql
with pg.connect("dbname={} user={} host={} port={}".format(dbname, username,
hostname, port)) as connection:
sources = psql.read_sql(sql_statement.format(obsid,filter), connection)
return(sources)
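# --- Illustration (not part of the original script) ---
# A minimal sketch of a sourcelist_pscdb() call, reusing the default SPIRE query
# defined under __main__ below (the obsid and connection details are made up):
#
# sources = sourcelist_pscdb(1342183475, 'PSW', sql_statement,
#                            dbname='spire', username='spire',
#                            hostname='psc.ipac.caltech.edu')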
def display_sources(sources, img_data, mrkr_size, wcs, cmap='grays',
susscolor="blue", tmlcolor="green", tm2color="orange",
titlestring="SPIRE PSC"):
"""
display sources overlaid on image
Parameters:
-----------
sources : dataframe including ra and dec values
img_data : numpy array of the image
mrkr_size : diameter of the markers in pixel units
wcs : astropy.wcs wcs object for the image (to convert ra,dec to pixels)
    cmap : vispy color map, defaults to 'grays'. See vispy.colors.get_colormaps()
    susscolor, tmlcolor, tm2color : edge colors for the SUSS, TML and TM2 position
        markers; pass None to skip drawing that set of markers
    titlestring : title for the canvas window
Returns:
--------
None
"""
nsrc = len(sources)
pos = np.empty( shape=(0, 0) )
if (nsrc > 0):
if (is_pacs == True):
sworld = np.vstack([sources['susra'].values.astype(np.float64),
sources['susdec'].values.astype(np.float64)]).T
else:
sworld = np.vstack([sources['ra'].values,sources['dec'].values]).T
pos = wcs.wcs_world2pix(sworld,0) + 0.5
else:
print("No sources found")
sys.exit(-1);
keydict = dict(escape='close', p=lambda x: max(0,i-1),
n=lambda x: min(nsrc,i+1))
#canvas = scene.SceneCanvas(keys=keydict)
canvas = scene.SceneCanvas(keys='interactive')
canvas.size = img_data.shape
canvas.title = titlestring
canvas.show()
# Set viewbox to display the image with interactive pan/zoom
view = canvas.central_widget.add_view()
# Create the image
#image = scene.visuals.Image(bytescale(img_data, cmin=0.8*np.nanmin(img_data),
# cmax=1.05*np.nanmax(img_data)), parent=view.scene)
# note that vispy.color.get_colormaps() returns all the ColorMaps
image = scene.visuals.Image(bytescale(img_data, cmin=0.9*np.nanmin(img_data),
cmax=1.02*np.nanmax(img_data)),
#clim=(0.8*np.nanmin(img_data), 1.05*np.nanmax(img_data)),
cmap=cmap,
parent=view.scene)
# Set 2D camera (the camera will scale to the contents in the scene)
view.camera = SourceInspectCamera(image,img_data,sources,pos,index=0,aspect=1)
view.camera.set_range()
# Add the markers
if ((nsrc > 0) and (susscolor != None)):
p1 = scene.visuals.Markers(parent=view.scene)
p1.set_data(pos,
face_color=None, edge_color=susscolor, scaling=True,
edge_width=2.0, size=mrkr_size)
if ((nsrc > 0) and (tmlcolor != None)):
tmlworld = np.vstack([sources['ratml'].values,sources['dectml'].values]).T
postml = wcs.wcs_world2pix(tmlworld,0) + 0.5
p2 = scene.visuals.Markers(parent=view.scene)
p2.set_data(postml,
face_color=None, edge_color=tmlcolor, scaling=True,
edge_width=1.5, size=mrkr_size)
if ((nsrc > 0) and (tm2color != None)):
tm2world = np.vstack([sources['ratm2'].values,sources['dectm2'].values]).T
postm2 = wcs.wcs_world2pix(tm2world,0) + 0.5
p3 = scene.visuals.Markers(parent=view.scene)
p3.set_data(postm2,
face_color=None, edge_color=tm2color, scaling=True,
edge_width=1.5, size=mrkr_size)
app.run()
return
if __name__ == '__main__' and sys.flags.interactive == 0:
if (sys.argv[0].endswith('ppscinspector')):
is_pacs = True
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("obsid", help="observation id", type=int)
if (is_pacs):
parser.add_argument("band", help="PACS band, must be B or R")
else:
parser.add_argument("band", help="SPIRE band, must be PSW, PMW or PMW")
parser.add_argument("mapdir", help="top-level map directory")
parser.add_argument("--cmap",
help="""
color map, (grays|fire|ice|hot|spring|summer|
autumn|winter|blues|cool|hsl|husl|diverging|cubehelixcolormap)
""", nargs='?', default="grays")
if (is_pacs):
sql_statement = """
select sourceid, obsid, band, susra,susdec,daora,daodec,susflux
from source13
where obsid={} and band='{}'
order by sourceid asc
"""
dbname="pacs"
username="gaborm"
hostname="localhost"
port=5562
else:
sql_statement = """
select sourceid, obsid, arrayname, x, y,
ra, dec, flux, background, quality,
ratml, dectml, fluxtml, backgroundparm1tml,
ratm2, dectm2, fluxtm2, qualitydao
from source
where obsid={} and arrayname='{}'
order by sourceid asc
"""
dbname="spire"
username="spire"
hostname="psc.ipac.caltech.edu"
port=5432
parser.add_argument("-S", "--sql_statement", help="SQL statement, default=\"{}\"".format(sql_statement),
nargs='?', default=sql_statement)
parser.add_argument("-D", "--dbname", help="database name, default={}".format(dbname),
nargs='?', default=dbname)
parser.add_argument("-U", "--username", help="database username, default={}".format(username),
nargs='?', default=username)
parser.add_argument("-H", "--hostname", help="database hostname, default={}".format(hostname),
nargs='?', default=hostname)
parser.add_argument("-P", "--port", help="database port, default {}".format(port),
nargs='?', default=port)
args = parser.parse_args()
obsid = args.obsid
band = args.band
mapdir = args.mapdir
if (is_pacs):
img_data, filter, mrkr_size, wcs = find_map(obsid, band, mapdir,
template="{}_PACS_L25_HPPJSMAP{}_SPGv13.0.0.fits.gz")
else:
img_data, filter, mrkr_size, wcs = find_map(obsid, band, mapdir)
print('loading sources from database for {} {}...'.format(obsid,band), end='')
sources = sourcelist_pscdb(obsid, filter, sql_statement=args.sql_statement,
dbname=args.dbname,
username=args.username, hostname=args.hostname,
port=args.port)
print('done.')
if (is_pacs == True):
titlestring = "PPSC: {} {}".format(obsid, filter)
display_sources(sources, img_data, mrkr_size, wcs, titlestring=titlestring,
tmlcolor=None, tm2color=None, cmap=args.cmap)
else:
titlestring = "SPSC: {} {}".format(obsid, filter)
display_sources(sources, img_data, mrkr_size, wcs, titlestring=titlestring,
cmap=args.cmap)
| stargaser/spscviz | spscinspector.py | Python | bsd-3-clause | 15,570 | 0.008157 |
# --- BEGIN COPYRIGHT BLOCK ---
# Copyright (C) 2016 Red Hat, Inc.
# All rights reserved.
#
# License: GPL (version 3 or any later version).
# See LICENSE for details.
# --- END COPYRIGHT BLOCK ---
from lib389.plugins import Plugin, Plugins
import argparse
from lib389.cli_base import (
_generic_list,
_generic_get,
_generic_get_dn,
_generic_create,
_generic_delete,
_get_arg,
_get_args,
_get_attributes,
_warn,
)
SINGULAR = Plugin
MANY = Plugins
RDN = 'cn'
def plugin_list(inst, basedn, log, args):
_generic_list(inst, basedn, log.getChild('plugin_list'), MANY)
def plugin_get(inst, basedn, log, args):
rdn = _get_arg( args.selector, msg="Enter %s to retrieve" % RDN)
_generic_get(inst, basedn, log.getChild('plugin_get'), MANY, rdn)
def plugin_get_dn(inst, basedn, log, args):
dn = _get_arg( args.dn, msg="Enter dn to retrieve")
_generic_get_dn(inst, basedn, log.getChild('plugin_get_dn'), MANY, dn)
# Plugin enable
def plugin_enable(inst, basedn, log, args):
dn = _get_arg( args.dn, msg="Enter plugin dn to enable")
mc = MANY(inst, basedn)
o = mc.get(dn=dn)
o.enable()
o_str = o.display()
log.info('Enabled %s', o_str)
# Plugin disable
def plugin_disable(inst, basedn, log, args, warn=True):
dn = _get_arg( args.dn, msg="Enter plugin dn to disable")
if warn:
_warn(dn, msg="Disabling %s %s" % (SINGULAR.__name__, dn))
mc = MANY(inst, basedn)
o = mc.get(dn=dn)
o.disable()
o_str = o.display()
log.info('Disabled %s', o_str)
# Plugin configure?
def plugin_configure(inst, basedn, log, args):
pass
def generic_show(inst, basedn, log, args):
"""Display plugin configuration."""
plugin = args.plugin_cls(inst)
log.info(plugin.display())
def generic_enable(inst, basedn, log, args):
plugin = args.plugin_cls(inst)
plugin.enable()
log.info("Enabled %s", plugin.rdn)
def generic_disable(inst, basedn, log, args):
plugin = args.plugin_cls(inst)
plugin.disable()
log.info("Disabled %s", plugin.rdn)
def generic_status(inst, basedn, log, args):
plugin = args.plugin_cls(inst)
if plugin.status() == True:
log.info("%s is enabled", plugin.rdn)
else:
log.info("%s is disabled", plugin.rdn)
def add_generic_plugin_parsers(subparser, plugin_cls):
show_parser = subparser.add_parser('show', help='display plugin configuration')
show_parser.set_defaults(func=generic_show, plugin_cls=plugin_cls)
enable_parser = subparser.add_parser('enable', help='enable plugin')
enable_parser.set_defaults(func=generic_enable, plugin_cls=plugin_cls)
disable_parser = subparser.add_parser('disable', help='disable plugin')
disable_parser.set_defaults(func=generic_disable, plugin_cls=plugin_cls)
status_parser = subparser.add_parser('status', help='display plugin status')
status_parser.set_defaults(func=generic_status, plugin_cls=plugin_cls)
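# --- Illustration (not part of the original module) ---
# A minimal sketch of how a plugin-specific CLI module could reuse the generic
# parsers above; MemberOfPlugin is only an example plugin class name here.
#
# def create_parser(subparsers):
#     memberof_parser = subparsers.add_parser('memberof', help="Manage the MemberOf plugin")
#     subcommands = memberof_parser.add_subparsers(help="action")
#     add_generic_plugin_parsers(subcommands, MemberOfPlugin)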
def create_parser(subparsers):
plugin_parser = subparsers.add_parser('plugin', help="Manage plugins available on the server")
subcommands = plugin_parser.add_subparsers(help="action")
list_parser = subcommands.add_parser('list', help="List current configured (enabled and disabled) plugins")
list_parser.set_defaults(func=plugin_list)
get_parser = subcommands.add_parser('get', help='get')
get_parser.set_defaults(func=plugin_get)
get_parser.add_argument('selector', nargs='?', help='The plugin to search for')
get_dn_parser = subcommands.add_parser('get_dn', help='get_dn')
get_dn_parser.set_defaults(func=plugin_get_dn)
get_dn_parser.add_argument('dn', nargs='?', help='The plugin dn to get')
enable_parser = subcommands.add_parser('enable', help='enable a plugin in the server')
enable_parser.set_defaults(func=plugin_enable)
enable_parser.add_argument('dn', nargs='?', help='The dn to enable')
disable_parser = subcommands.add_parser('disable', help='disable the plugin configuration')
disable_parser.set_defaults(func=plugin_disable)
disable_parser.add_argument('dn', nargs='?', help='The dn to disable')
| Ilias95/lib389 | lib389/cli_conf/plugin.py | Python | gpl-3.0 | 4,136 | 0.005561 |
# -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
import logging
import superdesk
from flask import current_app as app
from settings import DAYS_TO_KEEP
from datetime import timedelta
from werkzeug.exceptions import HTTPException
from superdesk.notification import push_notification
from superdesk.io import providers
from superdesk.celery_app import celery
from superdesk.utc import utcnow
from superdesk.workflow import set_default_state
from superdesk.errors import ProviderError
from superdesk.stats import stats
from superdesk.upload import url_for_media
from superdesk.media.media_operations import download_file_from_url, process_file
from superdesk.media.renditions import generate_renditions
UPDATE_SCHEDULE_DEFAULT = {'minutes': 5}
LAST_UPDATED = 'last_updated'
STATE_INGESTED = 'ingested'
logger = logging.getLogger(__name__)
superdesk.workflow_state(STATE_INGESTED)
superdesk.workflow_action(
name='ingest'
)
def is_valid_type(provider, provider_type_filter=None):
"""Test if given provider has valid type and should be updated.
:param provider: provider to be updated
:param provider_type_filter: active provider type filter
"""
provider_type = provider.get('type')
if provider_type not in providers:
return False
if provider_type_filter and provider_type != provider_type_filter:
return False
return True
def is_scheduled(provider):
"""Test if given provider should be scheduled for update.
:param provider: ingest provider
"""
now = utcnow()
last_updated = provider.get(LAST_UPDATED, now - timedelta(days=100)) # if never updated run now
update_schedule = provider.get('update_schedule', UPDATE_SCHEDULE_DEFAULT)
return last_updated + timedelta(**update_schedule) < now
def is_closed(provider):
"""Test if provider is closed.
:param provider: ingest provider
"""
return provider.get('is_closed', False)
def filter_expired_items(provider, items):
try:
days_to_keep_content = provider.get('days_to_keep', DAYS_TO_KEEP)
expiration_date = utcnow() - timedelta(days=days_to_keep_content)
return [item for item in items if item.get('versioncreated', utcnow()) > expiration_date]
except Exception as ex:
raise ProviderError.providerFilterExpiredContentError(ex, provider)
def get_provider_rule_set(provider):
if provider.get('rule_set'):
return superdesk.get_resource_service('rule_sets').find_one(_id=provider['rule_set'], req=None)
def get_task_ttl(provider):
update_schedule = provider.get('update_schedule', UPDATE_SCHEDULE_DEFAULT)
return update_schedule.get('minutes', 0) * 60 + update_schedule.get('hours', 0) * 3600
def get_task_id(provider):
return 'update-ingest-{0}-{1}'.format(provider.get('name'), provider.get('_id'))
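# --- Illustration (not part of the original module) ---
# A minimal sketch of how the TTL and task-id helpers above behave, using a
# made-up provider dict:
def _example_task_ttl_and_id():
    provider = {'name': 'demo', '_id': 'abc123',
                'update_schedule': {'minutes': 5, 'hours': 1}}
    assert get_task_ttl(provider) == 5 * 60 + 1 * 3600   # 3900 seconds
    assert get_task_id(provider) == 'update-ingest-demo-abc123'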
class UpdateIngest(superdesk.Command):
"""Update ingest providers."""
option_list = (
superdesk.Option('--provider', '-p', dest='provider_type'),
)
def run(self, provider_type=None):
for provider in superdesk.get_resource_service('ingest_providers').get(req=None, lookup={}):
if is_valid_type(provider, provider_type) and is_scheduled(provider) and not is_closed(provider):
kwargs = {
'provider': provider,
'rule_set': get_provider_rule_set(provider)
}
update_provider.apply_async(
task_id=get_task_id(provider),
expires=get_task_ttl(provider),
kwargs=kwargs)
@celery.task
def update_provider(provider, rule_set=None):
"""
Fetches items from ingest provider as per the configuration, ingests them into Superdesk and
updates the provider.
"""
superdesk.get_resource_service('ingest_providers').update(provider['_id'], {
LAST_UPDATED: utcnow(),
# Providing the _etag as system updates to the documents shouldn't override _etag.
app.config['ETAG']: provider.get(app.config['ETAG'])
})
for items in providers[provider.get('type')].update(provider):
ingest_items(items, provider, rule_set)
stats.incr('ingest.ingested_items', len(items))
logger.info('Provider {0} updated'.format(provider['_id']))
push_notification('ingest:update')
def process_anpa_category(item, provider):
try:
anpa_categories = superdesk.get_resource_service('vocabularies').find_one(req=None, _id='categories')
if anpa_categories:
for anpa_category in anpa_categories['items']:
if anpa_category['is_active'] is True \
and item['anpa-category']['qcode'].lower() == anpa_category['value'].lower():
item['anpa-category'] = {'qcode': item['anpa-category']['qcode'], 'name': anpa_category['name']}
break
except Exception as ex:
raise ProviderError.anpaError(ex, provider)
def apply_rule_set(item, provider, rule_set=None):
"""
Applies rules set on the item to be ingested into the system. If there's no rule set then the item will
be returned without any change.
:param item: Item to be ingested
:param provider: provider object from whom the item was received
:return: item
"""
try:
if rule_set is None and provider.get('rule_set') is not None:
rule_set = superdesk.get_resource_service('rule_sets').find_one(_id=provider['rule_set'], req=None)
if rule_set and 'body_html' in item:
body = item['body_html']
for rule in rule_set['rules']:
body = body.replace(rule['old'], rule['new'])
item['body_html'] = body
return item
except Exception as ex:
raise ProviderError.ruleError(ex, provider)
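# --- Illustration (not part of the original module) ---
# A minimal sketch of the rule-set shape apply_rule_set() expects; the provider
# and rules below are made up:
def _example_apply_rule_set():
    item = {'guid': 'demo', 'body_html': 'Foo &amp; Bar'}
    rule_set = {'rules': [{'old': '&amp;', 'new': '&'}]}
    fixed = apply_rule_set(item, {'name': 'demo-provider'}, rule_set)
    assert fixed['body_html'] == 'Foo & Bar'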
def ingest_items(items, provider, rule_set=None):
all_items = filter_expired_items(provider, items)
items_dict = {doc['guid']: doc for doc in all_items}
for item in [doc for doc in all_items if doc.get('type') != 'composite']:
ingest_item(item, provider, rule_set)
for item in [doc for doc in all_items if doc.get('type') == 'composite']:
for ref in [ref for group in item.get('groups', [])
for ref in group.get('refs', []) if 'residRef' in ref]:
ref.setdefault('location', 'ingest')
itemRendition = items_dict.get(ref['residRef'], {}).get('renditions')
if itemRendition:
ref.setdefault('renditions', itemRendition)
ingest_item(item, provider, rule_set)
def ingest_item(item, provider, rule_set=None):
try:
item.setdefault('_id', item['guid'])
providers[provider.get('type')].provider = provider
item['ingest_provider'] = str(provider['_id'])
item.setdefault('source', provider.get('source', ''))
set_default_state(item, STATE_INGESTED)
if 'anpa-category' in item:
process_anpa_category(item, provider)
apply_rule_set(item, provider, rule_set)
ingest_service = superdesk.get_resource_service('ingest')
if item.get('ingest_provider_sequence') is None:
ingest_service.set_ingest_provider_sequence(item, provider)
rend = item.get('renditions', {})
if rend:
baseImageRend = rend.get('baseImage') or next(iter(rend.values()))
if baseImageRend:
href = providers[provider.get('type')].prepare_href(baseImageRend['href'])
update_renditions(item, href)
old_item = ingest_service.find_one(_id=item['guid'], req=None)
if old_item:
ingest_service.put(item['guid'], item)
else:
try:
ingest_service.post([item])
except HTTPException as e:
logger.error("Exception while persisting item in ingest collection", e)
ingest_service.put(item['guid'], item)
except ProviderError:
raise
except Exception as ex:
raise ProviderError.ingestError(ex, provider)
def update_renditions(item, href):
inserted = []
try:
content, filename, content_type = download_file_from_url(href)
file_type, ext = content_type.split('/')
metadata = process_file(content, file_type)
file_guid = app.media.put(content, filename, content_type, metadata)
inserted.append(file_guid)
rendition_spec = app.config.get('RENDITIONS', {}).get('picture', {})
renditions = generate_renditions(content, file_guid, inserted, file_type,
content_type, rendition_spec, url_for_media)
item['renditions'] = renditions
item['mimetype'] = content_type
item['filemeta'] = metadata
except Exception as io:
logger.exception(io)
for file_id in inserted:
app.media.delete(file_id)
raise
superdesk.command('ingest:update', UpdateIngest())
| petrjasek/superdesk-server | superdesk/io/commands/update_ingest.py | Python | agpl-3.0 | 9,268 | 0.002266 |
"""
Contains a function to generate and upload a LaTeX-rendered math image.
"""
import subprocess
import sys
import typing
def uploadLatex(math: typing.List[str], slackAPI: object, channel: object, users: list) -> str:
"""
Generates a LaTeX math image from the LaTeX source contained in `math`, and posts it to the
api `slackapi` in channel `channel`.
Returns a string describing any errors that occurred.
"""
toParse = "".join(math).replace("&","&")
# create a temporary directory
response = subprocess.run(["mktemp", "-d"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# check for errors
if response.returncode != 0 or response.stderr.decode() != '':
return "EE: latex: couldn't make temp. dir: '"+response.stderr.decode()+"'"
# Decode and store the temporary directory name
latexdir = response.stdout.decode().splitlines()[0]
# Generate the image using l2p
response = subprocess.run(["l2p", "-i", toParse, "-o", latexdir+"/latex_output.png"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
# Check for errors, both posting to the channel (because it's probable that a user messed up)
# as well as logging to the logfile
if response.stderr.decode() != '':
msg = "Unable to parse expression: %s: %s"
slackAPI.chat.post_message(channel['name'],
msg % ("`%s` because" % toParse, "`%s`" % response.stderr.decode()))
return "EE: latex: " + msg % ("'%s'" % toParse, "'%s'" % response.stderr.decode())
# If all went well, upload then delete the file
slackAPI.files.upload(latexdir+"/latex_output.png", channels=channel['id'])
retstr = "II: latex: uploaded image to slack (input: '%s')" % toParse
response = subprocess.run(["rm", "-r", "-f", latexdir], stderr=subprocess.PIPE)
if response.returncode != 0 or response.stderr.decode() != "":
return retstr+"\nEE: latex: error encountered during cleanup: '%s'" % response.stderr.decode()
return retstr
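# --- Illustration (not part of the original module) ---
# A minimal sketch of a call site, assuming `slack` is the bot's Slack API client
# and `chan`/`members` come from its dispatch layer (all names are made up):
#
# log_line = uploadLatex([r"\frac{a}{b}"], slack, chan, members)
# print(log_line)   # e.g. "II: latex: uploaded image to slack (input: '\frac{a}{b}')"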
| ocket8888/slackbot | slackbot/modules/math/math.py | Python | gpl-3.0 | 1,975 | 0.022785 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('ngw', '0009_config_eventdefaultperms'),
]
operations = [
migrations.DeleteModel(
name='ChoiceContactField',
),
migrations.DeleteModel(
name='DateContactField',
),
migrations.DeleteModel(
name='DateTimeContactField',
),
migrations.DeleteModel(
name='EmailContactField',
),
migrations.DeleteModel(
name='FileContactField',
),
migrations.DeleteModel(
name='ImageContactField',
),
migrations.DeleteModel(
name='LongTextContactField',
),
migrations.DeleteModel(
name='MultipleChoiceContactField',
),
migrations.DeleteModel(
name='MultipleDoubleChoiceContactField',
),
migrations.DeleteModel(
name='NumberContactField',
),
migrations.DeleteModel(
name='PasswordContactField',
),
migrations.DeleteModel(
name='PhoneContactField',
),
migrations.DeleteModel(
name='RibContactField',
),
migrations.DeleteModel(
name='TextContactField',
),
]
| nirgal/ngw | core/migrations/0010_auto_drop_proxy_models.py | Python | bsd-2-clause | 1,407 | 0 |
from temboo.Library.Google.Drive.Changes.Get import Get, GetInputSet, GetResultSet, GetChoreographyExecution
from temboo.Library.Google.Drive.Changes.List import List, ListInputSet, ListResultSet, ListChoreographyExecution
| jordanemedlock/psychtruths | temboo/core/Library/Google/Drive/Changes/__init__.py | Python | apache-2.0 | 223 | 0.008969 |
# -*- coding: utf-8 -*-
# Copyright (C) 2014-2016 Andrey Antukh <niwi@niwi.nz>
# Copyright (C) 2014-2016 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014-2016 David Barragán <bameda@dbarragan.com>
# Copyright (C) 2014-2016 Alejandro Alonso <alejandro.alonso@kaleidos.net>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.apps import AppConfig
from django.apps import apps
from django.db.models import signals
def connect_issues_signals():
from taiga.projects.tagging import signals as tagging_handlers
from . import signals as handlers
# Finished date
signals.pre_save.connect(handlers.set_finished_date_when_edit_issue,
sender=apps.get_model("issues", "Issue"),
dispatch_uid="set_finished_date_when_edit_issue")
# Tags
signals.pre_save.connect(tagging_handlers.tags_normalization,
sender=apps.get_model("issues", "Issue"),
dispatch_uid="tags_normalization_issue")
def connect_issues_custom_attributes_signals():
from taiga.projects.custom_attributes import signals as custom_attributes_handlers
signals.post_save.connect(custom_attributes_handlers.create_custom_attribute_value_when_create_issue,
sender=apps.get_model("issues", "Issue"),
dispatch_uid="create_custom_attribute_value_when_create_issue")
def connect_all_issues_signals():
connect_issues_signals()
connect_issues_custom_attributes_signals()
def disconnect_issues_signals():
signals.pre_save.disconnect(sender=apps.get_model("issues", "Issue"),
dispatch_uid="set_finished_date_when_edit_issue")
signals.pre_save.disconnect(sender=apps.get_model("issues", "Issue"),
dispatch_uid="tags_normalization_issue")
def disconnect_issues_custom_attributes_signals():
signals.post_save.disconnect(sender=apps.get_model("issues", "Issue"),
dispatch_uid="create_custom_attribute_value_when_create_issue")
def disconnect_all_issues_signals():
disconnect_issues_signals()
disconnect_issues_custom_attributes_signals()
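# --- Illustration (not part of the original module) ---
# A minimal sketch of the intended connect/disconnect pairing, e.g. silencing the
# handlers around a bulk load (the bulk_import_issues() name is made up):
#
# disconnect_all_issues_signals()
# try:
#     bulk_import_issues()
# finally:
#     connect_all_issues_signals()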
class IssuesAppConfig(AppConfig):
name = "taiga.projects.issues"
verbose_name = "Issues"
def ready(self):
connect_all_issues_signals()
| xdevelsistemas/taiga-back-community | taiga/projects/issues/apps.py | Python | agpl-3.0 | 3,000 | 0.001668 |
from biokbase.workspace.client import Workspace
import requests
import json
import sys
from time import time
from fix_workspace_info import fix_all_workspace_info
from pprint import pprint
kb_port = 9999
mini_ws_url = f"http://localhost:{kb_port}/services/ws"
mini_auth_url = f"http://localhost:{kb_port}/services/auth/testmode"
mini_ws_admin = "wsadmin"
narrative_spec_file = '../../../narrative_object.spec'
old_narrative_spec_file = 'old_narrative_object.spec'
test_narrative_data = 'narrative_test_data.json'
test_user = "kbasetest"
####
# BEFORE YOU RUN THIS:
# 1. Spin up mini_kb with the workspace env pointed to my branch:
# that is, the "-env" line in the ws command points to
# "https://raw.githubusercontent.com/briehl/mini_kb/master/deployment/conf/workspace-minikb.ini"
#
# 2. When this starts up, the workspace will complain. Auth is in testmode, and there's no test user/token set up
# for the Shock configuration. Do the following:
# a. enter the mongo container
# > docker exec -it mini_kb_ci-mongo_1 /bin/bash
# b. start mongo (just "mongo" at the prompt)
# c. Run the following to use gridFS:
# > use workspace
# > db.settings.findAndModify({ query: {backend: "shock"}, update: { $set: {"backend": "gridFS"} } })
# d. Exit that container, and restart the workspace container
# > docker-compose restart ws
#
# With the setup done, this script should do the job of creating accounts, importing the Narrative type,
# loading test data, etc.
def create_user(user_id):
"""
Returns a token for that user.
"""
headers = {
"Content-Type": "application/json"
}
r = requests.post(mini_auth_url + '/api/V2/testmodeonly/user', headers=headers, data=json.dumps({'user': user_id, 'display': "User {}".format(user_id)}))
if r.status_code != 200 and r.status_code != 400:
print("Can't create dummy user!")
r.raise_for_status()
r = requests.post(mini_auth_url + '/api/V2/testmodeonly/token', headers=headers, data=json.dumps({'user': user_id, 'type': 'Login'}))
if r.status_code != 200:
print("Can't make dummy token!")
r.raise_for_status()
token = json.loads(r.text)
return token['token']
def load_narrative_type(ws):
"""
Loads the KBaseNarrative.Narrative type info into mini kb.
ws = Workspace client configured for admin
"""
ws.request_module_ownership("KBaseNarrative")
ws.administer({
'command': 'approveModRequest',
'module': 'KBaseNarrative'
})
with open(old_narrative_spec_file, "r") as f:
old_spec = f.read()
ws.register_typespec({
'spec': old_spec,
'dryrun': 0,
'new_types': [
'Narrative',
'Cell',
'Worksheet',
'Metadata'
]
})
ws.release_module('KBaseNarrative')
for n in ws.get_module_info({'mod': 'KBaseNarrative'})['types'].keys():
if '.Narrative' in n:
old_ver = n.split('-')[-1]
with open(narrative_spec_file, "r") as f:
spec = f.read()
ws.register_typespec({
'spec': spec,
'dryrun': 0,
'new_types': []
})
ws.release_module('KBaseNarrative')
for n in ws.get_module_info({'mod': 'KBaseNarrative'})['types'].keys():
if '.Narrative' in n:
new_ver = n.split('-')[-1]
return {
'old_ver': old_ver,
'new_ver': new_ver
}
def load_narrative_test_data(ws, vers):
"""
Loads the test data set into mini kb ws.
    Returns a list with one dict per workspace (should be ~7-10 of them), each shaped like:
    {
        ws_id: int,
        ws_info: list,
        nar_info: list of object info for the saved Narratives,
        perms: dict,
        correct_meta: dict (expected workspace metadata, for validation),
        loaded_meta: dict (metadata actually written to the workspace)
    }
"""
with open(test_narrative_data, 'r') as f:
test_data = json.loads(f.read().strip())
uploaded_data = list()
for ws_data in test_data["old"]:
uploaded_data.append(_load_workspace_data(ws, ws_data, len(uploaded_data), vers['old_ver']))
for ws_data in test_data["new"]:
uploaded_data.append(_load_workspace_data(ws, ws_data, len(uploaded_data), vers['new_ver']))
return uploaded_data
def _load_workspace_data(ws, ws_data, idx, narrative_ver):
"""
Loads up a single workspace with data and returns a dict about it.
Dict contains:
id = the workspace id
perms = the workspace permissions
correct_meta = the correct workspace metadata (for validation)
"""
print(ws_data.keys())
narratives = ws_data['narratives']
ws_meta = ws_data['ws_meta']
ws_info = ws.create_workspace({"workspace": "NarrativeWS-{}-{}".format(idx, int(time()*1000))})
ws_id = ws_info[0]
info = {
"ws_id": ws_id,
"ws_info": ws_info,
"nar_info": [],
"perms": ws_data["perms"],
"correct_meta": ws_data["correct_meta"],
"loaded_meta": ws_meta
}
if len(narratives):
for idx, nar in enumerate(narratives):
objects = ws.save_objects({
'id': ws_id,
'objects': [{
'type': 'KBaseNarrative.Narrative-{}'.format(narrative_ver),
'data': nar,
'name': 'Narrative-{}'.format(idx)
}]
})
info['nar_info'].append(objects[0])
if len(ws_meta):
ws.alter_workspace_metadata({
'wsi': {'id': ws_id},
'new': ws_meta
})
perms = ws_data["perms"]
if len(perms) > 1:
admin_perm = perms['wsadmin']
ws.set_permissions({
'id': ws_id,
'new_permission': admin_perm,
'users': ['wsadmin']
})
return info
def main():
admin_token = create_user(mini_ws_admin)
admin_ws = Workspace(url=mini_ws_url, token=admin_token)
versions = load_narrative_type(admin_ws)
versions = {
'old_ver': '1.0',
'new_ver': '2.0'
}
user_token = create_user(test_user)
user_ws = Workspace(url=mini_ws_url, token=user_token)
loaded_info = load_narrative_test_data(user_ws, versions)
pprint(loaded_info)
# fix_all_workspace_info(mini_ws_url, mini_auth_url, admin_token, 100)
# for ws_data in loaded_info:
# ws_id = ws_data['ws_id']
# ws_meta = user_ws.get_workspace_info({'id': ws_id})[8]
# try:
# assert(ws_meta == ws_data['correct_meta'])
# except:
# print("WS: {}".format(ws_id))
# pprint(ws_meta)
# print("doesn't match")
# pprint(ws_data['correct_meta'])
if __name__ == '__main__':
sys.exit(main())
| pranjan77/narrative | src/scripts/test_data_uploader/populate_mini_ws.py | Python | mit | 6,648 | 0.002106 |
from pythonforandroid.toolchain import Bootstrap, current_directory, info, info_main, shprint
from pythonforandroid.util import ensure_dir
from os.path import join
import sh
class WebViewBootstrap(Bootstrap):
name = 'webview'
recipe_depends = list(
set(Bootstrap.recipe_depends).union({'genericndkbuild'})
)
def assemble_distribution(self):
info_main('# Creating Android project from build and {} bootstrap'.format(
self.name))
shprint(sh.rm, '-rf', self.dist_dir)
shprint(sh.cp, '-r', self.build_dir, self.dist_dir)
with current_directory(self.dist_dir):
with open('local.properties', 'w') as fileh:
fileh.write('sdk.dir={}'.format(self.ctx.sdk_dir))
arch = self.ctx.archs[0]
if len(self.ctx.archs) > 1:
raise ValueError('built for more than one arch, but bootstrap cannot handle that yet')
info('Bootstrap running with arch {}'.format(arch))
with current_directory(self.dist_dir):
info('Copying python distribution')
self.distribute_libs(arch, [self.ctx.get_libs_dir(arch.arch)])
self.distribute_aars(arch)
self.distribute_javaclasses(self.ctx.javaclass_dir,
dest_dir=join("src", "main", "java"))
python_bundle_dir = join('_python_bundle', '_python_bundle')
ensure_dir(python_bundle_dir)
site_packages_dir = self.ctx.python_recipe.create_python_bundle(
join(self.dist_dir, python_bundle_dir), arch)
if 'sqlite3' not in self.ctx.recipe_build_order:
with open('blacklist.txt', 'a') as fileh:
fileh.write('\nsqlite3/*\nlib-dynload/_sqlite3.so\n')
if not self.ctx.with_debug_symbols:
self.strip_libraries(arch)
self.fry_eggs(site_packages_dir)
super().assemble_distribution()
bootstrap = WebViewBootstrap()
| germn/python-for-android | pythonforandroid/bootstraps/webview/__init__.py | Python | mit | 1,982 | 0.001514 |
from uber.tests import *
@pytest.fixture
def attendee_id():
with Session() as session:
return session.query(Attendee).filter_by(first_name='Regular', last_name='Attendee').one().id
@pytest.fixture(autouse=True)
def mock_apply(monkeypatch):
monkeypatch.setattr(Attendee, 'apply', Mock())
return Attendee.apply
def test_invalid_gets():
with Session() as session:
pytest.raises(Exception, session.attendee)
pytest.raises(Exception, session.attendee, '')
pytest.raises(Exception, session.attendee, [])
pytest.raises(Exception, session.attendee, None)
pytest.raises(Exception, session.attendee, str(uuid4()))
pytest.raises(Exception, session.attendee, {'id': str(uuid4())})
def test_basic_get(attendee_id, mock_apply):
with Session() as session:
assert session.attendee(attendee_id).first_name == 'Regular'
assert not mock_apply.called
assert session.attendee(id=attendee_id).first_name == 'Regular'
assert not mock_apply.called
assert session.attendee({'id': attendee_id}).first_name == 'Regular'
assert mock_apply.called
def test_empty_get(mock_apply):
with Session() as session:
assert session.attendee({}).paid == NOT_PAID # basic sanity check
assert mock_apply.called
def test_ignore_csrf(request):
with Session() as session:
pytest.raises(Exception, session.attendee, {'paid': NEED_NOT_PAY})
session.attendee({'paid': NEED_NOT_PAY}, ignore_csrf=True)
session.attendee({'paid': NEED_NOT_PAY}, allowed=['paid'])
request.addfinalizer(lambda: setattr(cherrypy.request, 'method', 'GET'))
cherrypy.request.method = 'POST'
session.attendee({'paid': NEED_NOT_PAY})
| Anthrocon-Reg/ubersystem | uber/tests/models/test_getter.py | Python | gpl-3.0 | 1,761 | 0.004543 |
#-- GAUDI jobOptions generated on Fri Jul 17 16:39:48 2015
#-- Contains event types :
#-- 11104124 - 106 files - 1087377 events - 233.68 GBytes
#-- Extra information about the data processing phases:
#-- Processing Pass Step-124620
#-- StepId : 124620
#-- StepName : Digi13 with G4 dE/dx
#-- ApplicationName : Boole
#-- ApplicationVersion : v26r3
#-- OptionFiles : $APPCONFIGOPTS/Boole/Default.py;$APPCONFIGOPTS/Boole/DataType-2012.py;$APPCONFIGOPTS/Boole/Boole-SiG4EnergyDeposit.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py
#-- DDDB : fromPreviousStep
#-- CONDDB : fromPreviousStep
#-- ExtraPackages : AppConfig.v3r164
#-- Visible : Y
#-- Processing Pass Step-124630
#-- StepId : 124630
#-- StepName : Stripping20-NoPrescalingFlagged for Sim08
#-- ApplicationName : DaVinci
#-- ApplicationVersion : v32r2p1
#-- OptionFiles : $APPCONFIGOPTS/DaVinci/DV-Stripping20-Stripping-MC-NoPrescaling.py;$APPCONFIGOPTS/DaVinci/DataType-2012.py;$APPCONFIGOPTS/DaVinci/InputType-DST.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py
#-- DDDB : fromPreviousStep
#-- CONDDB : fromPreviousStep
#-- ExtraPackages : AppConfig.v3r164
#-- Visible : Y
#-- Processing Pass Step-125877
#-- StepId : 125877
#-- StepName : L0 emulation - TCK 003d
#-- ApplicationName : Moore
#-- ApplicationVersion : v20r4
#-- OptionFiles : $APPCONFIGOPTS/L0App/L0AppSimProduction.py;$APPCONFIGOPTS/L0App/L0AppTCK-0x003d.py;$APPCONFIGOPTS/L0App/DataType-2012.py
#-- DDDB : fromPreviousStep
#-- CONDDB : fromPreviousStep
#-- ExtraPackages : AppConfig.v3r200
#-- Visible : N
#-- Processing Pass Step-127200
#-- StepId : 127200
#-- StepName : TCK-0x4097003d Flagged for Sim08 2012
#-- ApplicationName : Moore
#-- ApplicationVersion : v14r2p1
#-- OptionFiles : $APPCONFIGOPTS/Moore/MooreSimProductionForSeparateL0AppStep.py;$APPCONFIGOPTS/Conditions/TCK-0x4097003d.py;$APPCONFIGOPTS/Moore/DataType-2012.py
#-- DDDB : fromPreviousStep
#-- CONDDB : fromPreviousStep
#-- ExtraPackages : AppConfig.v3r206
#-- Visible : Y
#-- Processing Pass Step-124834
#-- StepId : 124834
#-- StepName : Reco14a for MC
#-- ApplicationName : Brunel
#-- ApplicationVersion : v43r2p7
#-- OptionFiles : $APPCONFIGOPTS/Brunel/DataType-2012.py;$APPCONFIGOPTS/Brunel/MC-WithTruth.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py
#-- DDDB : fromPreviousStep
#-- CONDDB : fromPreviousStep
#-- ExtraPackages : AppConfig.v3r164
#-- Visible : Y
#-- Processing Pass Step-127148
#-- StepId : 127148
#-- StepName : Sim08g - 2012 - MU - Pythia8
#-- ApplicationName : Gauss
#-- ApplicationVersion : v45r9
#-- OptionFiles : $APPCONFIGOPTS/Gauss/Sim08-Beam4000GeV-mu100-2012-nu2.5.py;$DECFILESROOT/options/@{eventType}.py;$LBPYTHIA8ROOT/options/Pythia8.py;$APPCONFIGOPTS/Gauss/G4PL_FTFP_BERT_EmNoCuts.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py
#-- DDDB : dddb-20130929-1
#-- CONDDB : sim-20130522-1-vc-mu100
#-- ExtraPackages : AppConfig.v3r205;DecFiles.v27r37
#-- Visible : Y
from Gaudi.Configuration import *
from GaudiConf import IOHelper
IOHelper('ROOT').inputFiles(['LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000001_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000002_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000003_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000004_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000005_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000006_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000007_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000008_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000009_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000010_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000011_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000012_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000013_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000014_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000015_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000016_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000017_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000018_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000019_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000020_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000021_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000022_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000023_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000024_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000025_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000026_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000027_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000032_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000033_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000034_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000045_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000057_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000058_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000062_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000073_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000074_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000075_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000076_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000077_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000078_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000079_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000080_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000081_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000082_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000083_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000084_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000085_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000086_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000087_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000088_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000089_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000090_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000091_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000092_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000093_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000094_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000095_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000096_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000097_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000098_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000099_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000100_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000101_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000102_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000103_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000104_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000105_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000106_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000107_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000108_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000109_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000110_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000111_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000112_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000113_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000114_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000115_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000116_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000117_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000118_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000119_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000120_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000121_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000122_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000123_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000124_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000125_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000126_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000127_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000128_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000129_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000130_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000131_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000132_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000133_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000134_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000135_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000136_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000137_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000138_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000139_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000140_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000141_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000142_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000143_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000144_1.allstreams.dst'
], clear=True)
| Williams224/davinci-scripts | ksteta3pi/Consideredbkg/MC_12_11104124_MagUp.py | Python | mit | 12,177 | 0.02825 |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Mission()
result.template = "object/mission/base/shared_base_mission.iff"
result.attribute_template_id = -1
result.stfName("string_id_table","")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
| anhstudios/swganh | data/scripts/templates/object/mission/base/shared_base_mission.py | Python | mit | 435 | 0.048276 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# vim: expandtab:tabstop=4:shiftwidth=4
''' modify_yaml ansible module '''
import yaml
DOCUMENTATION = '''
---
module: modify_yaml
short_description: Modify yaml key value pairs
author: Andrew Butcher
requirements: [ ]
'''
EXAMPLES = '''
- modify_yaml:
dest: /etc/origin/master/master-config.yaml
yaml_key: 'kubernetesMasterConfig.masterCount'
yaml_value: 2
'''
# pylint: disable=missing-docstring
def set_key(yaml_data, yaml_key, yaml_value):
changes = []
ptr = yaml_data
for key in yaml_key.split('.'):
if key not in ptr and key != yaml_key.split('.')[-1]:
ptr[key] = {}
ptr = ptr[key]
elif key == yaml_key.split('.')[-1]:
if (key in ptr and module.safe_eval(ptr[key]) != yaml_value) or (key not in ptr):
ptr[key] = yaml_value
changes.append((yaml_key, yaml_value))
else:
ptr = ptr[key]
return changes
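# --- Illustration (not part of the original module) ---
# A minimal sketch of how set_key() walks a dotted key (intermediate dicts are
# created as needed and the leaf is set in place); the data below is made up:
#
# data = {'kubernetesMasterConfig': {}}
# set_key(data, 'kubernetesMasterConfig.masterCount', 2)
# # -> returns [('kubernetesMasterConfig.masterCount', 2)]
# # -> data is now {'kubernetesMasterConfig': {'masterCount': 2}}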
def main():
''' Modify key (supplied in jinja2 dot notation) in yaml file, setting
the key to the desired value.
'''
# disabling pylint errors for global-variable-undefined and invalid-name
# for 'global module' usage, since it is required to use ansible_facts
# pylint: disable=global-variable-undefined, invalid-name,
# redefined-outer-name
global module
module = AnsibleModule(
argument_spec=dict(
dest=dict(required=True),
yaml_key=dict(required=True),
yaml_value=dict(required=True),
backup=dict(required=False, default=True, type='bool'),
),
supports_check_mode=True,
)
dest = module.params['dest']
yaml_key = module.params['yaml_key']
yaml_value = module.safe_eval(module.params['yaml_value'])
backup = module.params['backup']
# Represent null values as an empty string.
# pylint: disable=missing-docstring, unused-argument
def none_representer(dumper, data):
return yaml.ScalarNode(tag=u'tag:yaml.org,2002:null', value=u'')
yaml.add_representer(type(None), none_representer)
try:
yaml_file = open(dest)
yaml_data = yaml.safe_load(yaml_file.read())
yaml_file.close()
changes = set_key(yaml_data, yaml_key, yaml_value)
if len(changes) > 0:
if backup:
module.backup_local(dest)
yaml_file = open(dest, 'w')
yaml_string = yaml.dump(yaml_data, default_flow_style=False)
yaml_string = yaml_string.replace('\'\'', '""')
yaml_file.write(yaml_string)
yaml_file.close()
return module.exit_json(changed=(len(changes) > 0), changes=changes)
# ignore broad-except error to avoid stack trace to ansible user
# pylint: disable=broad-except
except Exception, e:
return module.fail_json(msg=str(e))
# ignore pylint errors related to the module_utils import
# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
| rhdedgar/openshift-tools | openshift/installer/vendored/openshift-ansible-3.4.12-1/library/modify_yaml.py | Python | apache-2.0 | 3,141 | 0.000955 |
"""
desispec.fiberflat
==================
Utility functions to compute a fiber flat correction and apply it
We try to keep all the (fits) io separated.
"""
from __future__ import absolute_import, division
import numpy as np
from desispec.io import read_frame
from desispec.io import write_fiberflat
from desispec.fiberflat import compute_fiberflat
from desispec.log import get_logger
from desispec.io.qa import load_qa_frame
from desispec.io import write_qa_frame
from desispec.qa import qa_plots
import argparse
def parse(options=None):
parser = argparse.ArgumentParser(description="Compute the fiber flat field correction from a DESI continuum lamp frame")
parser.add_argument('--infile', type = str, default = None, required=True,
help = 'path of DESI frame fits file corresponding to a continuum lamp exposure')
parser.add_argument('--outfile', type = str, default = None, required=True,
help = 'path of DESI fiberflat fits file')
parser.add_argument('--qafile', type=str, default=None, required=False,
help='path of QA file')
parser.add_argument('--qafig', type = str, default = None, required=False,
help = 'path of QA figure file')
args = None
if options is None:
args = parser.parse_args()
else:
args = parser.parse_args(options)
return args
def main(args) :
log=get_logger()
log.info("starting")
# Process
frame = read_frame(args.infile)
fiberflat = compute_fiberflat(frame)
# QA
if (args.qafile is not None):
log.info("performing fiberflat QA")
# Load
qaframe = load_qa_frame(args.qafile, frame, flavor=frame.meta['FLAVOR'])
# Run
qaframe.run_qa('FIBERFLAT', (frame, fiberflat))
# Write
if args.qafile is not None:
write_qa_frame(args.qafile, qaframe)
log.info("successfully wrote {:s}".format(args.qafile))
# Figure(s)
if args.qafig is not None:
qa_plots.frame_fiberflat(args.qafig, qaframe, frame, fiberflat)
# Write
write_fiberflat(args.outfile, fiberflat, frame.meta)
log.info("successfully wrote %s"%args.outfile)
| gdhungana/desispec | py/desispec/scripts/fiberflat.py | Python | bsd-3-clause | 2,238 | 0.011171 |
class Solution(object):
def isSelfCrossing(self, x):
"""
:type x: List[int]
:rtype: bool
"""
inf = float('inf')
n = len(x)
if n < 3:
return False
ruld = [0, 0, 0, 0] # right, up, left, down
next_max = inf
current = [-x[1], x[0]]
for i, elem in enumerate(x[2:], 2):
i %= 4
if elem >= next_max:
return True
xy = 1 if i in {0, 2} else 0
pn = 1 if i in {0, 3} else -1
new = current[xy] + pn * elem
if pn * new > pn * ruld[i - 3]:
next_max = inf
else:
if next_max is inf and pn * new >= pn * ruld[i - 1]:
ruld[i - 2] = ruld[i]
next_max = abs(ruld[i - 2] - current[xy ^ 1])
ruld[i - 1], current[xy] = current[xy], new
return False
assert Solution().isSelfCrossing([2, 1, 1, 2])
assert not Solution().isSelfCrossing([1, 2, 3, 4])
assert Solution().isSelfCrossing([1, 1, 1, 1])
assert not Solution().isSelfCrossing([3,3,4,2,2])
assert Solution().isSelfCrossing([1,1,2,1,1])
assert not Solution().isSelfCrossing([3,3,3,2,1,1])
| wufangjie/leetcode | 335. Self Crossing.py | Python | gpl-3.0 | 1,217 | 0.012325 |
PROJECT_PATH = __path__[0]
TIEMPO_REGISTRY = {}
REDIS_GROUP_NAMESPACE = 'tiempogroup'
RECENT_KEY = 'tiempo:recent_tasks'
RESULT_PREFIX = 'tiempo:task_result'
__version__ = "1.2.3"
| hangarunderground/tiempo | tiempo/__init__.py | Python | gpl-2.0 | 183 | 0 |
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generic presubmit checks that can be reused by other presubmit checks."""
import os as _os
_HERE = _os.path.dirname(_os.path.abspath(__file__))
### Description checks
def CheckChangeHasTestField(input_api, output_api):
"""Requires that the changelist have a TEST= field."""
if input_api.change.TEST:
return []
else:
return [output_api.PresubmitNotifyResult(
'If this change requires manual test instructions to QA team, add '
'TEST=[instructions].')]
def CheckChangeHasBugField(input_api, output_api):
"""Requires that the changelist have a BUG= field."""
if input_api.change.BUG:
return []
else:
return [output_api.PresubmitNotifyResult(
'If this change has an associated bug, add BUG=[bug number].')]
def CheckChangeHasTestedField(input_api, output_api):
"""Requires that the changelist have a TESTED= field."""
if input_api.change.TESTED:
return []
else:
return [output_api.PresubmitError('Changelist must have a TESTED= field.')]
def CheckChangeHasQaField(input_api, output_api):
"""Requires that the changelist have a QA= field."""
if input_api.change.QA:
return []
else:
return [output_api.PresubmitError('Changelist must have a QA= field.')]
def CheckDoNotSubmitInDescription(input_api, output_api):
"""Checks that the user didn't add 'DO NOT ''SUBMIT' to the CL description.
"""
keyword = 'DO NOT ''SUBMIT'
if keyword in input_api.change.DescriptionText():
return [output_api.PresubmitError(
keyword + ' is present in the changelist description.')]
else:
return []
def CheckChangeHasDescription(input_api, output_api):
"""Checks the CL description is not empty."""
text = input_api.change.DescriptionText()
if text.strip() == '':
if input_api.is_committing:
return [output_api.PresubmitError('Add a description to the CL.')]
else:
return [output_api.PresubmitNotifyResult('Add a description to the CL.')]
return []
def CheckChangeWasUploaded(input_api, output_api):
"""Checks that the issue was uploaded before committing."""
if input_api.is_committing and not input_api.change.issue:
return [output_api.PresubmitError(
'Issue wasn\'t uploaded. Please upload first.')]
return []
### Content checks
def CheckDoNotSubmitInFiles(input_api, output_api):
"""Checks that the user didn't add 'DO NOT ''SUBMIT' to any files."""
# We want to check every text file, not just source files.
file_filter = lambda x : x
keyword = 'DO NOT ''SUBMIT'
errors = _FindNewViolationsOfRule(lambda _, line : keyword not in line,
input_api, file_filter)
text = '\n'.join('Found %s in %s' % (keyword, loc) for loc in errors)
if text:
return [output_api.PresubmitError(text)]
return []
def CheckChangeLintsClean(input_api, output_api, source_file_filter=None):
"""Checks that all '.cc' and '.h' files pass cpplint.py."""
_RE_IS_TEST = input_api.re.compile(r'.*tests?.(cc|h)$')
result = []
cpplint = input_api.cpplint
# Access to a protected member _XX of a client class
# pylint: disable=W0212
cpplint._cpplint_state.ResetErrorCounts()
# Justifications for each filter:
#
# - build/include : Too many; fix in the future.
# - build/include_order : Not happening; #ifdefed includes.
# - build/namespace : I'm surprised by how often we violate this rule.
# - readability/casting : Mistakes a whole bunch of function pointer.
# - runtime/int : Can be fixed long term; volume of errors too high
# - runtime/virtual : Broken now, but can be fixed in the future?
# - whitespace/braces : We have a lot of explicit scoping in chrome code.
cpplint._SetFilters('-build/include,-build/include_order,-build/namespace,'
'-readability/casting,-runtime/int,-runtime/virtual,'
'-whitespace/braces')
# We currently are more strict with normal code than unit tests; 4 and 5 are
# the verbosity level that would normally be passed to cpplint.py through
# --verbose=#. Hopefully, in the future, we can be more verbose.
files = [f.AbsoluteLocalPath() for f in
input_api.AffectedSourceFiles(source_file_filter)]
for file_name in files:
if _RE_IS_TEST.match(file_name):
level = 5
else:
level = 4
cpplint.ProcessFile(file_name, level)
if cpplint._cpplint_state.error_count > 0:
if input_api.is_committing:
res_type = output_api.PresubmitError
else:
res_type = output_api.PresubmitPromptWarning
result = [res_type('Changelist failed cpplint.py check.')]
return result
def CheckChangeHasNoCR(input_api, output_api, source_file_filter=None):
"""Checks no '\r' (CR) character is in any source files."""
cr_files = []
for f in input_api.AffectedSourceFiles(source_file_filter):
if '\r' in input_api.ReadFile(f, 'rb'):
cr_files.append(f.LocalPath())
if cr_files:
return [output_api.PresubmitPromptWarning(
'Found a CR character in these files:', items=cr_files)]
return []
def CheckSvnModifiedDirectories(input_api, output_api, source_file_filter=None):
"""Checks for files in svn modified directories.
  They will get submitted by accident because svn commits recursively by
default, and that's very dangerous.
"""
if input_api.change.scm != 'svn':
return []
errors = []
current_cl_files = input_api.change.GetModifiedFiles()
all_modified_files = input_api.change.GetAllModifiedFiles()
# Filter out files in the current CL.
modified_files = [f for f in all_modified_files if f not in current_cl_files]
modified_abspaths = [input_api.os_path.abspath(f) for f in modified_files]
for f in input_api.AffectedFiles(file_filter=source_file_filter):
if f.Action() == 'M' and f.IsDirectory():
curpath = f.AbsoluteLocalPath()
bad_files = []
# Check if any of the modified files in other CLs are under curpath.
for i in xrange(len(modified_files)):
abspath = modified_abspaths[i]
if input_api.os_path.commonprefix([curpath, abspath]) == curpath:
bad_files.append(modified_files[i])
if bad_files:
if input_api.is_committing:
error_type = output_api.PresubmitPromptWarning
else:
error_type = output_api.PresubmitNotifyResult
errors.append(error_type(
'Potential accidental commits in changelist %s:' % f.LocalPath(),
items=bad_files))
return errors
def CheckChangeHasOnlyOneEol(input_api, output_api, source_file_filter=None):
"""Checks the files ends with one and only one \n (LF)."""
eof_files = []
for f in input_api.AffectedSourceFiles(source_file_filter):
contents = input_api.ReadFile(f, 'rb')
# Check that the file ends in one and only one newline character.
if len(contents) > 1 and (contents[-1:] != '\n' or contents[-2:-1] == '\n'):
eof_files.append(f.LocalPath())
if eof_files:
return [output_api.PresubmitPromptWarning(
'These files should end in one (and only one) newline character:',
items=eof_files)]
return []
def CheckChangeHasNoCrAndHasOnlyOneEol(input_api, output_api,
source_file_filter=None):
"""Runs both CheckChangeHasNoCR and CheckChangeHasOnlyOneEOL in one pass.
It is faster because it is reading the file only once.
"""
cr_files = []
eof_files = []
for f in input_api.AffectedSourceFiles(source_file_filter):
contents = input_api.ReadFile(f, 'rb')
if '\r' in contents:
cr_files.append(f.LocalPath())
# Check that the file ends in one and only one newline character.
if len(contents) > 1 and (contents[-1:] != '\n' or contents[-2:-1] == '\n'):
eof_files.append(f.LocalPath())
outputs = []
if cr_files:
outputs.append(output_api.PresubmitPromptWarning(
'Found a CR character in these files:', items=cr_files))
if eof_files:
outputs.append(output_api.PresubmitPromptWarning(
'These files should end in one (and only one) newline character:',
items=eof_files))
return outputs
def _ReportErrorFileAndLine(filename, line_num, dummy_line):
"""Default error formatter for _FindNewViolationsOfRule."""
return '%s:%s' % (filename, line_num)
def _FindNewViolationsOfRule(callable_rule, input_api, source_file_filter=None,
error_formatter=_ReportErrorFileAndLine):
"""Find all newly introduced violations of a per-line rule (a callable).
Arguments:
callable_rule: a callable taking a file extension and line of input and
returning True if the rule is satisfied and False if there was a problem.
input_api: object to enumerate the affected files.
source_file_filter: a filter to be passed to the input api.
error_formatter: a callable taking (filename, line_number, line) and
returning a formatted error string.
Returns:
A list of the newly-introduced violations reported by the rule.
"""
errors = []
for f in input_api.AffectedFiles(include_deletes=False,
file_filter=source_file_filter):
# For speed, we do two passes, checking first the full file. Shelling out
# to the SCM to determine the changed region can be quite expensive on
# Win32. Assuming that most files will be kept problem-free, we can
# skip the SCM operations most of the time.
extension = str(f.LocalPath()).rsplit('.', 1)[-1]
if all(callable_rule(extension, line) for line in f.NewContents()):
continue # No violation found in full text: can skip considering diff.
for line_num, line in f.ChangedContents():
if not callable_rule(extension, line):
errors.append(error_formatter(f.LocalPath(), line_num, line))
return errors
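# A minimal illustration of the callable_rule contract documented above (not
# part of this module): the rule receives (file_extension, line) and returns
# True when the line is acceptable. The 'FIXME' marker is an arbitrary example.
#
#   errors = _FindNewViolationsOfRule(lambda ext, line: 'FIXME' not in line,
#                                     input_api, source_file_filter)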
def CheckChangeHasNoTabs(input_api, output_api, source_file_filter=None):
"""Checks that there are no tab characters in any of the text files to be
submitted.
"""
# In addition to the filter, make sure that makefiles are blacklisted.
if not source_file_filter:
# It's the default filter.
source_file_filter = input_api.FilterSourceFile
def filter_more(affected_file):
basename = input_api.os_path.basename(affected_file.LocalPath())
return (not (basename in ('Makefile', 'makefile') or
basename.endswith('.mk')) and
source_file_filter(affected_file))
tabs = _FindNewViolationsOfRule(lambda _, line : '\t' not in line,
input_api, filter_more)
if tabs:
return [output_api.PresubmitPromptWarning('Found a tab character in:',
long_text='\n'.join(tabs))]
return []
def CheckChangeTodoHasOwner(input_api, output_api, source_file_filter=None):
"""Checks that the user didn't add TODO(name) without an owner."""
unowned_todo = input_api.re.compile('TO''DO[^(]')
errors = _FindNewViolationsOfRule(lambda _, x : not unowned_todo.search(x),
input_api, source_file_filter)
errors = ['Found TO''DO with no owner in ' + x for x in errors]
if errors:
return [output_api.PresubmitPromptWarning('\n'.join(errors))]
return []
def CheckChangeHasNoStrayWhitespace(input_api, output_api,
source_file_filter=None):
"""Checks that there is no stray whitespace at source lines end."""
errors = _FindNewViolationsOfRule(lambda _, line : line.rstrip() == line,
input_api, source_file_filter)
if errors:
return [output_api.PresubmitPromptWarning(
'Found line ending with white spaces in:',
long_text='\n'.join(errors))]
return []
def CheckLongLines(input_api, output_api, maxlen, source_file_filter=None):
"""Checks that there aren't any lines longer than maxlen characters in any of
the text files to be submitted.
"""
maxlens = {
'java': 100,
# This is specifically for Android's handwritten makefiles (Android.mk).
'mk': 200,
'': maxlen,
}
# Language specific exceptions to max line length.
# '.h' is considered an obj-c file extension, since OBJC_EXCEPTIONS are a
# superset of CPP_EXCEPTIONS.
CPP_FILE_EXTS = ('c', 'cc')
CPP_EXCEPTIONS = ('#define', '#endif', '#if', '#include', '#pragma')
JAVA_FILE_EXTS = ('java',)
JAVA_EXCEPTIONS = ('import ', 'package ')
OBJC_FILE_EXTS = ('h', 'm', 'mm')
OBJC_EXCEPTIONS = ('#define', '#endif', '#if', '#import', '#include',
'#pragma')
LANGUAGE_EXCEPTIONS = [
(CPP_FILE_EXTS, CPP_EXCEPTIONS),
(JAVA_FILE_EXTS, JAVA_EXCEPTIONS),
(OBJC_FILE_EXTS, OBJC_EXCEPTIONS),
]
def no_long_lines(file_extension, line):
# Check for language specific exceptions.
if any(file_extension in exts and line.startswith(exceptions)
for exts, exceptions in LANGUAGE_EXCEPTIONS):
return True
file_maxlen = maxlens.get(file_extension, maxlens[''])
    # Stupidly long symbols need to be worked around if they take 66% of the line.
long_symbol = file_maxlen * 2 / 3
# Hard line length limit at 50% more.
extra_maxlen = file_maxlen * 3 / 2
line_len = len(line)
if line_len <= file_maxlen:
return True
if line_len > extra_maxlen:
return False
if any((url in line) for url in ('file://', 'http://', 'https://')):
return True
if 'url(' in line and file_extension == 'css':
return True
return input_api.re.match(
r'.*[A-Za-z][A-Za-z_0-9]{%d,}.*' % long_symbol, line)
def format_error(filename, line_num, line):
return '%s, line %s, %s chars' % (filename, line_num, len(line))
errors = _FindNewViolationsOfRule(no_long_lines, input_api,
source_file_filter,
error_formatter=format_error)
if errors:
msg = 'Found lines longer than %s characters (first 5 shown).' % maxlen
return [output_api.PresubmitPromptWarning(msg, items=errors[:5])]
else:
return []
def CheckLicense(input_api, output_api, license_re, source_file_filter=None,
accept_empty_files=True):
"""Verifies the license header.
"""
license_re = input_api.re.compile(license_re, input_api.re.MULTILINE)
bad_files = []
for f in input_api.AffectedSourceFiles(source_file_filter):
contents = input_api.ReadFile(f, 'rb')
if accept_empty_files and not contents:
continue
if not license_re.search(contents):
bad_files.append(f.LocalPath())
if bad_files:
if input_api.is_committing:
res_type = output_api.PresubmitPromptWarning
else:
res_type = output_api.PresubmitNotifyResult
return [res_type(
'License must match:\n%s\n' % license_re.pattern +
'Found a bad license header in these files:', items=bad_files)]
return []
def CheckChangeSvnEolStyle(input_api, output_api, source_file_filter=None):
"""Checks that the source files have svn:eol-style=LF."""
return CheckSvnProperty(input_api, output_api,
'svn:eol-style', 'LF',
input_api.AffectedSourceFiles(source_file_filter))
def CheckSvnForCommonMimeTypes(input_api, output_api):
"""Checks that common binary file types have the correct svn:mime-type."""
output = []
files = input_api.AffectedFiles(include_deletes=False)
def IsExts(x, exts):
path = x.LocalPath()
for extension in exts:
if path.endswith(extension):
return True
return False
def FilterFiles(extension):
return filter(lambda x: IsExts(x, extension), files)
def RunCheck(mime_type, files):
output.extend(CheckSvnProperty(input_api, output_api, 'svn:mime-type',
mime_type, files))
RunCheck('application/pdf', FilterFiles(['.pdf']))
RunCheck('image/bmp', FilterFiles(['.bmp']))
RunCheck('image/gif', FilterFiles(['.gif']))
RunCheck('image/png', FilterFiles(['.png']))
RunCheck('image/jpeg', FilterFiles(['.jpg', '.jpeg', '.jpe']))
RunCheck('image/vnd.microsoft.icon', FilterFiles(['.ico']))
return output
def CheckSvnProperty(input_api, output_api, prop, expected, affected_files):
"""Checks that affected_files files have prop=expected."""
if input_api.change.scm != 'svn':
return []
bad = filter(lambda f: f.Property(prop) != expected, affected_files)
if bad:
if input_api.is_committing:
res_type = output_api.PresubmitError
else:
res_type = output_api.PresubmitNotifyResult
message = 'Run the command: svn pset %s %s \\' % (prop, expected)
return [res_type(message, items=bad)]
return []
### Other checks
def CheckDoNotSubmit(input_api, output_api):
return (
CheckDoNotSubmitInDescription(input_api, output_api) +
CheckDoNotSubmitInFiles(input_api, output_api)
)
def CheckTreeIsOpen(input_api, output_api,
url=None, closed=None, json_url=None):
"""Check whether to allow commit without prompt.
Supports two styles:
1. Checks that an url's content doesn't match a regexp that would mean that
the tree is closed. (old)
2. Check the json_url to decide whether to allow commit without prompt.
Args:
input_api: input related apis.
output_api: output related apis.
url: url to use for regex based tree status.
closed: regex to match for closed status.
json_url: url to download json style status.
"""
if not input_api.is_committing:
return []
try:
if json_url:
connection = input_api.urllib2.urlopen(json_url)
status = input_api.json.loads(connection.read())
connection.close()
if not status['can_commit_freely']:
short_text = 'Tree state is: ' + status['general_state']
long_text = status['message'] + '\n' + json_url
return [output_api.PresubmitError(short_text, long_text=long_text)]
else:
# TODO(bradnelson): drop this once all users are gone.
connection = input_api.urllib2.urlopen(url)
status = connection.read()
connection.close()
if input_api.re.match(closed, status):
long_text = status + '\n' + url
return [output_api.PresubmitError('The tree is closed.',
long_text=long_text)]
except IOError as e:
return [output_api.PresubmitError('Error fetching tree status.',
long_text=str(e))]
return []
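# Sketch of the JSON payload this check expects from json_url, inferred from
# the fields read above ('can_commit_freely', 'general_state', 'message');
# the schema served by a real tree-status service may differ.
#
#   {
#     "can_commit_freely": false,
#     "general_state": "closed",
#     "message": "Tree is closed: infra outage"
#   }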
def GetUnitTestsInDirectory(
input_api, output_api, directory, whitelist=None, blacklist=None, env=None):
"""Lists all files in a directory and runs them. Doesn't recurse.
It's mainly a wrapper for RunUnitTests. Use whitelist and blacklist to filter
tests accordingly.
"""
unit_tests = []
test_path = input_api.os_path.abspath(
input_api.os_path.join(input_api.PresubmitLocalPath(), directory))
def check(filename, filters):
return any(True for i in filters if input_api.re.match(i, filename))
to_run = found = 0
for filename in input_api.os_listdir(test_path):
found += 1
fullpath = input_api.os_path.join(test_path, filename)
if not input_api.os_path.isfile(fullpath):
continue
if whitelist and not check(filename, whitelist):
continue
if blacklist and check(filename, blacklist):
continue
unit_tests.append(input_api.os_path.join(directory, filename))
to_run += 1
input_api.logging.debug('Found %d files, running %d' % (found, to_run))
if not to_run:
return [
output_api.PresubmitPromptWarning(
'Out of %d files, found none that matched w=%r, b=%r in directory %s'
% (found, whitelist, blacklist, directory))
]
return GetUnitTests(input_api, output_api, unit_tests, env)
def GetUnitTests(input_api, output_api, unit_tests, env=None):
"""Runs all unit tests in a directory.
On Windows, sys.executable is used for unit tests ending with ".py".
"""
# We don't want to hinder users from uploading incomplete patches.
if input_api.is_committing:
message_type = output_api.PresubmitError
else:
message_type = output_api.PresubmitPromptWarning
results = []
for unit_test in unit_tests:
cmd = []
if input_api.platform == 'win32' and unit_test.endswith('.py'):
# Windows needs some help.
cmd = [input_api.python_executable]
cmd.append(unit_test)
if input_api.verbose:
cmd.append('--verbose')
kwargs = {'cwd': input_api.PresubmitLocalPath()}
if env:
kwargs['env'] = env
results.append(input_api.Command(
name=unit_test,
cmd=cmd,
kwargs=kwargs,
message=message_type))
return results
def GetUnitTestsRecursively(input_api, output_api, directory,
whitelist, blacklist):
"""Gets all files in the directory tree (git repo) that match the whitelist.
Restricts itself to only find files within the Change's source repo, not
dependencies.
"""
def check(filename):
return (any(input_api.re.match(f, filename) for f in whitelist) and
not any(input_api.re.match(f, filename) for f in blacklist))
tests = []
to_run = found = 0
for filepath in input_api.change.AllFiles(directory):
found += 1
if check(filepath):
to_run += 1
tests.append(filepath)
input_api.logging.debug('Found %d files, running %d' % (found, to_run))
if not to_run:
return [
output_api.PresubmitPromptWarning(
'Out of %d files, found none that matched w=%r, b=%r in directory %s'
% (found, whitelist, blacklist, directory))
]
return GetUnitTests(input_api, output_api, tests)
def GetPythonUnitTests(input_api, output_api, unit_tests):
"""Run the unit tests out of process, capture the output and use the result
code to determine success.
DEPRECATED.
"""
# We don't want to hinder users from uploading incomplete patches.
if input_api.is_committing:
message_type = output_api.PresubmitError
else:
message_type = output_api.PresubmitNotifyResult
results = []
for unit_test in unit_tests:
# Run the unit tests out of process. This is because some unit tests
# stub out base libraries and don't clean up their mess. It's too easy to
# get subtle bugs.
cwd = None
env = None
unit_test_name = unit_test
# 'python -m test.unit_test' doesn't work. We need to change to the right
# directory instead.
if '.' in unit_test:
# Tests imported in submodules (subdirectories) assume that the current
# directory is in the PYTHONPATH. Manually fix that.
unit_test = unit_test.replace('.', '/')
cwd = input_api.os_path.dirname(unit_test)
unit_test = input_api.os_path.basename(unit_test)
env = input_api.environ.copy()
# At least on Windows, it seems '.' must explicitly be in PYTHONPATH
backpath = [
'.', input_api.os_path.pathsep.join(['..'] * (cwd.count('/') + 1))
]
if env.get('PYTHONPATH'):
backpath.append(env.get('PYTHONPATH'))
env['PYTHONPATH'] = input_api.os_path.pathsep.join((backpath))
cmd = [input_api.python_executable, '-m', '%s' % unit_test]
results.append(input_api.Command(
name=unit_test_name,
cmd=cmd,
kwargs={'env': env, 'cwd': cwd},
message=message_type))
return results
def RunUnitTestsInDirectory(input_api, *args, **kwargs):
"""Run tests in a directory serially.
For better performance, use GetUnitTestsInDirectory and then
pass to input_api.RunTests.
"""
return input_api.RunTests(
GetUnitTestsInDirectory(input_api, *args, **kwargs), False)
def RunUnitTests(input_api, *args, **kwargs):
"""Run tests serially.
For better performance, use GetUnitTests and then pass to
input_api.RunTests.
"""
return input_api.RunTests(GetUnitTests(input_api, *args, **kwargs), False)
def RunPythonUnitTests(input_api, *args, **kwargs):
"""Run python tests in a directory serially.
DEPRECATED
"""
return input_api.RunTests(
GetPythonUnitTests(input_api, *args, **kwargs), False)
def _FetchAllFiles(input_api, white_list, black_list):
"""Hack to fetch all files."""
# We cannot use AffectedFiles here because we want to test every python
# file on each single python change. It's because a change in a python file
# can break another unmodified file.
# Use code similar to InputApi.FilterSourceFile()
def Find(filepath, filters):
for item in filters:
if input_api.re.match(item, filepath):
return True
return False
files = []
path_len = len(input_api.PresubmitLocalPath())
for dirpath, dirnames, filenames in input_api.os_walk(
input_api.PresubmitLocalPath()):
# Passes dirnames in black list to speed up search.
for item in dirnames[:]:
filepath = input_api.os_path.join(dirpath, item)[path_len + 1:]
if Find(filepath, black_list):
dirnames.remove(item)
for item in filenames:
filepath = input_api.os_path.join(dirpath, item)[path_len + 1:]
if Find(filepath, white_list) and not Find(filepath, black_list):
files.append(filepath)
return files
def GetPylint(input_api, output_api, white_list=None, black_list=None,
disabled_warnings=None, extra_paths_list=None):
"""Run pylint on python files.
The default white_list enforces looking only at *.py files.
"""
white_list = tuple(white_list or ('.*\.py$',))
black_list = tuple(black_list or input_api.DEFAULT_BLACK_LIST)
extra_paths_list = extra_paths_list or []
if input_api.is_committing:
error_type = output_api.PresubmitError
else:
error_type = output_api.PresubmitPromptWarning
# Only trigger if there is at least one python file affected.
def rel_path(regex):
"""Modifies a regex for a subject to accept paths relative to root."""
def samefile(a, b):
# Default implementation for platforms lacking os.path.samefile
# (like Windows).
return input_api.os_path.abspath(a) == input_api.os_path.abspath(b)
samefile = getattr(input_api.os_path, 'samefile', samefile)
if samefile(input_api.PresubmitLocalPath(),
input_api.change.RepositoryRoot()):
return regex
prefix = input_api.os_path.join(input_api.os_path.relpath(
input_api.PresubmitLocalPath(), input_api.change.RepositoryRoot()), '')
return input_api.re.escape(prefix) + regex
src_filter = lambda x: input_api.FilterSourceFile(
x, map(rel_path, white_list), map(rel_path, black_list))
if not input_api.AffectedSourceFiles(src_filter):
input_api.logging.info('Skipping pylint: no matching changes.')
return []
extra_args = ['--rcfile=%s' % input_api.os_path.join(_HERE, 'pylintrc')]
if disabled_warnings:
extra_args.extend(['-d', ','.join(disabled_warnings)])
files = _FetchAllFiles(input_api, white_list, black_list)
if not files:
return []
files.sort()
input_api.logging.info('Running pylint on %d files', len(files))
input_api.logging.debug('Running pylint on: %s', files)
# Copy the system path to the environment so pylint can find the right
# imports.
env = input_api.environ.copy()
import sys
env['PYTHONPATH'] = input_api.os_path.pathsep.join(
extra_paths_list + sys.path).encode('utf8')
def GetPylintCmd(files):
# Windows needs help running python files so we explicitly specify
# the interpreter to use. It also has limitations on the size of
# the command-line, so we pass arguments via a pipe.
if len(files) == 1:
description = files[0]
else:
description = '%s files' % len(files)
return input_api.Command(
name='Pylint (%s)' % description,
cmd=[input_api.python_executable,
input_api.os_path.join(_HERE, 'third_party', 'pylint.py'),
'--args-on-stdin'],
kwargs={'env': env, 'stdin': '\n'.join(files + extra_args)},
message=error_type)
# Always run pylint and pass it all the py files at once.
  # Passing py files one at a time is slower and can produce
# different results. input_api.verbose used to be used
# to enable this behaviour but differing behaviour in
# verbose mode is not desirable.
# Leave this unreachable code in here so users can make
# a quick local edit to diagnose pylint issues more
# easily.
if True:
return [GetPylintCmd(files)]
else:
return map(lambda x: GetPylintCmd([x]), files)
def RunPylint(input_api, *args, **kwargs):
"""Legacy presubmit function.
For better performance, get all tests and then pass to
input_api.RunTests.
"""
return input_api.RunTests(GetPylint(input_api, *args, **kwargs), False)
# TODO(dpranke): Get the host_url from the input_api instead
def CheckRietveldTryJobExecution(dummy_input_api, dummy_output_api,
dummy_host_url, dummy_platforms,
dummy_owner):
# Temporarily 'fix' the check while the Rietveld API is being upgraded to
# something sensible.
return []
def CheckBuildbotPendingBuilds(input_api, output_api, url, max_pendings,
ignored):
try:
connection = input_api.urllib2.urlopen(url)
raw_data = connection.read()
connection.close()
except IOError:
return [output_api.PresubmitNotifyResult('%s is not accessible' % url)]
try:
data = input_api.json.loads(raw_data)
except ValueError:
return [output_api.PresubmitNotifyResult('Received malformed json while '
'looking up buildbot status')]
out = []
for (builder_name, builder) in data.iteritems():
if builder_name in ignored:
continue
if builder.get('state', '') == 'offline':
continue
pending_builds_len = len(builder.get('pending_builds', []))
if pending_builds_len > max_pendings:
out.append('%s has %d build(s) pending' %
(builder_name, pending_builds_len))
if out:
return [output_api.PresubmitPromptWarning(
        'Build(s) pending. It is suggested to wait until no more than %d '
        'builds are pending.' % max_pendings,
long_text='\n'.join(out))]
return []
def CheckOwners(input_api, output_api, source_file_filter=None,
author_counts_as_owner=True):
if input_api.is_committing:
if input_api.tbr:
return [output_api.PresubmitNotifyResult(
'--tbr was specified, skipping OWNERS check')]
if not input_api.change.issue:
return [output_api.PresubmitError("OWNERS check failed: this change has "
"no Rietveld issue number, so we can't check it for approvals.")]
needed = 'LGTM from an OWNER'
output = output_api.PresubmitError
else:
needed = 'OWNER reviewers'
output = output_api.PresubmitNotifyResult
affected_files = set([f.LocalPath() for f in
input_api.change.AffectedFiles(file_filter=source_file_filter)])
owners_db = input_api.owners_db
owner_email, reviewers = _RietveldOwnerAndReviewers(
input_api,
owners_db.email_regexp,
approval_needed=input_api.is_committing)
owner_email = owner_email or input_api.change.author_email
if author_counts_as_owner and owner_email:
reviewers_plus_owner = set([owner_email]).union(reviewers)
missing_files = owners_db.files_not_covered_by(affected_files,
reviewers_plus_owner)
else:
missing_files = owners_db.files_not_covered_by(affected_files, reviewers)
if missing_files:
output_list = [
output('Missing %s for these files:\n %s' %
(needed, '\n '.join(sorted(missing_files))))]
if not input_api.is_committing:
suggested_owners = owners_db.reviewers_for(missing_files, owner_email)
output_list.append(output('Suggested OWNERS: ' +
'(Use "git-cl owners" to interactively select owners.)\n %s' %
('\n '.join(suggested_owners or []))))
return output_list
if input_api.is_committing and not reviewers:
return [output('Missing LGTM from someone other than %s' % owner_email)]
return []
def _GetRietveldIssueProps(input_api, messages):
"""Gets the issue properties from rietveld."""
issue = input_api.change.issue
if issue and input_api.rietveld:
return input_api.rietveld.get_issue_properties(
issue=int(issue), messages=messages)
def _ReviewersFromChange(change):
"""Return the reviewers specified in the |change|, if any."""
reviewers = set()
if change.R:
reviewers.update(set([r.strip() for r in change.R.split(',')]))
if change.TBR:
reviewers.update(set([r.strip() for r in change.TBR.split(',')]))
# Drop reviewers that aren't specified in email address format.
return set(reviewer for reviewer in reviewers if '@' in reviewer)
def _RietveldOwnerAndReviewers(input_api, email_regexp, approval_needed=False):
"""Return the owner and reviewers of a change, if any.
If approval_needed is True, only reviewers who have approved the change
will be returned.
"""
issue_props = _GetRietveldIssueProps(input_api, True)
if not issue_props:
reviewers = set()
if not approval_needed:
reviewers = _ReviewersFromChange(input_api.change)
return None, reviewers
if not approval_needed:
return issue_props['owner_email'], set(issue_props['reviewers'])
owner_email = issue_props['owner_email']
def match_reviewer(r):
return email_regexp.match(r) and r != owner_email
messages = issue_props.get('messages', [])
approvers = set(
m['sender'] for m in messages
if m.get('approval') and match_reviewer(m['sender']))
return owner_email, approvers
def _CheckConstNSObject(input_api, output_api, source_file_filter):
"""Checks to make sure no objective-c files have |const NSSomeClass*|."""
pattern = input_api.re.compile(
r'const\s+NS(?!(Point|Range|Rect|Size)\s*\*)\w*\s*\*')
def objective_c_filter(f):
return (source_file_filter(f) and
input_api.os_path.splitext(f.LocalPath())[1] in ('.h', '.m', '.mm'))
files = []
for f in input_api.AffectedSourceFiles(objective_c_filter):
contents = input_api.ReadFile(f)
if pattern.search(contents):
files.append(f)
if files:
if input_api.is_committing:
res_type = output_api.PresubmitPromptWarning
else:
res_type = output_api.PresubmitNotifyResult
return [ res_type('|const NSClass*| is wrong, see ' +
'http://dev.chromium.org/developers/clang-mac',
files) ]
return []
def CheckSingletonInHeaders(input_api, output_api, source_file_filter=None):
"""Checks to make sure no header files have |Singleton<|."""
pattern = input_api.re.compile(r'(?<!class\s)Singleton\s*<')
files = []
for f in input_api.AffectedSourceFiles(source_file_filter):
if (f.LocalPath().endswith('.h') or f.LocalPath().endswith('.hxx') or
f.LocalPath().endswith('.hpp') or f.LocalPath().endswith('.inl')):
contents = input_api.ReadFile(f)
for line in contents.splitlines(False):
if (not input_api.re.match(r'//', line) and # Strip C++ comment.
pattern.search(line)):
files.append(f)
break
if files:
return [ output_api.PresubmitError(
'Found Singleton<T> in the following header files.\n' +
'Please move them to an appropriate source file so that the ' +
'template gets instantiated in a single compilation unit.',
files) ]
return []
def PanProjectChecks(input_api, output_api,
excluded_paths=None, text_files=None,
license_header=None, project_name=None,
owners_check=True, maxlen=80):
"""Checks that ALL chromium orbit projects should use.
  These are checks to be run on all Chromium orbit projects, including:
Chromium
Native Client
V8
When you update this function, please take this broad scope into account.
Args:
input_api: Bag of input related interfaces.
output_api: Bag of output related interfaces.
excluded_paths: Don't include these paths in common checks.
text_files: Which file are to be treated as documentation text files.
license_header: What license header should be on files.
project_name: What is the name of the project as it appears in the license.
Returns:
A list of warning or error objects.
"""
excluded_paths = tuple(excluded_paths or [])
text_files = tuple(text_files or (
r'.+\.txt$',
r'.+\.json$',
))
project_name = project_name or 'Chromium'
# Accept any year number from 2006 to the current year, or the special
# 2006-20xx string used on the oldest files. 2006-20xx is deprecated, but
# tolerated on old files.
current_year = int(input_api.time.strftime('%Y'))
allowed_years = (str(s) for s in reversed(xrange(2006, current_year + 1)))
years_re = '(' + '|'.join(allowed_years) + '|2006-2008|2006-2009|2006-2010)'
# The (c) is deprecated, but tolerate it until it's removed from all files.
license_header = license_header or (
r'.*? Copyright (\(c\) )?%(year)s The %(project)s Authors\. '
r'All rights reserved\.\n'
r'.*? Use of this source code is governed by a BSD-style license that '
r'can be\n'
r'.*? found in the LICENSE file\.(?: \*/)?\n'
) % {
'year': years_re,
'project': project_name,
}
results = []
# This code loads the default black list (e.g. third_party, experimental, etc)
  # and adds our black list (breakpad, skia and v8 are still not following
  # google style and are not really living in this repository).
# See presubmit_support.py InputApi.FilterSourceFile for the (simple) usage.
black_list = input_api.DEFAULT_BLACK_LIST + excluded_paths
white_list = input_api.DEFAULT_WHITE_LIST + text_files
sources = lambda x: input_api.FilterSourceFile(x, black_list=black_list)
text_files = lambda x: input_api.FilterSourceFile(
x, black_list=black_list, white_list=white_list)
snapshot_memory = []
def snapshot(msg):
"""Measures & prints performance warning if a rule is running slow."""
dt2 = input_api.time.clock()
if snapshot_memory:
delta_ms = int(1000*(dt2 - snapshot_memory[0]))
if delta_ms > 500:
print " %s took a long time: %dms" % (snapshot_memory[1], delta_ms)
snapshot_memory[:] = (dt2, msg)
if owners_check:
snapshot("checking owners")
results.extend(input_api.canned_checks.CheckOwners(
input_api, output_api, source_file_filter=None))
snapshot("checking long lines")
results.extend(input_api.canned_checks.CheckLongLines(
input_api, output_api, maxlen, source_file_filter=sources))
snapshot( "checking tabs")
results.extend(input_api.canned_checks.CheckChangeHasNoTabs(
input_api, output_api, source_file_filter=sources))
snapshot( "checking stray whitespace")
results.extend(input_api.canned_checks.CheckChangeHasNoStrayWhitespace(
input_api, output_api, source_file_filter=sources))
snapshot("checking nsobjects")
results.extend(_CheckConstNSObject(
input_api, output_api, source_file_filter=sources))
snapshot("checking singletons")
results.extend(CheckSingletonInHeaders(
input_api, output_api, source_file_filter=sources))
# The following checks are only done on commit, since the commit bot will
# auto-fix most of these.
if input_api.is_committing:
snapshot("checking eol style")
results.extend(input_api.canned_checks.CheckChangeSvnEolStyle(
input_api, output_api, source_file_filter=text_files))
snapshot("checking svn mime types")
results.extend(input_api.canned_checks.CheckSvnForCommonMimeTypes(
input_api, output_api))
snapshot("checking license")
results.extend(input_api.canned_checks.CheckLicense(
input_api, output_api, license_header, source_file_filter=sources))
snapshot("checking was uploaded")
results.extend(input_api.canned_checks.CheckChangeWasUploaded(
input_api, output_api))
snapshot("checking description")
results.extend(input_api.canned_checks.CheckChangeHasDescription(
input_api, output_api))
results.extend(input_api.canned_checks.CheckDoNotSubmitInDescription(
input_api, output_api))
snapshot("checking do not submit in files")
results.extend(input_api.canned_checks.CheckDoNotSubmitInFiles(
input_api, output_api))
snapshot("done")
return results
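# Illustrative usage from a project's PRESUBMIT.py (a hypothetical file, not
# part of this module); CheckChangeOnUpload/CheckChangeOnCommit are the
# conventional presubmit entry points.
#
#   def CheckChangeOnUpload(input_api, output_api):
#     return input_api.canned_checks.PanProjectChecks(
#         input_api, output_api, owners_check=False)
#
#   def CheckChangeOnCommit(input_api, output_api):
#     return input_api.canned_checks.PanProjectChecks(input_api, output_api)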
def CheckPatchFormatted(input_api, output_api):
import git_cl
cmd = ['cl', 'format', '--dry-run', input_api.PresubmitLocalPath()]
code, _ = git_cl.RunGitWithCode(cmd, suppress_stderr=True)
if code == 2:
return [output_api.PresubmitPromptWarning(
'Your patch is not formatted, please run git cl format.')]
# As this is just a warning, ignore all other errors if the user
# happens to have a broken clang-format, doesn't use git, etc etc.
return []
|
Phonebooth/depot_tools
|
presubmit_canned_checks.py
|
Python
|
bsd-3-clause
| 40,994 | 0.011538 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# encfs.py
#
# Copyright 2013 Antergos
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
""" Configures Antergos to encrypt user's home with encFS """
#import logging
import os
import shutil
import subprocess
def setup(username, dest_dir):
""" Encrypt user's home folder """
# encfs pam_mount packages are needed
# pam_encfs from AUR
# https://wiki.debian.org/TransparentEncryptionForHomeFolder
# Edit configuration files
name = os.path.join(dest_dir, "etc/security/pam_encfs.conf")
shutil.copy(name, name + ".cnchi")
with open(name, "r") as pam_encfs:
lines = pam_encfs.readlines()
i = len(lines) - 1
lines[i] = "# " + lines[i]
with open(name, "w") as pam_encfs:
        pam_encfs.writelines(lines)
pam_encfs.write("# Added by Cnchi - Antergos Installer\n")
pam_encfs.write("-\t/home/.encfs\t-\t-v\t-\n")
name = os.path.join(dest_dir, "etc/security/pam_env.conf")
shutil.copy(name, name + ".cnchi")
with open(name, "a") as pam_env:
pam_env.write("# Added by Cnchi - Antergos Installer\n")
pam_env.write("# Set the ICEAUTHORITY file location to allow GNOME to start on encfs $HOME\n")
pam_env.write("ICEAUTHORITY DEFAULT=/tmp/.ICEauthority_@{PAM_USER}\n")
name = os.path.join(dest_dir, "etc/fuse.conf")
shutil.copy(name, name + ".cnchi")
with open(name, "a") as fuse_conf:
fuse_conf.write("# Added by Cnchi - Antergos Installer\n")
fuse_conf.write("user_allow_other\n")
name = os.path.join(dest_dir, "etc/pam.d/system-login")
shutil.copy(name, name + ".cnchi")
with open(name, "a") as system_login:
system_login.write("# Added by Cnchi - Antergos Installer\n")
system_login.write("session required\tpam_encfs.so\n")
system_login.write("session optional\tpam_mount.so\n")
name = os.path.join(dest_dir, "etc/pam.d/system-auth")
shutil.copy(name, name + ".cnchi")
with open(name, "a") as system_auth:
system_auth.write("# Added by Cnchi - Antergos Installer\n")
system_auth.write("auth sufficient\tpam_encfs.so\n")
system_auth.write("auth optional\tpam_mount.so\n")
# Setup finished
# Move user home dir out of the way
    mounted_dir = os.path.join(dest_dir, "home/", username)
    backup_dir = os.path.join(dest_dir, "var/tmp/", username)
    subprocess.check_call(['mv', mounted_dir, backup_dir])
    # Create the necessary directories, encrypted and mounted (unencrypted)
    encrypted_dir = os.path.join(dest_dir, "home/.encfs/", username)
subprocess.check_call(['mkdir', '-p', encrypted_dir, mounted_dir])
# Set owner
subprocess.check_call(['chown', '%s:users' % username, encrypted_dir, mounted_dir])
# Create encrypted directory
subprocess.check_call(['encfs', '-v', encrypted_dir, mounted_dir])
# Restore user home files
src = os.path.join(backup_dir, "*")
subprocess.check_call(['mv', src, mounted_dir])
src = os.path.join(backup_dir, ".[A-Za-z0-9]*")
subprocess.check_call(['mv', src, mounted_dir])
# Delete home backup
subprocess.check_call(['rmdir', backup_dir])
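# Illustrative call only; the username and install root below are hypothetical.
# setup() is expected to run after the target system has been installed under
# dest_dir and the user account has been created.
#
#   setup("antergos_user", "/install")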
|
prescott66/Cnchi
|
src/encfs.py
|
Python
|
gpl-3.0
| 3,865 | 0.001035 |
import jmri.jmrit.jython.Jynstrument as Jynstrument
import jmri.jmrit.catalog.NamedIcon as NamedIcon
import jmri.jmrit.symbolicprog.tabbedframe.PaneOpsProgAction as PaneOpsProgAction
import javax.swing.JButton as JButton
class DecoderPro(Jynstrument):
def getExpectedContextClassName(self):
return "javax.swing.JComponent"
def init(self):
jbNew = JButton( PaneOpsProgAction() )
jbNew.setIcon( NamedIcon("resources/decoderpro.gif","resources/decoderpro.gif") )
jbNew.addMouseListener(self.getMouseListeners()[0]) # In order to get the popupmenu on the button too
jbNew.setToolTipText( jbNew.getText() )
jbNew.setText( None )
self.add(jbNew)
def quit(self):
pass
|
ctag/cpe453
|
JMRI/jython/Jynstruments/Launchers/DecoderPro.jyn/DecoderPro.py
|
Python
|
gpl-2.0
| 744 | 0.021505 |
"""Application base, containing global templates."""
default_app_config = 'pontoon.base.apps.BaseConfig'
MOZILLA_REPOS = (
'ssh://hg.mozilla.org/users/m_owca.info/firefox-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/firefox-for-android-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/thunderbird-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/lightning-aurora/',
'ssh://hg.mozilla.org/users/m_owca.info/seamonkey-aurora/',
)
class SyncError(RuntimeError):
"""Error class for errors relating to the project sync process."""
|
vivekanand1101/pontoon
|
pontoon/base/__init__.py
|
Python
|
bsd-3-clause
| 561 | 0 |
from Estructura import espaceado
class Arbol_Sintactico_Abstracto:
def __init__(self,alcance,hijos):
self.hijos = hijos
self.alcance = alcance
self.cont = 1
def imprimir(self,tabulacion):
if (len(self.hijos) > 1):
print tabulacion + "SECUENCIA"
for hijo in self.hijos:
hijo.nivel = 1
hijo.imprimir(espaceado(tabulacion))
def ejecutar(self):
for hijo in self.hijos:
hijo.nivel = 1
hijo.ejecutar()
|
danmt/NEO
|
Codigo_Fuente/etapa4/Instrucciones/Arbol_Sintactico_Abstracto.py
|
Python
|
gpl-3.0
| 428 | 0.044393 |
import glob
from subprocess import call
test_failures = {}
test_successes = {}
files = [file for file in glob.glob('../**/build.gradle', recursive=True)]
for f in files:
if f.startswith('../test'):
continue
# clean all projects in the platform before executing build
print("Cleaning all projects first...")
call(['../gradlew', '-p', '../', 'clean'])
print("Executing " + f + "...")
rc = call(['../gradlew', '-b', f, 'build'])
if rc == 0:
test_successes[f] = rc
else:
test_failures[f] = rc
print("Return code: " + str(rc))
print("FAILURES:")
for key in test_failures:
print(key + ": " + "FAILED(rc=" + str(test_failures[key]) + ")!")
print("\n\n")
print("SUCCESSES:")
for key in test_successes:
print(key + ": PASS")
|
IBMStreams/streamsx.health
|
test/test-builds.py
|
Python
|
apache-2.0
| 792 | 0.001263 |
# -*- coding: utf-8 -*-
"""
pygments.filters
~~~~~~~~~~~~~~~~
Module containing filter lookup functions and default
filters.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.token import String, Comment, Keyword, Name, Error, Whitespace, \
string_to_tokentype
from pygments.filter import Filter
from pygments.util import get_list_opt, get_int_opt, get_bool_opt, \
get_choice_opt, ClassNotFound, OptionError
from pygments.plugin import find_plugin_filters
def find_filter_class(filtername):
"""
Lookup a filter by name. Return None if not found.
"""
if filtername in FILTERS:
return FILTERS[filtername]
for name, cls in find_plugin_filters():
if name == filtername:
return cls
return None
def get_filter_by_name(filtername, **options):
"""
Return an instantiated filter. Options are passed to the filter
initializer if wanted. Raise a ClassNotFound if not found.
"""
cls = find_filter_class(filtername)
if cls:
return cls(**options)
else:
raise ClassNotFound('filter %r not found' % filtername)
def get_all_filters():
"""
Return a generator of all filter names.
"""
for name in FILTERS:
yield name
for name, _ in find_plugin_filters():
yield name
def _replace_special(ttype, value, regex, specialttype,
replacefunc=lambda x: x):
last = 0
for match in regex.finditer(value):
start, end = match.start(), match.end()
if start != last:
yield ttype, value[last:start]
yield specialttype, replacefunc(value[start:end])
last = end
if last != len(value):
yield ttype, value[last:]
class CodeTagFilter(Filter):
"""
Highlight special code tags in comments and docstrings.
Options accepted:
`codetags` : list of strings
A list of strings that are flagged as code tags. The default is to
highlight ``XXX``, ``TODO``, ``BUG`` and ``NOTE``.
"""
def __init__(self, **options):
Filter.__init__(self, **options)
tags = get_list_opt(options, 'codetags',
['XXX', 'TODO', 'BUG', 'NOTE'])
self.tag_re = re.compile(r'\b(%s)\b' % '|'.join([
re.escape(tag) for tag in tags if tag
]))
def filter(self, lexer, stream):
regex = self.tag_re
for ttype, value in stream:
if ttype in String.Doc or \
ttype in Comment and \
ttype not in Comment.Preproc:
for sttype, svalue in _replace_special(ttype, value, regex,
Comment.Special):
yield sttype, svalue
else:
yield ttype, value
class KeywordCaseFilter(Filter):
"""
Convert keywords to lowercase or uppercase or capitalize them, which
means first letter uppercase, rest lowercase.
This can be useful e.g. if you highlight Pascal code and want to adapt the
code to your styleguide.
Options accepted:
`case` : string
The casing to convert keywords to. Must be one of ``'lower'``,
``'upper'`` or ``'capitalize'``. The default is ``'lower'``.
"""
def __init__(self, **options):
Filter.__init__(self, **options)
case = get_choice_opt(options, 'case', ['lower', 'upper', 'capitalize'], 'lower')
self.convert = getattr(unicode, case)
def filter(self, lexer, stream):
for ttype, value in stream:
if ttype in Keyword:
yield ttype, self.convert(value)
else:
yield ttype, value
class NameHighlightFilter(Filter):
"""
Highlight a normal Name token with a different token type.
Example::
filter = NameHighlightFilter(
names=['foo', 'bar', 'baz'],
tokentype=Name.Function,
)
This would highlight the names "foo", "bar" and "baz"
as functions. `Name.Function` is the default token type.
Options accepted:
`names` : list of strings
A list of names that should be given the different token type.
There is no default.
`tokentype` : TokenType or string
A token type or a string containing a token type name that is
used for highlighting the strings in `names`. The default is
`Name.Function`.
"""
def __init__(self, **options):
Filter.__init__(self, **options)
self.names = set(get_list_opt(options, 'names', []))
tokentype = options.get('tokentype')
if tokentype:
self.tokentype = string_to_tokentype(tokentype)
else:
self.tokentype = Name.Function
def filter(self, lexer, stream):
for ttype, value in stream:
if ttype is Name and value in self.names:
yield self.tokentype, value
else:
yield ttype, value
class ErrorToken(Exception):
pass
class RaiseOnErrorTokenFilter(Filter):
"""
Raise an exception when the lexer generates an error token.
Options accepted:
`excclass` : Exception class
The exception class to raise.
The default is `pygments.filters.ErrorToken`.
*New in Pygments 0.8.*
"""
def __init__(self, **options):
Filter.__init__(self, **options)
self.exception = options.get('excclass', ErrorToken)
try:
# issubclass() will raise TypeError if first argument is not a class
if not issubclass(self.exception, Exception):
raise TypeError
except TypeError:
raise OptionError('excclass option is not an exception class')
def filter(self, lexer, stream):
for ttype, value in stream:
if ttype is Error:
raise self.exception(value)
yield ttype, value
class VisibleWhitespaceFilter(Filter):
"""
Convert tabs, newlines and/or spaces to visible characters.
Options accepted:
`spaces` : string or bool
      If this is a one-character string, spaces will be replaced by this string.
If it is another true value, spaces will be replaced by ``·`` (unicode
MIDDLE DOT). If it is a false value, spaces will not be replaced. The
default is ``False``.
`tabs` : string or bool
The same as for `spaces`, but the default replacement character is ``»``
(unicode RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK). The default value
is ``False``. Note: this will not work if the `tabsize` option for the
lexer is nonzero, as tabs will already have been expanded then.
`tabsize` : int
If tabs are to be replaced by this filter (see the `tabs` option), this
is the total number of characters that a tab should be expanded to.
The default is ``8``.
`newlines` : string or bool
The same as for `spaces`, but the default replacement character is ``¶``
(unicode PILCROW SIGN). The default value is ``False``.
`wstokentype` : bool
If true, give whitespace the special `Whitespace` token type. This allows
styling the visible whitespace differently (e.g. greyed out), but it can
disrupt background colors. The default is ``True``.
*New in Pygments 0.8.*
"""
def __init__(self, **options):
Filter.__init__(self, **options)
for name, default in {'spaces': u'·', 'tabs': u'»', 'newlines': u'¶'}.items():
opt = options.get(name, False)
if isinstance(opt, basestring) and len(opt) == 1:
setattr(self, name, opt)
else:
setattr(self, name, (opt and default or ''))
tabsize = get_int_opt(options, 'tabsize', 8)
if self.tabs:
self.tabs += ' '*(tabsize-1)
if self.newlines:
self.newlines += '\n'
self.wstt = get_bool_opt(options, 'wstokentype', True)
def filter(self, lexer, stream):
if self.wstt:
spaces = self.spaces or ' '
tabs = self.tabs or '\t'
newlines = self.newlines or '\n'
regex = re.compile(r'\s')
def replacefunc(wschar):
if wschar == ' ':
return spaces
elif wschar == '\t':
return tabs
elif wschar == '\n':
return newlines
return wschar
for ttype, value in stream:
for sttype, svalue in _replace_special(ttype, value, regex,
Whitespace, replacefunc):
yield sttype, svalue
else:
spaces, tabs, newlines = self.spaces, self.tabs, self.newlines
# simpler processing
for ttype, value in stream:
if spaces:
value = value.replace(' ', spaces)
if tabs:
value = value.replace('\t', tabs)
if newlines:
value = value.replace('\n', newlines)
yield ttype, value
class GobbleFilter(Filter):
"""
Gobbles source code lines (eats initial characters).
This filter drops the first ``n`` characters off every line of code. This
may be useful when the source code fed to the lexer is indented by a fixed
amount of space that isn't desired in the output.
Options accepted:
`n` : int
The number of characters to gobble.
*New in Pygments 1.2.*
"""
def __init__(self, **options):
Filter.__init__(self, **options)
self.n = get_int_opt(options, 'n', 0)
def gobble(self, value, left):
if left < len(value):
return value[left:], 0
else:
return '', left - len(value)
def filter(self, lexer, stream):
n = self.n
left = n # How many characters left to gobble.
for ttype, value in stream:
# Remove ``left`` tokens from first line, ``n`` from all others.
parts = value.split('\n')
(parts[0], left) = self.gobble(parts[0], left)
for i in range(1, len(parts)):
(parts[i], left) = self.gobble(parts[i], n)
value = '\n'.join(parts)
if value != '':
yield ttype, value
class TokenMergeFilter(Filter):
"""
Merges consecutive tokens with the same token type in the output stream of a
lexer.
*New in Pygments 1.2.*
"""
def __init__(self, **options):
Filter.__init__(self, **options)
def filter(self, lexer, stream):
current_type = None
current_value = None
for ttype, value in stream:
if ttype is current_type:
current_value += value
else:
if current_type is not None:
yield current_type, current_value
current_type = ttype
current_value = value
if current_type is not None:
yield current_type, current_value
FILTERS = {
'codetagify': CodeTagFilter,
'keywordcase': KeywordCaseFilter,
'highlight': NameHighlightFilter,
'raiseonerror': RaiseOnErrorTokenFilter,
'whitespace': VisibleWhitespaceFilter,
'gobble': GobbleFilter,
'tokenmerge': TokenMergeFilter,
}
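# Illustrative usage only (relies on the standard pygments lexer API, which
# looks filters up by name through get_filter_by_name/FILTERS above);
# source_code is a placeholder string.
#
#   from pygments import highlight
#   from pygments.lexers import PythonLexer
#   from pygments.formatters import TerminalFormatter
#
#   lexer = PythonLexer()
#   lexer.add_filter('codetagify')             # highlight XXX/TODO/BUG/NOTE
#   lexer.add_filter('whitespace', tabs=True)  # make tabs visible
#   print(highlight(source_code, lexer, TerminalFormatter()))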
|
JulienMcJay/eclock
|
windows/Python27/Lib/site-packages/pygments/filters/__init__.py
|
Python
|
gpl-2.0
| 11,486 | 0.000871 |
class BasePlugin(object):
"""
Extend this/copy its structure to create plugins. Your plugin
class must be `Plugin` to be loaded. Can include commands (command_*),
admin commands (admin_). Additionally, yaib will look functions for
many of the connection events.
Any commands with a docstring will be automatically added to the
help command output, categorized by plugin name.
Command docstrings can include {nick} and {command_prefix} which
will automatically be replaced in the help text with the current
values.
"""
name = 'BasePlugin'
def __init__(self, yaib, configuration):
self.yaib = yaib
# save a shortcut to just this plugin's settings
self.settings = self.yaib.getPluginSettings(self.name)
# configure the plugin
self.configure(configuration)
# create any default settings
self.createDefaultSettings()
@property
def command_prefix(self):
# this is a property so it stays updated, even if the setting changes
return self.yaib.command_prefix
@property
def nick(self):
return self.yaib.nick
def configure(self, configuration):
"""
Overwrite this to handle configuration.
@param configuration: (object) the entire yaib config file.
"""
pass
def createDefaultSettings(self):
"""
Called during initialization.
Use self.settings.setMulti({...}, initial=True)
"""
pass
def getDbSession(self):
return self.yaib.persistence.getDbSession()
def formatDoc(self, message):
"""Formats the given message with the {nick} and {command_prefix}."""
return self.yaib.formatDoc(message)
def callLater(self, delay, func, *args, **kwargs):
"""
Wait for the delay (in seconds) then call the function with
the given arguments."""
return self.yaib.callLater(delay, func, *args, **kwargs)
def onShutdown(self):
"""Called when yaib is shutting down. Clean anything
up and save all the settings necessary."""
pass
def send(self, channel, message):
"""Send a message in the given channel."""
return self.yaib.sendMessage(channel, message)
def reply(self, channel, nick, message):
"""
If the channel is the bot (ie, was a private message to the bot)
sends a message back to the sender, otherwise sends to the channel.
"""
return self.send(
channel if channel != self.nick else nick,
message
)
def action(self, channel, action):
"""Send an action in the given channel."""
return self.yaib.action(channel, action)
def onPluginsLoaded(self):
"""Called when ALL the plugins are loaded."""
pass
def onNickChange(self, nick, old_nick):
"""Called when {nick}'s nick changes."""
pass
def onConnected(self):
"""Called when connected to a server."""
pass
def onMessageOfTheDay(self, message):
"""Called with the server's message of the day."""
pass
def onNotification(self, user, nick, channel, message):
"""Called when noticed"""
pass
def onUserAction(self, user, nick, channel, action):
"""Called when a user performs an action."""
pass
def onPrivateMessage(self, user, nick, message):
"""Called when a user sends {nick} a private message"""
pass
def onMessage(self, user, nick, channel, message, highlight):
"""Called when something is said in a channel"""
pass
def onSend(self, channel, message):
"""Called when {nick} sends a message to a channel (can be PM)."""
pass
def onAction(self, channel, action):
"""Called when {nick} does an action in a channel"""
pass
def onCommand(self, user, nick, channel, command, more):
"""Called when {nick} runs a command on behalf of a user."""
pass
def onAdminCommand(self, user, nick, channel, command, more):
"""Called when {nick} runs an admin command on behalf of a user."""
pass
def onJoined(self, channel):
"""Called after joining a channel."""
pass
def onLeave(self, channel):
"""Called after leaving a channel."""
pass
def onKicked(self, kicker_user, kicker, channel, message):
"""Called when {nick} is kicked from a channel."""
pass
def onUserJoined(self, user, nick, channel):
"""Called when a user joins a channel."""
pass
def onUserLeave(self, user, nick, channel):
"""Called when a user leaves a channel."""
pass
def onUserQuit(self, user, nick, quitMessage):
"""Called when a user disconnects from the server."""
pass
def onUserKicked(self, kickee, channel, kicker_user, kicker, message):
"""Called when a user is kicked from a channel"""
pass
def onUserRenamed(self, user, old_nick, new_nick):
"""Called when a user changes their nick"""
pass
def onUserList(self, channel_type, channel_name, user_list):
"""
Called when user_list is given for a channel (ie, upon joining the
channel).
NOTE: this is a list of nicks, not user strings.
"""
pass
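# A minimal sketch of a concrete plugin, following the conventions in the class
# docstring above (the loader looks for a class named Plugin; command_* methods
# become commands). The 'echo' command and the handler signature, mirrored from
# onCommand, are illustrative assumptions, not part of yaib.
#
#   class Plugin(BasePlugin):
#       name = 'Echo'
#
#       def command_echo(self, user, nick, channel, command, more):
#           """{command_prefix}echo <text> - repeats <text> back."""
#           self.reply(channel, nick, more)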
|
collingreen/yaib
|
plugins/baseplugin.py
|
Python
|
mit
| 5,580 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-11-01 22:55
from __future__ import unicode_literals
from django.db import migrations
from django.db import models
class Migration(migrations.Migration):
dependencies = [
('contentcuration', '0038_contentnode_author'),
]
operations = [
migrations.AlterField(
model_name='formatpreset',
name='id',
field=models.CharField(choices=[('high_res_video', 'High Resolution'), ('low_res_video', 'Low Resolution'), ('vector_video', 'Vectorized'), ('video_thumbnail', 'Thumbnail'), ('video_subtitle', 'Subtitle'), ('audio', 'Audio'), ('audio_thumbnail', 'Thumbnail'), ('document', 'Document'), (
'document_thumbnail', 'Thumbnail'), ('exercise', 'Exercise'), ('exercise_thumbnail', 'Thumbnail'), ('exercise_image', 'Exercise Image'), ('exercise_graphie', 'Exercise Graphie'), ('channel_thumbnail', 'Channel Thumbnail')], max_length=150, primary_key=True, serialize=False),
),
]
|
DXCanas/content-curation
|
contentcuration/contentcuration/migrations/0039_auto_20161101_1555.py
|
Python
|
mit
| 1,022 | 0.001957 |
# Copyright (C) 2014-2015 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2014-2015 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014-2015 David Barragán <bameda@dbarragan.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# This code is partially taken from django-rest-framework:
# Copyright (c) 2011-2014, Tom Christie
from django.core.urlresolvers import RegexURLResolver
from django.conf.urls import patterns, url, include
from .settings import api_settings
def apply_suffix_patterns(urlpatterns, suffix_pattern, suffix_required):
ret = []
for urlpattern in urlpatterns:
if isinstance(urlpattern, RegexURLResolver):
# Set of included URL patterns
regex = urlpattern.regex.pattern
namespace = urlpattern.namespace
app_name = urlpattern.app_name
kwargs = urlpattern.default_kwargs
# Add in the included patterns, after applying the suffixes
patterns = apply_suffix_patterns(urlpattern.url_patterns,
suffix_pattern,
suffix_required)
ret.append(url(regex, include(patterns, namespace, app_name), kwargs))
else:
# Regular URL pattern
regex = urlpattern.regex.pattern.rstrip("$") + suffix_pattern
view = urlpattern._callback or urlpattern._callback_str
kwargs = urlpattern.default_args
name = urlpattern.name
# Add in both the existing and the new urlpattern
if not suffix_required:
ret.append(urlpattern)
ret.append(url(regex, view, kwargs, name))
return ret
def format_suffix_patterns(urlpatterns, suffix_required=False, allowed=None):
"""
Supplement existing urlpatterns with corresponding patterns that also
include a ".format" suffix. Retains urlpattern ordering.
urlpatterns:
A list of URL patterns.
suffix_required:
If `True`, only suffixed URLs will be generated, and non-suffixed
URLs will not be used. Defaults to `False`.
allowed:
An optional tuple/list of allowed suffixes. eg ["json", "api"]
Defaults to `None`, which allows any suffix.
"""
suffix_kwarg = api_settings.FORMAT_SUFFIX_KWARG
if allowed:
if len(allowed) == 1:
allowed_pattern = allowed[0]
else:
allowed_pattern = "(%s)" % "|".join(allowed)
suffix_pattern = r"\.(?P<%s>%s)$" % (suffix_kwarg, allowed_pattern)
else:
suffix_pattern = r"\.(?P<%s>[a-z0-9]+)$" % suffix_kwarg
return apply_suffix_patterns(urlpatterns, suffix_pattern, suffix_required)
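# Illustrative usage sketch (the view and URL names below are assumptions,
# not taiga's actual URLconf):
#
#   urlpatterns = format_suffix_patterns([
#       url(r"^users/$", user_list_view, name="user-list"),
#   ], allowed=["json", "api"])
#
# /users/ keeps working, and /users.json or /users.api also resolve, with the
# suffix passed to the view via the configured FORMAT_SUFFIX_KWARG.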
|
bdang2012/taiga-back-casting
|
taiga/base/api/urlpatterns.py
|
Python
|
agpl-3.0
| 3,316 | 0.000302 |
#!/usr/local/bin/python
import sys
import urllib
import urllib2
import json
import datetime
YAHOO_URL = 'http://query.yahooapis.com/v1/public/yql?env=http%3A%2F%2Fdatatables.org%2Falltables.env&format=json&diagnostics=true&q='
def getJSON(fileName):
f = open(fileName)
jsonData = json.load(f)
f.close()
return jsonData
def writeJSON(jsonData, fileName):
f = open(fileName, 'w')
json.dump(jsonData, f)
f.close()
def fixSymbol(symbol) :
if len(symbol) > 1 and symbol[-2] == "/":
symbol = symbol[:-2] + '-' + symbol[-1]
if '/' in symbol :
symbol = symbol.split('/')[0]
return symbol.replace('^', '-P').rstrip()
def getReturn(returns):
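    # `returns` maps date -> (open, close); the overall return is measured
    # from the earliest date's opening price to the latest date's close.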
if len(returns.keys()) == 0:
return 0
firstDate = returns.keys()[0]
lastDate = returns.keys()[0]
for date in returns.keys():
if date < firstDate:
firstDate = date
if date > lastDate:
lastDate = date
openPrice = float(returns[firstDate][0])
closePrice = float(returns[lastDate][1])
return (closePrice - openPrice) / openPrice
def getReturnForCompany(symbol, date, numOfDays):
endDate = datetime.datetime.strptime(date, '%Y-%m-%d') + datetime.timedelta(days=numOfDays)
sym = fixSymbol(symbol)
query = 'select * from yahoo.finance.historicaldata where symbol = "'+sym+'" and startDate = "'+str(date)+'" and endDate = "'+str(endDate.date())+'"'
encoded_query = urllib.quote(query)
try:
url = YAHOO_URL + encoded_query
jsonRawData = urllib2.urlopen(url)
jsonData = json.load(jsonRawData)
if jsonData['query']['results'] == None:
return 0.0
if type(jsonData['query']['results']['quote']) == type({}):
quotes = [jsonData['query']['results']['quote']]
else:
quotes = jsonData['query']['results']['quote']
returns = {}
for data in quotes:
returns[data['Date']] = (data['Open'], data['Close'])
return getReturn(returns)
except:
return 0.0
def returnsJSONSnippet(jsonData, days):
returns = {}
progress = 0
size = float(len(jsonData.keys()))
for article in jsonData.keys():
date = jsonData[article]['date']
companies = jsonData[article]['company']
articleReturns = []
for company in companies:
articleReturns.append(getReturnForCompany(company, date, days))
articleReturn = sum(articleReturns) / len(articleReturns)
returns[article] = articleReturn
if progress % 100 == 0:
print progress / size, progress, 'out of', size
progress += 1
return returns
def returnsJSONFull(jsonData, days):
returns = {}
progress = 0
size = float(len(jsonData))
for article in jsonData:
date = article['date']
companies = article['company']
articleReturns = []
for company in companies:
articleReturns.append(getReturnForCompany(company, date, days))
articleReturn = sum(articleReturns) / len(articleReturns)
key = article['title'][0] + ' ' + article['text']
returns[key] = articleReturn
if progress % 100 == 0:
print progress / size, progress, 'out of', size
progress += 1
return returns
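# Usage: python downloadReturns.py {snippet|full} <input.json> <output.json> <days>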
inputFile = sys.argv[2]
outputFile = sys.argv[3]
days = int(sys.argv[4])
jsonData = getJSON(inputFile)
if sys.argv[1] == 'snippet':
jsonToWrite = returnsJSONSnippet(jsonData, days)
elif sys.argv[1] == 'full':
jsonToWrite = returnsJSONFull(jsonData, days)
writeJSON(jsonToWrite, outputFile)
|
hassaanm/business-articles
|
downloadReturns.py
|
Python
|
apache-2.0
| 3,618 | 0.004422 |
#!/usr/bin/env python
# coding=utf-8
import json
import sys
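# Minimal Ansible dynamic inventory stub: when Ansible invokes this script it
# prints a JSON mapping of groups to hosts (a single group 'g1' here) and, as
# a side effect, dumps the arguments it was called with to w.log.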
data = {
'g1': {
'hosts': [
'172.17.0.2'
]
}
}
with open('w.log', 'w') as f:
f.write(str(sys.argv))
print json.dumps(data)
|
tao12345666333/Talk-Is-Cheap
|
ansible/group.py
|
Python
|
mit
| 227 | 0 |
import random
import Image
import ImageFont
import ImageDraw
import ImageFilter
import hashlib
from random_words import RandomWords
def gen_captcha(text, fnt, fnt_sz, file_name, fmt='JPEG'):
"""Generate a captcha image"""
# randomly select the foreground color
fgcolor = random.randint(0,0xffff00)
# make the background color the opposite of fgcolor
bgcolor = fgcolor ^ 0xffffff
# create a font object
font = ImageFont.truetype(fnt,fnt_sz)
# determine dimensions of the text
dim = font.getsize(text)
# create a new image slightly larger that the text
im = Image.new('RGB', (dim[0]+5,dim[1]+5), bgcolor)
d = ImageDraw.Draw(im)
x, y = im.size
r = random.randint
# draw 100 random colored boxes on the background
for num in range(100):
d.rectangle((r(0,x),r(0,y),r(0,x),r(0,y)),fill=r(0,0xffffff))
# add the text to the image
d.text((3,3), text, font=font, fill=fgcolor)
im = im.filter(ImageFilter.EDGE_ENHANCE_MORE)
# save the image to a file
im.save(file_name, format=fmt)
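# new_word returns a random dictionary word (from the random_words package)
# together with its SHA-224 hex digest, presumably so the expected captcha
# answer can be checked without keeping the plain word around.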
def new_word():
rw = RandomWords()
word = rw.random_word()
return word, hashlib.sha224(word).hexdigest()
if __name__ == '__main__':
"""Example: This grabs a random word from the dictionary 'words' (one
word per line) and generates a jpeg image named 'test.jpg' using
the truetype font 'porkys.ttf' with a font size of 25.
"""
words = open('static/words').readlines()
    word = words[random.randint(0, len(words) - 1)]
gen_captcha(word.strip(), 'static/porkys.ttf', 25, "captchas/test.jpg")
|
gutosurrex/feed2twister
|
flask_app/captcha.py
|
Python
|
gpl-3.0
| 1,492 | 0.032842 |
"""
accounts.test_views
===================
Tests the REST API calls.
Add more specific social registration tests
"""
import responses
from django.core.urlresolvers import reverse
from django.core import mail
from django.contrib.sites.models import Site
from django.contrib.auth import get_user_model
from django.test.utils import override_settings
from rest_framework import status
from rest_framework.test import APIClient, APITestCase
from allauth.account import app_settings
from allauth.socialaccount.models import SocialApp
from allauth.socialaccount.providers.facebook.provider import GRAPH_API_URL
from .serializers import LoginSerializer
class TestAccounts(APITestCase):
""" Tests normal use - non social login. """
def setUp(self):
self.login_url = reverse('accounts:rest_login')
self.logout_url = reverse('accounts:rest_logout')
self.register_url = reverse('accounts:rest_register')
self.password_reset_url = reverse('accounts:rest_password_reset')
self.rest_password_reset_confirm_url = reverse('accounts:rest_password_reset_confirm')
self.password_change_url = reverse('accounts:rest_password_change')
self.verify_url = reverse('accounts:rest_verify_email')
self.user_url = reverse('accounts:rest_user_details')
self.client = APIClient()
self.reusable_user_data = {'username': 'admin', 'email': 'admin@email.com', 'password': 'password12'}
self.reusable_user_data_change_password = {'username': 'admin', 'email': 'admin@email.com', 'password': 'password_same'}
self.reusable_register_user_data = {'username': 'admin', 'email': 'admin@email.com', 'password1': 'password12', 'password2': 'password12'}
self.reusable_register_user_data1 = {'username': 'admin1', 'email': 'admin1@email.com', 'password1': 'password12', 'password2': 'password12'}
self.reusable_register_user_data_no_username = {'email': 'admin@email.com', 'password1': 'password12', 'password2': 'password12'}
self.reusable_register_user_data_no_email = {'username': 'admin', 'password1': 'password12', 'password2': 'password12'}
self.change_password_data_incorrect = {"new_password1": "password_not_same", "new_password2": "password_same"}
self.change_password_data = {"new_password1": "password_same", "new_password2": "password_same"}
self.change_password_data_old_password_field_enabled = {"old_password": "password12", "new_password1": "password_same", "new_password2": "password_same"}
def create_user_and_login(self):
""" Helper function to create a basic user, login and assign token credentials. """
get_user_model().objects.create_user('admin', 'admin@email.com', 'password12')
response = self.client.post(self.login_url, self.reusable_user_data, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK, "Snap! Basic Login has failed with a helper function 'create_user_and_login'. Something is really wrong here.")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + response.data['key'])
def _generate_uid_and_token(self, user):
result = {}
from django.utils.encoding import force_bytes
from django.contrib.auth.tokens import default_token_generator
from django import VERSION
if VERSION[1] == 5:
from django.utils.http import int_to_base36
result['uid'] = int_to_base36(user.pk)
else:
from django.utils.http import urlsafe_base64_encode
result['uid'] = urlsafe_base64_encode(force_bytes(user.pk))
result['token'] = default_token_generator.make_token(user)
return result
def cleanUp(self):
pass
@override_settings(ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME)
def test_login_basic_username_auth_method(self):
""" Tests basic functionality of login with authentication method of username. """
# Assumes you provide username,password and returns a token
get_user_model().objects.create_user('admin3', '', 'password12')
data = {"username": 'admin3', "email": "", "password": 'password12'}
response = self.client.post(self.login_url, data, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertIn('key', response.content)
@override_settings(ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.EMAIL,
ACCOUNT_EMAIL_REQUIRED=True)
def test_login_basic_email_auth_method(self):
""" Tests basic functionality of login with authentication method of email. """
# Assumes you provide username,password and returns a token
get_user_model().objects.create_user('admin', 'email.login@gmail.com', 'password12')
data = {"username": '', "email": "email.login@gmail.com", "password": 'password12'}
response = self.client.post(self.login_url, data, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertIn('key', response.content)
@override_settings(ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME_EMAIL)
def test_login_basic_username_email_auth_method(self):
""" Tests basic functionality of login with authentication method of username or email. """
# Assumes you provide username,password and returns a token
get_user_model().objects.create_user('admin', 'email.login@gmail.com', 'password12')
# Check email
data = {"username": '', "email": "email.login@gmail.com", "password": 'password12'}
response = self.client.post(self.login_url, data, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
# Check username
data = {"username": 'admin', "email": '', "password": 'password12'}
response = self.client.post(self.login_url, data, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertIn('key', response.content)
@override_settings(ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME)
def test_login_auth_method_username_fail_no_users_in_db(self):
""" Tests login fails with a 400 when no users in db for login auth method of 'username'. """
serializer = LoginSerializer({'username': 'admin', 'password': 'password12'})
response = self.client.post(self.login_url, serializer.data, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST)
@override_settings(ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.EMAIL)
def test_login_email_auth_method_fail_no_users_in_db(self):
""" Tests login fails with a 400 when no users in db for login auth method of 'email'. """
serializer = LoginSerializer({'username': 'admin', 'password': 'password12'})
response = self.client.post(self.login_url, serializer.data, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST)
@override_settings(ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME_EMAIL)
def test_login_username_email_auth_method_fail_no_users_in_db(self):
""" Tests login fails with a 400 when no users in db for login auth method of 'username_email'. """
serializer = LoginSerializer({'username': 'admin', 'password': 'password12'})
response = self.client.post(self.login_url, serializer.data, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST)
def common_test_login_fail_incorrect_change(self):
# Create user, login and try and change password INCORRECTLY
self.create_user_and_login()
self.client.post(self.password_change_url, data=self.change_password_data_incorrect, format='json')
# Remove credentials
self.client.credentials()
response = self.client.post(self.login_url, self.reusable_user_data, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertIn('key', response.content)
@override_settings(ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME)
def test_login_username_auth_method_fail_incorrect_password_change(self):
""" Tests login fails with an incorrect/invalid password change (login auth username). """
self.common_test_login_fail_incorrect_change()
@override_settings(ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.EMAIL)
def test_login_email_auth_method_fail_incorrect_password_change(self):
""" Tests login fails with an incorrect/invalid password change (login auth email). """
self.common_test_login_fail_incorrect_change()
@override_settings(ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME_EMAIL)
def test_login_username_email_auth_method_fail_incorrect_password_change(self):
""" Tests login fails with an incorrect/invalid password change (login auth username_email). """
self.common_test_login_fail_incorrect_change()
def common_test_login_correct_password_change(self):
# Create user, login and try and change password successfully
self.create_user_and_login()
response = self.client.post(self.password_change_url, data=self.change_password_data, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
# Remove credentials
self.client.credentials()
response = self.client.post(self.login_url, self.reusable_user_data_change_password, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertIn('key', response.content)
@override_settings(ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME)
def test_login_username_auth_method_correct_password_change(self):
""" Tests login is succesful with a correct password change (login auth username). """
self.common_test_login_correct_password_change()
@override_settings(ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.EMAIL)
def test_login_email_auth_method_correct_password_change(self):
""" Tests login is succesful with a correct password change (login auth email). """
self.common_test_login_correct_password_change()
@override_settings(ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME_EMAIL)
def test_login_username_email_auth_method_correct_password_change(self):
""" Tests login is succesful with a correct password change (login auth username_email). """
self.common_test_login_correct_password_change()
def test_login_fail_no_input(self):
""" Tests login fails when you provide no username and no email (login auth username_email). """
get_user_model().objects.create_user('admin', 'email.login@gmail.com', 'password12')
data = {"username": '', "email": '', "password": ''}
response = self.client.post(self.login_url, data, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST)
@override_settings(ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME)
def test_login_username_auth_method_fail_no_input(self):
""" Tests login fails when you provide no username (login auth username). """
get_user_model().objects.create_user('admin', 'email.login@gmail.com', 'password12')
data = {"username": '', "email": "email.login@gmail.com", "password": 'password12'}
response = self.client.post(self.login_url, data, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST)
@override_settings(ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.EMAIL)
def test_login_email_auth_method_fail_no_input(self):
""" Tests login fails when you provide no username (login auth email). """
get_user_model().objects.create_user('admin', 'email.login@gmail.com', 'password12')
data = {"username": "admin", "email": '', "password": 'password12'}
response = self.client.post(self.login_url, data, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST)
@override_settings(ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME_EMAIL)
def test_login_username_email_auth_method_fail_no_input(self):
""" Tests login fails when you provide no username and no email (login auth username_email). """
get_user_model().objects.create_user('admin', 'email.login@gmail.com', 'password12')
data = {"username": '', "email": '', "password": 'password12'}
response = self.client.post(self.login_url, data, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST)
# need to check for token
# test login with password change
    # test login fails with wrong password change
def test_logout(self):
""" Tests basic logout functionality. """
self.create_user_and_login()
response = self.client.post(self.logout_url, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertEquals(response.content, '{"success":"Successfully logged out."}')
def test_logout_but_already_logged_out(self):
""" Tests logout when already logged out. """
self.create_user_and_login()
response = self.client.post(self.logout_url, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertEquals(response.content, '{"success":"Successfully logged out."}')
self.client.credentials() # remember to remove manual token credential
response = self.client.post(self.logout_url, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK, response.content)
self.assertEquals(response.content, '{"success":"Successfully logged out."}')
def test_change_password_basic(self):
""" Tests basic functionality of 'change of password'. """
self.create_user_and_login()
response = self.client.post(self.password_change_url, data=self.change_password_data, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertEquals(response.content, '{"success":"New password has been saved."}')
def test_change_password_basic_fails_not_authorised(self):
""" Tests basic functionality of 'change of password' fails if not authorised. """
get_user_model().objects.create_user('admin', 'admin@email.com', 'password12')
response = self.client.post(self.password_change_url, data=self.change_password_data, format='json')
self.assertEquals(response.status_code, status.HTTP_401_UNAUTHORIZED)
self.assertEquals(response.content, '{"detail":"Authentication credentials were not provided."}')
def common_change_password_login_fail_with_old_password(self, password_change_data):
self.create_user_and_login()
response = self.client.post(self.password_change_url, data=password_change_data, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.client.credentials() # Remove credentials
response = self.client.post(self.login_url, self.reusable_user_data, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST)
def common_change_password_login_pass_with_new_password(self, password_change_data):
self.create_user_and_login()
response = self.client.post(self.password_change_url, password_change_data, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.client.credentials() # Remove credentials
response = self.client.post(self.login_url, self.reusable_user_data_change_password, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
def common_change_password_login_fail_with_old_password_pass_with_new_password(self, password_change_data):
""" Tests change of password with old password fails but new password successes. """
self.create_user_and_login()
response = self.client.post(self.password_change_url, password_change_data, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK, response.content)
self.client.credentials() # Remove credentials
response = self.client.post(self.login_url, self.reusable_user_data, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST)
response = self.client.post(self.login_url, self.reusable_user_data_change_password, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK, response.content)
def test_change_password_login_fail_with_old_password(self):
""" Tests change of password with old password. """
self.common_change_password_login_fail_with_old_password(self.change_password_data)
def test_change_password_login_pass_with_new_password(self):
""" Tests change of password with new password. """
self.common_change_password_login_pass_with_new_password(self.change_password_data)
def test_change_password_login_fail_with_old_password_pass_with_new_password(self):
""" Tests change of password with old password fails but new password successes. """
self.common_change_password_login_fail_with_old_password_pass_with_new_password(self.change_password_data)
@override_settings(OLD_PASSWORD_FIELD_ENABLED=True)
def test_change_password_old_password_field_required_old_password_field_enabled(self):
""" Tests basic functionality of 'change of password' fails if old password not given as part of input (old password field enabled). """
self.create_user_and_login()
response = self.client.post(self.password_change_url, data=self.change_password_data, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEquals(response.content, '{"old_password":["This field is required."]}')
@override_settings(OLD_PASSWORD_FIELD_ENABLED=True)
def test_change_password_basic_old_password_field_enabled(self):
""" Tests basic functionality of 'change of password' (old password enabled). """
self.create_user_and_login()
response = self.client.post(self.password_change_url, data=self.change_password_data_old_password_field_enabled, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertEquals(response.content, '{"success":"New password has been saved."}')
@override_settings(OLD_PASSWORD_FIELD_ENABLED=True)
def test_change_password_basic_fails_not_authorised_old_password_field_enabled(self):
""" Tests basic functionality of 'change of password' fails if not authorised (old password field enabled). """
get_user_model().objects.create_user('admin', 'admin@email.com', 'password12')
response = self.client.post(self.password_change_url, data=self.change_password_data_old_password_field_enabled, format='json')
self.assertEquals(response.status_code, status.HTTP_401_UNAUTHORIZED)
self.assertEquals(response.content, '{"detail":"Authentication credentials were not provided."}')
@override_settings(OLD_PASSWORD_FIELD_ENABLED=True)
def test_change_password_login_fail_with_old_password_old_password_field_enabled(self):
""" Tests change of password with old password (old password field enabled). """
self.common_change_password_login_fail_with_old_password(self.change_password_data_old_password_field_enabled)
@override_settings(OLD_PASSWORD_FIELD_ENABLED=True)
def test_change_password_login_pass_with_new_password_old_password_field_enabled(self):
""" Tests change of password with new password (old password field enabled). """
self.common_change_password_login_pass_with_new_password(self.change_password_data_old_password_field_enabled)
@override_settings(OLD_PASSWORD_FIELD_ENABLED=True)
def test_change_password_login_fail_with_old_password_pass_with_new_password_old_password_field_enabled(self):
""" Tests change of password with old password fails but new password successes (old password field enabled). """
self.common_change_password_login_fail_with_old_password_pass_with_new_password(self.change_password_data_old_password_field_enabled)
"""
    Registration Tests
    ==================
"""
def common_test_registration_basic(self, data):
response = self.client.post(self.register_url, data, format='json')
self.assertEquals(response.status_code, status.HTTP_201_CREATED, response.content)
return response
@override_settings(ACCOUNT_EMAIL_REQUIRED=True, ACCOUNT_USERNAME_REQUIRED=True)
def test_registration_basic(self):
""" Tests basic functionality of registration. """
self.common_test_registration_basic(self.reusable_register_user_data)
@override_settings(ACCOUNT_EMAIL_REQUIRED=True, ACCOUNT_USERNAME_REQUIRED=False)
def test_registration_basic_no_username(self):
""" Tests basic functionality of registration (no username required). """
self.common_test_registration_basic(self.reusable_register_user_data_no_username)
@override_settings(ACCOUNT_EMAIL_REQUIRED=False, ACCOUNT_USERNAME_REQUIRED=True)
def test_registration_basic_no_email(self):
""" Tests basic functionality of registration (no username required). """
self.common_test_registration_basic(self.reusable_register_user_data_no_email)
@override_settings(ACCOUNTS_REGISTRATION_OPEN=False)
def test_registration_basic_registration_not_open(self):
""" Tests basic registration fails if registration is closed. """
response = self.client.post(self.register_url, self.reusable_register_user_data, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST, response.content)
@override_settings(ACCOUNT_EMAIL_VERIFICATION="none")
def test_registration_email_verification_not_necessary(self):
""" Tests you can log in without email verification """
self.common_test_registration_basic(self.reusable_register_user_data)
response = self.client.post(self.login_url, self.reusable_user_data, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
@override_settings(ACCOUNT_EMAIL_VERIFICATION="optional")
def test_registration_email_verification_neccessary(self):
""" Tests you can log in without email verification """
self.common_test_registration_basic(self.reusable_register_user_data)
response = self.client.post(self.login_url, self.reusable_user_data, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
def common_test_registration(self):
self.common_test_registration_basic(self.reusable_register_user_data1)
response = self.client.post(self.login_url, {'email': 'admin1@email.com', 'password': 'password12'}, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST)
def common_test_registration_email_verification_not_necessary_email(self):
self.common_test_registration_basic(self.reusable_register_user_data1)
response = self.client.post(self.login_url, {'email': 'admin1@email.com', 'password': 'password12'}, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
def common_test_registration_email_verification_not_necessary_username(self):
self.common_test_registration_basic(self.reusable_register_user_data1)
response = self.client.post(self.login_url, {'username': 'admin1', 'password': 'password12'}, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
@override_settings(ACCOUNT_EMAIL_VERIFICATION="none", ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.EMAIL)
def test_registration_email_verification_neccessary_email(self):
""" Tests you can log in without email verification """
self.common_test_registration_email_verification_not_necessary_email()
@override_settings(ACCOUNT_EMAIL_VERIFICATION="optional", ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.EMAIL)
def test_registration_email_verification_neccessary_optional_email(self):
""" Tests you can log in without email verification """
self.common_test_registration_email_verification_not_necessary_email()
@override_settings(ACCOUNT_EMAIL_VERIFICATION="none", ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME)
def test_registration_email_verification_neccessary_username(self):
""" Tests you can log in without email verification """
self.common_test_registration_email_verification_not_necessary_username()
@override_settings(ACCOUNT_EMAIL_VERIFICATION="optional", ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME)
def test_registration_email_verification_neccessary_optional_username(self):
""" Tests you can log in without email verification """
self.common_test_registration_email_verification_not_necessary_username()
@override_settings(ACCOUNT_EMAIL_VERIFICATION="none", ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME_EMAIL)
def test_registration_email_verification_neccessary_username_email(self):
""" Tests you canT log in without email verification for username & email auth. """
self.common_test_registration_basic(self.reusable_register_user_data1)
response = self.client.post(self.login_url, {'username': 'admin1', 'email': 'admin1@email.com', 'password': 'password12'}, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
@override_settings(ACCOUNT_EMAIL_VERIFICATION="optional", ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME_EMAIL)
def test_registration_email_verification_neccessary_optional_username_email(self):
""" Tests you canT log in without email verification for username & email auth. """
self.common_test_registration_basic(self.reusable_register_user_data1)
response = self.client.post(self.login_url, {'username': 'admin1', 'email': 'admin1@email.com', 'password': 'password12'}, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
@override_settings(ACCOUNT_EMAIL_VERIFICATION="mandatory", ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME)
def test_registration_email_verification_necessary_login_fail_username(self):
""" Tests you can log in without email verification """
self.common_test_registration_basic(self.reusable_register_user_data1)
response = self.client.post(self.login_url, {'username': 'admin1', 'password': 'password12'}, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST, response.content)
@override_settings(ACCOUNT_EMAIL_VERIFICATION="mandatory", ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.EMAIL)
def test_registration_email_verification_necessary_login_fail_email(self):
""" Tests you can log in without email verification """
self.common_test_registration_basic(self.reusable_register_user_data1)
response = self.client.post(self.login_url, {'email': 'admin1@email.com', 'password': 'password12'}, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST, response.content)
@override_settings(ACCOUNT_EMAIL_VERIFICATION="mandatory", ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME_EMAIL)
def test_registration_email_verification_necessary_login_fail_username_email(self):
""" Tests you can log in without email verification """
self.common_test_registration_basic({'username': 'admin_man', 'email': 'admin1@email.com', 'password1': 'password12', 'password2': 'password12'})
response = self.client.post(self.login_url, {'username': 'admin_man', 'password': 'password12'}, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST)
def common_registration_email_verification_neccessary_verified_login(self, login_data):
mail_count = len(mail.outbox)
reg_response = self.common_test_registration_basic(self.reusable_register_user_data1)
self.assertEquals(len(mail.outbox), mail_count + 1)
new_user = get_user_model().objects.latest('id')
login_response = self.client.post(self.login_url, login_data, format='json')
self.assertEquals(login_response.status_code, status.HTTP_400_BAD_REQUEST)
# verify email
email_confirmation = new_user.emailaddress_set.get(email=self.reusable_register_user_data1['email']).emailconfirmation_set.order_by('-created')[0]
verify_response = self.client.post(self.verify_url, {'key': email_confirmation.key}, format='json')
self.assertEquals(verify_response.status_code, status.HTTP_200_OK)
login_response = self.client.post(self.login_url, login_data, format='json')
self.assertEquals(login_response.status_code, status.HTTP_200_OK)
@override_settings(ACCOUNT_EMAIL_VERIFICATION="mandatory", ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME)
def test_registration_email_verification_neccessary_verified_login_username(self):
""" Tests you can log in without email verification """
self.common_registration_email_verification_neccessary_verified_login({'username': 'admin1', 'password': 'password12'})
@override_settings(ACCOUNT_EMAIL_VERIFICATION="mandatory", ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.EMAIL)
def test_registration_email_verification_neccessary_verified_login_email(self):
""" Tests you can log in without email verification """
self.common_registration_email_verification_neccessary_verified_login({'email': 'admin1@email.com', 'password': 'password12'})
@override_settings(ACCOUNT_EMAIL_VERIFICATION="mandatory", ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME_EMAIL)
def test_registration_email_verification_neccessary_verified_login_username_email(self):
""" Tests you can log in without email verification """
self.common_registration_email_verification_neccessary_verified_login({'username': 'admin1', 'password': 'password12'})
"""
Password Reset Tests
====================
"""
def test_password_reset(self):
""" Test basic functionality of password reset. """
get_user_model().objects.create_user('admin', 'admin@email.com', 'password12')
payload = {'email': 'admin@email.com'}
response = self.client.post(self.password_reset_url, payload, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertEquals(response.content, '{"success":"Password reset e-mail has been sent."}')
@override_settings(ACCOUNTS_PASSWORD_RESET_NOTIFY_EMAIL_NOT_IN_SYSTEM=True)
def test_password_reset_fail_no_user_with_email_no_notify_not_in_system(self):
""" Test basic functionality of password reset fails when there is no email on record (notify email not in system). """
payload = {'email': 'admin@email.com'}
response = self.client.post(self.password_reset_url, payload, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEquals(response.content, '{"error":"User with email doesn\'t exist. Did not send reset email."}')
@override_settings(ACCOUNTS_PASSWORD_RESET_NOTIFY_EMAIL_NOT_IN_SYSTEM=False)
def test_password_reset_no_user_with_email_no_notify_not_in_system(self):
""" Test basic functionality of password reset fails when there is no email on record. """
payload = {'email': 'admin@email.com'}
response = self.client.post(self.password_reset_url, payload, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertEquals(response.content, '{"success":"Password reset e-mail has been sent."}')
def test_password_reset_confirm_fail_invalid_token(self):
""" Test password reset confirm fails if token is invalid. """
user = get_user_model().objects.create_user('admin', 'admin@email.com', 'password12')
url_kwargs = self._generate_uid_and_token(user)
data = {
'new_password1': 'new_password',
'new_password2': 'new_password',
'uid': url_kwargs['uid'],
'token': '-wrong-token-'
}
response = self.client.post(self.rest_password_reset_confirm_url, data, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEquals(response.content, '{"token":["Invalid value"]}')
def test_password_reset_confirm_fail_invalid_uid(self):
""" Test password reset confirm fails if uid is invalid. """
user = get_user_model().objects.create_user('admin', 'admin@email.com', 'password12')
url_kwargs = self._generate_uid_and_token(user)
data = {
'new_password1': 'new_password',
'new_password2': 'new_password',
'uid': 0,
'token': url_kwargs['token']
}
response = self.client.post(self.rest_password_reset_confirm_url, data, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEquals(response.content, '{"uid":["Invalid value"]}')
def test_password_reset_confirm_fail_passwords_not_the_same(self):
""" Test password reset confirm fails if uid is invalid. """
user = get_user_model().objects.create_user('admin', 'admin@email.com', 'password12')
url_kwargs = self._generate_uid_and_token(user)
data = {
'new_password1': 'new_password',
'new_password2': 'new_not_the_same_password',
'uid': url_kwargs['uid'],
'token': url_kwargs['token']
}
response = self.client.post(self.rest_password_reset_confirm_url, data, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEquals(response.content, '{"new_password2":["The two password fields didn\'t match."]}')
def test_password_reset_confirm_login(self):
""" Tests password reset confirm works -> can login afterwards. """
user = get_user_model().objects.create_user('admin', 'admin@email.com', 'password12')
url_kwargs = self._generate_uid_and_token(user)
data = {
'new_password1': 'new_password',
'new_password2': 'new_password',
'uid': url_kwargs['uid'],
'token': url_kwargs['token']
}
response = self.client.post(self.rest_password_reset_confirm_url, data, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
response = self.client.post(self.login_url, {'username': 'admin', 'email': 'admin@email.com', 'password': 'new_password'}, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
def test_password_reset_confirm_login_fails_with_old_password(self):
""" Tests password reset confirm fails with old password. """
user = get_user_model().objects.create_user('admin', 'admin@email.com', 'password12')
url_kwargs = self._generate_uid_and_token(user)
data = {
'new_password1': 'new_password',
'new_password2': 'new_password',
'uid': url_kwargs['uid'],
'token': url_kwargs['token']
}
response = self.client.post(self.rest_password_reset_confirm_url, data, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
response = self.client.post(self.login_url, {'username': 'admin', 'email': 'admin@email.com', 'password': 'password12'}, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST)
"""
User Detail Tests
=================
"""
def test_user_details_get(self):
""" Test to retrieve user details. """
self.create_user_and_login()
response = self.client.get(self.user_url, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertEquals(response.content, '{"username":"admin","email":"admin@email.com","first_name":"","last_name":""}')
def test_user_details_put(self):
""" Test to put update user details. """
self.create_user_and_login()
response = self.client.put(self.user_url, {"username":"changed","email":"changed@email.com","first_name":"changed","last_name":"name"}, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertEquals(response.content, '{"username":"changed","email":"changed@email.com","first_name":"changed","last_name":"name"}')
def test_user_details_patch(self):
""" Test to patch update user details. """
self.create_user_and_login()
response = self.client.patch(self.user_url, {'username': 'changed_username', 'email': 'changed@email.com'}, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertEquals(response.content, '{"username":"changed_username","email":"changed@email.com","first_name":"","last_name":""}')
def test_user_details_put_not_authenticated(self):
""" Test to put update user details. """
get_user_model().objects.create_user('admin', 'admin@email.com', 'password12')
response = self.client.put(self.user_url, {"username":"changed","email":"changed@email.com","first_name":"changed","last_name":"name"}, format='json')
self.assertEquals(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_user_details_patch_not_authenticated(self):
""" Test to patch update user details. """
get_user_model().objects.create_user('admin', 'admin@email.com', 'password12')
response = self.client.patch(self.user_url, {'username': 'changed_username', 'email': 'changed@email.com'}, format='json')
self.assertEquals(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_user_details_get_not_authenticated(self):
""" Test to retrieve user details. """
get_user_model().objects.create_user('admin', 'admin@email.com', 'password12')
response = self.client.get(self.user_url, format='json')
self.assertEquals(response.status_code, status.HTTP_401_UNAUTHORIZED)
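# The social-login tests below stub out the Facebook Graph API call with the
# `responses` library, so no real network traffic is needed.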
class TestAccountsSocial(APITestCase):
""" Tests normal for social login. """
urls = 'accounts.test_social_urls'
def setUp(self):
self.fb_login_url = reverse('fb_login')
social_app = SocialApp.objects.create(
provider='facebook',
name='Facebook',
client_id='123123123',
secret='321321321',
)
site = Site.objects.get_current()
social_app.sites.add(site)
self.graph_api_url = GRAPH_API_URL + '/me'
@responses.activate
def test_social_auth(self):
""" Tests Social Login. """
resp_body = '{"id":"123123123123","first_name":"John","gender":"male","last_name":"Smith","link":"https:\\/\\/www.facebook.com\\/john.smith","locale":"en_US","name":"John Smith","timezone":2,"updated_time":"2014-08-13T10:14:38+0000","username":"john.smith","verified":true}' # noqa
responses.add(
responses.GET,
self.graph_api_url,
body=resp_body,
status=200,
content_type='application/json'
)
users_count = get_user_model().objects.all().count()
response = self.client.post(self.fb_login_url, {'access_token': 'abc123'}, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertIn('key', response.data)
self.assertEqual(get_user_model().objects.all().count(), users_count + 1)
@responses.activate
def test_social_auth_only_one_user_created(self):
""" Tests Social Login. """
resp_body = '{"id":"123123123123","first_name":"John","gender":"male","last_name":"Smith","link":"https:\\/\\/www.facebook.com\\/john.smith","locale":"en_US","name":"John Smith","timezone":2,"updated_time":"2014-08-13T10:14:38+0000","username":"john.smith","verified":true}' # noqa
responses.add(
responses.GET,
self.graph_api_url,
body=resp_body,
status=200,
content_type='application/json'
)
users_count = get_user_model().objects.all().count()
response = self.client.post(self.fb_login_url, {'access_token': 'abc123'}, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertIn('key', response.data)
self.assertEqual(get_user_model().objects.all().count(), users_count + 1)
# make sure that second request will not create a new user
response = self.client.post(self.fb_login_url, {'access_token': 'abc123'}, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
self.assertIn('key', response.data)
self.assertEqual(get_user_model().objects.all().count(), users_count + 1)
@responses.activate
def test_failed_social_auth(self):
# fake response
responses.add(
responses.GET,
self.graph_api_url,
body='',
status=400,
content_type='application/json'
)
response = self.client.post(self.fb_login_url, {'access_token': 'abc123'}, format='json')
self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST)
|
JTarball/docker-django-polymer
|
docker/app/app/backend/apps/accounts/test_views.py
|
Python
|
gpl-2.0
| 42,275 | 0.005417 |
import web
import base
import local
def orNone(a, b):
if a is None:
return b
return a
class Field(object):
def __init__(self, name, description, primary=False, validator=None):
self.name = name
self.primary = primary
self.description = description
        if validator is None:
            validator = lambda *args: (True, '')
        self.validator = validator
        self.record = None  # set later by Record.__init__
def full_name(self):
return self.record.record_name + '_' + self.name
def validate(self, value):
return self.validator(value)
def html_input(self, default_values):
return "<input type='text' id='%s' name='%s' value='%s'>" % (
self.full_name(),
self.full_name(),
default_values.get(self.full_name(), '')
)
class PasswordField(Field):
def __init__(self, *args, **kwargs):
Field.__init__(self, *args, **kwargs)
def validate(self, value):
#if len(value) < 8:
# return False, 'pass_too_short'
return True, ''
def html_input(self, default_values):
return "<input type='password' id='%s' name='%s'>" % (
self.full_name(),
self.full_name(),
)
class Record(object):
def __init__(self, record_name,
table=None, fields=[],
role=None,
add_title=None,
admin_title=None,
add_action=None,
remove_action=None):
self.record_name = record_name
self.table = table
self.fields = fields
self.add_action = add_action
self.remove_action = remove_action
self.primary_key = None
self._check_primary_key()
for field in self.fields:
field.record = self
self.role = role
self.add_title = orNone(add_title, 'Dar de alta %s' % (self.record_name,))
self.admin_title = orNone(admin_title, 'Administrar %s' % (self.record_name,))
def all_elements(self):
if self.table is None:
return []
return local.db.select(self.table)
def _check_primary_key(self):
nprimary = 0
for field in self.fields:
if field.primary:
nprimary += 1
self.primary_key = field
if nprimary != 1:
raise Exception('Warning: %s should have exactly one primary key' % (
self.record_name)
)
def class_admin(self, parent_class=base.Content):
metaself = self
class C(parent_class):
role_required = self.role
def request(self):
"Main administration page."
return local.render.admin_list(
record=metaself,
)
return C
def class_add_service(self, parent_class=base.Content):
metaself = self
class C(parent_class):
role_required = self.role
def request(self):
"Render the form for creating instances of this record."
cookies = web.cookies()
input = web.input()
# get the default value for each field
# from the cookies
default_values = {}
for field in metaself.fields:
default = cookies.get('last_' + field.full_name(), None)
if default is None:
default = ''
default_values[field.full_name()] = default
if input.get('errfield', False):
focus_on = input.errfield
else:
focus_on = metaself.fields[0].full_name()
return local.render.add_form(
input=web.input(),
action='/%s/add' % (metaself.record_name,),
description=metaself.add_title,
fields=metaself.fields,
default_values=default_values,
focus=focus_on
)
return C
def class_add(self, parent_class=base.Action):
metaself = self
class C(parent_class):
role_required = self.role
def request(self):
"Add an instance of this record."
data = web.input()
# Check that the values for each field are valid
bad_fields = []
errmsg = False
any_error = False
dictionary = {}
for field in metaself.fields:
value = data.get(field.full_name())
dictionary[field.name] = value
web.setcookie('last_' + field.full_name(), value)
ok, message = field.validate(value)
if not ok:
any_error = True
bad_fields.append('error_' + field.full_name())
if not errmsg:
errmsg = message
if any_error:
raise web.seeother('/%s/add_service?errmsg=%s%s' % (
metaself.record_name,
errmsg,
''.join(['&%s=1' % (f,) for f in bad_fields])
))
if metaself.table is not None:
# Check that there are no repeated keys
primary_value = dictionary[metaself.primary_key.name]
it = local.db.query('select count(*) as total from ' + metaself.table + \
' where ' + metaself.primary_key.name + '=$primary_value',
vars=locals())
if it[0].total > 0:
raise web.seeother('/%s/add_service?errmsg=already_exists&error_%s=1' % (
metaself.record_name,
metaself.primary_key.full_name()
))
if metaself.table is not None and metaself.add_action is None:
local.db.insert(metaself.table, **dictionary)
if metaself.add_action is not None:
metaself.add_action(dictionary)
else:
raise web.seeother('/%s/admin' % (metaself.record_name,))
return C
def class_remove(self, parent_class=base.Action):
metaself = self
class C(parent_class):
role_required = self.role
def request(self):
dictionary = {}
if metaself.remove_action is not None:
metaself.remove_action(dictionary)
return C
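# Illustrative usage sketch (the record/field names below are made up, not
# part of this module):
#
#   user_record = Record(
#       'user',
#       table='users',
#       fields=[
#           Field('email', 'E-mail address', primary=True),
#           PasswordField('password', 'Password'),
#       ],
#       role='admin',
#   )
#
# The class_admin / class_add_service / class_add / class_remove factories
# would then be mapped to URLs such as /user/admin, /user/add_service,
# /user/add and /user/remove, matching the redirects used above.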
|
mateoqac/unqTip
|
language/vgbs/web/forms.py
|
Python
|
gpl-3.0
| 6,865 | 0.004661 |
# -*- coding: utf-8 -*-
# Licensed under the MIT license
# http://opensource.org/licenses/mit-license.php
# Copyright 2014, Hartmut Goebel <h.goebel@goebel-consult.de>
"""
Test cases for L{backends.ampache_storage}
"""
from lxml import etree
from twisted.trial import unittest
from coherence.backends import ampache_storage
SONG = '''
<!-- taken from https://github.com/ampache/ampache/wiki/XML-API
but the original was not valid XML, so we can not trust it
-->
<root>
<song id="3180">
<title>Hells Bells</title>
<artist id="129348">AC/DC</artist>
<album id="2910">Back in Black</album>
<tag id="2481" count="3">Rock & Roll</tag>
<tag id="2482" count="1">Rock</tag>
<tag id="2483" count="1">Roll</tag>
<track>4</track>
<time>234</time>
<url>http://localhost/play/index.php?oid=123908...</url>
<size>654321</size>
<art>http://localhost/image.php?id=129348</art>
<preciserating>3</preciserating>
<rating>2.9</rating>
</song>
</root>
'''
SONG_370 = '''
<!-- real-world example from Ampache 3.7.0 -->
<root>
<song id="3440">
<title><![CDATA[Achilles Last Stand]]></title>
<artist id="141"><![CDATA[Led Zeppelin]]></artist>
<album id="359"><![CDATA[Presence]]></album>
<tag id="" count="0"><![CDATA[]]></tag>
<filename><![CDATA[/mnt/Musique/Led Zeppelin/Presence/01 - Achilles Last Stand.mp3]]></filename>
<track>1</track>
<time>625</time>
<year>1976</year>
<bitrate>248916</bitrate>
<mode>vbr</mode>
<mime>audio/mpeg</mime>
<url><![CDATA[http://songserver/ampache/play/index.php?ssid=1e11a4&type=song&oid=3440&uid=4&name=Led%20Zeppelin%20-%20Achilles%20Last%20Stand.mp3]]></url>
<size>19485595</size>
<mbid></mbid>
<album_mbid></album_mbid>
<artist_mbid></artist_mbid>
<art><![CDATA[http://songserver/ampache/image.php?id=359&object_type=album&auth=1e11a40&name=art.]]></art>
<preciserating>0</preciserating>
<rating>0</rating>
<averagerating></averagerating>
</song>
</root>
'''
class DummyStore:
def __init__(self):
pass
proxy = False
class TestAmpache(unittest.TestCase):
def setUp(self):
pass
def test_song(self):
"""Test songs with XML from Ampache 3.7.0"""
doc = etree.fromstring(SONG)
song = doc.find('song')
store = DummyStore()
track = ampache_storage.Track(store, song)
self.assertEqual(track.get_id(), 'song.3180')
self.assertEqual(track.parent_id, 'album.2910')
self.assertEqual(track.duration, '0:03:54')
self.assertEqual(track.get_url(),
'http://localhost/play/index.php?oid=123908...')
self.assertEqual(track.get_name(), 'Hells Bells')
self.assertEqual(track.title, 'Hells Bells')
self.assertEqual(track.artist, 'AC/DC')
self.assertEqual(track.album, 'Back in Black')
self.assertEqual(track.genre, None)
self.assertEqual(track.track_nr, '4')
self.assertEqual(track.cover, 'http://localhost/image.php?id=129348')
self.assertEqual(track.mimetype, 'audio/mpeg') # guessed
self.assertEqual(track.size, 654321)
self.assertIs(track.get_path(), None)
self.assertEqual(track.get_children(), [])
self.assertEqual(track.get_child_count(), 0)
def test_song_370(self):
"""Test songs with XML from Ampache 3.7.0"""
doc = etree.fromstring(SONG_370)
song = doc.find('song')
store = DummyStore()
track = ampache_storage.Track(store, song)
self.assertEqual(track.get_id(), 'song.3440')
self.assertEqual(track.parent_id, 'album.359')
self.assertEqual(track.duration, '0:10:25')
self.assertEqual(track.get_url(), 'http://songserver/ampache/play/index.php?ssid=1e11a4&type=song&oid=3440&uid=4&name=Led%20Zeppelin%20-%20Achilles%20Last%20Stand.mp3')
self.assertEqual(track.get_name(), 'Achilles Last Stand')
self.assertEqual(track.title, 'Achilles Last Stand')
self.assertEqual(track.artist, 'Led Zeppelin')
self.assertEqual(track.album, 'Presence')
self.assertEqual(track.genre, None)
self.assertEqual(track.track_nr, '1')
self.assertEqual(track.cover, 'http://songserver/ampache/image.php?id=359&object_type=album&auth=1e11a40&name=art.')
self.assertEqual(track.mimetype, 'audio/mpeg')
self.assertEqual(track.size, 19485595)
self.assertIs(track.get_path(), None)
self.assertEqual(track.get_children(), [])
self.assertEqual(track.get_child_count(), 0)
|
unintended/Cohen
|
tests/backends/test_ampache_storage.py
|
Python
|
mit
| 4,553 | 0.001098 |
from pygame import K_UP, K_DOWN, K_LEFT, K_RIGHT
from Caracter import Caracter
class CommandHandler(object):
#0 1 2 3 4 5 6 7 8 9 10 11 12 13
_automata_transitions= [[11,11,0, 4, 0, 0, 11,11,0, 11,0, 11,13,0],#up
[9, 2, 0, 0, 0, 0, 9, 9, 0, 0, 0, 12,0, 0],#down
[0, 6, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],#left
[1, 0, 0, 0, 5, 0, 7, 0, 0, 0, 0, 1, 0, 0]]#right
# The final states
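    # (3 = roll, 7 = sprint, 9 = get down, 11 = jump, 13 = climb, as handled
    # in refresh_state below)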
final_list = [3,7,9,11,13]
final_state = 0
def __init__(self, caracter):
self.caracter = caracter
self.actual_state = 0
def refresh_state(self, in_key):
self.final_state = 0
input_code = -1
if in_key == K_UP: input_code = 0
elif in_key == K_DOWN: input_code = 1
elif in_key == K_LEFT: input_code = 2
elif in_key == K_RIGHT: input_code = 3
self.actual_state = self._automata_transitions[input_code][self.actual_state]
if self.actual_state == 3:
if self.caracter.onGround == False:
self.caracter.pendingRoll = True
elif self.actual_state == 7: self.caracter.doSprint()
elif self.actual_state == 9:
if self.caracter.onGround == False:
self.caracter.pendingGetDown = True
else:
self.caracter.doGetDown()
elif self.actual_state == 11: self.caracter.doJump()
elif self.actual_state == 13: self.caracter.doClimb()
#print "estado atual:" + str(self.actual_state)
        if self.actual_state in self.final_list:
            self.final_state = self.actual_state
            self.actual_state = 0
            return self.final_state
        return self.actual_state
|
r0qs/chubby
|
Fonte/Command.py
|
Python
|
gpl-3.0
| 1,775 | 0.023662 |
"""
CRISPR_db_parser
Madeleine Bonsma
March 7, 2015
Updated May 3, 2016
This script takes a list of spacers downloaded from the CRISPRdb
website and splits them into individual files, one file per organism.
Result files are saved in "data/spacers".
"""
import linecache
import os
# CRISPR db parser
# MB Mar 07 2015
filename = "data/spacerdatabase.txt" # File from CRISPRdb to sort
spacer_db = open(filename, "r")
# check if directory for saving exists
directory = "data/spacers"
if not os.path.exists(directory):
os.makedirs(directory)
# places to dump accession numbers during execution
refseq_list = []
refseq_dict = {}
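# Each header line appears to have the form
#   >ACCESSION_locus_position|ACCESSION_locus_position|...
# where the nine characters after '>' are the NCBI accession and organisms
# sharing the same spacer are separated by '|'.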
for num, line in enumerate(spacer_db, 1):
check = True # awkward while loop
if line[0] == ">": # use the headers, indicated by >, to sort
# delete 1st character to make loop same each time around
line = line[1:]
counter = 0
while check:
counter += 1
# this part of the header is the NCBI accession
refseq = line[0:9]
if refseq not in refseq_list:
# open new file if it's a new bacteria
refseq_dict[refseq] = open(
"data/spacers/%s.fasta" % refseq,
"w"
)
if "|" in line:
# if more than one bacteria contain spacer
i = line.index("|")
# include in header the locus identifier and spacer
# position identifier
writeline = line[10:i]
writeline2 = writeline.replace('_', '.')
else:
# if it's only one bacteria
writeline = line[10:]
writeline2 = writeline.replace('_', '.')
# write header and spacer to file
refseq_dict[refseq].write(">" + writeline2 + "\n")
refseq_dict[refseq].write(
linecache.getline("%s" % filename, num + 1)
)
# since the file is organized alphabetically by the
# first bacteria in the header, if we see a different
# first bacteria we can close the previous file to free
# up space. This might be buggy.
if counter == 1:
try:
refseq_prev = linecache.getline(
"%s" % filename,
num - 2
)[1:10]
refseq_dict[refseq_prev].close()
except:
# throws exception on the first time through,
# otherwise wouldn't
pass
refseq_list.append(refseq)
if refseq in refseq_list:
if "|" in line:
i = line.index("|")
# include in header the locus identifier and spacer
# position identifier
writeline = line[10:i]
writeline2 = writeline.replace('_', '.')
else:
writeline = line[10:]
writeline2 = writeline.replace('_', '.')
refseq_dict[refseq].write(">" + writeline2 + "\n")
refseq_dict[refseq].write(
linecache.getline("%s" % filename, num + 1)
)
try:
i = line.index("|")
# change the header so that the next bacteria is up for
# the loop
line = line[i + 1:]
except:
check = False
for key in refseq_dict:
if not refseq_dict[key].closed:
refseq_dict[key].close()
spacer_db.close()
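# --- Illustrative sketch (not part of the parser above) ----------------------
# Shows how one header line is split, assuming the layout implied by the
# slicing above: a 9-character accession, an underscore, then a locus/spacer
# identifier, with additional organisms separated by "|". The header used in
# the example is synthetic.
def split_header(header):
    """Yield (accession, identifier) pairs from one '>' header line."""
    body = header.lstrip(">").rstrip("\n")
    for part in body.split("|"):
        yield part[0:9], part[10:].replace("_", ".")

if __name__ == "__main__":
    for acc, ident in split_header(">NC_000001_12_3|NC_000002_4_1"):
        print(acc + " " + ident)  # NC_000001 12.3, then NC_000002 4.1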
|
goyalsid/phageParser
|
parserscripts/crispr_db_parser.py
|
Python
|
mit
| 3,791 | 0.000528 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Column
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import Table
from sqlalchemy import text
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
consumers = Table("consumers", meta, autoload=True)
if not hasattr(consumers.c, "generation"):
# This is adding a column to an existing table, so the server_default
# bit will make existing rows 0 for that column.
consumers.create_column(Column("generation", Integer, default=0,
server_default=text("0"), nullable=False))
|
rahulunair/nova
|
nova/db/sqlalchemy/api_migrations/migrate_repo/versions/059_add_consumer_generation.py
|
Python
|
apache-2.0
| 1,178 | 0.000849 |
from Screen import Screen
from Screens.DefaultWizard import DefaultWizard
from ServiceScan import ServiceScan
from Components.config import config, ConfigSubsection, ConfigSelection, \
ConfigYesNo, ConfigInteger, getConfigListEntry, ConfigSlider, ConfigEnableDisable
from Components.ActionMap import NumberActionMap, ActionMap
from Components.ConfigList import ConfigListScreen
from Components.NimManager import nimmanager, getConfigSatlist
from Components.Label import Label
from Tools.Directories import resolveFilename, SCOPE_DEFAULTPARTITIONMOUNTDIR, SCOPE_DEFAULTDIR, SCOPE_DEFAULTPARTITION
from Tools.HardwareInfo import HardwareInfo
from Screens.MessageBox import MessageBox
from enigma import eTimer, eDVBFrontendParametersSatellite, eComponentScan, \
eDVBSatelliteEquipmentControl, eDVBFrontendParametersTerrestrial, \
eDVBFrontendParametersCable, eConsoleAppContainer, eDVBResourceManager
def buildTerTransponder(frequency,
inversion=2, bandwidth = 3, fechigh = 6, feclow = 6,
modulation = 2, transmission = 2, guard = 4,
hierarchy = 4):
# print "freq", frequency, "inv", inversion, "bw", bandwidth, "fech", fechigh, "fecl", feclow, "mod", modulation, "tm", transmission, "guard", guard, "hierarchy", hierarchy
parm = eDVBFrontendParametersTerrestrial()
parm.frequency = frequency
parm.inversion = inversion
parm.bandwidth = bandwidth
parm.code_rate_HP = fechigh
parm.code_rate_LP = feclow
parm.modulation = modulation
parm.transmission_mode = transmission
parm.guard_interval = guard
parm.hierarchy = hierarchy
return parm
def getInitialTransponderList(tlist, pos):
list = nimmanager.getTransponders(pos)
for x in list:
if x[0] == 0: #SAT
parm = eDVBFrontendParametersSatellite()
parm.frequency = x[1]
parm.symbol_rate = x[2]
parm.polarisation = x[3]
parm.fec = x[4]
parm.inversion = x[7]
parm.orbital_position = pos
parm.system = x[5]
parm.modulation = x[6]
parm.rolloff = x[8]
parm.pilot = x[9]
tlist.append(parm)
def getInitialCableTransponderList(tlist, nim):
list = nimmanager.getTranspondersCable(nim)
for x in list:
if x[0] == 1: #CABLE
parm = eDVBFrontendParametersCable()
parm.frequency = x[1]
parm.symbol_rate = x[2]
parm.modulation = x[3]
parm.fec_inner = x[4]
parm.inversion = parm.Inversion_Unknown
#print "frequency:", x[1]
#print "symbol_rate:", x[2]
#print "modulation:", x[3]
#print "fec_inner:", x[4]
#print "inversion:", 2
tlist.append(parm)
def getInitialTerrestrialTransponderList(tlist, region):
list = nimmanager.getTranspondersTerrestrial(region)
#self.transponders[self.parsedTer].append((2,freq,bw,const,crh,crl,guard,transm,hierarchy,inv))
#def buildTerTransponder(frequency, inversion = 2, bandwidth = 3, fechigh = 6, feclow = 6,
#modulation = 2, transmission = 2, guard = 4, hierarchy = 4):
for x in list:
if x[0] == 2: #TERRESTRIAL
parm = buildTerTransponder(x[1], x[9], x[2], x[4], x[5], x[3], x[7], x[6], x[8])
tlist.append(parm)
cable_bands = {
"DVBC_BAND_EU_VHF_I" : 1 << 0,
"DVBC_BAND_EU_MID" : 1 << 1,
"DVBC_BAND_EU_VHF_III" : 1 << 2,
"DVBC_BAND_EU_SUPER" : 1 << 3,
"DVBC_BAND_EU_HYPER" : 1 << 4,
"DVBC_BAND_EU_UHF_IV" : 1 << 5,
"DVBC_BAND_EU_UHF_V" : 1 << 6,
"DVBC_BAND_US_LO" : 1 << 7,
"DVBC_BAND_US_MID" : 1 << 8,
"DVBC_BAND_US_HI" : 1 << 9,
"DVBC_BAND_US_SUPER" : 1 << 10,
"DVBC_BAND_US_HYPER" : 1 << 11,
}
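# --- Illustrative sketch (not used by the scanner) ----------------------------
# startCableTransponderSearch() below builds its --scan-bands argument by
# OR-ing the single-bit flags above; the helper here shows the same idea for a
# hand-picked set of bands.
def _bands_mask(names):
	mask = 0
	for name in names:
		mask |= cable_bands[name]
	return mask
# e.g. _bands_mask(["DVBC_BAND_EU_MID", "DVBC_BAND_EU_SUPER"]) == (1 << 1) | (1 << 3) == 10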
class CableTransponderSearchSupport:
# def setCableTransponderSearchResult(self, tlist):
# pass
# def cableTransponderSearchFinished(self):
# pass
def tryGetRawFrontend(self, feid):
res_mgr = eDVBResourceManager.getInstance()
if res_mgr:
raw_channel = res_mgr.allocateRawChannel(self.feid)
if raw_channel:
frontend = raw_channel.getFrontend()
if frontend:
frontend.closeFrontend() # immediate close...
del frontend
del raw_channel
return True
return False
def cableTransponderSearchSessionClosed(self, *val):
print "cableTransponderSearchSessionClosed, val", val
self.cable_search_container.appClosed.remove(self.cableTransponderSearchClosed)
self.cable_search_container.dataAvail.remove(self.getCableTransponderData)
if val and len(val):
if val[0]:
self.setCableTransponderSearchResult(self.__tlist)
else:
self.cable_search_container.sendCtrlC()
self.setCableTransponderSearchResult(None)
self.cable_search_container = None
self.cable_search_session = None
self.__tlist = None
self.cableTransponderSearchFinished()
def cableTransponderSearchClosed(self, retval):
print "cableTransponderSearch finished", retval
self.cable_search_session.close(True)
def getCableTransponderData(self, str):
#prepend any remaining data from the previous call
str = self.remainingdata + str
#split in lines
lines = str.split('\n')
#'str' should end with '\n', so when splitting, the last line should be empty. If this is not the case, we received an incomplete line
if len(lines[-1]):
#remember this data for next time
self.remainingdata = lines[-1]
lines = lines[0:-1]
else:
self.remainingdata = ""
for line in lines:
data = line.split()
if len(data):
if data[0] == 'OK':
print str
parm = eDVBFrontendParametersCable()
qam = { "QAM16" : parm.Modulation_QAM16,
"QAM32" : parm.Modulation_QAM32,
"QAM64" : parm.Modulation_QAM64,
"QAM128" : parm.Modulation_QAM128,
"QAM256" : parm.Modulation_QAM256 }
inv = { "INVERSION_OFF" : parm.Inversion_Off,
"INVERSION_ON" : parm.Inversion_On,
"INVERSION_AUTO" : parm.Inversion_Unknown }
fec = { "FEC_AUTO" : parm.FEC_Auto,
"FEC_1_2" : parm.FEC_1_2,
"FEC_2_3" : parm.FEC_2_3,
"FEC_3_4" : parm.FEC_3_4,
"FEC_5_6": parm.FEC_5_6,
"FEC_7_8" : parm.FEC_7_8,
"FEC_8_9" : parm.FEC_8_9,
"FEC_NONE" : parm.FEC_None }
parm.frequency = int(data[1])
parm.symbol_rate = int(data[2])
parm.fec_inner = fec[data[3]]
parm.modulation = qam[data[4]]
parm.inversion = inv[data[5]]
self.__tlist.append(parm)
tmpstr = _("Try to find used Transponders in cable network.. please wait...")
tmpstr += "\n\n"
tmpstr += data[1]
tmpstr += " kHz "
tmpstr += data[0]
self.cable_search_session["text"].setText(tmpstr)
def startCableTransponderSearch(self, nim_idx):
if not self.tryGetRawFrontend(nim_idx):
self.session.nav.stopService()
if not self.tryGetRawFrontend(nim_idx):
if self.session.pipshown: # try to disable pip
self.session.pipshown = False
del self.session.pip
if not self.tryGetRawFrontend(nim_idx):
self.cableTransponderSearchFinished()
return
self.__tlist = [ ]
self.remainingdata = ""
self.cable_search_container = eConsoleAppContainer()
self.cable_search_container.appClosed.append(self.cableTransponderSearchClosed)
self.cable_search_container.dataAvail.append(self.getCableTransponderData)
cableConfig = config.Nims[nim_idx].cable
tunername = nimmanager.getNimName(nim_idx)
try:
bus = nimmanager.getI2CDevice(nim_idx)
if bus is None:
print "ERROR: could not get I2C device for nim", nim_idx, "for cable transponder search"
bus = 2
except:
# older API
if nim_idx < 2:
if HardwareInfo().get_device_name() == "dm500hd":
bus = 2
else:
bus = nim_idx
else:
if nim_idx == 2:
bus = 2 # DM8000 first nim is /dev/i2c/2
else:
					bus = 4 # DM8000 second nim is /dev/i2c/4
if tunername == "CXD1981":
cmd = "cxd1978 --init --scan --verbose --wakeup --inv 2 --bus %d" % bus
else:
cmd = "tda1002x --init --scan --verbose --wakeup --inv 2 --bus %d" % bus
if cableConfig.scan_type.value == "bands":
cmd += " --scan-bands "
bands = 0
if cableConfig.scan_band_EU_VHF_I.value:
bands |= cable_bands["DVBC_BAND_EU_VHF_I"]
if cableConfig.scan_band_EU_MID.value:
bands |= cable_bands["DVBC_BAND_EU_MID"]
if cableConfig.scan_band_EU_VHF_III.value:
bands |= cable_bands["DVBC_BAND_EU_VHF_III"]
if cableConfig.scan_band_EU_UHF_IV.value:
bands |= cable_bands["DVBC_BAND_EU_UHF_IV"]
if cableConfig.scan_band_EU_UHF_V.value:
bands |= cable_bands["DVBC_BAND_EU_UHF_V"]
if cableConfig.scan_band_EU_SUPER.value:
bands |= cable_bands["DVBC_BAND_EU_SUPER"]
if cableConfig.scan_band_EU_HYPER.value:
bands |= cable_bands["DVBC_BAND_EU_HYPER"]
if cableConfig.scan_band_US_LOW.value:
bands |= cable_bands["DVBC_BAND_US_LO"]
if cableConfig.scan_band_US_MID.value:
bands |= cable_bands["DVBC_BAND_US_MID"]
if cableConfig.scan_band_US_HIGH.value:
bands |= cable_bands["DVBC_BAND_US_HI"]
if cableConfig.scan_band_US_SUPER.value:
bands |= cable_bands["DVBC_BAND_US_SUPER"]
if cableConfig.scan_band_US_HYPER.value:
bands |= cable_bands["DVBC_BAND_US_HYPER"]
cmd += str(bands)
else:
cmd += " --scan-stepsize "
cmd += str(cableConfig.scan_frequency_steps.value)
if cableConfig.scan_mod_qam16.value:
cmd += " --mod 16"
if cableConfig.scan_mod_qam32.value:
cmd += " --mod 32"
if cableConfig.scan_mod_qam64.value:
cmd += " --mod 64"
if cableConfig.scan_mod_qam128.value:
cmd += " --mod 128"
if cableConfig.scan_mod_qam256.value:
cmd += " --mod 256"
if cableConfig.scan_sr_6900.value:
cmd += " --sr 6900000"
if cableConfig.scan_sr_6875.value:
cmd += " --sr 6875000"
if cableConfig.scan_sr_ext1.value > 450:
cmd += " --sr "
cmd += str(cableConfig.scan_sr_ext1.value)
cmd += "000"
if cableConfig.scan_sr_ext2.value > 450:
cmd += " --sr "
cmd += str(cableConfig.scan_sr_ext2.value)
cmd += "000"
print "TDA1002x CMD is", cmd
self.cable_search_container.execute(cmd)
tmpstr = _("Try to find used transponders in cable network.. please wait...")
tmpstr += "\n\n..."
self.cable_search_session = self.session.openWithCallback(self.cableTransponderSearchSessionClosed, MessageBox, tmpstr, MessageBox.TYPE_INFO)
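# --- Illustrative sketch (not used above) -------------------------------------
# getCableTransponderData() has to cope with dataAvail delivering arbitrary
# chunks: it keeps any trailing partial line and prepends it to the next
# chunk. The standalone helper below demonstrates the same buffering idea on
# plain strings (the example line is made up).
def _feed_lines(buffered, chunk):
	"""Return (complete_lines, new_buffer) for one incoming chunk."""
	data = buffered + chunk
	lines = data.split('\n')
	if lines[-1]: # last piece is an incomplete line -> keep it for next time
		return lines[:-1], lines[-1]
	return lines[:-1], "" # chunk ended exactly on a newline
# e.g. _feed_lines("", "OK 330000 6900000 FEC_NONE QAM64 INV")
#      -> ([], "OK 330000 6900000 FEC_NONE QAM64 INV")
#      _feed_lines("OK 330000 6900000 FEC_NONE QAM64 INV", "ERSION_OFF\n")
#      -> (["OK 330000 6900000 FEC_NONE QAM64 INVERSION_OFF"], "")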
class DefaultSatLists(DefaultWizard):
def __init__(self, session, silent = True, showSteps = False):
self.xmlfile = "defaultsatlists.xml"
DefaultWizard.__init__(self, session, silent, showSteps, neededTag = "services")
print "configuredSats:", nimmanager.getConfiguredSats()
def setDirectory(self):
self.directory = []
self.directory.append(resolveFilename(SCOPE_DEFAULTDIR))
import os
os.system("mount %s %s" % (resolveFilename(SCOPE_DEFAULTPARTITION), resolveFilename(SCOPE_DEFAULTPARTITIONMOUNTDIR)))
self.directory.append(resolveFilename(SCOPE_DEFAULTPARTITIONMOUNTDIR))
def statusCallback(self, status, progress):
print "statusCallback:", status, progress
from Components.DreamInfoHandler import DreamInfoHandler
if status == DreamInfoHandler.STATUS_DONE:
self["text"].setText(_("The installation of the default services lists is finished.") + "\n\n" + _("Please press OK to continue."))
self.markDone()
self.disableKeys = False
class ScanSetup(ConfigListScreen, Screen, CableTransponderSearchSupport):
def __init__(self, session):
Screen.__init__(self, session)
self.finished_cb = None
self.updateSatList()
self.service = session.nav.getCurrentService()
self.feinfo = None
self.networkid = 0
frontendData = None
if self.service is not None:
self.feinfo = self.service.frontendInfo()
frontendData = self.feinfo and self.feinfo.getAll(True)
self.createConfig(frontendData)
del self.feinfo
del self.service
self["actions"] = NumberActionMap(["SetupActions"],
{
"ok": self.keyGo,
"cancel": self.keyCancel,
}, -2)
self.statusTimer = eTimer()
self.statusTimer.callback.append(self.updateStatus)
#self.statusTimer.start(5000, True)
self.list = []
ConfigListScreen.__init__(self, self.list)
if not self.scan_nims.value == "":
self.createSetup()
self["introduction"] = Label(_("Press OK to start the scan"))
else:
self["introduction"] = Label(_("Nothing to scan!\nPlease setup your tuner settings before you start a service scan."))
def runAsync(self, finished_cb):
self.finished_cb = finished_cb
self.keyGo()
def updateSatList(self):
self.satList = []
for slot in nimmanager.nim_slots:
if slot.isCompatible("DVB-S"):
self.satList.append(nimmanager.getSatListForNim(slot.slot))
else:
self.satList.append(None)
def createSetup(self):
self.list = []
self.multiscanlist = []
index_to_scan = int(self.scan_nims.value)
print "ID: ", index_to_scan
self.tunerEntry = getConfigListEntry(_("Tuner"), self.scan_nims)
self.list.append(self.tunerEntry)
if self.scan_nims == [ ]:
return
self.typeOfScanEntry = None
self.systemEntry = None
self.modulationEntry = None
nim = nimmanager.nim_slots[index_to_scan]
if nim.isCompatible("DVB-S"):
self.typeOfScanEntry = getConfigListEntry(_("Type of scan"), self.scan_type)
self.list.append(self.typeOfScanEntry)
elif nim.isCompatible("DVB-C"):
self.typeOfScanEntry = getConfigListEntry(_("Type of scan"), self.scan_typecable)
self.list.append(self.typeOfScanEntry)
elif nim.isCompatible("DVB-T"):
self.typeOfScanEntry = getConfigListEntry(_("Type of scan"), self.scan_typeterrestrial)
self.list.append(self.typeOfScanEntry)
self.scan_networkScan.value = False
if nim.isCompatible("DVB-S"):
if self.scan_type.value == "single_transponder":
self.updateSatList()
if nim.isCompatible("DVB-S2"):
self.systemEntry = getConfigListEntry(_('System'), self.scan_sat.system)
self.list.append(self.systemEntry)
else:
# downgrade to dvb-s, in case a -s2 config was active
self.scan_sat.system.value = eDVBFrontendParametersSatellite.System_DVB_S
self.list.append(getConfigListEntry(_('Satellite'), self.scan_satselection[index_to_scan]))
self.list.append(getConfigListEntry(_('Frequency'), self.scan_sat.frequency))
self.list.append(getConfigListEntry(_('Inversion'), self.scan_sat.inversion))
self.list.append(getConfigListEntry(_('Symbol Rate'), self.scan_sat.symbolrate))
self.list.append(getConfigListEntry(_("Polarity"), self.scan_sat.polarization))
if self.scan_sat.system.value == eDVBFrontendParametersSatellite.System_DVB_S:
self.list.append(getConfigListEntry(_("FEC"), self.scan_sat.fec))
elif self.scan_sat.system.value == eDVBFrontendParametersSatellite.System_DVB_S2:
self.list.append(getConfigListEntry(_("FEC"), self.scan_sat.fec_s2))
self.modulationEntry = getConfigListEntry(_('Modulation'), self.scan_sat.modulation)
self.list.append(self.modulationEntry)
self.list.append(getConfigListEntry(_('Rolloff'), self.scan_sat.rolloff))
self.list.append(getConfigListEntry(_('Pilot'), self.scan_sat.pilot))
elif self.scan_type.value == "single_satellite":
self.updateSatList()
print self.scan_satselection[index_to_scan]
self.list.append(getConfigListEntry(_("Satellite"), self.scan_satselection[index_to_scan]))
self.scan_networkScan.value = True
elif self.scan_type.value.find("multisat") != -1:
tlist = []
SatList = nimmanager.getSatListForNim(index_to_scan)
for x in SatList:
if self.Satexists(tlist, x[0]) == 0:
tlist.append(x[0])
sat = ConfigEnableDisable(default = self.scan_type.value.find("_yes") != -1 and True or False)
configEntry = getConfigListEntry(nimmanager.getSatDescription(x[0]), sat)
self.list.append(configEntry)
self.multiscanlist.append((x[0], sat))
self.scan_networkScan.value = True
elif nim.isCompatible("DVB-C"):
if self.scan_typecable.value == "single_transponder":
self.list.append(getConfigListEntry(_("Frequency"), self.scan_cab.frequency))
self.list.append(getConfigListEntry(_("Inversion"), self.scan_cab.inversion))
self.list.append(getConfigListEntry(_("Symbol Rate"), self.scan_cab.symbolrate))
self.list.append(getConfigListEntry(_("Modulation"), self.scan_cab.modulation))
self.list.append(getConfigListEntry(_("FEC"), self.scan_cab.fec))
if config.Nims[index_to_scan].cable.scan_networkid.value:
self.networkid = config.Nims[index_to_scan].cable.scan_networkid.value
self.scan_networkScan.value = True
elif nim.isCompatible("DVB-T"):
if self.scan_typeterrestrial.value == "single_transponder":
self.list.append(getConfigListEntry(_("Frequency"), self.scan_ter.frequency))
self.list.append(getConfigListEntry(_("Inversion"), self.scan_ter.inversion))
self.list.append(getConfigListEntry(_("Bandwidth"), self.scan_ter.bandwidth))
self.list.append(getConfigListEntry(_("Code rate high"), self.scan_ter.fechigh))
self.list.append(getConfigListEntry(_("Code rate low"), self.scan_ter.feclow))
self.list.append(getConfigListEntry(_("Modulation"), self.scan_ter.modulation))
self.list.append(getConfigListEntry(_("Transmission mode"), self.scan_ter.transmission))
self.list.append(getConfigListEntry(_("Guard interval mode"), self.scan_ter.guard))
self.list.append(getConfigListEntry(_("Hierarchy mode"), self.scan_ter.hierarchy))
self.list.append(getConfigListEntry(_("Network scan"), self.scan_networkScan))
self.list.append(getConfigListEntry(_("Clear before scan"), self.scan_clearallservices))
self.list.append(getConfigListEntry(_("Only Free scan"), self.scan_onlyfree))
self["config"].list = self.list
self["config"].l.setList(self.list)
def Satexists(self, tlist, pos):
for x in tlist:
if x == pos:
return 1
return 0
def newConfig(self):
cur = self["config"].getCurrent()
print "cur is", cur
if cur == self.typeOfScanEntry or \
cur == self.tunerEntry or \
cur == self.systemEntry or \
(self.modulationEntry and self.systemEntry[1].value == eDVBFrontendParametersSatellite.System_DVB_S2 and cur == self.modulationEntry):
self.createSetup()
def createConfig(self, frontendData):
#("Type", frontendData["system"], TYPE_TEXT),
#("Modulation", frontendData["modulation"], TYPE_TEXT),
#("Orbital position", frontendData["orbital_position"], TYPE_VALUE_DEC),
#("Frequency", frontendData["frequency"], TYPE_VALUE_DEC),
#("Symbolrate", frontendData["symbol_rate"], TYPE_VALUE_DEC),
#("Polarization", frontendData["polarization"], TYPE_TEXT),
#("Inversion", frontendData["inversion"], TYPE_TEXT),
#("FEC inner", frontendData["fec_inner"], TYPE_TEXT),
#)
#elif frontendData["tuner_type"] == "DVB-C":
#return ( ("NIM", ['A', 'B', 'C', 'D'][frontendData["tuner_number"]], TYPE_TEXT),
#("Type", frontendData["tuner_type"], TYPE_TEXT),
#("Frequency", frontendData["frequency"], TYPE_VALUE_DEC),
#("Symbolrate", frontendData["symbol_rate"], TYPE_VALUE_DEC),
#("Modulation", frontendData["modulation"], TYPE_TEXT),
#("Inversion", frontendData["inversion"], TYPE_TEXT),
# ("FEC inner", frontendData["fec_inner"], TYPE_TEXT),
#)
#elif frontendData["tuner_type"] == "DVB-T":
#return ( ("NIM", ['A', 'B', 'C', 'D'][frontendData["tuner_number"]], TYPE_TEXT),
#("Type", frontendData["tuner_type"], TYPE_TEXT),
#("Frequency", frontendData["frequency"], TYPE_VALUE_DEC),
#("Inversion", frontendData["inversion"], TYPE_TEXT),
#("Bandwidth", frontendData["bandwidth"], TYPE_VALUE_DEC),
#("CodeRateLP", frontendData["code_rate_lp"], TYPE_TEXT),
#("CodeRateHP", frontendData["code_rate_hp"], TYPE_TEXT),
#("Constellation", frontendData["constellation"], TYPE_TEXT),
#("Transmission Mode", frontendData["transmission_mode"], TYPE_TEXT),
#("Guard Interval", frontendData["guard_interval"], TYPE_TEXT),
#("Hierarchy Inform.", frontendData["hierarchy_information"], TYPE_TEXT),
defaultSat = {
"orbpos": 192,
"system": eDVBFrontendParametersSatellite.System_DVB_S,
"frequency": 11836,
"inversion": eDVBFrontendParametersSatellite.Inversion_Unknown,
"symbolrate": 27500,
"polarization": eDVBFrontendParametersSatellite.Polarisation_Horizontal,
"fec": eDVBFrontendParametersSatellite.FEC_Auto,
"fec_s2": eDVBFrontendParametersSatellite.FEC_9_10,
"modulation": eDVBFrontendParametersSatellite.Modulation_QPSK }
defaultCab = {
"frequency": 466,
"inversion": eDVBFrontendParametersCable.Inversion_Unknown,
"modulation": eDVBFrontendParametersCable.Modulation_QAM64,
"fec": eDVBFrontendParametersCable.FEC_Auto,
"symbolrate": 6900 }
defaultTer = {
"frequency" : 466000,
"inversion" : eDVBFrontendParametersTerrestrial.Inversion_Unknown,
"bandwidth" : eDVBFrontendParametersTerrestrial.Bandwidth_7MHz,
"fechigh" : eDVBFrontendParametersTerrestrial.FEC_Auto,
"feclow" : eDVBFrontendParametersTerrestrial.FEC_Auto,
"modulation" : eDVBFrontendParametersTerrestrial.Modulation_Auto,
"transmission_mode" : eDVBFrontendParametersTerrestrial.TransmissionMode_Auto,
"guard_interval" : eDVBFrontendParametersTerrestrial.GuardInterval_Auto,
"hierarchy": eDVBFrontendParametersTerrestrial.Hierarchy_Auto }
if frontendData is not None:
ttype = frontendData.get("tuner_type", "UNKNOWN")
if ttype == "DVB-S":
defaultSat["system"] = frontendData.get("system", eDVBFrontendParametersSatellite.System_DVB_S)
defaultSat["frequency"] = frontendData.get("frequency", 0) / 1000
defaultSat["inversion"] = frontendData.get("inversion", eDVBFrontendParametersSatellite.Inversion_Unknown)
defaultSat["symbolrate"] = frontendData.get("symbol_rate", 0) / 1000
defaultSat["polarization"] = frontendData.get("polarization", eDVBFrontendParametersSatellite.Polarisation_Horizontal)
if defaultSat["system"] == eDVBFrontendParametersSatellite.System_DVB_S2:
defaultSat["fec_s2"] = frontendData.get("fec_inner", eDVBFrontendParametersSatellite.FEC_Auto)
defaultSat["rolloff"] = frontendData.get("rolloff", eDVBFrontendParametersSatellite.RollOff_alpha_0_35)
defaultSat["pilot"] = frontendData.get("pilot", eDVBFrontendParametersSatellite.Pilot_Unknown)
else:
defaultSat["fec"] = frontendData.get("fec_inner", eDVBFrontendParametersSatellite.FEC_Auto)
defaultSat["modulation"] = frontendData.get("modulation", eDVBFrontendParametersSatellite.Modulation_QPSK)
defaultSat["orbpos"] = frontendData.get("orbital_position", 0)
elif ttype == "DVB-C":
defaultCab["frequency"] = frontendData.get("frequency", 0) / 1000
defaultCab["symbolrate"] = frontendData.get("symbol_rate", 0) / 1000
defaultCab["inversion"] = frontendData.get("inversion", eDVBFrontendParametersCable.Inversion_Unknown)
defaultCab["fec"] = frontendData.get("fec_inner", eDVBFrontendParametersCable.FEC_Auto)
defaultCab["modulation"] = frontendData.get("modulation", eDVBFrontendParametersCable.Modulation_QAM16)
elif ttype == "DVB-T":
defaultTer["frequency"] = frontendData.get("frequency", 0)
defaultTer["inversion"] = frontendData.get("inversion", eDVBFrontendParametersTerrestrial.Inversion_Unknown)
defaultTer["bandwidth"] = frontendData.get("bandwidth", eDVBFrontendParametersTerrestrial.Bandwidth_7MHz)
defaultTer["fechigh"] = frontendData.get("code_rate_hp", eDVBFrontendParametersTerrestrial.FEC_Auto)
defaultTer["feclow"] = frontendData.get("code_rate_lp", eDVBFrontendParametersTerrestrial.FEC_Auto)
defaultTer["modulation"] = frontendData.get("constellation", eDVBFrontendParametersTerrestrial.Modulation_Auto)
defaultTer["transmission_mode"] = frontendData.get("transmission_mode", eDVBFrontendParametersTerrestrial.TransmissionMode_Auto)
defaultTer["guard_interval"] = frontendData.get("guard_interval", eDVBFrontendParametersTerrestrial.GuardInterval_Auto)
defaultTer["hierarchy"] = frontendData.get("hierarchy_information", eDVBFrontendParametersTerrestrial.Hierarchy_Auto)
self.scan_sat = ConfigSubsection()
self.scan_cab = ConfigSubsection()
self.scan_ter = ConfigSubsection()
self.scan_type = ConfigSelection(default = "single_transponder", choices = [("single_transponder", _("Single transponder")), ("single_satellite", _("Single satellite")), ("multisat", _("Multisat")), ("multisat_yes", _("Multisat"))])
self.scan_typecable = ConfigSelection(default = "single_transponder", choices = [("single_transponder", _("Single transponder")), ("complete", _("Complete"))])
self.scan_typeterrestrial = ConfigSelection(default = "single_transponder", choices = [("single_transponder", _("Single transponder")), ("complete", _("Complete"))])
self.scan_clearallservices = ConfigSelection(default = "no", choices = [("no", _("no")), ("yes", _("yes")), ("yes_hold_feeds", _("yes (keep feeds)"))])
self.scan_onlyfree = ConfigYesNo(default = False)
self.scan_networkScan = ConfigYesNo(default = False)
nim_list = []
# collect all nims which are *not* set to "nothing"
for n in nimmanager.nim_slots:
if n.config_mode == "nothing":
continue
if n.config_mode == "advanced" and len(nimmanager.getSatListForNim(n.slot)) < 1:
continue
if n.config_mode in ("loopthrough", "satposdepends"):
root_id = nimmanager.sec.getRoot(n.slot_id, int(n.config.connectedTo.value))
if n.type == nimmanager.nim_slots[root_id].type: # check if connected from a DVB-S to DVB-S2 Nim or vice versa
continue
nim_list.append((str(n.slot), n.friendly_full_description))
self.scan_nims = ConfigSelection(choices = nim_list)
# status
self.scan_snr = ConfigSlider()
self.scan_snr.enabled = False
self.scan_agc = ConfigSlider()
self.scan_agc.enabled = False
self.scan_ber = ConfigSlider()
self.scan_ber.enabled = False
# sat
self.scan_sat.system = ConfigSelection(default = defaultSat["system"], choices = [
(eDVBFrontendParametersSatellite.System_DVB_S, _("DVB-S")),
(eDVBFrontendParametersSatellite.System_DVB_S2, _("DVB-S2"))])
self.scan_sat.frequency = ConfigInteger(default = defaultSat["frequency"], limits = (1, 99999))
self.scan_sat.inversion = ConfigSelection(default = defaultSat["inversion"], choices = [
(eDVBFrontendParametersSatellite.Inversion_Off, _("off")),
(eDVBFrontendParametersSatellite.Inversion_On, _("on")),
(eDVBFrontendParametersSatellite.Inversion_Unknown, _("Auto"))])
self.scan_sat.symbolrate = ConfigInteger(default = defaultSat["symbolrate"], limits = (1, 99999))
self.scan_sat.polarization = ConfigSelection(default = defaultSat["polarization"], choices = [
(eDVBFrontendParametersSatellite.Polarisation_Horizontal, _("horizontal")),
(eDVBFrontendParametersSatellite.Polarisation_Vertical, _("vertical")),
(eDVBFrontendParametersSatellite.Polarisation_CircularLeft, _("circular left")),
(eDVBFrontendParametersSatellite.Polarisation_CircularRight, _("circular right"))])
self.scan_sat.fec = ConfigSelection(default = defaultSat["fec"], choices = [
(eDVBFrontendParametersSatellite.FEC_Auto, _("Auto")),
(eDVBFrontendParametersSatellite.FEC_1_2, "1/2"),
(eDVBFrontendParametersSatellite.FEC_2_3, "2/3"),
(eDVBFrontendParametersSatellite.FEC_3_4, "3/4"),
(eDVBFrontendParametersSatellite.FEC_5_6, "5/6"),
(eDVBFrontendParametersSatellite.FEC_7_8, "7/8"),
(eDVBFrontendParametersSatellite.FEC_None, _("None"))])
self.scan_sat.fec_s2 = ConfigSelection(default = defaultSat["fec_s2"], choices = [
(eDVBFrontendParametersSatellite.FEC_1_2, "1/2"),
(eDVBFrontendParametersSatellite.FEC_2_3, "2/3"),
(eDVBFrontendParametersSatellite.FEC_3_4, "3/4"),
(eDVBFrontendParametersSatellite.FEC_3_5, "3/5"),
(eDVBFrontendParametersSatellite.FEC_4_5, "4/5"),
(eDVBFrontendParametersSatellite.FEC_5_6, "5/6"),
(eDVBFrontendParametersSatellite.FEC_7_8, "7/8"),
(eDVBFrontendParametersSatellite.FEC_8_9, "8/9"),
(eDVBFrontendParametersSatellite.FEC_9_10, "9/10")])
self.scan_sat.modulation = ConfigSelection(default = defaultSat["modulation"], choices = [
(eDVBFrontendParametersSatellite.Modulation_QPSK, "QPSK"),
(eDVBFrontendParametersSatellite.Modulation_8PSK, "8PSK")])
self.scan_sat.rolloff = ConfigSelection(default = defaultSat.get("rolloff", eDVBFrontendParametersSatellite.RollOff_alpha_0_35), choices = [
(eDVBFrontendParametersSatellite.RollOff_alpha_0_35, "0.35"),
(eDVBFrontendParametersSatellite.RollOff_alpha_0_25, "0.25"),
(eDVBFrontendParametersSatellite.RollOff_alpha_0_20, "0.20")])
self.scan_sat.pilot = ConfigSelection(default = defaultSat.get("pilot", eDVBFrontendParametersSatellite.Pilot_Unknown), choices = [
(eDVBFrontendParametersSatellite.Pilot_Off, _("off")),
(eDVBFrontendParametersSatellite.Pilot_On, _("on")),
(eDVBFrontendParametersSatellite.Pilot_Unknown, _("Auto"))])
# cable
self.scan_cab.frequency = ConfigInteger(default = defaultCab["frequency"], limits = (50, 999))
self.scan_cab.inversion = ConfigSelection(default = defaultCab["inversion"], choices = [
(eDVBFrontendParametersCable.Inversion_Off, _("off")),
(eDVBFrontendParametersCable.Inversion_On, _("on")),
(eDVBFrontendParametersCable.Inversion_Unknown, _("Auto"))])
self.scan_cab.modulation = ConfigSelection(default = defaultCab["modulation"], choices = [
(eDVBFrontendParametersCable.Modulation_QAM16, "16-QAM"),
(eDVBFrontendParametersCable.Modulation_QAM32, "32-QAM"),
(eDVBFrontendParametersCable.Modulation_QAM64, "64-QAM"),
(eDVBFrontendParametersCable.Modulation_QAM128, "128-QAM"),
(eDVBFrontendParametersCable.Modulation_QAM256, "256-QAM")])
self.scan_cab.fec = ConfigSelection(default = defaultCab["fec"], choices = [
(eDVBFrontendParametersCable.FEC_Auto, _("Auto")),
(eDVBFrontendParametersCable.FEC_1_2, "1/2"),
(eDVBFrontendParametersCable.FEC_2_3, "2/3"),
(eDVBFrontendParametersCable.FEC_3_4, "3/4"),
(eDVBFrontendParametersCable.FEC_5_6, "5/6"),
(eDVBFrontendParametersCable.FEC_7_8, "7/8"),
(eDVBFrontendParametersCable.FEC_8_9, "8/9"),
(eDVBFrontendParametersCable.FEC_None, _("None"))])
self.scan_cab.symbolrate = ConfigInteger(default = defaultCab["symbolrate"], limits = (1, 99999))
		# terrestrial
self.scan_ter.frequency = ConfigInteger(default = 466000, limits = (50000, 999000))
self.scan_ter.inversion = ConfigSelection(default = defaultTer["inversion"], choices = [
(eDVBFrontendParametersTerrestrial.Inversion_Off, _("off")),
(eDVBFrontendParametersTerrestrial.Inversion_On, _("on")),
(eDVBFrontendParametersTerrestrial.Inversion_Unknown, _("Auto"))])
# WORKAROUND: we can't use BW-auto
self.scan_ter.bandwidth = ConfigSelection(default = defaultTer["bandwidth"], choices = [
(eDVBFrontendParametersTerrestrial.Bandwidth_8MHz, "8MHz"),
(eDVBFrontendParametersTerrestrial.Bandwidth_7MHz, "7MHz"),
(eDVBFrontendParametersTerrestrial.Bandwidth_6MHz, "6MHz")])
#, (eDVBFrontendParametersTerrestrial.Bandwidth_Auto, _("Auto"))))
self.scan_ter.fechigh = ConfigSelection(default = defaultTer["fechigh"], choices = [
(eDVBFrontendParametersTerrestrial.FEC_1_2, "1/2"),
(eDVBFrontendParametersTerrestrial.FEC_2_3, "2/3"),
(eDVBFrontendParametersTerrestrial.FEC_3_4, "3/4"),
(eDVBFrontendParametersTerrestrial.FEC_5_6, "5/6"),
(eDVBFrontendParametersTerrestrial.FEC_7_8, "7/8"),
(eDVBFrontendParametersTerrestrial.FEC_Auto, _("Auto"))])
self.scan_ter.feclow = ConfigSelection(default = defaultTer["feclow"], choices = [
(eDVBFrontendParametersTerrestrial.FEC_1_2, "1/2"),
(eDVBFrontendParametersTerrestrial.FEC_2_3, "2/3"),
(eDVBFrontendParametersTerrestrial.FEC_3_4, "3/4"),
(eDVBFrontendParametersTerrestrial.FEC_5_6, "5/6"),
(eDVBFrontendParametersTerrestrial.FEC_7_8, "7/8"),
(eDVBFrontendParametersTerrestrial.FEC_Auto, _("Auto"))])
self.scan_ter.modulation = ConfigSelection(default = defaultTer["modulation"], choices = [
(eDVBFrontendParametersTerrestrial.Modulation_QPSK, "QPSK"),
(eDVBFrontendParametersTerrestrial.Modulation_QAM16, "QAM16"),
(eDVBFrontendParametersTerrestrial.Modulation_QAM64, "QAM64"),
(eDVBFrontendParametersTerrestrial.Modulation_Auto, _("Auto"))])
self.scan_ter.transmission = ConfigSelection(default = defaultTer["transmission_mode"], choices = [
(eDVBFrontendParametersTerrestrial.TransmissionMode_2k, "2K"),
(eDVBFrontendParametersTerrestrial.TransmissionMode_8k, "8K"),
(eDVBFrontendParametersTerrestrial.TransmissionMode_Auto, _("Auto"))])
self.scan_ter.guard = ConfigSelection(default = defaultTer["guard_interval"], choices = [
(eDVBFrontendParametersTerrestrial.GuardInterval_1_32, "1/32"),
(eDVBFrontendParametersTerrestrial.GuardInterval_1_16, "1/16"),
(eDVBFrontendParametersTerrestrial.GuardInterval_1_8, "1/8"),
(eDVBFrontendParametersTerrestrial.GuardInterval_1_4, "1/4"),
(eDVBFrontendParametersTerrestrial.GuardInterval_Auto, _("Auto"))])
self.scan_ter.hierarchy = ConfigSelection(default = defaultTer["hierarchy"], choices = [
(eDVBFrontendParametersTerrestrial.Hierarchy_None, _("None")),
(eDVBFrontendParametersTerrestrial.Hierarchy_1, "1"),
(eDVBFrontendParametersTerrestrial.Hierarchy_2, "2"),
(eDVBFrontendParametersTerrestrial.Hierarchy_4, "4"),
(eDVBFrontendParametersTerrestrial.Hierarchy_Auto, _("Auto"))])
self.scan_scansat = {}
for sat in nimmanager.satList:
#print sat[1]
self.scan_scansat[sat[0]] = ConfigYesNo(default = False)
self.scan_satselection = []
for slot in nimmanager.nim_slots:
if slot.isCompatible("DVB-S"):
self.scan_satselection.append(getConfigSatlist(defaultSat["orbpos"], self.satList[slot.slot]))
else:
self.scan_satselection.append(None)
return True
def keyLeft(self):
ConfigListScreen.keyLeft(self)
self.newConfig()
def keyRight(self):
ConfigListScreen.keyRight(self)
self.newConfig()
def updateStatus(self):
print "updatestatus"
def addSatTransponder(self, tlist, frequency, symbol_rate, polarisation, fec, inversion, orbital_position, system, modulation, rolloff, pilot):
print "Add Sat: frequ: " + str(frequency) + " symbol: " + str(symbol_rate) + " pol: " + str(polarisation) + " fec: " + str(fec) + " inversion: " + str(inversion) + " modulation: " + str(modulation) + " system: " + str(system) + " rolloff" + str(rolloff) + " pilot" + str(pilot)
print "orbpos: " + str(orbital_position)
parm = eDVBFrontendParametersSatellite()
parm.modulation = modulation
parm.system = system
parm.frequency = frequency * 1000
parm.symbol_rate = symbol_rate * 1000
parm.polarisation = polarisation
parm.fec = fec
parm.inversion = inversion
parm.orbital_position = orbital_position
parm.rolloff = rolloff
parm.pilot = pilot
tlist.append(parm)
def addCabTransponder(self, tlist, frequency, symbol_rate, modulation, fec, inversion):
print "Add Cab: frequ: " + str(frequency) + " symbol: " + str(symbol_rate) + " pol: " + str(modulation) + " fec: " + str(fec) + " inversion: " + str(inversion)
parm = eDVBFrontendParametersCable()
parm.frequency = frequency * 1000
parm.symbol_rate = symbol_rate * 1000
parm.modulation = modulation
parm.fec = fec
parm.inversion = inversion
tlist.append(parm)
def addTerTransponder(self, tlist, *args, **kwargs):
tlist.append(buildTerTransponder(*args, **kwargs))
def keyGo(self):
if self.scan_nims.value == "":
return
tlist = []
flags = None
startScan = True
removeAll = True
index_to_scan = int(self.scan_nims.value)
if self.scan_nims == [ ]:
self.session.open(MessageBox, _("No tuner is enabled!\nPlease setup your tuner settings before you start a service scan."), MessageBox.TYPE_ERROR)
return
nim = nimmanager.nim_slots[index_to_scan]
print "nim", nim.slot
if nim.isCompatible("DVB-S"):
print "is compatible with DVB-S"
if self.scan_type.value == "single_transponder":
				# these lists are generated for each tuner, so this has to work.
assert len(self.satList) > index_to_scan
assert len(self.scan_satselection) > index_to_scan
nimsats = self.satList[index_to_scan]
selsatidx = self.scan_satselection[index_to_scan].index
# however, the satList itself could be empty. in that case, "index" is 0 (for "None").
if len(nimsats):
orbpos = nimsats[selsatidx][0]
if self.scan_sat.system.value == eDVBFrontendParametersSatellite.System_DVB_S:
fec = self.scan_sat.fec.value
else:
fec = self.scan_sat.fec_s2.value
print "add sat transponder"
self.addSatTransponder(tlist, self.scan_sat.frequency.value,
self.scan_sat.symbolrate.value,
self.scan_sat.polarization.value,
fec,
self.scan_sat.inversion.value,
orbpos,
self.scan_sat.system.value,
self.scan_sat.modulation.value,
self.scan_sat.rolloff.value,
self.scan_sat.pilot.value)
removeAll = False
elif self.scan_type.value == "single_satellite":
sat = self.satList[index_to_scan][self.scan_satselection[index_to_scan].index]
getInitialTransponderList(tlist, sat[0])
elif self.scan_type.value.find("multisat") != -1:
SatList = nimmanager.getSatListForNim(index_to_scan)
for x in self.multiscanlist:
if x[1].value:
print " " + str(x[0])
getInitialTransponderList(tlist, x[0])
elif nim.isCompatible("DVB-C"):
if self.scan_typecable.value == "single_transponder":
self.addCabTransponder(tlist, self.scan_cab.frequency.value,
self.scan_cab.symbolrate.value,
self.scan_cab.modulation.value,
self.scan_cab.fec.value,
self.scan_cab.inversion.value)
removeAll = False
elif self.scan_typecable.value == "complete":
if config.Nims[index_to_scan].cable.scan_type.value == "provider":
getInitialCableTransponderList(tlist, index_to_scan)
else:
startScan = False
elif nim.isCompatible("DVB-T"):
if self.scan_typeterrestrial.value == "single_transponder":
self.addTerTransponder(tlist,
self.scan_ter.frequency.value * 1000,
inversion = self.scan_ter.inversion.value,
bandwidth = self.scan_ter.bandwidth.value,
fechigh = self.scan_ter.fechigh.value,
feclow = self.scan_ter.feclow.value,
modulation = self.scan_ter.modulation.value,
transmission = self.scan_ter.transmission.value,
guard = self.scan_ter.guard.value,
hierarchy = self.scan_ter.hierarchy.value)
removeAll = False
elif self.scan_typeterrestrial.value == "complete":
getInitialTerrestrialTransponderList(tlist, nimmanager.getTerrestrialDescription(index_to_scan))
flags = self.scan_networkScan.value and eComponentScan.scanNetworkSearch or 0
tmp = self.scan_clearallservices.value
if tmp == "yes":
flags |= eComponentScan.scanRemoveServices
elif tmp == "yes_hold_feeds":
flags |= eComponentScan.scanRemoveServices
flags |= eComponentScan.scanDontRemoveFeeds
if tmp != "no" and not removeAll:
flags |= eComponentScan.scanDontRemoveUnscanned
if self.scan_onlyfree.value:
flags |= eComponentScan.scanOnlyFree
for x in self["config"].list:
x[1].save()
if startScan:
self.startScan(tlist, flags, index_to_scan, self.networkid)
else:
self.flags = flags
self.feid = index_to_scan
self.tlist = []
self.startCableTransponderSearch(self.feid)
def setCableTransponderSearchResult(self, tlist):
self.tlist = tlist
def cableTransponderSearchFinished(self):
if self.tlist is None:
self.tlist = []
else:
self.startScan(self.tlist, self.flags, self.feid)
def startScan(self, tlist, flags, feid, networkid = 0):
if len(tlist):
# flags |= eComponentScan.scanSearchBAT
if self.finished_cb:
self.session.openWithCallback(self.finished_cb, ServiceScan, [{"transponders": tlist, "feid": feid, "flags": flags, "networkid": networkid}])
else:
self.session.open(ServiceScan, [{"transponders": tlist, "feid": feid, "flags": flags, "networkid": networkid}])
else:
if self.finished_cb:
self.session.openWithCallback(self.finished_cb, MessageBox, _("Nothing to scan!\nPlease setup your tuner settings before you start a service scan."), MessageBox.TYPE_ERROR)
else:
self.session.open(MessageBox, _("Nothing to scan!\nPlease setup your tuner settings before you start a service scan."), MessageBox.TYPE_ERROR)
def keyCancel(self):
for x in self["config"].list:
x[1].cancel()
self.close()
class ScanSimple(ConfigListScreen, Screen, CableTransponderSearchSupport):
def getNetworksForNim(self, nim):
if nim.isCompatible("DVB-S"):
networks = nimmanager.getSatListForNim(nim.slot)
elif not nim.empty:
			networks = [ nim.type ] # "DVB-C" or "DVB-T". TODO: separate networks for different C/T tuners, if we want to support that.
else:
# empty tuners provide no networks.
networks = [ ]
return networks
def __init__(self, session):
Screen.__init__(self, session)
self["actions"] = ActionMap(["SetupActions"],
{
"ok": self.keyGo,
"cancel": self.keyCancel,
}, -2)
self.list = []
tlist = []
known_networks = [ ]
nims_to_scan = [ ]
self.finished_cb = None
for nim in nimmanager.nim_slots:
# collect networks provided by this tuner
need_scan = False
networks = self.getNetworksForNim(nim)
print "nim %d provides" % nim.slot, networks
print "known:", known_networks
# we only need to scan on the first tuner which provides a network.
# this gives the first tuner for each network priority for scanning.
for x in networks:
if x not in known_networks:
need_scan = True
print x, "not in ", known_networks
known_networks.append(x)
# don't offer to scan nims if nothing is connected
if not nimmanager.somethingConnected(nim.slot):
need_scan = False
if need_scan:
nims_to_scan.append(nim)
# we save the config elements to use them on keyGo
self.nim_enable = [ ]
if len(nims_to_scan):
self.scan_clearallservices = ConfigSelection(default = "yes", choices = [("no", _("no")), ("yes", _("yes")), ("yes_hold_feeds", _("yes (keep feeds)"))])
self.list.append(getConfigListEntry(_("Clear before scan"), self.scan_clearallservices))
for nim in nims_to_scan:
nimconfig = ConfigYesNo(default = True)
nimconfig.nim_index = nim.slot
self.nim_enable.append(nimconfig)
self.list.append(getConfigListEntry(_("Scan ") + nim.slot_name + " (" + nim.friendly_type + ")", nimconfig))
ConfigListScreen.__init__(self, self.list)
self["header"] = Label(_("Automatic Scan"))
self["footer"] = Label(_("Press OK to scan"))
def runAsync(self, finished_cb):
self.finished_cb = finished_cb
self.keyGo()
def keyGo(self):
self.scanList = []
self.known_networks = set()
self.nim_iter=0
self.buildTransponderList()
def buildTransponderList(self): # this method is called multiple times because of asynchronous stuff
APPEND_NOW = 0
SEARCH_CABLE_TRANSPONDERS = 1
action = APPEND_NOW
n = self.nim_iter < len(self.nim_enable) and self.nim_enable[self.nim_iter] or None
self.nim_iter += 1
if n:
if n.value: # check if nim is enabled
flags = 0
nim = nimmanager.nim_slots[n.nim_index]
networks = set(self.getNetworksForNim(nim))
networkid = 0
# don't scan anything twice
				networks.difference_update(self.known_networks)
				self.known_networks.update(networks)
tlist = [ ]
if nim.isCompatible("DVB-S"):
# get initial transponders for each satellite to be scanned
for sat in networks:
getInitialTransponderList(tlist, sat[0])
elif nim.isCompatible("DVB-C"):
if config.Nims[nim.slot].cable.scan_type.value == "provider":
getInitialCableTransponderList(tlist, nim.slot)
else:
action = SEARCH_CABLE_TRANSPONDERS
networkid = config.Nims[nim.slot].cable.scan_networkid.value
elif nim.isCompatible("DVB-T"):
getInitialTerrestrialTransponderList(tlist, nimmanager.getTerrestrialDescription(nim.slot))
else:
assert False
flags |= eComponentScan.scanNetworkSearch #FIXMEEE.. use flags from cables / satellites / terrestrial.xml
tmp = self.scan_clearallservices.value
if tmp == "yes":
flags |= eComponentScan.scanRemoveServices
elif tmp == "yes_hold_feeds":
flags |= eComponentScan.scanRemoveServices
flags |= eComponentScan.scanDontRemoveFeeds
if action == APPEND_NOW:
self.scanList.append({"transponders": tlist, "feid": nim.slot, "flags": flags})
elif action == SEARCH_CABLE_TRANSPONDERS:
self.flags = flags
self.feid = nim.slot
self.networkid = networkid
self.startCableTransponderSearch(nim.slot)
return
else:
assert False
self.buildTransponderList() # recursive call of this function !!!
return
# when we are here, then the recursion is finished and all enabled nims are checked
# so we now start the real transponder scan
self.startScan(self.scanList)
def startScan(self, scanList):
if len(scanList):
if self.finished_cb:
self.session.openWithCallback(self.finished_cb, ServiceScan, scanList = scanList)
else:
self.session.open(ServiceScan, scanList = scanList)
else:
if self.finished_cb:
self.session.openWithCallback(self.finished_cb, MessageBox, _("Nothing to scan!\nPlease setup your tuner settings before you start a service scan."), MessageBox.TYPE_ERROR)
else:
self.session.open(MessageBox, _("Nothing to scan!\nPlease setup your tuner settings before you start a service scan."), MessageBox.TYPE_ERROR)
def setCableTransponderSearchResult(self, tlist):
if tlist is not None:
self.scanList.append({"transponders": tlist, "feid": self.feid, "flags": self.flags})
def cableTransponderSearchFinished(self):
self.buildTransponderList()
def keyCancel(self):
self.close()
def Satexists(self, tlist, pos):
for x in tlist:
if x == pos:
return 1
return 0
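# --- Illustrative sketch (not used above) -------------------------------------
# ScanSimple.__init__ only offers each network on the first tuner that can
# receive it; the standalone helper below shows the same "first tuner per
# network" selection on plain data (pairs of tuner name and network list).
def _first_tuner_per_network(tuner_networks):
	known = set()
	selected = []
	for tuner, networks in tuner_networks:
		if any(net not in known for net in networks):
			selected.append(tuner)
		known.update(networks)
	return selected
# e.g. _first_tuner_per_network([("A", ["DVB-S"]), ("B", ["DVB-S"]), ("C", ["DVB-C"])])
#      -> ["A", "C"]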
|
libo/Enigma2
|
lib/python/Screens/ScanSetup.py
|
Python
|
gpl-2.0
| 45,681 | 0.032924 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def get_connector_properties(root_helper, my_ip, multipath, enforce_multipath,
host=None):
"""Fake os-brick."""
props = {}
props['ip'] = my_ip
props['host'] = host
iscsi = ISCSIConnector('')
props['initiator'] = iscsi.get_initiator()
props['wwpns'] = ['100010604b019419']
props['wwnns'] = ['200010604b019419']
props['multipath'] = multipath
props['platform'] = 'x86_64'
props['os_type'] = 'linux2'
return props
class ISCSIConnector(object):
"""Mimick the iSCSI connector."""
def __init__(self, root_helper, driver=None,
execute=None, use_multipath=False,
device_scan_attempts=3,
*args, **kwargs):
        self.root_helper = root_helper
self.execute = execute
def get_initiator(self):
return "fake_iscsi.iqn"
|
nikesh-mahalka/nova
|
nova/tests/unit/virt/libvirt/fake_os_brick_connector.py
|
Python
|
apache-2.0
| 1,443 | 0 |
# -*- coding: utf-8 -*-
#
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import google.api_core.grpc_helpers
import google.api_core.operations_v1
from google.cloud.asset_v1p2beta1.proto import asset_service_pb2_grpc
class AssetServiceGrpcTransport(object):
"""gRPC transport class providing stubs for
google.cloud.asset.v1p2beta1 AssetService API.
The transport provides access to the raw gRPC stubs,
which can be used to take advantage of advanced
features of gRPC.
"""
# The scopes needed to make gRPC calls to all of the methods defined
# in this service.
_OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)
def __init__(
self, channel=None, credentials=None, address="cloudasset.googleapis.com:443"
):
"""Instantiate the transport class.
Args:
channel (grpc.Channel): A ``Channel`` instance through
which to make calls. This argument is mutually exclusive
with ``credentials``; providing both will raise an exception.
credentials (google.auth.credentials.Credentials): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If none
are specified, the client will attempt to ascertain the
credentials from the environment.
address (str): The address where the service is hosted.
"""
# If both `channel` and `credentials` are specified, raise an
# exception (channels come with credentials baked in already).
if channel is not None and credentials is not None:
raise ValueError(
"The `channel` and `credentials` arguments are mutually " "exclusive."
)
# Create the channel.
if channel is None:
channel = self.create_channel(
address=address,
credentials=credentials,
options={
"grpc.max_send_message_length": -1,
"grpc.max_receive_message_length": -1,
}.items(),
)
self._channel = channel
# gRPC uses objects called "stubs" that are bound to the
# channel and provide a basic method for each RPC.
self._stubs = {
"asset_service_stub": asset_service_pb2_grpc.AssetServiceStub(channel)
}
# Because this API includes a method that returns a
# long-running operation (proto: google.longrunning.Operation),
# instantiate an LRO client.
self._operations_client = google.api_core.operations_v1.OperationsClient(
channel
)
@classmethod
def create_channel(
cls, address="cloudasset.googleapis.com:443", credentials=None, **kwargs
):
"""Create and return a gRPC channel object.
Args:
address (str): The host for the channel to use.
credentials (~.Credentials): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
kwargs (dict): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object.
"""
return google.api_core.grpc_helpers.create_channel(
address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs
)
@property
def channel(self):
"""The gRPC channel used by the transport.
Returns:
grpc.Channel: A gRPC channel object.
"""
return self._channel
@property
def export_assets(self):
"""Return the gRPC stub for :meth:`AssetServiceClient.export_assets`.
Exports assets with time and resource types to a given Cloud Storage
location. The output format is newline-delimited JSON. This API
implements the ``google.longrunning.Operation`` API allowing you to keep
track of the export.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["asset_service_stub"].ExportAssets
@property
def batch_get_assets_history(self):
"""Return the gRPC stub for :meth:`AssetServiceClient.batch_get_assets_history`.
Batch gets the update history of assets that overlap a time window. For
RESOURCE content, this API outputs history with asset in both non-delete
or deleted status. For IAM\_POLICY content, this API outputs history
when the asset and its attached IAM POLICY both exist. This can create
gaps in the output history.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["asset_service_stub"].BatchGetAssetsHistory
@property
def create_feed(self):
"""Return the gRPC stub for :meth:`AssetServiceClient.create_feed`.
Creates a feed in a parent project/folder/organization to listen to its
asset updates.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["asset_service_stub"].CreateFeed
@property
def get_feed(self):
"""Return the gRPC stub for :meth:`AssetServiceClient.get_feed`.
Gets details about an asset feed.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["asset_service_stub"].GetFeed
@property
def list_feeds(self):
"""Return the gRPC stub for :meth:`AssetServiceClient.list_feeds`.
Lists all asset feeds in a parent project/folder/organization.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["asset_service_stub"].ListFeeds
@property
def update_feed(self):
"""Return the gRPC stub for :meth:`AssetServiceClient.update_feed`.
Updates an asset feed configuration.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["asset_service_stub"].UpdateFeed
@property
def delete_feed(self):
"""Return the gRPC stub for :meth:`AssetServiceClient.delete_feed`.
Deletes an asset feed.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["asset_service_stub"].DeleteFeed
|
tseaver/google-cloud-python
|
asset/google/cloud/asset_v1p2beta1/gapic/transports/asset_service_grpc_transport.py
|
Python
|
apache-2.0
| 7,923 | 0.001262 |
"""Tests for certbot_nginx._internal.http_01"""
import unittest
import josepy as jose
try:
import mock
except ImportError: # pragma: no cover
from unittest import mock # type: ignore
from acme import challenges
from certbot import achallenges
from certbot.tests import acme_util
from certbot.tests import util as test_util
from certbot_nginx._internal.obj import Addr
import test_util as util
AUTH_KEY = jose.JWKRSA.load(test_util.load_vector("rsa512_key.pem"))
class HttpPerformTest(util.NginxTest):
"""Test the NginxHttp01 challenge."""
account_key = AUTH_KEY
achalls = [
achallenges.KeyAuthorizationAnnotatedChallenge(
challb=acme_util.chall_to_challb(
challenges.HTTP01(token=b"kNdwjwOeX0I_A8DXt9Msmg"), "pending"),
domain="www.example.com", account_key=account_key),
achallenges.KeyAuthorizationAnnotatedChallenge(
challb=acme_util.chall_to_challb(
challenges.HTTP01(
token=b"\xba\xa9\xda?<m\xaewmx\xea\xad\xadv\xf4\x02\xc9y"
b"\x80\xe2_X\t\xe7\xc7\xa4\t\xca\xf7&\x945"
), "pending"),
domain="ipv6.com", account_key=account_key),
achallenges.KeyAuthorizationAnnotatedChallenge(
challb=acme_util.chall_to_challb(
challenges.HTTP01(
token=b"\x8c\x8a\xbf_-f\\cw\xee\xd6\xf8/\xa5\xe3\xfd"
b"\xeb9\xf1\xf5\xb9\xefVM\xc9w\xa4u\x9c\xe1\x87\xb4"
), "pending"),
domain="www.example.org", account_key=account_key),
achallenges.KeyAuthorizationAnnotatedChallenge(
challb=acme_util.chall_to_challb(
challenges.HTTP01(token=b"kNdwjxOeX0I_A8DXt9Msmg"), "pending"),
domain="migration.com", account_key=account_key),
achallenges.KeyAuthorizationAnnotatedChallenge(
challb=acme_util.chall_to_challb(
challenges.HTTP01(token=b"kNdwjxOeX0I_A8DXt9Msmg"), "pending"),
domain="ipv6ssl.com", account_key=account_key),
]
def setUp(self):
super().setUp()
config = self.get_nginx_configurator(
self.config_path, self.config_dir, self.work_dir, self.logs_dir)
from certbot_nginx._internal import http_01
self.http01 = http_01.NginxHttp01(config)
def test_perform0(self):
responses = self.http01.perform()
self.assertEqual([], responses)
@mock.patch("certbot_nginx._internal.configurator.NginxConfigurator.save")
def test_perform1(self, mock_save):
self.http01.add_chall(self.achalls[0])
response = self.achalls[0].response(self.account_key)
responses = self.http01.perform()
self.assertEqual([response], responses)
self.assertEqual(mock_save.call_count, 1)
def test_perform2(self):
acme_responses = []
for achall in self.achalls:
self.http01.add_chall(achall)
acme_responses.append(achall.response(self.account_key))
http_responses = self.http01.perform()
self.assertEqual(len(http_responses), 5)
for i in range(5):
self.assertEqual(http_responses[i], acme_responses[i])
def test_mod_config(self):
self.http01.add_chall(self.achalls[0])
self.http01.add_chall(self.achalls[2])
self.http01._mod_config() # pylint: disable=protected-access
self.http01.configurator.save()
self.http01.configurator.parser.load()
# vhosts = self.http01.configurator.parser.get_vhosts()
# for vhost in vhosts:
# pass
# if the name matches
# check that the location block is in there and is correct
# if vhost.addrs == set(v_addr1):
# response = self.achalls[0].response(self.account_key)
# else:
# response = self.achalls[2].response(self.account_key)
# self.assertEqual(vhost.addrs, set(v_addr2_print))
# self.assertEqual(vhost.names, set([response.z_domain.decode('ascii')]))
@mock.patch('certbot_nginx._internal.parser.NginxParser.add_server_directives')
def test_mod_config_http_and_https(self, mock_add_server_directives):
"""A server_name with both HTTP and HTTPS vhosts should get modded in both vhosts"""
self.configuration.https_port = 443
self.http01.add_chall(self.achalls[3]) # migration.com
self.http01._mod_config() # pylint: disable=protected-access
# Domain has an HTTP and HTTPS vhost
# 2 * 'rewrite' + 2 * 'return 200 keyauthz' = 4
self.assertEqual(mock_add_server_directives.call_count, 4)
@mock.patch('certbot_nginx._internal.parser.nginxparser.dump')
@mock.patch('certbot_nginx._internal.parser.NginxParser.add_server_directives')
def test_mod_config_only_https(self, mock_add_server_directives, mock_dump):
"""A server_name with only an HTTPS vhost should get modded"""
self.http01.add_chall(self.achalls[4]) # ipv6ssl.com
self.http01._mod_config() # pylint: disable=protected-access
# It should modify the existing HTTPS vhost
self.assertEqual(mock_add_server_directives.call_count, 2)
# since there was no suitable HTTP vhost or default HTTP vhost, a non-empty one
# should have been created and written to the challenge conf file
self.assertNotEqual(mock_dump.call_args[0][0], [])
@mock.patch('certbot_nginx._internal.parser.NginxParser.add_server_directives')
def test_mod_config_deduplicate(self, mock_add_server_directives):
"""A vhost that appears in both HTTP and HTTPS vhosts only gets modded once"""
achall = achallenges.KeyAuthorizationAnnotatedChallenge(
challb=acme_util.chall_to_challb(
challenges.HTTP01(token=b"kNdwjxOeX0I_A8DXt9Msmg"), "pending"),
domain="ssl.both.com", account_key=AUTH_KEY)
self.http01.add_chall(achall)
self.http01._mod_config() # pylint: disable=protected-access
        # Only 5 vhosts get modded, rather than 6, because two of the vhosts are the same;
        # each modded vhost adds directives twice, hence 5 * 2 calls
self.assertEqual(mock_add_server_directives.call_count, 5*2)
def test_mod_config_insert_bucket_directive(self):
nginx_conf = self.http01.configurator.parser.abs_path('nginx.conf')
expected = ['server_names_hash_bucket_size', '128']
original_conf = self.http01.configurator.parser.parsed[nginx_conf]
self.assertFalse(util.contains_at_depth(original_conf, expected, 2))
self.http01.add_chall(self.achalls[0])
self.http01._mod_config() # pylint: disable=protected-access
self.http01.configurator.save()
self.http01.configurator.parser.load()
generated_conf = self.http01.configurator.parser.parsed[nginx_conf]
self.assertTrue(util.contains_at_depth(generated_conf, expected, 2))
def test_mod_config_update_bucket_directive_in_included_file(self):
# save old example.com config
example_com_loc = self.http01.configurator.parser.abs_path('sites-enabled/example.com')
with open(example_com_loc) as f:
original_example_com = f.read()
# modify example.com config
modified_example_com = 'server_names_hash_bucket_size 64;\n' + original_example_com
with open(example_com_loc, 'w') as f:
f.write(modified_example_com)
self.http01.configurator.parser.load()
# run change
self.http01.add_chall(self.achalls[0])
self.http01._mod_config() # pylint: disable=protected-access
self.http01.configurator.save()
self.http01.configurator.parser.load()
# not in nginx.conf
expected = ['server_names_hash_bucket_size', '128']
nginx_conf_loc = self.http01.configurator.parser.abs_path('nginx.conf')
nginx_conf = self.http01.configurator.parser.parsed[nginx_conf_loc]
self.assertFalse(util.contains_at_depth(nginx_conf, expected, 2))
# is updated in example.com conf
generated_conf = self.http01.configurator.parser.parsed[example_com_loc]
self.assertTrue(util.contains_at_depth(generated_conf, expected, 0))
# put back example.com config
with open(example_com_loc, 'w') as f:
f.write(original_example_com)
self.http01.configurator.parser.load()
@mock.patch("certbot_nginx._internal.configurator.NginxConfigurator.ipv6_info")
def test_default_listen_addresses_no_memoization(self, ipv6_info):
# pylint: disable=protected-access
ipv6_info.return_value = (True, True)
self.http01._default_listen_addresses()
self.assertEqual(ipv6_info.call_count, 1)
ipv6_info.return_value = (False, False)
self.http01._default_listen_addresses()
self.assertEqual(ipv6_info.call_count, 2)
@mock.patch("certbot_nginx._internal.configurator.NginxConfigurator.ipv6_info")
def test_default_listen_addresses_t_t(self, ipv6_info):
# pylint: disable=protected-access
ipv6_info.return_value = (True, True)
addrs = self.http01._default_listen_addresses()
http_addr = Addr.fromstring("80")
http_ipv6_addr = Addr.fromstring("[::]:80")
self.assertEqual(addrs, [http_addr, http_ipv6_addr])
@mock.patch("certbot_nginx._internal.configurator.NginxConfigurator.ipv6_info")
def test_default_listen_addresses_t_f(self, ipv6_info):
# pylint: disable=protected-access
ipv6_info.return_value = (True, False)
addrs = self.http01._default_listen_addresses()
http_addr = Addr.fromstring("80")
http_ipv6_addr = Addr.fromstring("[::]:80 ipv6only=on")
self.assertEqual(addrs, [http_addr, http_ipv6_addr])
@mock.patch("certbot_nginx._internal.configurator.NginxConfigurator.ipv6_info")
def test_default_listen_addresses_f_f(self, ipv6_info):
# pylint: disable=protected-access
ipv6_info.return_value = (False, False)
addrs = self.http01._default_listen_addresses()
http_addr = Addr.fromstring("80")
self.assertEqual(addrs, [http_addr])
if __name__ == "__main__":
unittest.main() # pragma: no cover
|
letsencrypt/letsencrypt
|
certbot-nginx/tests/http_01_test.py
|
Python
|
apache-2.0
| 10,269 | 0.003019 |
def grade(tid, answer):
if answer.find("'twas_sum_EZ_programming,_am_I_rite?") != -1:
return { "correct": True, "message": "Nice job!" }
return { "correct": False, "message": "If you're confused, read some tutorials :)" }
|
EasyCTF/easyctf-2015
|
api/problems/programming/addition/addition_grader.py
|
Python
|
mit
| 225 | 0.04 |
####################################################################################################
# Copyright (C) 2016 by Ingo Keller, Katrin Lohan #
# <brutusthetschiepel@gmail.com> #
# #
# This file is part of pyJD (Python/Yarp Tools for the JD robot). #
# #
# pyJD is free software: you can redistribute it and/or modify it under the terms of the #
# GNU Affero General Public License as published by the Free Software Foundation, either #
# version 3 of the License, or (at your option) any later version. #
# #
# pyJD is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; #
# without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. #
# See the GNU Affero General Public License for more details. #
# #
# You should have received a copy of the GNU Affero General Public License #
# along with pyJD. If not, see <http://www.gnu.org/licenses/>. #
####################################################################################################
import argparse
import socket
import time
import yarp
EMSG_YARP_NOT_FOUND = "Could not connect to the yarp server. Try running 'yarp detect'."
EMSG_ROBOT_NOT_FOUND = 'Could not connect to the robot at %s:%s'
class EZModule(yarp.RFModule):
""" The EZBModule class provides a base class for developing modules for the JD robot.
"""
# Default IP Address and Port for the JD Humanoid Robot.
TCP_IP = '192.168.1.1'
TCP_PORT = 23
# Existing motor ID's are D0-D9, D12-D14 and D16-D18 there are more limits
LIMITS = [ (30, 180),
(70, 170),
(0, 170),
(0, 170),
(0, 60),
(0, 180),
(0, 90),
(0, 60),
(0, 180),
(0, 180),
(0, 180),
(0, 160),
(0, 180),
(0, 130),
(0, 180),
(0, 160),
(0, 180),
(50, 130),
(0, 180),
(0, 180),
(0, 180) ]
def __init__(self, ip, port, prefix):
yarp.RFModule.__init__(self)
self.ip = ip
self.port = int(port)
self.prefix = prefix
# self.last_pos = [-1] * len(EZModule.LIMITS)
def configure(self, rf):
name = self.__class__.__name__
if self.prefix:
name = self.prefix + '/' + name
self.setName(name)
# RPC Port
self.rpc_port = yarp.RpcServer()
# name settings
port_name = '/%s/%s' % (name, 'rpc')
if not self.rpc_port.open(port_name):
raise RuntimeError, EMSG_YARP_NOT_FOUND
self.attach_rpc_server(self.rpc_port)
return True
def interruptModule(self):
self.rpc_port.interrupt()
for x in dir(self):
if x.endswith('Port') and 'interrupt' in dir(getattr(self, x)):
getattr(self, x).interrupt()
return True
def close(self):
self.rpc_port.close()
for x in dir(self):
if x.endswith('Port') and 'close' in dir(getattr(self, x)):
getattr(self, x).close()
return True
def getPeriod(self):
return 0.1
def updateModule(self):
# XXX: I do not know why we need that, but if method is empty the module gets stuck
time.sleep(0.000001)
return True
def createInputPort(self, name, mode = 'unbuffered'):
""" This method returns an input port.
            @param name - gets appended to the module's name to form the port name
            @param mode - port mode: 'unbuffered' (default), 'buffered',
                          'rpcclient' or 'rpcserver'
@result port
"""
return self.__createPort(name + ':i', None, mode)
def __createPort(self, name, target = None, mode = 'unbuffered'):
""" This method returns a port object.
@param name - yarp name for the port
            @param target - optional name of a port to connect this port's output to
            @param mode   - port mode: 'unbuffered' (default), 'buffered',
                            'rpcclient' or 'rpcserver'
@result port
"""
# create port
if mode == 'buffered':
port = yarp.BufferedPortBottle()
elif mode == 'rpcclient':
port = yarp.RpcClient()
elif mode == 'rpcserver':
port = yarp.RpcServer()
else:
port = yarp.Port()
# build port name
port_name = ['']
# prefix handling
if hasattr(self, 'prefix') and self.prefix:
port_name.append(self.prefix)
port_name.append(self.__class__.__name__)
port_name.append(name)
# open port
if not port.open('/'.join(port_name)):
raise RuntimeError, EMSG_YARP_NOT_FOUND
# add output if given
if target:
port.addOutput(target)
if hasattr(self, '_ports'):
self._ports.append(port)
return port
def createOutputPort(self, name, target = None, mode = 'unbuffered'):
""" This method returns an output port.
            @param name   - gets appended to the module's name to form the port name
            @param target - optional name of a port to connect this port's output to
            @param mode   - port mode: 'unbuffered' (default), 'buffered',
                            'rpcclient' or 'rpcserver'
@result port
"""
return self.__createPort(name + ':o', target, mode)
####################################################################################################
#
# Default methods for running the modules standalone
#
####################################################################################################
def createArgParser():
""" This method creates a base argument parser.
@return Argument Parser object
"""
parser = argparse.ArgumentParser(description='Create a JDModule to control the JD robot.')
parser.add_argument( '-i', '--ip',
dest = 'ip',
default = str(EZModule.TCP_IP),
help = 'IP address for the JD robot.')
parser.add_argument( '-p', '--port',
dest = 'port',
default = str(EZModule.TCP_PORT),
help = 'Port for the JD robot')
parser.add_argument( '-n', '--name',
dest = 'name',
default = '',
help = 'Name prefix for Yarp port names')
return parser.parse_args()
def main(module_cls):
""" This is a main method to run a module from command line.
@param module_cls - an EZModule based class that can be started as a standalone module.
"""
args = createArgParser()
yarp.Network.init()
resource_finder = yarp.ResourceFinder()
resource_finder.setVerbose(True)
# resource_finder.configure(argc,argv);
module = module_cls(args.ip, args.port, args.name)
module.runModule(resource_finder)
yarp.Network.fini()
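# ----------------------------------------------------------------------------------------------
# Usage sketch (illustrative only): how a concrete module built on EZModule could be defined and
# launched through main(). The subclass name EchoModule, its port names and its behaviour are
# assumptions made for this example and are not part of pyJD itself.
#
# class EchoModule(EZModule):
#
#     def configure(self, rf):
#         EZModule.configure(self, rf)
#         # ports are opened as /EchoModule/in:i and /EchoModule/out:o (plus any name prefix)
#         self.inPort  = self.createInputPort('in', mode = 'buffered')
#         self.outPort = self.createOutputPort('out')
#         return True
#
#     def updateModule(self):
#         bottle = self.inPort.read(False)
#         if bottle is not None:
#             self.outPort.write(bottle)
#         return True
#
# if __name__ == '__main__':
#     main(EchoModule)
# ----------------------------------------------------------------------------------------------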
|
BrutusTT/pyJD
|
pyJD/EZModule.py
|
Python
|
agpl-3.0
| 8,093 | 0.011121 |
from sys import argv
script, user_name = argv
# Declare the text or prompt shown to the user
# for all requests for input
prompt = '> '
print "Hi %s, I'm the %s script." % (user_name, script)
print "I'd like to ask you a few questions."
print "Do you like me %s?" % user_name
# The 'prompt = >' is seen by user as they are asked for some input
likes = raw_input(prompt)
print "Where do you live %s?" % user_name
# The 'prompt = >' is seen by user as they are asked for some input
lives = raw_input(prompt)
print "What kind of computer do you have?"
# The 'prompt = >' is seen by user as they are asked for some input
computer = raw_input(prompt)
print """
Alright, so you said %r about liking me.
You live in %r. Not sure where that is.
And you have a %r computer. Nice.
""" % (likes, lives, computer)
|
udoyen/pythonlearning
|
1-35/ex14.py
|
Python
|
mit
| 808 | 0 |
# ******************************************************************************
# Copyright 2017-2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ******************************************************************************
"""
Defines text dataset handling.
"""
import logging
import numpy as np
import os
import h5py
from neon.data.dataiterator import NervanaDataIterator, ArrayIterator
from neon.data.datasets import Dataset
from neon.data.text_preprocessing import pad_sentences, pad_data
logger = logging.getLogger(__name__)
class Text(NervanaDataIterator):
"""
This class defines methods for loading and iterating over text datasets.
"""
def __init__(self, time_steps, path, vocab=None, tokenizer=None,
onehot_input=True, reverse_target=False, get_prev_target=False):
"""
Construct a text dataset object.
Arguments:
time_steps (int) : Length of a sequence.
path (str) : Path to text file.
vocab (python.set) : A set of unique tokens.
tokenizer (function) : Tokenizer function.
onehot_input (boolean): One-hot representation of input
reverse_target (boolean): for sequence to sequence models, set to
True to reverse target sequence. Also
disables shifting target by one.
get_prev_target (boolean): for sequence to sequence models, set to
True for training data to provide correct
target from previous time step as decoder
input. If condition, shape will be a tuple
of shapes, corresponding to encoder and
decoder inputs.
"""
super(Text, self).__init__(name=None)
self.seq_length = time_steps
self.onehot_input = onehot_input
self.batch_index = 0
self.reverse_target = reverse_target
self.get_prev_target = get_prev_target
X, y = self._get_data(path, tokenizer, vocab)
# reshape to preserve sentence continuity across batches
self.X = X.reshape(self.be.bsz, self.nbatches, time_steps)
self.y = y.reshape(self.be.bsz, self.nbatches, time_steps)
# stuff below this comment needs to be cleaned up and commented
self.nout = self.nclass
if self.onehot_input:
self.shape = (self.nout, time_steps)
self.dev_X = self.be.iobuf((self.nout, time_steps))
if self.get_prev_target:
self.dev_Z = self.be.iobuf((self.nout, time_steps))
else:
self.shape = (time_steps, 1)
self.dev_X = self.be.iobuf(time_steps, dtype=np.int32)
if self.get_prev_target:
self.dev_Z = self.be.iobuf(time_steps, dtype=np.int32)
self.decoder_shape = self.shape
self.dev_y = self.be.iobuf((self.nout, time_steps))
self.dev_lbl = self.be.iobuf(time_steps, dtype=np.int32)
self.dev_lblflat = self.dev_lbl.reshape((1, -1))
def _get_data(self, path, tokenizer, vocab):
text = open(path).read()
tokens = self.get_tokens(text, tokenizer)
# make this a static method
extra_tokens = len(tokens) % (self.be.bsz * self.seq_length)
if extra_tokens:
tokens = tokens[:-extra_tokens]
self.nbatches = len(tokens) // (self.be.bsz * self.seq_length)
self.ndata = self.nbatches * self.be.bsz # no leftovers
self.vocab = sorted(self.get_vocab(tokens, vocab))
self.nclass = len(self.vocab)
# vocab dicts
self.token_to_index = dict((t, i) for i, t in enumerate(self.vocab))
self.index_to_token = dict((i, t) for i, t in enumerate(self.vocab))
# map tokens to indices
X = np.asarray([self.token_to_index[t] for t in tokens], dtype=np.uint32)
if self.reverse_target:
y = X.copy()
else:
y = np.concatenate((X[1:], X[:1]))
return X, y
@staticmethod
def create_valid_file(path, valid_split=0.1):
"""
Create separate files for training and validation.
Arguments:
path(str): Path to data file.
valid_split(float, optional): Fraction of data to set aside for validation.
Returns:
str, str : Paths to train file and validation file
"""
text = open(path).read()
# create train and valid paths
filename, ext = os.path.splitext(path)
train_path = filename + '_train' + ext
valid_path = filename + '_valid' + ext
# split data
train_split = int(len(text) * (1 - valid_split))
train_text = text[:train_split]
valid_text = text[train_split:]
# write train file
with open(train_path, 'w') as train_file:
train_file.write(train_text)
# write valid file
with open(valid_path, 'w') as valid_file:
valid_file.write(valid_text)
return train_path, valid_path
@staticmethod
def get_tokens(string, tokenizer=None):
"""
Map string to a list of tokens.
Arguments:
string(str): String to be tokenized.
            tokenizer (function): Tokenizer function; if None, the string is
                treated as a list of character tokens.
Returns:
list : A list of tokens
"""
# (if tokenizer is None, we have a list of characters)
if tokenizer is None:
return string
else:
return tokenizer(string)
@staticmethod
def get_vocab(tokens, vocab=None):
"""
Construct vocabulary from the given tokens.
Arguments:
tokens(list): List of tokens.
vocab: (Default value = None)
Returns:
python.set : A set of unique tokens
"""
# (if vocab is not None, we check that it contains all tokens)
if vocab is None:
return set(tokens)
else:
vocab = set(vocab)
assert vocab >= set(tokens), "the predefined vocab must contain all the tokens"
return vocab
@staticmethod
def pad_sentences(sentences, sentence_length=None, dtype=np.int32, pad_val=0.):
"""
Deprecated, use neon.data.text_preprocessing.pad_sentences.
"""
logger.error('pad_sentences in the Text class is deprecated. This function '
'is now in neon.data.text_preprocessing.')
return pad_sentences(sentences,
sentence_length=sentence_length,
dtype=dtype,
pad_val=pad_val)
@staticmethod
def pad_data(path, vocab_size=20000, sentence_length=100, oov=2,
start=1, index_from=3, seed=113, test_split=0.2):
"""
Deprecated, use neon.data.text_preprocessing.pad_data.
"""
logger.error('pad_data in the Text class is deprecated. This function'
'is now in neon.data.text_preprocessing')
return pad_data(path,
vocab_size=vocab_size,
sentence_length=sentence_length,
oov=oov,
start=start,
index_from=index_from,
seed=seed,
test_split=test_split)
def reset(self):
"""
Reset the starting index of this dataset back to zero.
Relevant for when one wants to call repeated evaluations on the dataset
but don't want to wrap around for the last uneven minibatch
Not necessary when ndata is divisible by batch size
"""
self.batch_index = 0
def __iter__(self):
"""
Generator that can be used to iterate over this dataset.
Yields:
tuple : the next minibatch of data.
"""
self.batch_index = 0
while self.batch_index < self.nbatches:
X_batch = self.X[:, self.batch_index, :].T.astype(np.float32, order='C')
if self.reverse_target is False:
y_batch = self.y[:, self.batch_index, :].T.astype(np.float32, order='C')
else:
# reverse target sequence
y_batch = self.y[:, self.batch_index, ::-1].T.astype(np.float32, order='C')
self.dev_lbl.set(y_batch)
self.dev_y[:] = self.be.onehot(self.dev_lblflat, axis=0)
if self.onehot_input:
self.dev_lbl.set(X_batch)
self.dev_X[:] = self.be.onehot(self.dev_lblflat, axis=0)
if self.get_prev_target:
self.dev_Z[:, self.be.bsz:] = self.dev_y[:, :-self.be.bsz]
self.dev_Z[:, 0:self.be.bsz] = 0 # zero-hot, no input
else:
self.dev_X.set(X_batch)
if self.get_prev_target:
self.dev_lbl.set(y_batch)
self.dev_Z[1:, :] = self.dev_lbl[:-1, :]
self.dev_Z[0, :] = 0
self.batch_index += 1
if self.get_prev_target:
yield (self.dev_X, self.dev_Z), self.dev_y
else:
yield self.dev_X, self.dev_y
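# Usage sketch (illustrative): building a character-level Text dataset and iterating over it.
# The corpus file name and the backend settings below are assumptions for the example; a neon
# backend has to exist before the iterator is built because Text allocates device buffers.
#
# from neon.backends import gen_backend
# be = gen_backend(backend='cpu', batch_size=32)
# train_path, valid_path = Text.create_valid_file('corpus.txt', valid_split=0.1)
# train_set = Text(time_steps=64, path=train_path)
# valid_set = Text(time_steps=64, path=valid_path, vocab=train_set.vocab)
# for X, y in train_set:
#     pass  # X holds the (one-hot) inputs on device, y the shifted targets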
class TextNMT(Text):
"""
Datasets for neural machine translation on French / English bilingual datasets.
Available at http://www-lium.univ-lemans.fr/~schwenk/cslm_joint_paper/data/bitexts.tgz
Arguments:
time_steps (int) : Length of a sequence.
path (str) : Path to text file.
tokenizer (function) : Tokenizer function.
onehot_input (boolean): One-hot representation of input
get_prev_target (boolean): for sequence to sequence models, set to
True for training data to provide correct
target from previous time step as decoder
input. If condition, shape will be a tuple
of shapes, corresponding to encoder and
decoder inputs.
split (str): "train" or "valid" split of the dataset
dataset (str): 'un2000' for the United Nations dataset or 'eurparl7'
for the European Parliament datset.
subset_pct (float): Percentage of the dataset to use (100 is the full dataset)
"""
def __init__(self, time_steps, path, tokenizer=None,
onehot_input=False, get_prev_target=False, split=None,
dataset='un2000', subset_pct=100):
"""
Load French and English sentence data from file.
"""
assert dataset in ('europarl7', 'un2000'), "invalid dataset"
processed_file = os.path.join(path, dataset + '-' + split + '.h5')
assert os.path.exists(processed_file), "Dataset at '" + processed_file + "' not found"
self.subset_pct = subset_pct
super(TextNMT, self).__init__(time_steps, processed_file, vocab=None, tokenizer=tokenizer,
onehot_input=onehot_input, get_prev_target=get_prev_target,
reverse_target=True)
def _get_data(self, path, tokenizer, vocab):
"""
Tokenizer and vocab are unused but provided to match superclass method signature
"""
def vocab_to_dicts(vocab):
t2i = dict((t, i) for i, t in enumerate(vocab))
i2t = dict((i, t) for i, t in enumerate(vocab))
return t2i, i2t
# get saved processed data
logger.debug("Loading parsed data from %s", path)
with h5py.File(path, 'r') as f:
self.s_vocab = f['s_vocab'][:].tolist()
self.t_vocab = f['t_vocab'][:].tolist()
self.s_token_to_index, self.s_index_to_token = vocab_to_dicts(self.s_vocab)
self.t_token_to_index, self.t_index_to_token = vocab_to_dicts(self.t_vocab)
X = f['X'][:]
y = f['y'][:]
self.nclass = len(self.t_vocab)
# Trim subset and patial minibatch
if self.subset_pct < 100:
X = X[:int(X.shape[0] * self.subset_pct / 100.), :]
y = y[:int(y.shape[0] * self.subset_pct / 100.), :]
logger.debug("subset %d%% of data", self.subset_pct*100)
extra_sentences = X.shape[0] % self.be.bsz
if extra_sentences:
X = X[:-extra_sentences, :]
y = y[:-extra_sentences, :]
logger.debug("removing %d extra sentences", extra_sentences)
self.nbatches = X.shape[0] // self.be.bsz
self.ndata = self.nbatches * self.be.bsz # no leftovers
return X, y
class Shakespeare(Dataset):
"""
Shakespeare data set from http://cs.stanford.edu/people/karpathy/char-rnn.
"""
def __init__(self, timesteps, path='.'):
url = 'http://cs.stanford.edu/people/karpathy/char-rnn'
super(Shakespeare, self).__init__('shakespeare_input.txt',
url,
4573338,
path=path)
self.timesteps = timesteps
def load_data(self):
self.filepath = self.load_zip(self.filename, self.size)
return self.filepath
def gen_iterators(self):
self.load_data()
train_path, valid_path = Text.create_valid_file(self.filepath)
self._data_dict = {}
self._data_dict['train'] = Text(self.timesteps, train_path)
vocab = self._data_dict['train'].vocab
self._data_dict['valid'] = Text(self.timesteps, valid_path, vocab=vocab)
return self._data_dict
class PTB(Dataset):
"""
Penn Treebank data set from http://arxiv.org/pdf/1409.2329v5.pdf
Arguments:
timesteps (int): number of timesteps to embed the data
onehot_input (bool):
tokenizer (str): name of the tokenizer function within this
class to use on the data
"""
def __init__(self, timesteps, path='.',
onehot_input=True,
tokenizer=None,
reverse_target=False,
get_prev_target=False):
url = 'https://raw.githubusercontent.com/wojzaremba/lstm/master/data'
self.filemap = {'train': 5101618,
'test': 449945,
'valid': 399782}
keys = list(self.filemap.keys())
filenames = [self.gen_filename(phase) for phase in keys]
sizes = [self.filemap[phase] for phase in keys]
super(PTB, self).__init__(filenames,
url,
sizes,
path=path)
self.timesteps = timesteps
self.onehot_input = onehot_input
self.tokenizer = tokenizer
if tokenizer is not None:
assert hasattr(self, self.tokenizer)
self.tokenizer_func = getattr(self, self.tokenizer)
else:
self.tokenizer_func = None
self.reverse_target = reverse_target
self.get_prev_target = get_prev_target
@staticmethod
def newline_tokenizer(s):
"""
Tokenizer which breaks on newlines.
Arguments:
s (str): String to tokenize.
Returns:
str: String with "<eos>" in place of newlines.
"""
# replace newlines with '<eos>' so that
# the newlines count as words
return s.replace('\n', '<eos>').split()
@staticmethod
def gen_filename(phase):
"""
Filename generator.
Arguments:
phase(str): Phase
Returns:
string: ptb.<phase>.txt
"""
return 'ptb.%s.txt' % phase
def load_data(self):
self.file_paths = {}
for phase in self.filemap:
fn = self.gen_filename(phase)
size = self.filemap[phase]
self.file_paths[phase] = self.load_zip(fn, size)
return self.file_paths
def gen_iterators(self):
self.load_data()
self._data_dict = {}
self.vocab = None
for phase in ['train', 'test', 'valid']:
file_path = self.file_paths[phase]
            get_prev_target = self.get_prev_target if phase == 'train' else False
self._data_dict[phase] = Text(self.timesteps,
file_path,
tokenizer=self.tokenizer_func,
onehot_input=self.onehot_input,
vocab=self.vocab,
reverse_target=self.reverse_target,
get_prev_target=get_prev_target)
if self.vocab is None:
self.vocab = self._data_dict['train'].vocab
return self._data_dict
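# Usage sketch (illustrative): the PTB dataset with the newline tokenizer. Backend creation and
# the chosen batch size are assumptions for the example; gen_iterators() downloads the corpus on
# first use and returns train/test/valid Text iterators that share the training vocabulary.
#
# from neon.backends import gen_backend
# be = gen_backend(backend='cpu', batch_size=64)
# ptb = PTB(timesteps=20, path='data', tokenizer='newline_tokenizer')
# splits = ptb.gen_iterators()
# train_set, valid_set = splits['train'], splits['valid']
# PTB.newline_tokenizer(' the dog \n the cat \n')
# # -> ['the', 'dog', '<eos>', 'the', 'cat', '<eos>']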
class HutterPrize(Dataset):
"""
Hutter Prize data set from http://prize.hutter1.net/
"""
def __init__(self, path='.'):
super(HutterPrize, self).__init__('enwik8.zip',
'http://mattmahoney.net/dc',
35012219,
path=path)
def load_data(self):
self.filepath = self.load_zip(self.filename, self.size)
return self.filepath
class IMDB(Dataset):
"""
IMDB data set from http://www.aclweb.org/anthology/P11-1015..
"""
def __init__(self, vocab_size, sentence_length, path='.'):
url = 'https://s3.amazonaws.com/text-datasets'
super(IMDB, self).__init__('imdb.pkl',
url,
33213513,
path=path)
self.vocab_size = vocab_size
self.sentence_length = sentence_length
self.filepath = None
def load_data(self):
self.filepath = self.load_zip(self.filename, self.size)
return self.filepath
def gen_iterators(self):
if self.filepath is None:
self.load_data()
data = pad_data(self.filepath, vocab_size=self.vocab_size,
sentence_length=self.sentence_length)
(X_train, y_train), (X_test, y_test), nclass = data
self._data_dict = {'nclass': nclass}
self._data_dict['train'] = ArrayIterator(X_train, y_train, nclass=2)
self._data_dict['test'] = ArrayIterator(X_test, y_test, nclass=2)
return self._data_dict
class SICK(Dataset):
"""
Semantic Similarity dataset from qcri.org (Semeval 2014).
Arguments:
path (str): path to SICK_data directory
"""
def __init__(self, path='SICK_data/'):
url = 'http://alt.qcri.org/semeval2014/task1/data/uploads/'
self.filemap = {'train': 87341,
'test_annotated': 93443,
'trial': 16446}
keys = list(self.filemap.keys())
self.zip_paths = None
self.file_paths = [self.gen_filename(phase) for phase in keys]
self.sizes = [self.filemap[phase] for phase in keys]
super(SICK, self).__init__(filename=self.file_paths,
url=url,
size=self.sizes,
path=path)
@staticmethod
def gen_zipname(phase):
"""
Zip filename generator.
Arguments:
phase(str): Phase of training/evaluation
Returns:
string: sick_<phase>.zip
"""
return "sick_{}.zip".format(phase)
@staticmethod
def gen_filename(phase):
"""
Filename generator for the extracted zip files.
Arguments:
phase(str): Phase of training/evaluation
Returns:
string: SICK_<phase>.txt
"""
return "SICK_{}.txt".format(phase)
def load_data(self):
"""
Conditional data loader will download and extract zip files if not found locally.
"""
self.zip_paths = {}
for phase in self.filemap:
zn = self.gen_zipname(phase)
size = self.filemap[phase]
self.zip_paths[phase] = self.load_zip(zn, size)
return self.zip_paths
def load_eval_data(self):
"""
Load the SICK semantic-relatedness dataset. Data is a tab-delimited txt file,
in the format: Sentence1\tSentence2\tScore. Data is downloaded and extracted
from zip files if not found in directory specified by self.path.
Returns:
tuple of tuples of np.array: three tuples containing A & B sentences
for train, dev, and text, along with a fourth tuple containing
the scores for each AB pair.
"""
if self.zip_paths is None:
self.load_data()
trainA, trainB, devA, devB, testA, testB = [], [], [], [], [], []
trainS, devS, testS = [], [], []
with open(self.path + self.gen_filename('train'), 'rb') as f:
for line in f:
text = line.strip().split(b'\t')
trainA.append(text[1])
trainB.append(text[2])
trainS.append(text[3])
with open(self.path + self.gen_filename('trial'), 'rb') as f:
for line in f:
text = line.strip().split(b'\t')
devA.append(text[1])
devB.append(text[2])
devS.append(text[3])
with open(self.path + self.gen_filename('test_annotated'), 'rb') as f:
for line in f:
text = line.strip().split(b'\t')
testA.append(text[1])
testB.append(text[2])
testS.append(text[3])
trainS = [float(s) for s in trainS[1:]]
devS = [float(s) for s in devS[1:]]
testS = [float(s) for s in testS[1:]]
return ((np.array(trainA[1:]), np.array(trainB[1:])),
(np.array(devA[1:]), np.array(devB[1:])),
(np.array(testA[1:]), np.array(testB[1:])),
(np.array(trainS), np.array(devS), np.array(testS)))
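# Usage sketch (illustrative): loading the SICK relatedness data. 'SICK_data/' is simply the
# class default; the unpacking below mirrors the tuple layout documented in load_eval_data().
#
# sick = SICK(path='SICK_data/')
# (trainA, trainB), (devA, devB), (testA, testB), (trainS, devS, testS) = sick.load_eval_data()
# # trainA/trainB are paired sentences, trainS their gold relatedness scores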
|
NervanaSystems/neon
|
neon/data/text.py
|
Python
|
apache-2.0
| 22,858 | 0.001181 |
# Playlist.py
#
# reads all available playlists, adjusts song paths, removes songs that were not copied,
# and writes the resulting playlists to the destination
import mlsSong as sng
import config
import glob
import os
import sys
import codecs
def Playlist():
# get a list of all playlists
playlists = glob.glob(config.SOURCE_PLAYLISTFOLDER + "\\*.m3u*")
# keep only the file name
for (i, playlist) in enumerate(playlists):
(filepath, filename) = os.path.split(playlist)
playlists[i] = filename
# Winamp fail: playlists are saved with pretty random-looking names.
# Look up the new names in a look-up file. Playlists that are not found
# won't be copied.
for oldPlaylist in playlists:
newPlaylist = ""
for lutPlaylist in config.PLAYLIST_LUT:
print oldPlaylist
print lutPlaylist[0]
if lutPlaylist[0] == oldPlaylist:
newPlaylist = lutPlaylist[1]
print "Playlist name conversion: from", oldPlaylist, "to", newPlaylist
break
if newPlaylist == "":
print "No playlist name conversion found for", oldPlaylist
break
# "s" as in Source_playlist
# -------------------------
# open source playlist
try:
s = codecs.open(config.SOURCE_PLAYLISTFOLDER + "\\" + oldPlaylist, 'r', encoding='UTF-8')
## s = open(config.SOURCE_PLAYLISTFOLDER + "\\" + oldPlaylist, 'r')
except:
print "Playlist", oldPlaylist, "could not be read!"
continue
# "d" as in Destination_playlist
# ------------------------------
# check if destination playlist file already exists
try:
d = open(config.DEST_PLAYLISTFOLDER + "\\" + newPlaylist, 'r')
except:
# file does not exist, create it
d = open(config.DEST_PLAYLISTFOLDER + "\\" + newPlaylist, 'w')
else:
# file already exists, delete it and create a new one
d.close()
os.remove(config.DEST_PLAYLISTFOLDER + "\\" + newPlaylist)
d = open(config.DEST_PLAYLISTFOLDER + "\\" + newPlaylist, 'w')
# write header line
d.write("#EXTM3U\n")
# read first line, it should be '#EXTM3U'
b = s.readline()
print b
if b == '#EXTM3U\r\n':
print "EXTM3U playlist."
extm3u = True
else:
extm3u = False
# I'm pretty sure b is already the first song, so don't read another
# line before properly processing it
skipFirst = True
for lines in s:
if extm3u:
a = s.readline() # 'EXTINF:' song.trackLength,Artist - Title
# This line can be left unchanged.
if not skipFirst:
b = s.readline() # file path: strip SOURCE_MUSICFOLDER, replace it with DEST_MUSICFOLDER
print b
b = b.replace(config.SOURCE_MUSICFOLDER, config.DEST_MUSICFOLDER)
print b
else:
skipFirst = False
# process b:
# - if b is a relative path, convert it to absolute
# ... TO DO
# - find song, where config.songList[x].fileNameOld = b
# ... TO DO
# - if config.songList[x].added == 0: continue (song was not copied; don't add it to playlist)
# ... TO DO
# write new path to b
b = config.songList[x].fileNameNew + "\n"
if not extm3u:
# create line a
a = "EXTINF:" + config.songList[x].trackLength + ","
a = a + config.songList[x].trackArtist + " - "
a = a + config.songList[x].trackTitle + "\n"
d.write(a)
d.write(b)
s.close()
d.close()
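# Sketch (illustrative, not wired in above): one way to resolve the "TO DO" lookup described in
# the comments, assuming config.songList entries carry fileNameOld and added exactly as those
# comments state.
def find_copied_song(path):
    """Return the copied song whose original path matches, or None if it was not copied."""
    for song in config.songList:
        if song.fileNameOld == path and song.added:
            return song
    return None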
|
RalpH-himself/MusicLibrarySyncForMSC
|
mlsPlaylist.py
|
Python
|
gpl-3.0
| 3,927 | 0.003056 |
# -*- coding: utf-8 -*-
"""
celery.utils.mail
~~~~~~~~~~~~~~~~~
How task error emails are formatted and sent.
"""
from __future__ import absolute_import
import sys
import smtplib
import socket
import traceback
import warnings
from email.mime.text import MIMEText
from .functional import maybe_list
from .imports import symbol_by_name
supports_timeout = sys.version_info >= (2, 6)
_local_hostname = None
def get_local_hostname():
global _local_hostname
if _local_hostname is None:
_local_hostname = socket.getfqdn()
return _local_hostname
class SendmailWarning(UserWarning):
"""Problem happened while sending the email message."""
class Message(object):
def __init__(self, to=None, sender=None, subject=None,
body=None, charset='us-ascii'):
self.to = maybe_list(to)
self.sender = sender
self.subject = subject
self.body = body
self.charset = charset
def __repr__(self):
return '<Email: To:%r Subject:%r>' % (self.to, self.subject)
def __str__(self):
msg = MIMEText(self.body, 'plain', self.charset)
msg['Subject'] = self.subject
msg['From'] = self.sender
msg['To'] = ', '.join(self.to)
return msg.as_string()
class Mailer(object):
supports_timeout = supports_timeout
def __init__(self, host='localhost', port=0, user=None, password=None,
timeout=2, use_ssl=False, use_tls=False):
self.host = host
self.port = port
self.user = user
self.password = password
self.timeout = timeout
self.use_ssl = use_ssl
self.use_tls = use_tls
def send(self, message, fail_silently=False):
try:
if self.supports_timeout:
self._send(message, timeout=self.timeout)
else:
import socket
old_timeout = socket.getdefaulttimeout()
socket.setdefaulttimeout(self.timeout)
try:
self._send(message)
finally:
socket.setdefaulttimeout(old_timeout)
except Exception, exc:
if not fail_silently:
raise
warnings.warn(SendmailWarning(
'Mail could not be sent: %r %r\n%r' % (
exc, {'To': ', '.join(message.to),
'Subject': message.subject},
traceback.format_stack())))
def _send(self, message, **kwargs):
Client = smtplib.SMTP_SSL if self.use_ssl else smtplib.SMTP
client = Client(self.host, self.port,
local_hostname=get_local_hostname(), **kwargs)
if self.use_tls:
client.ehlo()
client.starttls()
client.ehlo()
if self.user and self.password:
client.login(self.user, self.password)
client.sendmail(message.sender, message.to, str(message))
try:
client.quit()
except socket.sslerror:
client.close()
class ErrorMail(object):
"""Defines how and when task error e-mails should be sent.
:param task: The task instance that raised the error.
:attr:`subject` and :attr:`body` are format strings which
are passed a context containing the following keys:
* name
Name of the task.
* id
UUID of the task.
* exc
String representation of the exception.
* args
Positional arguments.
* kwargs
Keyword arguments.
* traceback
String representation of the traceback.
* hostname
Worker hostname.
"""
    # pep8.py borks on an inline signature separator and
# says "trailing whitespace" ;)
EMAIL_SIGNATURE_SEP = '-- '
#: Format string used to generate error email subjects.
subject = """\
[celery@%(hostname)s] Error: Task %(name)s (%(id)s): %(exc)s
"""
#: Format string used to generate error email content.
body = """
Task %%(name)s with id %%(id)s raised exception:\n%%(exc)r
Task was called with args: %%(args)s kwargs: %%(kwargs)s.
The contents of the full traceback was:
%%(traceback)s
%(EMAIL_SIGNATURE_SEP)s
Just to let you know,
py-celery at %%(hostname)s.
""" % {'EMAIL_SIGNATURE_SEP': EMAIL_SIGNATURE_SEP}
error_whitelist = None
def __init__(self, task, **kwargs):
self.task = task
self.email_subject = kwargs.get('subject', self.subject)
self.email_body = kwargs.get('body', self.body)
self.error_whitelist = getattr(task, 'error_whitelist', None) or ()
def should_send(self, context, exc):
"""Returns true or false depending on if a task error mail
should be sent for this type of error."""
allow_classes = tuple(map(symbol_by_name, self.error_whitelist))
return not self.error_whitelist or isinstance(exc, allow_classes)
def format_subject(self, context):
return self.subject.strip() % context
def format_body(self, context):
return self.body.strip() % context
def send(self, context, exc, fail_silently=True):
if self.should_send(context, exc):
self.task.app.mail_admins(self.format_subject(context),
self.format_body(context),
fail_silently=fail_silently)
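# Usage sketch (illustrative): sending a plain message through Mailer. Host, addresses and
# credentials below are placeholders; with fail_silently=True a delivery problem only issues
# a SendmailWarning instead of re-raising the exception.
#
# message = Message(to=['admin@example.com'], sender='celery@example.com',
#                   subject='worker alert', body='something happened')
# mailer = Mailer(host='smtp.example.com', port=587, use_tls=True,
#                 user='celery', password='secret', timeout=2)
# mailer.send(message, fail_silently=True)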
|
mozilla/firefox-flicks
|
vendor-local/lib/python/celery/utils/mail.py
|
Python
|
bsd-3-clause
| 5,402 | 0 |
from unidown.tools import unlink_dir_rec
class TestDeleteDirRec:
def test_non_existence(self, tmp_path):
no_folder = tmp_path.joinpath("./donotexist/")
assert not no_folder.exists()
unlink_dir_rec(no_folder)
assert not no_folder.exists()
def test_recursive(self, tmp_path):
for number in range(1, 4):
with tmp_path.joinpath(str(number)).open('w'):
pass
sub_folder = tmp_path.joinpath("sub")
sub_folder.mkdir(parents=True, exist_ok=True)
for number in range(1, 4):
with sub_folder.joinpath(str(number)).open('w'):
pass
tmp_path.joinpath("sub2").mkdir()
unlink_dir_rec(tmp_path)
assert not tmp_path.exists()
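# Sketch (illustrative): a minimal implementation consistent with these tests -- it must ignore
# a missing path and remove files and sub-directories recursively. The real unlink_dir_rec in
# unidown.tools may differ.
#
# from pathlib import Path
#
# def unlink_dir_rec_sketch(path: Path) -> None:
#     if not path.exists():
#         return
#     for child in path.iterdir():
#         if child.is_dir():
#             unlink_dir_rec_sketch(child)
#         else:
#             child.unlink()
#     path.rmdir()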
|
IceflowRE/MR-eBook-Downloader
|
tests/tools_test.py
|
Python
|
gpl-3.0
| 762 | 0 |
"""Validators class."""
# -*- coding: utf-8 -*-
from wtforms import ValidationError
class UniqueValidator(object):
"""Validador para chequear variables unicas."""
def __init__(self, model, field, message=None):
self.model = model
self.field = field
if not message:
            message = u'Another element with the same value already exists.'
self.message = message
def __call__(self, form, field):
_id = None
params = {self.field: field.data,
'deleted': False}
existing = self.model.objects.filter(**params).first()
if 'id' in form.data:
_id = str(form.id.data)
if existing and (_id is None or _id != str(existing.id)):
raise ValidationError(self.message)
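# Usage sketch (illustrative): attaching UniqueValidator to a WTForms field. The User model and
# the form below are assumptions for the example; any model exposing objects.filter(...).first()
# and a 'deleted' flag works, since that is all the validator queries.
#
# from wtforms import Form, StringField
# from app.models import User  # hypothetical document model
#
# class UserForm(Form):
#     email = StringField('Email', validators=[
#         UniqueValidator(User, 'email', message=u'A user with this email already exists.')])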
|
janol77/flask-app
|
app/libs/validators.py
|
Python
|
gpl-3.0
| 775 | 0 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import gzip as gz
import os
import tempfile
from unittest import mock
from unittest.mock import Mock
import boto3
import pytest
from botocore.exceptions import ClientError, NoCredentialsError
from airflow.exceptions import AirflowException
from airflow.models import Connection
from airflow.providers.amazon.aws.hooks.s3 import S3Hook, provide_bucket_name, unify_bucket_name_and_key
try:
from moto import mock_s3
except ImportError:
mock_s3 = None
# This class needs to be separated out because if there are earlier mocks in the same class
# the tests will fail on teardown.
class TestAwsS3HookNoMock:
def test_check_for_bucket_raises_error_with_invalid_conn_id(self, monkeypatch):
monkeypatch.delenv('AWS_PROFILE', raising=False)
monkeypatch.delenv('AWS_ACCESS_KEY_ID', raising=False)
monkeypatch.delenv('AWS_SECRET_ACCESS_KEY', raising=False)
hook = S3Hook(aws_conn_id="does_not_exist")
with pytest.raises(NoCredentialsError):
hook.check_for_bucket("test-non-existing-bucket")
@pytest.mark.skipif(mock_s3 is None, reason='moto package not present')
class TestAwsS3Hook:
@mock_s3
def test_get_conn(self):
hook = S3Hook()
assert hook.get_conn() is not None
@mock_s3
def test_use_threads_default_value(self):
hook = S3Hook()
assert hook.transfer_config.use_threads is True
@mock_s3
def test_use_threads_set_value(self):
hook = S3Hook(transfer_config_args={"use_threads": False})
assert hook.transfer_config.use_threads is False
def test_parse_s3_url(self):
parsed = S3Hook.parse_s3_url("s3://test/this/is/not/a-real-key.txt")
assert parsed == ("test", "this/is/not/a-real-key.txt"), "Incorrect parsing of the s3 url"
def test_parse_s3_object_directory(self):
parsed = S3Hook.parse_s3_url("s3://test/this/is/not/a-real-s3-directory/")
assert parsed == ("test", "this/is/not/a-real-s3-directory/"), "Incorrect parsing of the s3 url"
def test_check_for_bucket(self, s3_bucket):
hook = S3Hook()
assert hook.check_for_bucket(s3_bucket) is True
assert hook.check_for_bucket('not-a-bucket') is False
@mock_s3
def test_get_bucket(self):
hook = S3Hook()
assert hook.get_bucket('bucket') is not None
@mock_s3
def test_create_bucket_default_region(self):
hook = S3Hook()
hook.create_bucket(bucket_name='new_bucket')
assert hook.get_bucket('new_bucket') is not None
@mock_s3
def test_create_bucket_us_standard_region(self, monkeypatch):
monkeypatch.delenv('AWS_DEFAULT_REGION', raising=False)
hook = S3Hook()
hook.create_bucket(bucket_name='new_bucket', region_name='us-east-1')
bucket = hook.get_bucket('new_bucket')
assert bucket is not None
region = bucket.meta.client.get_bucket_location(Bucket=bucket.name).get('LocationConstraint')
# https://github.com/spulec/moto/pull/1961
# If location is "us-east-1", LocationConstraint should be None
assert region is None
@mock_s3
def test_create_bucket_other_region(self):
hook = S3Hook()
hook.create_bucket(bucket_name='new_bucket', region_name='us-east-2')
bucket = hook.get_bucket('new_bucket')
assert bucket is not None
region = bucket.meta.client.get_bucket_location(Bucket=bucket.name).get('LocationConstraint')
assert region == 'us-east-2'
def test_check_for_prefix(self, s3_bucket):
hook = S3Hook()
bucket = hook.get_bucket(s3_bucket)
bucket.put_object(Key='a', Body=b'a')
bucket.put_object(Key='dir/b', Body=b'b')
assert hook.check_for_prefix(bucket_name=s3_bucket, prefix='dir/', delimiter='/') is True
assert hook.check_for_prefix(bucket_name=s3_bucket, prefix='a', delimiter='/') is False
def test_list_prefixes(self, s3_bucket):
hook = S3Hook()
bucket = hook.get_bucket(s3_bucket)
bucket.put_object(Key='a', Body=b'a')
bucket.put_object(Key='dir/b', Body=b'b')
assert [] == hook.list_prefixes(s3_bucket, prefix='non-existent/')
assert ['dir/'] == hook.list_prefixes(s3_bucket, delimiter='/')
assert ['a'] == hook.list_keys(s3_bucket, delimiter='/')
assert ['dir/b'] == hook.list_keys(s3_bucket, prefix='dir/')
def test_list_prefixes_paged(self, s3_bucket):
hook = S3Hook()
bucket = hook.get_bucket(s3_bucket)
# we don't need to test the paginator that's covered by boto tests
keys = [f"{i}/b" for i in range(2)]
dirs = [f"{i}/" for i in range(2)]
for key in keys:
bucket.put_object(Key=key, Body=b'a')
assert sorted(dirs) == sorted(hook.list_prefixes(s3_bucket, delimiter='/', page_size=1))
def test_list_keys(self, s3_bucket):
hook = S3Hook()
bucket = hook.get_bucket(s3_bucket)
bucket.put_object(Key='a', Body=b'a')
bucket.put_object(Key='dir/b', Body=b'b')
assert [] == hook.list_keys(s3_bucket, prefix='non-existent/')
assert ['a', 'dir/b'] == hook.list_keys(s3_bucket)
assert ['a'] == hook.list_keys(s3_bucket, delimiter='/')
assert ['dir/b'] == hook.list_keys(s3_bucket, prefix='dir/')
def test_list_keys_paged(self, s3_bucket):
hook = S3Hook()
bucket = hook.get_bucket(s3_bucket)
keys = [str(i) for i in range(2)]
for key in keys:
bucket.put_object(Key=key, Body=b'a')
assert sorted(keys) == sorted(hook.list_keys(s3_bucket, delimiter='/', page_size=1))
def test_check_for_key(self, s3_bucket):
hook = S3Hook()
bucket = hook.get_bucket(s3_bucket)
bucket.put_object(Key='a', Body=b'a')
assert hook.check_for_key('a', s3_bucket) is True
assert hook.check_for_key(f's3://{s3_bucket}//a') is True
assert hook.check_for_key('b', s3_bucket) is False
assert hook.check_for_key(f's3://{s3_bucket}//b') is False
def test_get_key(self, s3_bucket):
hook = S3Hook()
bucket = hook.get_bucket(s3_bucket)
bucket.put_object(Key='a', Body=b'a')
assert hook.get_key('a', s3_bucket).key == 'a'
assert hook.get_key(f's3://{s3_bucket}/a').key == 'a'
def test_read_key(self, s3_bucket):
hook = S3Hook()
bucket = hook.get_bucket(s3_bucket)
bucket.put_object(Key='my_key', Body=b'Cont\xC3\xA9nt')
assert hook.read_key('my_key', s3_bucket) == 'Contént'
# As of 1.3.2, Moto doesn't support select_object_content yet.
@mock.patch('airflow.providers.amazon.aws.hooks.base_aws.AwsBaseHook.get_client_type')
def test_select_key(self, mock_get_client_type, s3_bucket):
mock_get_client_type.return_value.select_object_content.return_value = {
'Payload': [{'Records': {'Payload': b'Cont\xC3\xA9nt'}}]
}
hook = S3Hook()
assert hook.select_key('my_key', s3_bucket) == 'Contént'
def test_check_for_wildcard_key(self, s3_bucket):
hook = S3Hook()
bucket = hook.get_bucket(s3_bucket)
bucket.put_object(Key='abc', Body=b'a')
bucket.put_object(Key='a/b', Body=b'a')
assert hook.check_for_wildcard_key('a*', s3_bucket) is True
assert hook.check_for_wildcard_key('abc', s3_bucket) is True
assert hook.check_for_wildcard_key(f's3://{s3_bucket}//a*') is True
assert hook.check_for_wildcard_key(f's3://{s3_bucket}//abc') is True
assert hook.check_for_wildcard_key('a', s3_bucket) is False
assert hook.check_for_wildcard_key('b', s3_bucket) is False
assert hook.check_for_wildcard_key(f's3://{s3_bucket}//a') is False
assert hook.check_for_wildcard_key(f's3://{s3_bucket}//b') is False
def test_get_wildcard_key(self, s3_bucket):
hook = S3Hook()
bucket = hook.get_bucket(s3_bucket)
bucket.put_object(Key='abc', Body=b'a')
bucket.put_object(Key='a/b', Body=b'a')
# The boto3 Class API is _odd_, and we can't do an isinstance check as
# each instance is a different class, so lets just check one property
# on S3.Object. Not great but...
assert hook.get_wildcard_key('a*', s3_bucket).key == 'a/b'
assert hook.get_wildcard_key('a*', s3_bucket, delimiter='/').key == 'abc'
assert hook.get_wildcard_key('abc', s3_bucket, delimiter='/').key == 'abc'
assert hook.get_wildcard_key(f's3://{s3_bucket}/a*').key == 'a/b'
assert hook.get_wildcard_key(f's3://{s3_bucket}/a*', delimiter='/').key == 'abc'
assert hook.get_wildcard_key(f's3://{s3_bucket}/abc', delimiter='/').key == 'abc'
assert hook.get_wildcard_key('a', s3_bucket) is None
assert hook.get_wildcard_key('b', s3_bucket) is None
assert hook.get_wildcard_key(f's3://{s3_bucket}/a') is None
assert hook.get_wildcard_key(f's3://{s3_bucket}/b') is None
def test_load_string(self, s3_bucket):
hook = S3Hook()
hook.load_string("Contént", "my_key", s3_bucket)
resource = boto3.resource('s3').Object(s3_bucket, 'my_key') # pylint: disable=no-member
assert resource.get()['Body'].read() == b'Cont\xC3\xA9nt'
def test_load_string_compress(self, s3_bucket):
hook = S3Hook()
hook.load_string("Contént", "my_key", s3_bucket, compression='gzip')
resource = boto3.resource('s3').Object(s3_bucket, 'my_key') # pylint: disable=no-member
data = gz.decompress(resource.get()['Body'].read())
assert data == b'Cont\xC3\xA9nt'
def test_load_string_compress_exception(self, s3_bucket):
hook = S3Hook()
with pytest.raises(NotImplementedError):
hook.load_string("Contént", "my_key", s3_bucket, compression='bad-compression')
def test_load_string_acl(self, s3_bucket):
hook = S3Hook()
hook.load_string("Contént", "my_key", s3_bucket, acl_policy='public-read')
response = boto3.client('s3').get_object_acl(Bucket=s3_bucket, Key="my_key", RequestPayer='requester')
assert (response['Grants'][1]['Permission'] == 'READ') and (
response['Grants'][0]['Permission'] == 'FULL_CONTROL'
)
def test_load_bytes(self, s3_bucket):
hook = S3Hook()
hook.load_bytes(b"Content", "my_key", s3_bucket)
resource = boto3.resource('s3').Object(s3_bucket, 'my_key') # pylint: disable=no-member
assert resource.get()['Body'].read() == b'Content'
def test_load_bytes_acl(self, s3_bucket):
hook = S3Hook()
hook.load_bytes(b"Content", "my_key", s3_bucket, acl_policy='public-read')
response = boto3.client('s3').get_object_acl(Bucket=s3_bucket, Key="my_key", RequestPayer='requester')
assert (response['Grants'][1]['Permission'] == 'READ') and (
response['Grants'][0]['Permission'] == 'FULL_CONTROL'
)
def test_load_fileobj(self, s3_bucket):
hook = S3Hook()
with tempfile.TemporaryFile() as temp_file:
temp_file.write(b"Content")
temp_file.seek(0)
hook.load_file_obj(temp_file, "my_key", s3_bucket)
resource = boto3.resource('s3').Object(s3_bucket, 'my_key') # pylint: disable=no-member
assert resource.get()['Body'].read() == b'Content'
def test_load_fileobj_acl(self, s3_bucket):
hook = S3Hook()
with tempfile.TemporaryFile() as temp_file:
temp_file.write(b"Content")
temp_file.seek(0)
hook.load_file_obj(temp_file, "my_key", s3_bucket, acl_policy='public-read')
response = boto3.client('s3').get_object_acl(
Bucket=s3_bucket, Key="my_key", RequestPayer='requester'
) # pylint: disable=no-member # noqa: E501 # pylint: disable=C0301
assert (response['Grants'][1]['Permission'] == 'READ') and (
response['Grants'][0]['Permission'] == 'FULL_CONTROL'
)
def test_load_file_gzip(self, s3_bucket):
hook = S3Hook()
with tempfile.NamedTemporaryFile(delete=False) as temp_file:
temp_file.write(b"Content")
temp_file.seek(0)
hook.load_file(temp_file.name, "my_key", s3_bucket, gzip=True)
resource = boto3.resource('s3').Object(s3_bucket, 'my_key') # pylint: disable=no-member
assert gz.decompress(resource.get()['Body'].read()) == b'Content'
os.unlink(temp_file.name)
def test_load_file_acl(self, s3_bucket):
hook = S3Hook()
with tempfile.NamedTemporaryFile(delete=False) as temp_file:
temp_file.write(b"Content")
temp_file.seek(0)
hook.load_file(temp_file.name, "my_key", s3_bucket, gzip=True, acl_policy='public-read')
response = boto3.client('s3').get_object_acl(
Bucket=s3_bucket, Key="my_key", RequestPayer='requester'
) # pylint: disable=no-member # noqa: E501 # pylint: disable=C0301
assert (response['Grants'][1]['Permission'] == 'READ') and (
response['Grants'][0]['Permission'] == 'FULL_CONTROL'
)
os.unlink(temp_file.name)
def test_copy_object_acl(self, s3_bucket):
hook = S3Hook()
with tempfile.NamedTemporaryFile() as temp_file:
temp_file.write(b"Content")
temp_file.seek(0)
hook.load_file_obj(temp_file, "my_key", s3_bucket)
hook.copy_object("my_key", "my_key", s3_bucket, s3_bucket)
response = boto3.client('s3').get_object_acl(
Bucket=s3_bucket, Key="my_key", RequestPayer='requester'
) # pylint: disable=no-member # noqa: E501 # pylint: disable=C0301
assert (response['Grants'][0]['Permission'] == 'FULL_CONTROL') and (len(response['Grants']) == 1)
@mock_s3
def test_delete_bucket_if_bucket_exist(self, s3_bucket):
# assert if the bucket is created
mock_hook = S3Hook()
mock_hook.create_bucket(bucket_name=s3_bucket)
assert mock_hook.check_for_bucket(bucket_name=s3_bucket)
mock_hook.delete_bucket(bucket_name=s3_bucket, force_delete=True)
assert not mock_hook.check_for_bucket(s3_bucket)
@mock_s3
def test_delete_bucket_if_not_bucket_exist(self, s3_bucket):
# assert if exception is raised if bucket not present
mock_hook = S3Hook()
with pytest.raises(ClientError) as ctx:
assert mock_hook.delete_bucket(bucket_name=s3_bucket, force_delete=True)
assert ctx.value.response['Error']['Code'] == 'NoSuchBucket'
@mock.patch.object(S3Hook, 'get_connection', return_value=Connection(schema='test_bucket'))
def test_provide_bucket_name(self, mock_get_connection):
class FakeS3Hook(S3Hook):
@provide_bucket_name
def test_function(self, bucket_name=None):
return bucket_name
fake_s3_hook = FakeS3Hook()
test_bucket_name = fake_s3_hook.test_function()
assert test_bucket_name == mock_get_connection.return_value.schema
test_bucket_name = fake_s3_hook.test_function(bucket_name='bucket')
assert test_bucket_name == 'bucket'
def test_delete_objects_key_does_not_exist(self, s3_bucket):
hook = S3Hook()
with pytest.raises(AirflowException) as ctx:
hook.delete_objects(bucket=s3_bucket, keys=['key-1'])
assert isinstance(ctx.value, AirflowException)
assert str(ctx.value) == "Errors when deleting: ['key-1']"
def test_delete_objects_one_key(self, mocked_s3_res, s3_bucket):
key = 'key-1'
mocked_s3_res.Object(s3_bucket, key).put(Body=b'Data')
hook = S3Hook()
hook.delete_objects(bucket=s3_bucket, keys=[key])
assert [o.key for o in mocked_s3_res.Bucket(s3_bucket).objects.all()] == []
def test_delete_objects_many_keys(self, mocked_s3_res, s3_bucket):
num_keys_to_remove = 1001
keys = []
for index in range(num_keys_to_remove):
key = f'key-{index}'
mocked_s3_res.Object(s3_bucket, key).put(Body=b'Data')
keys.append(key)
assert sum(1 for _ in mocked_s3_res.Bucket(s3_bucket).objects.all()) == num_keys_to_remove
hook = S3Hook()
hook.delete_objects(bucket=s3_bucket, keys=keys)
assert [o.key for o in mocked_s3_res.Bucket(s3_bucket).objects.all()] == []
def test_unify_bucket_name_and_key(self):
class FakeS3Hook(S3Hook):
@unify_bucket_name_and_key
def test_function_with_wildcard_key(self, wildcard_key, bucket_name=None):
return bucket_name, wildcard_key
@unify_bucket_name_and_key
def test_function_with_key(self, key, bucket_name=None):
return bucket_name, key
@unify_bucket_name_and_key
def test_function_with_test_key(self, test_key, bucket_name=None):
return bucket_name, test_key
fake_s3_hook = FakeS3Hook()
test_bucket_name_with_wildcard_key = fake_s3_hook.test_function_with_wildcard_key('s3://foo/bar*.csv')
assert ('foo', 'bar*.csv') == test_bucket_name_with_wildcard_key
test_bucket_name_with_key = fake_s3_hook.test_function_with_key('s3://foo/bar.csv')
assert ('foo', 'bar.csv') == test_bucket_name_with_key
with pytest.raises(ValueError) as ctx:
fake_s3_hook.test_function_with_test_key('s3://foo/bar.csv')
assert isinstance(ctx.value, ValueError)
@mock.patch('airflow.providers.amazon.aws.hooks.s3.NamedTemporaryFile')
def test_download_file(self, mock_temp_file):
mock_temp_file.return_value.__enter__ = Mock(return_value=mock_temp_file)
s3_hook = S3Hook(aws_conn_id='s3_test')
s3_hook.check_for_key = Mock(return_value=True)
s3_obj = Mock()
s3_obj.download_fileobj = Mock(return_value=None)
s3_hook.get_key = Mock(return_value=s3_obj)
key = 'test_key'
bucket = 'test_bucket'
s3_hook.download_file(key=key, bucket_name=bucket)
s3_hook.check_for_key.assert_called_once_with(key, bucket)
s3_hook.get_key.assert_called_once_with(key, bucket)
s3_obj.download_fileobj.assert_called_once_with(mock_temp_file)
def test_generate_presigned_url(self, s3_bucket):
hook = S3Hook()
presigned_url = hook.generate_presigned_url(
client_method="get_object", params={'Bucket': s3_bucket, 'Key': "my_key"}
)
url = presigned_url.split("?")[1]
params = {x[0]: x[1] for x in [x.split("=") for x in url[0:].split("&")]}
assert {"AWSAccessKeyId", "Signature", "Expires"}.issubset(set(params.keys()))
def test_should_throw_error_if_extra_args_is_not_dict(self):
with pytest.raises(ValueError):
S3Hook(extra_args=1)
def test_should_throw_error_if_extra_args_contains_unknown_arg(self, s3_bucket):
hook = S3Hook(extra_args={"unknown_s3_args": "value"})
with tempfile.TemporaryFile() as temp_file:
temp_file.write(b"Content")
temp_file.seek(0)
with pytest.raises(ValueError):
hook.load_file_obj(temp_file, "my_key", s3_bucket, acl_policy='public-read')
def test_should_pass_extra_args(self, s3_bucket):
hook = S3Hook(extra_args={"ContentLanguage": "value"})
with tempfile.TemporaryFile() as temp_file:
temp_file.write(b"Content")
temp_file.seek(0)
hook.load_file_obj(temp_file, "my_key", s3_bucket, acl_policy='public-read')
resource = boto3.resource('s3').Object(s3_bucket, 'my_key') # pylint: disable=no-member
assert resource.get()['ContentLanguage'] == "value"
@mock_s3
def test_get_bucket_tagging_no_tags_raises_error(self):
hook = S3Hook()
hook.create_bucket(bucket_name='new_bucket')
with pytest.raises(ClientError, match=r".*NoSuchTagSet.*"):
hook.get_bucket_tagging(bucket_name='new_bucket')
@mock_s3
def test_get_bucket_tagging_no_bucket_raises_error(self):
hook = S3Hook()
with pytest.raises(ClientError, match=r".*NoSuchBucket.*"):
hook.get_bucket_tagging(bucket_name='new_bucket')
@mock_s3
def test_put_bucket_tagging_with_valid_set(self):
hook = S3Hook()
hook.create_bucket(bucket_name='new_bucket')
tag_set = [{'Key': 'Color', 'Value': 'Green'}]
hook.put_bucket_tagging(bucket_name='new_bucket', tag_set=tag_set)
assert hook.get_bucket_tagging(bucket_name='new_bucket') == tag_set
@mock_s3
def test_put_bucket_tagging_with_pair(self):
hook = S3Hook()
hook.create_bucket(bucket_name='new_bucket')
tag_set = [{'Key': 'Color', 'Value': 'Green'}]
key = 'Color'
value = 'Green'
hook.put_bucket_tagging(bucket_name='new_bucket', key=key, value=value)
assert hook.get_bucket_tagging(bucket_name='new_bucket') == tag_set
@mock_s3
def test_put_bucket_tagging_with_pair_and_set(self):
hook = S3Hook()
hook.create_bucket(bucket_name='new_bucket')
expected = [{'Key': 'Color', 'Value': 'Green'}, {'Key': 'Fruit', 'Value': 'Apple'}]
tag_set = [{'Key': 'Color', 'Value': 'Green'}]
key = 'Fruit'
value = 'Apple'
hook.put_bucket_tagging(bucket_name='new_bucket', tag_set=tag_set, key=key, value=value)
result = hook.get_bucket_tagging(bucket_name='new_bucket')
assert len(result) == 2
assert result == expected
@mock_s3
def test_put_bucket_tagging_with_key_but_no_value_raises_error(self):
hook = S3Hook()
hook.create_bucket(bucket_name='new_bucket')
key = 'Color'
with pytest.raises(ValueError):
hook.put_bucket_tagging(bucket_name='new_bucket', key=key)
@mock_s3
def test_put_bucket_tagging_with_value_but_no_key_raises_error(self):
hook = S3Hook()
hook.create_bucket(bucket_name='new_bucket')
value = 'Color'
with pytest.raises(ValueError):
hook.put_bucket_tagging(bucket_name='new_bucket', value=value)
@mock_s3
def test_put_bucket_tagging_with_key_and_set_raises_error(self):
hook = S3Hook()
hook.create_bucket(bucket_name='new_bucket')
tag_set = [{'Key': 'Color', 'Value': 'Green'}]
key = 'Color'
with pytest.raises(ValueError):
hook.put_bucket_tagging(bucket_name='new_bucket', key=key, tag_set=tag_set)
@mock_s3
def test_put_bucket_tagging_with_value_and_set_raises_error(self):
hook = S3Hook()
hook.create_bucket(bucket_name='new_bucket')
tag_set = [{'Key': 'Color', 'Value': 'Green'}]
value = 'Green'
with pytest.raises(ValueError):
hook.put_bucket_tagging(bucket_name='new_bucket', value=value, tag_set=tag_set)
@mock_s3
def test_put_bucket_tagging_when_tags_exist_overwrites(self):
hook = S3Hook()
hook.create_bucket(bucket_name='new_bucket')
initial_tag_set = [{'Key': 'Color', 'Value': 'Green'}]
hook.put_bucket_tagging(bucket_name='new_bucket', tag_set=initial_tag_set)
assert len(hook.get_bucket_tagging(bucket_name='new_bucket')) == 1
assert hook.get_bucket_tagging(bucket_name='new_bucket') == initial_tag_set
new_tag_set = [{'Key': 'Fruit', 'Value': 'Apple'}]
hook.put_bucket_tagging(bucket_name='new_bucket', tag_set=new_tag_set)
result = hook.get_bucket_tagging(bucket_name='new_bucket')
assert len(result) == 1
assert result == new_tag_set
@mock_s3
def test_delete_bucket_tagging(self):
hook = S3Hook()
hook.create_bucket(bucket_name='new_bucket')
tag_set = [{'Key': 'Color', 'Value': 'Green'}]
hook.put_bucket_tagging(bucket_name='new_bucket', tag_set=tag_set)
hook.get_bucket_tagging(bucket_name='new_bucket')
hook.delete_bucket_tagging(bucket_name='new_bucket')
with pytest.raises(ClientError, match=r".*NoSuchTagSet.*"):
hook.get_bucket_tagging(bucket_name='new_bucket')
@mock_s3
def test_delete_bucket_tagging_with_no_tags(self):
hook = S3Hook()
hook.create_bucket(bucket_name='new_bucket')
hook.delete_bucket_tagging(bucket_name='new_bucket')
with pytest.raises(ClientError, match=r".*NoSuchTagSet.*"):
hook.get_bucket_tagging(bucket_name='new_bucket')
|
nathanielvarona/airflow
|
tests/providers/amazon/aws/hooks/test_s3.py
|
Python
|
apache-2.0
| 25,423 | 0.002085 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-10-02 21:26
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
("order", "0004_auto_20160111_1108"),
("wellsfargo", "0007_financingplan_advertising_enabled"),
]
operations = [
migrations.CreateModel(
name="FraudScreenResult",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"screen_type",
models.CharField(max_length=25, verbose_name="Fraud Screen Type"),
),
(
"decision",
models.CharField(
choices=[
("REJECT", "Transaction was rejected"),
("ACCEPT", "Transaction was accepted"),
("ERROR", "Error occurred while running fraud screen"),
],
max_length=25,
verbose_name="Decision",
),
),
("message", models.TextField(verbose_name="Message")),
("created_datetime", models.DateTimeField(auto_now_add=True)),
("modified_datetime", models.DateTimeField(auto_now=True)),
(
"order",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="order.Order"
),
),
],
options={
"ordering": ("-created_datetime", "-id"),
},
),
]
|
thelabnyc/django-oscar-wfrs
|
src/wellsfargo/migrations/0008_fraudscreenresult.py
|
Python
|
isc
| 1,998 | 0.001502 |
#-*- coding: utf-8 -*-
import os, re
from traceback import format_exc as fme
from exprParser import Parser, ParserContext
class Shell:
echo = False
echoStrs = {
'on': True,
'off': False,
'true': True,
'false': False,
}
commands = {}
history = []
values = []
ops = []
def processEchoCommand(self, args):
try:
if len(args) == 0:
pass
else:
echoStateStr = args[0].lower()
self.echo = self.echoStrs[echoStateStr]
print 'echo = %r' % (self.echo, )
except Exception, e:
self.error('invalid echo setting value %s' % (echoStateStr, ))
def processExitCommand(self, args):
self.msg('bye!')
exit()
def makeHistoryCommandArgs(self, args):
h = self.history
if len(args) > 0:
arg = args[0]
if arg.isdigit():
return int(arg)
return len(h)
def processHistoryCommand(self, args):
h = self.history
historyLen = self.makeHistoryCommandArgs(args)
for item, i in zip(h, reversed(range(historyLen))):
self.msg('%d. %s' % (i + 1, item))
def processOps(self, args):
ops = self.ops
for op in self.ops:
self.msg(op)
def msg(self, txt):
print txt
def error(self, msg):
print msg
def installCommands(self):
c = self.commands
c[':echo'] = self.processEchoCommand
c[':exit'] = self.processExitCommand
c[':history'] = self.processHistoryCommand
c[':ops'] = self.processOps
def inputOperation(self, userInput):
parser = Parser()
context = ParserContext()
context.unnamedVariables = self.values
parser.context = context
parser.parse(userInput)
d = parser.ret
self.values.append(d)
self.msg('$%d=' % (len(self.values), ))
self.msg(str(d))
#self.printDeterminant(self.values[-1])
return True
def isValidDeterminant(self, d):
rl = -1
for r in d:
if rl == -1:
rl = len(r)
elif len(r) != rl:
self.msg('invalid determinant')
return False
return True
def printDeterminant(self, d):
msg = ''
for r in d:
msg += '|'
for e in r:
msg +=str(e) + '\t'
msg += '|\n'
self.msg(msg)
def processOperationInput(self, userInput):
userInput = userInput.strip()
return self.inputOperation(userInput)
def runShell(self):
self.installCommands()
while 1:
userInput = raw_input('>>')
if len(userInput.strip()) == 0:
continue
if True == self.echo:
self.msg(userInput)
inputs = userInput.split(' ')
if len(inputs) > 0:
cmdName = inputs[0]
if cmdName in self.commands:
try:
self.history.append(userInput)
self.commands[cmdName](inputs[1:])
except Exception, e:
print e
print fme()
elif self.processOperationInput(userInput):
self.ops.append(userInput)
pass
else:
        self.error('unknown command/operation "%s"' % (userInput))
if __name__ == '__main__':
s = Shell()
s.runShell()
|
qinggeng/tools
|
MatrixSkatch/shell.py
|
Python
|
unlicense
| 2,810 | 0.041637 |
#####################################################################
# u1.py
#
# (c) Copyright 2021, Benjamin Parzella. All rights reserved.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#####################################################################
"""SECS 1 byte unsigned integer variable type."""
from .base_number import BaseNumber
class U1(BaseNumber):
"""
Secs type for 1 byte unsigned data.
:param value: initial value
:type value: list/integer
:param count: number of items this value
:type count: integer
"""
format_code = 0o51
text_code = "U1"
_base_type = int
_min = 0
_max = 255
_bytes = 1
_struct_code = "B"
preferred_types = [int]
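# A minimal usage sketch, consistent with the constructor documented above
# (not part of the original module):
#     item = U1(42)            # a single 1-byte unsigned value
#     item = U1([1, 2, 255])   # a list of values, each within 0..255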
|
bparzella/secsgem
|
secsgem/secs/variables/u1.py
|
Python
|
lgpl-2.1
| 1,183 | 0 |
# -*- coding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.template import Library
from treemenus.models import MenuItem
register = Library()
class MenuItemExtension(models.Model):
menu_item = models.OneToOneField(MenuItem, related_name="extension")
visivel = models.BooleanField(default=False)
css = models.CharField(_(u'CSS style'), null=True, blank=True, max_length=300)
|
ansp-2015/arquea
|
menuextension/models.py
|
Python
|
mpl-2.0
| 652 | 0.001534 |
# $Id: icmp.py,v 1.1.1.1 2005/10/29 18:20:48 provos Exp $
from dpkt import Packet, in_cksum as _icmp_cksum
import ip
# Types (icmp_type) and codes (icmp_code) -
# http://www.iana.org/assignments/icmp-parameters
ICMP_CODE_NONE = 0 # for types without codes
ICMP_ECHOREPLY = 0 # echo reply
ICMP_UNREACH = 3 # dest unreachable, codes:
ICMP_UNREACH_NET = 0 # bad net
ICMP_UNREACH_HOST = 1 # bad host
ICMP_UNREACH_PROTO = 2 # bad protocol
ICMP_UNREACH_PORT = 3 # bad port
ICMP_UNREACH_NEEDFRAG = 4 # IP_DF caused drop
ICMP_UNREACH_SRCFAIL = 5 # src route failed
ICMP_UNREACH_NET_UNKNOWN = 6 # unknown net
ICMP_UNREACH_HOST_UNKNOWN = 7 # unknown host
ICMP_UNREACH_ISOLATED = 8 # src host isolated
ICMP_UNREACH_NET_PROHIB = 9 # for crypto devs
ICMP_UNREACH_HOST_PROHIB = 10 # ditto
ICMP_UNREACH_TOSNET = 11 # bad tos for net
ICMP_UNREACH_TOSHOST = 12 # bad tos for host
ICMP_UNREACH_FILTER_PROHIB = 13 # prohibited access
ICMP_UNREACH_HOST_PRECEDENCE = 14 # precedence error
ICMP_UNREACH_PRECEDENCE_CUTOFF = 15 # precedence cutoff
ICMP_SRCQUENCH = 4 # packet lost, slow down
ICMP_REDIRECT = 5 # shorter route, codes:
ICMP_REDIRECT_NET = 0 # for network
ICMP_REDIRECT_HOST = 1 # for host
ICMP_REDIRECT_TOSNET = 2 # for tos and net
ICMP_REDIRECT_TOSHOST = 3 # for tos and host
ICMP_ALTHOSTADDR = 6 # alternate host address
ICMP_ECHO = 8 # echo service
ICMP_RTRADVERT = 9 # router advertise, codes:
ICMP_RTRADVERT_NORMAL = 0 # normal
ICMP_RTRADVERT_NOROUTE_COMMON = 16 # selective routing
ICMP_RTRSOLICIT = 10 # router solicitation
ICMP_TIMEXCEED = 11 # time exceeded, code:
ICMP_TIMEXCEED_INTRANS = 0 # ttl==0 in transit
ICMP_TIMEXCEED_REASS = 1 # ttl==0 in reass
ICMP_PARAMPROB = 12 # ip header bad
ICMP_PARAMPROB_ERRATPTR = 0        # error at param ptr
ICMP_PARAMPROB_OPTABSENT = 1 # req. opt. absent
ICMP_PARAMPROB_LENGTH = 2 # bad length
ICMP_TSTAMP = 13 # timestamp request
ICMP_TSTAMPREPLY = 14 # timestamp reply
ICMP_INFO = 15 # information request
ICMP_INFOREPLY = 16 # information reply
ICMP_MASK = 17 # address mask request
ICMP_MASKREPLY = 18 # address mask reply
ICMP_TRACEROUTE = 30 # traceroute
ICMP_DATACONVERR = 31 # data conversion error
ICMP_MOBILE_REDIRECT = 32 # mobile host redirect
ICMP_IP6_WHEREAREYOU = 33 # IPv6 where-are-you
ICMP_IP6_IAMHERE = 34 # IPv6 i-am-here
ICMP_MOBILE_REG = 35 # mobile registration req
ICMP_MOBILE_REGREPLY = 36 # mobile registration reply
ICMP_DNS = 37 # domain name request
ICMP_DNSREPLY = 38 # domain name reply
ICMP_SKIP = 39 # SKIP
ICMP_PHOTURIS = 40 # Photuris
ICMP_PHOTURIS_UNKNOWN_INDEX = 0 # unknown sec index
ICMP_PHOTURIS_AUTH_FAILED = 1 # auth failed
ICMP_PHOTURIS_DECOMPRESS_FAILED = 2 # decompress failed
ICMP_PHOTURIS_DECRYPT_FAILED = 3 # decrypt failed
ICMP_PHOTURIS_NEED_AUTHN = 4 # no authentication
ICMP_PHOTURIS_NEED_AUTHZ = 5 # no authorization
ICMP_TYPE_MAX = 40
class ICMP(Packet):
"""Internet Control Message Protocol."""
__hdr__ = (
('type', 'B', 8),
('code', 'B', 0),
('sum', 'H', 0)
)
class Echo(Packet):
__hdr__ = (('id', 'H', 0), ('seq', 'H', 0))
class Quote(Packet):
__hdr__ = (('pad', 'I', 0),)
def unpack(self, buf):
Packet.unpack(self, buf)
self.data = self.ip = ip.IP(self.data)
class Unreach(Quote):
__hdr__ = (('pad', 'H', 0), ('mtu', 'H', 0))
class Quench(Quote):
pass
class Redirect(Quote):
__hdr__ = (('gw', 'I', 0),)
class ParamProbe(Quote):
__hdr__ = (('ptr', 'B', 0), ('pad1', 'B', 0), ('pad2', 'H', 0))
class TimeExceed(Quote):
pass
_typesw = { 0:Echo, 3:Unreach, 4:Quench, 5:Redirect, 8:Echo,
11:TimeExceed }
def unpack(self, buf):
Packet.unpack(self, buf)
try:
self.data = self._typesw[self.type](self.data)
setattr(self, self.data.__class__.__name__.lower(), self.data)
except:
self.data = buf
def __str__(self):
if not self.sum:
self.sum = _icmp_cksum(Packet.__str__(self))
return Packet.__str__(self)
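# A rough usage sketch (assumes dpkt's keyword-argument Packet constructor;
# the values below are illustrative only):
#     echo = ICMP.Echo(id=1, seq=1, data='hello')
#     pkt = ICMP(type=ICMP_ECHO, data=echo)
#     raw = str(pkt)      # __str__ above fills in the checksum
#     parsed = ICMP(raw)  # unpack() dispatches to ICMP.Echo via _typesw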
|
Banjong1990/honey
|
dpkt/dpkt/icmp.py
|
Python
|
gpl-2.0
| 4,112 | 0.034776 |
import sys
ROBOT_LISTENER_API_VERSION = 2
def start_keyword(name, attrs):
sys.stdout.write('start keyword %s\n' % name)
sys.stderr.write('start keyword %s\n' % name)
def end_keyword(name, attrs):
sys.stdout.write('end keyword %s\n' % name)
sys.stderr.write('end keyword %s\n' % name)
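# Usage sketch (illustrative; the exact runner command depends on the Robot
# Framework version):
#     robot --listener listener_printing_start_end_kw.py some_tests.robot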
|
dkentw/robotframework
|
atest/robot/standard_libraries/builtin/listener_printing_start_end_kw.py
|
Python
|
apache-2.0
| 306 | 0 |
from elasticsearch import TransportError
import olympia.core.logger
from olympia.amo.utils import render
log = olympia.core.logger.getLogger('z.es')
class ElasticsearchExceptionMiddleware(object):
def process_exception(self, request, exception):
if issubclass(exception.__class__, TransportError):
log.exception(u'Elasticsearch error')
return render(request, 'search/down.html', status=503)
|
lavish205/olympia
|
src/olympia/search/middleware.py
|
Python
|
bsd-3-clause
| 434 | 0 |
class Egg(object):
def __init__(self, xpos, ypos, t, s):
self.x = xpos # x-coordinate
self.y = ypos # y-coordinate
self.tilt = t # Left and right angle offset
self.angle = 0 # Used to define the tilt
self.scalar = s / 100.0 # Height of the egg
def wobble(self):
self.tilt = cos(self.angle) / 8
self.angle += 0.1
def display(self):
noStroke()
fill(255)
with pushMatrix():
translate(self.x, self.y)
rotate(self.tilt)
scale(self.scalar)
with beginShape():
vertex(0, -100)
bezierVertex(25, -100, 40, -65, 40, -40)
bezierVertex(40, -15, 25, 0, 0, 0)
bezierVertex(-25, 0, -40, -15, -40, -40)
bezierVertex(-40, -65, -25, -100, 0, -100)
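# Hypothetical sketch-level usage (names and values below are illustrative,
# not part of this example tab):
#     def setup():
#         size(640, 360)
#         global egg
#         egg = Egg(width / 2, height * 0.75, PI / 32, 80)
#
#     def draw():
#         background(0)
#         egg.wobble()
#         egg.display()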
|
kantel/processingpy
|
sketches/modes/PythonMode/examples/Basics/Objects/CompositeObjects/egg.py
|
Python
|
mit
| 858 | 0.001166 |
import numpy as np
def CG(A, X, B, maxiter=20, tolerance=1.0e-10, verbose=False):
"""Solve X*A=B using conjugate gradient method.
    ``X`` and ``B`` are ``ndarrays`` of shape ``(m, nx, ny, nz)``
    corresponding to matrices of size ``m*n`` (``n=nx*ny*nz``) and
``A`` is a callable representing an ``n*n`` matrix::
A(X, Y)
will store ``X*A`` in the output array ``Y``.
On return ``X`` will be the solution to ``X*A=B`` within
``tolerance``."""
m = len(X)
shape = (m, 1, 1, 1)
R = np.empty(X.shape, X.dtype.char)
Q = np.empty(X.shape, X.dtype.char)
A(X, R)
R -= B
P = R.copy()
c1 = A.sum(np.reshape([abs(np.vdot(r, r)) for r in R], shape))
for i in range(maxiter):
error = sum(c1.ravel())
if verbose:
print 'CG-%d: %e' % (i, error)
if error < tolerance:
return i, error
A(P, Q)
#alpha = c1 / reshape([vdot(p, q) for p, q in zip(P, Q)], shape)
alpha = c1 / A.sum(np.reshape([np.vdot(q,p)
for p, q in zip(P, Q)], shape))
X -= alpha * P
R -= alpha * Q
c0 = c1
c1 = A.sum(np.reshape([abs(np.vdot(r, r)) for r in R], shape))
beta = c1 / c0
P *= beta
P += R
raise ArithmeticError('Did not converge!')
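# A minimal serial usage sketch (not part of gpaw): the operator passed as
# ``A`` must be callable as A(X, Y) and expose a sum() method; with no domain
# decomposition, sum() can simply return its argument unchanged.
#
#     class DiagonalOperator:
#         def __init__(self, diag):
#             self.diag = diag          # shape (nx, ny, nz)
#         def __call__(self, X, Y):
#             Y[:] = X * self.diag      # X*A for a diagonal A
#         def sum(self, a):
#             return a                  # serial case: nothing to reduce
#
#     diag = 2.0 * np.ones((2, 2, 2))
#     B = np.ones((3, 2, 2, 2))
#     X = np.zeros_like(B)
#     niter, error = CG(DiagonalOperator(diag), X, B)   # X converges to B / 2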
|
qsnake/gpaw
|
gpaw/utilities/cg.py
|
Python
|
gpl-3.0
| 1,346 | 0.003715 |
"""
Grade API v1 URL specification
"""
from django.conf.urls import url, patterns
import views
urlpatterns = patterns(
'',
url(r'^grades/courses/$', views.CourseGradeList.as_view()),
url(r'^grades/courses/(?P<org>[A-Za-z0-9_.-]+)[+](?P<name>[A-Za-z0-9_.-]+)[+](?P<run>[A-Za-z0-9_.-]+)/$', views.CourseGradeDetail.as_view()),
url(r'^grades/students/$', views.StudentList.as_view()),
url(r'^grades/students/(?P<student_id>[0-9]+)/$', views.StudentGradeDetail.as_view()),
)
|
jaygoswami2303/course_dashboard_api
|
v2/GradeAPI/urls.py
|
Python
|
mit
| 492 | 0.006098 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for Requests."""
from __future__ import division
import json
import os
import pickle
import unittest
import requests
import pytest
from requests.adapters import HTTPAdapter
from requests.auth import HTTPDigestAuth
from requests.compat import (
Morsel, cookielib, getproxies, str, urljoin, urlparse)
from requests.cookies import cookiejar_from_dict, morsel_to_cookie
from requests.exceptions import InvalidURL, MissingSchema
from requests.structures import CaseInsensitiveDict
try:
import StringIO
except ImportError:
import io as StringIO
HTTPBIN = os.environ.get('HTTPBIN_URL', 'http://httpbin.org/')
# Issue #1483: Make sure the URL always has a trailing slash
HTTPBIN = HTTPBIN.rstrip('/') + '/'
def httpbin(*suffix):
"""Returns url for HTTPBIN resource."""
return urljoin(HTTPBIN, '/'.join(suffix))
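# e.g. httpbin('status', '404') -> 'http://httpbin.org/status/404'
# (with the default HTTPBIN_URL above)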
class RequestsTestCase(unittest.TestCase):
_multiprocess_can_split_ = True
def setUp(self):
"""Create simple data set with headers."""
pass
def tearDown(self):
"""Teardown."""
pass
def test_entry_points(self):
requests.session
requests.session().get
requests.session().head
requests.get
requests.head
requests.put
requests.patch
requests.post
def test_invalid_url(self):
with pytest.raises(MissingSchema):
requests.get('hiwpefhipowhefopw')
with pytest.raises(InvalidURL):
requests.get('http://')
def test_basic_building(self):
req = requests.Request()
req.url = 'http://kennethreitz.org/'
req.data = {'life': '42'}
pr = req.prepare()
assert pr.url == req.url
assert pr.body == 'life=42'
def test_no_content_length(self):
get_req = requests.Request('GET', httpbin('get')).prepare()
assert 'Content-Length' not in get_req.headers
head_req = requests.Request('HEAD', httpbin('head')).prepare()
assert 'Content-Length' not in head_req.headers
def test_path_is_not_double_encoded(self):
request = requests.Request('GET', "http://0.0.0.0/get/test case").prepare()
assert request.path_url == '/get/test%20case'
def test_params_are_added_before_fragment(self):
request = requests.Request('GET',
"http://example.com/path#fragment", params={"a": "b"}).prepare()
assert request.url == "http://example.com/path?a=b#fragment"
request = requests.Request('GET',
"http://example.com/path?key=value#fragment", params={"a": "b"}).prepare()
assert request.url == "http://example.com/path?key=value&a=b#fragment"
def test_mixed_case_scheme_acceptable(self):
s = requests.Session()
s.proxies = getproxies()
parts = urlparse(httpbin('get'))
schemes = ['http://', 'HTTP://', 'hTTp://', 'HttP://',
'https://', 'HTTPS://', 'hTTps://', 'HttPs://']
for scheme in schemes:
url = scheme + parts.netloc + parts.path
r = requests.Request('GET', url)
r = s.send(r.prepare())
assert r.status_code == 200, 'failed for scheme {0}'.format(scheme)
def test_HTTP_200_OK_GET_ALTERNATIVE(self):
r = requests.Request('GET', httpbin('get'))
s = requests.Session()
s.proxies = getproxies()
r = s.send(r.prepare())
assert r.status_code == 200
def test_HTTP_302_ALLOW_REDIRECT_GET(self):
r = requests.get(httpbin('redirect', '1'))
assert r.status_code == 200
# def test_HTTP_302_ALLOW_REDIRECT_POST(self):
# r = requests.post(httpbin('status', '302'), data={'some': 'data'})
# self.assertEqual(r.status_code, 200)
def test_HTTP_200_OK_GET_WITH_PARAMS(self):
heads = {'User-agent': 'Mozilla/5.0'}
r = requests.get(httpbin('user-agent'), headers=heads)
assert heads['User-agent'] in r.text
assert r.status_code == 200
def test_HTTP_200_OK_GET_WITH_MIXED_PARAMS(self):
heads = {'User-agent': 'Mozilla/5.0'}
r = requests.get(httpbin('get') + '?test=true', params={'q': 'test'}, headers=heads)
assert r.status_code == 200
def test_set_cookie_on_301(self):
s = requests.session()
url = httpbin('cookies/set?foo=bar')
r = s.get(url)
assert s.cookies['foo'] == 'bar'
def test_cookie_sent_on_redirect(self):
s = requests.session()
s.get(httpbin('cookies/set?foo=bar'))
r = s.get(httpbin('redirect/1')) # redirects to httpbin('get')
assert 'Cookie' in r.json()['headers']
def test_cookie_removed_on_expire(self):
s = requests.session()
s.get(httpbin('cookies/set?foo=bar'))
assert s.cookies['foo'] == 'bar'
s.get(
httpbin('response-headers'),
params={
'Set-Cookie':
'foo=deleted; expires=Thu, 01-Jan-1970 00:00:01 GMT'
}
)
assert 'foo' not in s.cookies
def test_cookie_quote_wrapped(self):
s = requests.session()
s.get(httpbin('cookies/set?foo="bar:baz"'))
assert s.cookies['foo'] == '"bar:baz"'
def test_cookie_persists_via_api(self):
s = requests.session()
r = s.get(httpbin('redirect/1'), cookies={'foo': 'bar'})
assert 'foo' in r.request.headers['Cookie']
assert 'foo' in r.history[0].request.headers['Cookie']
def test_request_cookie_overrides_session_cookie(self):
s = requests.session()
s.cookies['foo'] = 'bar'
r = s.get(httpbin('cookies'), cookies={'foo': 'baz'})
assert r.json()['cookies']['foo'] == 'baz'
# Session cookie should not be modified
assert s.cookies['foo'] == 'bar'
def test_request_cookies_not_persisted(self):
s = requests.session()
s.get(httpbin('cookies'), cookies={'foo': 'baz'})
# Sending a request with cookies should not add cookies to the session
assert not s.cookies
def test_generic_cookiejar_works(self):
cj = cookielib.CookieJar()
cookiejar_from_dict({'foo': 'bar'}, cj)
s = requests.session()
s.cookies = cj
r = s.get(httpbin('cookies'))
# Make sure the cookie was sent
assert r.json()['cookies']['foo'] == 'bar'
# Make sure the session cj is still the custom one
assert s.cookies is cj
def test_param_cookiejar_works(self):
cj = cookielib.CookieJar()
cookiejar_from_dict({'foo' : 'bar'}, cj)
s = requests.session()
r = s.get(httpbin('cookies'), cookies=cj)
# Make sure the cookie was sent
assert r.json()['cookies']['foo'] == 'bar'
def test_requests_in_history_are_not_overridden(self):
resp = requests.get(httpbin('redirect/3'))
urls = [r.url for r in resp.history]
req_urls = [r.request.url for r in resp.history]
assert urls == req_urls
def test_user_agent_transfers(self):
heads = {
'User-agent': 'Mozilla/5.0 (github.com/kennethreitz/requests)'
}
r = requests.get(httpbin('user-agent'), headers=heads)
assert heads['User-agent'] in r.text
heads = {
'user-agent': 'Mozilla/5.0 (github.com/kennethreitz/requests)'
}
r = requests.get(httpbin('user-agent'), headers=heads)
assert heads['user-agent'] in r.text
def test_HTTP_200_OK_HEAD(self):
r = requests.head(httpbin('get'))
assert r.status_code == 200
def test_HTTP_200_OK_PUT(self):
r = requests.put(httpbin('put'))
assert r.status_code == 200
def test_BASICAUTH_TUPLE_HTTP_200_OK_GET(self):
auth = ('user', 'pass')
url = httpbin('basic-auth', 'user', 'pass')
r = requests.get(url, auth=auth)
assert r.status_code == 200
r = requests.get(url)
assert r.status_code == 401
s = requests.session()
s.auth = auth
r = s.get(url)
assert r.status_code == 200
def test_basicauth_with_netrc(self):
auth = ('user', 'pass')
wrong_auth = ('wronguser', 'wrongpass')
url = httpbin('basic-auth', 'user', 'pass')
def get_netrc_auth_mock(url):
return auth
requests.sessions.get_netrc_auth = get_netrc_auth_mock
# Should use netrc and work.
r = requests.get(url)
assert r.status_code == 200
# Given auth should override and fail.
r = requests.get(url, auth=wrong_auth)
assert r.status_code == 401
s = requests.session()
# Should use netrc and work.
r = s.get(url)
assert r.status_code == 200
# Given auth should override and fail.
s.auth = wrong_auth
r = s.get(url)
assert r.status_code == 401
def test_DIGEST_HTTP_200_OK_GET(self):
auth = HTTPDigestAuth('user', 'pass')
url = httpbin('digest-auth', 'auth', 'user', 'pass')
r = requests.get(url, auth=auth)
assert r.status_code == 200
r = requests.get(url)
assert r.status_code == 401
s = requests.session()
s.auth = HTTPDigestAuth('user', 'pass')
r = s.get(url)
assert r.status_code == 200
def test_DIGEST_AUTH_RETURNS_COOKIE(self):
url = httpbin('digest-auth', 'auth', 'user', 'pass')
auth = HTTPDigestAuth('user', 'pass')
r = requests.get(url)
assert r.cookies['fake'] == 'fake_value'
r = requests.get(url, auth=auth)
assert r.status_code == 200
def test_DIGEST_AUTH_SETS_SESSION_COOKIES(self):
url = httpbin('digest-auth', 'auth', 'user', 'pass')
auth = HTTPDigestAuth('user', 'pass')
s = requests.Session()
s.get(url, auth=auth)
assert s.cookies['fake'] == 'fake_value'
def test_DIGEST_STREAM(self):
auth = HTTPDigestAuth('user', 'pass')
url = httpbin('digest-auth', 'auth', 'user', 'pass')
r = requests.get(url, auth=auth, stream=True)
assert r.raw.read() != b''
r = requests.get(url, auth=auth, stream=False)
assert r.raw.read() == b''
def test_DIGESTAUTH_WRONG_HTTP_401_GET(self):
auth = HTTPDigestAuth('user', 'wrongpass')
url = httpbin('digest-auth', 'auth', 'user', 'pass')
r = requests.get(url, auth=auth)
assert r.status_code == 401
r = requests.get(url)
assert r.status_code == 401
s = requests.session()
s.auth = auth
r = s.get(url)
assert r.status_code == 401
def test_DIGESTAUTH_QUOTES_QOP_VALUE(self):
auth = HTTPDigestAuth('user', 'pass')
url = httpbin('digest-auth', 'auth', 'user', 'pass')
r = requests.get(url, auth=auth)
assert '"auth"' in r.request.headers['Authorization']
def test_POSTBIN_GET_POST_FILES(self):
url = httpbin('post')
post1 = requests.post(url).raise_for_status()
post1 = requests.post(url, data={'some': 'data'})
assert post1.status_code == 200
with open('requirements.txt') as f:
post2 = requests.post(url, files={'some': f})
assert post2.status_code == 200
post4 = requests.post(url, data='[{"some": "json"}]')
assert post4.status_code == 200
with pytest.raises(ValueError):
requests.post(url, files = ['bad file data'])
def test_POSTBIN_GET_POST_FILES_WITH_DATA(self):
url = httpbin('post')
post1 = requests.post(url).raise_for_status()
post1 = requests.post(url, data={'some': 'data'})
assert post1.status_code == 200
with open('requirements.txt') as f:
post2 = requests.post(url, data={'some': 'data'}, files={'some': f})
assert post2.status_code == 200
post4 = requests.post(url, data='[{"some": "json"}]')
assert post4.status_code == 200
with pytest.raises(ValueError):
requests.post(url, files = ['bad file data'])
def test_conflicting_post_params(self):
url = httpbin('post')
with open('requirements.txt') as f:
pytest.raises(ValueError, "requests.post(url, data='[{\"some\": \"data\"}]', files={'some': f})")
pytest.raises(ValueError, "requests.post(url, data=u'[{\"some\": \"data\"}]', files={'some': f})")
def test_request_ok_set(self):
r = requests.get(httpbin('status', '404'))
assert not r.ok
def test_status_raising(self):
r = requests.get(httpbin('status', '404'))
with pytest.raises(requests.exceptions.HTTPError):
r.raise_for_status()
r = requests.get(httpbin('status', '500'))
assert not r.ok
def test_decompress_gzip(self):
r = requests.get(httpbin('gzip'))
r.content.decode('ascii')
def test_unicode_get(self):
url = httpbin('/get')
requests.get(url, params={'foo': 'føø'})
requests.get(url, params={'føø': 'føø'})
requests.get(url, params={'føø': 'føø'})
requests.get(url, params={'foo': 'foo'})
requests.get(httpbin('ø'), params={'foo': 'foo'})
def test_unicode_header_name(self):
requests.put(httpbin('put'), headers={str('Content-Type'): 'application/octet-stream'}, data='\xff') # compat.str is unicode.
def test_pyopenssl_redirect(self):
requests.get('https://httpbin.org/status/301')
def test_urlencoded_get_query_multivalued_param(self):
r = requests.get(httpbin('get'), params=dict(test=['foo', 'baz']))
assert r.status_code == 200
assert r.url == httpbin('get?test=foo&test=baz')
def test_different_encodings_dont_break_post(self):
r = requests.post(httpbin('post'),
data={'stuff': json.dumps({'a': 123})},
params={'blah': 'asdf1234'},
files={'file': ('test_requests.py', open(__file__, 'rb'))})
assert r.status_code == 200
def test_unicode_multipart_post(self):
r = requests.post(httpbin('post'),
data={'stuff': u'ëlïxr'},
files={'file': ('test_requests.py', open(__file__, 'rb'))})
assert r.status_code == 200
r = requests.post(httpbin('post'),
data={'stuff': u'ëlïxr'.encode('utf-8')},
files={'file': ('test_requests.py', open(__file__, 'rb'))})
assert r.status_code == 200
r = requests.post(httpbin('post'),
data={'stuff': 'elixr'},
files={'file': ('test_requests.py', open(__file__, 'rb'))})
assert r.status_code == 200
r = requests.post(httpbin('post'),
data={'stuff': 'elixr'.encode('utf-8')},
files={'file': ('test_requests.py', open(__file__, 'rb'))})
assert r.status_code == 200
def test_unicode_multipart_post_fieldnames(self):
filename = os.path.splitext(__file__)[0] + '.py'
r = requests.Request(method='POST',
url=httpbin('post'),
data={'stuff'.encode('utf-8'): 'elixr'},
files={'file': ('test_requests.py',
open(filename, 'rb'))})
prep = r.prepare()
assert b'name="stuff"' in prep.body
assert b'name="b\'stuff\'"' not in prep.body
def test_unicode_method_name(self):
files = {'file': open('test_requests.py', 'rb')}
r = requests.request(method=u'POST', url=httpbin('post'), files=files)
assert r.status_code == 200
def test_custom_content_type(self):
r = requests.post(httpbin('post'),
data={'stuff': json.dumps({'a': 123})},
files={'file1': ('test_requests.py', open(__file__, 'rb')),
'file2': ('test_requests', open(__file__, 'rb'),
'text/py-content-type')})
assert r.status_code == 200
assert b"text/py-content-type" in r.request.body
def test_hook_receives_request_arguments(self):
def hook(resp, **kwargs):
assert resp is not None
assert kwargs != {}
requests.Request('GET', HTTPBIN, hooks={'response': hook})
def test_session_hooks_are_used_with_no_request_hooks(self):
hook = lambda x, *args, **kwargs: x
s = requests.Session()
s.hooks['response'].append(hook)
r = requests.Request('GET', HTTPBIN)
prep = s.prepare_request(r)
assert prep.hooks['response'] != []
assert prep.hooks['response'] == [hook]
def test_session_hooks_are_overriden_by_request_hooks(self):
hook1 = lambda x, *args, **kwargs: x
hook2 = lambda x, *args, **kwargs: x
assert hook1 is not hook2
s = requests.Session()
s.hooks['response'].append(hook2)
r = requests.Request('GET', HTTPBIN, hooks={'response': [hook1]})
prep = s.prepare_request(r)
assert prep.hooks['response'] == [hook1]
def test_prepared_request_hook(self):
def hook(resp, **kwargs):
resp.hook_working = True
return resp
req = requests.Request('GET', HTTPBIN, hooks={'response': hook})
prep = req.prepare()
s = requests.Session()
s.proxies = getproxies()
resp = s.send(prep)
assert hasattr(resp, 'hook_working')
def test_prepared_from_session(self):
class DummyAuth(requests.auth.AuthBase):
def __call__(self, r):
r.headers['Dummy-Auth-Test'] = 'dummy-auth-test-ok'
return r
req = requests.Request('GET', httpbin('headers'))
assert not req.auth
s = requests.Session()
s.auth = DummyAuth()
prep = s.prepare_request(req)
resp = s.send(prep)
assert resp.json()['headers']['Dummy-Auth-Test'] == 'dummy-auth-test-ok'
def test_links(self):
r = requests.Response()
r.headers = {
'cache-control': 'public, max-age=60, s-maxage=60',
'connection': 'keep-alive',
'content-encoding': 'gzip',
'content-type': 'application/json; charset=utf-8',
'date': 'Sat, 26 Jan 2013 16:47:56 GMT',
'etag': '"6ff6a73c0e446c1f61614769e3ceb778"',
'last-modified': 'Sat, 26 Jan 2013 16:22:39 GMT',
'link': ('<https://api.github.com/users/kennethreitz/repos?'
'page=2&per_page=10>; rel="next", <https://api.github.'
'com/users/kennethreitz/repos?page=7&per_page=10>; '
' rel="last"'),
'server': 'GitHub.com',
'status': '200 OK',
'vary': 'Accept',
'x-content-type-options': 'nosniff',
'x-github-media-type': 'github.beta',
'x-ratelimit-limit': '60',
'x-ratelimit-remaining': '57'
}
assert r.links['next']['rel'] == 'next'
def test_cookie_parameters(self):
key = 'some_cookie'
value = 'some_value'
secure = True
domain = 'test.com'
rest = {'HttpOnly': True}
jar = requests.cookies.RequestsCookieJar()
jar.set(key, value, secure=secure, domain=domain, rest=rest)
assert len(jar) == 1
assert 'some_cookie' in jar
cookie = list(jar)[0]
assert cookie.secure == secure
assert cookie.domain == domain
assert cookie._rest['HttpOnly'] == rest['HttpOnly']
def test_cookie_as_dict_keeps_len(self):
key = 'some_cookie'
value = 'some_value'
key1 = 'some_cookie1'
value1 = 'some_value1'
jar = requests.cookies.RequestsCookieJar()
jar.set(key, value)
jar.set(key1, value1)
d1 = dict(jar)
d2 = dict(jar.iteritems())
d3 = dict(jar.items())
assert len(jar) == 2
assert len(d1) == 2
assert len(d2) == 2
assert len(d3) == 2
def test_cookie_as_dict_keeps_items(self):
key = 'some_cookie'
value = 'some_value'
key1 = 'some_cookie1'
value1 = 'some_value1'
jar = requests.cookies.RequestsCookieJar()
jar.set(key, value)
jar.set(key1, value1)
d1 = dict(jar)
d2 = dict(jar.iteritems())
d3 = dict(jar.items())
assert d1['some_cookie'] == 'some_value'
assert d2['some_cookie'] == 'some_value'
assert d3['some_cookie1'] == 'some_value1'
def test_cookie_as_dict_keys(self):
key = 'some_cookie'
value = 'some_value'
key1 = 'some_cookie1'
value1 = 'some_value1'
jar = requests.cookies.RequestsCookieJar()
jar.set(key, value)
jar.set(key1, value1)
keys = jar.keys()
assert keys == list(keys)
# make sure one can use keys multiple times
assert list(keys) == list(keys)
def test_cookie_as_dict_values(self):
key = 'some_cookie'
value = 'some_value'
key1 = 'some_cookie1'
value1 = 'some_value1'
jar = requests.cookies.RequestsCookieJar()
jar.set(key, value)
jar.set(key1, value1)
values = jar.values()
assert values == list(values)
# make sure one can use values multiple times
assert list(values) == list(values)
def test_cookie_as_dict_items(self):
key = 'some_cookie'
value = 'some_value'
key1 = 'some_cookie1'
value1 = 'some_value1'
jar = requests.cookies.RequestsCookieJar()
jar.set(key, value)
jar.set(key1, value1)
items = jar.items()
assert items == list(items)
# make sure one can use items multiple times
assert list(items) == list(items)
def test_time_elapsed_blank(self):
r = requests.get(httpbin('get'))
td = r.elapsed
total_seconds = ((td.microseconds + (td.seconds + td.days * 24 * 3600)
* 10**6) / 10**6)
assert total_seconds > 0.0
def test_response_is_iterable(self):
r = requests.Response()
io = StringIO.StringIO('abc')
read_ = io.read
def read_mock(amt, decode_content=None):
return read_(amt)
setattr(io, 'read', read_mock)
r.raw = io
assert next(iter(r))
io.close()
def test_request_and_response_are_pickleable(self):
r = requests.get(httpbin('get'))
# verify we can pickle the original request
assert pickle.loads(pickle.dumps(r.request))
# verify we can pickle the response and that we have access to
# the original request.
pr = pickle.loads(pickle.dumps(r))
assert r.request.url == pr.request.url
assert r.request.headers == pr.request.headers
def test_get_auth_from_url(self):
url = 'http://user:pass@complex.url.com/path?query=yes'
assert ('user', 'pass') == requests.utils.get_auth_from_url(url)
def test_get_auth_from_url_encoded_spaces(self):
url = 'http://user:pass%20pass@complex.url.com/path?query=yes'
assert ('user', 'pass pass') == requests.utils.get_auth_from_url(url)
def test_get_auth_from_url_not_encoded_spaces(self):
url = 'http://user:pass pass@complex.url.com/path?query=yes'
assert ('user', 'pass pass') == requests.utils.get_auth_from_url(url)
def test_get_auth_from_url_percent_chars(self):
url = 'http://user%25user:pass@complex.url.com/path?query=yes'
assert ('user%user', 'pass') == requests.utils.get_auth_from_url(url)
def test_get_auth_from_url_encoded_hashes(self):
url = 'http://user:pass%23pass@complex.url.com/path?query=yes'
assert ('user', 'pass#pass') == requests.utils.get_auth_from_url(url)
def test_cannot_send_unprepared_requests(self):
r = requests.Request(url=HTTPBIN)
with pytest.raises(ValueError):
requests.Session().send(r)
def test_http_error(self):
error = requests.exceptions.HTTPError()
assert not error.response
response = requests.Response()
error = requests.exceptions.HTTPError(response=response)
assert error.response == response
error = requests.exceptions.HTTPError('message', response=response)
assert str(error) == 'message'
assert error.response == response
def test_session_pickling(self):
r = requests.Request('GET', httpbin('get'))
s = requests.Session()
s = pickle.loads(pickle.dumps(s))
s.proxies = getproxies()
r = s.send(r.prepare())
assert r.status_code == 200
def test_fixes_1329(self):
"""
Ensure that header updates are done case-insensitively.
"""
s = requests.Session()
s.headers.update({'ACCEPT': 'BOGUS'})
s.headers.update({'accept': 'application/json'})
r = s.get(httpbin('get'))
headers = r.request.headers
assert headers['accept'] == 'application/json'
assert headers['Accept'] == 'application/json'
assert headers['ACCEPT'] == 'application/json'
def test_uppercase_scheme_redirect(self):
parts = urlparse(httpbin('html'))
url = "HTTP://" + parts.netloc + parts.path
r = requests.get(httpbin('redirect-to'), params={'url': url})
assert r.status_code == 200
assert r.url.lower() == url.lower()
def test_transport_adapter_ordering(self):
s = requests.Session()
order = ['https://', 'http://']
assert order == list(s.adapters)
s.mount('http://git', HTTPAdapter())
s.mount('http://github', HTTPAdapter())
s.mount('http://github.com', HTTPAdapter())
s.mount('http://github.com/about/', HTTPAdapter())
order = [
'http://github.com/about/',
'http://github.com',
'http://github',
'http://git',
'https://',
'http://',
]
assert order == list(s.adapters)
s.mount('http://gittip', HTTPAdapter())
s.mount('http://gittip.com', HTTPAdapter())
s.mount('http://gittip.com/about/', HTTPAdapter())
order = [
'http://github.com/about/',
'http://gittip.com/about/',
'http://github.com',
'http://gittip.com',
'http://github',
'http://gittip',
'http://git',
'https://',
'http://',
]
assert order == list(s.adapters)
s2 = requests.Session()
s2.adapters = {'http://': HTTPAdapter()}
s2.mount('https://', HTTPAdapter())
assert 'http://' in s2.adapters
assert 'https://' in s2.adapters
def test_header_remove_is_case_insensitive(self):
# From issue #1321
s = requests.Session()
s.headers['foo'] = 'bar'
r = s.get(httpbin('get'), headers={'FOO': None})
assert 'foo' not in r.request.headers
def test_params_are_merged_case_sensitive(self):
s = requests.Session()
s.params['foo'] = 'bar'
r = s.get(httpbin('get'), params={'FOO': 'bar'})
assert r.json()['args'] == {'foo': 'bar', 'FOO': 'bar'}
def test_long_authinfo_in_url(self):
url = 'http://{0}:{1}@{2}:9000/path?query#frag'.format(
'E8A3BE87-9E3F-4620-8858-95478E385B5B',
'EA770032-DA4D-4D84-8CE9-29C6D910BF1E',
'exactly-------------sixty-----------three------------characters',
)
r = requests.Request('GET', url).prepare()
assert r.url == url
def test_header_keys_are_native(self):
headers = {u'unicode': 'blah', 'byte'.encode('ascii'): 'blah'}
r = requests.Request('GET', httpbin('get'), headers=headers)
p = r.prepare()
# This is testing that they are builtin strings. A bit weird, but there
# we go.
assert 'unicode' in p.headers.keys()
assert 'byte' in p.headers.keys()
def test_can_send_nonstring_objects_with_files(self):
data = {'a': 0.0}
files = {'b': 'foo'}
r = requests.Request('POST', httpbin('post'), data=data, files=files)
p = r.prepare()
assert 'multipart/form-data' in p.headers['Content-Type']
def test_autoset_header_values_are_native(self):
data = 'this is a string'
length = '16'
req = requests.Request('POST', httpbin('post'), data=data)
p = req.prepare()
assert p.headers['Content-Length'] == length
def test_oddball_schemes_dont_check_URLs(self):
test_urls = (
'data:image/gif;base64,R0lGODlhAQABAHAAACH5BAUAAAAALAAAAAABAAEAAAICRAEAOw==',
'file:///etc/passwd',
'magnet:?xt=urn:btih:be08f00302bc2d1d3cfa3af02024fa647a271431',
)
for test_url in test_urls:
req = requests.Request('GET', test_url)
preq = req.prepare()
assert test_url == preq.url
class TestContentEncodingDetection(unittest.TestCase):
def test_none(self):
encodings = requests.utils.get_encodings_from_content('')
assert not len(encodings)
def test_html_charset(self):
"""HTML5 meta charset attribute"""
content = '<meta charset="UTF-8">'
encodings = requests.utils.get_encodings_from_content(content)
assert len(encodings) == 1
assert encodings[0] == 'UTF-8'
def test_html4_pragma(self):
"""HTML4 pragma directive"""
content = '<meta http-equiv="Content-type" content="text/html;charset=UTF-8">'
encodings = requests.utils.get_encodings_from_content(content)
assert len(encodings) == 1
assert encodings[0] == 'UTF-8'
def test_xhtml_pragma(self):
"""XHTML 1.x served with text/html MIME type"""
content = '<meta http-equiv="Content-type" content="text/html;charset=UTF-8" />'
encodings = requests.utils.get_encodings_from_content(content)
assert len(encodings) == 1
assert encodings[0] == 'UTF-8'
def test_xml(self):
"""XHTML 1.x served as XML"""
content = '<?xml version="1.0" encoding="UTF-8"?>'
encodings = requests.utils.get_encodings_from_content(content)
assert len(encodings) == 1
assert encodings[0] == 'UTF-8'
def test_precedence(self):
content = '''
<?xml version="1.0" encoding="XML"?>
<meta charset="HTML5">
<meta http-equiv="Content-type" content="text/html;charset=HTML4" />
'''.strip()
encodings = requests.utils.get_encodings_from_content(content)
assert encodings == ['HTML5', 'HTML4', 'XML']
class TestCaseInsensitiveDict(unittest.TestCase):
def test_mapping_init(self):
cid = CaseInsensitiveDict({'Foo': 'foo','BAr': 'bar'})
assert len(cid) == 2
assert 'foo' in cid
assert 'bar' in cid
def test_iterable_init(self):
cid = CaseInsensitiveDict([('Foo', 'foo'), ('BAr', 'bar')])
assert len(cid) == 2
assert 'foo' in cid
assert 'bar' in cid
def test_kwargs_init(self):
cid = CaseInsensitiveDict(FOO='foo', BAr='bar')
assert len(cid) == 2
assert 'foo' in cid
assert 'bar' in cid
def test_docstring_example(self):
cid = CaseInsensitiveDict()
cid['Accept'] = 'application/json'
assert cid['aCCEPT'] == 'application/json'
assert list(cid) == ['Accept']
def test_len(self):
cid = CaseInsensitiveDict({'a': 'a', 'b': 'b'})
cid['A'] = 'a'
assert len(cid) == 2
def test_getitem(self):
cid = CaseInsensitiveDict({'Spam': 'blueval'})
assert cid['spam'] == 'blueval'
assert cid['SPAM'] == 'blueval'
def test_fixes_649(self):
"""__setitem__ should behave case-insensitively."""
cid = CaseInsensitiveDict()
cid['spam'] = 'oneval'
cid['Spam'] = 'twoval'
cid['sPAM'] = 'redval'
cid['SPAM'] = 'blueval'
assert cid['spam'] == 'blueval'
assert cid['SPAM'] == 'blueval'
assert list(cid.keys()) == ['SPAM']
def test_delitem(self):
cid = CaseInsensitiveDict()
cid['Spam'] = 'someval'
del cid['sPam']
assert 'spam' not in cid
assert len(cid) == 0
def test_contains(self):
cid = CaseInsensitiveDict()
cid['Spam'] = 'someval'
assert 'Spam' in cid
assert 'spam' in cid
assert 'SPAM' in cid
assert 'sPam' in cid
assert 'notspam' not in cid
def test_get(self):
cid = CaseInsensitiveDict()
cid['spam'] = 'oneval'
cid['SPAM'] = 'blueval'
assert cid.get('spam') == 'blueval'
assert cid.get('SPAM') == 'blueval'
assert cid.get('sPam') == 'blueval'
assert cid.get('notspam', 'default') == 'default'
def test_update(self):
cid = CaseInsensitiveDict()
cid['spam'] = 'blueval'
cid.update({'sPam': 'notblueval'})
assert cid['spam'] == 'notblueval'
cid = CaseInsensitiveDict({'Foo': 'foo','BAr': 'bar'})
cid.update({'fOO': 'anotherfoo', 'bAR': 'anotherbar'})
assert len(cid) == 2
assert cid['foo'] == 'anotherfoo'
assert cid['bar'] == 'anotherbar'
def test_update_retains_unchanged(self):
cid = CaseInsensitiveDict({'foo': 'foo', 'bar': 'bar'})
cid.update({'foo': 'newfoo'})
assert cid['bar'] == 'bar'
def test_iter(self):
cid = CaseInsensitiveDict({'Spam': 'spam', 'Eggs': 'eggs'})
keys = frozenset(['Spam', 'Eggs'])
assert frozenset(iter(cid)) == keys
def test_equality(self):
cid = CaseInsensitiveDict({'SPAM': 'blueval', 'Eggs': 'redval'})
othercid = CaseInsensitiveDict({'spam': 'blueval', 'eggs': 'redval'})
assert cid == othercid
del othercid['spam']
assert cid != othercid
assert cid == {'spam': 'blueval', 'eggs': 'redval'}
def test_setdefault(self):
cid = CaseInsensitiveDict({'Spam': 'blueval'})
assert cid.setdefault('spam', 'notblueval') == 'blueval'
assert cid.setdefault('notspam', 'notblueval') == 'notblueval'
def test_lower_items(self):
cid = CaseInsensitiveDict({
'Accept': 'application/json',
'user-Agent': 'requests',
})
keyset = frozenset(lowerkey for lowerkey, v in cid.lower_items())
lowerkeyset = frozenset(['accept', 'user-agent'])
assert keyset == lowerkeyset
def test_preserve_key_case(self):
cid = CaseInsensitiveDict({
'Accept': 'application/json',
'user-Agent': 'requests',
})
keyset = frozenset(['Accept', 'user-Agent'])
assert frozenset(i[0] for i in cid.items()) == keyset
assert frozenset(cid.keys()) == keyset
assert frozenset(cid) == keyset
def test_preserve_last_key_case(self):
cid = CaseInsensitiveDict({
'Accept': 'application/json',
'user-Agent': 'requests',
})
cid.update({'ACCEPT': 'application/json'})
cid['USER-AGENT'] = 'requests'
keyset = frozenset(['ACCEPT', 'USER-AGENT'])
assert frozenset(i[0] for i in cid.items()) == keyset
assert frozenset(cid.keys()) == keyset
assert frozenset(cid) == keyset
class UtilsTestCase(unittest.TestCase):
def test_super_len_io_streams(self):
""" Ensures that we properly deal with different kinds of IO streams. """
# uses StringIO or io.StringIO (see import above)
from io import BytesIO
from requests.utils import super_len
assert super_len(StringIO.StringIO()) == 0
assert super_len(StringIO.StringIO('with so much drama in the LBC')) == 29
assert super_len(BytesIO()) == 0
assert super_len(BytesIO(b"it's kinda hard bein' snoop d-o-double-g")) == 40
try:
import cStringIO
except ImportError:
pass
else:
assert super_len(cStringIO.StringIO('but some how, some way...')) == 25
    def test_get_environ_proxies_ip_ranges(self):
        """ Ensures that IP addresses are correctly matched against ranges in the no_proxy variable """
from requests.utils import get_environ_proxies
os.environ['no_proxy'] = "192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1"
assert get_environ_proxies('http://192.168.0.1:5000/') == {}
assert get_environ_proxies('http://192.168.0.1/') == {}
assert get_environ_proxies('http://172.16.1.1/') == {}
assert get_environ_proxies('http://172.16.1.1:5000/') == {}
assert get_environ_proxies('http://192.168.1.1:5000/') != {}
assert get_environ_proxies('http://192.168.1.1/') != {}
    def test_get_environ_proxies(self):
        """ Ensures that hostnames listed in the no_proxy variable are honored """
from requests.utils import get_environ_proxies
os.environ['no_proxy'] = "127.0.0.1,localhost.localdomain,192.168.0.0/24,172.16.1.1"
assert get_environ_proxies('http://localhost.localdomain:5000/v1.0/') == {}
assert get_environ_proxies('http://www.requests.com/') != {}
def test_is_ipv4_address(self):
from requests.utils import is_ipv4_address
assert is_ipv4_address('8.8.8.8')
assert not is_ipv4_address('8.8.8.8.8')
assert not is_ipv4_address('localhost.localdomain')
def test_is_valid_cidr(self):
from requests.utils import is_valid_cidr
assert not is_valid_cidr('8.8.8.8')
assert is_valid_cidr('192.168.1.0/24')
def test_dotted_netmask(self):
from requests.utils import dotted_netmask
assert dotted_netmask(8) == '255.0.0.0'
assert dotted_netmask(24) == '255.255.255.0'
assert dotted_netmask(25) == '255.255.255.128'
def test_address_in_network(self):
from requests.utils import address_in_network
assert address_in_network('192.168.1.1', '192.168.1.0/24')
assert not address_in_network('172.16.0.1', '192.168.1.0/24')
    def test_get_auth_from_url(self):
        """ Ensures that username and password in a well-encoded URI, as per RFC 3986, are correctly extracted """
from requests.utils import get_auth_from_url
from requests.compat import quote
percent_encoding_test_chars = "%!*'();:@&=+$,/?#[] "
url_address = "request.com/url.html#test"
url = "http://" + quote(percent_encoding_test_chars, '') + ':' + quote(percent_encoding_test_chars, '') + '@' + url_address
(username, password) = get_auth_from_url(url)
assert username == percent_encoding_test_chars
assert password == percent_encoding_test_chars
class TestMorselToCookieExpires(unittest.TestCase):
"""Tests for morsel_to_cookie when morsel contains expires."""
def test_expires_valid_str(self):
"""Test case where we convert expires from string time."""
morsel = Morsel()
morsel['expires'] = 'Thu, 01-Jan-1970 00:00:01 GMT'
cookie = morsel_to_cookie(morsel)
assert cookie.expires == 1
def test_expires_invalid_int(self):
"""Test case where an invalid type is passed for expires."""
morsel = Morsel()
morsel['expires'] = 100
with pytest.raises(TypeError):
morsel_to_cookie(morsel)
def test_expires_invalid_str(self):
"""Test case where an invalid string is input."""
morsel = Morsel()
morsel['expires'] = 'woops'
with pytest.raises(ValueError):
morsel_to_cookie(morsel)
def test_expires_none(self):
"""Test case where expires is None."""
morsel = Morsel()
morsel['expires'] = None
cookie = morsel_to_cookie(morsel)
assert cookie.expires is None
class TestMorselToCookieMaxAge(unittest.TestCase):
"""Tests for morsel_to_cookie when morsel contains max-age."""
def test_max_age_valid_int(self):
"""Test case where a valid max age in seconds is passed."""
morsel = Morsel()
morsel['max-age'] = 60
cookie = morsel_to_cookie(morsel)
assert isinstance(cookie.expires, int)
    def test_max_age_invalid_str(self):
        """Test case where an invalid max age is passed."""
morsel = Morsel()
morsel['max-age'] = 'woops'
with pytest.raises(TypeError):
morsel_to_cookie(morsel)
if __name__ == '__main__':
unittest.main()
|
iilab/ltfhc-next
|
system/python-requests/test_requests.py
|
Python
|
apache-2.0
| 40,903 | 0.001076 |
from __future__ import print_function
import os
import threading
import re
import pint
ureg = pint.UnitRegistry()
MM = ureg.millimeter
METER = ureg.meter
SEC = ureg.second
DEG = ureg.degree
RAD = ureg.radian
DRAW_LOCK = threading.RLock()
DEBOUNCE = 1
HEAD = NAME_HEAD = 'HEAD'
TORSO = NAME_TORSO = 'TORSO'
NAME_PAN = 'PAN'
NAME_TILT = 'TILT'
INDEX_HEAD = 1
INDEX_TORSO = 2
SYSTEM_STARTUP_SPEECH = 'Hello.'
SYSTEM_SHUTDOWN_SPEECH = 'Goodbye.'
NAME_TO_INDEX = {
NAME_HEAD: INDEX_HEAD,
NAME_TORSO: INDEX_TORSO,
}
INDEX_TO_NAME = {
INDEX_HEAD: NAME_HEAD,
INDEX_TORSO: NAME_TORSO,
}
# Things to expect from running `udevadm info --query=all --name=/dev/ttyACM*`
DEVICE_SIGNATURES = {
NAME_TORSO: [
#'ID_MODEL_FROM_DATABASE=Uno R3 (CDC ACM)',
'arduino__www.arduino.cc__0043_854363236313514132d0',
],
NAME_HEAD: [
#'ID_MODEL=Arduino_Leonardo',
#'leonardo',
'arduino__www.arduino.cc__0043_5533330393435171b041',
],
}
PALETTE = [
('banner', 'black', 'light gray'),
('streak', 'black', 'dark red'),
('bg', 'black', 'dark blue'),
]
HIGHLIGHT_COLOR = 'banner'
LINE_LASER_PIN = 20
ID_NULL = ''
ID_PAN_ANGLE = 'a'
ID_BUMPER = 'b'
ID_PAN_SPEED = 'c'
ID_TILT_ANGLE = 'd'
ID_EDGE = 'e'
ID_STATUS_BUTTON = 'f'
ID_BATTERY_VOLTAGE = 'g'
ID_BATTERY_TEMP = 'h'
ID_IDENTIFY = 'i'
ID_EXTERNAL_POWER = 'j'
ID_POWER_BUTTON = 'k'
ID_LED = 'l'
ID_LED_AUTO = 'm'
ID_MOTOR_SPEED = 'n'
ID_GO_TO_CENTER = 'o'
ID_PING = 'p'
ID_GET_VALUE = 'q'
ID_PAN_FULL_REV_COUNT = 'r'
ID_ALL_STOP = 's'
ID_CALIBRATE = 't'
ID_ULTRASONIC = 'u'
ID_PONG = 'v'
ID_FORCE_SENSORS = 'w'
ID_TWIST = 'x'
ID_PAN_CENTERMARK = 'y'
ID_SET_VALUE = 'z'
ID_PAN_POWER = 'A'
ID_TILT_POWER = 'B'
ID_SONAR_POWER = 'C'
ID_ARDUINO_TEMP = 'D'
ID_IMU_EULER = 'E'
ID_RECHARGE_POWERDOWN = 'F'
ID_BATTERY_CHARGE_RATIO = 'G'
ID_IMU_ACCELEROMETER = 'H'
#ID_MICROPHONE_ENABLE = 'I'
ID_IMU_GYROSCOPE = 'J'
ID_IMU_MAGNETOMETER = 'K'
ID_LOG = 'L'
ID_MOTOR_ACCEL = 'M'
ID_IMU_CALIBRATION = 'N'
ID_MOTOR_CALIBRATION = 'O'
ID_MOTOR_ENCODER = 'P'
ID_TWIST_DONE = 'Q'
ID_MOTOR_ERROR = 'R'
ID_GO_TO_SLEEP = 'S'
ID_SHUTDOWN = 'T'
# 'U'
# 'V'
# 'W'
ID_CRASH = 'X'
# 'Y'
ID_HASH = 'Z'
# These are used to lookup callbacks. Do Not Change.
ALL_IDS = {
ID_PAN_ANGLE: 'pan angle',
ID_BUMPER: 'bumper',
ID_PAN_SPEED: 'pan speed',
ID_TILT_ANGLE: 'tilt angle',
ID_EDGE: 'edge',
ID_STATUS_BUTTON: 'status button',
ID_BATTERY_VOLTAGE: 'battery voltage',
ID_BATTERY_TEMP: 'battery temperature',
ID_IDENTIFY: 'identify',
ID_EXTERNAL_POWER: 'external power',
ID_POWER_BUTTON: 'power button',
ID_LED: 'led',
ID_LED_AUTO: 'led auto',
ID_MOTOR_SPEED: 'motor speed',
ID_MOTOR_ACCEL: 'motor acceleration',
ID_PING: 'ping',
ID_FORCE_SENSORS: 'force sensors',
ID_GO_TO_CENTER: 'go to center',
ID_GET_VALUE: 'get value',
ID_PAN_FULL_REV_COUNT: 'pan full rev count',
ID_CALIBRATE: 'calibrate',
ID_ALL_STOP: 'all stop',
ID_ULTRASONIC: 'ultrasonic',
ID_PONG: 'pong',
ID_PAN_CENTERMARK: 'pan centermark',
ID_SET_VALUE: 'set value',
ID_PAN_POWER: 'pan power',
ID_TILT_POWER: 'tilt power',
ID_SONAR_POWER: 'sonar power',
ID_ARDUINO_TEMP: 'arduino temperature',
ID_RECHARGE_POWERDOWN: 'recharge powerdown',
ID_BATTERY_CHARGE_RATIO: 'battery charge ratio',
ID_LOG: 'log',
ID_GO_TO_SLEEP: 'sleep',
ID_SHUTDOWN: 'shutdown',
ID_CRASH: 'crash',
ID_HASH: 'hash',
ID_IMU_EULER: 'imu euler',
ID_IMU_ACCELEROMETER: 'imu accelerometer',
ID_IMU_GYROSCOPE: 'imu gyroscope',
ID_IMU_MAGNETOMETER: 'imu magnetometer',
ID_IMU_CALIBRATION: 'imu calibration',
ID_MOTOR_CALIBRATION: 'motor calibration',
ID_MOTOR_ENCODER: 'motor encoder',
ID_MOTOR_ERROR: 'motor error',
ID_TWIST: 'twist',
ID_TWIST_DONE: 'twist done',
}
NAME_TO_IDS = dict((re.sub(r'[^a-z]+', '_', v.lower()), k) for k, v in ALL_IDS.iteritems())
MOVEMENT_ERROR_NONE = 0
MOVEMENT_ERROR_EDGE = 1
MOVEMENT_ERROR_ULTRASONIC = 2
MOVEMENT_ERROR_TILT = 3
MOVEMENT_ERROR_ACCEL = 4
MOVEMENT_ERROR_ENCODER = 5
MOVEMENT_ERROR_BUMPER = 6
# Movement will be halted if ultrasonics detect we're this distance from an obstacle.
MOVEMENT_ULTRASONIC_THRESHOLD_CM = 5
# This amount of tilt will be allowed on the logic x or y axis before movement is cancelled.
MOVEMENT_MAX_TILT = 10
# These map to ROS messages.
BOTH_FORMATS_OUT = {
# ID_ALL_STOP: [],
# ID_IDENTIFY: [],
# ID_LED: [('state', bool)],
# ID_LED_AUTO: [('state', bool)],
# ID_PING: [],
# ID_GET_VALUE: [('id', int)],
ID_PONG: [('total', int)],
ID_ARDUINO_TEMP: [('temperature', float)],
ID_MOTOR_CALIBRATION: [('name', str), ('state', int)],
}
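# For illustration: write_ros_messages() below would render ID_ARDUINO_TEMP as
# ArduinoTemperatureChange.msg (a 'device' field is prepended automatically):
#     uint8 device
#     float32 temperature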
HEAD_FORMATS_OUT = {
ID_PAN_ANGLE: [('angle', int)],
ID_PAN_FULL_REV_COUNT: [('count', int)],
ID_PAN_CENTERMARK: [('state', int)],
ID_TILT_ANGLE: [('angle', int)],
}
TORSO_FORMATS_OUT = {
ID_BUMPER: [('index', 'uint8'), ('state', int)],
ID_EDGE: [('index', 'uint8'), ('state', int)],
ID_BATTERY_VOLTAGE: [('voltage', float)],
ID_BATTERY_TEMP: [('temperature', float)],
ID_EXTERNAL_POWER: [('state1', int), ('state2', int)],
# ID_LED: [('state', bool)],
# ID_LED_AUTO: [('state', bool)],
# ID_MOTOR_SPEED: [('left', int), ('right', int)],
ID_ULTRASONIC: [('index', 'uint8'), ('distance', int)],
# ID_SONAR_POWER: [('state', bool)],
ID_IMU_EULER: [('x', float), ('y', float), ('z', float)],
ID_IMU_ACCELEROMETER: [('x', float), ('y', float), ('z', float)],
ID_IMU_GYROSCOPE: [('x', float), ('y', float), ('z', float)],
ID_IMU_MAGNETOMETER: [('x', float), ('y', float), ('z', float)],
ID_IMU_CALIBRATION: [
('system', int),
('gyroscope', int),
('accelerometer', int),
('magnetometer', int),
],
# ID_RECHARGE_POWERDOWN: [],
ID_BATTERY_CHARGE_RATIO: [('charge', int)],
# ID_GO_TO_SLEEP: [('duration', int)],
# ID_SHUTDOWN: [],
# ID_MOTOR_ACCEL: [('acceleration', float)],
ID_STATUS_BUTTON: [('state', int)],
ID_MOTOR_ENCODER: [('channel', int), ('count', int)],
ID_MOTOR_ERROR: [('error', int)],# single byte
ID_TWIST_DONE: [('error', int)], # 0=no error, 1=edge, 2=ultrasonic, 3=tilt, 4=accel, 5=encoder
}
BOTH_FORMATS_IN = {
ID_ALL_STOP: [],
ID_LED: [('index', int), ('state', int)],
ID_LED_AUTO: [('state', int)],
ID_GET_VALUE: [('id', str)],
ID_FORCE_SENSORS: [('state', int)],
}
HEAD_FORMATS_IN = {
ID_PAN_SPEED: [('speed', 'int32')],
ID_GO_TO_CENTER: [('type', str)],
ID_CALIBRATE: [('type', str)],
ID_PAN_ANGLE: [('angle', int)],
ID_TILT_ANGLE: [('angle', int)],
ID_TILT_POWER: [('enabled', int)],
ID_PAN_POWER: [('enabled', int)],
# ID_MICROPHONE_ENABLE: [('state', int)],
}
TORSO_FORMATS_IN = {
ID_SONAR_POWER: [('enabled', int)],
ID_MOTOR_SPEED: [('left', int), ('right', int)],
ID_MOTOR_ACCEL: [('acceleration', int)],
ID_RECHARGE_POWERDOWN: [],
ID_GO_TO_SLEEP: [('duration', int)],
ID_SHUTDOWN: [],
# Mimics Twist format. http://docs.ros.org/api/geometry_msgs/html/msg/Twist.html
# Linear is linear.x, Angular is angular.z.
ID_TWIST: [('linear', float), ('angular', float), ('seconds', float), ('force', int)],
}
# Packets using these IDs will require acknowledgement.
ACK_IDS = set([
ID_LED,
ID_LED_AUTO,
ID_SONAR_POWER,
ID_MOTOR_SPEED,
ID_MOTOR_ACCEL,
ID_GO_TO_CENTER,
ID_TILT_ANGLE,
ID_PAN_ANGLE,
])
MOTOR_FORWARD = 'forward'
MOTOR_REVERSE = 'reverse'
MOTOR_TURN_CW = 'turn_cw'
MOTOR_TURN_CCW = 'turn_ccw'
MOTOR_BREAK = 'break'
MOTOR_PIVOT_LEFT_CW = 'pivot_left_cw'
MOTOR_PIVOT_LEFT_CCW = 'pivot_left_ccw'
MOTOR_PIVOT_RIGHT_CW = 'pivot_right_cw'
MOTOR_PIVOT_RIGHT_CCW = 'pivot_right_ccw'
# ComMotion Manual, Page 4
# The desired speed from -255 to +255. Positive values are forward, negative values are reverse.
MOTOR_MAX_SPEED = 255
MOTOR_EIGTH_SPEED = int(round(MOTOR_MAX_SPEED * 0.125))
MOTOR_QUARTER_SPEED = int(round(MOTOR_MAX_SPEED * 0.25))
MOTOR_HALF_SPEED = int(round(MOTOR_MAX_SPEED * 0.5))
MOTOR_THREE_QUARTER_SPEED = int(round(MOTOR_MAX_SPEED * 0.75))
MOTOR_DEFAULT_SPEED = MOTOR_QUARTER_SPEED
MOTOR_DEFAULT_ACCEL = 128 # velocity_units/sec
MOTOR_MIN_ACCEL = 1
MOTOR_MAX_ACCEL = MOTOR_MAX_SPEED
# Measured.
MOTOR_MAX_SPEED_REAL = 745 * MM/SEC
MOTOR_DEFAULT_ACCEL_REAL = float(MOTOR_DEFAULT_ACCEL) / MOTOR_MAX_SPEED * MOTOR_MAX_SPEED_REAL / SEC
# Pololu 2282 Gear Motor => 464.64 counts per revolution of the gearbox's output shaft
# Driver wheel radius = 14 mm
# Tread length = 228 mm
#(revolutions_of_shaft/count) * (wheel_circumference)/(revolution_of_shaft)
#(1 revolution_of_shaft/464.6 counts) * (2*pi*14 mm)/(1 revolution_of_shaft) * (1m/1000mm) = meters/count
#METERS_PER_COUNT = (3.14159265 * 0.1524) / 64000 * (1/1000.)
#TODO:the 464.6 counts may mean for quadrature, but we're only using a single channel
# Note, ROS distance assumes meters.
METERS_PER_COUNT = (3.141592653589793 * 28) / 464.6 / 1000.
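# Illustrative sanity check of the conversion above (uses the figures from the
# comments, not a new measurement): 100 encoder counts correspond to roughly
# 100 * METERS_PER_COUNT ~= 0.019 m of tread travel.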
# Convert the relative speed to absolute velocity in meters/second.
SPEED_TO_VELOCITY = 0.35/MOTOR_MAX_SPEED
VELOCITY_TO_SPEED = MOTOR_MAX_SPEED/0.35
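# Illustrative example of the scale above: a commanded speed of 128 maps to
# roughly 128 * SPEED_TO_VELOCITY ~= 0.18 m/s.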
TILT_CENTER = 90
TILT_MIN = 90-65
TILT_MAX = 90+65
PAN_MAX = 360
OK = 'OK'
PYTHON_TO_ROS_TYPES = {
bool: 'bool',
int: 'int32',
float: 'float32',
str: 'string',
}
# The maximum width of the body.
TORSO_DIAMETER_MM = 126 + 24
TORSO_DIAMETER = TORSO_DIAMETER_MM * MM
# The distance between the treads.
TORSO_TREAD_WIDTH = 100 * MM
TORSO_TREAD_WIDTH_METERS = TORSO_TREAD_WIDTH.to(METER).magnitude
# The distance from the ground to the center of the head.
HEIGHT_CENTER_HEIGHT_MM = 235
HEIGHT_CENTER_HEIGHT = HEIGHT_CENTER_HEIGHT_MM * MM
ARDUINO_PING_TIMEOUT = 5
MOTION_WANDER = 'wander'
MOTION_FORWARD_X_MM = 'forward_x_mm'
MOTION_TURN_X_DEGREES = 'turn_x_degrees'
MOTION_PVIOT_X_DEGREES = 'pivot_x_degrees'
MOTIONS = [
(MOTION_WANDER, 'wander'),
(MOTION_FORWARD_X_MM, 'forward'),
(MOTION_TURN_X_DEGREES, 'turn'),
(MOTION_PVIOT_X_DEGREES, 'pivot'),
]
SOUND_TTS = 'tts'
SOUND_TONE = 'tone'
# CPU temperature limits (in Celsius).
# The Pi starts to underclock itself at 85C and the components get damaged at 90C.
CPU_TEMP_ERROR = 85 # above this, report an error
CPU_TEMP_WARN = 82.5 # above this, report a warning; below, report ok
CPU_USAGE_PERCENT_ERROR = 99
CPU_USAGE_PERCENT_WARN = 90
# CPU clock speed limits.
CPU_CLOCK_SPEED_PERCENT_ERROR = 25
CPU_CLOCK_SPEED_PERCENT_WARN = 50
# Disk limits.
DISK_USAGE_PERCENT_ERROR = 95
DISK_USAGE_PERCENT_WARN = 90
# Memory limits.
MEMORY_USAGE_PERCENT_ERROR = 95
MEMORY_USAGE_PERCENT_WARN = 90
# Links
BASE_FOOTPRINT = 'base_footprint'
BASE_LINK = 'base_link'
NECK = 'neck'
HEAD = 'head'
ODOM = 'odom'
# Joints
FOOTPRINT_TO_TORSO_JOINT = 'footprint_to_base_link_joint'
TORSO_TO_NECK_JOINT = 'base_link_to_neck_joint'
NECK_TO_HEAD_JOINT = 'neck_to_head_joint'
HEAD_TO_CAMERA_JOINT = 'head_to_camera_joint'
# Battery limits.
BATTERY_CHARGE_RATIO_ERROR = 0.8
BATTERY_CHARGE_RATIO_WARN = 0.85
# Camera.
# http://elinux.org/Rpi_Camera_Module
CAMERA_ANGLE_OF_VIEW_H = 54
CAMERA_ANGLE_OF_VIEW_V = 41
EXPORT_TO_ARDUINO = [
'METERS_PER_COUNT',
'TORSO_TREAD_WIDTH_METERS',
'VELOCITY_TO_SPEED',
'SPEED_TO_VELOCITY',
'MOVEMENT_ERROR_NONE',
'MOVEMENT_ERROR_EDGE',
'MOVEMENT_ERROR_ULTRASONIC',
'MOVEMENT_ERROR_TILT',
'MOVEMENT_ERROR_ACCEL',
'MOVEMENT_ERROR_ENCODER',
'MOVEMENT_ERROR_BUMPER',
'MOVEMENT_ULTRASONIC_THRESHOLD_CM',
'MOVEMENT_MAX_TILT',
]
# Diagnostic part names.
def write_ros_messages(d, prefix):
msg_dir = '../../../ros_homebot_msgs/msg'
for k, v in d.iteritems():
name = re.sub(r'[^a-z]+', ' ', ALL_IDS[k])
name = (''.join(map(str.title, name.split(' '))))
if name != 'Pong':
name = name + 'Change'
#name = prefix.title() + name
v = [('device', 'uint8')] + v
print(name, v)
with open(os.path.join(msg_dir, '%s.msg' % name), 'w') as fout:
for _name, _type in v:
_ros_type = PYTHON_TO_ROS_TYPES.get(_type, _type)
print('%s %s' % (_ros_type, _name), file=fout)
def write_ros_services(d, prefix):
msg_dir = '../../../ros_homebot_msgs/srv'
for k, v in d.iteritems():
name = re.sub(r'[^a-z]+', ' ', ALL_IDS[k])
name = (''.join(map(str.title, name.split(' '))))
#name = prefix.title() + name
#v = [('device', 'uint8')] + v
print(name, v)
with open(os.path.join(msg_dir, '%s.srv' % name), 'w') as fout:
for _name, _type in v:
_ros_type = PYTHON_TO_ROS_TYPES.get(_type, _type)
print('%s %s' % (_ros_type, _name), file=fout)
print('---', file=fout)
def write_cpp_headers():
# Output the IDs to a C/C++ header.
with open('../../../ros_homebot_firmware/common/src/ID.h', 'w') as fout:
print('// AUTO-GENERATED. DO NOT EDIT. SEE homebot/constants.py.', file=fout)
items = [
_ for _ in globals().items()
if _[0].startswith('ID_')]
for _name, _value in sorted(items, key=lambda o: o[1]):
print("#define %s '%s'" % (_name.ljust(4*6), _value), file=fout)
items = [
_ for _ in globals().items()
if _[0].startswith('NAME_') and not _[0].startswith('NAME_TO_')]
for _name, _value in sorted(items, key=lambda o: o[0]):
print('#define %s "%s"' % (_name.ljust(4*6), _value), file=fout)
for _name in EXPORT_TO_ARDUINO:
_value = globals()[_name]
print('#define %s %s' % (_name.ljust(4*6), repr(_value)), file=fout)
print('Wrote ID.h.')
if __name__ == '__main__':
write_cpp_headers()
print('''
Now run:
cd /home/`user`/git/homebot/src/ros
. ./setup.bash
catkin_make --pkg ros_homebot_msgs
''')
t = HEAD_FORMATS_OUT.copy()
t.update(BOTH_FORMATS_OUT)
write_ros_messages(t, NAME_HEAD)
t = TORSO_FORMATS_OUT.copy()
t.update(BOTH_FORMATS_OUT)
write_ros_messages(t, NAME_TORSO)
t = HEAD_FORMATS_IN.copy()
t.update(BOTH_FORMATS_IN)
write_ros_services(t, NAME_HEAD)
t = TORSO_FORMATS_IN.copy()
t.update(BOTH_FORMATS_IN)
write_ros_services(t, NAME_TORSO)
os.system('cd ../../../ros_homebot_msgs; python update_makelist.py')
print('Remember to run:\n')
print(' fab prod homebot.rebuild_messages')
|
chrisspen/homebot
|
src/ros/src/ros_homebot_python/src/ros_homebot_python/constants.py
|
Python
|
mit
| 14,480 | 0.003384 |
import hashlib
import json
import logging
import os
import subprocess
import sys
import time
from collections import defaultdict
from shutil import copy
from shutil import copyfile
from shutil import copystat
from shutil import copytree
from tempfile import mkdtemp
import boto3
import botocore
import yaml
from .helpers import archive
from .helpers import get_environment_variable_value
from .helpers import LambdaContext
from .helpers import mkdir
from .helpers import read
from .helpers import timestamp
ARN_PREFIXES = {
"cn-north-1": "aws-cn",
"cn-northwest-1": "aws-cn",
"us-gov-west-1": "aws-us-gov",
}
log = logging.getLogger(__name__)
def load_source(module_name, module_path):
"""Loads a python module from the path of the corresponding file."""
if sys.version_info[0] == 3 and sys.version_info[1] >= 5:
import importlib.util
spec = importlib.util.spec_from_file_location(module_name, module_path)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
elif sys.version_info[0] == 3 and sys.version_info[1] < 5:
import importlib.machinery
loader = importlib.machinery.SourceFileLoader(module_name, module_path)
module = loader.load_module()
return module
def cleanup_old_versions(
src, keep_last_versions, config_file="config.yaml", profile_name=None,
):
"""Deletes old deployed versions of the function in AWS Lambda.
Won't delete $Latest and any aliased version
:param str src:
        The path to your Lambda ready project (the folder must contain a
        valid config.yaml and a handler module, e.g. service.py).
:param int keep_last_versions:
The number of recent versions to keep and not delete
"""
if keep_last_versions <= 0:
print("Won't delete all versions. Please do this manually")
else:
path_to_config_file = os.path.join(src, config_file)
cfg = read_cfg(path_to_config_file, profile_name)
profile_name = cfg.get("profile")
aws_access_key_id = cfg.get("aws_access_key_id")
aws_secret_access_key = cfg.get("aws_secret_access_key")
client = get_client(
"lambda",
profile_name,
aws_access_key_id,
aws_secret_access_key,
cfg.get("region"),
)
response = client.list_versions_by_function(
FunctionName=cfg.get("function_name"),
)
versions = response.get("Versions")
if len(response.get("Versions")) < keep_last_versions:
print("Nothing to delete. (Too few versions published)")
else:
version_numbers = [
elem.get("Version") for elem in versions[1:-keep_last_versions]
]
for version_number in version_numbers:
try:
client.delete_function(
FunctionName=cfg.get("function_name"),
Qualifier=version_number,
)
except botocore.exceptions.ClientError as e:
print(f"Skipping Version {version_number}: {e}")
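# Illustrative usage (the path is a placeholder for a project directory laid
# out as described in the docstring above):
# cleanup_old_versions("path/to/project", keep_last_versions=3, profile_name="default")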
def deploy(
src,
requirements=None,
local_package=None,
config_file="config.yaml",
profile_name=None,
preserve_vpc=False,
):
"""Deploys a new function to AWS Lambda.
:param str src:
        The path to your Lambda ready project (the folder must contain a
        valid config.yaml and a handler module, e.g. service.py).
    :param str local_package:
        The path to a local package which should be included in the deploy
        as well (and/or is not available on PyPi)
"""
# Load and parse the config file.
path_to_config_file = os.path.join(src, config_file)
cfg = read_cfg(path_to_config_file, profile_name)
# Copy all the pip dependencies required to run your code into a temporary
# folder then add the handler file in the root of this directory.
# Zip the contents of this folder into a single file and output to the dist
# directory.
path_to_zip_file = build(
src,
config_file=config_file,
requirements=requirements,
local_package=local_package,
)
existing_config = get_function_config(cfg)
if existing_config:
update_function(
cfg, path_to_zip_file, existing_config, preserve_vpc=preserve_vpc
)
else:
create_function(cfg, path_to_zip_file)
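# Illustrative usage (placeholder path; the directory must contain the
# config.yaml and handler module described in the docstring above):
# deploy("path/to/project", requirements="requirements.txt", profile_name="default")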
def deploy_s3(
src,
requirements=None,
local_package=None,
config_file="config.yaml",
profile_name=None,
preserve_vpc=False,
):
"""Deploys a new function via AWS S3.
:param str src:
        The path to your Lambda ready project (the folder must contain a
        valid config.yaml and a handler module, e.g. service.py).
    :param str local_package:
        The path to a local package which should be included in the deploy
        as well (and/or is not available on PyPi)
"""
# Load and parse the config file.
path_to_config_file = os.path.join(src, config_file)
cfg = read_cfg(path_to_config_file, profile_name)
# Copy all the pip dependencies required to run your code into a temporary
# folder then add the handler file in the root of this directory.
# Zip the contents of this folder into a single file and output to the dist
# directory.
path_to_zip_file = build(
src,
config_file=config_file,
requirements=requirements,
local_package=local_package,
)
use_s3 = True
s3_file = upload_s3(cfg, path_to_zip_file, use_s3)
existing_config = get_function_config(cfg)
if existing_config:
update_function(
cfg,
path_to_zip_file,
existing_config,
use_s3=use_s3,
s3_file=s3_file,
preserve_vpc=preserve_vpc,
)
else:
create_function(cfg, path_to_zip_file, use_s3=use_s3, s3_file=s3_file)
def upload(
src,
requirements=None,
local_package=None,
config_file="config.yaml",
profile_name=None,
):
"""Uploads a new function to AWS S3.
:param str src:
        The path to your Lambda ready project (the folder must contain a
        valid config.yaml and a handler module, e.g. service.py).
    :param str local_package:
        The path to a local package which should be included in the deploy
        as well (and/or is not available on PyPi)
"""
# Load and parse the config file.
path_to_config_file = os.path.join(src, config_file)
cfg = read_cfg(path_to_config_file, profile_name)
# Copy all the pip dependencies required to run your code into a temporary
# folder then add the handler file in the root of this directory.
# Zip the contents of this folder into a single file and output to the dist
# directory.
path_to_zip_file = build(
src,
config_file=config_file,
requirements=requirements,
local_package=local_package,
)
upload_s3(cfg, path_to_zip_file)
def invoke(
src,
event_file="event.json",
config_file="config.yaml",
profile_name=None,
verbose=False,
):
"""Simulates a call to your function.
:param str src:
        The path to your Lambda ready project (the folder must contain a
        valid config.yaml and a handler module, e.g. service.py).
    :param str event_file:
        An optional argument to override which event file to use.
:param bool verbose:
Whether to print out verbose details.
"""
# Load and parse the config file.
path_to_config_file = os.path.join(src, config_file)
cfg = read_cfg(path_to_config_file, profile_name)
# Set AWS_PROFILE environment variable based on `--profile` option.
if profile_name:
os.environ["AWS_PROFILE"] = profile_name
# Load environment variables from the config file into the actual
# environment.
env_vars = cfg.get("environment_variables")
if env_vars:
for key, value in env_vars.items():
os.environ[key] = get_environment_variable_value(value)
# Load and parse event file.
path_to_event_file = os.path.join(src, event_file)
event = read(path_to_event_file, loader=json.loads)
# Tweak to allow module to import local modules
try:
sys.path.index(src)
except ValueError:
sys.path.append(src)
handler = cfg.get("handler")
# Inspect the handler string (<module>.<function name>) and translate it
# into a function we can execute.
fn = get_callable_handler_function(src, handler)
timeout = cfg.get("timeout")
if timeout:
context = LambdaContext(cfg.get("function_name"), timeout)
else:
context = LambdaContext(cfg.get("function_name"))
start = time.time()
results = fn(event, context)
end = time.time()
print("{0}".format(results))
if verbose:
print(
"\nexecution time: {:.8f}s\nfunction execution "
"timeout: {:2}s".format(end - start, cfg.get("timeout", 15))
)
def init(src, minimal=False):
"""Copies template files to a given directory.
:param str src:
The path to output the template lambda project files.
:param bool minimal:
Minimal possible template files (excludes event.json).
"""
templates_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "project_templates",
)
for filename in os.listdir(templates_path):
if (minimal and filename == "event.json") or filename.endswith(".pyc"):
continue
dest_path = os.path.join(templates_path, filename)
if not os.path.isdir(dest_path):
copy(dest_path, src)
def build(
src,
requirements=None,
local_package=None,
config_file="config.yaml",
profile_name=None,
):
"""Builds the file bundle.
:param str src:
        The path to your Lambda ready project (the folder must contain a
        valid config.yaml and a handler module, e.g. service.py).
    :param str local_package:
        The path to a local package which should be included in the deploy
        as well (and/or is not available on PyPi)
"""
# Load and parse the config file.
path_to_config_file = os.path.join(src, config_file)
cfg = read_cfg(path_to_config_file, profile_name)
# Get the absolute path to the output directory and create it if it doesn't
# already exist.
dist_directory = cfg.get("dist_directory", "dist")
path_to_dist = os.path.join(src, dist_directory)
mkdir(path_to_dist)
# Combine the name of the Lambda function with the current timestamp to use
# for the output filename.
function_name = cfg.get("function_name")
output_filename = "{0}-{1}.zip".format(timestamp(), function_name)
path_to_temp = mkdtemp(prefix="aws-lambda")
pip_install_to_target(
path_to_temp, requirements=requirements, local_package=local_package,
)
# Hack for Zope.
if "zope" in os.listdir(path_to_temp):
print(
"Zope packages detected; fixing Zope package paths to "
"make them importable.",
)
# Touch.
with open(os.path.join(path_to_temp, "zope/__init__.py"), "wb"):
pass
# Gracefully handle whether ".zip" was included in the filename or not.
output_filename = (
"{0}.zip".format(output_filename)
if not output_filename.endswith(".zip")
else output_filename
)
# Allow definition of source code directories we want to build into our
# zipped package.
build_config = defaultdict(**cfg.get("build", {}))
build_source_directories = build_config.get("source_directories", "")
build_source_directories = (
build_source_directories
if build_source_directories is not None
else ""
)
source_directories = [
d.strip() for d in build_source_directories.split(",")
]
files = []
for filename in os.listdir(src):
if os.path.isfile(filename):
if filename == ".DS_Store":
continue
if filename == config_file:
continue
print("Bundling: %r" % filename)
files.append(os.path.join(src, filename))
elif os.path.isdir(filename) and filename in source_directories:
print("Bundling directory: %r" % filename)
files.append(os.path.join(src, filename))
# "cd" into `temp_path` directory.
os.chdir(path_to_temp)
for f in files:
if os.path.isfile(f):
_, filename = os.path.split(f)
# Copy handler file into root of the packages folder.
copyfile(f, os.path.join(path_to_temp, filename))
copystat(f, os.path.join(path_to_temp, filename))
elif os.path.isdir(f):
src_path_length = len(src) + 1
destination_folder = os.path.join(
path_to_temp, f[src_path_length:]
)
copytree(f, destination_folder)
# Zip them together into a single file.
# TODO: Delete temp directory created once the archive has been compiled.
path_to_zip_file = archive("./", path_to_dist, output_filename)
return path_to_zip_file
def get_callable_handler_function(src, handler):
"""Translate a string of the form "module.function" into a callable
function.
:param str src:
The path to your Lambda project containing a valid handler file.
:param str handler:
A dot delimited string representing the `<module>.<function name>`.
"""
# "cd" into `src` directory.
os.chdir(src)
module_name, function_name = handler.split(".")
filename = get_handler_filename(handler)
path_to_module_file = os.path.join(src, filename)
module = load_source(module_name, path_to_module_file)
return getattr(module, function_name)
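# Illustrative example: with handler "service.handler", this loads service.py
# from `src` and returns its `handler` function object.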
def get_handler_filename(handler):
"""Shortcut to get the filename from the handler string.
:param str handler:
A dot delimited string representing the `<module>.<function name>`.
"""
module_name, _ = handler.split(".")
return "{0}.py".format(module_name)
def _install_packages(path, packages):
"""Install all packages listed to the target directory.
    Ignores any package that includes Python itself as well as python-lambda,
    since it's only needed for deploying and not for running the code.
:param str path:
Path to copy installed pip packages to.
:param list packages:
A list of packages to be installed via pip.
"""
def _filter_blacklist(package):
blacklist = ["-i", "#", "Python==", "python-lambda=="]
return all(package.startswith(entry) is False for entry in blacklist)
filtered_packages = filter(_filter_blacklist, packages)
for package in filtered_packages:
if package.startswith("-e "):
package = package.replace("-e ", "")
print("Installing {package}".format(package=package))
subprocess.check_call(
[
sys.executable,
"-m",
"pip",
"install",
package,
"-t",
path,
"--ignore-installed",
]
)
print(
"Install directory contents are now: {directory}".format(
directory=os.listdir(path)
)
)
def pip_install_to_target(path, requirements=None, local_package=None):
"""For a given active virtualenv, gather all installed pip packages then
copy (re-install) them to the path provided.
:param str path:
Path to copy installed pip packages to.
:param str requirements:
If set, only the packages in the supplied requirements file are
installed.
If not set then installs all packages found via pip freeze.
:param str local_package:
        The path to a local package which should be included in the deploy as
well (and/or is not available on PyPi)
"""
packages = []
if not requirements:
print("Gathering pip packages")
pkgStr = subprocess.check_output(
[sys.executable, "-m", "pip", "freeze"]
)
packages.extend(pkgStr.decode("utf-8").splitlines())
else:
if os.path.exists(requirements):
print("Gathering requirement packages")
data = read(requirements)
packages.extend(data.splitlines())
if not packages:
print("No dependency packages installed!")
if local_package is not None:
if not isinstance(local_package, (list, tuple)):
local_package = [local_package]
for l_package in local_package:
packages.append(l_package)
_install_packages(path, packages)
def get_role_name(region, account_id, role):
"""Shortcut to insert the `account_id` and `role` into the iam string."""
prefix = ARN_PREFIXES.get(region, "aws")
return "arn:{0}:iam::{1}:role/{2}".format(prefix, account_id, role)
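# Illustrative example (placeholder account id):
# get_role_name("us-east-1", "123456789012", "lambda_basic_execution")
# -> "arn:aws:iam::123456789012:role/lambda_basic_execution"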
def get_account_id(
profile_name, aws_access_key_id, aws_secret_access_key, region=None,
):
"""Query STS for a users' account_id"""
client = get_client(
"sts", profile_name, aws_access_key_id, aws_secret_access_key, region,
)
return client.get_caller_identity().get("Account")
def get_client(
client,
profile_name,
aws_access_key_id,
aws_secret_access_key,
region=None,
):
"""Shortcut for getting an initialized instance of the boto3 client."""
boto3.setup_default_session(
profile_name=profile_name,
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
region_name=region,
)
return boto3.client(client)
def create_function(cfg, path_to_zip_file, use_s3=False, s3_file=None):
"""Register and upload a function to AWS Lambda."""
print("Creating your new Lambda function")
byte_stream = read(path_to_zip_file, binary_file=True)
profile_name = cfg.get("profile")
aws_access_key_id = cfg.get("aws_access_key_id")
aws_secret_access_key = cfg.get("aws_secret_access_key")
account_id = get_account_id(
profile_name,
aws_access_key_id,
aws_secret_access_key,
cfg.get("region",),
)
role = get_role_name(
cfg.get("region"),
account_id,
cfg.get("role", "lambda_basic_execution"),
)
client = get_client(
"lambda",
profile_name,
aws_access_key_id,
aws_secret_access_key,
cfg.get("region"),
)
# Do we prefer development variable over config?
buck_name = os.environ.get("S3_BUCKET_NAME") or cfg.get("bucket_name")
func_name = os.environ.get("LAMBDA_FUNCTION_NAME") or cfg.get(
"function_name"
)
print("Creating lambda function with name: {}".format(func_name))
if use_s3:
kwargs = {
"FunctionName": func_name,
"Runtime": cfg.get("runtime", "python2.7"),
"Role": role,
"Handler": cfg.get("handler"),
"Code": {
"S3Bucket": "{}".format(buck_name),
"S3Key": "{}".format(s3_file),
},
"Description": cfg.get("description", ""),
"Timeout": cfg.get("timeout", 15),
"MemorySize": cfg.get("memory_size", 512),
"VpcConfig": {
"SubnetIds": cfg.get("subnet_ids", []),
"SecurityGroupIds": cfg.get("security_group_ids", []),
},
"Publish": True,
}
else:
kwargs = {
"FunctionName": func_name,
"Runtime": cfg.get("runtime", "python2.7"),
"Role": role,
"Handler": cfg.get("handler"),
"Code": {"ZipFile": byte_stream},
"Description": cfg.get("description", ""),
"Timeout": cfg.get("timeout", 15),
"MemorySize": cfg.get("memory_size", 512),
"VpcConfig": {
"SubnetIds": cfg.get("subnet_ids", []),
"SecurityGroupIds": cfg.get("security_group_ids", []),
},
"Publish": True,
}
if "tags" in cfg:
kwargs.update(
Tags={key: str(value) for key, value in cfg.get("tags").items()}
)
if "environment_variables" in cfg:
kwargs.update(
Environment={
"Variables": {
key: get_environment_variable_value(value)
for key, value in cfg.get("environment_variables").items()
},
},
)
client.create_function(**kwargs)
concurrency = get_concurrency(cfg)
if concurrency > 0:
client.put_function_concurrency(
FunctionName=func_name, ReservedConcurrentExecutions=concurrency
)
def update_function(
cfg,
path_to_zip_file,
existing_cfg,
use_s3=False,
s3_file=None,
preserve_vpc=False,
):
"""Updates the code of an existing Lambda function"""
print("Updating your Lambda function")
byte_stream = read(path_to_zip_file, binary_file=True)
profile_name = cfg.get("profile")
aws_access_key_id = cfg.get("aws_access_key_id")
aws_secret_access_key = cfg.get("aws_secret_access_key")
account_id = get_account_id(
profile_name,
aws_access_key_id,
aws_secret_access_key,
cfg.get("region",),
)
role = get_role_name(
cfg.get("region"),
account_id,
cfg.get("role", "lambda_basic_execution"),
)
client = get_client(
"lambda",
profile_name,
aws_access_key_id,
aws_secret_access_key,
cfg.get("region"),
)
# Do we prefer development variable over config?
buck_name = os.environ.get("S3_BUCKET_NAME") or cfg.get("bucket_name")
if use_s3:
client.update_function_code(
FunctionName=cfg.get("function_name"),
S3Bucket="{}".format(buck_name),
S3Key="{}".format(s3_file),
Publish=True,
)
else:
client.update_function_code(
FunctionName=cfg.get("function_name"),
ZipFile=byte_stream,
Publish=True,
)
kwargs = {
"FunctionName": cfg.get("function_name"),
"Role": role,
"Runtime": cfg.get("runtime"),
"Handler": cfg.get("handler"),
"Description": cfg.get("description", ""),
"Timeout": cfg.get("timeout", 15),
"MemorySize": cfg.get("memory_size", 512),
}
if preserve_vpc:
kwargs["VpcConfig"] = existing_cfg.get("Configuration", {}).get(
"VpcConfig"
)
if kwargs["VpcConfig"] is None:
kwargs["VpcConfig"] = {
"SubnetIds": cfg.get("subnet_ids", []),
"SecurityGroupIds": cfg.get("security_group_ids", []),
}
else:
del kwargs["VpcConfig"]["VpcId"]
else:
kwargs["VpcConfig"] = {
"SubnetIds": cfg.get("subnet_ids", []),
"SecurityGroupIds": cfg.get("security_group_ids", []),
}
if "environment_variables" in cfg:
kwargs.update(
Environment={
"Variables": {
key: str(get_environment_variable_value(value))
for key, value in cfg.get("environment_variables").items()
},
},
)
ret = client.update_function_configuration(**kwargs)
concurrency = get_concurrency(cfg)
if concurrency > 0:
client.put_function_concurrency(
FunctionName=cfg.get("function_name"),
ReservedConcurrentExecutions=concurrency,
)
elif "Concurrency" in existing_cfg:
client.delete_function_concurrency(
FunctionName=cfg.get("function_name")
)
if "tags" in cfg:
tags = {key: str(value) for key, value in cfg.get("tags").items()}
if tags != existing_cfg.get("Tags"):
if existing_cfg.get("Tags"):
client.untag_resource(
Resource=ret["FunctionArn"],
TagKeys=list(existing_cfg["Tags"].keys()),
)
client.tag_resource(Resource=ret["FunctionArn"], Tags=tags)
def upload_s3(cfg, path_to_zip_file, *use_s3):
"""Upload a function to AWS S3."""
print("Uploading your new Lambda function")
profile_name = cfg.get("profile")
aws_access_key_id = cfg.get("aws_access_key_id")
aws_secret_access_key = cfg.get("aws_secret_access_key")
client = get_client(
"s3",
profile_name,
aws_access_key_id,
aws_secret_access_key,
cfg.get("region"),
)
byte_stream = b""
with open(path_to_zip_file, mode="rb") as fh:
byte_stream = fh.read()
s3_key_prefix = cfg.get("s3_key_prefix", "/dist")
checksum = hashlib.new("md5", byte_stream).hexdigest()
timestamp = str(time.time())
filename = "{prefix}{checksum}-{ts}.zip".format(
prefix=s3_key_prefix, checksum=checksum, ts=timestamp,
)
# Do we prefer development variable over config?
buck_name = os.environ.get("S3_BUCKET_NAME") or cfg.get("bucket_name")
func_name = os.environ.get("LAMBDA_FUNCTION_NAME") or cfg.get(
"function_name"
)
kwargs = {
"Bucket": "{}".format(buck_name),
"Key": "{}".format(filename),
"Body": byte_stream,
}
client.put_object(**kwargs)
print("Finished uploading {} to S3 bucket {}".format(func_name, buck_name))
if use_s3:
return filename
def get_function_config(cfg):
"""Check whether a function exists or not and return its config"""
function_name = cfg.get("function_name")
profile_name = cfg.get("profile")
aws_access_key_id = cfg.get("aws_access_key_id")
aws_secret_access_key = cfg.get("aws_secret_access_key")
client = get_client(
"lambda",
profile_name,
aws_access_key_id,
aws_secret_access_key,
cfg.get("region"),
)
try:
return client.get_function(FunctionName=function_name)
except client.exceptions.ResourceNotFoundException as e:
if "Function not found" in str(e):
return False
def get_concurrency(cfg):
"""Return the Reserved Concurrent Executions if present in the config"""
concurrency = int(cfg.get("concurrency", 0))
return max(0, concurrency)
def read_cfg(path_to_config_file, profile_name):
cfg = read(path_to_config_file, loader=yaml.full_load)
if profile_name is not None:
cfg["profile"] = profile_name
elif "AWS_PROFILE" in os.environ:
cfg["profile"] = os.environ["AWS_PROFILE"]
return cfg
|
nficano/python-lambda
|
aws_lambda/aws_lambda.py
|
Python
|
isc
| 26,779 | 0 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2008 Zuza Software Foundation
#
# This file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
#
class XmlNamespace(object):
def __init__(self, namespace):
self._namespace = namespace
def name(self, tag):
return "{%s}%s" % (self._namespace, tag)
class XmlNamer(object):
"""Initialize me with a DOM node or a DOM document node (the
toplevel node you get when parsing an XML file). Then use me
to generate fully qualified XML names.
    >>> xml = '<office:document-styles xmlns:office="urn:oasis:names:tc:opendocument:xmlns:office:1.0"></office:document-styles>'
>>> from lxml import etree
>>> namer = XmlNamer(etree.fromstring(xml))
>>> namer.name('office', 'blah')
{urn:oasis:names:tc:opendocument:xmlns:office:1.0}blah
>>> namer.name('office:blah')
{urn:oasis:names:tc:opendocument:xmlns:office:1.0}blah
I can also give you XmlNamespace objects if you give me the abbreviated
namespace name. These are useful if you need to reference a namespace
continuously.
    >>> office_ns = namer.namespace('office')
>>> office_ns.name('foo')
{urn:oasis:names:tc:opendocument:xmlns:office:1.0}foo
"""
def __init__(self, dom_node):
        # Allow the user to pass either a DOM node or the
        # XML document node
if hasattr(dom_node, 'nsmap'):
self.nsmap = dom_node.nsmap
else:
self.nsmap = dom_node.getroot().nsmap
def name(self, namespace_shortcut, tag=None):
# If the user doesn't pass an argument into 'tag'
# then namespace_shortcut contains a tag of the form
# 'short-namespace:tag'
if tag is None:
try:
namespace_shortcut, tag = namespace_shortcut.split(':')
except ValueError:
# If there is no namespace in namespace_shortcut.
tag = namespace_shortcut.lstrip("{}")
return tag
return "{%s}%s" % (self.nsmap[namespace_shortcut], tag)
def namespace(self, namespace_shortcut):
return XmlNamespace(self.nsmap[namespace_shortcut])
|
bluemini/kuma
|
vendor/packages/translate/storage/xml_name.py
|
Python
|
mpl-2.0
| 2,748 | 0.000364 |
# Django settings for temp project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {'ENGINE': 'django.db.backends.sqlite3'}
}
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '1s^z*4c6clc@+)c8dstu#eh4bi5907+&h_$2_&=y!3=a_!))u6'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'test_urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'temp.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
# 'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
'messages_extends',
)
MESSAGE_STORAGE = 'messages_extends.storages.FallbackStorage'
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
|
imposeren/django-messages-extends
|
test_settings.py
|
Python
|
mit
| 4,967 | 0.000604 |
# -*- coding: utf-8 -*-
"""
Plugins related to folders and paths
"""
from hyde.plugin import Plugin
from hyde.fs import Folder
class FlattenerPlugin(Plugin):
"""
The plugin class for flattening nested folders.
"""
def __init__(self, site):
super(FlattenerPlugin, self).__init__(site)
def begin_site(self):
"""
Finds all the folders that need flattening and changes the
relative deploy path of all resources in those folders.
"""
items = []
try:
items = self.site.config.flattener.items
except AttributeError:
pass
for item in items:
node = None
target = ''
try:
node = self.site.content.node_from_relative_path(item.source)
target = Folder(item.target)
except AttributeError:
continue
if node:
for resource in node.walk_resources():
target_path = target.child(resource.name)
self.logger.debug(
'Flattening resource path [%s] to [%s]' %
(resource, target_path))
resource.relative_deploy_path = target_path
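# Illustrative site configuration (YAML) consumed by begin_site above; the
# folder names are placeholders:
#     flattener:
#         items:
#             -
#                 source: blog/years
#                 target: blog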
|
stiell/hyde
|
hyde/ext/plugins/folders.py
|
Python
|
mit
| 1,263 | 0.002375 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-06-25 15:19
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('shipping', '0005_auto_20170616_1351'),
('teamstore', '0003_auto_20170624_1533'),
]
operations = [
migrations.AddField(
model_name='teamstore',
name='shipping_method',
field=models.ForeignKey(default=2, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='shipping.ShippingMethod', verbose_name='team shipping method'),
),
]
|
jonathanmeier5/teamstore
|
saleor/teamstore/migrations/0004_teamstore_shipping_method.py
|
Python
|
bsd-3-clause
| 672 | 0.001488 |
"""
A sub-package for efficiently dealing with polynomials.
Within the documentation for this sub-package, a "finite power series,"
i.e., a polynomial (also referred to simply as a "series") is represented
by a 1-D numpy array of the polynomial's coefficients, ordered from lowest
order term to highest. For example, array([1,2,3]) represents
``P_0 + 2*P_1 + 3*P_2``, where P_n is the n-th order basis polynomial
applicable to the specific module in question, e.g., `polynomial` (which
"wraps" the "standard" basis) or `chebyshev`. For optimal performance,
all operations on polynomials, including evaluation at an argument, are
implemented as operations on the coefficients. Additional (module-specific)
information can be found in the docstring for the module of interest.
"""
from polynomial import *
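# Illustrative example of the coefficient convention described above, using the
# standard polynomial basis: polyval(2.0, [1, 2, 3]) evaluates
# 1 + 2*2.0 + 3*2.0**2 = 17.0.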
from chebyshev import *
from polyutils import *
from numpy.testing import Tester
test = Tester(__file__).test
bench = Tester(__file__).bench
|
teoliphant/numpy-refactor
|
numpy/polynomial/__init__.py
|
Python
|
bsd-3-clause
| 951 | 0 |
''' This is a library for getting DotA2 insight information.
Problem:
    there was no way to get Dota's internal data about heroes, their abilities,
    etc. in a form suitable for further work.
Solution:
    this library gives you access to all the data in the in-game files.
    Since information about a single hero is not of much use on its own, there
    is also a way to get the stats of selected heroes or information about a
    certain match.
'''
from atod.meta import meta_info
from atod.models.interfaces import Member, Group
from atod.models.ability import Ability
from atod.models.abilities import Abilities
from atod.models.hero import Hero
from atod.models.heroes import Heroes
from atod.models.match import Match
from atod.utils.pick import get_recommendations
# from atod.utils import dota_api
|
gasabr/AtoD
|
atod/__init__.py
|
Python
|
mit
| 806 | 0.001241 |
from django.conf import settings
from django.conf.urls import url
from plans.views import CreateOrderView, OrderListView, InvoiceDetailView, AccountActivationView, \
OrderPaymentReturnView, CurrentPlanView, UpgradePlanView, OrderView, BillingInfoRedirectView, \
BillingInfoCreateView, BillingInfoUpdateView, BillingInfoDeleteView, CreateOrderPlanChangeView, ChangePlanView, \
PricingView, FakePaymentsView
urlpatterns = [
url(r'^pricing/$', PricingView.as_view(), name='pricing'),
url(r'^account/$', CurrentPlanView.as_view(), name='current_plan'),
url(r'^account/activation/$', AccountActivationView.as_view(), name='account_activation'),
url(r'^upgrade/$', UpgradePlanView.as_view(), name='upgrade_plan'),
url(r'^order/extend/new/(?P<pk>\d+)/$', CreateOrderView.as_view(), name='create_order_plan'),
url(r'^order/upgrade/new/(?P<pk>\d+)/$', CreateOrderPlanChangeView.as_view(), name='create_order_plan_change'),
url(r'^change/(?P<pk>\d+)/$', ChangePlanView.as_view(), name='change_plan'),
url(r'^order/$', OrderListView.as_view(), name='order_list'),
url(r'^order/(?P<pk>\d+)/$', OrderView.as_view(), name='order'),
url(r'^order/(?P<pk>\d+)/payment/success/$', OrderPaymentReturnView.as_view(status='success'),
name='order_payment_success'),
url(r'^order/(?P<pk>\d+)/payment/failure/$', OrderPaymentReturnView.as_view(status='failure'),
name='order_payment_failure'),
url(r'^billing/$', BillingInfoRedirectView.as_view(), name='billing_info'),
url(r'^billing/create/$', BillingInfoCreateView.as_view(), name='billing_info_create'),
url(r'^billing/update/$', BillingInfoUpdateView.as_view(), name='billing_info_update'),
url(r'^billing/delete/$', BillingInfoDeleteView.as_view(), name='billing_info_delete'),
url(r'^invoice/(?P<pk>\d+)/preview/html/$', InvoiceDetailView.as_view(), name='invoice_preview_html'),
]
if getattr(settings, 'DEBUG', False):
urlpatterns += [
url(r'^fakepayments/(?P<pk>\d+)/$', FakePaymentsView.as_view(), name='fake_payments'),
]
|
cypreess/django-plans
|
plans/urls.py
|
Python
|
mit
| 2,070 | 0.006763 |
""" Tests for foe.command.interpret._setup_readline """
from foe.command.interpret import _get_input
def test_no_config():
assert not _get_input()
|
robobrobro/foe
|
test/command/interpret/test_get_input.py
|
Python
|
mit
| 153 | 0.006536 |
"""
Provide functionality to TTS.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/tts/
"""
import asyncio
import ctypes
import functools as ft
import hashlib
import logging
import mimetypes
import os
import re
import io
from aiohttp import web
import voluptuous as vol
from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.setup import async_prepare_setup_platform
from homeassistant.core import callback
from homeassistant.config import load_yaml_config_file
from homeassistant.components.http import HomeAssistantView
from homeassistant.components.media_player import (
SERVICE_PLAY_MEDIA, MEDIA_TYPE_MUSIC, ATTR_MEDIA_CONTENT_ID,
ATTR_MEDIA_CONTENT_TYPE, DOMAIN as DOMAIN_MP)
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_per_platform
import homeassistant.helpers.config_validation as cv
REQUIREMENTS = ["mutagen==1.37.0"]
DOMAIN = 'tts'
DEPENDENCIES = ['http']
_LOGGER = logging.getLogger(__name__)
MEM_CACHE_FILENAME = 'filename'
MEM_CACHE_VOICE = 'voice'
CONF_LANG = 'language'
CONF_CACHE = 'cache'
CONF_CACHE_DIR = 'cache_dir'
CONF_TIME_MEMORY = 'time_memory'
DEFAULT_CACHE = True
DEFAULT_CACHE_DIR = "tts"
DEFAULT_TIME_MEMORY = 300
SERVICE_SAY = 'say'
SERVICE_CLEAR_CACHE = 'clear_cache'
ATTR_MESSAGE = 'message'
ATTR_CACHE = 'cache'
ATTR_LANGUAGE = 'language'
ATTR_OPTIONS = 'options'
_RE_VOICE_FILE = re.compile(
r"([a-f0-9]{40})_([^_]+)_([^_]+)_([a-z_]+)\.[a-z0-9]{3,4}")
KEY_PATTERN = '{0}_{1}_{2}_{3}'
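# Illustrative key built with KEY_PATTERN above for a message spoken in "en" by
# an engine named "demo" with no extra options (sha1 shortened for display):
# "ab12cd34..._en_-_demo", cached on disk as e.g. "ab12cd34..._en_-_demo.mp3".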
PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA.extend({
vol.Optional(CONF_CACHE, default=DEFAULT_CACHE): cv.boolean,
vol.Optional(CONF_CACHE_DIR, default=DEFAULT_CACHE_DIR): cv.string,
vol.Optional(CONF_TIME_MEMORY, default=DEFAULT_TIME_MEMORY):
vol.All(vol.Coerce(int), vol.Range(min=60, max=57600)),
})
SCHEMA_SERVICE_SAY = vol.Schema({
vol.Required(ATTR_MESSAGE): cv.string,
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
vol.Optional(ATTR_CACHE): cv.boolean,
vol.Optional(ATTR_LANGUAGE): cv.string,
vol.Optional(ATTR_OPTIONS): dict,
})
SCHEMA_SERVICE_CLEAR_CACHE = vol.Schema({})
@asyncio.coroutine
def async_setup(hass, config):
"""Set up TTS."""
tts = SpeechManager(hass)
try:
conf = config[DOMAIN][0] if config.get(DOMAIN, []) else {}
use_cache = conf.get(CONF_CACHE, DEFAULT_CACHE)
cache_dir = conf.get(CONF_CACHE_DIR, DEFAULT_CACHE_DIR)
time_memory = conf.get(CONF_TIME_MEMORY, DEFAULT_TIME_MEMORY)
yield from tts.async_init_cache(use_cache, cache_dir, time_memory)
except (HomeAssistantError, KeyError) as err:
_LOGGER.error("Error on cache init %s", err)
return False
hass.http.register_view(TextToSpeechView(tts))
descriptions = yield from hass.loop.run_in_executor(
None, load_yaml_config_file,
os.path.join(os.path.dirname(__file__), 'services.yaml'))
@asyncio.coroutine
def async_setup_platform(p_type, p_config, disc_info=None):
"""Set up a tts platform."""
platform = yield from async_prepare_setup_platform(
hass, config, DOMAIN, p_type)
if platform is None:
return
try:
if hasattr(platform, 'async_get_engine'):
provider = yield from platform.async_get_engine(
hass, p_config)
else:
provider = yield from hass.loop.run_in_executor(
None, platform.get_engine, hass, p_config)
if provider is None:
_LOGGER.error("Error setting up platform %s", p_type)
return
tts.async_register_engine(p_type, provider, p_config)
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Error setting up platform %s", p_type)
return
@asyncio.coroutine
def async_say_handle(service):
"""Service handle for say."""
entity_ids = service.data.get(ATTR_ENTITY_ID)
message = service.data.get(ATTR_MESSAGE)
cache = service.data.get(ATTR_CACHE)
language = service.data.get(ATTR_LANGUAGE)
options = service.data.get(ATTR_OPTIONS)
try:
url = yield from tts.async_get_url(
p_type, message, cache=cache, language=language,
options=options
)
except HomeAssistantError as err:
_LOGGER.error("Error on init tts: %s", err)
return
data = {
ATTR_MEDIA_CONTENT_ID: url,
ATTR_MEDIA_CONTENT_TYPE: MEDIA_TYPE_MUSIC,
}
if entity_ids:
data[ATTR_ENTITY_ID] = entity_ids
yield from hass.services.async_call(
DOMAIN_MP, SERVICE_PLAY_MEDIA, data, blocking=True)
hass.services.async_register(
DOMAIN, "{}_{}".format(p_type, SERVICE_SAY), async_say_handle,
descriptions.get(SERVICE_SAY), schema=SCHEMA_SERVICE_SAY)
setup_tasks = [async_setup_platform(p_type, p_config) for p_type, p_config
in config_per_platform(config, DOMAIN)]
if setup_tasks:
yield from asyncio.wait(setup_tasks, loop=hass.loop)
@asyncio.coroutine
def async_clear_cache_handle(service):
"""Handle clear cache service call."""
yield from tts.async_clear_cache()
hass.services.async_register(
DOMAIN, SERVICE_CLEAR_CACHE, async_clear_cache_handle,
descriptions.get(SERVICE_CLEAR_CACHE),
schema=SCHEMA_SERVICE_CLEAR_CACHE)
return True
class SpeechManager(object):
"""Representation of a speech store."""
def __init__(self, hass):
"""Initialize a speech store."""
self.hass = hass
self.providers = {}
self.use_cache = DEFAULT_CACHE
self.cache_dir = DEFAULT_CACHE_DIR
self.time_memory = DEFAULT_TIME_MEMORY
self.file_cache = {}
self.mem_cache = {}
@asyncio.coroutine
def async_init_cache(self, use_cache, cache_dir, time_memory):
"""Init config folder and load file cache."""
self.use_cache = use_cache
self.time_memory = time_memory
def init_tts_cache_dir(cache_dir):
"""Init cache folder."""
if not os.path.isabs(cache_dir):
cache_dir = self.hass.config.path(cache_dir)
if not os.path.isdir(cache_dir):
_LOGGER.info("Create cache dir %s.", cache_dir)
os.mkdir(cache_dir)
return cache_dir
try:
self.cache_dir = yield from self.hass.loop.run_in_executor(
None, init_tts_cache_dir, cache_dir)
except OSError as err:
raise HomeAssistantError("Can't init cache dir {}".format(err))
def get_cache_files():
"""Return a dict of given engine files."""
cache = {}
folder_data = os.listdir(self.cache_dir)
for file_data in folder_data:
record = _RE_VOICE_FILE.match(file_data)
if record:
key = KEY_PATTERN.format(
record.group(1), record.group(2), record.group(3),
record.group(4)
)
cache[key.lower()] = file_data.lower()
return cache
try:
cache_files = yield from self.hass.loop.run_in_executor(
None, get_cache_files)
except OSError as err:
raise HomeAssistantError("Can't read cache dir {}".format(err))
if cache_files:
self.file_cache.update(cache_files)
@asyncio.coroutine
def async_clear_cache(self):
"""Read file cache and delete files."""
self.mem_cache = {}
def remove_files():
"""Remove files from filesystem."""
for _, filename in self.file_cache.items():
try:
os.remove(os.path.join(self.cache_dir, filename))
except OSError as err:
_LOGGER.warning(
"Can't remove cache file '%s': %s", filename, err)
yield from self.hass.loop.run_in_executor(None, remove_files)
self.file_cache = {}
@callback
def async_register_engine(self, engine, provider, config):
"""Register a TTS provider."""
provider.hass = self.hass
if provider.name is None:
provider.name = engine
self.providers[engine] = provider
@asyncio.coroutine
def async_get_url(self, engine, message, cache=None, language=None,
options=None):
"""Get URL for play message.
This method is a coroutine.
"""
provider = self.providers[engine]
msg_hash = hashlib.sha1(bytes(message, 'utf-8')).hexdigest()
use_cache = cache if cache is not None else self.use_cache
# Languages
language = language or provider.default_language
if language is None or \
language not in provider.supported_languages:
raise HomeAssistantError("Not supported language {0}".format(
language))
# Options
        if provider.default_options and options:
            merged_options = provider.default_options.copy()
            merged_options.update(options)
            options = merged_options
        options = options or provider.default_options
if options is not None:
invalid_opts = [opt_name for opt_name in options.keys()
if opt_name not in provider.supported_options]
if invalid_opts:
raise HomeAssistantError(
"Invalid options found: %s", invalid_opts)
options_key = ctypes.c_size_t(hash(frozenset(options))).value
else:
options_key = '-'
key = KEY_PATTERN.format(
msg_hash, language, options_key, engine).lower()
# Is speech already in memory
if key in self.mem_cache:
filename = self.mem_cache[key][MEM_CACHE_FILENAME]
# Is file store in file cache
elif use_cache and key in self.file_cache:
filename = self.file_cache[key]
self.hass.async_add_job(self.async_file_to_mem(key))
# Load speech from provider into memory
else:
filename = yield from self.async_get_tts_audio(
engine, key, message, use_cache, language, options)
return "{}/api/tts_proxy/{}".format(
self.hass.config.api.base_url, filename)
@asyncio.coroutine
def async_get_tts_audio(self, engine, key, message, cache, language,
options):
"""Receive TTS and store for view in cache.
This method is a coroutine.
"""
provider = self.providers[engine]
extension, data = yield from provider.async_get_tts_audio(
message, language, options)
if data is None or extension is None:
raise HomeAssistantError(
"No TTS from {} for '{}'".format(engine, message))
# Create file infos
filename = ("{}.{}".format(key, extension)).lower()
data = self.write_tags(
filename, data, provider, message, language, options)
# Save to memory
self._async_store_to_memcache(key, filename, data)
if cache:
self.hass.async_add_job(
self.async_save_tts_audio(key, filename, data))
return filename
@asyncio.coroutine
def async_save_tts_audio(self, key, filename, data):
"""Store voice data to file and file_cache.
This method is a coroutine.
"""
voice_file = os.path.join(self.cache_dir, filename)
def save_speech():
"""Store speech to filesystem."""
with open(voice_file, 'wb') as speech:
speech.write(data)
try:
yield from self.hass.loop.run_in_executor(None, save_speech)
self.file_cache[key] = filename
except OSError:
_LOGGER.error("Can't write %s", filename)
@asyncio.coroutine
def async_file_to_mem(self, key):
"""Load voice from file cache into memory.
This method is a coroutine.
"""
filename = self.file_cache.get(key)
if not filename:
raise HomeAssistantError("Key {} not in file cache!".format(key))
voice_file = os.path.join(self.cache_dir, filename)
def load_speech():
"""Load a speech from filesystem."""
with open(voice_file, 'rb') as speech:
return speech.read()
try:
data = yield from self.hass.loop.run_in_executor(None, load_speech)
except OSError:
del self.file_cache[key]
raise HomeAssistantError("Can't read {}".format(voice_file))
self._async_store_to_memcache(key, filename, data)
@callback
def _async_store_to_memcache(self, key, filename, data):
"""Store data to memcache and set timer to remove it."""
self.mem_cache[key] = {
MEM_CACHE_FILENAME: filename,
MEM_CACHE_VOICE: data,
}
@callback
def async_remove_from_mem():
"""Cleanup memcache."""
self.mem_cache.pop(key)
self.hass.loop.call_later(self.time_memory, async_remove_from_mem)
@asyncio.coroutine
def async_read_tts(self, filename):
"""Read a voice file and return binary.
This method is a coroutine.
"""
record = _RE_VOICE_FILE.match(filename.lower())
if not record:
raise HomeAssistantError("Wrong tts file format!")
key = KEY_PATTERN.format(
record.group(1), record.group(2), record.group(3), record.group(4))
if key not in self.mem_cache:
if key not in self.file_cache:
raise HomeAssistantError("%s not in cache!", key)
yield from self.async_file_to_mem(key)
content, _ = mimetypes.guess_type(filename)
return (content, self.mem_cache[key][MEM_CACHE_VOICE])
@staticmethod
def write_tags(filename, data, provider, message, language, options):
"""Write ID3 tags to file.
Async friendly.
"""
import mutagen
data_bytes = io.BytesIO(data)
data_bytes.name = filename
data_bytes.seek(0)
album = provider.name
artist = language
if options is not None:
if options.get('voice') is not None:
artist = options.get('voice')
try:
tts_file = mutagen.File(data_bytes, easy=True)
if tts_file is not None:
tts_file['artist'] = artist
tts_file['album'] = album
tts_file['title'] = message
tts_file.save(data_bytes)
except mutagen.MutagenError as err:
_LOGGER.error("ID3 tag error: %s", err)
return data_bytes.getvalue()
class Provider(object):
"""Represent a single provider."""
hass = None
name = None
@property
def default_language(self):
"""Default language."""
return None
@property
def supported_languages(self):
"""List of supported languages."""
return None
@property
def supported_options(self):
"""List of supported options like voice, emotionen."""
return None
@property
def default_options(self):
"""Dict include default options."""
return None
def get_tts_audio(self, message, language, options=None):
"""Load tts audio file from provider."""
raise NotImplementedError()
def async_get_tts_audio(self, message, language, options=None):
"""Load tts audio file from provider.
Return a tuple of file extension and data as bytes.
This method must be run in the event loop and returns a coroutine.
"""
return self.hass.loop.run_in_executor(
None, ft.partial(
self.get_tts_audio, message, language, options=options))
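# A minimal provider sketch (illustrative only; real platform modules also
# expose a module-level get_engine()/async_get_engine() that returns such an
# instance, as used in async_setup_platform above):
class _ExampleProvider(Provider):
    """Illustrative provider returning a canned audio payload."""
    name = 'example'
    @property
    def default_language(self):
        """Default language."""
        return 'en'
    @property
    def supported_languages(self):
        """List of supported languages."""
        return ['en']
    def get_tts_audio(self, message, language, options=None):
        """Return (file extension, audio bytes) for the given message."""
        return 'wav', b'RIFF....WAVE' # placeholder audio bytes, not a real file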
class TextToSpeechView(HomeAssistantView):
"""TTS view to serve an speech audio."""
requires_auth = False
url = "/api/tts_proxy/{filename}"
name = "api:tts:speech"
def __init__(self, tts):
"""Initialize a tts view."""
self.tts = tts
@asyncio.coroutine
def get(self, request, filename):
"""Start a get request."""
try:
content, data = yield from self.tts.async_read_tts(filename)
except HomeAssistantError as err:
_LOGGER.error("Error on load tts: %s", err)
return web.Response(status=404)
return web.Response(body=data, content_type=content)
|
JshWright/home-assistant
|
homeassistant/components/tts/__init__.py
|
Python
|
apache-2.0
| 16,893 | 0 |
from __future__ import print_function
import itertools,cmath,re
from .helpers import SkipThisVertex,extractAntiSymmetricIndices
from .converter import py2cpp
from .lorentzparser import parse_lorentz
import string,re
def compare(a,b) :
num=abs(a-b)
den=abs(a+b)
    if(den == 0.) :
return True
return num/den<1e-10
def evaluate(x,model,parmsubs):
import cmath
return eval(x,
{'cmath':cmath,
'complexconjugate':model.function_library.complexconjugate},
parmsubs)
# ordering for EW VVV vertices (ordering not an issue as all same spin)
def VVVordering(vertex) :
pattern = "if((p1->id()==%s&&p2->id()==%s&&p3->id()==%s)"+\
"||(p1->id()==%s&&p2->id()==%s&&p3->id()==%s)||"+\
"(p1->id()==%s&&p2->id()==%s&&p3->id()==%s)) {norm(-norm());}"
ordering = pattern%(vertex.particles[1].pdg_code,
vertex.particles[0].pdg_code,
vertex.particles[2].pdg_code,
vertex.particles[0].pdg_code,
vertex.particles[2].pdg_code,
vertex.particles[1].pdg_code,
vertex.particles[2].pdg_code,
vertex.particles[1].pdg_code,
vertex.particles[0].pdg_code)
return ordering
def tensorCouplings(vertex,value,prefactors,L,lorentztag,pos,all_couplings,order) :
# split the structure into its different terms for analysis
ordering=""
structures = extractStructures(L)
if(lorentztag == 'SST') :
terms=[['P(1003,2)','P(2003,1)'],
['P(1003,1)','P(2003,2)'],
['P(-1,1)','P(-1,2)','Metric(1003,2003)'],
['Metric(1003,2003)']]
signs=[1.,1.,-1.,-1.]
new_couplings=[False]*len(terms)
elif(lorentztag == 'FFT' ) :
terms=[['P(2003,1)','Gamma(1003,2,1)'],
['P(2003,2)','Gamma(1003,2,1)'],
['P(1003,1)','Gamma(2003,2,1)'],
['P(1003,2)','Gamma(2003,2,1)'],
['P(-1,1)','Gamma(-1,2,1)','Metric(1003,2003)'],
['P(-1,2)','Gamma(-1,2,1)','Metric(1003,2003)'],
['Metric(1003,2003)']]
signs=[1.,-1.,1.,-1.,-0.5,0.5,1.]
new_couplings=[False]*3*len(terms)
elif(lorentztag == 'VVT' ) :
terms=[['P(-1,1)','P(-1,2)','Metric(1,2003)','Metric(2,1003)'], # from C term
['P(-1,1)','P(-1,2)','Metric(1,1003)','Metric(2,2003)'], # from C term
['P(-1,1)','P(-1,2)','Metric(1,2)','Metric(1003,2003)'], # from C term
['P(1,2)','P(2,1)','Metric(1003,2003)'], # from D term (sym)
['P(1,2)','P(2003,1)','Metric(2,1003)'], # 1st term
['P(1,2)','P(1003,1)','Metric(2,2003)'], # 1st swap
['P(2,1)','P(2003,2)','Metric(1,1003)'], # 2nd term
['P(2,1)','P(1003,2)','Metric(1,2003)'], # 2nd swap
['P(1003,2)','P(2003,1)','Metric(1,2)'], # 3rd term
['P(1003,1)','P(2003,2)','Metric(1,2)'], # 3rd swap
['Metric(1,2003)','Metric(2,1003)'], # from mass term
['Metric(1,1003)','Metric(2,2003)'], # from mass term
['Metric(1,2)','Metric(1003,2003)'], # from mass term
['P(1,1)','P(2,1)','Metric(1003,2003)'], # gauge terms
['P(1,2)','P(2,2)','Metric(1003,2003)'], # gauge terms
['P(1,1)','P(2,2)','Metric(1003,2003)'], # gauge terms
['P(1003,1)','P(1,1)','Metric(2,2003)'], # gauge terms
['P(1003,2)','P(2,2)','Metric(1,2003)'], # gauge terms
['P(2003,1)','P(1,1)','Metric(2,1003)'], # gauge terms
['P(2003,2)','P(2,2)','Metric(1,1003)'], # gauge terms
]
signs=[1.,1.,-1.,1.,-1.,-1.,-1.,-1.,1.,1.,1.,1.,-1.,1.,1.,0.25,-1.,-1.,-1.,-1.]
new_couplings=[False]*len(terms)
elif(lorentztag == 'FFVT' ) :
terms = [['Gamma(2004,2,1)','Metric(3,1004)'],
['Gamma(1004,2,1)','Metric(3,2004)'],
['Gamma(3,2,1)','Metric(1004,2004)'],
['Gamma(2004,2,-1)','Metric(3,1004)'],
['Gamma(1004,2,-1)','Metric(3,2004)'],
['Gamma(3,2,-1)','Metric(1004,2004)']]
signs=[1.,1.,-0.5,1.,1.,-0.5]
new_couplings=[False]*3*len(terms)
elif(lorentztag == 'VVVT' ) :
# the F(mu nu,rho sigma lambda) terms first
terms = [['P(2004,2)','Metric(1,1004)','Metric(2,3)'],['P(2004,3)','Metric(1,1004)','Metric(2,3)'],
['P(1004,2)','Metric(1,2004)','Metric(2,3)'],['P(1004,3)','Metric(1,2004)','Metric(2,3)'],
['P(2004,3)','Metric(1,3)','Metric(2,1004)'],['P(2004,1)','Metric(1,3)','Metric(2,1004)'],
['P(1004,3)','Metric(1,3)','Metric(2,2004)'],['P(1004,1)','Metric(1,3)','Metric(2,2004)'],
['P(2004,1)','Metric(1,2)','Metric(3,1004)'],['P(2004,2)','Metric(1,2)','Metric(3,1004)'],
['P(1004,1)','Metric(1,2)','Metric(3,2004)'],['P(1004,2)','Metric(1,2)','Metric(3,2004)'],
['P(3,1)','Metric(1,2004)','Metric(2,1004)'],['P(3,2)','Metric(1,2004)','Metric(2,1004)'],
['P(3,1)','Metric(1,1004)','Metric(2,2004)'],['P(3,2)','Metric(1,1004)','Metric(2,2004)'],
['P(3,1)','Metric(1,2)','Metric(1004,2004)'],['P(3,2)','Metric(1,2)','Metric(1004,2004)'],
['P(2,3)','Metric(1,2004)','Metric(3,1004)'],['P(2,1)','Metric(1,2004)','Metric(3,1004)'],
['P(2,3)','Metric(1,1004)','Metric(3,2004)'],['P(2,1)','Metric(1,1004)','Metric(3,2004)'],
['P(2,3)','Metric(1,3)','Metric(1004,2004)'],['P(2,1)','Metric(1,3)','Metric(1004,2004)'],
['P(1,2)','Metric(2,2004)','Metric(3,1004)'],['P(1,3)','Metric(2,2004)','Metric(3,1004)'],
['P(1,2)','Metric(2,1004)','Metric(3,2004)'],['P(1,3)','Metric(2,1004)','Metric(3,2004)'],
['P(1,2)','Metric(2,3)','Metric(1004,2004)'],['P(1,3)','Metric(2,3)','Metric(1004,2004)']]
signs = [1.,-1.,1.,-1.,1.,-1.,1.,-1.,1.,-1.,1.,-1.,
1.,-1.,1.,-1.,-1.,1.,1.,-1.,1.,-1.,-1.,1.,1.,-1.,1.,-1.,-1.,1.]
new_couplings=[False]*len(terms)
l = lambda c: len(pos[c])
if l(8)!=3 :
ordering = VVVordering(vertex)
# unknown
else :
raise Exception('Unknown data type "%s".' % lorentztag)
iterm=0
try :
for term in terms:
for perm in itertools.permutations(term):
label = '*'.join(perm)
for istruct in range(0,len(structures)) :
if label in structures[istruct] :
reminder = structures[istruct].replace(label,'1.',1)
loc=iterm
if(reminder.find("ProjM")>=0) :
reminder=re.sub("\*ProjM\(.*,.\)","",reminder)
loc+=len(terms)
elif(reminder.find("ProjP")>=0) :
reminder=re.sub("\*ProjP\(.*,.\)","",reminder)
loc+=2*len(terms)
structures[istruct] = "Done"
val = eval(reminder, {'cmath':cmath} )*signs[iterm]
if(new_couplings[loc]) :
new_couplings[loc] += val
else :
new_couplings[loc] = val
iterm+=1
except :
        raise SkipThisVertex()
# check we've handled all the terms
for val in structures:
if(val!="Done") :
raise SkipThisVertex()
# special for FFVT
if(lorentztag=="FFVT") :
t_couplings=new_couplings
new_couplings=[False]*9
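        # collapse the 18 candidate couplings (6 terms x {plain, ProjM, ProjP}) to 9:
        # for i = 0..8 the indices j and k pick the two equivalent Gamma structures
        # (explicit final spinor index vs. summed index) inside the same projector
        # block, keeping whichever of the pair was actually found in the vertex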
for i in range(0,9) :
j = i+3*(i//3)
k = i+3+3*(i//3)
if( not t_couplings[j]) :
new_couplings[i] = t_couplings[k]
else :
new_couplings[i] = t_couplings[j]
# set the couplings
for icoup in range(0,len(new_couplings)) :
if(new_couplings[icoup]) :
new_couplings[icoup] = '(%s) * (%s) *(%s)' % (new_couplings[icoup],prefactors,value)
if(len(all_couplings)==0) :
all_couplings=new_couplings
else :
for icoup in range(0,len(new_couplings)) :
if(new_couplings[icoup] and all_couplings[icoup]) :
all_couplings[icoup] = '(%s) + (%s) ' % (new_couplings[icoup],all_couplings[icoup])
elif(new_couplings[icoup]) :
all_couplings[icoup] = new_couplings[icoup]
# return the results
return (ordering,all_couplings)
def processTensorCouplings(lorentztag,vertex,model,parmsubs,all_couplings,order) :
# check for fermion vertices (i.e. has L/R couplings)
fermions = "FF" in lorentztag
# test and process the values of the couplings
tval = ["Unknown"]*3
value = ["Unknown"]*3
# loop over the colours
for icolor in range(0,len(all_couplings)) :
lmax = len(all_couplings[icolor])
if(fermions) : lmax //=3
# loop over the different terms
for ix in range(0,lmax) :
test = [False]*3
imax=3
# normal case
if( not fermions ) :
test[0] = all_couplings[icolor][ix]
imax=1
else :
# first case vector but no L/R couplings
if( not all_couplings[icolor][lmax+ix] and
not all_couplings[icolor][2*lmax+ix] ) :
test[0] = all_couplings[icolor][ix]
imax=1
# special for mass terms and massless particles
if(not all_couplings[icolor][ix]) :
code = abs(vertex.particles[order[0]-1].pdg_code)
if(ix==6 and (code ==12 or code ==14 or code==16) ) :
continue
else :
raise SkipThisVertex()
# second case L/R couplings
elif( not all_couplings[icolor][ix] ) :
# L=R, replace with vector
if(all_couplings[icolor][lmax+ix] ==
all_couplings[icolor][2*lmax+ix]) :
test[0] = all_couplings[icolor][lmax+ix]
imax=1
else :
test[1] = all_couplings[icolor][lmax+ix]
test[2] = all_couplings[icolor][2*lmax+ix]
else :
raise SkipThisVertex()
# special handling of mass terms
# scalar divide by m**2
if((ix==3 and lorentztag=="SST") or
( ix>=10 and ix<=12 and lorentztag=="VVT" )) :
for i in range(0,len(test)) :
if(test[i]) :
test[i] = '(%s)/%s**2' % (test[i],vertex.particles[order[0]-1].mass.value)
# fermions divide by 4*m
elif(ix==6 and lorentztag=="FFT" and
float(vertex.particles[order[0]-1].mass.value) != 0. ) :
for i in range(0,len(test)) :
if(test[i]) :
test[i] = '-(%s)/%s/4' % (test[i],vertex.particles[order[0]-1].mass.value)
# set values on first pass
if((tval[0]=="Unknown" and fermions ) or
(not fermions and tval[0]=="Unknown" and tval[1]=="Unknown" and tval[2]=="Unknown")) :
value = test
for i in range(0,len(test)) :
if(test[i]) : tval[i] = evaluate(test[i],model,parmsubs)
else :
for i in range(0,imax) :
if(not test[i] and not tval[i]) :
continue
# special for vector gauge terms
if(lorentztag=="VVT" and ix>=13) :
continue
if(not test[i] or tval[i]=="Unknown") :
# special for mass terms and vectors
if(lorentztag=="VVT" and ix >=10 and ix <=12 and
float(vertex.particles[order[0]-1].mass.value) == 0. ) :
continue
raise SkipThisVertex()
tval2 = evaluate(test[i],model,parmsubs)
if(abs(tval[i]-tval2)>1e-6) :
# special for fermion mass term if fermions massless
if(lorentztag=="FFT" and ix ==6 and tval2 == 0. and
float(vertex.particles[order[0]-1].mass.value) == 0. ) :
continue
raise SkipThisVertex()
# simple clean up
for i in range(0,len(value)):
if(value[i]) :
value[i] = value[i].replace("(1.0) * ","").replace(" * (1)","")
# put everything together
coup_left = 0.
coup_right = 0.
coup_norm = 0.
if(lorentztag == "SST" or lorentztag == "VVT" or
lorentztag == "VVVT" or lorentztag == "FFT" ) :
coup_norm = value[0]
if(value[1] or value[2]) :
raise SkipThisVertex()
elif(lorentztag=="FFVT") :
if(not value[1] and not value[2]) :
coup_norm = value[0]
coup_left = "1."
coup_right = "1."
elif(not value[0]) :
coup_norm = "1."
if(value[1] and value[2]) :
coup_left = value[1]
coup_right = value[2]
elif(value[1]) :
coup_left = value[1]
coup_right = "0."
elif(value[2]) :
coup_left = "0."
coup_right = value[2]
else :
raise SkipThisVertex()
else :
raise SkipThisVertex()
else :
raise SkipThisVertex()
# return the answer
return (coup_left,coup_right,coup_norm)
def extractStructures(L) :
structure1 = L.structure.replace(")-P",") - P").replace(")+P",") + P")
structure1 = structure1.split()
structures =[]
sign=''
for struct in structure1 :
if(struct=='+') :
continue
elif(struct=='-') :
sign='-'
else :
structures.append(sign+struct.strip())
sign=''
return structures
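# Worked example (illustrative): for a UFO structure string such as
#   "P(1,2)*Metric(1,2) - Metric(1003,2003)"
# extractStructures returns ['P(1,2)*Metric(1,2)', '-Metric(1003,2003)'], i.e. the
# individual terms with their relative signs attached.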
def changeSign(sign1,sign2) :
if((sign1=="+" and sign2=="+") or\
(sign1=="-" and sign2=="-")) :
return "+"
else :
return "-"
def epsilonOrder(eps) :
terms,sign = extractAntiSymmetricIndices(eps,"Epsilon(")
return (sign,"Epsilon(%s,%s,%s,%s)" % (terms[0],terms[1],terms[2],terms[3]))
def VVSEpsilon(couplings,struct) :
if(struct.find("Epsilon")<0) :
return
fact=""
sign="+"
if(struct[-1]==")") :
fact=struct.split("(")[0]
if(fact.find("Epsilon")>=0) :
fact=""
else :
struct=struct.split("(",1)[1][:-1]
if(fact[0]=="-") :
sign="-"
fact=fact[1:]
split = struct.split("*")
# find the epsilon
eps=""
for piece in split :
if(piece.find("Epsilon")>=0) :
eps=piece
split.remove(piece)
break
# and any prefactors
for piece in split :
if(piece.find("P(")<0) :
split.remove(piece)
if(piece[0]=="+" or piece[0]=="-") :
sign=changeSign(sign,piece[0])
piece=piece[1:]
if(fact=="") :
fact=piece
else :
fact = "( %s ) * ( %s )" % (fact , piece)
# now sort out the momenta
for piece in split :
terms=piece.split(",")
terms[0]=terms[0].strip("P(")
terms[1]=terms[1].strip(")")
eps=eps.replace(terms[0],"P%s"%terms[1])
(nsign,eps)=epsilonOrder(eps)
if(nsign>0) : sign=changeSign(sign,"-")
if(fact=="") : fact="1."
if(eps!="Epsilon(1,2,P1,P2)") :
return
if(couplings[6]==0.) :
couplings[6] = "( %s%s )" % (sign,fact)
else :
couplings[6] = "( %s ) + ( %s%s )" % (couplings[6],sign,fact)
def scalarVectorCouplings(value,prefactors,L,lorentztag,all_couplings,order) :
# set up the types of term we are looking for
if(lorentztag=="VVS") :
couplings=[0.,0.,0.,0.,0.,0.,0.]
terms=[['P(-1,%s)' % order[0],
'P(-1,%s)' % order[1],
'Metric(%s,%s)' %(order[0],order[1])],
['P(1,%s)' % order[0],
'P(2,%s)' % order[0]],
['P(1,%s)' % order[0],
'P(2,%s)' % order[1]],
['P(1,%s)' % order[1],
'P(2,%s)' % order[0]],
['P(1,%s)' % order[1],
'P(2,%s)' % order[1]],
['Metric(%s,%s)'%(order[0],order[1])]]
elif(lorentztag=="VVSS") :
couplings=[0.]
terms=[['Metric(%s,%s)' % (order[0],order[1])]]
elif(lorentztag=="VSS"):
couplings=[0.,0.]
terms=[['P(%s,%s)' % (order[0],order[2])],
['P(%s,%s)' % (order[0],order[1])]]
# extract the lorentz structures
structures = extractStructures(L)
# handle the scalar couplings
itype=-1
try :
for term in terms:
itype+=1
for perm in itertools.permutations(term):
label = '*'.join(perm)
for istruct in range(0,len(structures)) :
if label in structures[istruct] :
reminder = structures[istruct].replace(label,'1.',1)
couplings[itype]+=eval(reminder, {'cmath':cmath} )
structures[istruct]='Done'
except :
raise SkipThisVertex()
# special for VVS and epsilon
# handle the pseudoscalar couplings
for struct in structures :
if(struct != "Done" ) :
if(lorentztag=="VVS") :
VVSEpsilon(couplings,struct)
else :
raise SkipThisVertex()
# put it all together
if(len(all_couplings)==0) :
for ic in range(0,len(couplings)) :
if(couplings[ic]!=0.) :
all_couplings.append('(%s) * (%s) * (%s)' % (prefactors,value,couplings[ic]))
else :
all_couplings.append(False)
else :
for ic in range(0,len(couplings)) :
if(couplings[ic]!=0. and all_couplings[ic]) :
all_couplings[ic] = '(%s) * (%s) * (%s) + (%s) ' % (prefactors,value,
couplings[ic],all_couplings[ic])
elif(couplings[ic]!=0) :
all_couplings[ic] = '(%s) * (%s) * (%s) ' % (prefactors,value,couplings[ic])
return all_couplings
def processScalarVectorCouplings(lorentztag,vertex,model,parmsubs,all_couplings,header,order) :
# check the values
tval = [False]*len(all_couplings[0])
value =[False]*len(all_couplings[0])
for icolor in range(0,len(all_couplings)) :
for ix in range(0,len(all_couplings[icolor])) :
if(not value[ix]) :
value[ix] = all_couplings[icolor][ix]
if(value[ix] and not tval[ix]) :
tval[ix] = evaluate(value[ix],model,parmsubs)
elif(value[ix]) :
tval2 = evaluate(all_couplings[icolor][0],model,parmsubs)
if(abs(tval[ix]-tval2)>1e-6) :
raise SkipThisVertex()
append = ""
symbols = set()
coup_norm=0.
if(lorentztag=="VVS") :
if(not value[0] and not value[1] and not value[2] and \
not value[3] and not value[4] and not value[6] and value[5]) :
coup_norm=value[5]
else :
for ix in range(0,len(value)) :
if(value[ix]) :
value[ix], sym = py2cpp(value[ix])
symbols |= sym
else :
value[ix]=0.
lorentztag = 'GeneralVVS'
header="kinematics(true);"
# g_mu,nv piece of coupling
if(value[5]!=0.) :
append +='a00( %s + Complex(( %s )* GeV2/invariant(1,2)));\n' % ( value[0],value[5])
else :
append +='a00( %s );\n' % value[0]
# other couplings
append += 'a11( %s );\n a12( %s );\n a21( %s );\n a22( %s );\n aEp( %s );\n' % \
( value[1],value[2],value[3],value[4],value[6] )
coup_norm="1."
elif(lorentztag=="VVSS") :
coup_norm = value[0]
elif(lorentztag=="VSS") :
if(abs(tval[0]+tval[1])>1e-6) :
for ix in range(0,len(value)) :
if(value[ix]) :
value[ix], sym = py2cpp(value[ix])
symbols |= sym
else :
value[ix]=0.
coup_norm = "1."
append = 'if(p2->id()==%s) { a( %s ); b( %s);}\n else { a( %s ); b( %s);}' \
% (vertex.particles[order[1]-1].pdg_code,
value[0],value[1],value[1],value[0])
else :
coup_norm = value[1]
append = 'if(p2->id()!=%s){norm(-norm());}' \
% vertex.particles[order[1]-1].pdg_code
# return the answer
return (coup_norm,append,lorentztag,header,symbols)
def getIndices(term) :
if(term[0:2]=="P(") :
indices = term.strip(")").strip("P(").split(",")
mom = int(indices[1])
index = int(indices[0])
return (True,mom,index)
else :
return (False,0,0)
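# Worked example: getIndices("P(2003,1)") returns (True, 1, 2003) -- the term is a
# momentum, belonging to particle 1 and carrying Lorentz index 2003; any term that
# is not a momentum returns (False, 0, 0).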
def lorentzScalar(vertex,L) :
dotProduct = """(invariant( i[{i1}], i[{i2}] )/GeV2)"""
structures=L.structure.split()
output="("
for struct in structures:
if(struct=="+" or struct=="-") :
output+=struct
continue
structure = struct.split("*")
worked = False
mom=-1
newTerm=""
while True :
term = structure[-1]
structure.pop()
(momentum,mom,index) = getIndices(term)
if( not momentum) : break
# look for the matching momenta
for term in structure :
(momentum,mom2,index2) = getIndices(term)
if(index2==index) :
structure.remove(term)
dot = dotProduct.format(i1=mom-1,i2=mom2-1)
if(newTerm=="") :
newTerm = dot
else :
newTerm = " ( %s) * ( %s ) " % (newTerm,dot)
if(len(structure)==0) :
worked = True
break
if(not worked) :
return False
else :
output+=newTerm
output+=")"
return output
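# Illustrative example: for a purely scalar structure such as "P(-1,1)*P(-1,2)" the
# momenta carrying the contracted index -1 are paired up and a string like
# "((invariant( i[1], i[0] )/GeV2))" is returned, which the kinematics templates
# below embed into the generated C++ vertex; False is returned if any Lorentz index
# cannot be paired.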
kinematicsline = """\
long id [3]={{{id1},{id2},{id3}}};
long id2[3]={{p1->id(),p2->id(),p3->id()}};
unsigned int i[3];
for(unsigned int ix=0;ix<3;++ix) {{
for(unsigned int iy=0;iy<3;++iy) {{
if(id[ix]==id2[iy]) {{
i[ix] = iy;
id2[iy]=0;
break;
}}
}}
}}
double hw_kine1 = {kine};
"""
kinematicsline2 = """\
long id [4]={{{id1},{id2},{id3},{id4}}};
long id2[4]={{p1->id(),p2->id(),p3->id(),p4->id()}};
unsigned int i[4];
for(unsigned int ix=0;ix<4;++ix) {{
for(unsigned int iy=0;iy<4;++iy) {{
if(id[ix]==id2[iy]) {{
i[ix] = iy;
id2[iy]=0;
break;
}}
}}
}}
double hw_kine1 = {kine};
"""
kinematicsline3 ="""\
double hw_kine{i} = {kine};
"""
def scalarCouplings(vertex,value,prefactors,L,lorentztag,
all_couplings,prepend,header) :
try :
val = int(L.structure)
except :
output = lorentzScalar(vertex,L)
if( not output ) :
raise SkipThisVertex()
else :
if(prepend=="") :
if(lorentztag=="SSS") :
# order doesn't matter here, all same spin
prepend = kinematicsline.format(id1=vertex.particles[0].pdg_code,
id2=vertex.particles[1].pdg_code,
id3=vertex.particles[2].pdg_code,
kine=output)
else :
# order doesn't matter here, all same spin
prepend = kinematicsline2.format(id1=vertex.particles[0].pdg_code,
id2=vertex.particles[1].pdg_code,
id3=vertex.particles[2].pdg_code,
id4=vertex.particles[3].pdg_code,
kine=output)
value = "(%s) *(hw_kine1)" % value
else :
osplit=prepend.split("\n")
i=-1
while osplit[i]=="":
i=i-1
ikin=int(osplit[i].split("=")[0].replace("double hw_kine",""))+1
prepend +=kinematicsline3.format(kine=output,i=ikin)
value = "(%s) *(hw_kine%s)" % (value,ikin)
header="kinematics(true);"
if(len(all_couplings)==0) :
all_couplings.append('(%s) * (%s)' % (prefactors,value))
else :
all_couplings[0] = '(%s) * (%s) + (%s)' % (prefactors,value,all_couplings[0])
return (prepend, header,all_couplings)
def processScalarCouplings(model,parmsubs,all_couplings) :
tval = False
value = False
for icolor in range(0,len(all_couplings)) :
if(len(all_couplings[icolor])!=1) :
raise SkipThisVertex()
if(not value) :
value = all_couplings[icolor][0]
m = re.findall('hw_kine[0-9]*', all_couplings[icolor][0])
if m:
for kine in m:
# bizarre number for checks, must be a better option
parmsubs[kine] = 987654321.
if(not tval) :
value = all_couplings[icolor][0]
tval = evaluate(value,model,parmsubs)
else :
tval2 = evaluate(all_couplings[icolor][0],model,parmsubs)
            if(abs(tval-tval2)>1e-6) :
raise SkipThisVertex()
# cleanup and return the answer
return value.replace("(1.0) * ","").replace(" * (1)","")
def vectorCouplings(vertex,value,prefactors,L,lorentztag,pos,
all_couplings,append,qcd,order) :
structures=extractStructures(L)
terms=[]
signs=[]
if(lorentztag=="VVV") :
terms=[['P(%s,%s)' % (order[2],order[0]),'Metric(%s,%s)' % (order[0],order[1])],
['P(%s,%s)' % (order[2],order[1]),'Metric(%s,%s)' % (order[0],order[1])],
['P(%s,%s)' % (order[1],order[0]),'Metric(%s,%s)' % (order[0],order[2])],
['P(%s,%s)' % (order[1],order[2]),'Metric(%s,%s)' % (order[0],order[2])],
['P(%s,%s)' % (order[0],order[1]),'Metric(%s,%s)' % (order[1],order[2])],
['P(%s,%s)' % (order[0],order[2]),'Metric(%s,%s)' % (order[1],order[2])]]
signs=[1.,-1.,-1.,1.,1.,-1.]
elif(lorentztag=="VVVV") :
terms=[['Metric(%s,%s)' % (order[0],order[3]),'Metric(%s,%s)' % (order[1],order[2])],
['Metric(%s,%s)' % (order[0],order[2]),'Metric(%s,%s)' % (order[1],order[3])],
['Metric(%s,%s)' % (order[0],order[1]),'Metric(%s,%s)' % (order[2],order[3])]]
signs=[1.,1.,1.]
elif(lorentztag=="VVVS") :
terms=[['P(%s,%s)' % (order[2],order[0]),'Metric(%s,%s)' % (order[0],order[1])],
['P(%s,%s)' % (order[2],order[1]),'Metric(%s,%s)' % (order[0],order[1])],
['P(%s,%s)' % (order[1],order[0]),'Metric(%s,%s)' % (order[0],order[2])],
['P(%s,%s)' % (order[1],order[2]),'Metric(%s,%s)' % (order[0],order[2])],
['P(%s,%s)' % (order[0],order[1]),'Metric(%s,%s)' % (order[1],order[2])],
['P(%s,%s)' % (order[0],order[2]),'Metric(%s,%s)' % (order[1],order[2])],
['Epsilon(1,2,3,-1)','P(-1,1)'],['Epsilon(1,2,3,-1)','P(-1,2)'],
['Epsilon(1,2,3,-1)','P(-1,3)']]
signs=[1.,-1.,-1.,1.,1.,-1.,1.,1.,1.]
# extract the couplings
new_couplings = [False]*len(terms)
iterm=0
try :
for term in terms:
for perm in itertools.permutations(term):
label = '*'.join(perm)
for istruct in range(0,len(structures)) :
if label in structures[istruct] :
reminder = structures[istruct].replace(label,'1.',1)
structures[istruct] = "Done"
val = eval(reminder, {'cmath':cmath} )*signs[iterm]
if(new_couplings[iterm]) :
new_couplings[iterm] += val
else :
new_couplings[iterm] = val
iterm += 1
except :
raise SkipThisVertex()
# check we've handled all the terms
for val in structures:
if(val!="Done") :
raise SkipThisVertex()
# set the couplings
for icoup in range(0,len(new_couplings)) :
if(new_couplings[icoup]) :
new_couplings[icoup] = '(%s) * (%s) *(%s)' % (new_couplings[icoup],prefactors,value)
if(len(all_couplings)==0) :
all_couplings=new_couplings
else :
for icoup in range(0,len(new_couplings)) :
if(new_couplings[icoup] and all_couplings[icoup]) :
all_couplings[icoup] = '(%s) * (%s) *(%s) + (%s) ' % (new_couplings[icoup],prefactors,value,all_couplings[icoup])
elif(new_couplings[icoup]) :
all_couplings[icoup] = new_couplings[icoup]
# ordering for VVV type vertices
if(len(pos[8]) != 3 and (lorentztag=="VVV" or lorentztag=="VVVS")) :
append = VVVordering(vertex)
return all_couplings,append
def processVectorCouplings(lorentztag,vertex,model,parmsubs,all_couplings,append,header) :
value = False
tval = False
if(lorentztag=="VVV") :
for icolor in range(0,len(all_couplings)) :
# loop over the different terms
for ix in range(0,len(all_couplings[icolor])) :
if(not value) :
value = all_couplings[icolor][ix]
tval = evaluate(value,model,parmsubs)
else :
if all_couplings[icolor][ix] == False:
raise SkipThisVertex()
else:
tval2 = evaluate(all_couplings[icolor][ix],model,parmsubs)
if(abs(tval-tval2)>1e-6) :
raise SkipThisVertex()
elif(lorentztag=="VVVV") :
order=[]
colours = vertex.color
if(len(colours)==1) :
tval=[]
for i in range(0,3) :
tval.append(evaluate(all_couplings[0][i],model,parmsubs))
if(compare(tval[2],-2.*tval[1]) and
compare(tval[2],-2.*tval[0]) ) :
order=[0,1,2,3]
value = "0.5*(%s)" % all_couplings[0][2]
elif(compare(tval[1],-2.*tval[2]) and
compare(tval[1],-2.*tval[0]) ) :
order=[0,2,1,3]
value = "0.5*(%s)" % all_couplings[0][1]
elif(compare(tval[0],-2.*tval[2]) and
compare(tval[0],-2.*tval[1]) ) :
order=[0,3,1,2]
value = "0.5*(%s)" % all_couplings[0][0]
else:
raise SkipThisVertex()
pattern = \
"bool done[4]={false,false,false,false};\n" + \
" tcPDPtr part[4]={p1,p2,p3,p4};\n" + \
" unsigned int iorder[4]={0,0,0,0};\n" + \
" for(unsigned int ix=0;ix<4;++ix) {\n" + \
" if(!done[0] && part[ix]->id()==%s) {done[0]=true; iorder[%s] = ix; continue;}\n" + \
" if(!done[1] && part[ix]->id()==%s) {done[1]=true; iorder[%s] = ix; continue;}\n" + \
" if(!done[2] && part[ix]->id()==%s) {done[2]=true; iorder[%s] = ix; continue;}\n" + \
" if(!done[3] && part[ix]->id()==%s) {done[3]=true; iorder[%s] = ix; continue;}\n" + \
" }\n" + \
" setType(2);\n" + \
" setOrder(iorder[0],iorder[1],iorder[2],iorder[3]);"
# order doesn't matter here same spin
append = pattern % ( vertex.particles[0].pdg_code,order[0],
vertex.particles[1].pdg_code,order[1],
vertex.particles[2].pdg_code,order[2],
vertex.particles[3].pdg_code,order[3] )
else :
for icolor in range(0,len(all_couplings)) :
col=colours[icolor].split("*")
if(len(col)==2 and "f(" in col[0] and "f(" in col[1]) :
sign = 1
for i in range(0,2) :
col[i],stemp = extractAntiSymmetricIndices(col[i],"f(")
for ix in range(0,len(col[i])): col[i][ix]=int(col[i][ix])
sign *=stemp
if(col[0][0]>col[1][0]) : col[0],col[1] = col[1],col[0]
# first flow
if(col[0][0]==1 and col[0][1]==2 and col[1][0] ==3 and col[1][1] == 4) :
if(all_couplings[icolor][2] or not all_couplings[icolor][0] or
not all_couplings[icolor][1]) :
raise SkipThisVertex()
if(not value) :
value = all_couplings[icolor][1]
tval = evaluate(value,model,parmsubs)
tval2 = -evaluate(all_couplings[icolor][0],model,parmsubs)
tval3 = evaluate(all_couplings[icolor][1],model,parmsubs)
elif(col[0][0]==1 and col[0][1]==3 and col[1][0] ==2 and col[1][1] == 4) :
if(all_couplings[icolor][1] or not all_couplings[icolor][0] or
not all_couplings[icolor][2]) :
raise SkipThisVertex()
if(not value) :
value = all_couplings[icolor][2]
tval = evaluate(value,model,parmsubs)
tval2 = -evaluate(all_couplings[icolor][0],model,parmsubs)
tval3 = evaluate(all_couplings[icolor][2],model,parmsubs)
elif(col[0][0]==1 and col[0][1]==4 and col[1][0] ==2 and col[1][1] == 3) :
if(all_couplings[icolor][0] or not all_couplings[icolor][1] or
not all_couplings[icolor][2]) :
raise SkipThisVertex()
if(not value) :
value = all_couplings[icolor][2]
tval = evaluate(value,model,parmsubs)
tval2 = -evaluate(all_couplings[icolor][1],model,parmsubs)
tval3 = evaluate(all_couplings[icolor][2],model,parmsubs)
else :
raise SkipThisVertex()
if(abs(tval-tval2)>1e-6 or abs(tval-tval3)>1e-6 ) :
raise SkipThisVertex()
append = 'setType(1);\nsetOrder(0,1,2,3);'
else :
print('unknown colour structure for VVVV vertex')
raise SkipThisVertex()
elif(lorentztag=="VVVS") :
try :
            # two distinct cases: couplings come either from terms 0-5 or from terms 6-8 (the epsilon terms)
if(all_couplings[0][0]) :
imin=0
imax=6
header="scalar(true);"
else :
imin=6
imax=9
header="scalar(false);"
for icolor in range(0,len(all_couplings)) :
# loop over the different terms
for ix in range(imin,imax) :
if(not value) :
value = all_couplings[icolor][ix]
tval = evaluate(value,model,parmsubs)
else :
                        tval2 = evaluate(all_couplings[icolor][ix],model,parmsubs)
if(abs(tval-tval2)>1e-6) :
raise SkipThisVertex()
except :
            raise SkipThisVertex()
# cleanup and return the answer
value = value.replace("(1.0) * ","").replace(" * (1)","")
return (value,append,header)
def fermionCouplings(value,prefactors,L,all_couplings,order) :
new_couplings=[False,False]
try :
new_couplings[0],new_couplings[1] = parse_lorentz(L.structure)
except :
raise SkipThisVertex()
for i in range(0,2) :
if new_couplings[i]:
new_couplings[i] = '(%s) * (%s) * (%s)' % (prefactors,new_couplings[i],value)
if(len(all_couplings)==0) :
all_couplings=new_couplings
else :
for i in range(0,len(new_couplings)) :
if(new_couplings[i] and all_couplings[i]) :
all_couplings[i] = '(%s) + (%s) ' % (new_couplings[i],all_couplings[i])
elif(new_couplings[i]) :
all_couplings[i] = new_couplings[i]
return all_couplings
def processFermionCouplings(lorentztag,vertex,model,parmsubs,all_couplings,order) :
leftcontent = all_couplings[0][0] if all_couplings[0][0] else "0."
rightcontent = all_couplings[0][1] if all_couplings[0][1] else "0."
tval=[evaluate( leftcontent,model,parmsubs),
evaluate(rightcontent,model,parmsubs)]
for icolor in range(0,len(all_couplings)) :
# loop over the different terms
for ix in range(0,len(all_couplings[icolor])) :
tval2 = evaluate(all_couplings[icolor][ix],model,parmsubs) if all_couplings[icolor][ix] else 0.
if(abs(tval[ix]-tval2)>1e-6) :
raise SkipThisVertex()
normcontent = "1."
append=""
if lorentztag == 'FFV':
append = ('if(p1->id()!=%s) {Complex ltemp=left(), rtemp=right(); left(-rtemp); right(-ltemp);}'
% vertex.particles[order[0]-1].pdg_code)
return normcontent,leftcontent,rightcontent,append
def RSCouplings(value,prefactors,L,all_couplings,order) :
raise SkipThisVertex()
|
hep-mirrors/herwig
|
Models/Feynrules/python/ufo2peg/check_lorentz.py
|
Python
|
gpl-3.0
| 38,432 | 0.032863 |
'''
Copyright (C) 2012 mentalsmash.org <contact@mentalsmash.org>
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Library General Public
License as published by the Free Software Foundation; either
version 2 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Library General Public License for more details.
You should have received a copy of the GNU Library General Public
License along with this library; if not, write to the
Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
Boston, MA 02110-1301, USA.
'''
|
mentalsmash/geek-map
|
mentalsmash/__init__.py
|
Python
|
gpl-2.0
| 775 | 0 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Custom managers for the Division model.
"""
from __future__ import unicode_literals
from calaccess_processed.managers import BulkLoadSQLManager
class OCDAssemblyDivisionManager(BulkLoadSQLManager):
"""
Custom manager for state assembly OCD Divisions.
"""
def get_queryset(self):
"""
Filters down to state assembly divisions.
"""
qs = super(OCDAssemblyDivisionManager, self).get_queryset()
return qs.filter(subid1='ca', subtype2='sldl')
class OCDSenateDivisionManager(BulkLoadSQLManager):
"""
Custom manager for state senate OCD Divisions.
"""
def get_queryset(self):
"""
Filters down to state senate divisions.
"""
qs = super(OCDSenateDivisionManager, self).get_queryset()
return qs.filter(subid1='ca', subtype2='sldu')
class OCDCaliforniaDivisionManager(BulkLoadSQLManager):
"""
Custom manager for OCD Divisions in California.
"""
def get_queryset(self):
"""
Filters down to divisions in California.
"""
qs = super(OCDCaliforniaDivisionManager, self).get_queryset()
return qs.filter(subid1='ca')
def california(self):
"""
Returns state of California division.
"""
qs = super(OCDCaliforniaDivisionManager, self).get_queryset()
return qs.get(id='ocd-division/country:us/state:ca')
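# Illustrative usage (assumes these managers are attached to an OCD Division proxy
# model, e.g. ``objects = OCDCaliforniaDivisionManager()`` and
# ``assembly = OCDAssemblyDivisionManager()``; the attribute names are assumptions):
#   Division.objects.california()  # the ocd-division/country:us/state:ca record
#   Division.assembly.all()        # all California state assembly districts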
|
california-civic-data-coalition/django-calaccess-processed-data
|
calaccess_processed_elections/managers/opencivicdata/core/divisions.py
|
Python
|
mit
| 1,449 | 0 |
"""
OpenStreetMap to Land Use/Land Cover Maps
"""
def osm_to_sqdb(osmXml, osmSQLITE):
"""
Convert OSM file to SQLITE DB
"""
from gasp.toshp.gdal import ogr_btw_driver
return ogr_btw_driver(osmXml, osmSQLITE, supportForSpatialLite=True)
def osm_project(osmSQLITE, srs_epsg):
"""
Reproject OSMDATA to a specific Spatial Reference System
"""
from gasp.gdal.proj import ogr2ogr_transform_inside_sqlite
from .var import osmTableData
osmtables = {}
for table in osmTableData:
ogr2ogr_transform_inside_sqlite(
osmSQLITE, table, 4326, srs_epsg,
'{}_{}'.format(table, str(srs_epsg)),
sql="SELECT * FROM {}".format(osmTableData[table])
)
osmtables[table] = '{}_{}'.format(table, str(srs_epsg))
return osmtables
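# Example of the returned mapping (assuming srs_epsg=3857 and the usual OSM layer
# names in osmTableData): {'points': 'points_3857', 'lines': 'lines_3857',
# 'polygons': 'polygons_3857'} -- each OSM layer gets a reprojected copy named after
# the target EPSG code.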
def raster_based(osmdata, nomenclature, refRaster, lulcRst,
epsg=3857, overwrite=None):
"""
Convert OSM Data into Land Use/Land Cover Information
    A raster-based approach.
TODO: Add detailed description
"""
# ************************************************************************ #
# Python Modules from Reference Packages #
# ************************************************************************ #
import os
import pandas
# ************************************************************************ #
# Senpy dependencies #
# ************************************************************************ #
from gasp.oss.ops import create_folder
from gasp.grs import run_grass
# Rules to be used
from gasp.osm2lulc.rules.rule1 import raster_selection as selection
from gasp.osm2lulc.rules.rule2 import raster_get_roads as get_roads
from gasp.osm2lulc.rules.rule3_4 import raster_area as area
from gasp.osm2lulc.rules.rule5 import basic_buffer
from gasp.osm2lulc.rules.rule7 import assign_points_tag_to_buildings
# ************************************************************************ #
# Global Settings #
# ************************************************************************ #
from .var import PRIORITIES
workspace = os.path.join(os.path.dirname(lulcRst), 'osmtolulc')
# Check if workspace exists
if os.path.exists(workspace):
if overwrite:
create_folder(workspace)
else:
raise ValueError('Path {} already exists'.format(workspace))
else:
create_folder(workspace)
# ************************************************************************ #
# Convert OSM file to SQLITE DB #
# ************************************************************************ #
osm_db = osm_to_sqdb(osmdata, os.path.join(workspace, 'osm.sqlite'))
# ************************************************************************ #
# Transform SRS of OSM Data #
# ************************************************************************ #
osmTableData = osm_project(osm_db, epsg)
# ************************************************************************ #
# Start a GRASS GIS Session #
# ************************************************************************ #
grass_base = run_grass(
workspace, grassBIN='grass74', location='grloc', srs=epsg)
import grass.script as grass
import grass.script.setup as gsetup
gsetup.init(grass_base, workspace, 'grloc', 'PERMANENT')
# ************************************************************************ #
# IMPORT SOME GASP MODULES FOR GRASS GIS #
# ************************************************************************ #
from gasp.grsconv import grass_converter
from gasp.grs.g import raster_to_region
from gasp.grs.r import mosaic_raster
# ************************************************************************ #
# SET GRASS GIS LOCATION EXTENT #
# ************************************************************************ #
extRst = grass_converter(refRaster, 'extent_raster')
raster_to_region(extRst)
# ************************************************************************ #
# MapResults #
mergeOut = {}
# ************************************************************************ #
# ************************************************************************ #
# 1 - Selection Rule #
# ************************************************************************ #
"""
selOut = {
cls_code : rst_name, ...
}
"""
selOut = selection(
osm_db, nomenclature, osmTableData['polygons'], workspace
)
for cls in selOut:
mergeOut[cls] = [selOut[cls]]
# ************************************************************************ #
# 2 - Get Information About Roads Location #
# ************************************************************************ #
"""
roads = {
cls_code : rst_name, ...
}
"""
roads = get_roads(
osm_db, nomenclature,
osmTableData['lines'], osmTableData['polygons'], workspace
)
for cls in roads:
if cls not in mergeOut:
mergeOut[cls] = [roads[cls]]
else:
mergeOut[cls].append(roads[cls])
# ************************************************************************ #
# 3 - Area Upper than #
# ************************************************************************ #
"""
auOut = {
cls_code : rst_name, ...
}
"""
auOut = area(
osm_db, nomenclature, osmTableData['polygons'], workspace, UPPER=True
)
for cls in auOut:
if cls not in mergeOut:
mergeOut[cls] = [auOut[cls]]
else:
mergeOut[cls].append(auOut[cls])
# ************************************************************************ #
# 4 - Area Lower than #
# ************************************************************************ #
"""
alOut = {
cls_code : rst_name, ...
}
"""
alOut = area(
osm_db, nomenclature, osmTableData['polygons'], workspace, UPPER=None
)
for cls in alOut:
if cls not in mergeOut:
mergeOut[cls] = [alOut[cls]]
else:
mergeOut[cls].append(alOut[cls])
# ************************************************************************ #
# 5 - Get data from lines table (railway | waterway) #
# ************************************************************************ #
"""
bfOut = {
cls_code : [rst_name, ...], ...
}
"""
bfOut = basic_buffer(
osm_db, nomenclature, osmTableData['lines'], workspace
)
for cls in bfOut:
if cls not in mergeOut:
mergeOut[cls] = bfOut[cls]
else:
mergeOut[cls] += bfOut[cls]
# ************************************************************************ #
# 7 - Assign untagged Buildings to tags #
# ************************************************************************ #
if nomenclature != "GLOBE_LAND_30":
buildsOut = assign_points_tag_to_buildings(
osm_db, nomenclature,
osmTableData['points'], osmTableData['polygons'], workspace
)
for cls in buildsOut:
if cls not in mergeOut:
mergeOut[cls] = [buildsOut[cls]]
else:
mergeOut[cls].append(buildsOut[cls])
# ************************************************************************ #
# Produce LULC Map #
# ************************************************************************ #
"""
Merge all results for one cls into one raster
mergeOut = {
cls_code : [rst_name, rst_name, ...], ...
}
into
mergeOut = {
cls_code : patched_raster, ...
}
"""
for cls in mergeOut:
if len(mergeOut[cls]) == 1:
mergeOut[cls] = mergeOut[cls][0]
else:
mosaic_raster(mergeOut[cls], 'mosaic_{}'.format(str(cls)))
mergeOut[cls] = 'mosaic_{}'.format(str(cls))
"""
Merge all Class Raster using a priority rule
"""
__priorities = PRIORITIES[nomenclature]
lst_rst = []
for cls in __priorities:
if cls not in mergeOut:
continue
else:
lst_rst.append(mergeOut[cls])
mosaic_raster(lst_rst, os.path.splitext(os.path.basename(lulcRst))[0])
grass_converter(os.path.splitext(os.path.basename(lulcRst))[0], lulcRst)
return lulcRst
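# Minimal usage sketch (paths are assumptions; GLOBE_LAND_30 is one of the supported
# nomenclatures referenced above):
#   raster_based('lisbon.osm.xml', 'GLOBE_LAND_30', 'ref_grid.tif',
#                '/tmp/lulc.tif', epsg=3857, overwrite=True)
# i.e. an OSM extract plus a reference raster (which fixes the extent of the GRASS
# location) are turned into a single LULC raster written to lulcRst.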
# ---------------------------------------------------------------------------- #
# ---------------------------------------------------------------------------- #
# ---------------------------------------------------------------------------- #
def vector_based(osmdata, nomenclature, refRaster, lulcShp,
epsg=3857, overwrite=None):
"""
Convert OSM Data into Land Use/Land Cover Information
    A vector-based approach.
TODO: Add a detailed description.
"""
# ************************************************************************ #
# Python Modules from Reference Packages #
# ************************************************************************ #
import os
import pandas
# ************************************************************************ #
# GASP dependencies #
# ************************************************************************ #
from gasp.oss.ops import create_folder
from gasp.grs import run_grass
from gasp.gdal.manage.general import ogr_merge
# Rules to be used
from gasp.osm2lulc.rules.rule1 import vector_selection as selection
from gasp.osm2lulc.rules.rule2 import vector_get_roads as get_roads
from gasp.osm2lulc.rules.rule3_4 import vector_area as area
from gasp.osm2lulc.rules.rule5 import vector_base_buffer as basic_buffer
from gasp.osm2lulc.rules.rule7 import vector_assign_pntags_to_build
# ************************************************************************ #
# Global Settings #
# ************************************************************************ #
from .var import osmTableData, PRIORITIES
workspace = os.path.join(os.path.dirname(lulcShp), 'osmtolulc')
# Check if workspace exists
if os.path.exists(workspace):
if overwrite:
create_folder(workspace)
else:
raise ValueError('Path {} already exists'.format(workspace))
else:
create_folder(workspace)
# ************************************************************************ #
# Convert OSM file to SQLITE DB #
# ************************************************************************ #
osm_db = osm_to_sqdb(osmdata, os.path.join(workspace, 'osm.sqlite'))
# ************************************************************************ #
# Transform SRS of OSM Data #
# ************************************************************************ #
osmTableData = osm_project(osm_db, epsg)
# ************************************************************************ #
# Start a GRASS GIS Session #
# ************************************************************************ #
grass_base = run_grass(workspace, grassBIN='grass74', location='grloc', srs=epsg)
import grass.script as grass
import grass.script.setup as gsetup
gsetup.init(grass_base, workspace, 'grloc', 'PERMANENT')
# ************************************************************************ #
# IMPORT SOME GASP MODULES FOR GRASS GIS #
# ************************************************************************ #
from gasp.grsconv import grass_converter
from gasp.grs.g import raster_to_region
from gasp.grs.v.overlay import union
from gasp.grs.v.overlay import erase
from gasp.grs.db import reset_table
# ************************************************************************ #
# SET GRASS GIS LOCATION EXTENT #
# ************************************************************************ #
extRst = grass_converter(refRaster, 'extent_raster')
raster_to_region(extRst)
# ************************************************************************ #
# MapResults #
# ************************************************************************ #
mergeOut = {}
# ************************************************************************ #
# 1 - Selection Rule #
# ************************************************************************ #
"""
selOut = {
cls_code : rst_name, ...
}
"""
selOut = selection(
osm_db, nomenclature, osmTableData['polygons'], workspace
)
for cls in selOut:
mergeOut[cls] = [selOut[cls]]
# ************************************************************************ #
# 2 - Get Information About Roads Location #
# ************************************************************************ #
"""
roads = {
cls_code : rst_name, ...
}
"""
roads = get_roads(
osm_db, nomenclature,
osmTableData['lines'], osmTableData['polygons'], workspace
)
for cls in roads:
if cls not in mergeOut:
mergeOut[cls] = [roads[cls]]
else:
mergeOut[cls].append(roads[cls])
# ************************************************************************ #
# 3 - Area Upper than #
# ************************************************************************ #
"""
auOut = {
cls_code : rst_name, ...
}
"""
auOut = area(osm_db, nomenclature, osmTableData['polygons'],
workspace, UPPER=True)
for cls in auOut:
if cls not in mergeOut:
mergeOut[cls] = [auOut[cls]]
else:
mergeOut[cls].append(auOut[cls])
# ************************************************************************ #
# 4 - Area Lower than #
# ************************************************************************ #
"""
alOut = {
cls_code : rst_name, ...
}
"""
alOut = area(osm_db, nomenclature, osmTableData['polygons'],
workspace, UPPER=False)
for cls in alOut:
if cls not in mergeOut:
mergeOut[cls] = [alOut[cls]]
else:
mergeOut[cls].append(alOut[cls])
# ************************************************************************ #
# 5 - Get data from lines table (railway | waterway) #
# ************************************************************************ #
"""
bfOut = {
cls_code : [rst_name, ...], ...
}
"""
bfOut = basic_buffer(
osm_db, nomenclature, osmTableData['lines'], workspace
)
for cls in bfOut:
if cls not in mergeOut:
mergeOut[cls] = bfOut[cls]
else:
mergeOut[cls] += bfOut[cls]
# ************************************************************************ #
# 7 - Assign untagged Buildings to tags #
# ************************************************************************ #
if nomenclature != "GLOBE_LAND_30":
buildsOut = vector_assign_pntags_to_build(
osm_db, nomenclature, osmTableData['points'],
osmTableData['polygons'], workspace
)
for cls in buildsOut:
if cls not in mergeOut:
mergeOut[cls] = [buildsOut[cls]]
else:
mergeOut[cls].append(buildsOut[cls])
# ************************************************************************ #
# Produce LULC Map #
# ************************************************************************ #
"""
Merge all results for one cls into one feature class
mergeOut = {
cls_code : [fc_name, fc_name, ...], ...
}
into
mergeOut = {
cls_code : patched_fc, ...
}
"""
for cls in mergeOut:
if len(mergeOut[cls]) == 1:
mergeOut[cls] = mergeOut[cls][0]
else:
for i in range(1, len(mergeOut[cls])):
union(mergeOut[cls][i-1], mergeOut[cls][i],
"merge_{}_{}".format(str(cls), str(i))
)
mergeOut[cls][i] = "merge_{}_{}".format(str(cls), str(i))
mergeOut[cls] = mergeOut[cls][-1]
"""
Merge all Classes into one feature class using a priority rule
"""
__priorities = PRIORITIES[nomenclature]
# Erase overlapping areas by priority
for cls in range(len(__priorities)):
if cls + 1 == len(__priorities): break
if __priorities[cls] not in mergeOut: continue
else:
for i in range(cls+1, len(__priorities)):
if __priorities[i] not in mergeOut:
continue
else:
erase(
mergeOut[__priorities[i]],
mergeOut[__priorities[cls]],
"{}_{}".format(mergeOut[__priorities[i]], str(i))
)
mergeOut[__priorities[i]] = "{}_{}".format(
mergeOut[__priorities[i]], str(i)
)
# Export all classes
lst_merge = []
for cls in mergeOut:
reset_table(mergeOut[cls], {'cls' : 'varchar(5)'}, {'cls' : str(cls)})
convert(
mergeOut[cls],
os.path.join(workspace, "lulc_{}.gml".format(str(cls))),
geom_type='area'
)
lst_merge.append(os.path.join(workspace, "lulc_{}.gml".format(str(cls))))
ogr_merge(lst_merge, lulcShp, srs=epsg, fields_to_copy=['cls'])
return lulcShp
|
JoaquimPatriarca/senpy-for-gis
|
gasp/osm2lulc/grs.py
|
Python
|
gpl-3.0
| 17,992 | 0.008393 |
# -*- coding: utf-8 -*-
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('entries', '0005_resultsmode_json'),
]
operations = [
migrations.AlterField(
model_name='resultsmode',
name='json',
field=models.TextField(default='', blank=True),
),
]
|
mjtamlyn/archery-scoring
|
entries/migrations/0006_auto_20150612_2307.py
|
Python
|
bsd-3-clause
| 372 | 0 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for trainer hooks."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import numpy as np
from tensorflow.contrib.boosted_trees.python.utils import losses
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import googletest
class LossesTest(test_util.TensorFlowTestCase):
def test_per_example_exp_loss(self):
def _logit(p):
return np.log(p) - np.log(1 - p)
labels_positive = array_ops.ones([10, 1], dtypes.float32)
weights = array_ops.ones([10, 1], dtypes.float32)
labels_negative = array_ops.zeros([10, 1], dtypes.float32)
predictions_probs = np.array(
[[0.1], [0.2], [0.3], [0.4], [0.5], [0.6], [0.7], [0.8], [0.9], [0.99]],
dtype=np.float32)
prediction_logits = _logit(predictions_probs)
eps = 0.2
with self.test_session():
predictions_tensor = constant_op.constant(
prediction_logits, dtype=dtypes.float32)
loss_for_positives, _ = losses.per_example_exp_loss(
labels_positive, weights, predictions_tensor, eps=eps)
loss_for_negatives, _ = losses.per_example_exp_loss(
labels_negative, weights, predictions_tensor, eps=eps)
pos_loss = loss_for_positives.eval()
neg_loss = loss_for_negatives.eval()
# For positive labels, points <= 0.3 get max loss of e.
# For negative labels, these points have minimum loss of 1/e.
for i in range(2):
self.assertEqual(math.exp(1), pos_loss[i])
self.assertEqual(math.exp(-1), neg_loss[i])
      # For positive labels, points with predictions 0.7 and larger get minimum
# loss value of 1/e. For negative labels, these points are wrongly
# classified and get loss e.
for i in range(6, 10):
self.assertEqual(math.exp(-1), pos_loss[i])
self.assertEqual(math.exp(1), neg_loss[i])
      # Points between 0.5-eps and 0.5+eps get loss exp(-label_m*y), where
      # y = x/eps - 0.5/eps, x is the predicted probability and label_m is either
      # 1 or -1 (for a label of 0).
for i in range(2, 6):
self.assertAlmostEqual(
math.exp(-1.0 * (predictions_probs[i] * 1.0 / eps - 0.5 / eps)),
pos_loss[i])
self.assertAlmostEqual(
math.exp(1.0 * (predictions_probs[i] * 1.0 / eps - 0.5 / eps)),
neg_loss[i])
def test_per_example_squared_loss(self):
def _squared_loss(p, y):
return np.mean(1.0 * (p - y) * (p - y))
labels = np.array([[0.123], [224.2], [-3], [2], [.3]], dtype=np.float32)
weights = array_ops.ones([5, 1], dtypes.float32)
predictions = np.array(
[[0.123], [23.2], [233], [52], [3]], dtype=np.float32)
with self.test_session():
loss_tensor, _ = losses.per_example_squared_loss(labels, weights,
predictions)
loss = loss_tensor.eval()
for i in range(5):
self.assertAlmostEqual(
_squared_loss(labels[i], predictions[i]), loss[i])
if __name__ == "__main__":
googletest.main()
|
mixturemodel-flow/tensorflow
|
tensorflow/contrib/boosted_trees/python/utils/losses_test.py
|
Python
|
apache-2.0
| 3,957 | 0.006823 |
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import pytest
from pants.backend.core.tasks.list_goals import ListGoals
from pants.backend.core.tasks.task import Task
from pants.goal.goal import Goal
from pants.goal.task_registrar import TaskRegistrar
from pants_test.tasks.task_test_base import ConsoleTaskTestBase
class ListGoalsTest(ConsoleTaskTestBase):
_INSTALLED_HEADER = 'Installed goals:'
_UNDOCUMENTED_HEADER = 'Undocumented goals:'
_LIST_GOALS_NAME = 'goals'
_LIST_GOALS_DESC = 'List all documented goals.'
_LLAMA_NAME = 'llama'
_LLAMA_DESC = 'With such handsome fiber, no wonder everyone loves Llamas.'
_ALPACA_NAME = 'alpaca'
@classmethod
def task_type(cls):
return ListGoals
class LlamaTask(Task):
pass
class AlpacaTask(Task):
pass
def test_list_goals(self):
Goal.clear()
self.assert_console_output(self._INSTALLED_HEADER)
TaskRegistrar(name=self._LIST_GOALS_NAME, action=ListGoals)\
.install().with_description(self._LIST_GOALS_DESC)
self.assert_console_output(
self._INSTALLED_HEADER,
' {0}: {1}'.format(self._LIST_GOALS_NAME, self._LIST_GOALS_DESC),
)
TaskRegistrar(name=self._LLAMA_NAME, action=ListGoalsTest.LlamaTask)\
.install().with_description(self._LLAMA_DESC)
self.assert_console_output(
self._INSTALLED_HEADER,
' {0}: {1}'.format(self._LIST_GOALS_NAME, self._LIST_GOALS_DESC),
' {0}: {1}'.format(self._LLAMA_NAME, self._LLAMA_DESC),
)
TaskRegistrar(name=self._ALPACA_NAME, action=ListGoalsTest.AlpacaTask, dependencies=[self._LLAMA_NAME])\
.install()
self.assert_console_output(
self._INSTALLED_HEADER,
' {0}: {1}'.format(self._LIST_GOALS_NAME, self._LIST_GOALS_DESC),
' {0}: {1}'.format(self._LLAMA_NAME, self._LLAMA_DESC),
)
def test_list_goals_all(self):
Goal.clear()
TaskRegistrar(name=self._LIST_GOALS_NAME, action=ListGoals)\
.install().with_description(self._LIST_GOALS_DESC)
TaskRegistrar(name=self._LLAMA_NAME, action=ListGoalsTest.LlamaTask)\
.install().with_description(self._LLAMA_DESC)
TaskRegistrar(name=self._ALPACA_NAME, action=ListGoalsTest.AlpacaTask, dependencies=[self._LLAMA_NAME])\
.install()
self.assert_console_output(
self._INSTALLED_HEADER,
' {0}: {1}'.format(self._LIST_GOALS_NAME, self._LIST_GOALS_DESC),
' {0}: {1}'.format(self._LLAMA_NAME, self._LLAMA_DESC),
'',
self._UNDOCUMENTED_HEADER,
' {0}'.format(self._ALPACA_NAME),
options={ 'all': True }
)
# TODO(John Sirois): Re-enable when fixing up ListGoals `--graph` in
# https://github.com/pantsbuild/pants/issues/918
@pytest.mark.xfail
def test_list_goals_graph(self):
Goal.clear()
TaskRegistrar(name=self._LIST_GOALS_NAME, action=ListGoals)\
.install().with_description(self._LIST_GOALS_DESC)
TaskRegistrar(name=self._LLAMA_NAME, action=ListGoalsTest.LlamaTask)\
.install().with_description(self._LLAMA_DESC)
TaskRegistrar(name=self._ALPACA_NAME, action=ListGoalsTest.AlpacaTask, dependencies=[self._LLAMA_NAME])\
.install()
self.assert_console_output(
'digraph G {\n rankdir=LR;\n graph [compound=true];',
' subgraph cluster_goals {\n node [style=filled];\n color = blue;\n label = "goals";',
' goals_goals [label="goals"];',
' }',
' subgraph cluster_llama {\n node [style=filled];\n color = blue;\n label = "llama";',
' llama_llama [label="llama"];',
' }',
' subgraph cluster_alpaca {\n node [style=filled];\n color = blue;\n label = "alpaca";',
' alpaca_alpaca [label="alpaca"];',
' }',
' alpaca_alpaca -> llama_llama [ltail=cluster_alpaca lhead=cluster_llama];',
'}',
options={ 'graph': True }
)
|
pgroudas/pants
|
tests/python/pants_test/tasks/test_list_goals.py
|
Python
|
apache-2.0
| 4,088 | 0.009295 |
import json
import argparse
import os
from listener.config import MIN_TIME, MAX_TIME, PRESSURE_AVERAGE_AMOUNT
from listener.replayer import Replayer
from listener.utilities import convert_time, average
from listener.calculate import (calculate_temp_NTC, calculate_press,
calculate_height, calculate_gyr)
from collections import deque
from io import StringIO
parser = argparse.ArgumentParser(prog="Replayer",
description="Replay a CanSat log file for "
"listener.")
parser.add_argument("input_file")
args = parser.parse_args()
input_file = os.path.abspath(args.input_file)
input_handle = open(input_file, "r")
out_file = "static.json"
out_handle = open(out_file, "w")
replayer = Replayer(MIN_TIME, MAX_TIME, input_handle, StringIO(),
False, True)
full_data = replayer.start()
last_pressure_values = deque(maxlen=PRESSURE_AVERAGE_AMOUNT)
data_temperature = []
data_pressure = []
data_height = []
data_gyro = []
for datapoint in full_data:
if not MIN_TIME <= datapoint["Time"] <= MAX_TIME:
continue # Skip
pressure = calculate_press(datapoint["Press"])
last_pressure_values.append(pressure)
time = convert_time(datapoint["Time"] - MIN_TIME)
data_temperature.append([time, calculate_temp_NTC(datapoint["NTC"])])
pressure = average(last_pressure_values)
data_pressure.append([time, pressure])
data_height.append([time, calculate_height(pressure)])
data_gyro.append([time, calculate_gyr(datapoint["GyrZ"])])
all_data = {
"Temp": data_temperature,
"Press": data_pressure,
"Height": data_height,
"Gyro": data_gyro
}
out_handle.write(json.dumps(all_data))
input_handle.close()
out_handle.close()
|
dkkline/CanSat14-15
|
unused_code/generate_static_json_data.py
|
Python
|
mit
| 1,797 | 0 |
## Automatically adapted for numpy.oldnumeric Apr 14, 2008 by -c
from builtins import range
def writeMeshMatlabFormat(mesh,meshFileBase):
"""
build array data structures for matlab finite element mesh representation
    and write to a file to view and play with in matlab
    in matlab you can then plot the mesh with
pdemesh(p,e,t)
where
p is the vertex or point matrix
e is the edge matrix, and
t is the element matrix
points matrix is [2 x num vertices]
format :
row 1 = x coord,
row 2 = y coord for nodes in mesh
edge matrix is [7 x num edges]
format:
row 1 = start vertex number
row 2 = end vertex number
row 3 = start value in edge parameterization, should be 0
row 4 = end value in edge parameterization, should be 1
row 5 = global edge id, base 1
row 6 = subdomain on left? always 1 for now
row 7 = subdomain on right? always 0 for now
element matrix is [4 x num elements]
row 1 = vertex 1 global number
row 2 = vertex 2 global number
row 3 = vertex 3 global number
row 4 = triangle subdomain number
where 1,2,3 is a local counter clockwise numbering of vertices in
triangle
"""
import numpy as numpy
matlabBase = 1
p = numpy.zeros((2,mesh['nNodes_global']),numpy.float_)
e = numpy.zeros((7,mesh['nElementBoundaries_global']),numpy.float_)
t = numpy.zeros((4,mesh['nElements_global']),numpy.float_)
#load p,e,t and write file
mfile = open(meshFileBase+'.m','w')
mfile.write('p = [ ... \n')
for nN in range(mesh['nNodes_global']):
p[0,nN]=mesh['nodeArray'][nN,0]
p[1,nN]=mesh['nodeArray'][nN,1]
mfile.write('%g %g \n' % tuple(p[:,nN]))
mfile.write(']; \n')
mfile.write("p = p\';\n") #need transpose for matlab
mfile.write('e = [ ... \n')
for ebN in range(mesh['nElementBoundaries_global']):
e[0,ebN]=mesh['elementBoundaryNodesArray'][ebN,0] + matlabBase #global node number of start node base 1
e[1,ebN]=mesh['elementBoundaryNodesArray'][ebN,1] + matlabBase #global node number of end node base 1
e[2,ebN]=0.0 #edge param. is 0 to 1
e[3,ebN]=1.0
e[4,ebN]=ebN + matlabBase #global edge number base 1
e[5,ebN]=0 #subdomain to left
e[6,ebN]=1 #subdomain to right
mfile.write('%g %g %g %g %g %g %g \n' % tuple(e[:,ebN]))
mfile.write(']; \n')
mfile.write("e = e\';\n") #need transpose for matlab
#write triangles last
mfile.write('t = [ ... \n')
for eN in range(mesh['nElements_global']):
t[0,eN]=mesh['elementNodesArray'][eN,0]+matlabBase #global node number for vertex 0
t[1,eN]=mesh['elementNodesArray'][eN,1]+matlabBase #global node number for vertex 0
t[2,eN]=mesh['elementNodesArray'][eN,2]+matlabBase #global node number for vertex 0
t[3,eN]=1 #subdomain id
mfile.write('%g %g %g %g \n' % tuple(t[:,eN]))
mfile.write(']; \n');
mfile.write("t = t\';\n") #need transpose for matlab
mfile.close()
return p,e,t
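# Illustrative call (the mesh dict keys follow the usage above; the file name is an
# assumption):
#   p, e, t = writeMeshMatlabFormat(mesh, 'myMesh')
# which writes myMesh.m; running that script in MATLAB and then pdemesh(p, e, t)
# draws the triangulation.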
########################################################################
if __name__ == '__main__':
import os,shelve
import ppmatlab,numpy.oldnumeric as numpy
os.listdir('./results')
filename = './results/re_forsyth2_ss_2d_pre_forsyth2_ss_2d_c0p1_n_mesh_results.dat'
res = shelve.open(filename)
mesh = res['mesh']
mmfile = 'forsyth2MeshMatlab'
p,e,t = ppmatlab.writeMeshMatlabFormat(mesh,mmfile)
|
erdc/proteus
|
scripts/ppmatlab.py
|
Python
|
mit
| 3,523 | 0.023843 |
import sys
import vtk
from vtk.test import Testing
class TestGhostPoints(Testing.vtkTest):
def testLinear(self):
pts = vtk.vtkPoints()
pts.SetNumberOfPoints(4)
pts.InsertPoint(0, (0, 0, 0))
pts.InsertPoint(1, (1, 0, 0))
pts.InsertPoint(2, (0.5, 1, 0))
pts.InsertPoint(3, (0.5, 0.5, 1))
te = vtk.vtkTetra()
ptIds = te.GetPointIds()
for i in range(4):
ptIds.SetId(i, i)
ghosts = vtk.vtkUnsignedCharArray()
ghosts.SetName(vtk.vtkDataSetAttributes.GhostArrayName())
ghosts.SetNumberOfTuples(4)
ghosts.SetValue(0, 1)
ghosts.SetValue(1, 1)
ghosts.SetValue(2, 1)
ghosts.SetValue(3, 0)
grid = vtk.vtkUnstructuredGrid()
grid.Allocate(1, 1)
grid.InsertNextCell(te.GetCellType(), te.GetPointIds())
grid.SetPoints(pts)
grid.GetPointData().AddArray(ghosts)
dss = vtk.vtkDataSetSurfaceFilter()
dss.SetInputData(grid)
dss.Update()
self.assertEqual(dss.GetOutput().GetNumberOfCells(), 3)
def testNonLinear(self):
pts = vtk.vtkPoints()
pts.SetNumberOfPoints(10)
pts.InsertPoint(0, (0, 0, 0))
pts.InsertPoint(1, (1, 0, 0))
pts.InsertPoint(2, (0.5, 1, 0))
pts.InsertPoint(3, (0.5, 0.5, 1))
pts.InsertPoint(4, (0.5, 0, 0))
pts.InsertPoint(5, (1.25, 0.5, 0))
pts.InsertPoint(6, (0.25, 0.5, 0))
pts.InsertPoint(7, (0.25, 0.25, 0.5))
pts.InsertPoint(8, (0.75, 0.25, 0.5))
pts.InsertPoint(9, (0.5, 0.75, 0.5))
te = vtk.vtkQuadraticTetra()
ptIds = te.GetPointIds()
for i in range(10):
ptIds.SetId(i, i)
ghosts = vtk.vtkUnsignedCharArray()
ghosts.SetName(vtk.vtkDataSetAttributes.GhostArrayName())
ghosts.SetNumberOfTuples(10)
ghosts.SetValue(0, 1)
ghosts.SetValue(1, 1)
ghosts.SetValue(2, 1)
ghosts.SetValue(3, 0)
ghosts.SetValue(4, 1)
ghosts.SetValue(5, 1)
ghosts.SetValue(6, 1)
ghosts.SetValue(7, 0)
ghosts.SetValue(8, 0)
grid = vtk.vtkUnstructuredGrid()
grid.Allocate(1, 1)
grid.InsertNextCell(te.GetCellType(), te.GetPointIds())
grid.SetPoints(pts)
grid.GetPointData().AddArray(ghosts)
ugg = vtk.vtkUnstructuredGridGeometryFilter()
ugg.SetInputData(grid)
dss = vtk.vtkDataSetSurfaceFilter()
dss.SetNonlinearSubdivisionLevel(2)
dss.SetInputConnection(ugg.GetOutputPort())
dss.Update()
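        # Each surviving face of the quadratic tetra is subdivided into 4**2 = 16
        # triangles at nonlinear subdivision level 2; with 3 faces kept, that gives
        # the expected 48 output cells.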
self.assertEqual(dss.GetOutput().GetNumberOfCells(), 48)
if __name__ == "__main__":
Testing.main([(TestGhostPoints, 'test')])
|
hlzz/dotfiles
|
graphics/VTK-7.0.0/Filters/Geometry/Testing/Python/TestGhostPoints.py
|
Python
|
bsd-3-clause
| 2,835 | 0.000705 |
# encoding: utf-8
# module gi._gobject._gobject
# from /usr/lib/python2.7/dist-packages/gi/_gobject/_gobject.so
# by generator 1.130
# no doc
# no imports
# Variables with simple values
G_MAXDOUBLE = 1.7976931348623157e+308
G_MAXFLOAT = 3.4028234663852886e+38
G_MAXINT = 2147483647
G_MAXINT16 = 32767
G_MAXINT32 = 2147483647
G_MAXINT64 = 9223372036854775807L
G_MAXINT8 = 127
G_MAXLONG = 2147483647L
G_MAXOFFSET = 9223372036854775807L
G_MAXSHORT = 32767
G_MAXSIZE = 4294967295L
G_MAXSSIZE = 2147483647L
G_MAXUINT = 4294967295L
G_MAXUINT16 = 65535
G_MAXUINT32 = 4294967295L
G_MAXUINT64 = 18446744073709551615L
G_MAXUINT8 = 255
G_MAXULONG = 4294967295L
G_MAXUSHORT = 65535
G_MINDOUBLE = 2.2250738585072014e-308
G_MINFLOAT = 1.1754943508222875e-38
G_MININT = -2147483648
G_MININT16 = -32768
G_MININT32 = -2147483648
G_MININT64 = -9223372036854775808L
G_MININT8 = -128
G_MINLONG = -2147483648L
G_MINOFFSET = -9223372036854775808L
G_MINSHORT = -32768
PARAM_CONSTRUCT = 4
PARAM_CONSTRUCT_ONLY = 8
PARAM_LAX_VALIDATION = 16
PARAM_READABLE = 1
PARAM_READWRITE = 3
PARAM_WRITABLE = 2
SIGNAL_ACTION = 32
SIGNAL_DETAILED = 16
SIGNAL_NO_HOOKS = 64
SIGNAL_NO_RECURSE = 8
SIGNAL_RUN_CLEANUP = 4
SIGNAL_RUN_FIRST = 1
SIGNAL_RUN_LAST = 2
# functions
def add_emission_hook(*args, **kwargs): # real signature unknown
pass
def list_properties(*args, **kwargs): # real signature unknown
pass
def new(*args, **kwargs): # real signature unknown
pass
def remove_emission_hook(*args, **kwargs): # real signature unknown
pass
def signal_accumulator_true_handled(*args, **kwargs): # real signature unknown
pass
def signal_list_ids(*args, **kwargs): # real signature unknown
pass
def signal_list_names(*args, **kwargs): # real signature unknown
pass
def signal_lookup(*args, **kwargs): # real signature unknown
pass
def signal_name(*args, **kwargs): # real signature unknown
pass
def signal_new(*args, **kwargs): # real signature unknown
pass
def signal_query(*args, **kwargs): # real signature unknown
pass
def threads_init(*args, **kwargs): # real signature unknown
pass
def type_children(*args, **kwargs): # real signature unknown
pass
def type_from_name(*args, **kwargs): # real signature unknown
pass
def type_interfaces(*args, **kwargs): # real signature unknown
pass
def type_is_a(*args, **kwargs): # real signature unknown
pass
def type_name(*args, **kwargs): # real signature unknown
pass
def type_parent(*args, **kwargs): # real signature unknown
pass
def type_register(*args, **kwargs): # real signature unknown
pass
def _install_metaclass(*args, **kwargs): # real signature unknown
pass
# classes
class GBoxed(object):
# no doc
def copy(self, *args, **kwargs): # real signature unknown
pass
def __eq__(self, y): # real signature unknown; restored from __doc__
""" x.__eq__(y) <==> x==y """
pass
def __ge__(self, y): # real signature unknown; restored from __doc__
""" x.__ge__(y) <==> x>=y """
pass
def __gt__(self, y): # real signature unknown; restored from __doc__
""" x.__gt__(y) <==> x>y """
pass
def __hash__(self): # real signature unknown; restored from __doc__
""" x.__hash__() <==> hash(x) """
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
def __le__(self, y): # real signature unknown; restored from __doc__
""" x.__le__(y) <==> x<=y """
pass
def __lt__(self, y): # real signature unknown; restored from __doc__
""" x.__lt__(y) <==> x<y """
pass
@staticmethod # known case of __new__
def __new__(S, *more): # real signature unknown; restored from __doc__
""" T.__new__(S, ...) -> a new object with type S, a subtype of T """
pass
def __ne__(self, y): # real signature unknown; restored from __doc__
""" x.__ne__(y) <==> x!=y """
pass
def __repr__(self): # real signature unknown; restored from __doc__
""" x.__repr__() <==> repr(x) """
pass
__gtype__ = None # (!) real value is ''
class GEnum(int):
# no doc
def __eq__(self, y): # real signature unknown; restored from __doc__
""" x.__eq__(y) <==> x==y """
pass
def __ge__(self, y): # real signature unknown; restored from __doc__
""" x.__ge__(y) <==> x>=y """
pass
def __gt__(self, y): # real signature unknown; restored from __doc__
""" x.__gt__(y) <==> x>y """
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
def __le__(self, y): # real signature unknown; restored from __doc__
""" x.__le__(y) <==> x<=y """
pass
def __lt__(self, y): # real signature unknown; restored from __doc__
""" x.__lt__(y) <==> x<y """
pass
@staticmethod # known case of __new__
def __new__(S, *more): # real signature unknown; restored from __doc__
""" T.__new__(S, ...) -> a new object with type S, a subtype of T """
pass
def __ne__(self, y): # real signature unknown; restored from __doc__
""" x.__ne__(y) <==> x!=y """
pass
def __reduce__(self, *args, **kwargs): # real signature unknown
pass
def __repr__(self): # real signature unknown; restored from __doc__
""" x.__repr__() <==> repr(x) """
pass
def __str__(self): # real signature unknown; restored from __doc__
""" x.__str__() <==> str(x) """
pass
value_name = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
value_nick = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
__gtype__ = None # (!) real value is ''
class GFlags(int):
# no doc
def __add__(self, y): # real signature unknown; restored from __doc__
""" x.__add__(y) <==> x+y """
pass
def __and__(self, y): # real signature unknown; restored from __doc__
""" x.__and__(y) <==> x&y """
pass
def __divmod__(self, y): # real signature unknown; restored from __doc__
""" x.__divmod__(y) <==> divmod(x, y) """
pass
def __div__(self, y): # real signature unknown; restored from __doc__
""" x.__div__(y) <==> x/y """
pass
def __eq__(self, y): # real signature unknown; restored from __doc__
""" x.__eq__(y) <==> x==y """
pass
def __ge__(self, y): # real signature unknown; restored from __doc__
""" x.__ge__(y) <==> x>=y """
pass
def __gt__(self, y): # real signature unknown; restored from __doc__
""" x.__gt__(y) <==> x>y """
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
def __le__(self, y): # real signature unknown; restored from __doc__
""" x.__le__(y) <==> x<=y """
pass
def __lt__(self, y): # real signature unknown; restored from __doc__
""" x.__lt__(y) <==> x<y """
pass
def __mod__(self, y): # real signature unknown; restored from __doc__
""" x.__mod__(y) <==> x%y """
pass
def __mul__(self, y): # real signature unknown; restored from __doc__
""" x.__mul__(y) <==> x*y """
pass
@staticmethod # known case of __new__
def __new__(S, *more): # real signature unknown; restored from __doc__
""" T.__new__(S, ...) -> a new object with type S, a subtype of T """
pass
def __ne__(self, y): # real signature unknown; restored from __doc__
""" x.__ne__(y) <==> x!=y """
pass
def __or__(self, y): # real signature unknown; restored from __doc__
""" x.__or__(y) <==> x|y """
pass
def __pow__(self, y, z=None): # real signature unknown; restored from __doc__
""" x.__pow__(y[, z]) <==> pow(x, y[, z]) """
pass
def __radd__(self, y): # real signature unknown; restored from __doc__
""" x.__radd__(y) <==> y+x """
pass
def __rand__(self, y): # real signature unknown; restored from __doc__
""" x.__rand__(y) <==> y&x """
pass
def __rdivmod__(self, y): # real signature unknown; restored from __doc__
""" x.__rdivmod__(y) <==> divmod(y, x) """
pass
def __rdiv__(self, y): # real signature unknown; restored from __doc__
""" x.__rdiv__(y) <==> y/x """
pass
def __repr__(self): # real signature unknown; restored from __doc__
""" x.__repr__() <==> repr(x) """
pass
def __rmod__(self, y): # real signature unknown; restored from __doc__
""" x.__rmod__(y) <==> y%x """
pass
def __rmul__(self, y): # real signature unknown; restored from __doc__
""" x.__rmul__(y) <==> y*x """
pass
def __ror__(self, y): # real signature unknown; restored from __doc__
""" x.__ror__(y) <==> y|x """
pass
def __rpow__(self, x, z=None): # real signature unknown; restored from __doc__
""" y.__rpow__(x[, z]) <==> pow(x, y[, z]) """
pass
def __rsub__(self, y): # real signature unknown; restored from __doc__
""" x.__rsub__(y) <==> y-x """
pass
def __rxor__(self, y): # real signature unknown; restored from __doc__
""" x.__rxor__(y) <==> y^x """
pass
def __str__(self): # real signature unknown; restored from __doc__
""" x.__str__() <==> str(x) """
pass
def __sub__(self, y): # real signature unknown; restored from __doc__
""" x.__sub__(y) <==> x-y """
pass
def __xor__(self, y): # real signature unknown; restored from __doc__
""" x.__xor__(y) <==> x^y """
pass
first_value_name = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
first_value_nick = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
value_names = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
value_nicks = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
__gtype__ = None # (!) real value is ''
class GInterface(object):
""" Interface GInterface """
def __init__(self, *args, **kwargs): # real signature unknown
pass
@staticmethod # known case of __new__
def __new__(S, *more): # real signature unknown; restored from __doc__
""" T.__new__(S, ...) -> a new object with type S, a subtype of T """
pass
__gdoc__ = 'Interface GInterface\n\n'
__gtype__ = None # (!) real value is ''
class GObject(object):
"""
Object GObject
Signals from GObject:
notify (GParam)
"""
def chain(self, *args, **kwargs): # real signature unknown
pass
def connect(self, *args, **kwargs): # real signature unknown
pass
def connect_after(self, *args, **kwargs): # real signature unknown
pass
def connect_object(self, *args, **kwargs): # real signature unknown
pass
def connect_object_after(self, *args, **kwargs): # real signature unknown
pass
def disconnect(self, *args, **kwargs): # real signature unknown
pass
def disconnect_by_func(self, *args, **kwargs): # real signature unknown
pass
def emit(self, *args, **kwargs): # real signature unknown
pass
def emit_stop_by_name(self, *args, **kwargs): # real signature unknown
pass
def freeze_notify(self, *args, **kwargs): # real signature unknown
pass
def get_data(self, *args, **kwargs): # real signature unknown
pass
def get_properties(self, *args, **kwargs): # real signature unknown
pass
def get_property(self, *args, **kwargs): # real signature unknown
pass
def handler_block(self, *args, **kwargs): # real signature unknown
pass
def handler_block_by_func(self, *args, **kwargs): # real signature unknown
pass
def handler_disconnect(self, *args, **kwargs): # real signature unknown
pass
def handler_is_connected(self, *args, **kwargs): # real signature unknown
pass
def handler_unblock(self, *args, **kwargs): # real signature unknown
pass
def handler_unblock_by_func(self, *args, **kwargs): # real signature unknown
pass
def notify(self, *args, **kwargs): # real signature unknown
pass
def set_data(self, *args, **kwargs): # real signature unknown
pass
def set_properties(self, *args, **kwargs): # real signature unknown
pass
def set_property(self, *args, **kwargs): # real signature unknown
pass
def stop_emission(self, *args, **kwargs): # real signature unknown
pass
def thaw_notify(self, *args, **kwargs): # real signature unknown
pass
def weak_ref(self, *args, **kwargs): # real signature unknown
pass
def __copy__(self, *args, **kwargs): # real signature unknown
pass
def __deepcopy__(self, *args, **kwargs): # real signature unknown
pass
def __delattr__(self, name): # real signature unknown; restored from __doc__
""" x.__delattr__('name') <==> del x.name """
pass
def __eq__(self, y): # real signature unknown; restored from __doc__
""" x.__eq__(y) <==> x==y """
pass
def __ge__(self, y): # real signature unknown; restored from __doc__
""" x.__ge__(y) <==> x>=y """
pass
def __gt__(self, y): # real signature unknown; restored from __doc__
""" x.__gt__(y) <==> x>y """
pass
def __hash__(self): # real signature unknown; restored from __doc__
""" x.__hash__() <==> hash(x) """
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
def __le__(self, y): # real signature unknown; restored from __doc__
""" x.__le__(y) <==> x<=y """
pass
def __lt__(self, y): # real signature unknown; restored from __doc__
""" x.__lt__(y) <==> x<y """
pass
@staticmethod # known case of __new__
def __new__(S, *more): # real signature unknown; restored from __doc__
""" T.__new__(S, ...) -> a new object with type S, a subtype of T """
pass
def __ne__(self, y): # real signature unknown; restored from __doc__
""" x.__ne__(y) <==> x!=y """
pass
def __repr__(self): # real signature unknown; restored from __doc__
""" x.__repr__() <==> repr(x) """
pass
def __setattr__(self, name, value): # real signature unknown; restored from __doc__
""" x.__setattr__('name', value) <==> x.name = value """
pass
__grefcount__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
props = None # (!) real value is ''
__dict__ = None # (!) real value is ''
__gdoc__ = 'Object GObject\n\nSignals from GObject:\n notify (GParam)\n\n'
__gtype__ = None # (!) real value is ''
class GObjectWeakRef(object):
""" A GObject weak reference """
def unref(self, *args, **kwargs): # real signature unknown
pass
def __call__(self, *more): # real signature unknown; restored from __doc__
""" x.__call__(...) <==> x(...) """
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
class GParamSpec(object):
# no doc
def __eq__(self, y): # real signature unknown; restored from __doc__
""" x.__eq__(y) <==> x==y """
pass
def __ge__(self, y): # real signature unknown; restored from __doc__
""" x.__ge__(y) <==> x>=y """
pass
def __gt__(self, y): # real signature unknown; restored from __doc__
""" x.__gt__(y) <==> x>y """
pass
def __hash__(self): # real signature unknown; restored from __doc__
""" x.__hash__() <==> hash(x) """
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
def __le__(self, y): # real signature unknown; restored from __doc__
""" x.__le__(y) <==> x<=y """
pass
def __lt__(self, y): # real signature unknown; restored from __doc__
""" x.__lt__(y) <==> x<y """
pass
def __ne__(self, y): # real signature unknown; restored from __doc__
""" x.__ne__(y) <==> x!=y """
pass
def __repr__(self): # real signature unknown; restored from __doc__
""" x.__repr__() <==> repr(x) """
pass
class GPointer(object):
# no doc
def __eq__(self, y): # real signature unknown; restored from __doc__
""" x.__eq__(y) <==> x==y """
pass
def __ge__(self, y): # real signature unknown; restored from __doc__
""" x.__ge__(y) <==> x>=y """
pass
def __gt__(self, y): # real signature unknown; restored from __doc__
""" x.__gt__(y) <==> x>y """
pass
def __hash__(self): # real signature unknown; restored from __doc__
""" x.__hash__() <==> hash(x) """
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
def __le__(self, y): # real signature unknown; restored from __doc__
""" x.__le__(y) <==> x<=y """
pass
def __lt__(self, y): # real signature unknown; restored from __doc__
""" x.__lt__(y) <==> x<y """
pass
@staticmethod # known case of __new__
def __new__(S, *more): # real signature unknown; restored from __doc__
""" T.__new__(S, ...) -> a new object with type S, a subtype of T """
pass
def __ne__(self, y): # real signature unknown; restored from __doc__
""" x.__ne__(y) <==> x!=y """
pass
def __repr__(self): # real signature unknown; restored from __doc__
""" x.__repr__() <==> repr(x) """
pass
__gtype__ = None # (!) real value is ''
class GType(object):
# no doc
def from_name(self, *args, **kwargs): # real signature unknown
pass
def has_value_table(self, *args, **kwargs): # real signature unknown
pass
def is_a(self, *args, **kwargs): # real signature unknown
pass
def is_abstract(self, *args, **kwargs): # real signature unknown
pass
def is_classed(self, *args, **kwargs): # real signature unknown
pass
def is_deep_derivable(self, *args, **kwargs): # real signature unknown
pass
def is_derivable(self, *args, **kwargs): # real signature unknown
pass
def is_instantiatable(self, *args, **kwargs): # real signature unknown
pass
def is_interface(self, *args, **kwargs): # real signature unknown
pass
def is_value_abstract(self, *args, **kwargs): # real signature unknown
pass
def is_value_type(self, *args, **kwargs): # real signature unknown
pass
def __eq__(self, y): # real signature unknown; restored from __doc__
""" x.__eq__(y) <==> x==y """
pass
def __ge__(self, y): # real signature unknown; restored from __doc__
""" x.__ge__(y) <==> x>=y """
pass
def __gt__(self, y): # real signature unknown; restored from __doc__
""" x.__gt__(y) <==> x>y """
pass
def __hash__(self): # real signature unknown; restored from __doc__
""" x.__hash__() <==> hash(x) """
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
def __le__(self, y): # real signature unknown; restored from __doc__
""" x.__le__(y) <==> x<=y """
pass
def __lt__(self, y): # real signature unknown; restored from __doc__
""" x.__lt__(y) <==> x<y """
pass
@staticmethod # known case of __new__
def __new__(S, *more): # real signature unknown; restored from __doc__
""" T.__new__(S, ...) -> a new object with type S, a subtype of T """
pass
def __ne__(self, y): # real signature unknown; restored from __doc__
""" x.__ne__(y) <==> x!=y """
pass
def __repr__(self): # real signature unknown; restored from __doc__
""" x.__repr__() <==> repr(x) """
pass
children = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
depth = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
fundamental = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
interfaces = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
name = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
parent = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
pytype = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
class Warning(Warning):
# no doc
def __init__(self, *args, **kwargs): # real signature unknown
pass
__weakref__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""list of weak references to the object (if defined)"""
# variables with complex values
features = {
'generic-c-marshaller': True,
}
pygobject_version = (
3,
2,
0,
)
TYPE_GSTRING = None # (!) real value is ''
TYPE_INVALID = None # (!) real value is ''
_PyGObject_API = None # (!) real value is ''
|
akiokio/centralfitestoque
|
src/.pycharm_helpers/python_stubs/-1807332816/gi/_gobject/_gobject.py
|
Python
|
bsd-2-clause
| 21,406 | 0.010511 |
"""Gaussian processes regression."""
# Authors: Jan Hendrik Metzen <jhm@informatik.uni-bremen.de>
# Modified by: Pete Green <p.l.green@liverpool.ac.uk>
# License: BSD 3 clause
import warnings
from operator import itemgetter
import numpy as np
from scipy.linalg import cholesky, cho_solve
import scipy.optimize
from ..base import BaseEstimator, RegressorMixin, clone
from ..base import MultiOutputMixin
from .kernels import RBF, ConstantKernel as C
from ..preprocessing._data import _handle_zeros_in_scale
from ..utils import check_random_state
from ..utils.optimize import _check_optimize_result
class GaussianProcessRegressor(MultiOutputMixin,
RegressorMixin, BaseEstimator):
"""Gaussian process regression (GPR).
The implementation is based on Algorithm 2.1 of Gaussian Processes
for Machine Learning (GPML) by Rasmussen and Williams.
In addition to standard scikit-learn estimator API,
GaussianProcessRegressor:
* allows prediction without prior fitting (based on the GP prior)
* provides an additional method `sample_y(X)`, which evaluates samples
drawn from the GPR (prior or posterior) at given inputs
* exposes a method `log_marginal_likelihood(theta)`, which can be used
externally for other ways of selecting hyperparameters, e.g., via
Markov chain Monte Carlo.
Read more in the :ref:`User Guide <gaussian_process>`.
.. versionadded:: 0.18
Parameters
----------
kernel : kernel instance, default=None
The kernel specifying the covariance function of the GP. If None is
        passed, the kernel ``ConstantKernel(1.0, constant_value_bounds="fixed")
        * RBF(1.0, length_scale_bounds="fixed")`` is used as default. Note that
the kernel hyperparameters are optimized during fitting unless the
bounds are marked as "fixed".
alpha : float or ndarray of shape (n_samples,), default=1e-10
Value added to the diagonal of the kernel matrix during fitting.
This can prevent a potential numerical issue during fitting, by
ensuring that the calculated values form a positive definite matrix.
It can also be interpreted as the variance of additional Gaussian
measurement noise on the training observations. Note that this is
different from using a `WhiteKernel`. If an array is passed, it must
have the same number of entries as the data used for fitting and is
        used as a datapoint-dependent noise level. Allowing the noise level to
        be specified directly as a parameter is mainly for convenience and for
        consistency with Ridge.
optimizer : "fmin_l_bfgs_b" or callable, default="fmin_l_bfgs_b"
Can either be one of the internally supported optimizers for optimizing
the kernel's parameters, specified by a string, or an externally
defined optimizer passed as a callable. If a callable is passed, it
must have the signature::
def optimizer(obj_func, initial_theta, bounds):
# * 'obj_func': the objective function to be minimized, which
# takes the hyperparameters theta as a parameter and an
# optional flag eval_gradient, which determines if the
# gradient is returned additionally to the function value
# * 'initial_theta': the initial value for theta, which can be
# used by local optimizers
# * 'bounds': the bounds on the values of theta
....
# Returned are the best found hyperparameters theta and
# the corresponding value of the target function.
return theta_opt, func_min
Per default, the 'L-BFGS-B' algorithm from scipy.optimize.minimize
is used. If None is passed, the kernel's parameters are kept fixed.
Available internal optimizers are::
'fmin_l_bfgs_b'
n_restarts_optimizer : int, default=0
The number of restarts of the optimizer for finding the kernel's
parameters which maximize the log-marginal likelihood. The first run
of the optimizer is performed from the kernel's initial parameters,
the remaining ones (if any) from thetas sampled log-uniform randomly
from the space of allowed theta-values. If greater than 0, all bounds
must be finite. Note that n_restarts_optimizer == 0 implies that one
run is performed.
normalize_y : bool, default=False
        Whether the target values y are normalized, i.e., the mean and variance
        of the target values are set to 0 and 1 respectively. This is
recommended for cases where zero-mean, unit-variance priors are used.
Note that, in this implementation, the normalisation is reversed
before the GP predictions are reported.
.. versionchanged:: 0.23
copy_X_train : bool, default=True
If True, a persistent copy of the training data is stored in the
object. Otherwise, just a reference to the training data is stored,
which might cause predictions to change if the data is modified
externally.
random_state : int, RandomState instance or None, default=None
Determines random number generation used to initialize the centers.
Pass an int for reproducible results across multiple function calls.
See :term:`Glossary <random_state>`.
Attributes
----------
X_train_ : array-like of shape (n_samples, n_features) or list of object
Feature vectors or other representations of training data (also
required for prediction).
y_train_ : array-like of shape (n_samples,) or (n_samples, n_targets)
Target values in training data (also required for prediction)
kernel_ : kernel instance
The kernel used for prediction. The structure of the kernel is the
same as the one passed as parameter but with optimized hyperparameters
L_ : array-like of shape (n_samples, n_samples)
Lower-triangular Cholesky decomposition of the kernel in ``X_train_``
alpha_ : array-like of shape (n_samples,)
Dual coefficients of training data points in kernel space
log_marginal_likelihood_value_ : float
The log-marginal-likelihood of ``self.kernel_.theta``
n_features_in_ : int
Number of features seen during :term:`fit`.
.. versionadded:: 0.24
Examples
--------
>>> from sklearn.datasets import make_friedman2
>>> from sklearn.gaussian_process import GaussianProcessRegressor
>>> from sklearn.gaussian_process.kernels import DotProduct, WhiteKernel
>>> X, y = make_friedman2(n_samples=500, noise=0, random_state=0)
>>> kernel = DotProduct() + WhiteKernel()
>>> gpr = GaussianProcessRegressor(kernel=kernel,
... random_state=0).fit(X, y)
>>> gpr.score(X, y)
0.3680...
>>> gpr.predict(X[:2,:], return_std=True)
(array([653.0..., 592.1...]), array([316.6..., 316.6...]))
"""
def __init__(self, kernel=None, *, alpha=1e-10,
optimizer="fmin_l_bfgs_b", n_restarts_optimizer=0,
normalize_y=False, copy_X_train=True, random_state=None):
self.kernel = kernel
self.alpha = alpha
self.optimizer = optimizer
self.n_restarts_optimizer = n_restarts_optimizer
self.normalize_y = normalize_y
self.copy_X_train = copy_X_train
self.random_state = random_state
def fit(self, X, y):
"""Fit Gaussian process regression model.
Parameters
----------
X : array-like of shape (n_samples, n_features) or list of object
Feature vectors or other representations of training data.
y : array-like of shape (n_samples,) or (n_samples, n_targets)
Target values
Returns
-------
self : returns an instance of self.
"""
if self.kernel is None: # Use an RBF kernel as default
self.kernel_ = C(1.0, constant_value_bounds="fixed") \
* RBF(1.0, length_scale_bounds="fixed")
else:
self.kernel_ = clone(self.kernel)
self._rng = check_random_state(self.random_state)
if self.kernel_.requires_vector_input:
X, y = self._validate_data(X, y, multi_output=True, y_numeric=True,
ensure_2d=True, dtype="numeric")
else:
X, y = self._validate_data(X, y, multi_output=True, y_numeric=True,
ensure_2d=False, dtype=None)
# Normalize target value
if self.normalize_y:
self._y_train_mean = np.mean(y, axis=0)
self._y_train_std = _handle_zeros_in_scale(
np.std(y, axis=0), copy=False
)
# Remove mean and make unit variance
y = (y - self._y_train_mean) / self._y_train_std
else:
self._y_train_mean = np.zeros(1)
self._y_train_std = 1
if np.iterable(self.alpha) \
and self.alpha.shape[0] != y.shape[0]:
if self.alpha.shape[0] == 1:
self.alpha = self.alpha[0]
else:
raise ValueError("alpha must be a scalar or an array "
"with same number of entries as y. (%d != %d)"
% (self.alpha.shape[0], y.shape[0]))
self.X_train_ = np.copy(X) if self.copy_X_train else X
self.y_train_ = np.copy(y) if self.copy_X_train else y
if self.optimizer is not None and self.kernel_.n_dims > 0:
# Choose hyperparameters based on maximizing the log-marginal
# likelihood (potentially starting from several initial values)
def obj_func(theta, eval_gradient=True):
if eval_gradient:
lml, grad = self.log_marginal_likelihood(
theta, eval_gradient=True, clone_kernel=False)
return -lml, -grad
else:
return -self.log_marginal_likelihood(theta,
clone_kernel=False)
# First optimize starting from theta specified in kernel
optima = [(self._constrained_optimization(obj_func,
self.kernel_.theta,
self.kernel_.bounds))]
# Additional runs are performed from log-uniform chosen initial
# theta
if self.n_restarts_optimizer > 0:
if not np.isfinite(self.kernel_.bounds).all():
                    raise ValueError(
                        "Multiple optimizer restarts (n_restarts_optimizer>0) "
                        "require that all bounds are finite.")
bounds = self.kernel_.bounds
for iteration in range(self.n_restarts_optimizer):
theta_initial = \
self._rng.uniform(bounds[:, 0], bounds[:, 1])
optima.append(
self._constrained_optimization(obj_func, theta_initial,
bounds))
# Select result from run with minimal (negative) log-marginal
# likelihood
lml_values = list(map(itemgetter(1), optima))
self.kernel_.theta = optima[np.argmin(lml_values)][0]
self.kernel_._check_bounds_params()
self.log_marginal_likelihood_value_ = -np.min(lml_values)
else:
self.log_marginal_likelihood_value_ = \
self.log_marginal_likelihood(self.kernel_.theta,
clone_kernel=False)
# Precompute quantities required for predictions which are independent
# of actual query points
K = self.kernel_(self.X_train_)
K[np.diag_indices_from(K)] += self.alpha
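        # self.alpha (observation noise / jitter) is added to the diagonal so that K
        # stays positive definite for the Cholesky factorization below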
try:
self.L_ = cholesky(K, lower=True) # Line 2
except np.linalg.LinAlgError as exc:
exc.args = ("The kernel, %s, is not returning a "
"positive definite matrix. Try gradually "
"increasing the 'alpha' parameter of your "
"GaussianProcessRegressor estimator."
% self.kernel_,) + exc.args
raise
self.alpha_ = cho_solve((self.L_, True), self.y_train_) # Line 3
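        # alpha_ solves K @ alpha_ = y_train_ via the Cholesky factor, i.e. the dual
        # coefficients K^{-1} y reused by predict() for the posterior mean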
return self
def predict(self, X, return_std=False, return_cov=False):
"""Predict using the Gaussian process regression model
We can also predict based on an unfitted model by using the GP prior.
In addition to the mean of the predictive distribution, optionally also
returns its standard deviation (`return_std=True`) or covariance
(`return_cov=True`). Note that at most one of the two can be requested.
Parameters
----------
X : array-like of shape (n_samples, n_features) or list of object
Query points where the GP is evaluated.
return_std : bool, default=False
If True, the standard-deviation of the predictive distribution at
the query points is returned along with the mean.
return_cov : bool, default=False
If True, the covariance of the joint predictive distribution at
the query points is returned along with the mean.
Returns
-------
y_mean : ndarray of shape (n_samples,) or (n_samples, n_targets)
            Mean of predictive distribution at query points.
y_std : ndarray of shape (n_samples,), optional
Standard deviation of predictive distribution at query points.
Only returned when `return_std` is True.
y_cov : ndarray of shape (n_samples, n_samples), optional
            Covariance of joint predictive distribution at query points.
Only returned when `return_cov` is True.
"""
if return_std and return_cov:
raise RuntimeError(
"At most one of return_std or return_cov can be requested.")
if self.kernel is None or self.kernel.requires_vector_input:
X = self._validate_data(X, ensure_2d=True, dtype="numeric",
reset=False)
else:
X = self._validate_data(X, ensure_2d=False, dtype=None,
reset=False)
        if not hasattr(self, "X_train_"):  # Unfitted; predict based on GP prior
if self.kernel is None:
kernel = (C(1.0, constant_value_bounds="fixed") *
RBF(1.0, length_scale_bounds="fixed"))
else:
kernel = self.kernel
y_mean = np.zeros(X.shape[0])
if return_cov:
y_cov = kernel(X)
return y_mean, y_cov
elif return_std:
y_var = kernel.diag(X)
return y_mean, np.sqrt(y_var)
else:
return y_mean
else: # Predict based on GP posterior
K_trans = self.kernel_(X, self.X_train_)
y_mean = K_trans.dot(self.alpha_) # Line 4 (y_mean = f_star)
# undo normalisation
y_mean = self._y_train_std * y_mean + self._y_train_mean
if return_cov:
# Solve K @ V = K_trans.T
V = cho_solve((self.L_, True), K_trans.T) # Line 5
y_cov = self.kernel_(X) - K_trans.dot(V) # Line 6
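                # i.e. the GP posterior covariance k(X*, X*) - k(X*, X) K^{-1} k(X, X*)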
# undo normalisation
y_cov = y_cov * self._y_train_std**2
return y_mean, y_cov
elif return_std:
# Solve K @ V = K_trans.T
V = cho_solve((self.L_, True), K_trans.T) # Line 5
# Compute variance of predictive distribution
# Use einsum to avoid explicitly forming the large matrix
# K_trans @ V just to extract its diagonal afterward.
y_var = self.kernel_.diag(X)
y_var -= np.einsum("ij,ji->i", K_trans, V)
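                # equivalent to subtracting np.diag(K_trans @ V): the posterior variance
                # k(x, x) - k(x, X) K^{-1} k(X, x) for each query point x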
# Check if any of the variances is negative because of
# numerical issues. If yes: set the variance to 0.
y_var_negative = y_var < 0
if np.any(y_var_negative):
warnings.warn("Predicted variances smaller than 0. "
"Setting those variances to 0.")
y_var[y_var_negative] = 0.0
# undo normalisation
y_var = y_var * self._y_train_std**2
return y_mean, np.sqrt(y_var)
else:
return y_mean
def sample_y(self, X, n_samples=1, random_state=0):
"""Draw samples from Gaussian process and evaluate at X.
Parameters
----------
X : array-like of shape (n_samples_X, n_features) or list of object
Query points where the GP is evaluated.
n_samples : int, default=1
            Number of samples drawn from the Gaussian process per query point.
random_state : int, RandomState instance or None, default=0
Determines random number generation to randomly draw samples.
Pass an int for reproducible results across multiple function
calls.
See :term:`Glossary <random_state>`.
Returns
-------
y_samples : ndarray of shape (n_samples_X, n_samples), or \
(n_samples_X, n_targets, n_samples)
Values of n_samples samples drawn from Gaussian process and
evaluated at query points.
"""
rng = check_random_state(random_state)
y_mean, y_cov = self.predict(X, return_cov=True)
if y_mean.ndim == 1:
y_samples = rng.multivariate_normal(y_mean, y_cov, n_samples).T
else:
y_samples = \
[rng.multivariate_normal(y_mean[:, i], y_cov,
n_samples).T[:, np.newaxis]
for i in range(y_mean.shape[1])]
y_samples = np.hstack(y_samples)
return y_samples
def log_marginal_likelihood(self, theta=None, eval_gradient=False,
clone_kernel=True):
"""Returns log-marginal likelihood of theta for training data.
Parameters
----------
theta : array-like of shape (n_kernel_params,) default=None
Kernel hyperparameters for which the log-marginal likelihood is
evaluated. If None, the precomputed log_marginal_likelihood
of ``self.kernel_.theta`` is returned.
eval_gradient : bool, default=False
If True, the gradient of the log-marginal likelihood with respect
to the kernel hyperparameters at position theta is returned
additionally. If True, theta must not be None.
clone_kernel : bool, default=True
            If True, the kernel attribute is copied. If False, the kernel
            attribute is modified in place, which may improve performance.
Returns
-------
log_likelihood : float
Log-marginal likelihood of theta for training data.
log_likelihood_gradient : ndarray of shape (n_kernel_params,), optional
Gradient of the log-marginal likelihood with respect to the kernel
hyperparameters at position theta.
Only returned when eval_gradient is True.
"""
if theta is None:
if eval_gradient:
raise ValueError(
"Gradient can only be evaluated for theta!=None")
return self.log_marginal_likelihood_value_
if clone_kernel:
kernel = self.kernel_.clone_with_theta(theta)
else:
kernel = self.kernel_
kernel.theta = theta
if eval_gradient:
K, K_gradient = kernel(self.X_train_, eval_gradient=True)
else:
K = kernel(self.X_train_)
K[np.diag_indices_from(K)] += self.alpha
try:
L = cholesky(K, lower=True) # Line 2
except np.linalg.LinAlgError:
return (-np.inf, np.zeros_like(theta)) \
if eval_gradient else -np.inf
# Support multi-dimensional output of self.y_train_
y_train = self.y_train_
if y_train.ndim == 1:
y_train = y_train[:, np.newaxis]
alpha = cho_solve((L, True), y_train) # Line 3
# Compute log-likelihood (compare line 7)
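        # log p(y | X, theta) = -0.5 * y^T K^{-1} y - sum(log(diag(L))) - n/2 * log(2*pi),
        # where sum(log(diag(L))) equals 0.5 * log|K|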
log_likelihood_dims = -0.5 * np.einsum("ik,ik->k", y_train, alpha)
log_likelihood_dims -= np.log(np.diag(L)).sum()
log_likelihood_dims -= K.shape[0] / 2 * np.log(2 * np.pi)
log_likelihood = log_likelihood_dims.sum(-1) # sum over dimensions
if eval_gradient: # compare Equation 5.9 from GPML
tmp = np.einsum("ik,jk->ijk", alpha, alpha) # k: output-dimension
tmp -= cho_solve((L, True), np.eye(K.shape[0]))[:, :, np.newaxis]
# Compute "0.5 * trace(tmp.dot(K_gradient))" without
# constructing the full matrix tmp.dot(K_gradient) since only
# its diagonal is required
log_likelihood_gradient_dims = \
0.5 * np.einsum("ijl,jik->kl", tmp, K_gradient)
log_likelihood_gradient = log_likelihood_gradient_dims.sum(-1)
if eval_gradient:
return log_likelihood, log_likelihood_gradient
else:
return log_likelihood
def _constrained_optimization(self, obj_func, initial_theta, bounds):
if self.optimizer == "fmin_l_bfgs_b":
opt_res = scipy.optimize.minimize(
obj_func, initial_theta, method="L-BFGS-B", jac=True,
bounds=bounds)
_check_optimize_result("lbfgs", opt_res)
theta_opt, func_min = opt_res.x, opt_res.fun
elif callable(self.optimizer):
theta_opt, func_min = \
self.optimizer(obj_func, initial_theta, bounds=bounds)
else:
raise ValueError("Unknown optimizer %s." % self.optimizer)
return theta_opt, func_min
def _more_tags(self):
return {'requires_fit': False}
|
kevin-intel/scikit-learn
|
sklearn/gaussian_process/_gpr.py
|
Python
|
bsd-3-clause
| 22,428 | 0 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2014, Brian Coca <briancoca+ansible@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = r'''
---
module: debconf
short_description: Configure a .deb package
description:
- Configure a .deb package using debconf-set-selections.
- Or just query existing selections.
version_added: "1.6"
notes:
- This module requires the command line debconf tools.
- A number of questions have to be answered (depending on the package).
Use 'debconf-show <package>' on any Debian or derivative with the package
installed to see questions/settings available.
- Some distros will always record tasks involving the setting of passwords as changed. This is due to debconf-get-selections masking passwords.
requirements:
- debconf
- debconf-utils
options:
name:
description:
- Name of package to configure.
type: str
required: true
aliases: [ pkg ]
question:
description:
- A debconf configuration setting.
type: str
aliases: [ selection, setting ]
vtype:
description:
- The type of the value supplied.
- C(seen) was added in Ansible 2.2.
type: str
choices: [ boolean, error, multiselect, note, password, seen, select, string, text, title ]
value:
description:
- Value to set the configuration to.
type: str
aliases: [ answer ]
unseen:
description:
- Do not set 'seen' flag when pre-seeding.
type: bool
default: no
author:
- Brian Coca (@bcoca)
'''
EXAMPLES = r'''
- name: Set default locale to fr_FR.UTF-8
debconf:
name: locales
question: locales/default_environment_locale
value: fr_FR.UTF-8
vtype: select
- name: Set to generate locales
debconf:
name: locales
question: locales/locales_to_be_generated
value: en_US.UTF-8 UTF-8, fr_FR.UTF-8 UTF-8
vtype: multiselect
- name: Accept oracle license
debconf:
name: oracle-java7-installer
question: shared/accepted-oracle-license-v1-1
value: 'true'
vtype: select
- name: Specifying package you can register/return the list of questions and current values
debconf:
name: tzdata
'''
from ansible.module_utils._text import to_text
from ansible.module_utils.basic import AnsibleModule
def get_selections(module, pkg):
cmd = [module.get_bin_path('debconf-show', True), pkg]
rc, out, err = module.run_command(' '.join(cmd))
if rc != 0:
module.fail_json(msg=err)
selections = {}
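    # debconf-show typically prints one "question: value" line per setting, with a
    # leading '*' marking questions that have already been seen; the '*' is stripped
    # off the key below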
for line in out.splitlines():
(key, value) = line.split(':', 1)
selections[key.strip('*').strip()] = value.strip()
return selections
def set_selection(module, pkg, question, vtype, value, unseen):
setsel = module.get_bin_path('debconf-set-selections', True)
cmd = [setsel]
if unseen:
cmd.append('-u')
if vtype == 'boolean':
if value == 'True':
value = 'true'
elif value == 'False':
value = 'false'
data = ' '.join([pkg, question, vtype, value])
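    # the joined line follows the debconf-set-selections input format
    # "<package> <question> <vtype> <value>", e.g.
    # "locales locales/default_environment_locale select fr_FR.UTF-8"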
return module.run_command(cmd, data=data)
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(type='str', required=True, aliases=['pkg']),
question=dict(type='str', aliases=['selection', 'setting']),
vtype=dict(type='str', choices=['boolean', 'error', 'multiselect', 'note', 'password', 'seen', 'select', 'string', 'text', 'title']),
value=dict(type='str', aliases=['answer']),
unseen=dict(type='bool'),
),
required_together=(['question', 'vtype', 'value'],),
supports_check_mode=True,
)
# TODO: enable passing array of options and/or debconf file from get-selections dump
pkg = module.params["name"]
question = module.params["question"]
vtype = module.params["vtype"]
value = module.params["value"]
unseen = module.params["unseen"]
prev = get_selections(module, pkg)
changed = False
msg = ""
if question is not None:
if vtype is None or value is None:
module.fail_json(msg="when supplying a question you must supply a valid vtype and value")
# if question doesn't exist, value cannot match
if question not in prev:
changed = True
else:
existing = prev[question]
            # ensure booleans supplied to the module are compared the way debconf sees them ('true'/'false' strings)
if vtype == 'boolean':
value = to_text(value).lower()
existing = to_text(prev[question]).lower()
if value != existing:
changed = True
if changed:
if not module.check_mode:
rc, msg, e = set_selection(module, pkg, question, vtype, value, unseen)
if rc:
module.fail_json(msg=e)
curr = {question: value}
if question in prev:
prev = {question: prev[question]}
else:
prev[question] = ''
if module._diff:
after = prev.copy()
after.update(curr)
diff_dict = {'before': prev, 'after': after}
else:
diff_dict = {}
module.exit_json(changed=changed, msg=msg, current=curr, previous=prev, diff=diff_dict)
module.exit_json(changed=changed, msg=msg, current=prev)
if __name__ == '__main__':
main()
|
azaghal/ansible
|
lib/ansible/modules/debconf.py
|
Python
|
gpl-3.0
| 5,533 | 0.002169 |
"""
Tests for Discussion API views
"""
from datetime import datetime
import json
from urlparse import urlparse
import ddt
import httpretty
import mock
from pytz import UTC
from django.core.urlresolvers import reverse
from rest_framework.test import APIClient
from common.test.utils import disable_signal
from discussion_api import api
from discussion_api.tests.utils import (
CommentsServiceMockMixin,
make_minimal_cs_comment,
make_minimal_cs_thread,
)
from student.tests.factories import CourseEnrollmentFactory, UserFactory
from util.testing import UrlResetMixin
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
class DiscussionAPIViewTestMixin(CommentsServiceMockMixin, UrlResetMixin):
"""
Mixin for common code in tests of Discussion API views. This includes
creation of common structures (e.g. a course, user, and enrollment), logging
in the test client, utility functions, and a test case for unauthenticated
requests. Subclasses must set self.url in their setUp methods.
"""
client_class = APIClient
@mock.patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
def setUp(self):
super(DiscussionAPIViewTestMixin, self).setUp()
self.maxDiff = None # pylint: disable=invalid-name
self.course = CourseFactory.create(
org="x",
course="y",
run="z",
start=datetime.now(UTC),
discussion_topics={"Test Topic": {"id": "test_topic"}}
)
self.password = "password"
self.user = UserFactory.create(password=self.password)
CourseEnrollmentFactory.create(user=self.user, course_id=self.course.id)
self.client.login(username=self.user.username, password=self.password)
def assert_response_correct(self, response, expected_status, expected_content):
"""
Assert that the response has the given status code and parsed content
"""
self.assertEqual(response.status_code, expected_status)
parsed_content = json.loads(response.content)
self.assertEqual(parsed_content, expected_content)
def test_not_authenticated(self):
self.client.logout()
response = self.client.get(self.url)
self.assert_response_correct(
response,
401,
{"developer_message": "Authentication credentials were not provided."}
)
@mock.patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
class CourseViewTest(DiscussionAPIViewTestMixin, ModuleStoreTestCase):
"""Tests for CourseView"""
def setUp(self):
super(CourseViewTest, self).setUp()
self.url = reverse("discussion_course", kwargs={"course_id": unicode(self.course.id)})
def test_404(self):
response = self.client.get(
reverse("course_topics", kwargs={"course_id": "non/existent/course"})
)
self.assert_response_correct(
response,
404,
{"developer_message": "Not found."}
)
def test_get_success(self):
response = self.client.get(self.url)
self.assert_response_correct(
response,
200,
{
"id": unicode(self.course.id),
"blackouts": [],
"thread_list_url": "http://testserver/api/discussion/v1/threads/?course_id=x%2Fy%2Fz",
"following_thread_list_url": (
"http://testserver/api/discussion/v1/threads/?course_id=x%2Fy%2Fz&following=True"
),
"topics_url": "http://testserver/api/discussion/v1/course_topics/x/y/z",
}
)
@mock.patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
class CourseTopicsViewTest(DiscussionAPIViewTestMixin, ModuleStoreTestCase):
"""Tests for CourseTopicsView"""
def setUp(self):
super(CourseTopicsViewTest, self).setUp()
self.url = reverse("course_topics", kwargs={"course_id": unicode(self.course.id)})
def test_404(self):
response = self.client.get(
reverse("course_topics", kwargs={"course_id": "non/existent/course"})
)
self.assert_response_correct(
response,
404,
{"developer_message": "Not found."}
)
def test_get_success(self):
response = self.client.get(self.url)
self.assert_response_correct(
response,
200,
{
"courseware_topics": [],
"non_courseware_topics": [{
"id": "test_topic",
"name": "Test Topic",
"children": [],
"thread_list_url":
"http://testserver/api/discussion/v1/threads/?course_id=x%2Fy%2Fz&topic_id=test_topic",
}],
}
)
@ddt.ddt
@httpretty.activate
@mock.patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
class ThreadViewSetListTest(DiscussionAPIViewTestMixin, ModuleStoreTestCase):
"""Tests for ThreadViewSet list"""
def setUp(self):
super(ThreadViewSetListTest, self).setUp()
self.author = UserFactory.create()
self.url = reverse("thread-list")
def test_course_id_missing(self):
response = self.client.get(self.url)
self.assert_response_correct(
response,
400,
{"field_errors": {"course_id": {"developer_message": "This field is required."}}}
)
def test_404(self):
response = self.client.get(self.url, {"course_id": unicode("non/existent/course")})
self.assert_response_correct(
response,
404,
{"developer_message": "Not found."}
)
def test_basic(self):
self.register_get_user_response(self.user, upvoted_ids=["test_thread"])
source_threads = [{
"type": "thread",
"id": "test_thread",
"course_id": unicode(self.course.id),
"commentable_id": "test_topic",
"group_id": None,
"user_id": str(self.author.id),
"username": self.author.username,
"anonymous": False,
"anonymous_to_peers": False,
"created_at": "2015-04-28T00:00:00Z",
"updated_at": "2015-04-28T11:11:11Z",
"thread_type": "discussion",
"title": "Test Title",
"body": "Test body",
"pinned": False,
"closed": False,
"abuse_flaggers": [],
"votes": {"up_count": 4},
"comments_count": 5,
"unread_comments_count": 3,
"read": False,
"endorsed": False
}]
expected_threads = [{
"id": "test_thread",
"course_id": unicode(self.course.id),
"topic_id": "test_topic",
"group_id": None,
"group_name": None,
"author": self.author.username,
"author_label": None,
"created_at": "2015-04-28T00:00:00Z",
"updated_at": "2015-04-28T11:11:11Z",
"type": "discussion",
"title": "Test Title",
"raw_body": "Test body",
"rendered_body": "<p>Test body</p>",
"pinned": False,
"closed": False,
"following": False,
"abuse_flagged": False,
"voted": True,
"vote_count": 4,
"comment_count": 5,
"unread_comment_count": 3,
"comment_list_url": "http://testserver/api/discussion/v1/comments/?thread_id=test_thread",
"endorsed_comment_list_url": None,
"non_endorsed_comment_list_url": None,
"editable_fields": ["abuse_flagged", "following", "voted"],
"read": False,
"has_endorsed": False,
}]
self.register_get_threads_response(source_threads, page=1, num_pages=2)
response = self.client.get(self.url, {"course_id": unicode(self.course.id)})
self.assert_response_correct(
response,
200,
{
"results": expected_threads,
"next": "http://testserver/api/discussion/v1/threads/?course_id=x%2Fy%2Fz&page=2",
"previous": None,
"text_search_rewrite": None,
}
)
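        # the view is expected to forward these query parameters to the mocked
        # comments service backend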
self.assert_last_query_params({
"user_id": [unicode(self.user.id)],
"course_id": [unicode(self.course.id)],
"sort_key": ["activity"],
"sort_order": ["desc"],
"page": ["1"],
"per_page": ["10"],
"recursive": ["False"],
})
@ddt.data("unread", "unanswered")
def test_view_query(self, query):
threads = [make_minimal_cs_thread()]
self.register_get_user_response(self.user)
self.register_get_threads_response(threads, page=1, num_pages=1)
self.client.get(
self.url,
{
"course_id": unicode(self.course.id),
"view": query,
}
)
self.assert_last_query_params({
"user_id": [unicode(self.user.id)],
"course_id": [unicode(self.course.id)],
"sort_key": ["activity"],
"sort_order": ["desc"],
"recursive": ["False"],
"page": ["1"],
"per_page": ["10"],
query: ["true"],
})
def test_pagination(self):
self.register_get_user_response(self.user)
self.register_get_threads_response([], page=1, num_pages=1)
response = self.client.get(
self.url,
{"course_id": unicode(self.course.id), "page": "18", "page_size": "4"}
)
self.assert_response_correct(
response,
404,
{"developer_message": "Not found."}
)
self.assert_last_query_params({
"user_id": [unicode(self.user.id)],
"course_id": [unicode(self.course.id)],
"sort_key": ["activity"],
"sort_order": ["desc"],
"page": ["18"],
"per_page": ["4"],
"recursive": ["False"],
})
def test_text_search(self):
self.register_get_user_response(self.user)
self.register_get_threads_search_response([], None)
response = self.client.get(
self.url,
{"course_id": unicode(self.course.id), "text_search": "test search string"}
)
self.assert_response_correct(
response,
200,
{"results": [], "next": None, "previous": None, "text_search_rewrite": None}
)
self.assert_last_query_params({
"user_id": [unicode(self.user.id)],
"course_id": [unicode(self.course.id)],
"sort_key": ["activity"],
"sort_order": ["desc"],
"page": ["1"],
"per_page": ["10"],
"recursive": ["False"],
"text": ["test search string"],
})
def test_following(self):
self.register_get_user_response(self.user)
self.register_subscribed_threads_response(self.user, [], page=1, num_pages=1)
response = self.client.get(
self.url,
{
"course_id": unicode(self.course.id),
"page": "1",
"page_size": "4",
"following": "True",
}
)
self.assert_response_correct(
response,
200,
{"results": [], "next": None, "previous": None, "text_search_rewrite": None}
)
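        # with following=True the view should hit the per-user subscribed_threads
        # endpoint of the comments service instead of the generic thread listing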
self.assertEqual(
urlparse(httpretty.last_request().path).path,
"/api/v1/users/{}/subscribed_threads".format(self.user.id)
)
@ddt.data(
("last_activity_at", "activity"),
("comment_count", "comments"),
("vote_count", "votes")
)
@ddt.unpack
def test_order_by(self, http_query, cc_query):
"""
Tests the order_by parameter
Arguments:
http_query (str): Query string sent in the http request
cc_query (str): Query string used for the comments client service
"""
threads = [make_minimal_cs_thread()]
self.register_get_user_response(self.user)
self.register_get_threads_response(threads, page=1, num_pages=1)
self.client.get(
self.url,
{
"course_id": unicode(self.course.id),
"order_by": http_query,
}
)
self.assert_last_query_params({
"user_id": [unicode(self.user.id)],
"course_id": [unicode(self.course.id)],
"sort_order": ["desc"],
"recursive": ["False"],
"page": ["1"],
"per_page": ["10"],
"sort_key": [cc_query],
})
@ddt.data("asc", "desc")
def test_order_direction(self, query):
threads = [make_minimal_cs_thread()]
self.register_get_user_response(self.user)
self.register_get_threads_response(threads, page=1, num_pages=1)
self.client.get(
self.url,
{
"course_id": unicode(self.course.id),
"order_direction": query,
}
)
self.assert_last_query_params({
"user_id": [unicode(self.user.id)],
"course_id": [unicode(self.course.id)],
"sort_key": ["activity"],
"recursive": ["False"],
"page": ["1"],
"per_page": ["10"],
"sort_order": [query],
})
@httpretty.activate
@disable_signal(api, 'thread_created')
@mock.patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
class ThreadViewSetCreateTest(DiscussionAPIViewTestMixin, ModuleStoreTestCase):
"""Tests for ThreadViewSet create"""
def setUp(self):
super(ThreadViewSetCreateTest, self).setUp()
self.url = reverse("thread-list")
def test_basic(self):
self.register_get_user_response(self.user)
self.register_post_thread_response({
"id": "test_thread",
"username": self.user.username,
"created_at": "2015-05-19T00:00:00Z",
"updated_at": "2015-05-19T00:00:00Z",
})
request_data = {
"course_id": unicode(self.course.id),
"topic_id": "test_topic",
"type": "discussion",
"title": "Test Title",
"raw_body": "Test body",
}
expected_response_data = {
"id": "test_thread",
"course_id": unicode(self.course.id),
"topic_id": "test_topic",
"group_id": None,
"group_name": None,
"author": self.user.username,
"author_label": None,
"created_at": "2015-05-19T00:00:00Z",
"updated_at": "2015-05-19T00:00:00Z",
"type": "discussion",
"title": "Test Title",
"raw_body": "Test body",
"rendered_body": "<p>Test body</p>",
"pinned": False,
"closed": False,
"following": False,
"abuse_flagged": False,
"voted": False,
"vote_count": 0,
"comment_count": 0,
"unread_comment_count": 0,
"comment_list_url": "http://testserver/api/discussion/v1/comments/?thread_id=test_thread",
"endorsed_comment_list_url": None,
"non_endorsed_comment_list_url": None,
"editable_fields": ["abuse_flagged", "following", "raw_body", "title", "topic_id", "type", "voted"],
"read": False,
"has_endorsed": False,
}
response = self.client.post(
self.url,
json.dumps(request_data),
content_type="application/json"
)
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(response_data, expected_response_data)
self.assertEqual(
httpretty.last_request().parsed_body,
{
"course_id": [unicode(self.course.id)],
"commentable_id": ["test_topic"],
"thread_type": ["discussion"],
"title": ["Test Title"],
"body": ["Test body"],
"user_id": [str(self.user.id)],
}
)
def test_error(self):
request_data = {
"topic_id": "dummy",
"type": "discussion",
"title": "dummy",
"raw_body": "dummy",
}
response = self.client.post(
self.url,
json.dumps(request_data),
content_type="application/json"
)
expected_response_data = {
"field_errors": {"course_id": {"developer_message": "This field is required."}}
}
self.assertEqual(response.status_code, 400)
response_data = json.loads(response.content)
self.assertEqual(response_data, expected_response_data)
@httpretty.activate
@disable_signal(api, 'thread_edited')
@mock.patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
class ThreadViewSetPartialUpdateTest(DiscussionAPIViewTestMixin, ModuleStoreTestCase):
"""Tests for ThreadViewSet partial_update"""
def setUp(self):
super(ThreadViewSetPartialUpdateTest, self).setUp()
self.url = reverse("thread-detail", kwargs={"thread_id": "test_thread"})
def test_basic(self):
self.register_get_user_response(self.user)
cs_thread = make_minimal_cs_thread({
"id": "test_thread",
"course_id": unicode(self.course.id),
"commentable_id": "original_topic",
"username": self.user.username,
"user_id": str(self.user.id),
"created_at": "2015-05-29T00:00:00Z",
"updated_at": "2015-05-29T00:00:00Z",
"thread_type": "discussion",
"title": "Original Title",
"body": "Original body",
})
self.register_get_thread_response(cs_thread)
self.register_put_thread_response(cs_thread)
request_data = {"raw_body": "Edited body"}
expected_response_data = {
"id": "test_thread",
"course_id": unicode(self.course.id),
"topic_id": "original_topic",
"group_id": None,
"group_name": None,
"author": self.user.username,
"author_label": None,
"created_at": "2015-05-29T00:00:00Z",
"updated_at": "2015-05-29T00:00:00Z",
"type": "discussion",
"title": "Original Title",
"raw_body": "Edited body",
"rendered_body": "<p>Edited body</p>",
"pinned": False,
"closed": False,
"following": False,
"abuse_flagged": False,
"voted": False,
"vote_count": 0,
"comment_count": 0,
"unread_comment_count": 0,
"comment_list_url": "http://testserver/api/discussion/v1/comments/?thread_id=test_thread",
"endorsed_comment_list_url": None,
"non_endorsed_comment_list_url": None,
"editable_fields": ["abuse_flagged", "following", "raw_body", "title", "topic_id", "type", "voted"],
"read": False,
"has_endorsed": False,
}
response = self.client.patch( # pylint: disable=no-member
self.url,
json.dumps(request_data),
content_type="application/json"
)
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(response_data, expected_response_data)
self.assertEqual(
httpretty.last_request().parsed_body,
{
"course_id": [unicode(self.course.id)],
"commentable_id": ["original_topic"],
"thread_type": ["discussion"],
"title": ["Original Title"],
"body": ["Edited body"],
"user_id": [str(self.user.id)],
"anonymous": ["False"],
"anonymous_to_peers": ["False"],
"closed": ["False"],
"pinned": ["False"],
}
)
def test_error(self):
self.register_get_user_response(self.user)
cs_thread = make_minimal_cs_thread({
"id": "test_thread",
"course_id": unicode(self.course.id),
"user_id": str(self.user.id),
})
self.register_get_thread_response(cs_thread)
request_data = {"title": ""}
response = self.client.patch( # pylint: disable=no-member
self.url,
json.dumps(request_data),
content_type="application/json"
)
expected_response_data = {
"field_errors": {"title": {"developer_message": "This field may not be blank."}}
}
self.assertEqual(response.status_code, 400)
response_data = json.loads(response.content)
self.assertEqual(response_data, expected_response_data)
@httpretty.activate
@disable_signal(api, 'thread_deleted')
@mock.patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
class ThreadViewSetDeleteTest(DiscussionAPIViewTestMixin, ModuleStoreTestCase):
"""Tests for ThreadViewSet delete"""
def setUp(self):
super(ThreadViewSetDeleteTest, self).setUp()
self.url = reverse("thread-detail", kwargs={"thread_id": "test_thread"})
self.thread_id = "test_thread"
def test_basic(self):
self.register_get_user_response(self.user)
cs_thread = make_minimal_cs_thread({
"id": self.thread_id,
"course_id": unicode(self.course.id),
"username": self.user.username,
"user_id": str(self.user.id),
})
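        # Stub both the thread fetch and the delete; the view appears to retrieve
        # the thread (e.g. for permission checks) before issuing the DELETE.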
self.register_get_thread_response(cs_thread)
self.register_delete_thread_response(self.thread_id)
response = self.client.delete(self.url)
self.assertEqual(response.status_code, 204)
self.assertEqual(response.content, "")
self.assertEqual(
urlparse(httpretty.last_request().path).path,
"/api/v1/threads/{}".format(self.thread_id)
)
self.assertEqual(httpretty.last_request().method, "DELETE")
def test_delete_nonexistent_thread(self):
self.register_get_thread_error_response(self.thread_id, 404)
response = self.client.delete(self.url)
self.assertEqual(response.status_code, 404)
@httpretty.activate
@mock.patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
class CommentViewSetListTest(DiscussionAPIViewTestMixin, ModuleStoreTestCase):
"""Tests for CommentViewSet list"""
def setUp(self):
super(CommentViewSetListTest, self).setUp()
self.author = UserFactory.create()
self.url = reverse("comment-list")
self.thread_id = "test_thread"
def test_thread_id_missing(self):
response = self.client.get(self.url)
self.assert_response_correct(
response,
400,
{"field_errors": {"thread_id": {"developer_message": "This field is required."}}}
)
def test_404(self):
self.register_get_thread_error_response(self.thread_id, 404)
response = self.client.get(self.url, {"thread_id": self.thread_id})
self.assert_response_correct(
response,
404,
{"developer_message": "Not found."}
)
def test_basic(self):
self.register_get_user_response(self.user, upvoted_ids=["test_comment"])
source_comments = [{
"type": "comment",
"id": "test_comment",
"thread_id": self.thread_id,
"parent_id": None,
"user_id": str(self.author.id),
"username": self.author.username,
"anonymous": False,
"anonymous_to_peers": False,
"created_at": "2015-05-11T00:00:00Z",
"updated_at": "2015-05-11T11:11:11Z",
"body": "Test body",
"endorsed": False,
"abuse_flaggers": [],
"votes": {"up_count": 4},
"children": [],
}]
expected_comments = [{
"id": "test_comment",
"thread_id": self.thread_id,
"parent_id": None,
"author": self.author.username,
"author_label": None,
"created_at": "2015-05-11T00:00:00Z",
"updated_at": "2015-05-11T11:11:11Z",
"raw_body": "Test body",
"rendered_body": "<p>Test body</p>",
"endorsed": False,
"endorsed_by": None,
"endorsed_by_label": None,
"endorsed_at": None,
"abuse_flagged": False,
"voted": True,
"vote_count": 4,
"children": [],
"editable_fields": ["abuse_flagged", "voted"],
}]
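        # The comments service embeds responses in the thread payload under
        # "children"; "resp_total" (100 here) is what produces the "next" page link.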
self.register_get_thread_response({
"id": self.thread_id,
"course_id": unicode(self.course.id),
"thread_type": "discussion",
"children": source_comments,
"resp_total": 100,
})
response = self.client.get(self.url, {"thread_id": self.thread_id})
self.assert_response_correct(
response,
200,
{
"results": expected_comments,
"next": "http://testserver/api/discussion/v1/comments/?page=2&thread_id={}".format(
self.thread_id
),
"previous": None,
}
)
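        # latest_requests[-2] targets the thread fetch; the most recent request is
        # presumably a follow-up user-data call, so it is skipped here.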
self.assert_query_params_equal(
httpretty.httpretty.latest_requests[-2],
{
"recursive": ["True"],
"resp_skip": ["0"],
"resp_limit": ["10"],
"user_id": [str(self.user.id)],
"mark_as_read": ["False"],
}
)
def test_pagination(self):
"""
Test that pagination parameters are correctly plumbed through to the
comments service and that a 404 is correctly returned if a page past the
end is requested
"""
self.register_get_user_response(self.user)
self.register_get_thread_response(make_minimal_cs_thread({
"id": self.thread_id,
"course_id": unicode(self.course.id),
"thread_type": "discussion",
"children": [],
"resp_total": 10,
}))
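        # With resp_total of 10 and a page size of 4, page 18 is far past the last
        # page, so a 404 is expected.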
response = self.client.get(
self.url,
{"thread_id": self.thread_id, "page": "18", "page_size": "4"}
)
self.assert_response_correct(
response,
404,
{"developer_message": "Not found."}
)
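        # resp_skip is (page - 1) * page_size = 17 * 4 = 68.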
self.assert_query_params_equal(
httpretty.httpretty.latest_requests[-2],
{
"recursive": ["True"],
"resp_skip": ["68"],
"resp_limit": ["4"],
"user_id": [str(self.user.id)],
"mark_as_read": ["False"],
}
)
@httpretty.activate
@disable_signal(api, 'comment_deleted')
@mock.patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
class CommentViewSetDeleteTest(DiscussionAPIViewTestMixin, ModuleStoreTestCase):
"""Tests for ThreadViewSet delete"""
def setUp(self):
super(CommentViewSetDeleteTest, self).setUp()
self.url = reverse("comment-detail", kwargs={"comment_id": "test_comment"})
self.comment_id = "test_comment"
def test_basic(self):
self.register_get_user_response(self.user)
cs_thread = make_minimal_cs_thread({
"id": "test_thread",
"course_id": unicode(self.course.id),
})
self.register_get_thread_response(cs_thread)
cs_comment = make_minimal_cs_comment({
"id": self.comment_id,
"course_id": cs_thread["course_id"],
"thread_id": cs_thread["id"],
"username": self.user.username,
"user_id": str(self.user.id),
})
self.register_get_comment_response(cs_comment)
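        # Both the comment and its parent thread are stubbed above; the view
        # appears to load them before issuing the DELETE to the comments service.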
self.register_delete_comment_response(self.comment_id)
response = self.client.delete(self.url)
self.assertEqual(response.status_code, 204)
self.assertEqual(response.content, "")
self.assertEqual(
urlparse(httpretty.last_request().path).path,
"/api/v1/comments/{}".format(self.comment_id)
)
self.assertEqual(httpretty.last_request().method, "DELETE")
def test_delete_nonexistent_comment(self):
self.register_get_comment_error_response(self.comment_id, 404)
response = self.client.delete(self.url)
self.assertEqual(response.status_code, 404)
@httpretty.activate
@disable_signal(api, 'comment_created')
@mock.patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
class CommentViewSetCreateTest(DiscussionAPIViewTestMixin, ModuleStoreTestCase):
"""Tests for CommentViewSet create"""
def setUp(self):
super(CommentViewSetCreateTest, self).setUp()
self.url = reverse("comment-list")
def test_basic(self):
self.register_get_user_response(self.user)
self.register_get_thread_response(
make_minimal_cs_thread({
"id": "test_thread",
"course_id": unicode(self.course.id),
"commentable_id": "test_topic",
})
)
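        # The parent thread is stubbed because comment creation appears to look it
        # up (course and topic context) before posting to the comments service.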
self.register_post_comment_response(
{
"id": "test_comment",
"username": self.user.username,
"created_at": "2015-05-27T00:00:00Z",
"updated_at": "2015-05-27T00:00:00Z",
},
thread_id="test_thread"
)
request_data = {
"thread_id": "test_thread",
"raw_body": "Test body",
}
expected_response_data = {
"id": "test_comment",
"thread_id": "test_thread",
"parent_id": None,
"author": self.user.username,
"author_label": None,
"created_at": "2015-05-27T00:00:00Z",
"updated_at": "2015-05-27T00:00:00Z",
"raw_body": "Test body",
"rendered_body": "<p>Test body</p>",
"endorsed": False,
"endorsed_by": None,
"endorsed_by_label": None,
"endorsed_at": None,
"abuse_flagged": False,
"voted": False,
"vote_count": 0,
"children": [],
"editable_fields": ["abuse_flagged", "raw_body", "voted"],
}
response = self.client.post(
self.url,
json.dumps(request_data),
content_type="application/json"
)
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(response_data, expected_response_data)
self.assertEqual(
urlparse(httpretty.last_request().path).path,
"/api/v1/threads/test_thread/comments"
)
self.assertEqual(
httpretty.last_request().parsed_body,
{
"course_id": [unicode(self.course.id)],
"body": ["Test body"],
"user_id": [str(self.user.id)],
}
)
def test_error(self):
response = self.client.post(
self.url,
json.dumps({}),
content_type="application/json"
)
expected_response_data = {
"field_errors": {"thread_id": {"developer_message": "This field is required."}}
}
self.assertEqual(response.status_code, 400)
response_data = json.loads(response.content)
self.assertEqual(response_data, expected_response_data)
@disable_signal(api, 'comment_edited')
@mock.patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
class CommentViewSetPartialUpdateTest(DiscussionAPIViewTestMixin, ModuleStoreTestCase):
"""Tests for CommentViewSet partial_update"""
def setUp(self):
super(CommentViewSetPartialUpdateTest, self).setUp()
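        # httpretty is managed manually here (rather than with the
        # @httpretty.activate decorator used by the other classes) so that mock
        # responses can be registered in setUp.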
httpretty.reset()
httpretty.enable()
self.addCleanup(httpretty.disable)
self.register_get_user_response(self.user)
self.url = reverse("comment-detail", kwargs={"comment_id": "test_comment"})
cs_thread = make_minimal_cs_thread({
"id": "test_thread",
"course_id": unicode(self.course.id),
})
self.register_get_thread_response(cs_thread)
cs_comment = make_minimal_cs_comment({
"id": "test_comment",
"course_id": cs_thread["course_id"],
"thread_id": cs_thread["id"],
"username": self.user.username,
"user_id": str(self.user.id),
"created_at": "2015-06-03T00:00:00Z",
"updated_at": "2015-06-03T00:00:00Z",
"body": "Original body",
})
self.register_get_comment_response(cs_comment)
self.register_put_comment_response(cs_comment)
def test_basic(self):
request_data = {"raw_body": "Edited body"}
expected_response_data = {
"id": "test_comment",
"thread_id": "test_thread",
"parent_id": None,
"author": self.user.username,
"author_label": None,
"created_at": "2015-06-03T00:00:00Z",
"updated_at": "2015-06-03T00:00:00Z",
"raw_body": "Edited body",
"rendered_body": "<p>Edited body</p>",
"endorsed": False,
"endorsed_by": None,
"endorsed_by_label": None,
"endorsed_at": None,
"abuse_flagged": False,
"voted": False,
"vote_count": 0,
"children": [],
"editable_fields": ["abuse_flagged", "raw_body", "voted"],
}
response = self.client.patch( # pylint: disable=no-member
self.url,
json.dumps(request_data),
content_type="application/json"
)
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(response_data, expected_response_data)
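        # As with thread edits, the PUT to the comments service includes the full
        # comment payload rather than just the edited body.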
self.assertEqual(
httpretty.last_request().parsed_body,
{
"body": ["Edited body"],
"course_id": [unicode(self.course.id)],
"user_id": [str(self.user.id)],
"anonymous": ["False"],
"anonymous_to_peers": ["False"],
"endorsed": ["False"],
}
)
def test_error(self):
request_data = {"raw_body": ""}
response = self.client.patch( # pylint: disable=no-member
self.url,
json.dumps(request_data),
content_type="application/json"
)
expected_response_data = {
"field_errors": {"raw_body": {"developer_message": "This field may not be blank."}}
}
self.assertEqual(response.status_code, 400)
response_data = json.loads(response.content)
self.assertEqual(response_data, expected_response_data)
@httpretty.activate
@mock.patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
class ThreadViewSetRetrieveTest(DiscussionAPIViewTestMixin, ModuleStoreTestCase):
"""Tests for ThreadViewSet Retrieve"""
def setUp(self):
super(ThreadViewSetRetrieveTest, self).setUp()
self.url = reverse("thread-detail", kwargs={"thread_id": "test_thread"})
self.thread_id = "test_thread"
def test_basic(self):
self.register_get_user_response(self.user)
cs_thread = make_minimal_cs_thread({
"id": self.thread_id,
"course_id": unicode(self.course.id),
"commentable_id": "test_topic",
"username": self.user.username,
"user_id": str(self.user.id),
"title": "Test Title",
"body": "Test body",
"created_at": "2015-05-29T00:00:00Z",
"updated_at": "2015-05-29T00:00:00Z"
})
expected_response_data = {
"author": self.user.username,
"author_label": None,
"created_at": "2015-05-29T00:00:00Z",
"updated_at": "2015-05-29T00:00:00Z",
"raw_body": "Test body",
"rendered_body": "<p>Test body</p>",
"abuse_flagged": False,
"voted": False,
"vote_count": 0,
"editable_fields": ["abuse_flagged", "following", "raw_body", "title", "topic_id", "type", "voted"],
"course_id": unicode(self.course.id),
"topic_id": "test_topic",
"group_id": None,
"group_name": None,
"title": "Test Title",
"pinned": False,
"closed": False,
"following": False,
"comment_count": 0,
"unread_comment_count": 0,
"comment_list_url": "http://testserver/api/discussion/v1/comments/?thread_id=test_thread",
"endorsed_comment_list_url": None,
"non_endorsed_comment_list_url": None,
"read": False,
"has_endorsed": False,
"id": "test_thread",
"type": "discussion",
"response_count": 0,
}
self.register_get_thread_response(cs_thread)
response = self.client.get(self.url)
self.assertEqual(response.status_code, 200)
self.assertEqual(json.loads(response.content), expected_response_data)
self.assertEqual(httpretty.last_request().method, "GET")
def test_retrieve_nonexistent_thread(self):
self.register_get_thread_error_response(self.thread_id, 404)
response = self.client.get(self.url)
self.assertEqual(response.status_code, 404)
|
adoosii/edx-platform
|
lms/djangoapps/discussion_api/tests/test_views.py
|
Python
|
agpl-3.0
| 38,334 | 0.001513 |