repo_name (stringlengths 6–100) | path (stringlengths 4–294) | copies (stringlengths 1–5) | size (stringlengths 4–6) | content (stringlengths 606–896k) | license (stringclasses, 15 values)
---|---|---|---|---|---|
jamiefolsom/edx-platform | lms/djangoapps/instructor/tests/test_services.py | 33 | 3567 |
"""
Tests for the InstructorService
"""
import json
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
from courseware.models import StudentModule
from instructor.services import InstructorService
from instructor.tests.test_tools import msk_from_problem_urlname
from nose.plugins.attrib import attr
from student.models import CourseEnrollment
from student.tests.factories import UserFactory
@attr('shard_1')
class InstructorServiceTests(SharedModuleStoreTestCase):
"""
Tests for the InstructorService
"""
@classmethod
def setUpClass(cls):
super(InstructorServiceTests, cls).setUpClass()
cls.course = CourseFactory.create()
cls.problem_location = msk_from_problem_urlname(
cls.course.id,
'robot-some-problem-urlname'
)
cls.other_problem_location = msk_from_problem_urlname(
cls.course.id,
'robot-some-other_problem-urlname'
)
cls.problem_urlname = unicode(cls.problem_location)
cls.other_problem_urlname = unicode(cls.other_problem_location)
def setUp(self):
super(InstructorServiceTests, self).setUp()
self.student = UserFactory()
CourseEnrollment.enroll(self.student, self.course.id)
self.service = InstructorService()
self.module_to_reset = StudentModule.objects.create(
student=self.student,
course_id=self.course.id,
module_state_key=self.problem_location,
state=json.dumps({'attempts': 2}),
)
def test_reset_student_attempts_delete(self):
"""
Test delete student state.
"""
# make sure the attempt is there
self.assertEqual(
StudentModule.objects.filter(
student=self.module_to_reset.student,
course_id=self.course.id,
module_state_key=self.module_to_reset.module_state_key,
).count(),
1
)
self.service.delete_student_attempt(
self.student.username,
unicode(self.course.id),
self.problem_urlname
)
# make sure the module has been deleted
self.assertEqual(
StudentModule.objects.filter(
student=self.module_to_reset.student,
course_id=self.course.id,
module_state_key=self.module_to_reset.module_state_key,
).count(),
0
)
def test_reset_bad_content_id(self):
"""
Negative test of trying to reset attempts with bad content_id
"""
result = self.service.delete_student_attempt(
self.student.username,
unicode(self.course.id),
'foo/bar/baz'
)
self.assertIsNone(result)
def test_reset_bad_user(self):
"""
Negative test of trying to reset attempts with bad user identifier
"""
result = self.service.delete_student_attempt(
'bad_student',
unicode(self.course.id),
'foo/bar/baz'
)
self.assertIsNone(result)
def test_reset_non_existing_attempt(self):
"""
Negative test of trying to reset a non-existing attempt
"""
result = self.service.delete_student_attempt(
self.student.username,
unicode(self.course.id),
self.other_problem_urlname
)
self.assertIsNone(result)
| agpl-3.0 |
vicky2135/lucious | oscar/lib/python2.7/site-packages/django/db/backends/sqlite3/features.py | 38 | 2739 |
from __future__ import unicode_literals
from django.db import utils
from django.db.backends.base.features import BaseDatabaseFeatures
from django.utils import six
from django.utils.functional import cached_property
from .base import Database
try:
import pytz
except ImportError:
pytz = None
class DatabaseFeatures(BaseDatabaseFeatures):
# SQLite cannot handle us only partially reading from a cursor's result set
# and then writing the same rows to the database in another cursor. This
# setting ensures we always read result sets fully into memory all in one
# go.
can_use_chunked_reads = False
test_db_allows_multiple_connections = False
supports_unspecified_pk = True
supports_timezones = False
supports_1000_query_parameters = False
supports_mixed_date_datetime_comparisons = False
has_bulk_insert = True
can_combine_inserts_with_and_without_auto_increment_pk = False
supports_foreign_keys = False
supports_column_check_constraints = False
autocommits_when_autocommit_is_off = True
can_introspect_decimal_field = False
can_introspect_positive_integer_field = True
can_introspect_small_integer_field = True
supports_transactions = True
atomic_transactions = False
can_rollback_ddl = True
supports_paramstyle_pyformat = False
supports_sequence_reset = False
can_clone_databases = True
supports_temporal_subtraction = True
ignores_quoted_identifier_case = True
@cached_property
def uses_savepoints(self):
return Database.sqlite_version_info >= (3, 6, 8)
@cached_property
def can_release_savepoints(self):
return self.uses_savepoints
@cached_property
def can_share_in_memory_db(self):
return (
six.PY3 and
Database.__name__ == 'sqlite3.dbapi2' and
Database.sqlite_version_info >= (3, 7, 13)
)
@cached_property
def supports_stddev(self):
"""Confirm support for STDDEV and related stats functions
SQLite supports STDDEV as an extension package; so
connection.ops.check_expression_support() can't unilaterally
rule out support for STDDEV. We need to manually check
whether the call works.
"""
with self.connection.cursor() as cursor:
cursor.execute('CREATE TABLE STDDEV_TEST (X INT)')
try:
cursor.execute('SELECT STDDEV(*) FROM STDDEV_TEST')
has_support = True
except utils.DatabaseError:
has_support = False
cursor.execute('DROP TABLE STDDEV_TEST')
return has_support
@cached_property
def has_zoneinfo_database(self):
return pytz is not None
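# Illustrative sketch (not part of Django): the supports_stddev probe above can
# be reproduced with the stdlib sqlite3 module alone -- run the statement and
# treat an OperationalError as "extension function not available".
import sqlite3
def sqlite_supports_stddev(conn):
    cursor = conn.cursor()
    cursor.execute('CREATE TABLE STDDEV_TEST (X INT)')
    try:
        cursor.execute('SELECT STDDEV(*) FROM STDDEV_TEST')
        has_support = True
    except sqlite3.OperationalError:
        has_support = False
    cursor.execute('DROP TABLE STDDEV_TEST')
    return has_support
print(sqlite_supports_stddev(sqlite3.connect(':memory:')))  # False on a stock CPython build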
| bsd-3-clause |
sbesson/PyGithub | tests/RepositoryKey.py | 3 | 3820 |
############################ Copyrights and license ############################
# #
# Copyright 2012 Vincent Jacques <vincent@vincent-jacques.net> #
# Copyright 2012 Zearin <zearin@gonk.net> #
# Copyright 2013 Vincent Jacques <vincent@vincent-jacques.net> #
# Copyright 2014 Vincent Jacques <vincent@vincent-jacques.net> #
# Copyright 2016 Jannis Gebauer <ja.geb@me.com> #
# Copyright 2016 Peter Buckley <dx-pbuckley@users.noreply.github.com> #
# Copyright 2017 Simon <spam@esemi.ru> #
# Copyright 2018 Laurent Raufaste <analogue@glop.org> #
# Copyright 2018 Wan Liuyang <tsfdye@gmail.com> #
# Copyright 2018 sfdye <tsfdye@gmail.com> #
# #
# This file is part of PyGithub. #
# http://pygithub.readthedocs.io/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
################################################################################
import datetime
from . import Framework
class RepositoryKey(Framework.TestCase):
def setUp(self):
super().setUp()
# When recording test, be sure to create a deploy key for yourself on
# Github and update it here.
self.key = self.g.get_user("lra").get_repo("mackup").get_key(21870881)
def testAttributes(self):
self.assertEqual(self.key.id, 21870881)
self.assertEqual(
self.key.key,
"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDLOoLSVPwG1OSgVSeEXNbfIofYdxR5zs3u4PryhnamfFPYwi2vZW3ZxeI1oRcDh2VEdwGvlN5VUduKJNoOWMVzV2jSyR8CeDHH+I0soQCC7kfJVodU96HcPMzZ6MuVwSfD4BFGvKMXyCnBUqzo28BGHFwVQG8Ya9gL6/cTbuWywgM4xaJgMHv1OVcESXBtBkrqOneTJuOgeEmP0RfUnIAK/3/wbg9mfiBq7JV4cmWAg1xNE8GJoAbci59Tdx1dQgVuuqdQGk5jzNusOVneyMtGEB+p7UpPLJsGBW29rsMt7ITUbXM/kl9v11vPtWb+oOUThoFsDYmsWy7fGGP9YAFB",
)
self.assertEqual(self.key.title, "PyGithub Test Key")
self.assertEqual(
self.key.url, "https://api.github.com/repos/lra/mackup/keys/21870881"
)
self.assertEqual(self.key.created_at, datetime.datetime(2017, 2, 22, 8, 16, 23))
self.assertTrue(self.key.verified)
self.assertTrue(self.key.read_only)
self.assertEqual(
repr(self.key), 'RepositoryKey(title="PyGithub Test Key", id=21870881)'
)
def testDelete(self):
self.key.delete()
| lgpl-3.0 |
jamesliu/mxnet | python/mxnet/rnn/rnn.py | 44 | 4264 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding: utf-8
# pylint: disable=too-many-arguments, no-member
"""Functions for constructing recurrent neural networks."""
import warnings
from ..model import save_checkpoint, load_checkpoint
from .rnn_cell import BaseRNNCell
def rnn_unroll(cell, length, inputs=None, begin_state=None, input_prefix='', layout='NTC'):
"""Deprecated. Please use cell.unroll instead"""
warnings.warn('rnn_unroll is deprecated. Please call cell.unroll directly.')
return cell.unroll(length=length, inputs=inputs, begin_state=begin_state,
input_prefix=input_prefix, layout=layout)
def save_rnn_checkpoint(cells, prefix, epoch, symbol, arg_params, aux_params):
"""Save checkpoint for model using RNN cells.
Unpacks weight before saving.
Parameters
----------
cells : RNNCell or list of RNNCells
The RNN cells used by this symbol.
prefix : str
Prefix of model name.
epoch : int
The epoch number of the model.
symbol : Symbol
The input symbol
arg_params : dict of str to NDArray
Model parameter, dict of name to NDArray of net's weights.
aux_params : dict of str to NDArray
Model parameter, dict of name to NDArray of net's auxiliary states.
Notes
-----
- ``prefix-symbol.json`` will be saved for symbol.
- ``prefix-epoch.params`` will be saved for parameters.
"""
if isinstance(cells, BaseRNNCell):
cells = [cells]
for cell in cells:
arg_params = cell.unpack_weights(arg_params)
save_checkpoint(prefix, epoch, symbol, arg_params, aux_params)
def load_rnn_checkpoint(cells, prefix, epoch):
"""Load model checkpoint from file.
Pack weights after loading.
Parameters
----------
cells : RNNCell or list of RNNCells
The RNN cells used by this symbol.
prefix : str
Prefix of model name.
epoch : int
Epoch number of model we would like to load.
Returns
-------
symbol : Symbol
The symbol configuration of computation network.
arg_params : dict of str to NDArray
Model parameter, dict of name to NDArray of net's weights.
aux_params : dict of str to NDArray
Model parameter, dict of name to NDArray of net's auxiliary states.
Notes
-----
- symbol will be loaded from ``prefix-symbol.json``.
- parameters will be loaded from ``prefix-epoch.params``.
"""
sym, arg, aux = load_checkpoint(prefix, epoch)
if isinstance(cells, BaseRNNCell):
cells = [cells]
for cell in cells:
arg = cell.pack_weights(arg)
return sym, arg, aux
def do_rnn_checkpoint(cells, prefix, period=1):
"""Make a callback to checkpoint Module to prefix every epoch.
Unpacks weights used by cells before saving.
Parameters
----------
cells : RNNCell or list of RNNCells
The RNN cells used by this symbol.
prefix : str
The file prefix to checkpoint to
period : int
How many epochs to wait before checkpointing. Default is 1.
Returns
-------
callback : function
The callback function that can be passed as iter_end_callback to fit.
"""
period = int(max(1, period))
# pylint: disable=unused-argument
def _callback(iter_no, sym=None, arg=None, aux=None):
"""The checkpoint function."""
if (iter_no + 1) % period == 0:
save_rnn_checkpoint(cells, prefix, iter_no+1, sym, arg, aux)
return _callback
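# Hedged usage sketch (not part of the module): `cells`, `sym`, `arg_params`
# and `aux_params` below are placeholders for objects produced by training,
# not names defined in this file.
#
#     callback = do_rnn_checkpoint(cells, prefix='rnn_model', period=2)
#     # A training loop would invoke this at the end of every epoch; with
#     # period=2 only epochs 2, 4, 6, ... write 'rnn_model-symbol.json' and
#     # an epoch-numbered 'rnn_model-*.params' file.
#     callback(1, sym, arg_params, aux_params)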
| apache-2.0 |
simphony/simphony-remote | jupyterhub/remoteappmanager_config.py | 1 | 2462 |
# # --------------------
# # Docker configuration
# # --------------------
# #
# # Configuration options for connecting to the docker machine.
# # These options override the default provided by the local environment
# # variables.
# #
# # The endpoint of the docker machine, specified as a URL.
# # By default, it is obtained by DOCKER_HOST envvar. On Linux in a vanilla
# # install, the connection uses a unix socket by default.
#
# docker_host = "tcp://192.168.99.100:2376"
# # Docker realm is used to identify the containers that are managed by this
# # particular instance of simphony-remote. It will be the first entry in
# # the container name, and will also be added as part of a run-time container
# # label. You generally should not change this unless you have multiple
# # installations of simphony-remote all using the same docker host.
#
# docker_realm = "whatever"
#
# # TLS configuration
# # -----------------
# #
# # Set this to True to enable TLS connection with the docker client
#
# tls = True
#
# # Enables verification of the certificates. By default, this is the
# # result of the DOCKER_TLS_VERIFY envvar. Set to False to skip verification.
#
# tls_verify = True
#
# # Full paths of the CA certificate, certificate and key of the docker
# # machine. Normally these are computed from the DOCKER_CERT_PATH.
# # If you want to use a recognised CA for verification, set the tls_ca to
# # an empty string
#
# tls_ca = "/path/to/ca.pem"
# tls_cert = "/path/to/cert.pem"
# tls_key = "/path/to/key.pem"
#
# # ----------
# # Accounting
# # ----------
# # Notes on os.path:
# # 1. When running with system-user mode, both the current directory and '~'
# # are the system user's home directory.
# # 2. When running in virtual-user mode, the current directory is the
# # directory where jupyterhub is started, '~' would be evaluated according to
# # the spawned process's owner's home directory (not the virtual user's
# # home directory)
#
# # CSV database support
#
# database_class = "remoteappmanager.db.csv_db.CSVDatabase"
# database_kwargs = {
# "csv_file_path": os.path.abspath("./remoteappmanager.csv")}
#
# # Sqlite database support
#
# database_class = "remoteappmanager.db.orm.ORMDatabase"
# database_kwargs = {
# "url": "sqlite:///"+os.path.abspath('./remoteappmanager.db')}
# # ----------------
# # Google Analytics
# # ----------------
# # Put your tracking id from Google Analytics here.
# ga_tracking_id = "UA-XXXXXX-X"
| bsd-3-clause |
csrocha/OpenUpgrade | addons/website_forum/tests/test_forum.py | 87 | 7632 |
# -*- coding: utf-8 -*-
from openerp.addons.website_forum.tests.common import KARMA, TestForumCommon
from openerp.addons.website_forum.models.forum import KarmaError
from openerp.exceptions import Warning, AccessError
from openerp.tools import mute_logger
class TestForum(TestForumCommon):
@mute_logger('openerp.addons.base.ir.ir_model', 'openerp.models')
def test_ask(self):
Post = self.env['forum.post']
# Public user asks a question: not allowed
with self.assertRaises(AccessError):
Post.sudo(self.user_public).create({
'name': " Question ?",
'forum_id': self.forum.id,
})
# Portal user asks a question with tags: not allowed, insufficient karma
with self.assertRaises(KarmaError):
Post.sudo(self.user_portal).create({
'name': " Q_0",
'forum_id': self.forum.id,
'tag_ids': [(0, 0, {'name': 'Tag0', 'forum_id': self.forum.id})]
})
# Portal user asks a question with tags: ok if enough karma
self.user_portal.karma = KARMA['ask']
Post.sudo(self.user_portal).create({
'name': " Q0",
'forum_id': self.forum.id,
'tag_ids': [(0, 0, {'name': 'Tag0', 'forum_id': self.forum.id})]
})
self.assertEqual(self.user_portal.karma, KARMA['ask'] + KARMA['gen_que_new'], 'website_forum: wrong karma generation when asking question')
@mute_logger('openerp.addons.base.ir.ir_model', 'openerp.models')
def test_answer(self):
Post = self.env['forum.post']
# Answers its own question: not allowed, insufficient karma
with self.assertRaises(KarmaError):
Post.sudo(self.user_employee).create({
'name': " A0",
'forum_id': self.forum.id,
'parent_id': self.post.id,
})
# Answers on question: ok if enough karma
self.user_employee.karma = KARMA['ans']
Post.sudo(self.user_employee).create({
'name': " A0",
'forum_id': self.forum.id,
'parent_id': self.post.id,
})
self.assertEqual(self.user_employee.karma, KARMA['ans'], 'website_forum: wrong karma generation when answering question')
@mute_logger('openerp.addons.base.ir.ir_model', 'openerp.models')
def test_vote_crash(self):
Post = self.env['forum.post']
self.user_employee.karma = KARMA['ans']
emp_answer = Post.sudo(self.user_employee).create({
'name': 'TestAnswer',
'forum_id': self.forum.id,
'parent_id': self.post.id})
# upvote its own post
with self.assertRaises(Warning):
emp_answer.vote(upvote=True)
# not enough karma
with self.assertRaises(KarmaError):
self.post.sudo(self.user_portal).vote(upvote=True)
def test_vote(self):
self.post.create_uid.karma = KARMA['ask']
self.user_portal.karma = KARMA['upv']
self.post.sudo(self.user_portal).vote(upvote=True)
self.assertEqual(self.post.create_uid.karma, KARMA['ask'] + KARMA['gen_que_upv'], 'website_forum: wrong karma generation of upvoted question author')
@mute_logger('openerp.addons.base.ir.ir_model', 'openerp.models')
def test_downvote_crash(self):
Post = self.env['forum.post']
self.user_employee.karma = KARMA['ans']
emp_answer = Post.sudo(self.user_employee).create({
'name': 'TestAnswer',
'forum_id': self.forum.id,
'parent_id': self.post.id})
# downvote its own post
with self.assertRaises(Warning):
emp_answer.vote(upvote=False)
# not enough karma
with self.assertRaises(KarmaError):
self.post.sudo(self.user_portal).vote(upvote=False)
def test_downvote(self):
self.post.create_uid.karma = 50
self.user_portal.karma = KARMA['dwv']
self.post.sudo(self.user_portal).vote(upvote=False)
self.assertEqual(self.post.create_uid.karma, 50 + KARMA['gen_que_dwv'], 'website_forum: wrong karma generation of downvoted question author')
def test_comment_crash(self):
with self.assertRaises(KarmaError):
self.post.sudo(self.user_portal).message_post(body='Should crash', type='comment')
def test_comment(self):
self.post.sudo(self.user_employee).message_post(body='Test0', type='notification')
self.user_employee.karma = KARMA['com_all']
self.post.sudo(self.user_employee).message_post(body='Test1', type='comment')
self.assertEqual(len(self.post.message_ids), 4, 'website_forum: wrong behavior of message_post')
def test_convert_answer_to_comment_crash(self):
Post = self.env['forum.post']
# converting a question does nothing
msg_ids = self.post.sudo(self.user_portal).convert_answer_to_comment()
self.assertEqual(msg_ids[0], False, 'website_forum: question to comment conversion failed')
self.assertEqual(Post.search([('name', '=', 'TestQuestion')])[0].forum_id.name, 'TestForum', 'website_forum: question to comment conversion failed')
with self.assertRaises(KarmaError):
self.answer.sudo(self.user_portal).convert_answer_to_comment()
def test_convert_answer_to_comment(self):
self.user_portal.karma = KARMA['com_conv_all']
post_author = self.answer.create_uid.partner_id
msg_ids = self.answer.sudo(self.user_portal).convert_answer_to_comment()
self.assertEqual(len(msg_ids), 1, 'website_forum: wrong answer to comment conversion')
msg = self.env['mail.message'].browse(msg_ids[0])
self.assertEqual(msg.author_id, post_author, 'website_forum: wrong answer to comment conversion')
self.assertIn('I am an anteater', msg.body, 'website_forum: wrong answer to comment conversion')
def test_edit_post_crash(self):
with self.assertRaises(KarmaError):
self.post.sudo(self.user_portal).write({'name': 'I am not your father.'})
def test_edit_post(self):
self.post.create_uid.karma = KARMA['edit_own']
self.post.write({'name': 'Actually I am your dog.'})
self.user_portal.karma = KARMA['edit_all']
self.post.sudo(self.user_portal).write({'name': 'Actually I am your cat.'})
def test_close_post_crash(self):
with self.assertRaises(KarmaError):
self.post.sudo(self.user_portal).close(None)
def test_close_post_own(self):
self.post.create_uid.karma = KARMA['close_own']
self.post.close(None)
def test_close_post_all(self):
self.user_portal.karma = KARMA['close_all']
self.post.sudo(self.user_portal).close(None)
def test_deactivate_post_crash(self):
with self.assertRaises(KarmaError):
self.post.sudo(self.user_portal).write({'active': False})
def test_deactivate_post_own(self):
self.post.create_uid.karma = KARMA['unlink_own']
self.post.write({'active': False})
def test_deactivate_post_all(self):
self.user_portal.karma = KARMA['unlink_all']
self.post.sudo(self.user_portal).write({'active': False})
def test_unlink_post_crash(self):
with self.assertRaises(KarmaError):
self.post.sudo(self.user_portal).unlink()
def test_unlink_post_own(self):
self.post.create_uid.karma = KARMA['unlink_own']
self.post.unlink()
def test_unlink_post_all(self):
self.user_portal.karma = KARMA['unlink_all']
self.post.sudo(self.user_portal).unlink()
| agpl-3.0 |
tseaver/google-cloud-python | logging/tests/unit/test_entries.py | 4 | 26350 |
# Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import mock
class Test_logger_name_from_path(unittest.TestCase):
def _call_fut(self, path):
from google.cloud.logging.entries import logger_name_from_path
return logger_name_from_path(path)
def test_w_simple_name(self):
LOGGER_NAME = "LOGGER_NAME"
PROJECT = "my-project-1234"
PATH = "projects/%s/logs/%s" % (PROJECT, LOGGER_NAME)
logger_name = self._call_fut(PATH)
self.assertEqual(logger_name, LOGGER_NAME)
def test_w_name_w_all_extras(self):
LOGGER_NAME = "LOGGER_NAME-part.one~part.two%part-three"
PROJECT = "my-project-1234"
PATH = "projects/%s/logs/%s" % (PROJECT, LOGGER_NAME)
logger_name = self._call_fut(PATH)
self.assertEqual(logger_name, LOGGER_NAME)
class Test__int_or_none(unittest.TestCase):
def _call_fut(self, value):
from google.cloud.logging.entries import _int_or_none
return _int_or_none(value)
def test_w_none(self):
self.assertIsNone(self._call_fut(None))
def test_w_int(self):
self.assertEqual(self._call_fut(123), 123)
def test_w_str(self):
self.assertEqual(self._call_fut("123"), 123)
class TestLogEntry(unittest.TestCase):
PROJECT = "PROJECT"
LOGGER_NAME = "LOGGER_NAME"
@staticmethod
def _get_target_class():
from google.cloud.logging.entries import LogEntry
return LogEntry
def _make_one(self, *args, **kw):
return self._get_target_class()(*args, **kw)
def test_ctor_defaults(self):
from google.cloud.logging.entries import _GLOBAL_RESOURCE
entry = self._make_one()
self.assertIsNone(entry.log_name)
self.assertIsNone(entry.logger)
self.assertIsNone(entry.labels)
self.assertIsNone(entry.insert_id)
self.assertIsNone(entry.severity)
self.assertIsNone(entry.http_request)
self.assertIsNone(entry.timestamp)
self.assertIs(entry.resource, _GLOBAL_RESOURCE)
self.assertIsNone(entry.trace)
self.assertIsNone(entry.span_id)
self.assertIsNone(entry.trace_sampled)
self.assertIsNone(entry.source_location)
self.assertIsNone(entry.operation)
self.assertIsNone(entry.payload)
def test_ctor_explicit(self):
import datetime
from google.cloud.logging.resource import Resource
LOG_NAME = "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME)
IID = "IID"
TIMESTAMP = datetime.datetime.now()
LABELS = {"foo": "bar", "baz": "qux"}
SEVERITY = "CRITICAL"
METHOD = "POST"
URI = "https://api.example.com/endpoint"
STATUS = "500"
REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS}
resource = Resource(type="global", labels={})
TRACE = "12345678-1234-5678-1234-567812345678"
SPANID = "000000000000004a"
FILE = "my_file.py"
LINE_NO = 123
FUNCTION = "my_function"
SOURCE_LOCATION = {"file": FILE, "line": LINE_NO, "function": FUNCTION}
OP_ID = "OP_ID"
PRODUCER = "PRODUCER"
OPERATION = {"id": OP_ID, "producer": PRODUCER, "first": True, "last": False}
logger = _Logger(self.LOGGER_NAME, self.PROJECT)
entry = self._make_one(
log_name=LOG_NAME,
logger=logger,
insert_id=IID,
timestamp=TIMESTAMP,
labels=LABELS,
severity=SEVERITY,
http_request=REQUEST,
resource=resource,
trace=TRACE,
span_id=SPANID,
trace_sampled=True,
source_location=SOURCE_LOCATION,
operation=OPERATION,
)
self.assertEqual(entry.log_name, LOG_NAME)
self.assertIs(entry.logger, logger)
self.assertEqual(entry.insert_id, IID)
self.assertEqual(entry.timestamp, TIMESTAMP)
self.assertEqual(entry.labels, LABELS)
self.assertEqual(entry.severity, SEVERITY)
self.assertEqual(entry.http_request["requestMethod"], METHOD)
self.assertEqual(entry.http_request["requestUrl"], URI)
self.assertEqual(entry.http_request["status"], STATUS)
self.assertEqual(entry.resource, resource)
self.assertEqual(entry.trace, TRACE)
self.assertEqual(entry.span_id, SPANID)
self.assertTrue(entry.trace_sampled)
source_location = entry.source_location
self.assertEqual(source_location["file"], FILE)
self.assertEqual(source_location["line"], LINE_NO)
self.assertEqual(source_location["function"], FUNCTION)
self.assertEqual(entry.operation, OPERATION)
self.assertIsNone(entry.payload)
def test_from_api_repr_missing_data_no_loggers(self):
client = _Client(self.PROJECT)
LOG_NAME = "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME)
API_REPR = {"logName": LOG_NAME}
klass = self._get_target_class()
entry = klass.from_api_repr(API_REPR, client)
self.assertEqual(entry.log_name, LOG_NAME)
logger = entry.logger
self.assertIsInstance(logger, _Logger)
self.assertEqual(logger.name, self.LOGGER_NAME)
self.assertIsNone(entry.insert_id)
self.assertIsNone(entry.timestamp)
self.assertIsNone(entry.severity)
self.assertIsNone(entry.http_request)
self.assertIsNone(entry.trace)
self.assertIsNone(entry.span_id)
self.assertIsNone(entry.trace_sampled)
self.assertIsNone(entry.source_location)
self.assertIsNone(entry.operation)
self.assertIs(logger.client, client)
self.assertIsNone(entry.payload)
def test_from_api_repr_w_loggers_no_logger_match(self):
from datetime import datetime
from google.cloud._helpers import UTC
from google.cloud.logging.resource import Resource
klass = self._get_target_class()
client = _Client(self.PROJECT)
SEVERITY = "CRITICAL"
IID = "IID"
NOW = datetime.utcnow().replace(tzinfo=UTC)
TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW)
LOG_NAME = "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME)
LABELS = {"foo": "bar", "baz": "qux"}
METHOD = "POST"
URI = "https://api.example.com/endpoint"
RESOURCE = Resource(
type="gae_app",
labels={
"type": "gae_app",
"labels": {"module_id": "default", "version": "test"},
},
)
STATUS = "500"
TRACE = "12345678-1234-5678-1234-567812345678"
SPANID = "000000000000004a"
FILE = "my_file.py"
LINE_NO = 123
FUNCTION = "my_function"
SOURCE_LOCATION = {"file": FILE, "line": str(LINE_NO), "function": FUNCTION}
OP_ID = "OP_ID"
PRODUCER = "PRODUCER"
OPERATION = {"id": OP_ID, "producer": PRODUCER, "first": True, "last": False}
API_REPR = {
"logName": LOG_NAME,
"insertId": IID,
"timestamp": TIMESTAMP,
"labels": LABELS,
"severity": SEVERITY,
"httpRequest": {
"requestMethod": METHOD,
"requestUrl": URI,
"status": STATUS,
},
"resource": RESOURCE._to_dict(),
"trace": TRACE,
"spanId": SPANID,
"traceSampled": True,
"sourceLocation": SOURCE_LOCATION,
"operation": OPERATION,
}
loggers = {}
entry = klass.from_api_repr(API_REPR, client, loggers=loggers)
self.assertEqual(entry.log_name, LOG_NAME)
logger = entry.logger
self.assertIsInstance(logger, _Logger)
self.assertEqual(logger.name, self.LOGGER_NAME)
self.assertEqual(entry.insert_id, IID)
self.assertEqual(entry.timestamp, NOW)
self.assertIsNone(entry.received_timestamp)
self.assertEqual(entry.labels, LABELS)
self.assertEqual(entry.severity, SEVERITY)
self.assertEqual(entry.http_request["requestMethod"], METHOD)
self.assertEqual(entry.http_request["requestUrl"], URI)
self.assertEqual(entry.http_request["status"], STATUS)
self.assertIs(logger.client, client)
self.assertEqual(logger.name, self.LOGGER_NAME)
self.assertEqual(loggers, {LOG_NAME: logger})
self.assertEqual(entry.resource, RESOURCE)
self.assertEqual(entry.trace, TRACE)
self.assertEqual(entry.span_id, SPANID)
self.assertTrue(entry.trace_sampled)
source_location = entry.source_location
self.assertEqual(source_location["file"], FILE)
self.assertEqual(source_location["line"], LINE_NO)
self.assertEqual(source_location["function"], FUNCTION)
self.assertEqual(entry.operation, OPERATION)
self.assertIsNone(entry.payload)
def test_from_api_repr_w_loggers_w_logger_match(self):
from datetime import datetime
from datetime import timedelta
from google.cloud._helpers import UTC
client = _Client(self.PROJECT)
IID = "IID"
NOW = datetime.utcnow().replace(tzinfo=UTC)
LATER = NOW + timedelta(seconds=1)
TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW)
RECEIVED = _datetime_to_rfc3339_w_nanos(LATER)
LOG_NAME = "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME)
LABELS = {"foo": "bar", "baz": "qux"}
TRACE = "12345678-1234-5678-1234-567812345678"
SPANID = "000000000000004a"
FILE = "my_file.py"
LINE_NO = 123
FUNCTION = "my_function"
SOURCE_LOCATION = {"file": FILE, "line": str(LINE_NO), "function": FUNCTION}
OP_ID = "OP_ID"
PRODUCER = "PRODUCER"
OPERATION = {"id": OP_ID, "producer": PRODUCER, "first": True, "last": False}
API_REPR = {
"logName": LOG_NAME,
"insertId": IID,
"timestamp": TIMESTAMP,
"receiveTimestamp": RECEIVED,
"labels": LABELS,
"trace": TRACE,
"spanId": SPANID,
"traceSampled": True,
"sourceLocation": SOURCE_LOCATION,
"operation": OPERATION,
}
LOGGER = object()
loggers = {LOG_NAME: LOGGER}
klass = self._get_target_class()
entry = klass.from_api_repr(API_REPR, client, loggers=loggers)
self.assertEqual(entry.log_name, LOG_NAME)
self.assertIs(entry.logger, LOGGER)
self.assertEqual(entry.insert_id, IID)
self.assertEqual(entry.timestamp, NOW)
self.assertEqual(entry.received_timestamp, LATER)
self.assertEqual(entry.labels, LABELS)
self.assertEqual(entry.trace, TRACE)
self.assertEqual(entry.span_id, SPANID)
self.assertTrue(entry.trace_sampled)
source_location = entry.source_location
self.assertEqual(source_location["file"], FILE)
self.assertEqual(source_location["line"], LINE_NO)
self.assertEqual(source_location["function"], FUNCTION)
self.assertEqual(entry.operation, OPERATION)
self.assertIsNone(entry.payload)
def test_to_api_repr_w_source_location_no_line(self):
from google.cloud.logging.logger import _GLOBAL_RESOURCE
LOG_NAME = "test.log"
FILE = "my_file.py"
FUNCTION = "my_function"
SOURCE_LOCATION = {"file": FILE, "function": FUNCTION}
entry = self._make_one(log_name=LOG_NAME, source_location=SOURCE_LOCATION)
expected = {
"logName": LOG_NAME,
"resource": _GLOBAL_RESOURCE._to_dict(),
"sourceLocation": {"file": FILE, "line": "0", "function": FUNCTION},
}
self.assertEqual(entry.to_api_repr(), expected)
def test_to_api_repr_explicit(self):
import datetime
from google.cloud.logging.resource import Resource
from google.cloud._helpers import _datetime_to_rfc3339
LOG_NAME = "test.log"
LABELS = {"foo": "bar", "baz": "qux"}
IID = "IID"
SEVERITY = "CRITICAL"
METHOD = "POST"
URI = "https://api.example.com/endpoint"
STATUS = "500"
REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS}
TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999)
RESOURCE = Resource(
type="gae_app", labels={"module_id": "default", "version_id": "test"}
)
TRACE = "12345678-1234-5678-1234-567812345678"
SPANID = "000000000000004a"
FILE = "my_file.py"
LINE = 123
FUNCTION = "my_function"
SOURCE_LOCATION = {"file": FILE, "line": LINE, "function": FUNCTION}
OP_ID = "OP_ID"
PRODUCER = "PRODUCER"
OPERATION = {"id": OP_ID, "producer": PRODUCER, "first": True, "last": False}
expected = {
"logName": LOG_NAME,
"labels": LABELS,
"insertId": IID,
"severity": SEVERITY,
"httpRequest": REQUEST,
"timestamp": _datetime_to_rfc3339(TIMESTAMP),
"resource": RESOURCE._to_dict(),
"trace": TRACE,
"spanId": SPANID,
"traceSampled": True,
"sourceLocation": {"file": FILE, "line": str(LINE), "function": FUNCTION},
"operation": OPERATION,
}
entry = self._make_one(
log_name=LOG_NAME,
labels=LABELS,
insert_id=IID,
severity=SEVERITY,
http_request=REQUEST,
timestamp=TIMESTAMP,
resource=RESOURCE,
trace=TRACE,
span_id=SPANID,
trace_sampled=True,
source_location=SOURCE_LOCATION,
operation=OPERATION,
)
self.assertEqual(entry.to_api_repr(), expected)
class TestTextEntry(unittest.TestCase):
PROJECT = "PROJECT"
LOGGER_NAME = "LOGGER_NAME"
@staticmethod
def _get_target_class():
from google.cloud.logging.entries import TextEntry
return TextEntry
def _make_one(self, *args, **kw):
return self._get_target_class()(*args, **kw)
def test_to_api_repr_defaults(self):
from google.cloud.logging.logger import _GLOBAL_RESOURCE
LOG_NAME = "test.log"
TEXT = "TESTING"
entry = self._make_one(log_name=LOG_NAME, payload=TEXT)
expected = {
"logName": LOG_NAME,
"textPayload": TEXT,
"resource": _GLOBAL_RESOURCE._to_dict(),
}
self.assertEqual(entry.to_api_repr(), expected)
def test_to_api_repr_explicit(self):
import datetime
from google.cloud.logging.resource import Resource
from google.cloud._helpers import _datetime_to_rfc3339
LOG_NAME = "test.log"
TEXT = "This is the entry text"
LABELS = {"foo": "bar", "baz": "qux"}
IID = "IID"
SEVERITY = "CRITICAL"
METHOD = "POST"
URI = "https://api.example.com/endpoint"
STATUS = "500"
REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS}
TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999)
RESOURCE = Resource(
type="gae_app", labels={"module_id": "default", "version_id": "test"}
)
TRACE = "12345678-1234-5678-1234-567812345678"
SPANID = "000000000000004a"
FILE = "my_file.py"
LINE = 123
FUNCTION = "my_function"
SOURCE_LOCATION = {"file": FILE, "line": LINE, "function": FUNCTION}
OP_ID = "OP_ID"
PRODUCER = "PRODUCER"
OPERATION = {"id": OP_ID, "producer": PRODUCER, "first": True, "last": False}
expected = {
"logName": LOG_NAME,
"textPayload": TEXT,
"labels": LABELS,
"insertId": IID,
"severity": SEVERITY,
"httpRequest": REQUEST,
"timestamp": _datetime_to_rfc3339(TIMESTAMP),
"resource": RESOURCE._to_dict(),
"trace": TRACE,
"spanId": SPANID,
"traceSampled": True,
"sourceLocation": {"file": FILE, "line": str(LINE), "function": FUNCTION},
"operation": OPERATION,
}
entry = self._make_one(
log_name=LOG_NAME,
payload=TEXT,
labels=LABELS,
insert_id=IID,
severity=SEVERITY,
http_request=REQUEST,
timestamp=TIMESTAMP,
resource=RESOURCE,
trace=TRACE,
span_id=SPANID,
trace_sampled=True,
source_location=SOURCE_LOCATION,
operation=OPERATION,
)
self.assertEqual(entry.to_api_repr(), expected)
class TestStructEntry(unittest.TestCase):
PROJECT = "PROJECT"
LOGGER_NAME = "LOGGER_NAME"
@staticmethod
def _get_target_class():
from google.cloud.logging.entries import StructEntry
return StructEntry
def _make_one(self, *args, **kw):
return self._get_target_class()(*args, **kw)
def test_to_api_repr_defaults(self):
from google.cloud.logging.logger import _GLOBAL_RESOURCE
LOG_NAME = "test.log"
JSON_PAYLOAD = {"key": "value"}
entry = self._make_one(log_name=LOG_NAME, payload=JSON_PAYLOAD)
expected = {
"logName": LOG_NAME,
"jsonPayload": JSON_PAYLOAD,
"resource": _GLOBAL_RESOURCE._to_dict(),
}
self.assertEqual(entry.to_api_repr(), expected)
def test_to_api_repr_explicit(self):
import datetime
from google.cloud.logging.resource import Resource
from google.cloud._helpers import _datetime_to_rfc3339
LOG_NAME = "test.log"
JSON_PAYLOAD = {"key": "value"}
LABELS = {"foo": "bar", "baz": "qux"}
IID = "IID"
SEVERITY = "CRITICAL"
METHOD = "POST"
URI = "https://api.example.com/endpoint"
STATUS = "500"
REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS}
TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999)
RESOURCE = Resource(
type="gae_app", labels={"module_id": "default", "version_id": "test"}
)
TRACE = "12345678-1234-5678-1234-567812345678"
SPANID = "000000000000004a"
FILE = "my_file.py"
LINE = 123
FUNCTION = "my_function"
SOURCE_LOCATION = {"file": FILE, "line": LINE, "function": FUNCTION}
OP_ID = "OP_ID"
PRODUCER = "PRODUCER"
OPERATION = {"id": OP_ID, "producer": PRODUCER, "first": True, "last": False}
expected = {
"logName": LOG_NAME,
"jsonPayload": JSON_PAYLOAD,
"labels": LABELS,
"insertId": IID,
"severity": SEVERITY,
"httpRequest": REQUEST,
"timestamp": _datetime_to_rfc3339(TIMESTAMP),
"resource": RESOURCE._to_dict(),
"trace": TRACE,
"spanId": SPANID,
"traceSampled": True,
"sourceLocation": {"file": FILE, "line": str(LINE), "function": FUNCTION},
"operation": OPERATION,
}
entry = self._make_one(
log_name=LOG_NAME,
payload=JSON_PAYLOAD,
labels=LABELS,
insert_id=IID,
severity=SEVERITY,
http_request=REQUEST,
timestamp=TIMESTAMP,
resource=RESOURCE,
trace=TRACE,
span_id=SPANID,
trace_sampled=True,
source_location=SOURCE_LOCATION,
operation=OPERATION,
)
self.assertEqual(entry.to_api_repr(), expected)
class TestProtobufEntry(unittest.TestCase):
PROJECT = "PROJECT"
LOGGER_NAME = "LOGGER_NAME"
@staticmethod
def _get_target_class():
from google.cloud.logging.entries import ProtobufEntry
return ProtobufEntry
def _make_one(self, *args, **kw):
return self._get_target_class()(*args, **kw)
def test_constructor_basic(self):
payload = {"foo": "bar"}
pb_entry = self._make_one(payload=payload, logger=mock.sentinel.logger)
self.assertIs(pb_entry.payload, payload)
self.assertIsNone(pb_entry.payload_pb)
self.assertIs(pb_entry.payload_json, payload)
self.assertIs(pb_entry.logger, mock.sentinel.logger)
self.assertIsNone(pb_entry.insert_id)
self.assertIsNone(pb_entry.timestamp)
self.assertIsNone(pb_entry.labels)
self.assertIsNone(pb_entry.severity)
self.assertIsNone(pb_entry.http_request)
self.assertIsNone(pb_entry.trace)
self.assertIsNone(pb_entry.span_id)
self.assertIsNone(pb_entry.trace_sampled)
self.assertIsNone(pb_entry.source_location)
def test_constructor_with_any(self):
from google.protobuf.any_pb2 import Any
payload = Any()
pb_entry = self._make_one(payload=payload, logger=mock.sentinel.logger)
self.assertIs(pb_entry.payload, payload)
self.assertIs(pb_entry.payload_pb, payload)
self.assertIsNone(pb_entry.payload_json)
self.assertIs(pb_entry.logger, mock.sentinel.logger)
self.assertIsNone(pb_entry.insert_id)
self.assertIsNone(pb_entry.timestamp)
self.assertIsNone(pb_entry.labels)
self.assertIsNone(pb_entry.severity)
self.assertIsNone(pb_entry.http_request)
self.assertIsNone(pb_entry.trace)
self.assertIsNone(pb_entry.span_id)
self.assertIsNone(pb_entry.trace_sampled)
self.assertIsNone(pb_entry.source_location)
def test_parse_message(self):
import json
from google.protobuf.json_format import MessageToJson
from google.protobuf.struct_pb2 import Struct, Value
message = Struct(fields={"foo": Value(bool_value=False)})
with_true = Struct(fields={"foo": Value(bool_value=True)})
payload = json.loads(MessageToJson(with_true))
entry = self._make_one(payload=payload, logger=mock.sentinel.logger)
entry.parse_message(message)
self.assertTrue(message.fields["foo"])
def test_to_api_repr_proto_defaults(self):
from google.protobuf.json_format import MessageToDict
from google.cloud.logging.logger import _GLOBAL_RESOURCE
from google.protobuf.struct_pb2 import Struct
from google.protobuf.struct_pb2 import Value
LOG_NAME = "test.log"
message = Struct(fields={"foo": Value(bool_value=True)})
entry = self._make_one(log_name=LOG_NAME, payload=message)
expected = {
"logName": LOG_NAME,
"protoPayload": MessageToDict(message),
"resource": _GLOBAL_RESOURCE._to_dict(),
}
self.assertEqual(entry.to_api_repr(), expected)
def test_to_api_repr_proto_explicit(self):
import datetime
from google.protobuf.json_format import MessageToDict
from google.cloud.logging.resource import Resource
from google.cloud._helpers import _datetime_to_rfc3339
from google.protobuf.struct_pb2 import Struct
from google.protobuf.struct_pb2 import Value
LOG_NAME = "test.log"
message = Struct(fields={"foo": Value(bool_value=True)})
LABELS = {"foo": "bar", "baz": "qux"}
IID = "IID"
SEVERITY = "CRITICAL"
METHOD = "POST"
URI = "https://api.example.com/endpoint"
STATUS = "500"
REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS}
TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999)
RESOURCE = Resource(
type="gae_app", labels={"module_id": "default", "version_id": "test"}
)
TRACE = "12345678-1234-5678-1234-567812345678"
SPANID = "000000000000004a"
FILE = "my_file.py"
LINE = 123
FUNCTION = "my_function"
SOURCE_LOCATION = {"file": FILE, "line": LINE, "function": FUNCTION}
OP_ID = "OP_ID"
PRODUCER = "PRODUCER"
OPERATION = {"id": OP_ID, "producer": PRODUCER, "first": True, "last": False}
expected = {
"logName": LOG_NAME,
"protoPayload": MessageToDict(message),
"labels": LABELS,
"insertId": IID,
"severity": SEVERITY,
"httpRequest": REQUEST,
"timestamp": _datetime_to_rfc3339(TIMESTAMP),
"resource": RESOURCE._to_dict(),
"trace": TRACE,
"spanId": SPANID,
"traceSampled": True,
"sourceLocation": {"file": FILE, "line": str(LINE), "function": FUNCTION},
"operation": OPERATION,
}
entry = self._make_one(
log_name=LOG_NAME,
payload=message,
labels=LABELS,
insert_id=IID,
severity=SEVERITY,
http_request=REQUEST,
timestamp=TIMESTAMP,
resource=RESOURCE,
trace=TRACE,
span_id=SPANID,
trace_sampled=True,
source_location=SOURCE_LOCATION,
operation=OPERATION,
)
self.assertEqual(entry.to_api_repr(), expected)
def _datetime_to_rfc3339_w_nanos(value):
from google.cloud._helpers import _RFC3339_NO_FRACTION
no_fraction = value.strftime(_RFC3339_NO_FRACTION)
return "%s.%09dZ" % (no_fraction, value.microsecond * 1000)
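# Quick sanity sketch (not part of the test module; assumes
# _RFC3339_NO_FRACTION is '%Y-%m-%dT%H:%M:%S'): the helper pads the
# microsecond value to nine digits, i.e. nanosecond precision.
#
#     >>> import datetime
#     >>> value = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999)
#     >>> '%s.%09dZ' % (value.strftime('%Y-%m-%dT%H:%M:%S'), value.microsecond * 1000)
#     '2016-12-31T00:01:02.999999000Z'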
class _Logger(object):
def __init__(self, name, client):
self.name = name
self.client = client
class _Client(object):
def __init__(self, project):
self.project = project
def logger(self, name):
return _Logger(name, self)
| apache-2.0 |
takeshineshiro/heat | heat/engine/parameters.py | 2 | 20719 |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import itertools
from oslo_serialization import jsonutils
from oslo_utils import encodeutils
from oslo_utils import strutils
import six
from heat.common import exception
from heat.common.i18n import _
from heat.engine import constraints as constr
PARAMETER_KEYS = (
TYPE, DEFAULT, NO_ECHO, ALLOWED_VALUES, ALLOWED_PATTERN,
MAX_LENGTH, MIN_LENGTH, MAX_VALUE, MIN_VALUE,
DESCRIPTION, CONSTRAINT_DESCRIPTION, LABEL
) = (
'Type', 'Default', 'NoEcho', 'AllowedValues', 'AllowedPattern',
'MaxLength', 'MinLength', 'MaxValue', 'MinValue',
'Description', 'ConstraintDescription', 'Label'
)
class Schema(constr.Schema):
'''Parameter schema.'''
KEYS = (
TYPE, DESCRIPTION, DEFAULT, SCHEMA, CONSTRAINTS, HIDDEN, LABEL
) = (
'Type', 'Description', 'Default', 'Schema', 'Constraints', 'NoEcho',
'Label'
)
PARAMETER_KEYS = PARAMETER_KEYS
# For Parameters the type name for Schema.LIST is CommaDelimitedList
# and the type name for Schema.MAP is Json
TYPES = (
STRING, NUMBER, LIST, MAP, BOOLEAN,
) = (
'String', 'Number', 'CommaDelimitedList', 'Json', 'Boolean',
)
def __init__(self, data_type, description=None, default=None, schema=None,
constraints=None, hidden=False, label=None):
super(Schema, self).__init__(data_type=data_type,
description=description,
default=default,
schema=schema,
required=default is None,
constraints=constraints,
label=label)
self.hidden = hidden
# The Schema class validates a list default assuming it is already a list.
# Because the parameters Schema class also accepts a comma-delimited string
# for list types, such a default has to be parsed into a list first so that
# validation works.
def _validate_default(self, context):
if self.default is not None:
default_value = self.default
if self.type == self.LIST and not isinstance(self.default, list):
try:
default_value = self.default.split(',')
except (KeyError, AttributeError) as err:
raise exception.InvalidSchemaError(
message=_('Default must be a comma-delimited list '
'string: %s') % err)
elif self.type == self.LIST and isinstance(self.default, list):
default_value = [(six.text_type(x))
for x in self.default]
try:
self.validate_constraints(default_value, context,
[constr.CustomConstraint])
except (ValueError, TypeError,
exception.StackValidationFailed) as exc:
raise exception.InvalidSchemaError(
message=_('Invalid default %(default)s (%(exc)s)') %
dict(default=self.default, exc=exc))
def set_default(self, default=None):
super(Schema, self).set_default(default)
self.required = default is None
@staticmethod
def get_num(key, context):
val = context.get(key)
if val is not None:
val = Schema.str_to_num(val)
return val
@staticmethod
def _check_dict(schema_dict, allowed_keys, entity):
if not isinstance(schema_dict, dict):
raise exception.InvalidSchemaError(
message=_("Invalid %s, expected a mapping") % entity)
for key in schema_dict:
if key not in allowed_keys:
raise exception.InvalidSchemaError(
message=_("Invalid key '%(key)s' for %(entity)s") % {
"key": key, "entity": entity})
@classmethod
def _validate_dict(cls, param_name, schema_dict):
cls._check_dict(schema_dict,
cls.PARAMETER_KEYS,
"parameter (%s)" % param_name)
if cls.TYPE not in schema_dict:
raise exception.InvalidSchemaError(
message=_("Missing parameter type for parameter: %s") %
param_name)
@classmethod
def from_dict(cls, param_name, schema_dict):
"""
Return a Parameter Schema object from a legacy schema dictionary.
:param param_name: name of the parameter owning the schema; used
for more verbose logging
:type param_name: str
"""
cls._validate_dict(param_name, schema_dict)
def constraints():
desc = schema_dict.get(CONSTRAINT_DESCRIPTION)
if MIN_VALUE in schema_dict or MAX_VALUE in schema_dict:
yield constr.Range(Schema.get_num(MIN_VALUE, schema_dict),
Schema.get_num(MAX_VALUE, schema_dict),
desc)
if MIN_LENGTH in schema_dict or MAX_LENGTH in schema_dict:
yield constr.Length(Schema.get_num(MIN_LENGTH, schema_dict),
Schema.get_num(MAX_LENGTH, schema_dict),
desc)
if ALLOWED_VALUES in schema_dict:
yield constr.AllowedValues(schema_dict[ALLOWED_VALUES], desc)
if ALLOWED_PATTERN in schema_dict:
yield constr.AllowedPattern(schema_dict[ALLOWED_PATTERN], desc)
# make update_allowed true by default on TemplateResources
# as the template should deal with this.
return cls(schema_dict[TYPE],
description=schema_dict.get(DESCRIPTION),
default=schema_dict.get(DEFAULT),
constraints=list(constraints()),
hidden=str(schema_dict.get(NO_ECHO,
'false')).lower() == 'true',
label=schema_dict.get(LABEL))
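# Hedged illustration (not part of heat): a legacy CloudFormation-style
# parameter dict such as the one below would come through from_dict() as a
# Number schema carrying a Range constraint; the parameter name and values
# are made up for the example.
#
#     schema = Schema.from_dict('instance_count', {
#         'Type': 'Number',
#         'Default': '1',
#         'MinValue': '1',
#         'MaxValue': '10',
#         'Description': 'How many instances to boot',
#     })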
def validate_value(self, value, context=None):
super(Schema, self).validate_constraints(value, context)
def __getitem__(self, key):
if key == self.TYPE:
return self.type
if key == self.HIDDEN:
return self.hidden
else:
return super(Schema, self).__getitem__(key)
@six.python_2_unicode_compatible
class Parameter(object):
'''A template parameter.'''
def __new__(cls, name, schema, value=None):
'''Create a new Parameter of the appropriate type.'''
if cls is not Parameter:
return super(Parameter, cls).__new__(cls)
# Check for fully-fledged Schema objects
if not isinstance(schema, Schema):
schema = Schema.from_dict(name, schema)
if schema.type == schema.STRING:
ParamClass = StringParam
elif schema.type == schema.NUMBER:
ParamClass = NumberParam
elif schema.type == schema.LIST:
ParamClass = CommaDelimitedListParam
elif schema.type == schema.MAP:
ParamClass = JsonParam
elif schema.type == schema.BOOLEAN:
ParamClass = BooleanParam
else:
raise ValueError(_('Invalid Parameter type "%s"') % schema.type)
return ParamClass(name, schema, value)
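# Hedged illustration (not part of heat): constructing through the base class
# dispatches on the schema type, so the names below are only examples.
#
#     p = Parameter('count', Schema(Schema.NUMBER, default='3'))
#     isinstance(p, NumberParam)   # True
#     p.value()                    # 3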
def __init__(self, name, schema, value=None):
'''
Initialise the Parameter with a name, schema and optional user-supplied
value.
'''
self.name = name
self.schema = schema
self.user_value = value
self.user_default = None
def validate(self, validate_value=True, context=None):
"""Validates the parameter.
This method validates if the parameter's schema is valid,
and if the default value - if present - or the user-provided
value for the parameter comply with the schema.
"""
err_msg = _("Parameter '%(name)s' is invalid: %(exp)s")
try:
self.schema.validate(context)
if not validate_value:
return
if self.user_value is not None:
self._validate(self.user_value, context)
elif self.has_default():
self._validate(self.default(), context)
else:
raise exception.UserParameterMissing(key=self.name)
except exception.StackValidationFailed as ex:
msg = err_msg % dict(name=self.name, exp=six.text_type(ex))
raise exception.StackValidationFailed(message=msg)
except exception.InvalidSchemaError as ex:
msg = err_msg % dict(name=self.name, exp=six.text_type(ex))
raise exception.InvalidSchemaError(message=msg)
def value(self):
'''Get the parameter value, optionally sanitising it for output.'''
if self.user_value is not None:
return self.user_value
if self.has_default():
return self.default()
raise exception.UserParameterMissing(key=self.name)
def has_value(self):
'''Parameter has a user or default value.'''
return self.user_value is not None or self.has_default()
def hidden(self):
'''
Return whether the parameter should be sanitised in any output to
the user.
'''
return self.schema.hidden
def description(self):
'''Return the description of the parameter.'''
return self.schema.description or ''
def label(self):
'''Return the label or param name.'''
return self.schema.label or self.name
def has_default(self):
'''Return whether the parameter has a default value.'''
return (self.schema.default is not None or
self.user_default is not None)
def default(self):
'''Return the default value of the parameter.'''
if self.user_default is not None:
return self.user_default
return self.schema.default
def set_default(self, value):
self.user_default = value
def __str__(self):
'''Return a string representation of the parameter.'''
value = self.value()
if self.hidden():
return six.text_type('******')
else:
return six.text_type(value)
class NumberParam(Parameter):
'''A template parameter of type "Number".'''
def __int__(self):
'''Return an integer representation of the parameter.'''
return int(super(NumberParam, self).value())
def __float__(self):
'''Return a float representation of the parameter.'''
return float(super(NumberParam, self).value())
def _validate(self, val, context):
try:
Schema.str_to_num(val)
except ValueError as ex:
raise exception.StackValidationFailed(message=six.text_type(ex))
self.schema.validate_value(val, context)
def value(self):
return Schema.str_to_num(super(NumberParam, self).value())
class BooleanParam(Parameter):
'''A template parameter of type "Boolean".'''
def _validate(self, val, context):
try:
strutils.bool_from_string(val, strict=True)
except ValueError as ex:
raise exception.StackValidationFailed(message=six.text_type(ex))
self.schema.validate_value(val, context)
def value(self):
if self.user_value is not None:
raw_value = self.user_value
else:
raw_value = self.default()
return strutils.bool_from_string(str(raw_value), strict=True)
class StringParam(Parameter):
'''A template parameter of type "String".'''
def _validate(self, val, context):
self.schema.validate_value(val, context)
class CommaDelimitedListParam(Parameter, collections.Sequence):
'''A template parameter of type "CommaDelimitedList".'''
def __init__(self, name, schema, value=None):
super(CommaDelimitedListParam, self).__init__(name, schema, value)
if self.has_value():
if self.user_value is not None:
self.parsed = self.parse(self.user_value)
else:
self.parsed = self.parse(self.default())
else:
self.parsed = []
def parse(self, value):
# only parse when value is not already a list
if isinstance(value, list):
return [(six.text_type(x)) for x in value]
try:
if value is not None:
if value == '':
return []
return value.split(',')
except (KeyError, AttributeError) as err:
message = _('Value must be a comma-delimited list string: %s')
raise ValueError(message % six.text_type(err))
return value
def value(self):
if self.has_value():
return self.parsed
raise exception.UserParameterMissing(key=self.name)
def __len__(self):
'''Return the length of the list.'''
return len(self.parsed)
def __getitem__(self, index):
'''Return an item from the list.'''
return self.parsed[index]
def __str__(self):
if self.hidden():
return super(CommaDelimitedListParam, self).__str__()
return ",".join(self.value())
def _validate(self, val, context):
parsed = self.parse(val)
self.schema.validate_value(parsed, context)
class JsonParam(Parameter):
"""A template parameter whose value is a map or a list."""
def __init__(self, name, schema, value=None):
super(JsonParam, self).__init__(name, schema, value)
if self.has_value():
if self.user_value is not None:
self.parsed = self.parse(self.user_value)
else:
self.parsed = self.parse(self.default())
else:
self.parsed = {}
def parse(self, value):
try:
val = value
if not isinstance(val, six.string_types):
# turn off oslo_serialization's clever to_primitive()
val = jsonutils.dumps(val, default=None)
if val:
return jsonutils.loads(val)
except (ValueError, TypeError) as err:
message = _('Value must be valid JSON: %s') % err
raise ValueError(message)
return value
def value(self):
if self.has_value():
return self.parsed
raise exception.UserParameterMissing(key=self.name)
def __getitem__(self, key):
return self.parsed[key]
def __iter__(self):
return iter(self.parsed)
def __len__(self):
return len(self.parsed)
def __str__(self):
if self.hidden():
return super(JsonParam, self).__str__()
return encodeutils.safe_decode(jsonutils.dumps(self.value()))
def _validate(self, val, context):
val = self.parse(val)
self.schema.validate_value(val, context)
class Parameters(collections.Mapping):
'''
The parameters of a stack, with type checking, defaults &c. specified by
the stack's template.
'''
PSEUDO_PARAMETERS = (
PARAM_STACK_ID, PARAM_STACK_NAME, PARAM_REGION
) = (
'AWS::StackId', 'AWS::StackName', 'AWS::Region'
)
def __init__(self, stack_identifier, tmpl, user_params=None,
param_defaults=None):
'''
Create the parameter container for a stack from the stack name and
template, optionally setting the user-supplied parameter values.
'''
user_params = user_params or {}
param_defaults = param_defaults or {}
def user_parameter(schema_item):
name, schema = schema_item
return Parameter(name, schema,
user_params.get(name))
self.tmpl = tmpl
self.user_params = user_params
schemata = self.tmpl.param_schemata()
user_parameters = (user_parameter(si) for si in
six.iteritems(schemata))
pseudo_parameters = self._pseudo_parameters(stack_identifier)
self.params = dict((p.name,
p) for p in itertools.chain(pseudo_parameters,
user_parameters))
for pd in six.iterkeys(param_defaults):
if pd in self.params:
self.params[pd].set_default(param_defaults[pd])
def validate(self, validate_value=True, context=None):
'''
Validates all parameters.
This method validates if all user-provided parameters are actually
defined in the template, and if all parameters are valid.
'''
self._validate_tmpl_parameters()
self._validate_user_parameters()
for param in six.itervalues(self.params):
param.validate(validate_value, context)
def __contains__(self, key):
'''Return whether the specified parameter exists.'''
return key in self.params
def __iter__(self):
'''Return an iterator over the parameter names.'''
return iter(self.params)
def __len__(self):
'''Return the number of parameters defined.'''
return len(self.params)
def __getitem__(self, key):
'''Get a parameter value.'''
return self.params[key].value()
def map(self, func, filter_func=lambda p: True):
'''
Map the supplied function onto each Parameter (with an optional
filter function) and return the resulting dictionary.
'''
return dict((n, func(p))
for n, p in six.iteritems(self.params) if filter_func(p))
def set_stack_id(self, stack_identifier):
'''
Set the StackId pseudo parameter value
'''
if stack_identifier is not None:
self.params[self.PARAM_STACK_ID].schema.set_default(
stack_identifier.arn())
return True
return False
def _validate_user_parameters(self):
schemata = self.tmpl.param_schemata()
for param in self.user_params:
if param not in schemata:
raise exception.UnknownUserParameter(key=param)
def _validate_tmpl_parameters(self):
param = None
for key in six.iterkeys(self.tmpl.t):
if key == 'Parameters' or key == 'parameters':
param = key
break
if param is not None:
template_params = self.tmpl.t[key] or {}
for name, attrs in six.iteritems(template_params):
if not isinstance(attrs, dict):
raise exception.InvalidTemplateParameter(key=name)
def _pseudo_parameters(self, stack_identifier):
stack_id = (stack_identifier.arn()
if stack_identifier is not None else 'None')
stack_name = stack_identifier and stack_identifier.stack_name
yield Parameter(self.PARAM_STACK_ID,
Schema(Schema.STRING, _('Stack ID'),
default=str(stack_id)))
if stack_name:
yield Parameter(self.PARAM_STACK_NAME,
Schema(Schema.STRING, _('Stack Name'),
default=stack_name))
yield Parameter(self.PARAM_REGION,
Schema(Schema.STRING,
default='ap-southeast-1',
constraints=[
constr.AllowedValues(['us-east-1',
'us-west-1',
'us-west-2',
'sa-east-1',
'eu-west-1',
'ap-southeast-1',
'ap-northeast-1']
)]))
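# --- Illustrative sketch (editor addition, not part of the upstream module) ---
# Given a Parameters instance built from a real stack template, `map` can project
# the container into plain dictionaries; for example, collecting the values of
# all non-hidden parameters (the variable names here are hypothetical):
#
#     visible_values = params.map(lambda p: p.value(),
#                                 filter_func=lambda p: not p.hidden())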
|
apache-2.0
|
MattsFleaMarket/python-for-android
|
python3-alpha/python3-src/Lib/encodings/cp1253.py
|
272
|
13094
|
""" Python Character Mapping Codec cp1253 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1253.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='cp1253',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Table
decoding_table = (
'\x00' # 0x00 -> NULL
'\x01' # 0x01 -> START OF HEADING
'\x02' # 0x02 -> START OF TEXT
'\x03' # 0x03 -> END OF TEXT
'\x04' # 0x04 -> END OF TRANSMISSION
'\x05' # 0x05 -> ENQUIRY
'\x06' # 0x06 -> ACKNOWLEDGE
'\x07' # 0x07 -> BELL
'\x08' # 0x08 -> BACKSPACE
'\t' # 0x09 -> HORIZONTAL TABULATION
'\n' # 0x0A -> LINE FEED
'\x0b' # 0x0B -> VERTICAL TABULATION
'\x0c' # 0x0C -> FORM FEED
'\r' # 0x0D -> CARRIAGE RETURN
'\x0e' # 0x0E -> SHIFT OUT
'\x0f' # 0x0F -> SHIFT IN
'\x10' # 0x10 -> DATA LINK ESCAPE
'\x11' # 0x11 -> DEVICE CONTROL ONE
'\x12' # 0x12 -> DEVICE CONTROL TWO
'\x13' # 0x13 -> DEVICE CONTROL THREE
'\x14' # 0x14 -> DEVICE CONTROL FOUR
'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
'\x16' # 0x16 -> SYNCHRONOUS IDLE
'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
'\x18' # 0x18 -> CANCEL
'\x19' # 0x19 -> END OF MEDIUM
'\x1a' # 0x1A -> SUBSTITUTE
'\x1b' # 0x1B -> ESCAPE
'\x1c' # 0x1C -> FILE SEPARATOR
'\x1d' # 0x1D -> GROUP SEPARATOR
'\x1e' # 0x1E -> RECORD SEPARATOR
'\x1f' # 0x1F -> UNIT SEPARATOR
' ' # 0x20 -> SPACE
'!' # 0x21 -> EXCLAMATION MARK
'"' # 0x22 -> QUOTATION MARK
'#' # 0x23 -> NUMBER SIGN
'$' # 0x24 -> DOLLAR SIGN
'%' # 0x25 -> PERCENT SIGN
'&' # 0x26 -> AMPERSAND
"'" # 0x27 -> APOSTROPHE
'(' # 0x28 -> LEFT PARENTHESIS
')' # 0x29 -> RIGHT PARENTHESIS
'*' # 0x2A -> ASTERISK
'+' # 0x2B -> PLUS SIGN
',' # 0x2C -> COMMA
'-' # 0x2D -> HYPHEN-MINUS
'.' # 0x2E -> FULL STOP
'/' # 0x2F -> SOLIDUS
'0' # 0x30 -> DIGIT ZERO
'1' # 0x31 -> DIGIT ONE
'2' # 0x32 -> DIGIT TWO
'3' # 0x33 -> DIGIT THREE
'4' # 0x34 -> DIGIT FOUR
'5' # 0x35 -> DIGIT FIVE
'6' # 0x36 -> DIGIT SIX
'7' # 0x37 -> DIGIT SEVEN
'8' # 0x38 -> DIGIT EIGHT
'9' # 0x39 -> DIGIT NINE
':' # 0x3A -> COLON
';' # 0x3B -> SEMICOLON
'<' # 0x3C -> LESS-THAN SIGN
'=' # 0x3D -> EQUALS SIGN
'>' # 0x3E -> GREATER-THAN SIGN
'?' # 0x3F -> QUESTION MARK
'@' # 0x40 -> COMMERCIAL AT
'A' # 0x41 -> LATIN CAPITAL LETTER A
'B' # 0x42 -> LATIN CAPITAL LETTER B
'C' # 0x43 -> LATIN CAPITAL LETTER C
'D' # 0x44 -> LATIN CAPITAL LETTER D
'E' # 0x45 -> LATIN CAPITAL LETTER E
'F' # 0x46 -> LATIN CAPITAL LETTER F
'G' # 0x47 -> LATIN CAPITAL LETTER G
'H' # 0x48 -> LATIN CAPITAL LETTER H
'I' # 0x49 -> LATIN CAPITAL LETTER I
'J' # 0x4A -> LATIN CAPITAL LETTER J
'K' # 0x4B -> LATIN CAPITAL LETTER K
'L' # 0x4C -> LATIN CAPITAL LETTER L
'M' # 0x4D -> LATIN CAPITAL LETTER M
'N' # 0x4E -> LATIN CAPITAL LETTER N
'O' # 0x4F -> LATIN CAPITAL LETTER O
'P' # 0x50 -> LATIN CAPITAL LETTER P
'Q' # 0x51 -> LATIN CAPITAL LETTER Q
'R' # 0x52 -> LATIN CAPITAL LETTER R
'S' # 0x53 -> LATIN CAPITAL LETTER S
'T' # 0x54 -> LATIN CAPITAL LETTER T
'U' # 0x55 -> LATIN CAPITAL LETTER U
'V' # 0x56 -> LATIN CAPITAL LETTER V
'W' # 0x57 -> LATIN CAPITAL LETTER W
'X' # 0x58 -> LATIN CAPITAL LETTER X
'Y' # 0x59 -> LATIN CAPITAL LETTER Y
'Z' # 0x5A -> LATIN CAPITAL LETTER Z
'[' # 0x5B -> LEFT SQUARE BRACKET
'\\' # 0x5C -> REVERSE SOLIDUS
']' # 0x5D -> RIGHT SQUARE BRACKET
'^' # 0x5E -> CIRCUMFLEX ACCENT
'_' # 0x5F -> LOW LINE
'`' # 0x60 -> GRAVE ACCENT
'a' # 0x61 -> LATIN SMALL LETTER A
'b' # 0x62 -> LATIN SMALL LETTER B
'c' # 0x63 -> LATIN SMALL LETTER C
'd' # 0x64 -> LATIN SMALL LETTER D
'e' # 0x65 -> LATIN SMALL LETTER E
'f' # 0x66 -> LATIN SMALL LETTER F
'g' # 0x67 -> LATIN SMALL LETTER G
'h' # 0x68 -> LATIN SMALL LETTER H
'i' # 0x69 -> LATIN SMALL LETTER I
'j' # 0x6A -> LATIN SMALL LETTER J
'k' # 0x6B -> LATIN SMALL LETTER K
'l' # 0x6C -> LATIN SMALL LETTER L
'm' # 0x6D -> LATIN SMALL LETTER M
'n' # 0x6E -> LATIN SMALL LETTER N
'o' # 0x6F -> LATIN SMALL LETTER O
'p' # 0x70 -> LATIN SMALL LETTER P
'q' # 0x71 -> LATIN SMALL LETTER Q
'r' # 0x72 -> LATIN SMALL LETTER R
's' # 0x73 -> LATIN SMALL LETTER S
't' # 0x74 -> LATIN SMALL LETTER T
'u' # 0x75 -> LATIN SMALL LETTER U
'v' # 0x76 -> LATIN SMALL LETTER V
'w' # 0x77 -> LATIN SMALL LETTER W
'x' # 0x78 -> LATIN SMALL LETTER X
'y' # 0x79 -> LATIN SMALL LETTER Y
'z' # 0x7A -> LATIN SMALL LETTER Z
'{' # 0x7B -> LEFT CURLY BRACKET
'|' # 0x7C -> VERTICAL LINE
'}' # 0x7D -> RIGHT CURLY BRACKET
'~' # 0x7E -> TILDE
'\x7f' # 0x7F -> DELETE
'\u20ac' # 0x80 -> EURO SIGN
'\ufffe' # 0x81 -> UNDEFINED
'\u201a' # 0x82 -> SINGLE LOW-9 QUOTATION MARK
'\u0192' # 0x83 -> LATIN SMALL LETTER F WITH HOOK
'\u201e' # 0x84 -> DOUBLE LOW-9 QUOTATION MARK
'\u2026' # 0x85 -> HORIZONTAL ELLIPSIS
'\u2020' # 0x86 -> DAGGER
'\u2021' # 0x87 -> DOUBLE DAGGER
'\ufffe' # 0x88 -> UNDEFINED
'\u2030' # 0x89 -> PER MILLE SIGN
'\ufffe' # 0x8A -> UNDEFINED
'\u2039' # 0x8B -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK
'\ufffe' # 0x8C -> UNDEFINED
'\ufffe' # 0x8D -> UNDEFINED
'\ufffe' # 0x8E -> UNDEFINED
'\ufffe' # 0x8F -> UNDEFINED
'\ufffe' # 0x90 -> UNDEFINED
'\u2018' # 0x91 -> LEFT SINGLE QUOTATION MARK
'\u2019' # 0x92 -> RIGHT SINGLE QUOTATION MARK
'\u201c' # 0x93 -> LEFT DOUBLE QUOTATION MARK
'\u201d' # 0x94 -> RIGHT DOUBLE QUOTATION MARK
'\u2022' # 0x95 -> BULLET
'\u2013' # 0x96 -> EN DASH
'\u2014' # 0x97 -> EM DASH
'\ufffe' # 0x98 -> UNDEFINED
'\u2122' # 0x99 -> TRADE MARK SIGN
'\ufffe' # 0x9A -> UNDEFINED
'\u203a' # 0x9B -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
'\ufffe' # 0x9C -> UNDEFINED
'\ufffe' # 0x9D -> UNDEFINED
'\ufffe' # 0x9E -> UNDEFINED
'\ufffe' # 0x9F -> UNDEFINED
'\xa0' # 0xA0 -> NO-BREAK SPACE
'\u0385' # 0xA1 -> GREEK DIALYTIKA TONOS
'\u0386' # 0xA2 -> GREEK CAPITAL LETTER ALPHA WITH TONOS
'\xa3' # 0xA3 -> POUND SIGN
'\xa4' # 0xA4 -> CURRENCY SIGN
'\xa5' # 0xA5 -> YEN SIGN
'\xa6' # 0xA6 -> BROKEN BAR
'\xa7' # 0xA7 -> SECTION SIGN
'\xa8' # 0xA8 -> DIAERESIS
'\xa9' # 0xA9 -> COPYRIGHT SIGN
'\ufffe' # 0xAA -> UNDEFINED
'\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xac' # 0xAC -> NOT SIGN
'\xad' # 0xAD -> SOFT HYPHEN
'\xae' # 0xAE -> REGISTERED SIGN
'\u2015' # 0xAF -> HORIZONTAL BAR
'\xb0' # 0xB0 -> DEGREE SIGN
'\xb1' # 0xB1 -> PLUS-MINUS SIGN
'\xb2' # 0xB2 -> SUPERSCRIPT TWO
'\xb3' # 0xB3 -> SUPERSCRIPT THREE
'\u0384' # 0xB4 -> GREEK TONOS
'\xb5' # 0xB5 -> MICRO SIGN
'\xb6' # 0xB6 -> PILCROW SIGN
'\xb7' # 0xB7 -> MIDDLE DOT
'\u0388' # 0xB8 -> GREEK CAPITAL LETTER EPSILON WITH TONOS
'\u0389' # 0xB9 -> GREEK CAPITAL LETTER ETA WITH TONOS
'\u038a' # 0xBA -> GREEK CAPITAL LETTER IOTA WITH TONOS
'\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
'\u038c' # 0xBC -> GREEK CAPITAL LETTER OMICRON WITH TONOS
'\xbd' # 0xBD -> VULGAR FRACTION ONE HALF
'\u038e' # 0xBE -> GREEK CAPITAL LETTER UPSILON WITH TONOS
'\u038f' # 0xBF -> GREEK CAPITAL LETTER OMEGA WITH TONOS
'\u0390' # 0xC0 -> GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS
'\u0391' # 0xC1 -> GREEK CAPITAL LETTER ALPHA
'\u0392' # 0xC2 -> GREEK CAPITAL LETTER BETA
'\u0393' # 0xC3 -> GREEK CAPITAL LETTER GAMMA
'\u0394' # 0xC4 -> GREEK CAPITAL LETTER DELTA
'\u0395' # 0xC5 -> GREEK CAPITAL LETTER EPSILON
'\u0396' # 0xC6 -> GREEK CAPITAL LETTER ZETA
'\u0397' # 0xC7 -> GREEK CAPITAL LETTER ETA
'\u0398' # 0xC8 -> GREEK CAPITAL LETTER THETA
'\u0399' # 0xC9 -> GREEK CAPITAL LETTER IOTA
'\u039a' # 0xCA -> GREEK CAPITAL LETTER KAPPA
'\u039b' # 0xCB -> GREEK CAPITAL LETTER LAMDA
'\u039c' # 0xCC -> GREEK CAPITAL LETTER MU
'\u039d' # 0xCD -> GREEK CAPITAL LETTER NU
'\u039e' # 0xCE -> GREEK CAPITAL LETTER XI
'\u039f' # 0xCF -> GREEK CAPITAL LETTER OMICRON
'\u03a0' # 0xD0 -> GREEK CAPITAL LETTER PI
'\u03a1' # 0xD1 -> GREEK CAPITAL LETTER RHO
'\ufffe' # 0xD2 -> UNDEFINED
'\u03a3' # 0xD3 -> GREEK CAPITAL LETTER SIGMA
'\u03a4' # 0xD4 -> GREEK CAPITAL LETTER TAU
'\u03a5' # 0xD5 -> GREEK CAPITAL LETTER UPSILON
'\u03a6' # 0xD6 -> GREEK CAPITAL LETTER PHI
'\u03a7' # 0xD7 -> GREEK CAPITAL LETTER CHI
'\u03a8' # 0xD8 -> GREEK CAPITAL LETTER PSI
'\u03a9' # 0xD9 -> GREEK CAPITAL LETTER OMEGA
'\u03aa' # 0xDA -> GREEK CAPITAL LETTER IOTA WITH DIALYTIKA
'\u03ab' # 0xDB -> GREEK CAPITAL LETTER UPSILON WITH DIALYTIKA
'\u03ac' # 0xDC -> GREEK SMALL LETTER ALPHA WITH TONOS
'\u03ad' # 0xDD -> GREEK SMALL LETTER EPSILON WITH TONOS
'\u03ae' # 0xDE -> GREEK SMALL LETTER ETA WITH TONOS
'\u03af' # 0xDF -> GREEK SMALL LETTER IOTA WITH TONOS
'\u03b0' # 0xE0 -> GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND TONOS
'\u03b1' # 0xE1 -> GREEK SMALL LETTER ALPHA
'\u03b2' # 0xE2 -> GREEK SMALL LETTER BETA
'\u03b3' # 0xE3 -> GREEK SMALL LETTER GAMMA
'\u03b4' # 0xE4 -> GREEK SMALL LETTER DELTA
'\u03b5' # 0xE5 -> GREEK SMALL LETTER EPSILON
'\u03b6' # 0xE6 -> GREEK SMALL LETTER ZETA
'\u03b7' # 0xE7 -> GREEK SMALL LETTER ETA
'\u03b8' # 0xE8 -> GREEK SMALL LETTER THETA
'\u03b9' # 0xE9 -> GREEK SMALL LETTER IOTA
'\u03ba' # 0xEA -> GREEK SMALL LETTER KAPPA
'\u03bb' # 0xEB -> GREEK SMALL LETTER LAMDA
'\u03bc' # 0xEC -> GREEK SMALL LETTER MU
'\u03bd' # 0xED -> GREEK SMALL LETTER NU
'\u03be' # 0xEE -> GREEK SMALL LETTER XI
'\u03bf' # 0xEF -> GREEK SMALL LETTER OMICRON
'\u03c0' # 0xF0 -> GREEK SMALL LETTER PI
'\u03c1' # 0xF1 -> GREEK SMALL LETTER RHO
'\u03c2' # 0xF2 -> GREEK SMALL LETTER FINAL SIGMA
'\u03c3' # 0xF3 -> GREEK SMALL LETTER SIGMA
'\u03c4' # 0xF4 -> GREEK SMALL LETTER TAU
'\u03c5' # 0xF5 -> GREEK SMALL LETTER UPSILON
'\u03c6' # 0xF6 -> GREEK SMALL LETTER PHI
'\u03c7' # 0xF7 -> GREEK SMALL LETTER CHI
'\u03c8' # 0xF8 -> GREEK SMALL LETTER PSI
'\u03c9' # 0xF9 -> GREEK SMALL LETTER OMEGA
'\u03ca' # 0xFA -> GREEK SMALL LETTER IOTA WITH DIALYTIKA
'\u03cb' # 0xFB -> GREEK SMALL LETTER UPSILON WITH DIALYTIKA
'\u03cc' # 0xFC -> GREEK SMALL LETTER OMICRON WITH TONOS
'\u03cd' # 0xFD -> GREEK SMALL LETTER UPSILON WITH TONOS
'\u03ce' # 0xFE -> GREEK SMALL LETTER OMEGA WITH TONOS
'\ufffe' # 0xFF -> UNDEFINED
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
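# --- Illustrative round-trip sketch (editor addition, not part of the generated codec) ---
# The table above maps the Greek letters into the 0xC0-0xFF range; going through
# the codecs machinery by name exercises the same mapping.
if __name__ == '__main__':
    encoded = 'αβγ'.encode('cp1253')
    assert encoded == b'\xe1\xe2\xe3'
    assert encoded.decode('cp1253') == 'αβγ'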
|
apache-2.0
|
DeMille/emailhooks
|
django_nonrel/django/contrib/gis/geos/libgeos.py
|
91
|
5833
|
"""
This module houses the ctypes initialization procedures, as well
as the notice and error handler function callbacks (get called
when an error occurs in GEOS).
This module also houses GEOS Pointer utilities, including
get_pointer_arr(), and GEOM_PTR.
"""
import logging
import os
import re
from ctypes import c_char_p, Structure, CDLL, CFUNCTYPE, POINTER
from ctypes.util import find_library
from django.contrib.gis.geos.error import GEOSException
from django.core.exceptions import ImproperlyConfigured
logger = logging.getLogger('django.contrib.gis')
# Custom library path set?
try:
from django.conf import settings
lib_path = settings.GEOS_LIBRARY_PATH
except (AttributeError, EnvironmentError,
ImportError, ImproperlyConfigured):
lib_path = None
# Setting the appropriate names for the GEOS-C library.
if lib_path:
lib_names = None
elif os.name == 'nt':
# Windows NT libraries
lib_names = ['geos_c', 'libgeos_c-1']
elif os.name == 'posix':
# *NIX libraries
lib_names = ['geos_c', 'GEOS']
else:
raise ImportError('Unsupported OS "%s"' % os.name)
# Using the ctypes `find_library` utility to find the path to the GEOS
# shared library. This is better than manually specifying each library name
# and extension (e.g., libgeos_c.[so|so.1|dylib]).
if lib_names:
for lib_name in lib_names:
lib_path = find_library(lib_name)
if lib_path is not None: break
# No GEOS library could be found.
if lib_path is None:
raise ImportError('Could not find the GEOS library (tried "%s"). '
'Try setting GEOS_LIBRARY_PATH in your settings.' %
'", "'.join(lib_names))
# Getting the GEOS C library. The C interface (CDLL) is used for
# both *NIX and Windows.
# See the GEOS C API source code for more details on the library function calls:
# http://geos.refractions.net/ro/doxygen_docs/html/geos__c_8h-source.html
lgeos = CDLL(lib_path)
# The notice and error handler C function callback definitions.
# Supposed to mimic the GEOS message handler (C below):
# typedef void (*GEOSMessageHandler)(const char *fmt, ...);
NOTICEFUNC = CFUNCTYPE(None, c_char_p, c_char_p)
def notice_h(fmt, lst):
fmt, lst = fmt.decode(), lst.decode()
try:
warn_msg = fmt % lst
except:
warn_msg = fmt
logger.warn('GEOS_NOTICE: %s\n' % warn_msg)
notice_h = NOTICEFUNC(notice_h)
ERRORFUNC = CFUNCTYPE(None, c_char_p, c_char_p)
def error_h(fmt, lst):
fmt, lst = fmt.decode(), lst.decode()
try:
err_msg = fmt % lst
except:
err_msg = fmt
logger.error('GEOS_ERROR: %s\n' % err_msg)
error_h = ERRORFUNC(error_h)
#### GEOS Geometry C data structures, and utility functions. ####
# Opaque GEOS geometry structures, used for GEOM_PTR and CS_PTR
class GEOSGeom_t(Structure): pass
class GEOSPrepGeom_t(Structure): pass
class GEOSCoordSeq_t(Structure): pass
class GEOSContextHandle_t(Structure): pass
# Pointers to opaque GEOS geometry structures.
GEOM_PTR = POINTER(GEOSGeom_t)
PREPGEOM_PTR = POINTER(GEOSPrepGeom_t)
CS_PTR = POINTER(GEOSCoordSeq_t)
CONTEXT_PTR = POINTER(GEOSContextHandle_t)
# Used specifically by the GEOSGeom_createPolygon and GEOSGeom_createCollection
# GEOS routines
def get_pointer_arr(n):
"Gets a ctypes pointer array (of length `n`) for GEOSGeom_t opaque pointer."
GeomArr = GEOM_PTR * n
return GeomArr()
# Returns the string version of the GEOS library. Have to set the restype
# explicitly to c_char_p to ensure compatibility across 32 and 64-bit platforms.
geos_version = lgeos.GEOSversion
geos_version.argtypes = None
geos_version.restype = c_char_p
# Regular expression should be able to parse version strings such as
# '3.0.0rc4-CAPI-1.3.3', '3.0.0-CAPI-1.4.1', '3.4.0dev-CAPI-1.8.0' or '3.4.0dev-CAPI-1.8.0 r0'
version_regex = re.compile(
r'^(?P<version>(?P<major>\d+)\.(?P<minor>\d+)\.(?P<subminor>\d+))'
r'((rc(?P<release_candidate>\d+))|dev)?-CAPI-(?P<capi_version>\d+\.\d+\.\d+)( r\d+)?$'
)
def geos_version_info():
"""
Returns a dictionary containing the various version metadata parsed from
the GEOS version string, including the version number, whether the version
is a release candidate (and what number release candidate), and the C API
version.
"""
ver = geos_version().decode()
m = version_regex.match(ver)
if not m:
raise GEOSException('Could not parse version info string "%s"' % ver)
return dict((key, m.group(key)) for key in (
'version', 'release_candidate', 'capi_version', 'major', 'minor', 'subminor'))
# Version numbers and whether or not prepared geometry support is available.
_verinfo = geos_version_info()
GEOS_MAJOR_VERSION = int(_verinfo['major'])
GEOS_MINOR_VERSION = int(_verinfo['minor'])
GEOS_SUBMINOR_VERSION = int(_verinfo['subminor'])
del _verinfo
GEOS_VERSION = (GEOS_MAJOR_VERSION, GEOS_MINOR_VERSION, GEOS_SUBMINOR_VERSION)
GEOS_PREPARE = GEOS_VERSION >= (3, 1, 0)
if GEOS_PREPARE:
# Here we set up the prototypes for the initGEOS_r and finishGEOS_r
# routines. These functions aren't actually called until they are
# attached to a GEOS context handle -- this actually occurs in
# geos/prototypes/threadsafe.py.
lgeos.initGEOS_r.restype = CONTEXT_PTR
lgeos.finishGEOS_r.argtypes = [CONTEXT_PTR]
else:
# When thread-safety isn't available, the initGEOS routine must be called
# first. This function takes the notice and error functions, defined
# as Python callbacks above, as parameters. Here is the C code that is
# wrapped:
# extern void GEOS_DLL initGEOS(GEOSMessageHandler notice_function, GEOSMessageHandler error_function);
lgeos.initGEOS(notice_h, error_h)
# Calling finishGEOS() upon exit of the interpreter.
import atexit
atexit.register(lgeos.finishGEOS)
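# --- Illustrative usage sketch (editor addition, not part of the upstream module) ---
# With a local GEOS build available, the version helpers above can be exercised
# directly; the exact numbers reported depend on the installed library.
if __name__ == '__main__':
    _info = geos_version_info()
    print('GEOS %(version)s (C API %(capi_version)s)' % _info)
    print('Prepared geometry support: %s' % GEOS_PREPARE)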
|
mit
|
Tagar/incubator-airflow
|
tests/task/task_runner/test_bash_task_runner.py
|
10
|
3875
|
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import mock
import os
import psutil
import time
import unittest
from airflow import models, settings
from airflow.jobs import LocalTaskJob
from airflow.models import TaskInstance as TI
from airflow.task.task_runner import BashTaskRunner
from airflow.utils import timezone
from airflow.utils.state import State
from logging.config import dictConfig
from tests.core import TEST_DAG_FOLDER
DEFAULT_DATE = timezone.datetime(2016, 1, 1)
LOGGING_CONFIG = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'airflow.task': {
'format': '[%%(asctime)s] {{%%(filename)s:%%(lineno)d}} %%(levelname)s - '
'%%(message)s'
},
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'formatter': 'airflow.task',
'stream': 'ext://sys.stdout'
}
},
'loggers': {
'airflow': {
'handlers': ['console'],
'level': 'INFO',
'propagate': False
}
}
}
class TestBashTaskRunner(unittest.TestCase):
def setUp(self):
dictConfig(LOGGING_CONFIG)
def test_start_and_terminate(self):
local_task_job = mock.Mock()
local_task_job.task_instance = mock.MagicMock()
local_task_job.task_instance.run_as_user = None
local_task_job.task_instance.command_as_list.return_value = ['sleep', '1000']
runner = BashTaskRunner(local_task_job)
runner.start()
pgid = os.getpgid(runner.process.pid)
self.assertTrue(pgid)
procs = []
for p in psutil.process_iter():
try:
if os.getpgid(p.pid) == pgid:
procs.append(p)
except OSError:
pass
runner.terminate()
for p in procs:
self.assertFalse(psutil.pid_exists(p.pid))
def test_on_kill(self):
"""
Test that ensures that clearing a task in the UI sends SIGTERM to the task.
"""
path = "/tmp/airflow_on_kill"
try:
os.unlink(path)
except OSError:
pass
dagbag = models.DagBag(
dag_folder=TEST_DAG_FOLDER,
include_examples=False,
)
dag = dagbag.dags.get('test_on_kill')
task = dag.get_task('task1')
session = settings.Session()
dag.clear()
dag.create_dagrun(run_id="test",
state=State.RUNNING,
execution_date=DEFAULT_DATE,
start_date=DEFAULT_DATE,
session=session)
ti = TI(task=task, execution_date=DEFAULT_DATE)
job1 = LocalTaskJob(task_instance=ti, ignore_ti_state=True)
runner = BashTaskRunner(job1)
runner.start()
# give the task some time to startup
time.sleep(3)
runner.terminate()
f = open(path, "r")
self.assertEqual("ON_KILL_TEST", f.readline())
f.close()
if __name__ == '__main__':
unittest.main()
|
apache-2.0
|
rafiqsaleh/VERCE
|
verce-hpc-pe/src/networkx/algorithms/tests/test_clique.py
|
32
|
4909
|
#!/usr/bin/env python
from nose.tools import *
import networkx as nx
from networkx import convert_node_labels_to_integers as cnlti
class TestCliques:
def setUp(self):
z=[3,4,3,4,2,4,2,1,1,1,1]
self.G=cnlti(nx.generators.havel_hakimi_graph(z),first_label=1)
self.cl=list(nx.find_cliques(self.G))
H=nx.complete_graph(6)
H=nx.relabel_nodes(H,dict( [(i,i+1) for i in range(6)]))
H.remove_edges_from([(2,6),(2,5),(2,4),(1,3),(5,3)])
self.H=H
def test_find_cliques1(self):
cl=list(nx.find_cliques(self.G))
rcl=nx.find_cliques_recursive(self.G)
assert_equal(sorted(map(sorted,cl)), sorted(map(sorted,rcl)))
assert_equal(cl,
[[2, 6, 1, 3], [2, 6, 4], [5, 4, 7], [8, 9], [10, 11]])
def test_selfloops(self):
self.G.add_edge(1,1)
cl=list(nx.find_cliques(self.G))
rcl=nx.find_cliques_recursive(self.G)
assert_equal(sorted(map(sorted,cl)), sorted(map(sorted,rcl)))
assert_equal(cl,
[[2, 6, 1, 3], [2, 6, 4], [5, 4, 7], [8, 9], [10, 11]])
def test_find_cliques2(self):
hcl=list(nx.find_cliques(self.H))
assert_equal(sorted(map(sorted,hcl)),
[[1, 2], [1, 4, 5, 6], [2, 3], [3, 4, 6]])
def test_clique_number(self):
G=self.G
assert_equal(nx.graph_clique_number(G),4)
assert_equal(nx.graph_clique_number(G,cliques=self.cl),4)
def test_number_of_cliques(self):
G=self.G
assert_equal(nx.graph_number_of_cliques(G),5)
assert_equal(nx.graph_number_of_cliques(G,cliques=self.cl),5)
assert_equal(nx.number_of_cliques(G,1),1)
assert_equal(list(nx.number_of_cliques(G,[1]).values()),[1])
assert_equal(list(nx.number_of_cliques(G,[1,2]).values()),[1, 2])
assert_equal(nx.number_of_cliques(G,[1,2]),{1: 1, 2: 2})
assert_equal(nx.number_of_cliques(G,2),2)
assert_equal(nx.number_of_cliques(G),
{1: 1, 2: 2, 3: 1, 4: 2, 5: 1,
6: 2, 7: 1, 8: 1, 9: 1, 10: 1, 11: 1})
assert_equal(nx.number_of_cliques(G,nodes=G.nodes()),
{1: 1, 2: 2, 3: 1, 4: 2, 5: 1,
6: 2, 7: 1, 8: 1, 9: 1, 10: 1, 11: 1})
assert_equal(nx.number_of_cliques(G,nodes=[2,3,4]),
{2: 2, 3: 1, 4: 2})
assert_equal(nx.number_of_cliques(G,cliques=self.cl),
{1: 1, 2: 2, 3: 1, 4: 2, 5: 1,
6: 2, 7: 1, 8: 1, 9: 1, 10: 1, 11: 1})
assert_equal(nx.number_of_cliques(G,G.nodes(),cliques=self.cl),
{1: 1, 2: 2, 3: 1, 4: 2, 5: 1,
6: 2, 7: 1, 8: 1, 9: 1, 10: 1, 11: 1})
def test_node_clique_number(self):
G=self.G
assert_equal(nx.node_clique_number(G,1),4)
assert_equal(list(nx.node_clique_number(G,[1]).values()),[4])
assert_equal(list(nx.node_clique_number(G,[1,2]).values()),[4, 4])
assert_equal(nx.node_clique_number(G,[1,2]),{1: 4, 2: 4})
assert_equal(nx.node_clique_number(G,1),4)
assert_equal(nx.node_clique_number(G),
{1: 4, 2: 4, 3: 4, 4: 3, 5: 3, 6: 4,
7: 3, 8: 2, 9: 2, 10: 2, 11: 2})
assert_equal(nx.node_clique_number(G,cliques=self.cl),
{1: 4, 2: 4, 3: 4, 4: 3, 5: 3, 6: 4,
7: 3, 8: 2, 9: 2, 10: 2, 11: 2})
def test_cliques_containing_node(self):
G=self.G
assert_equal(nx.cliques_containing_node(G,1),
[[2, 6, 1, 3]])
assert_equal(list(nx.cliques_containing_node(G,[1]).values()),
[[[2, 6, 1, 3]]])
assert_equal(list(nx.cliques_containing_node(G,[1,2]).values()),
[[[2, 6, 1, 3]], [[2, 6, 1, 3], [2, 6, 4]]])
assert_equal(nx.cliques_containing_node(G,[1,2]),
{1: [[2, 6, 1, 3]], 2: [[2, 6, 1, 3], [2, 6, 4]]})
assert_equal(nx.cliques_containing_node(G,1),
[[2, 6, 1, 3]])
assert_equal(nx.cliques_containing_node(G,2),
[[2, 6, 1, 3], [2, 6, 4]])
assert_equal(nx.cliques_containing_node(G,2,cliques=self.cl),
[[2, 6, 1, 3], [2, 6, 4]])
assert_equal(len(nx.cliques_containing_node(G)),11)
def test_make_clique_bipartite(self):
G=self.G
B=nx.make_clique_bipartite(G)
assert_equal(sorted(B.nodes()),
[-5, -4, -3, -2, -1, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11])
H=nx.project_down(B)
assert_equal(H.adj,G.adj)
H1=nx.project_up(B)
assert_equal(H1.nodes(),[1, 2, 3, 4, 5])
H2=nx.make_max_clique_graph(G)
assert_equal(H1.adj,H2.adj)
@raises(nx.NetworkXNotImplemented)
def test_directed(self):
cliques=nx.find_cliques(nx.DiGraph())
|
mit
|
bearstech/ansible
|
test/units/plugins/cache/test_cache.py
|
94
|
4172
|
# (c) 2012-2015, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.compat.tests import unittest, mock
from ansible.errors import AnsibleError
from ansible.plugins.cache import FactCache
from ansible.plugins.cache.base import BaseCacheModule
from ansible.plugins.cache.memory import CacheModule as MemoryCache
HAVE_MEMCACHED = True
try:
import memcache
except ImportError:
HAVE_MEMCACHED = False
else:
# Use an else so that the only reason we skip this is for lack of
# memcached, not errors importing the plugin
from ansible.plugins.cache.memcached import CacheModule as MemcachedCache
HAVE_REDIS = True
try:
import redis
except ImportError:
HAVE_REDIS = False
else:
from ansible.plugins.cache.redis import CacheModule as RedisCache
class TestFactCache(unittest.TestCase):
def setUp(self):
with mock.patch('ansible.constants.CACHE_PLUGIN', 'memory'):
self.cache = FactCache()
def test_copy(self):
self.cache['avocado'] = 'fruit'
self.cache['daisy'] = 'flower'
a_copy = self.cache.copy()
self.assertEqual(type(a_copy), dict)
self.assertEqual(a_copy, dict(avocado='fruit', daisy='flower'))
def test_plugin_load_failure(self):
# See https://github.com/ansible/ansible/issues/18751
# Note no fact_connection config set, so this will fail
with mock.patch('ansible.constants.CACHE_PLUGIN', 'json'):
self.assertRaisesRegexp(AnsibleError,
"Unable to load the facts cache plugin.*json.*",
FactCache)
class TestAbstractClass(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_subclass_error(self):
class CacheModule1(BaseCacheModule):
pass
with self.assertRaises(TypeError):
CacheModule1() # pylint: disable=abstract-class-instantiated
class CacheModule2(BaseCacheModule):
def get(self, key):
super(CacheModule2, self).get(key)
with self.assertRaises(TypeError):
CacheModule2() # pylint: disable=abstract-class-instantiated
def test_subclass_success(self):
class CacheModule3(BaseCacheModule):
def get(self, key):
super(CacheModule3, self).get(key)
def set(self, key, value):
super(CacheModule3, self).set(key, value)
def keys(self):
super(CacheModule3, self).keys()
def contains(self, key):
super(CacheModule3, self).contains(key)
def delete(self, key):
super(CacheModule3, self).delete(key)
def flush(self):
super(CacheModule3, self).flush()
def copy(self):
super(CacheModule3, self).copy()
self.assertIsInstance(CacheModule3(), CacheModule3)
@unittest.skipUnless(HAVE_MEMCACHED, 'python-memcached module not installed')
def test_memcached_cachemodule(self):
self.assertIsInstance(MemcachedCache(), MemcachedCache)
def test_memory_cachemodule(self):
self.assertIsInstance(MemoryCache(), MemoryCache)
@unittest.skipUnless(HAVE_REDIS, 'Redis python module not installed')
def test_redis_cachemodule(self):
self.assertIsInstance(RedisCache(), RedisCache)
|
gpl-3.0
|
azavea/raster-foundry
|
app-tasks/rf/src/rf/uploads/geotiff/factories.py
|
2
|
4123
|
# Factories to create scenes from geotiffs on disk somewhere
import logging
import os
import boto3
from .create_images import create_geotiff_image
from .create_scenes import create_geotiff_scene
from rf.utils import cog
from rf.utils.io import (
Visibility,
IngestStatus,
upload_tifs,
s3_bucket_and_key_from_url,
get_tempdir
)
import urllib
logger = logging.getLogger(__name__)
class GeoTiffS3SceneFactory(object):
"""A convenience class for creating Scenes from an S3 folder of multiband GeoTiffs.
Example usage:
```
from rf.utils.io import Visibility
captureDate = datetime.date(YYYY, MM, DD)
factory = GeoTiffS3SceneFactory('<Upload Here>')
for scene in factory.generate_scenes():
# do something with the created scenes
# Note that this will download GeoTIFFs locally, so it is best run somewhere with a fast
# connection to S3
```
"""
def __init__(self, upload):
"""Args:
upload (Upload): instance of upload model to create scenes for
"""
self._upload = upload
self.isProjectUpload = upload.projectId is not None
self.files = self._upload.files
self.owner = upload.owner
self.visibility = Visibility.PRIVATE
self.datasource = self._upload.datasource
self.acquisitionDate = self._upload.metadata.get('acquisitionDate')
self.cloudCover = self._upload.metadata.get('cloudCover', 0)
self.fileType = upload.fileType
self.tags = self._upload.metadata.get('tags') or ['']
self.keep_in_source_bucket = self._upload.keepInSourceBucket
def generate_scenes(self):
"""Create a Scene and associated Image for each GeoTiff in self.s3_path
Returns:
Generator of Scenes
"""
s3 = boto3.resource('s3')
for infile in self.files:
# We can't use the temp file as a context manager because it'll be opened/closed multiple
# times and by default is deleted when it's closed. So we use try/finally to ensure that
# it gets cleaned up.
bucket_name, key = s3_bucket_and_key_from_url(infile)
filename = os.path.basename(key)
logger.info('Downloading %s => %s', infile, filename)
bucket = s3.Bucket(bucket_name)
with get_tempdir() as tempdir:
tmp_fname = os.path.join(tempdir, filename)
bucket.download_file(key, tmp_fname)
if self.fileType == 'NON_SPATIAL':
tmp_fname = cog.georeference_file(tmp_fname)
cog.add_overviews(tmp_fname)
cog_path = cog.convert_to_cog(tmp_fname, tempdir)
scene = self.create_geotiff_scene(tmp_fname, os.path.splitext(filename)[0])
if self.keep_in_source_bucket:
scene.ingestLocation = upload_tifs([cog_path], self.owner, scene.id, bucket_name)[0]
else:
scene.ingestLocation = upload_tifs([cog_path], self.owner, scene.id)[0]
images = [self.create_geotiff_image(
tmp_fname, urllib.unquote(scene.ingestLocation), scene, cog_path
)]
scene.thumbnails = []
scene.images = images
yield scene
def create_geotiff_image(self, tif_path, source_uri, scene, filename):
return create_geotiff_image(tif_path, source_uri, scene=scene.id,
filename=filename, visibility=self.visibility, owner=self.owner)
def create_geotiff_scene(self, tif_path, name):
# Always COGs, now and forever
ingestStatus = IngestStatus.INGESTED
return create_geotiff_scene(
tif_path,
self.datasource,
visibility=self.visibility,
tags=self.tags,
acquisitionDate=self.acquisitionDate,
cloudCover=self.cloudCover,
name=name,
owner=self.owner,
ingestStatus=ingestStatus,
sceneType='COG'
)
|
apache-2.0
|
krafczyk/spack
|
var/spack/repos/builtin/packages/snap-korf/package.py
|
5
|
2360
|
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
import glob
class SnapKorf(MakefilePackage):
"""SNAP is a general purpose gene finding program suitable for both
eukaryotic and prokaryotic genomes."""
homepage = "http://korflab.ucdavis.edu/software.html"
url = "http://korflab.ucdavis.edu/Software/snap-2013-11-29.tar.gz"
version('2013-11-29', 'dfdf48e37cdb32af4eecd9201506b6e3')
depends_on('perl', type=('build', 'run'))
depends_on('boost')
depends_on('sqlite')
depends_on('sparsehash')
conflicts('%gcc@5:', when='@2013-11-29')
def install(self, spec, prefix):
mkdirp(prefix.bin)
progs = ['snap', 'fathom', 'forge', 'depend', 'exonpairs', 'hmm-info']
for p in progs:
install(p, prefix.bin)
files = glob.iglob('*.pl')
for file in files:
install(file, prefix.bin)
install_tree('Zoe', prefix.Zoe)
install_tree('HMM', prefix.HMM)
install_tree('DNA', prefix.DNA)
def setup_environment(self, spack_env, run_env):
run_env.set('ZOE', self.prefix)
run_env.prepend_path('PATH', self.prefix)
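# --- Illustrative usage note (editor addition, not part of the upstream package) ---
# Once this recipe is available to a Spack instance, the package can be built
# with the usual spec syntax, e.g.:
#     spack install snap-korf@2013-11-29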
|
lgpl-2.1
|
figue/raspberry-pi-kernel
|
tools/perf/scripts/python/futex-contention.py
|
11261
|
1486
|
# futex contention
# (c) 2010, Arnaldo Carvalho de Melo <acme@redhat.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Translation of:
#
# http://sourceware.org/systemtap/wiki/WSFutexContention
#
# to perf python scripting.
#
# Measures futex contention
import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + '/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Util import *
process_names = {}
thread_thislock = {}
thread_blocktime = {}
lock_waits = {} # long-lived stats on (tid,lock) blockage elapsed time
process_names = {} # long-lived pid-to-execname mapping
def syscalls__sys_enter_futex(event, ctxt, cpu, s, ns, tid, comm,
nr, uaddr, op, val, utime, uaddr2, val3):
cmd = op & FUTEX_CMD_MASK
if cmd != FUTEX_WAIT:
return # we don't care about originators of WAKE events
process_names[tid] = comm
thread_thislock[tid] = uaddr
thread_blocktime[tid] = nsecs(s, ns)
def syscalls__sys_exit_futex(event, ctxt, cpu, s, ns, tid, comm,
nr, ret):
if tid in thread_blocktime:
elapsed = nsecs(s, ns) - thread_blocktime[tid]
add_stats(lock_waits, (tid, thread_thislock[tid]), elapsed)
del thread_blocktime[tid]
del thread_thislock[tid]
def trace_begin():
print "Press control+C to stop and show the summary"
def trace_end():
for (tid, lock) in lock_waits:
min, max, avg, count = lock_waits[tid, lock]
print "%s[%d] lock %x contended %d times, %d avg ns" % \
(process_names[tid], tid, lock, count, avg)
|
gpl-2.0
|
lanffy/VagrantForPhp
|
files/software/node-v0.12.7-linux-x86/lib/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/common.py
|
366
|
19638
|
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import with_statement
import collections
import errno
import filecmp
import os.path
import re
import tempfile
import sys
# A minimal memoizing decorator. It'll blow up if the args aren't immutable,
# among other "problems".
class memoize(object):
def __init__(self, func):
self.func = func
self.cache = {}
def __call__(self, *args):
try:
return self.cache[args]
except KeyError:
result = self.func(*args)
self.cache[args] = result
return result
class GypError(Exception):
"""Error class representing an error, which is to be presented
to the user. The main entry point will catch and display this.
"""
pass
def ExceptionAppend(e, msg):
"""Append a message to the given exception's message."""
if not e.args:
e.args = (msg,)
elif len(e.args) == 1:
e.args = (str(e.args[0]) + ' ' + msg,)
else:
e.args = (str(e.args[0]) + ' ' + msg,) + e.args[1:]
def FindQualifiedTargets(target, qualified_list):
"""
Given a list of qualified targets, return the qualified targets for the
specified |target|.
"""
return [t for t in qualified_list if ParseQualifiedTarget(t)[1] == target]
def ParseQualifiedTarget(target):
# Splits a qualified target into a build file, target name and toolset.
# NOTE: rsplit is used to disambiguate the Windows drive letter separator.
target_split = target.rsplit(':', 1)
if len(target_split) == 2:
[build_file, target] = target_split
else:
build_file = None
target_split = target.rsplit('#', 1)
if len(target_split) == 2:
[target, toolset] = target_split
else:
toolset = None
return [build_file, target, toolset]
def ResolveTarget(build_file, target, toolset):
# This function resolves a target into a canonical form:
# - a fully defined build file, either absolute or relative to the current
# directory
# - a target name
# - a toolset
#
# build_file is the file relative to which 'target' is defined.
# target is the qualified target.
# toolset is the default toolset for that target.
[parsed_build_file, target, parsed_toolset] = ParseQualifiedTarget(target)
if parsed_build_file:
if build_file:
# If a relative path, parsed_build_file is relative to the directory
# containing build_file. If build_file is not in the current directory,
# parsed_build_file is not a usable path as-is. Resolve it by
# interpreting it as relative to build_file. If parsed_build_file is
# absolute, it is usable as a path regardless of the current directory,
# and os.path.join will return it as-is.
build_file = os.path.normpath(os.path.join(os.path.dirname(build_file),
parsed_build_file))
# Further (to handle cases like ../cwd), make it relative to cwd)
if not os.path.isabs(build_file):
build_file = RelativePath(build_file, '.')
else:
build_file = parsed_build_file
if parsed_toolset:
toolset = parsed_toolset
return [build_file, target, toolset]
def BuildFile(fully_qualified_target):
# Extracts the build file from the fully qualified target.
return ParseQualifiedTarget(fully_qualified_target)[0]
def GetEnvironFallback(var_list, default):
"""Look up a key in the environment, with fallback to secondary keys
and finally falling back to a default value."""
for var in var_list:
if var in os.environ:
return os.environ[var]
return default
def QualifiedTarget(build_file, target, toolset):
# "Qualified" means the file that a target was defined in and the target
# name, separated by a colon, suffixed by a # and the toolset name:
# /path/to/file.gyp:target_name#toolset
fully_qualified = build_file + ':' + target
if toolset:
fully_qualified = fully_qualified + '#' + toolset
return fully_qualified
@memoize
def RelativePath(path, relative_to):
# Assuming both |path| and |relative_to| are relative to the current
# directory, returns a relative path that identifies path relative to
# relative_to.
# Convert to normalized (and therefore absolute paths).
path = os.path.realpath(path)
relative_to = os.path.realpath(relative_to)
# On Windows, we can't create a relative path to a different drive, so just
# use the absolute path.
if sys.platform == 'win32':
if (os.path.splitdrive(path)[0].lower() !=
os.path.splitdrive(relative_to)[0].lower()):
return path
# Split the paths into components.
path_split = path.split(os.path.sep)
relative_to_split = relative_to.split(os.path.sep)
# Determine how much of the prefix the two paths share.
prefix_len = len(os.path.commonprefix([path_split, relative_to_split]))
# Put enough ".." components to back up out of relative_to to the common
# prefix, and then append the part of path_split after the common prefix.
relative_split = [os.path.pardir] * (len(relative_to_split) - prefix_len) + \
path_split[prefix_len:]
if len(relative_split) == 0:
# The paths were the same.
return ''
# Turn it back into a string and we're done.
return os.path.join(*relative_split)
@memoize
def InvertRelativePath(path, toplevel_dir=None):
"""Given a path like foo/bar that is relative to toplevel_dir, return
the inverse relative path back to the toplevel_dir.
E.g. os.path.normpath(os.path.join(path, InvertRelativePath(path)))
should always produce the empty string, unless the path contains symlinks.
"""
if not path:
return path
toplevel_dir = '.' if toplevel_dir is None else toplevel_dir
return RelativePath(toplevel_dir, os.path.join(toplevel_dir, path))
def FixIfRelativePath(path, relative_to):
# Like RelativePath but returns |path| unchanged if it is absolute.
if os.path.isabs(path):
return path
return RelativePath(path, relative_to)
def UnrelativePath(path, relative_to):
# Assuming that |relative_to| is relative to the current directory, and |path|
# is a path relative to the dirname of |relative_to|, returns a path that
# identifies |path| relative to the current directory.
rel_dir = os.path.dirname(relative_to)
return os.path.normpath(os.path.join(rel_dir, path))
# re objects used by EncodePOSIXShellArgument. See IEEE 1003.1 XCU.2.2 at
# http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_02
# and the documentation for various shells.
# _quote is a pattern that should match any argument that needs to be quoted
# with double-quotes by EncodePOSIXShellArgument. It matches the following
# characters appearing anywhere in an argument:
# \t, \n, space parameter separators
# # comments
# $ expansions (quoted to always expand within one argument)
# % called out by IEEE 1003.1 XCU.2.2
# & job control
# ' quoting
# (, ) subshell execution
# *, ?, [ pathname expansion
# ; command delimiter
# <, >, | redirection
# = assignment
# {, } brace expansion (bash)
# ~ tilde expansion
# It also matches the empty string, because "" (or '') is the only way to
# represent an empty string literal argument to a POSIX shell.
#
# This does not match the characters in _escape, because those need to be
# backslash-escaped regardless of whether they appear in a double-quoted
# string.
_quote = re.compile('[\t\n #$%&\'()*;<=>?[{|}~]|^$')
# _escape is a pattern that should match any character that needs to be
# escaped with a backslash, whether or not the argument matched the _quote
# pattern. _escape is used with re.sub to backslash anything in _escape's
# first match group, hence the (parentheses) in the regular expression.
#
# _escape matches the following characters appearing anywhere in an argument:
# " to prevent POSIX shells from interpreting this character for quoting
# \ to prevent POSIX shells from interpreting this character for escaping
# ` to prevent POSIX shells from interpreting this character for command
# substitution
# Missing from this list is $, because the desired behavior of
# EncodePOSIXShellArgument is to permit parameter (variable) expansion.
#
# Also missing from this list is !, which bash will interpret as the history
# expansion character when history is enabled. bash does not enable history
# by default in non-interactive shells, so this is not thought to be a problem.
# ! was omitted from this list because bash interprets "\!" as a literal string
# including the backslash character (avoiding history expansion but retaining
# the backslash), which would not be correct for argument encoding. Handling
# this case properly would also be problematic because bash allows the history
# character to be changed with the histchars shell variable. Fortunately,
# as history is not enabled in non-interactive shells and
# EncodePOSIXShellArgument is only expected to encode for non-interactive
# shells, there is no room for error here by ignoring !.
_escape = re.compile(r'(["\\`])')
def EncodePOSIXShellArgument(argument):
"""Encodes |argument| suitably for consumption by POSIX shells.
argument may be quoted and escaped as necessary to ensure that POSIX shells
treat the returned value as a literal representing the argument passed to
this function. Parameter (variable) expansions beginning with $ are allowed
to remain intact without escaping the $, to allow the argument to contain
references to variables to be expanded by the shell.
"""
if not isinstance(argument, str):
argument = str(argument)
if _quote.search(argument):
quote = '"'
else:
quote = ''
encoded = quote + re.sub(_escape, r'\\\1', argument) + quote
return encoded
def EncodePOSIXShellList(list):
"""Encodes |list| suitably for consumption by POSIX shells.
Returns EncodePOSIXShellArgument for each item in list, and joins them
together using the space character as an argument separator.
"""
encoded_arguments = []
for argument in list:
encoded_arguments.append(EncodePOSIXShellArgument(argument))
return ' '.join(encoded_arguments)
def DeepDependencyTargets(target_dicts, roots):
"""Returns the recursive list of target dependencies."""
dependencies = set()
pending = set(roots)
while pending:
# Pluck out one.
r = pending.pop()
# Skip if visited already.
if r in dependencies:
continue
# Add it.
dependencies.add(r)
# Add its children.
spec = target_dicts[r]
pending.update(set(spec.get('dependencies', [])))
pending.update(set(spec.get('dependencies_original', [])))
return list(dependencies - set(roots))
def BuildFileTargets(target_list, build_file):
"""From a target_list, returns the subset from the specified build_file.
"""
return [p for p in target_list if BuildFile(p) == build_file]
def AllTargets(target_list, target_dicts, build_file):
"""Returns all targets (direct and dependencies) for the specified build_file.
"""
bftargets = BuildFileTargets(target_list, build_file)
deptargets = DeepDependencyTargets(target_dicts, bftargets)
return bftargets + deptargets
def WriteOnDiff(filename):
"""Write to a file only if the new contents differ.
Arguments:
filename: name of the file to potentially write to.
Returns:
A file like object which will write to temporary file and only overwrite
the target if it differs (on close).
"""
class Writer(object):
"""Wrapper around file which only covers the target if it differs."""
def __init__(self):
# Pick temporary file.
tmp_fd, self.tmp_path = tempfile.mkstemp(
suffix='.tmp',
prefix=os.path.split(filename)[1] + '.gyp.',
dir=os.path.split(filename)[0])
try:
self.tmp_file = os.fdopen(tmp_fd, 'wb')
except Exception:
# Don't leave turds behind.
os.unlink(self.tmp_path)
raise
def __getattr__(self, attrname):
# Delegate everything else to self.tmp_file
return getattr(self.tmp_file, attrname)
def close(self):
try:
# Close tmp file.
self.tmp_file.close()
# Determine if different.
same = False
try:
same = filecmp.cmp(self.tmp_path, filename, False)
except OSError, e:
if e.errno != errno.ENOENT:
raise
if same:
# The new file is identical to the old one, just get rid of the new
# one.
os.unlink(self.tmp_path)
else:
# The new file is different from the old one, or there is no old one.
# Rename the new file to the permanent name.
#
# tempfile.mkstemp uses an overly restrictive mode, resulting in a
# file that can only be read by the owner, regardless of the umask.
# There's no reason to not respect the umask here, which means that
# an extra hoop is required to fetch it and reset the new file's mode.
#
# No way to get the umask without setting a new one? Set a safe one
# and then set it back to the old value.
umask = os.umask(077)
os.umask(umask)
os.chmod(self.tmp_path, 0666 & ~umask)
if sys.platform == 'win32' and os.path.exists(filename):
# NOTE: on windows (but not cygwin) rename will not replace an
# existing file, so it must be preceded with a remove. Sadly there
# is no way to make the switch atomic.
os.remove(filename)
os.rename(self.tmp_path, filename)
except Exception:
# Don't leave turds behind.
os.unlink(self.tmp_path)
raise
return Writer()
def EnsureDirExists(path):
"""Make sure the directory for |path| exists."""
try:
os.makedirs(os.path.dirname(path))
except OSError:
pass
def GetFlavor(params):
"""Returns |params.flavor| if it's set, the system's default flavor else."""
flavors = {
'cygwin': 'win',
'win32': 'win',
'darwin': 'mac',
}
if 'flavor' in params:
return params['flavor']
if sys.platform in flavors:
return flavors[sys.platform]
if sys.platform.startswith('sunos'):
return 'solaris'
if sys.platform.startswith('freebsd'):
return 'freebsd'
if sys.platform.startswith('openbsd'):
return 'openbsd'
if sys.platform.startswith('aix'):
return 'aix'
return 'linux'
def CopyTool(flavor, out_path):
"""Finds (flock|mac|win)_tool.gyp in the gyp directory and copies it
to |out_path|."""
# aix and solaris just need flock emulation. mac and win use more complicated
# support scripts.
prefix = {
'aix': 'flock',
'solaris': 'flock',
'mac': 'mac',
'win': 'win'
}.get(flavor, None)
if not prefix:
return
# Slurp input file.
source_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)), '%s_tool.py' % prefix)
with open(source_path) as source_file:
source = source_file.readlines()
# Add header and write it out.
tool_path = os.path.join(out_path, 'gyp-%s-tool' % prefix)
with open(tool_path, 'w') as tool_file:
tool_file.write(
''.join([source[0], '# Generated by gyp. Do not edit.\n'] + source[1:]))
# Make file executable.
os.chmod(tool_path, 0755)
# From Alex Martelli,
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52560
# ASPN: Python Cookbook: Remove duplicates from a sequence
# First comment, dated 2001/10/13.
# (Also in the printed Python Cookbook.)
def uniquer(seq, idfun=None):
if idfun is None:
idfun = lambda x: x
seen = {}
result = []
for item in seq:
marker = idfun(item)
if marker in seen: continue
seen[marker] = 1
result.append(item)
return result
# Based on http://code.activestate.com/recipes/576694/.
class OrderedSet(collections.MutableSet):
def __init__(self, iterable=None):
self.end = end = []
end += [None, end, end] # sentinel node for doubly linked list
self.map = {} # key --> [key, prev, next]
if iterable is not None:
self |= iterable
def __len__(self):
return len(self.map)
def __contains__(self, key):
return key in self.map
def add(self, key):
if key not in self.map:
end = self.end
curr = end[1]
curr[2] = end[1] = self.map[key] = [key, curr, end]
def discard(self, key):
if key in self.map:
key, prev_item, next_item = self.map.pop(key)
prev_item[2] = next_item
next_item[1] = prev_item
def __iter__(self):
end = self.end
curr = end[2]
while curr is not end:
yield curr[0]
curr = curr[2]
def __reversed__(self):
end = self.end
curr = end[1]
while curr is not end:
yield curr[0]
curr = curr[1]
# The second argument is an addition that causes a pylint warning.
def pop(self, last=True): # pylint: disable=W0221
if not self:
raise KeyError('set is empty')
key = self.end[1][0] if last else self.end[2][0]
self.discard(key)
return key
def __repr__(self):
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, list(self))
def __eq__(self, other):
if isinstance(other, OrderedSet):
return len(self) == len(other) and list(self) == list(other)
return set(self) == set(other)
# Extensions to the recipe.
def update(self, iterable):
for i in iterable:
if i not in self:
self.add(i)
class CycleError(Exception):
"""An exception raised when an unexpected cycle is detected."""
def __init__(self, nodes):
self.nodes = nodes
def __str__(self):
return 'CycleError: cycle involving: ' + str(self.nodes)
def TopologicallySorted(graph, get_edges):
r"""Topologically sort based on a user provided edge definition.
Args:
graph: A list of node names.
get_edges: A function mapping from node name to a hashable collection
of node names which this node has outgoing edges to.
Returns:
A list containing all of the node in graph in topological order.
It is assumed that calling get_edges once for each node and caching is
cheaper than repeatedly calling get_edges.
Raises:
CycleError in the event of a cycle.
Example:
graph = {'a': '$(b) $(c)', 'b': 'hi', 'c': '$(b)'}
def GetEdges(node):
return re.findall(r'\$\(([^))]\)', graph[node])
print TopologicallySorted(graph.keys(), GetEdges)
==>
['a', 'c', 'b']
"""
get_edges = memoize(get_edges)
visited = set()
visiting = set()
ordered_nodes = []
def Visit(node):
if node in visiting:
raise CycleError(visiting)
if node in visited:
return
visited.add(node)
visiting.add(node)
for neighbor in get_edges(node):
Visit(neighbor)
visiting.remove(node)
ordered_nodes.insert(0, node)
for node in sorted(graph):
Visit(node)
return ordered_nodes
def CrossCompileRequested():
# TODO: figure out how to not build extra host objects in the
# non-cross-compile case when this is enabled, and enable unconditionally.
return (os.environ.get('GYP_CROSSCOMPILE') or
os.environ.get('AR_host') or
os.environ.get('CC_host') or
os.environ.get('CXX_host') or
os.environ.get('AR_target') or
os.environ.get('CC_target') or
os.environ.get('CXX_target'))
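# --- Illustrative usage sketch (editor addition, not part of the upstream module) ---
# A quick, self-contained exercise of two of the helpers defined above.
if __name__ == '__main__':
  # Arguments containing spaces or '$' get double-quoted for a POSIX shell.
  print EncodePOSIXShellList(['ls', '-l', 'My Documents', 'a$b'])
  # Tiny dependency graph: 'a' has an outgoing edge to 'b'.
  print TopologicallySorted(['a', 'b'], lambda node: {'a': ['b']}.get(node, []))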
|
mit
|
Sidney84/pa-chromium
|
native_client_sdk/src/tools/genhttpfs.py
|
79
|
2524
|
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This script generates a manifest file for the MountHttp file system.
# Files and directory paths are specified on the command line. Names may
# contain globs, and directories are recursed to form a list of files.
#
# For each file, the mode bits, size and path relative to the CWD are written
# to the output file which is stdout by default.
#
import glob
import optparse
import os
import sys
def main(argv):
parser = optparse.OptionParser(
usage='Usage: %prog [options] filename ...')
parser.add_option('-C', '--srcdir',
help='Change directory.', dest='srcdir', default=None)
parser.add_option('-o', '--output',
help='Output file name.', dest='output', default=None)
parser.add_option('-v', '--verbose',
help='Verbose output.', dest='verbose',
action='store_true')
parser.add_option('-r', '--recursive',
help='Recursive search.', action='store_true')
options, args = parser.parse_args(argv)
if options.output:
outfile = open(options.output, 'w')
else:
outfile = sys.stdout
if options.srcdir:
os.chdir(options.srcdir)
# Generate a set of unique file names based on the input globs
fileset = set()
for fileglob in args:
filelist = glob.glob(fileglob)
if not filelist:
raise RuntimeError('Could not find match for "%s".\n' % fileglob)
for filename in filelist:
if os.path.isfile(filename):
fileset |= set([filename])
continue
if os.path.isdir(filename) and options.recursive:
for root, _, files in os.walk(filename):
fileset |= set([os.path.join(root, name) for name in files])
continue
raise RuntimeError('Can not handle path "%s".\n' % filename)
cwd = os.path.abspath(os.getcwd())
cwdlen = len(cwd)
for filename in sorted(fileset):
relname = os.path.abspath(filename)
if cwd not in relname:
raise RuntimeError('%s is not relative to CWD %s.\n' % (filename, cwd))
relname = relname[cwdlen:]
stat = os.stat(filename)
mode = '-r--'
outfile.write('%s %d %s\n' % (mode, stat.st_size, relname))
return 0
if __name__ == '__main__':
try:
sys.exit(main(sys.argv[1:]))
except OSError, e:
sys.stderr.write('%s: %s\n' % (os.path.basename(__file__), e))
sys.exit(1)
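# --- Illustrative invocation (editor addition; the paths are hypothetical) ---
# Recursively list everything under ./web and write the manifest to a file,
# using the options defined above:
#   ./genhttpfs.py -r -C web -o httpfs_manifest.txt .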
|
bsd-3-clause
|
ammarkhann/FinalSeniorCode
|
lib/python2.7/site-packages/numpy/doc/ufuncs.py
|
87
|
5427
|
"""
===================
Universal Functions
===================
Ufuncs are, generally speaking, mathematical functions or operations that are
applied element-by-element to the contents of an array. That is, the result
in each output array element only depends on the value in the corresponding
input array (or arrays) and on no other array elements. NumPy comes with a
large suite of ufuncs, and scipy extends that suite substantially. The simplest
example is the addition operator: ::
>>> np.array([0,2,3,4]) + np.array([1,1,-1,2])
array([1, 3, 2, 6])
The ufunc module lists all the available ufuncs in numpy. Documentation on
the specific ufuncs may be found in those modules. This documentation is
intended to address the more general aspects of ufuncs common to most of
them. All of the ufuncs that make use of Python operators (e.g., +, -, etc.)
have equivalent functions defined (e.g. add() for +)
Type coercion
=============
What happens when a binary operator (e.g., +,-,\\*,/, etc) deals with arrays of
two different types? What is the type of the result? Typically, the result is
the higher of the two types. For example: ::
float32 + float64 -> float64
int8 + int32 -> int32
int16 + float32 -> float32
float32 + complex64 -> complex64
There are some less obvious cases generally involving mixes of types
(e.g. uints, ints and floats) where equal bit sizes for each are not
capable of saving all the information in a different type of equivalent
bit size. Some examples are int32 vs float32 or uint32 vs int32.
Generally, the result is the higher type of larger size than both
(if available). So: ::
int32 + float32 -> float64
uint32 + int32 -> int64
Finally, the type coercion behavior when expressions involve Python
scalars is different than that seen for arrays. Since Python has a
limited number of types, combining a Python int with a dtype=np.int8
array does not coerce to the higher type but instead, the type of the
array prevails. So the rule for Python scalars combined with arrays is
that the result will be that of the array equivalent of the Python scalar
if the Python scalar is of a higher 'kind' than the array (e.g., float
vs. int); otherwise the resultant type will be that of the array.
For example: ::
Python int + int8 -> int8
Python float + int8 -> float64
ufunc methods
=============
Binary ufuncs support 4 methods.
**.reduce(arr)** applies the binary operator to elements of the array in
sequence. For example: ::
>>> np.add.reduce(np.arange(10)) # adds all elements of array
45
For multidimensional arrays, the first dimension is reduced by default: ::
>>> np.add.reduce(np.arange(10).reshape(2,5))
array([ 5, 7, 9, 11, 13])
The axis keyword can be used to specify different axes to reduce: ::
>>> np.add.reduce(np.arange(10).reshape(2,5),axis=1)
array([10, 35])
**.accumulate(arr)** applies the binary operator and generates an
equivalently shaped array that includes the accumulated amount for each
element of the array. A couple examples: ::
>>> np.add.accumulate(np.arange(10))
array([ 0, 1, 3, 6, 10, 15, 21, 28, 36, 45])
>>> np.multiply.accumulate(np.arange(1,9))
array([ 1, 2, 6, 24, 120, 720, 5040, 40320])
The behavior for multidimensional arrays is the same as for .reduce(),
as is the use of the axis keyword.
**.reduceat(arr,indices)** allows one to apply reduce to selected parts
of an array. It is a difficult method to understand; see the reduceat
documentation for the full details.
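A minimal sketch of its behavior (each index marks where a reduction
segment begins, so the segments below are arr[0:4] and arr[4:]): ::
>>> np.add.reduceat(np.arange(8), [0, 4])
array([ 6, 22])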
**.outer(arr1,arr2)** generates an outer operation on the two arrays arr1 and
arr2. It will work on multidimensional arrays (the shape of the result is
the concatenation of the two input shapes): ::
>>> np.multiply.outer(np.arange(3),np.arange(4))
array([[0, 0, 0, 0],
[0, 1, 2, 3],
[0, 2, 4, 6]])
Output arguments
================
All ufuncs accept an optional output array. The array must be of the expected
output shape. Beware that if the type of the output array is of a different
(and lower) type than the output result, the results may be silently truncated
or otherwise corrupted in the downcast to the lower type. This usage is useful
when one wants to avoid creating large temporary arrays and instead allows one
to reuse the same array memory repeatedly (at the expense of not being able to
use more convenient operator notation in expressions). Note that when the
output argument is used, the ufunc still returns a reference to the result.
>>> x = np.arange(2)
>>> np.add(np.arange(2),np.arange(2.),x)
array([0, 2])
>>> x
array([0, 2])
and & or as ufuncs
==================
Invariably people try to use the python 'and' and 'or' as logical operators
(and quite understandably). But these operators do not behave as normal
operators since Python treats these quite differently. They cannot be
overloaded with array equivalents. Thus using 'and' or 'or' with an array
results in an error. There are two alternatives:
1) use the ufunc functions logical_and() and logical_or().
2) use the bitwise operators & and \\|. The drawback of these is that if
the arguments to these operators are not boolean arrays, the result is
likely incorrect. On the other hand, most usages of logical_and and
logical_or are with boolean arrays. As long as one is careful, this is
a convenient way to apply these operators.
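A minimal illustration with boolean arrays (the exact array repr can vary
slightly between NumPy versions): ::
>>> a = np.array([True, False, True])
>>> b = np.array([True, True, False])
>>> np.logical_and(a, b)
array([ True, False, False])
>>> a & b
array([ True, False, False])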
"""
from __future__ import division, absolute_import, print_function
|
mit
|
bhavishya235/Smart-Email-Client
|
detect.py
|
3
|
1094
|
import math
from nltk.tokenize import *
from nltk.probability import *
def main():
THRESHOLD = -400
docs = 95
d_file = open('dictionary2').readlines()
dict_key = []
dict_occ = []
dict_indx = []
test_file = open('test', 'w')
test_file.write('0 ') #Writing label 0
i = 0
for line in d_file:
tmp = line.strip()
tmp = tmp.split()
dict_indx.append(i)
dict_key.append(tmp[0])
dict_occ.append(int(tmp[1]))
i=i+1
fin = open('newmail','r').read().lower()
tokenizer = RegexpTokenizer('[\w\.]+(@)[\w\.]+|[\w]*(http)[s]?[^"<>\s]+|\w+')
words2 = tokenizer.tokenize(fin)
words = []
for it in words2:
if len(it)>2:
words.append(it)
fdist = FreqDist(words)
f_out = open('test_idf.db', 'w')
j=0
for it in dict_key:
if it in fdist.keys():
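            # TF-IDF-style weight: scaled term frequency 0.5*(0.5 + tf/max_tf)
            # times a smoothed inverse document frequency log2(docs / (occurrences + 1))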
tmp = 0.5*(0.5+ float(fdist[it])/float(fdist[fdist.max()]))*math.log(float(docs)/float((dict_occ[j]+1)), 2)
f_out.write(it+" "+str(tmp)+'\n')
if tmp>THRESHOLD:
test_file.write(str(dict_indx[j])+":"+str(tmp)+" ")
j = j+1
f_out.close()
test_file.close()
if __name__=='__main__':
main()
|
apache-2.0
|
jolid/script.module.donnie
|
lib/donnie/tvrelease.py
|
1
|
4966
|
import urllib2, urllib, sys, os, re, random, copy
from BeautifulSoup import BeautifulSoup, Tag, NavigableString
import xbmc,xbmcplugin,xbmcgui,xbmcaddon
from t0mm0.common.net import Net
from t0mm0.common.addon import Addon
from scrapers import CommonScraper
net = Net()
try:
import json
except:
# pre-frodo and python 2.4
import simplejson as json
''' ###########################################################
Usage and helper functions
############################################################'''
class TVReleaseServiceSracper(CommonScraper):
def __init__(self, settingsid, DB=None, REG=None):
if DB:
self.DB=DB
if REG:
self.REG=REG
self.addon_id = 'script.module.donnie'
self.service='tvrelease'
self.name = 'tv-release.net'
self.raiseError = False
self.referrer = 'http://tv-release.net/'
self.base_url = 'http://tv-release.net/'
self.user_agent = 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3'
self.provides = []
self.settingsid = settingsid
self._loadsettings()
self.settings_addon = self.addon
def _getShows(self, silent=False):
self.log('Do Nothing here')
def _getRecentShows(self, silent=False):
self.log('Do Nothing here')
def _getEpisodes(self, showid, show, url, pDialog, percent, silent):
self.log('Do Nothing here')
def _getMovies(self, silent=False):
self.log('Do Nothing here')
def _getRecentMovies(self, silent):
self.log('Do Nothing here')
def _getStreams(self, episodeid=None, movieid=None):
query = ""
if episodeid:
row = self.DB.query("SELECT rw_shows.showname, season, episode FROM rw_episodes JOIN rw_shows ON rw_shows.showid=rw_episodes.showid WHERE episodeid=?", [episodeid])
name = row[0].replace("'", "")
if re.search('\\(\\d\\d\\d\\d\\)$', row[0]):
name = name[0:len(name)-7]
season = row[1].zfill(2)
episode = row[2]
#query = str("%s S%sE%s" % (name, season, episode))
uri = ""
elif movieid:
row = self.DB.query("SELECT movie, year FROM rw_movies WHERE imdb=? LIMIT 1", [movieid])
movie = self.cleanQuery(row[0])
query = "%s %s" %(movie, row[1])
'''streams = []
url = "%splugins/metasearch" % self.base_url
params = {"type": "video", "filter": "cached", "api_key": api_key, "q": query}
pagedata = net.http_POST(url, params).content
if pagedata=='':
return False
data = json.loads(pagedata)
try:
files = data['files']
for f in files:
if f['type'] == 'video':
raw_url = f['id']
name = f['name']
size = int(f['size']) / (1024 * 1024)
if size > 2000:
size = size / 1024
unit = 'GB'
else :
unit = 'MB'
self.getStreamByPriority('Furk - %s ([COLOR blue]%s %s[/COLOR])' %(name, size, unit), self.service + '://' + raw_url)
except: pass
self.DB.commit()'''
def getStreamByPriority(self, link, stream):
self.log(link)
host = 'tv-release.net'
SQL = "INSERT INTO rw_stream_list(stream, url, priority, machineid) " \
"SELECT ?, ?, priority, ? " \
"FROM rw_providers " \
"WHERE mirror=? and provider=?"
self.DB.execute(SQL, [link, stream, self.REG.getSetting('machine-id'), host, self.service])
def _getServicePriority(self, link):
self.log(link)
host = 'tv-release.net'
row = self.DB.query("SELECT priority FROM rw_providers WHERE mirror=? and provider=?", [host, self.service])
return row[0]
def _resolveStream(self, stream):
raw_url = stream.replace(self.service + '://', '')
resolved_url = ''
t_files = []
t_options = []
sdialog = xbmcgui.Dialog()
api_key = self._getKey()
params = {"type": "video", "id": raw_url, "api_key": api_key, 't_files': 1}
url = "%sfile/get" % self.base_url
pagedata = net.http_POST(url, params).content
if pagedata=='':
return False
#print pagedata
data = json.loads(str(pagedata))
try:
files = data['files'][0]['t_files']
for f in files:
if re.search('^video/', f['ct']):
size = int(f['size']) / (1024 * 1024)
if size > 2000:
size = size / 1024
unit = 'GB'
else :
unit = 'MB'
t_files.append("%s ([COLOR blue]%s %s[/COLOR])" %(f['name'], size, unit))
t_options.append(f['url_dl'])
file_select = sdialog.select('Select Furk Stream', t_files)
if file_select < 0:
return resolved_url
resolved_url = str(t_options[file_select])
except: pass
self.log("Furk retruned: %s", resolved_url, level=0)
return resolved_url
def _resolveIMDB(self, uri): #Often needed if a sites movie index does not include imdb links but the movie page does
imdb = ''
print uri
pagedata = self.getURL(uri, append_base_url=True)
if pagedata=='':
return
imdb = re.search('http://www.imdb.com/title/(.+?)/', pagedata).group(1)
return imdb
def whichHost(self, host): #Sometimes needed
table = { 'Watch Blah' : 'blah.com',
'Watch Blah2' : 'blah2.com',
}
try:
host_url = table[host]
return host_url
except:
return 'Unknown'
|
gpl-2.0
|
axsauze/eventsfinder
|
django/db/models/expressions.py
|
109
|
5738
|
import datetime
from django.utils import tree
class ExpressionNode(tree.Node):
"""
Base class for all query expressions.
"""
# Arithmetic connectors
ADD = '+'
SUB = '-'
MUL = '*'
DIV = '/'
MOD = '%%' # This is a quoted % operator - it is quoted
# because it can be used in strings that also
# have parameter substitution.
# Bitwise operators - note that these are generated by .bitand()
# and .bitor(), the '&' and '|' are reserved for boolean operator
# usage.
BITAND = '&'
BITOR = '|'
def __init__(self, children=None, connector=None, negated=False):
if children is not None and len(children) > 1 and connector is None:
raise TypeError('You have to specify a connector.')
super(ExpressionNode, self).__init__(children, connector, negated)
def _combine(self, other, connector, reversed, node=None):
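        # Combining with a timedelta delegates to DateModifierNode, which
        # implements database-side date arithmetic and only supports + and -.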
if isinstance(other, datetime.timedelta):
return DateModifierNode([self, other], connector)
if reversed:
obj = ExpressionNode([other], connector)
obj.add(node or self, connector)
else:
obj = node or ExpressionNode([self], connector)
obj.add(other, connector)
return obj
###################
# VISITOR METHODS #
###################
def prepare(self, evaluator, query, allow_joins):
return evaluator.prepare_node(self, query, allow_joins)
def evaluate(self, evaluator, qn, connection):
return evaluator.evaluate_node(self, qn, connection)
#############
# OPERATORS #
#############
def __add__(self, other):
return self._combine(other, self.ADD, False)
def __sub__(self, other):
return self._combine(other, self.SUB, False)
def __mul__(self, other):
return self._combine(other, self.MUL, False)
def __truediv__(self, other):
return self._combine(other, self.DIV, False)
def __div__(self, other): # Python 2 compatibility
return type(self).__truediv__(self, other)
def __mod__(self, other):
return self._combine(other, self.MOD, False)
def __and__(self, other):
raise NotImplementedError(
"Use .bitand() and .bitor() for bitwise logical operations."
)
def bitand(self, other):
return self._combine(other, self.BITAND, False)
def __or__(self, other):
raise NotImplementedError(
"Use .bitand() and .bitor() for bitwise logical operations."
)
def bitor(self, other):
return self._combine(other, self.BITOR, False)
def __radd__(self, other):
return self._combine(other, self.ADD, True)
def __rsub__(self, other):
return self._combine(other, self.SUB, True)
def __rmul__(self, other):
return self._combine(other, self.MUL, True)
def __rtruediv__(self, other):
return self._combine(other, self.DIV, True)
def __rdiv__(self, other): # Python 2 compatibility
return type(self).__rtruediv__(self, other)
def __rmod__(self, other):
return self._combine(other, self.MOD, True)
def __rand__(self, other):
raise NotImplementedError(
"Use .bitand() and .bitor() for bitwise logical operations."
)
def __ror__(self, other):
raise NotImplementedError(
"Use .bitand() and .bitor() for bitwise logical operations."
)
def prepare_database_save(self, unused):
return self
class F(ExpressionNode):
"""
An expression representing the value of the given field.
"""
def __init__(self, name):
super(F, self).__init__(None, None, False)
self.name = name
def __deepcopy__(self, memodict):
obj = super(F, self).__deepcopy__(memodict)
obj.name = self.name
return obj
def prepare(self, evaluator, query, allow_joins):
return evaluator.prepare_leaf(self, query, allow_joins)
def evaluate(self, evaluator, qn, connection):
return evaluator.evaluate_leaf(self, qn, connection)
class DateModifierNode(ExpressionNode):
"""
Node that implements the following syntax:
filter(end_date__gt=F('start_date') + datetime.timedelta(days=3, seconds=200))
which translates into:
POSTGRES:
WHERE end_date > (start_date + INTERVAL '3 days 200 seconds')
MYSQL:
WHERE end_date > (start_date + INTERVAL '3 0:0:200:0' DAY_MICROSECOND)
ORACLE:
WHERE end_date > (start_date + INTERVAL '3 00:03:20.000000' DAY(1) TO SECOND(6))
SQLITE:
WHERE end_date > django_format_dtdelta(start_date, "+" "3", "200", "0")
(A custom function is used in order to preserve six digits of fractional
second information on sqlite, and to format both date and datetime values.)
Note that microsecond comparisons are not well supported with MySQL, since
MySQL does not store microsecond information.
Only adding and subtracting timedeltas is supported, attempts to use other
operations raise a TypeError.
"""
def __init__(self, children, connector, negated=False):
if len(children) != 2:
raise TypeError('Must specify a node and a timedelta.')
if not isinstance(children[1], datetime.timedelta):
raise TypeError('Second child must be a timedelta.')
if connector not in (self.ADD, self.SUB):
raise TypeError('Connector must be + or -, not %s' % connector)
super(DateModifierNode, self).__init__(children, connector, negated)
def evaluate(self, evaluator, qn, connection):
return evaluator.evaluate_date_modifier_node(self, qn, connection)
|
bsd-3-clause
|
chennan47/osf.io
|
osf/migrations/0060_reviews.py
|
5
|
4956
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-09-14 11:09
from __future__ import unicode_literals
from django.conf import settings
from django.core.management.sql import emit_post_migrate_signal
from django.db import migrations, models
import django.db.models.deletion
from api.preprint_providers.permissions import GroupHelper
import osf.models.base
import osf.utils.fields
def create_provider_auth_groups(apps, schema_editor):
# this is to make sure that the permissions created in an earlier migration exist!
emit_post_migrate_signal(2, False, 'default')
PreprintProvider = apps.get_model('osf', 'PreprintProvider')
for provider in PreprintProvider.objects.all():
GroupHelper(provider).update_provider_auth_groups()
class Migration(migrations.Migration):
dependencies = [
('osf', '0059_merge_20170914_1100'),
('guardian', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Action',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('_id', models.CharField(db_index=True, default=osf.models.base.generate_object_id, max_length=24, unique=True)),
('trigger', models.CharField(choices=[('accept', 'Accept'), ('edit_comment', 'Edit_Comment'), ('reject', 'Reject'), ('submit', 'Submit')], max_length=31)),
('from_state', models.CharField(choices=[('accepted', 'Accepted'), ('initial', 'Initial'), ('pending', 'Pending'), ('rejected', 'Rejected')], max_length=31)),
('to_state', models.CharField(choices=[('accepted', 'Accepted'), ('initial', 'Initial'), ('pending', 'Pending'), ('rejected', 'Rejected')], max_length=31)),
('comment', models.TextField(blank=True)),
('is_deleted', models.BooleanField(default=False)),
('date_created', osf.utils.fields.NonNaiveDateTimeField(auto_now_add=True)),
('date_modified', osf.utils.fields.NonNaiveDateTimeField(auto_now=True)),
('creator', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.AlterModelOptions(
name='preprintprovider',
options={'permissions': (('view_submissions', 'Can view all submissions to this provider'), ('add_moderator', 'Can add other users as moderators for this provider'), ('view_actions', 'Can view actions on submissions to this provider'), ('add_reviewer', 'Can add other users as reviewers for this provider'), ('review_assigned_submissions', 'Can submit reviews for submissions to this provider which have been assigned to this user'), ('assign_reviewer', 'Can assign reviewers to review specific submissions to this provider'), ('set_up_moderation', 'Can set up moderation for this provider'), ('view_assigned_submissions', 'Can view submissions to this provider which have been assigned to this user'), ('edit_reviews_settings', 'Can edit reviews settings for this provider'), ('accept_submissions', 'Can accept submissions to this provider'), ('reject_submissions', 'Can reject submissions to this provider'), ('edit_review_comments', 'Can edit comments on actions for this provider'), ('view_preprintprovider', 'Can view preprint provider details'))},
),
migrations.AddField(
model_name='preprintprovider',
name='reviews_comments_anonymous',
field=models.NullBooleanField(),
),
migrations.AddField(
model_name='preprintprovider',
name='reviews_comments_private',
field=models.NullBooleanField(),
),
migrations.AddField(
model_name='preprintprovider',
name='reviews_workflow',
field=models.CharField(blank=True, choices=[(None, 'None'), ('post-moderation', 'Post-Moderation'), ('pre-moderation', 'Pre-Moderation')], max_length=15, null=True),
),
migrations.AddField(
model_name='preprintservice',
name='date_last_transitioned',
field=models.DateTimeField(blank=True, db_index=True, null=True),
),
migrations.AddField(
model_name='preprintservice',
name='reviews_state',
field=models.CharField(choices=[('accepted', 'Accepted'), ('initial', 'Initial'), ('pending', 'Pending'), ('rejected', 'Rejected')], db_index=True, default='initial', max_length=15),
),
migrations.AddField(
model_name='action',
name='target',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='actions', to='osf.PreprintService'),
),
migrations.RunPython(
create_provider_auth_groups
)
]
|
apache-2.0
|
bratsche/Neutron-Drive
|
google_appengine/google/appengine/api/images/images_not_implemented_stub.py
|
5
|
1491
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""A NotImplemented Images API stub for when the PIL library is not found."""
class ImagesNotImplementedServiceStub(object):
"""Stub version of images API which raises a NotImplementedError."""
def MakeSyncCall(self, service, call, request, response, request_id=None):
"""Main entry point.
Args:
service: str, must be 'images'.
call: str, name of the RPC to make, must be part of ImagesService.
request: pb object, corresponding args to the 'call' argument.
response: pb object, return value for the 'call' argument.
request_id: A unique string identifying the request associated with the
API call.
"""
raise NotImplementedError("Unable to find the Python PIL library. Please "
"view the SDK documentation for details about "
"installing PIL on your system.")
|
bsd-3-clause
|
rootulp/exercism
|
python/luhn/luhn_test.py
|
12
|
1526
|
from collections import Counter
import unittest
from luhn import Luhn
class LuhnTests(unittest.TestCase):
def test_addends(self):
# uses a Counter to avoid specifying order of return value
self.assertEqual(Counter([1, 4, 1, 4, 1]),
Counter(Luhn(12121).addends()))
def test_addends_large(self):
# uses a Counter to avoid specifying order of return value
self.assertEqual(Counter([7, 6, 6, 1]),
Counter(Luhn(8631).addends()))
def test_checksum1(self):
self.assertEqual(2, Luhn(4913).checksum())
    def test_checksum2(self):
self.assertEqual(1, Luhn(201773).checksum())
def test_invalid_number(self):
self.assertFalse(Luhn(738).is_valid())
def test_valid_number(self):
self.assertTrue(Luhn(8739567).is_valid())
def test_create_valid_number1(self):
self.assertEqual(1230, Luhn.create(123))
def test_create_valid_number2(self):
self.assertEqual(8739567, Luhn.create(873956))
def test_create_valid_number3(self):
self.assertEqual(8372637564, Luhn.create(837263756))
def test_is_valid_can_be_called_repeatedly(self):
# This test was added, because we saw many implementations
# in which the first call to is_valid() worked, but the
        # second call failed.
number = Luhn(8739567)
self.assertTrue(number.is_valid())
self.assertTrue(number.is_valid())
if __name__ == '__main__':
unittest.main()
|
mit
|
sobjornstad/esc
|
tests/esc/test_helpme.py
|
1
|
3706
|
"""
Tests for the on-line help system.
"""
from decimal import Decimal
import pytest
from esc import builtin_stubs
from esc import display
from esc import helpme
from esc.status import status
from esc.commands import main_menu
from esc.registers import Registry
from esc.stack import StackState
# pylint: disable=redefined-outer-name
def test_status_message_anonymous():
"""
Anonymous functions should use their key as a status description.
"""
add_func = main_menu.child('+')
assert helpme.status_message(add_func) == "Help: '+' (press any key to return)"
def test_status_message_nonanonymous():
"Named functions should use their description instead."
exchange_func = main_menu.child('x')
assert (helpme.status_message(exchange_func) ==
"Help: 'exchange bos, sos' (press any key to return)")
def test_status_message_builtin():
"Builtins have a description."
quit_func = builtin_stubs.Quit()
assert helpme.status_message(quit_func) == "Help: 'quit' (press any key to return)"
class MockScreen:
"Mock for screen()."
helpw = None
called = set()
def getch_status(self):
self.called.add('getch_status')
return ord('q') # get back out of help
def refresh_stack(self, ss):
self.called.add('refresh_stack')
def refresh_status(self):
self.called.add('refresh_status')
def show_help_window(self,
is_menu,
help_title,
signature_info,
doc,
simulated_result):
self.called.add('show_help_window')
def display_menu(self, command):
self.called.add('display_menu')
def mock_screen(self):
"""
Fast way to create a callable that can return a singleton-ish
thing, to monkey-patch the global screen() object.
"""
return self
@pytest.fixture
def help_test_case(monkeypatch):
"Set up environment for a get_help() test case."
ss = StackState()
ss.push((Decimal(2), Decimal(3)))
registry = Registry()
mock_screen = MockScreen()
monkeypatch.setattr(helpme, 'screen', mock_screen.mock_screen)
monkeypatch.setattr(display, 'screen', mock_screen.mock_screen)
return ss, registry, mock_screen
default_call_set = {
'getch_status',
'refresh_stack',
'refresh_status',
'show_help_window',
}
def test_get_help_simple(help_test_case):
"We can get help on a simple (non-menu) function."
ss, registry, mock_screen = help_test_case
helpme.get_help('+', main_menu, ss, registry)
assert mock_screen.called == default_call_set
def test_get_help_menu_item(help_test_case):
"We can get help on a function in a menu."
ss, registry, mock_screen = help_test_case
constants_menu = main_menu.child('i')
helpme.get_help('p', constants_menu, ss, registry)
assert mock_screen.called == default_call_set
def test_get_help_menu(help_test_case):
"We can get help on a menu itself."
ss, registry, mock_screen = help_test_case
helpme.get_help('i', main_menu, ss, registry)
assert mock_screen.called == default_call_set.union({'display_menu'})
def test_get_help_invalid_key(help_test_case, monkeypatch):
"""
An error message is printed to the status bar when we ask for help on a
nonexistent menu item.
"""
ss, registry, mock_screen = help_test_case
the_error = None
def my_error(msg):
nonlocal the_error
the_error = msg
monkeypatch.setattr(status, 'error', my_error)
helpme.get_help('Q', main_menu, ss, registry)
assert the_error == "There's no option 'Q' in this menu."
|
gpl-3.0
|
uskudnik/ggrc-core
|
src/ggrc_workflows/migrations/versions/20140529010047_1d33919af441_initial_ggrc_workflows_migration.py
|
2
|
12596
|
"""Initial ggrc_workflows migration
Revision ID: 1d33919af441
Revises: None
Create Date: 2014-05-29 01:00:47.198955
"""
# revision identifiers, used by Alembic.
revision = '1d33919af441'
down_revision = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('tasks',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('end_date', sa.Date(), nullable=True),
sa.Column('start_date', sa.Date(), nullable=True),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('title', sa.String(length=250), nullable=False),
sa.Column('slug', sa.String(length=250), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('modified_by_id', sa.Integer(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('context_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['context_id'], ['contexts.id'], name='fk_tasks_context_id'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('slug', name='uq_tasks'),
sa.UniqueConstraint('title', name='uq_t_tasks')
)
op.create_index('fk_tasks_contexts', 'tasks', ['context_id'], unique=False)
op.create_table('task_entries',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('modified_by_id', sa.Integer(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('context_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['context_id'], ['contexts.id'], name='fk_task_entries_context_id'),
sa.PrimaryKeyConstraint('id')
)
op.create_index('fk_task_entries_contexts', 'task_entries', ['context_id'], unique=False)
op.create_table('workflows',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('end_date', sa.Date(), nullable=True),
sa.Column('start_date', sa.Date(), nullable=True),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('title', sa.String(length=250), nullable=False),
sa.Column('slug', sa.String(length=250), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('modified_by_id', sa.Integer(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('context_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['context_id'], ['contexts.id'], name='fk_workflows_context_id'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('slug', name='uq_workflows'),
sa.UniqueConstraint('title', name='uq_t_workflows')
)
op.create_index('fk_workflows_contexts', 'workflows', ['context_id'], unique=False)
op.create_table('workflow_people',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('workflow_id', sa.Integer(), nullable=False),
sa.Column('person_id', sa.Integer(), nullable=False),
sa.Column('status', sa.String(length=250), nullable=True),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('modified_by_id', sa.Integer(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('context_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['context_id'], ['contexts.id'], name='fk_workflow_people_context_id'),
sa.ForeignKeyConstraint(['person_id'], ['people.id'], name='fk_workflow_people_person_id'),
sa.ForeignKeyConstraint(['workflow_id'], ['workflows.id'], name='fk_workflow_people_workflow_id'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('workflow_id', 'person_id')
)
op.create_index('fk_workflow_people_contexts', 'workflow_people', ['context_id'], unique=False)
op.create_index('ix_person_id', 'workflow_people', ['person_id'], unique=False)
op.create_index('ix_workflow_id', 'workflow_people', ['workflow_id'], unique=False)
op.create_table('workflow_tasks',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('workflow_id', sa.Integer(), nullable=False),
sa.Column('task_id', sa.Integer(), nullable=False),
sa.Column('end_date', sa.Date(), nullable=True),
sa.Column('start_date', sa.Date(), nullable=True),
sa.Column('status', sa.String(length=250), nullable=True),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('modified_by_id', sa.Integer(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('context_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['context_id'], ['contexts.id'], name='fk_workflow_tasks_context_id'),
sa.ForeignKeyConstraint(['task_id'], ['tasks.id'], name='fk_workflow_tasks_task_id'),
sa.ForeignKeyConstraint(['workflow_id'], ['workflows.id'], name='fk_workflow_tasks_workflow_id'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('workflow_id', 'task_id')
)
op.create_index('fk_workflow_tasks_contexts', 'workflow_tasks', ['context_id'], unique=False)
op.create_table('task_groups',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('workflow_id', sa.Integer(), nullable=False),
sa.Column('contact_id', sa.Integer(), nullable=True),
sa.Column('end_date', sa.Date(), nullable=True),
sa.Column('start_date', sa.Date(), nullable=True),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('title', sa.String(length=250), nullable=False),
sa.Column('slug', sa.String(length=250), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('modified_by_id', sa.Integer(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('context_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['contact_id'], ['people.id'], name='fk_task_groups_contact_id'),
sa.ForeignKeyConstraint(['context_id'], ['contexts.id'], name='fk_task_groups_context_id'),
sa.ForeignKeyConstraint(['workflow_id'], ['workflows.id'], name='fk_task_groups_workflow_id'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('slug', name='uq_task_groups'),
sa.UniqueConstraint('title', name='uq_t_task_groups')
)
op.create_index('fk_task_groups_contact', 'task_groups', ['contact_id'], unique=False)
op.create_index('fk_task_groups_contexts', 'task_groups', ['context_id'], unique=False)
op.create_table('workflow_objects',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('workflow_id', sa.Integer(), nullable=False),
sa.Column('object_id', sa.Integer(), nullable=False),
sa.Column('object_type', sa.String(length=250), nullable=False),
sa.Column('end_date', sa.Date(), nullable=True),
sa.Column('start_date', sa.Date(), nullable=True),
sa.Column('status', sa.String(length=250), nullable=True),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('modified_by_id', sa.Integer(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('context_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['context_id'], ['contexts.id'], name='fk_workflow_objects_context_id'),
sa.ForeignKeyConstraint(['workflow_id'], ['workflows.id'], name='fk_workflow_objects_workflow_id'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('workflow_id', 'object_id', 'object_type')
)
op.create_index('fk_workflow_objects_contexts', 'workflow_objects', ['context_id'], unique=False)
op.create_index('ix_workflow_id', 'workflow_objects', ['workflow_id'], unique=False)
op.create_table('task_group_objects',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('task_group_id', sa.Integer(), nullable=False),
sa.Column('object_id', sa.Integer(), nullable=False),
sa.Column('object_type', sa.String(length=250), nullable=False),
sa.Column('end_date', sa.Date(), nullable=True),
sa.Column('start_date', sa.Date(), nullable=True),
sa.Column('status', sa.String(length=250), nullable=True),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('modified_by_id', sa.Integer(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('context_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['context_id'], ['contexts.id'], name='fk_task_group_objects_context_id'),
sa.ForeignKeyConstraint(['task_group_id'], ['task_groups.id'], name='fk_task_group_objects_task_group_id'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('task_group_id', 'object_id', 'object_type')
)
op.create_index('fk_task_group_objects_contexts', 'task_group_objects', ['context_id'], unique=False)
op.create_index('ix_task_group_id', 'task_group_objects', ['task_group_id'], unique=False)
op.create_table('task_group_tasks',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('task_group_id', sa.Integer(), nullable=False),
sa.Column('task_id', sa.Integer(), nullable=False),
sa.Column('end_date', sa.Date(), nullable=True),
sa.Column('start_date', sa.Date(), nullable=True),
sa.Column('status', sa.String(length=250), nullable=True),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('modified_by_id', sa.Integer(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('context_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['context_id'], ['contexts.id'], name='fk_task_group_tasks_context_id'),
sa.ForeignKeyConstraint(['task_group_id'], ['task_groups.id'], name='fk_task_group_tasks_task_group_id'),
sa.ForeignKeyConstraint(['task_id'], ['tasks.id'], name='fk_task_group_tasks_task_id'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('task_group_id', 'task_id')
)
op.create_index('fk_task_group_tasks_contexts', 'task_group_tasks', ['context_id'], unique=False)
def downgrade():
op.drop_constraint('fk_task_group_tasks_context_id', 'task_group_tasks', type_='foreignkey')
op.drop_index('fk_task_group_tasks_contexts', table_name='task_group_tasks')
op.drop_table('task_group_tasks')
op.drop_index('ix_task_group_id', table_name='task_group_objects')
op.drop_constraint('fk_task_group_objects_context_id', 'task_group_objects', type_='foreignkey')
op.drop_index('fk_task_group_objects_contexts', table_name='task_group_objects')
op.drop_table('task_group_objects')
op.drop_index('ix_workflow_id', table_name='workflow_objects')
op.drop_constraint('fk_workflow_objects_context_id', 'workflow_objects', type_='foreignkey')
op.drop_index('fk_workflow_objects_contexts', table_name='workflow_objects')
op.drop_table('workflow_objects')
op.drop_constraint('fk_task_groups_context_id', 'task_groups', type_='foreignkey')
op.drop_index('fk_task_groups_contexts', table_name='task_groups')
op.drop_constraint('fk_task_groups_contact_id', 'task_groups', type_='foreignkey')
op.drop_index('fk_task_groups_contact', table_name='task_groups')
op.drop_table('task_groups')
op.drop_constraint('fk_workflow_tasks_context_id', 'workflow_tasks', type_='foreignkey')
op.drop_index('fk_workflow_tasks_contexts', table_name='workflow_tasks')
op.drop_table('workflow_tasks')
op.drop_index('ix_workflow_id', table_name='workflow_people')
op.drop_constraint('fk_workflow_people_person_id', 'workflow_people', type_='foreignkey')
op.drop_index('ix_person_id', table_name='workflow_people')
op.drop_constraint('fk_workflow_people_context_id', 'workflow_people', type_='foreignkey')
op.drop_index('fk_workflow_people_contexts', table_name='workflow_people')
op.drop_table('workflow_people')
op.drop_constraint('fk_workflows_context_id', 'workflows', type_='foreignkey')
op.drop_index('fk_workflows_contexts', table_name='workflows')
op.drop_table('workflows')
op.drop_constraint('fk_task_entries_context_id', 'task_entries', type_='foreignkey')
op.drop_index('fk_task_entries_contexts', table_name='task_entries')
op.drop_table('task_entries')
op.drop_constraint('fk_tasks_context_id', 'tasks', type_='foreignkey')
op.drop_index('fk_tasks_contexts', table_name='tasks')
op.drop_table('tasks')
|
apache-2.0
|
jeffdwyatt/taiga-back
|
taiga/feedback/apps.py
|
21
|
1263
|
# Copyright (C) 2014 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2014 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014 David Barragán <bameda@dbarragan.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.apps import AppConfig
from django.apps import apps
from django.conf import settings
from django.conf.urls import include, url
from .routers import router
class FeedbackAppConfig(AppConfig):
name = "taiga.feedback"
verbose_name = "Feedback"
def ready(self):
if settings.FEEDBACK_ENABLED:
from taiga.urls import urlpatterns
urlpatterns.append(url(r'^api/v1/', include(router.urls)))
|
agpl-3.0
|
kpiorno/kivy3dgui
|
kivy3dgui/objloader.py
|
1
|
5490
|
from kivy.logger import Logger
import os
class MeshData(object):
def __init__(self, **kwargs):
self.name = kwargs.get("name")
self.vertex_format = [
('v_pos', 3, 'float'),
('v_normal', 3, 'float'),
('v_tc0', 2, 'float')]
self.vertices = []
self.indices = []
def calculate_normals(self):
        for i in range(len(self.indices) // 3):  # integer division: three indices per triangle
fi = i * 3
v1i = self.indices[fi]
v2i = self.indices[fi + 1]
v3i = self.indices[fi + 2]
vs = self.vertices
p1 = [vs[v1i + c] for c in range(3)]
p2 = [vs[v2i + c] for c in range(3)]
p3 = [vs[v3i + c] for c in range(3)]
u,v = [0,0,0], [0,0,0]
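            # edge vectors of the triangle: v = p2 - p1 and u = p3 - p1;
            # their cross product (computed below) gives the face normal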
for j in range(3):
v[j] = p2[j] - p1[j]
u[j] = p3[j] - p1[j]
n = [0,0,0]
n[0] = u[1] * v[2] - u[2] * v[1]
n[1] = u[2] * v[0] - u[0] * v[2]
n[2] = u[0] * v[1] - u[1] * v[0]
for k in range(3):
self.vertices[v1i + 3 + k] = n[k]
self.vertices[v2i + 3 + k] = n[k]
self.vertices[v3i + 3 + k] = n[k]
class ObjFile:
def finish_object(self):
        if self._current_object is None:
return
mesh = [MeshData()]
cont_mesh=0
idx = 0
for f in self.faces:
verts = f[0]
norms = f[1]
tcs = f[2]
material_ = list(map(float, f[3]))
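            # start a new MeshData chunk once the index buffer hits the 16-bit
            # limit (Kivy meshes appear to use unsigned short vertex indices)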
if len(mesh[cont_mesh].indices) == 65535:
mesh.append(MeshData())
cont_mesh+=1
idx=0
for i in range(3):
#get normal components
n = (0.0, 0.0, 0.0)
if norms[i] != -1:
n = self.normals[norms[i]-1]
#get texture coordinate components
t = (0.4, 0.4)
if tcs[i] != -1:
t = self.texcoords[tcs[i]-1]
#get vertex components
v = self.vertices[verts[i]-1]
data = [v[0], v[1], v[2], n[0], n[1], n[2], t[0], t[1], material_[0], material_[1], material_[2]]
mesh[cont_mesh].vertices.extend(data)
tri = [idx, idx+1, idx+2]
mesh[cont_mesh].indices.extend(tri)
idx += 3
self.objects[self._current_object] = mesh
#mesh.calculate_normals()
self.faces = []
def __init__(self, filename, swapyz=False):
"""Loads a Wavefront OBJ file. """
self.objects = {}
self.vertices = []
self.normals = []
self.texcoords = []
self.faces = []
self.mtl = None
self._current_object = None
material = None
for line in open(filename, "r"):
if line.startswith('#'): continue
if line.startswith('s'): continue
values = line.split()
if not values: continue
if values[0] == 'o':
self.finish_object()
self._current_object = values[1]
elif values[0] == 'mtllib':
mtl_path = mtl_filename = values[1]
if (os.path.isabs(filename) and not os.path.isabs(mtl_filename)) or \
(os.path.dirname(filename) and not os.path.dirname(mtl_filename)):
# if needed, correct the mtl path to be relative or same-dir to/as the object path
mtl_path = os.path.join(os.path.dirname(filename), mtl_filename)
self.mtl = MTL(mtl_path)
elif values[0] in ('usemtl', 'usemat'):
material = values[1]
if values[0] == 'v':
v = list(map(float, values[1:4]))
if swapyz:
v = v[0], v[2], v[1]
self.vertices.append(v)
elif values[0] == 'vn':
v = list(map(float, values[1:4]))
if swapyz:
v = v[0], v[2], v[1]
self.normals.append(v)
elif values[0] == 'vt':
self.texcoords.append(list(map(float, values[1:3])))
elif values[0] == 'f':
face = []
texcoords = []
norms = []
for v in values[1:]:
w = v.split('/')
face.append(int(w[0]))
if len(w) >= 2 and len(w[1]) > 0:
texcoords.append(int(w[1]))
else:
texcoords.append(-1)
if len(w) >= 3 and len(w[2]) > 0:
norms.append(int(w[2]))
else:
norms.append(-1)
self.faces.append((face, norms, texcoords, self.mtl[material]["Kd"] if self.mtl!=None else [1., 1., 1.]))
self.finish_object()
def MTL(filename):
contents = {}
mtl = None
if not os.path.exists(filename):
return
for line in open(filename, "r"):
if line.startswith('#'): continue
values = line.split()
if not values: continue
if values[0] == 'newmtl':
mtl = contents[values[1]] = {}
elif mtl is None:
raise ValueError("mtl file doesn't start with newmtl stmt")
mtl[values[0]] = values[1:]
return contents
|
mit
|
pasiegel/SickGear
|
lib/hachoir_core/iso639.py
|
129
|
18544
|
# -*- coding: utf-8 -*-
"""
ISO639-2 standard: the module only contains the dictionary ISO639_2,
which maps a three-letter language code (e.g. "fre") to a language
name in English (e.g. "French").
"""
# ISO-639, the list comes from:
# http://www.loc.gov/standards/iso639-2/php/English_list.php
_ISO639 = (
(u"Abkhazian", "abk", "ab"),
(u"Achinese", "ace", None),
(u"Acoli", "ach", None),
(u"Adangme", "ada", None),
(u"Adygei", "ady", None),
(u"Adyghe", "ady", None),
(u"Afar", "aar", "aa"),
(u"Afrihili", "afh", None),
(u"Afrikaans", "afr", "af"),
(u"Afro-Asiatic (Other)", "afa", None),
(u"Ainu", "ain", None),
(u"Akan", "aka", "ak"),
(u"Akkadian", "akk", None),
(u"Albanian", "alb/sqi", "sq"),
(u"Alemani", "gsw", None),
(u"Aleut", "ale", None),
(u"Algonquian languages", "alg", None),
(u"Altaic (Other)", "tut", None),
(u"Amharic", "amh", "am"),
(u"Angika", "anp", None),
(u"Apache languages", "apa", None),
(u"Arabic", "ara", "ar"),
(u"Aragonese", "arg", "an"),
(u"Aramaic", "arc", None),
(u"Arapaho", "arp", None),
(u"Araucanian", "arn", None),
(u"Arawak", "arw", None),
(u"Armenian", "arm/hye", "hy"),
(u"Aromanian", "rup", None),
(u"Artificial (Other)", "art", None),
(u"Arumanian", "rup", None),
(u"Assamese", "asm", "as"),
(u"Asturian", "ast", None),
(u"Athapascan languages", "ath", None),
(u"Australian languages", "aus", None),
(u"Austronesian (Other)", "map", None),
(u"Avaric", "ava", "av"),
(u"Avestan", "ave", "ae"),
(u"Awadhi", "awa", None),
(u"Aymara", "aym", "ay"),
(u"Azerbaijani", "aze", "az"),
(u"Bable", "ast", None),
(u"Balinese", "ban", None),
(u"Baltic (Other)", "bat", None),
(u"Baluchi", "bal", None),
(u"Bambara", "bam", "bm"),
(u"Bamileke languages", "bai", None),
(u"Banda", "bad", None),
(u"Bantu (Other)", "bnt", None),
(u"Basa", "bas", None),
(u"Bashkir", "bak", "ba"),
(u"Basque", "baq/eus", "eu"),
(u"Batak (Indonesia)", "btk", None),
(u"Beja", "bej", None),
(u"Belarusian", "bel", "be"),
(u"Bemba", "bem", None),
(u"Bengali", "ben", "bn"),
(u"Berber (Other)", "ber", None),
(u"Bhojpuri", "bho", None),
(u"Bihari", "bih", "bh"),
(u"Bikol", "bik", None),
(u"Bilin", "byn", None),
(u"Bini", "bin", None),
(u"Bislama", "bis", "bi"),
(u"Blin", "byn", None),
(u"Bokmål, Norwegian", "nob", "nb"),
(u"Bosnian", "bos", "bs"),
(u"Braj", "bra", None),
(u"Breton", "bre", "br"),
(u"Buginese", "bug", None),
(u"Bulgarian", "bul", "bg"),
(u"Buriat", "bua", None),
(u"Burmese", "bur/mya", "my"),
(u"Caddo", "cad", None),
(u"Carib", "car", None),
(u"Castilian", "spa", "es"),
(u"Catalan", "cat", "ca"),
(u"Caucasian (Other)", "cau", None),
(u"Cebuano", "ceb", None),
(u"Celtic (Other)", "cel", None),
(u"Central American Indian (Other)", "cai", None),
(u"Chagatai", "chg", None),
(u"Chamic languages", "cmc", None),
(u"Chamorro", "cha", "ch"),
(u"Chechen", "che", "ce"),
(u"Cherokee", "chr", None),
(u"Chewa", "nya", "ny"),
(u"Cheyenne", "chy", None),
(u"Chibcha", "chb", None),
(u"Chichewa", "nya", "ny"),
(u"Chinese", "chi/zho", "zh"),
(u"Chinook jargon", "chn", None),
(u"Chipewyan", "chp", None),
(u"Choctaw", "cho", None),
(u"Chuang", "zha", "za"),
(u"Church Slavic", "chu", "cu"),
(u"Church Slavonic", "chu", "cu"),
(u"Chuukese", "chk", None),
(u"Chuvash", "chv", "cv"),
(u"Classical Nepal Bhasa", "nwc", None),
(u"Classical Newari", "nwc", None),
(u"Coptic", "cop", None),
(u"Cornish", "cor", "kw"),
(u"Corsican", "cos", "co"),
(u"Cree", "cre", "cr"),
(u"Creek", "mus", None),
(u"Creoles and pidgins (Other)", "crp", None),
(u"Creoles and pidgins, English based (Other)", "cpe", None),
(u"Creoles and pidgins, French-based (Other)", "cpf", None),
(u"Creoles and pidgins, Portuguese-based (Other)", "cpp", None),
(u"Crimean Tatar", "crh", None),
(u"Crimean Turkish", "crh", None),
(u"Croatian", "scr/hrv", "hr"),
(u"Cushitic (Other)", "cus", None),
(u"Czech", "cze/ces", "cs"),
(u"Dakota", "dak", None),
(u"Danish", "dan", "da"),
(u"Dargwa", "dar", None),
(u"Dayak", "day", None),
(u"Delaware", "del", None),
(u"Dhivehi", "div", "dv"),
(u"Dimili", "zza", None),
(u"Dimli", "zza", None),
(u"Dinka", "din", None),
(u"Divehi", "div", "dv"),
(u"Dogri", "doi", None),
(u"Dogrib", "dgr", None),
(u"Dravidian (Other)", "dra", None),
(u"Duala", "dua", None),
(u"Dutch", "dut/nld", "nl"),
(u"Dutch, Middle (ca.1050-1350)", "dum", None),
(u"Dyula", "dyu", None),
(u"Dzongkha", "dzo", "dz"),
(u"Eastern Frisian", "frs", None),
(u"Efik", "efi", None),
(u"Egyptian (Ancient)", "egy", None),
(u"Ekajuk", "eka", None),
(u"Elamite", "elx", None),
(u"English", "eng", "en"),
(u"English, Middle (1100-1500)", "enm", None),
(u"English, Old (ca.450-1100)", "ang", None),
(u"Erzya", "myv", None),
(u"Esperanto", "epo", "eo"),
(u"Estonian", "est", "et"),
(u"Ewe", "ewe", "ee"),
(u"Ewondo", "ewo", None),
(u"Fang", "fan", None),
(u"Fanti", "fat", None),
(u"Faroese", "fao", "fo"),
(u"Fijian", "fij", "fj"),
(u"Filipino", "fil", None),
(u"Finnish", "fin", "fi"),
(u"Finno-Ugrian (Other)", "fiu", None),
(u"Flemish", "dut/nld", "nl"),
(u"Fon", "fon", None),
(u"French", "fre/fra", "fr"),
(u"French, Middle (ca.1400-1600)", "frm", None),
(u"French, Old (842-ca.1400)", "fro", None),
(u"Friulian", "fur", None),
(u"Fulah", "ful", "ff"),
(u"Ga", "gaa", None),
(u"Gaelic", "gla", "gd"),
(u"Galician", "glg", "gl"),
(u"Ganda", "lug", "lg"),
(u"Gayo", "gay", None),
(u"Gbaya", "gba", None),
(u"Geez", "gez", None),
(u"Georgian", "geo/kat", "ka"),
(u"German", "ger/deu", "de"),
(u"German, Low", "nds", None),
(u"German, Middle High (ca.1050-1500)", "gmh", None),
(u"German, Old High (ca.750-1050)", "goh", None),
(u"Germanic (Other)", "gem", None),
(u"Gikuyu", "kik", "ki"),
(u"Gilbertese", "gil", None),
(u"Gondi", "gon", None),
(u"Gorontalo", "gor", None),
(u"Gothic", "got", None),
(u"Grebo", "grb", None),
(u"Greek, Ancient (to 1453)", "grc", None),
(u"Greek, Modern (1453-)", "gre/ell", "el"),
(u"Greenlandic", "kal", "kl"),
(u"Guarani", "grn", "gn"),
(u"Gujarati", "guj", "gu"),
(u"Gwich´in", "gwi", None),
(u"Haida", "hai", None),
(u"Haitian", "hat", "ht"),
(u"Haitian Creole", "hat", "ht"),
(u"Hausa", "hau", "ha"),
(u"Hawaiian", "haw", None),
(u"Hebrew", "heb", "he"),
(u"Herero", "her", "hz"),
(u"Hiligaynon", "hil", None),
(u"Himachali", "him", None),
(u"Hindi", "hin", "hi"),
(u"Hiri Motu", "hmo", "ho"),
(u"Hittite", "hit", None),
(u"Hmong", "hmn", None),
(u"Hungarian", "hun", "hu"),
(u"Hupa", "hup", None),
(u"Iban", "iba", None),
(u"Icelandic", "ice/isl", "is"),
(u"Ido", "ido", "io"),
(u"Igbo", "ibo", "ig"),
(u"Ijo", "ijo", None),
(u"Iloko", "ilo", None),
(u"Inari Sami", "smn", None),
(u"Indic (Other)", "inc", None),
(u"Indo-European (Other)", "ine", None),
(u"Indonesian", "ind", "id"),
(u"Ingush", "inh", None),
(u"Interlingua", "ina", "ia"),
(u"Interlingue", "ile", "ie"),
(u"Inuktitut", "iku", "iu"),
(u"Inupiaq", "ipk", "ik"),
(u"Iranian (Other)", "ira", None),
(u"Irish", "gle", "ga"),
(u"Irish, Middle (900-1200)", "mga", None),
(u"Irish, Old (to 900)", "sga", None),
(u"Iroquoian languages", "iro", None),
(u"Italian", "ita", "it"),
(u"Japanese", "jpn", "ja"),
(u"Javanese", "jav", "jv"),
(u"Judeo-Arabic", "jrb", None),
(u"Judeo-Persian", "jpr", None),
(u"Kabardian", "kbd", None),
(u"Kabyle", "kab", None),
(u"Kachin", "kac", None),
(u"Kalaallisut", "kal", "kl"),
(u"Kalmyk", "xal", None),
(u"Kamba", "kam", None),
(u"Kannada", "kan", "kn"),
(u"Kanuri", "kau", "kr"),
(u"Kara-Kalpak", "kaa", None),
(u"Karachay-Balkar", "krc", None),
(u"Karelian", "krl", None),
(u"Karen", "kar", None),
(u"Kashmiri", "kas", "ks"),
(u"Kashubian", "csb", None),
(u"Kawi", "kaw", None),
(u"Kazakh", "kaz", "kk"),
(u"Khasi", "kha", None),
(u"Khmer", "khm", "km"),
(u"Khoisan (Other)", "khi", None),
(u"Khotanese", "kho", None),
(u"Kikuyu", "kik", "ki"),
(u"Kimbundu", "kmb", None),
(u"Kinyarwanda", "kin", "rw"),
(u"Kirdki", "zza", None),
(u"Kirghiz", "kir", "ky"),
(u"Kirmanjki", "zza", None),
(u"Klingon", "tlh", None),
(u"Komi", "kom", "kv"),
(u"Kongo", "kon", "kg"),
(u"Konkani", "kok", None),
(u"Korean", "kor", "ko"),
(u"Kosraean", "kos", None),
(u"Kpelle", "kpe", None),
(u"Kru", "kro", None),
(u"Kuanyama", "kua", "kj"),
(u"Kumyk", "kum", None),
(u"Kurdish", "kur", "ku"),
(u"Kurukh", "kru", None),
(u"Kutenai", "kut", None),
(u"Kwanyama", "kua", "kj"),
(u"Ladino", "lad", None),
(u"Lahnda", "lah", None),
(u"Lamba", "lam", None),
(u"Lao", "lao", "lo"),
(u"Latin", "lat", "la"),
(u"Latvian", "lav", "lv"),
(u"Letzeburgesch", "ltz", "lb"),
(u"Lezghian", "lez", None),
(u"Limburgan", "lim", "li"),
(u"Limburger", "lim", "li"),
(u"Limburgish", "lim", "li"),
(u"Lingala", "lin", "ln"),
(u"Lithuanian", "lit", "lt"),
(u"Lojban", "jbo", None),
(u"Low German", "nds", None),
(u"Low Saxon", "nds", None),
(u"Lower Sorbian", "dsb", None),
(u"Lozi", "loz", None),
(u"Luba-Katanga", "lub", "lu"),
(u"Luba-Lulua", "lua", None),
(u"Luiseno", "lui", None),
(u"Lule Sami", "smj", None),
(u"Lunda", "lun", None),
(u"Luo (Kenya and Tanzania)", "luo", None),
(u"Lushai", "lus", None),
(u"Luxembourgish", "ltz", "lb"),
(u"Macedo-Romanian", "rup", None),
(u"Macedonian", "mac/mkd", "mk"),
(u"Madurese", "mad", None),
(u"Magahi", "mag", None),
(u"Maithili", "mai", None),
(u"Makasar", "mak", None),
(u"Malagasy", "mlg", "mg"),
(u"Malay", "may/msa", "ms"),
(u"Malayalam", "mal", "ml"),
(u"Maldivian", "div", "dv"),
(u"Maltese", "mlt", "mt"),
(u"Manchu", "mnc", None),
(u"Mandar", "mdr", None),
(u"Mandingo", "man", None),
(u"Manipuri", "mni", None),
(u"Manobo languages", "mno", None),
(u"Manx", "glv", "gv"),
(u"Maori", "mao/mri", "mi"),
(u"Marathi", "mar", "mr"),
(u"Mari", "chm", None),
(u"Marshallese", "mah", "mh"),
(u"Marwari", "mwr", None),
(u"Masai", "mas", None),
(u"Mayan languages", "myn", None),
(u"Mende", "men", None),
(u"Mi'kmaq", "mic", None),
(u"Micmac", "mic", None),
(u"Minangkabau", "min", None),
(u"Mirandese", "mwl", None),
(u"Miscellaneous languages", "mis", None),
(u"Mohawk", "moh", None),
(u"Moksha", "mdf", None),
(u"Moldavian", "mol", "mo"),
(u"Mon-Khmer (Other)", "mkh", None),
(u"Mongo", "lol", None),
(u"Mongolian", "mon", "mn"),
(u"Mossi", "mos", None),
(u"Multiple languages", "mul", None),
(u"Munda languages", "mun", None),
(u"N'Ko", "nqo", None),
(u"Nahuatl", "nah", None),
(u"Nauru", "nau", "na"),
(u"Navaho", "nav", "nv"),
(u"Navajo", "nav", "nv"),
(u"Ndebele, North", "nde", "nd"),
(u"Ndebele, South", "nbl", "nr"),
(u"Ndonga", "ndo", "ng"),
(u"Neapolitan", "nap", None),
(u"Nepal Bhasa", "new", None),
(u"Nepali", "nep", "ne"),
(u"Newari", "new", None),
(u"Nias", "nia", None),
(u"Niger-Kordofanian (Other)", "nic", None),
(u"Nilo-Saharan (Other)", "ssa", None),
(u"Niuean", "niu", None),
(u"No linguistic content", "zxx", None),
(u"Nogai", "nog", None),
(u"Norse, Old", "non", None),
(u"North American Indian", "nai", None),
(u"North Ndebele", "nde", "nd"),
(u"Northern Frisian", "frr", None),
(u"Northern Sami", "sme", "se"),
(u"Northern Sotho", "nso", None),
(u"Norwegian", "nor", "no"),
(u"Norwegian Bokmål", "nob", "nb"),
(u"Norwegian Nynorsk", "nno", "nn"),
(u"Nubian languages", "nub", None),
(u"Nyamwezi", "nym", None),
(u"Nyanja", "nya", "ny"),
(u"Nyankole", "nyn", None),
(u"Nynorsk, Norwegian", "nno", "nn"),
(u"Nyoro", "nyo", None),
(u"Nzima", "nzi", None),
(u"Occitan (post 1500)", "oci", "oc"),
(u"Oirat", "xal", None),
(u"Ojibwa", "oji", "oj"),
(u"Old Bulgarian", "chu", "cu"),
(u"Old Church Slavonic", "chu", "cu"),
(u"Old Newari", "nwc", None),
(u"Old Slavonic", "chu", "cu"),
(u"Oriya", "ori", "or"),
(u"Oromo", "orm", "om"),
(u"Osage", "osa", None),
(u"Ossetian", "oss", "os"),
(u"Ossetic", "oss", "os"),
(u"Otomian languages", "oto", None),
(u"Pahlavi", "pal", None),
(u"Palauan", "pau", None),
(u"Pali", "pli", "pi"),
(u"Pampanga", "pam", None),
(u"Pangasinan", "pag", None),
(u"Panjabi", "pan", "pa"),
(u"Papiamento", "pap", None),
(u"Papuan (Other)", "paa", None),
(u"Pedi", "nso", None),
(u"Persian", "per/fas", "fa"),
(u"Persian, Old (ca.600-400 B.C.)", "peo", None),
(u"Philippine (Other)", "phi", None),
(u"Phoenician", "phn", None),
(u"Pilipino", "fil", None),
(u"Pohnpeian", "pon", None),
(u"Polish", "pol", "pl"),
(u"Portuguese", "por", "pt"),
(u"Prakrit languages", "pra", None),
(u"Provençal", "oci", "oc"),
(u"Provençal, Old (to 1500)", "pro", None),
(u"Punjabi", "pan", "pa"),
(u"Pushto", "pus", "ps"),
(u"Quechua", "que", "qu"),
(u"Raeto-Romance", "roh", "rm"),
(u"Rajasthani", "raj", None),
(u"Rapanui", "rap", None),
(u"Rarotongan", "rar", None),
(u"Reserved for local use", "qaa/qtz", None),
(u"Romance (Other)", "roa", None),
(u"Romanian", "rum/ron", "ro"),
(u"Romany", "rom", None),
(u"Rundi", "run", "rn"),
(u"Russian", "rus", "ru"),
(u"Salishan languages", "sal", None),
(u"Samaritan Aramaic", "sam", None),
(u"Sami languages (Other)", "smi", None),
(u"Samoan", "smo", "sm"),
(u"Sandawe", "sad", None),
(u"Sango", "sag", "sg"),
(u"Sanskrit", "san", "sa"),
(u"Santali", "sat", None),
(u"Sardinian", "srd", "sc"),
(u"Sasak", "sas", None),
(u"Saxon, Low", "nds", None),
(u"Scots", "sco", None),
(u"Scottish Gaelic", "gla", "gd"),
(u"Selkup", "sel", None),
(u"Semitic (Other)", "sem", None),
(u"Sepedi", "nso", None),
(u"Serbian", "scc/srp", "sr"),
(u"Serer", "srr", None),
(u"Shan", "shn", None),
(u"Shona", "sna", "sn"),
(u"Sichuan Yi", "iii", "ii"),
(u"Sicilian", "scn", None),
(u"Sidamo", "sid", None),
(u"Sign Languages", "sgn", None),
(u"Siksika", "bla", None),
(u"Sindhi", "snd", "sd"),
(u"Sinhala", "sin", "si"),
(u"Sinhalese", "sin", "si"),
(u"Sino-Tibetan (Other)", "sit", None),
(u"Siouan languages", "sio", None),
(u"Skolt Sami", "sms", None),
(u"Slave (Athapascan)", "den", None),
(u"Slavic (Other)", "sla", None),
(u"Slovak", "slo/slk", "sk"),
(u"Slovenian", "slv", "sl"),
(u"Sogdian", "sog", None),
(u"Somali", "som", "so"),
(u"Songhai", "son", None),
(u"Soninke", "snk", None),
(u"Sorbian languages", "wen", None),
(u"Sotho, Northern", "nso", None),
(u"Sotho, Southern", "sot", "st"),
(u"South American Indian (Other)", "sai", None),
(u"South Ndebele", "nbl", "nr"),
(u"Southern Altai", "alt", None),
(u"Southern Sami", "sma", None),
(u"Spanish", "spa", "es"),
(u"Sranan Togo", "srn", None),
(u"Sukuma", "suk", None),
(u"Sumerian", "sux", None),
(u"Sundanese", "sun", "su"),
(u"Susu", "sus", None),
(u"Swahili", "swa", "sw"),
(u"Swati", "ssw", "ss"),
(u"Swedish", "swe", "sv"),
(u"Swiss German", "gsw", None),
(u"Syriac", "syr", None),
(u"Tagalog", "tgl", "tl"),
(u"Tahitian", "tah", "ty"),
(u"Tai (Other)", "tai", None),
(u"Tajik", "tgk", "tg"),
(u"Tamashek", "tmh", None),
(u"Tamil", "tam", "ta"),
(u"Tatar", "tat", "tt"),
(u"Telugu", "tel", "te"),
(u"Tereno", "ter", None),
(u"Tetum", "tet", None),
(u"Thai", "tha", "th"),
(u"Tibetan", "tib/bod", "bo"),
(u"Tigre", "tig", None),
(u"Tigrinya", "tir", "ti"),
(u"Timne", "tem", None),
(u"Tiv", "tiv", None),
(u"tlhIngan-Hol", "tlh", None),
(u"Tlingit", "tli", None),
(u"Tok Pisin", "tpi", None),
(u"Tokelau", "tkl", None),
(u"Tonga (Nyasa)", "tog", None),
(u"Tonga (Tonga Islands)", "ton", "to"),
(u"Tsimshian", "tsi", None),
(u"Tsonga", "tso", "ts"),
(u"Tswana", "tsn", "tn"),
(u"Tumbuka", "tum", None),
(u"Tupi languages", "tup", None),
(u"Turkish", "tur", "tr"),
(u"Turkish, Ottoman (1500-1928)", "ota", None),
(u"Turkmen", "tuk", "tk"),
(u"Tuvalu", "tvl", None),
(u"Tuvinian", "tyv", None),
(u"Twi", "twi", "tw"),
(u"Udmurt", "udm", None),
(u"Ugaritic", "uga", None),
(u"Uighur", "uig", "ug"),
(u"Ukrainian", "ukr", "uk"),
(u"Umbundu", "umb", None),
(u"Undetermined", "und", None),
(u"Upper Sorbian", "hsb", None),
(u"Urdu", "urd", "ur"),
(u"Uyghur", "uig", "ug"),
(u"Uzbek", "uzb", "uz"),
(u"Vai", "vai", None),
(u"Valencian", "cat", "ca"),
(u"Venda", "ven", "ve"),
(u"Vietnamese", "vie", "vi"),
(u"Volapük", "vol", "vo"),
(u"Votic", "vot", None),
(u"Wakashan languages", "wak", None),
(u"Walamo", "wal", None),
(u"Walloon", "wln", "wa"),
(u"Waray", "war", None),
(u"Washo", "was", None),
(u"Welsh", "wel/cym", "cy"),
(u"Western Frisian", "fry", "fy"),
(u"Wolof", "wol", "wo"),
(u"Xhosa", "xho", "xh"),
(u"Yakut", "sah", None),
(u"Yao", "yao", None),
(u"Yapese", "yap", None),
(u"Yiddish", "yid", "yi"),
(u"Yoruba", "yor", "yo"),
(u"Yupik languages", "ypk", None),
(u"Zande", "znd", None),
(u"Zapotec", "zap", None),
(u"Zaza", "zza", None),
(u"Zazaki", "zza", None),
(u"Zenaga", "zen", None),
(u"Zhuang", "zha", "za"),
(u"Zulu", "zul", "zu"),
(u"Zuni", "zun", None),
)
# Bibliographic ISO-639-2 form (eg. "fre" => "French")
ISO639_2 = {}
for line in _ISO639:
for key in line[1].split("/"):
ISO639_2[key] = line[0]
del _ISO639
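# Illustrative sanity check (added for clarity, not part of the original module):
# both halves of a "/"-joined code resolve to the same English name, e.g.
#     ISO639_2["per"] == ISO639_2["fas"] == u"Persian"
#     ISO639_2["slo"] == ISO639_2["slk"] == u"Slovak"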
|
gpl-3.0
|
ellisonbg/terminado
|
demos/single.py
|
4
|
1267
|
"""A single common terminal for all websockets.
"""
import tornado.web
# This demo requires tornado_xstatic and XStatic-term.js
import tornado_xstatic
from terminado import TermSocket, SingleTermManager
from common_demo_stuff import run_and_show_browser, STATIC_DIR, TEMPLATE_DIR
class TerminalPageHandler(tornado.web.RequestHandler):
def get(self):
return self.render("termpage.html", static=self.static_url,
xstatic=self.application.settings['xstatic_url'],
ws_url_path="/websocket")
def main(argv):
term_manager = SingleTermManager(shell_command=['bash'])
handlers = [
(r"/websocket", TermSocket,
{'term_manager': term_manager}),
(r"/", TerminalPageHandler),
(r"/xstatic/(.*)", tornado_xstatic.XStaticFileHandler,
{'allowed_modules': ['termjs']})
]
app = tornado.web.Application(handlers, static_path=STATIC_DIR,
template_path=TEMPLATE_DIR,
xstatic_url = tornado_xstatic.url_maker('/xstatic/'))
app.listen(8765, 'localhost')
run_and_show_browser("http://localhost:8765/", term_manager)
if __name__ == '__main__':
main([])
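# Usage note (added; relies only on the settings visible above): running this
# file starts a Tornado app serving one shared bash terminal, e.g.
#     python single.py
# then browse to http://localhost:8765/ (run_and_show_browser opens it for you).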
|
bsd-2-clause
|
kykdev/lolliwiz_lentislte
|
tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/Core.py
|
11088
|
3246
|
# Core.py - Python extension for perf script, core functions
#
# Copyright (C) 2010 by Tom Zanussi <tzanussi@gmail.com>
#
# This software may be distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
from collections import defaultdict
def autodict():
return defaultdict(autodict)
flag_fields = autodict()
symbolic_fields = autodict()
def define_flag_field(event_name, field_name, delim):
flag_fields[event_name][field_name]['delim'] = delim
def define_flag_value(event_name, field_name, value, field_str):
flag_fields[event_name][field_name]['values'][value] = field_str
def define_symbolic_field(event_name, field_name):
# nothing to do, really
pass
def define_symbolic_value(event_name, field_name, value, field_str):
symbolic_fields[event_name][field_name]['values'][value] = field_str
def flag_str(event_name, field_name, value):
string = ""
if flag_fields[event_name][field_name]:
print_delim = 0
keys = flag_fields[event_name][field_name]['values'].keys()
keys.sort()
for idx in keys:
if not value and not idx:
string += flag_fields[event_name][field_name]['values'][idx]
break
if idx and (value & idx) == idx:
if print_delim and flag_fields[event_name][field_name]['delim']:
string += " " + flag_fields[event_name][field_name]['delim'] + " "
string += flag_fields[event_name][field_name]['values'][idx]
print_delim = 1
value &= ~idx
return string
def symbol_str(event_name, field_name, value):
string = ""
if symbolic_fields[event_name][field_name]:
keys = symbolic_fields[event_name][field_name]['values'].keys()
keys.sort()
for idx in keys:
if not value and not idx:
string = symbolic_fields[event_name][field_name]['values'][idx]
break
if (value == idx):
string = symbolic_fields[event_name][field_name]['values'][idx]
break
return string
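# Hypothetical usage sketch (added for clarity; the event and field names below
# are invented, not taken from a real perf trace). A generated perf script
# registers values and then renders them with flag_str()/symbol_str():
#     define_flag_field("syscalls", "flags", "|")
#     define_flag_value("syscalls", "flags", 1, "READ")
#     define_flag_value("syscalls", "flags", 2, "WRITE")
#     flag_str("syscalls", "flags", 3)          # -> "READ | WRITE"
#     define_symbolic_value("sched", "prev_state", 0, "RUNNING")
#     symbol_str("sched", "prev_state", 0)      # -> "RUNNING"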
trace_flags = { 0x00: "NONE", \
0x01: "IRQS_OFF", \
0x02: "IRQS_NOSUPPORT", \
0x04: "NEED_RESCHED", \
0x08: "HARDIRQ", \
0x10: "SOFTIRQ" }
def trace_flag_str(value):
string = ""
print_delim = 0
keys = trace_flags.keys()
for idx in keys:
if not value and not idx:
string += "NONE"
break
if idx and (value & idx) == idx:
if print_delim:
string += " | ";
string += trace_flags[idx]
print_delim = 1
value &= ~idx
return string
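# Worked example (added): trace_flag_str(0x05) decodes the two set bits into
# "IRQS_OFF" and "NEED_RESCHED" joined by " | " (the order of the names follows
# the dict's key iteration order).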
def taskState(state):
states = {
0 : "R",
1 : "S",
2 : "D",
64: "DEAD"
}
if state not in states:
return "Unknown"
return states[state]
class EventHeaders:
def __init__(self, common_cpu, common_secs, common_nsecs,
common_pid, common_comm):
self.cpu = common_cpu
self.secs = common_secs
self.nsecs = common_nsecs
self.pid = common_pid
self.comm = common_comm
def ts(self):
return (self.secs * (10 ** 9)) + self.nsecs
def ts_format(self):
return "%d.%d" % (self.secs, int(self.nsecs / 1000))
|
gpl-2.0
|
tchernomax/ansible
|
test/runner/lib/executor.py
|
3
|
54420
|
"""Execute Ansible tests."""
from __future__ import absolute_import, print_function
import json
import os
import collections
import datetime
import re
import tempfile
import time
import textwrap
import functools
import pipes
import sys
import hashlib
import difflib
import filecmp
import lib.pytar
import lib.thread
from lib.core_ci import (
AnsibleCoreCI,
SshKey,
)
from lib.manage_ci import (
ManageWindowsCI,
ManageNetworkCI,
)
from lib.cloud import (
cloud_filter,
cloud_init,
get_cloud_environment,
get_cloud_platforms,
)
from lib.util import (
ApplicationWarning,
ApplicationError,
SubprocessError,
display,
run_command,
intercept_command,
remove_tree,
make_dirs,
is_shippable,
is_binary_file,
find_executable,
raw_command,
get_coverage_path,
get_available_port,
generate_pip_command,
find_python,
get_docker_completion,
)
from lib.docker_util import (
docker_pull,
docker_run,
get_docker_container_id,
get_docker_container_ip,
)
from lib.ansible_util import (
ansible_environment,
)
from lib.target import (
IntegrationTarget,
walk_external_targets,
walk_internal_targets,
walk_posix_integration_targets,
walk_network_integration_targets,
walk_windows_integration_targets,
walk_units_targets,
)
from lib.changes import (
ShippableChanges,
LocalChanges,
)
from lib.git import (
Git,
)
from lib.classification import (
categorize_changes,
)
from lib.config import (
TestConfig,
EnvironmentConfig,
IntegrationConfig,
NetworkIntegrationConfig,
PosixIntegrationConfig,
ShellConfig,
UnitsConfig,
WindowsIntegrationConfig,
)
from lib.metadata import (
ChangeDescription,
)
SUPPORTED_PYTHON_VERSIONS = (
'2.6',
'2.7',
'3.5',
'3.6',
'3.7',
)
HTTPTESTER_HOSTS = (
'ansible.http.tests',
'sni1.ansible.http.tests',
'fail.ansible.http.tests',
)
def check_startup():
"""Checks to perform at startup before running commands."""
check_legacy_modules()
def check_legacy_modules():
"""Detect conflicts with legacy core/extras module directories to avoid problems later."""
for directory in 'core', 'extras':
path = 'lib/ansible/modules/%s' % directory
for root, _, file_names in os.walk(path):
if file_names:
# the directory shouldn't exist, but if it does, it must contain no files
raise ApplicationError('Files prohibited in "%s". '
'These are most likely legacy modules from version 2.2 or earlier.' % root)
def create_shell_command(command):
"""
:type command: list[str]
:rtype: list[str]
"""
optional_vars = (
'TERM',
)
cmd = ['/usr/bin/env']
cmd += ['%s=%s' % (var, os.environ[var]) for var in optional_vars if var in os.environ]
cmd += command
return cmd
def install_command_requirements(args, python_version=None):
"""
:type args: EnvironmentConfig
:type python_version: str | None
"""
generate_egg_info(args)
if not args.requirements:
return
if isinstance(args, ShellConfig):
return
packages = []
if isinstance(args, TestConfig):
if args.coverage:
packages.append('coverage')
if args.junit:
packages.append('junit-xml')
if not python_version:
python_version = args.python_version
pip = generate_pip_command(find_python(python_version))
commands = [generate_pip_install(pip, args.command, packages=packages)]
if isinstance(args, IntegrationConfig):
for cloud_platform in get_cloud_platforms(args):
commands.append(generate_pip_install(pip, '%s.cloud.%s' % (args.command, cloud_platform)))
commands = [cmd for cmd in commands if cmd]
# only look for changes when more than one requirements file is needed
detect_pip_changes = len(commands) > 1
# first pass to install requirements, changes expected unless environment is already set up
changes = run_pip_commands(args, pip, commands, detect_pip_changes)
if not changes:
return # no changes means we can stop early
# second pass to check for conflicts in requirements, changes are not expected here
changes = run_pip_commands(args, pip, commands, detect_pip_changes)
if not changes:
return # no changes means no conflicts
raise ApplicationError('Conflicts detected in requirements. The following commands reported changes during verification:\n%s' %
'\n'.join((' '.join(pipes.quote(c) for c in cmd) for cmd in changes)))
def run_pip_commands(args, pip, commands, detect_pip_changes=False):
"""
:type args: EnvironmentConfig
:type pip: list[str]
:type commands: list[list[str]]
:type detect_pip_changes: bool
:rtype: list[list[str]]
"""
changes = []
after_list = pip_list(args, pip) if detect_pip_changes else None
for cmd in commands:
if not cmd:
continue
before_list = after_list
try:
run_command(args, cmd)
except SubprocessError as ex:
if ex.status != 2:
raise
# If pip is too old it won't understand the arguments we passed in, so we'll need to upgrade it.
# Installing "coverage" on ubuntu 16.04 fails with the error:
# AttributeError: 'Requirement' object has no attribute 'project_name'
# See: https://bugs.launchpad.net/ubuntu/xenial/+source/python-pip/+bug/1626258
# Upgrading pip works around the issue.
run_command(args, pip + ['install', '--upgrade', 'pip'])
run_command(args, cmd)
after_list = pip_list(args, pip) if detect_pip_changes else None
if before_list != after_list:
changes.append(cmd)
return changes
def pip_list(args, pip):
"""
:type args: EnvironmentConfig
:type pip: list[str]
:rtype: str
"""
stdout, _ = run_command(args, pip + ['list'], capture=True)
return stdout
def generate_egg_info(args):
"""
:type args: EnvironmentConfig
"""
if os.path.isdir('lib/ansible.egg-info'):
return
run_command(args, [args.python_executable, 'setup.py', 'egg_info'], capture=args.verbosity < 3)
def generate_pip_install(pip, command, packages=None):
"""
:type pip: list[str]
:type command: str
:type packages: list[str] | None
:rtype: list[str] | None
"""
constraints = 'test/runner/requirements/constraints.txt'
requirements = 'test/runner/requirements/%s.txt' % command
options = []
if os.path.exists(requirements) and os.path.getsize(requirements):
options += ['-r', requirements]
if packages:
options += packages
if not options:
return None
return pip + ['install', '--disable-pip-version-check', '-c', constraints] + options
def command_shell(args):
"""
:type args: ShellConfig
"""
if args.delegate:
raise Delegate()
install_command_requirements(args)
if args.inject_httptester:
inject_httptester(args)
cmd = create_shell_command(['bash', '-i'])
run_command(args, cmd)
def command_posix_integration(args):
"""
:type args: PosixIntegrationConfig
"""
all_targets = tuple(walk_posix_integration_targets(include_hidden=True))
internal_targets = command_integration_filter(args, all_targets)
command_integration_filtered(args, internal_targets, all_targets)
def command_network_integration(args):
"""
:type args: NetworkIntegrationConfig
"""
default_filename = 'test/integration/inventory.networking'
if args.inventory:
filename = os.path.join('test/integration', args.inventory)
else:
filename = default_filename
if not args.explain and not args.platform and not os.path.exists(filename):
if args.inventory:
filename = os.path.abspath(filename)
raise ApplicationError(
'Inventory not found: %s\n'
'Use --inventory to specify the inventory path.\n'
'Use --platform to provision resources and generate an inventory file.\n'
'See also inventory template: %s.template' % (filename, default_filename)
)
all_targets = tuple(walk_network_integration_targets(include_hidden=True))
internal_targets = command_integration_filter(args, all_targets, init_callback=network_init)
instances = [] # type: list [lib.thread.WrappedThread]
if args.platform:
get_coverage_path(args) # initialize before starting threads
configs = dict((config['platform_version'], config) for config in args.metadata.instance_config)
for platform_version in args.platform:
platform, version = platform_version.split('/', 1)
config = configs.get(platform_version)
if not config:
continue
instance = lib.thread.WrappedThread(functools.partial(network_run, args, platform, version, config))
instance.daemon = True
instance.start()
instances.append(instance)
while any(instance.is_alive() for instance in instances):
time.sleep(1)
remotes = [instance.wait_for_result() for instance in instances]
inventory = network_inventory(remotes)
display.info('>>> Inventory: %s\n%s' % (filename, inventory.strip()), verbosity=3)
if not args.explain:
with open(filename, 'w') as inventory_fd:
inventory_fd.write(inventory)
success = False
try:
command_integration_filtered(args, internal_targets, all_targets)
success = True
finally:
if args.remote_terminate == 'always' or (args.remote_terminate == 'success' and success):
for instance in instances:
instance.result.stop()
def network_init(args, internal_targets):
"""
:type args: NetworkIntegrationConfig
:type internal_targets: tuple[IntegrationTarget]
"""
if not args.platform:
return
if args.metadata.instance_config is not None:
return
platform_targets = set(a for t in internal_targets for a in t.aliases if a.startswith('network/'))
instances = [] # type: list [lib.thread.WrappedThread]
# generate an ssh key (if needed) up front once, instead of for each instance
SshKey(args)
for platform_version in args.platform:
platform, version = platform_version.split('/', 1)
platform_target = 'network/%s/' % platform
if platform_target not in platform_targets:
display.warning('Skipping "%s" because selected tests do not target the "%s" platform.' % (
platform_version, platform))
continue
instance = lib.thread.WrappedThread(functools.partial(network_start, args, platform, version))
instance.daemon = True
instance.start()
instances.append(instance)
while any(instance.is_alive() for instance in instances):
time.sleep(1)
args.metadata.instance_config = [instance.wait_for_result() for instance in instances]
def network_start(args, platform, version):
"""
:type args: NetworkIntegrationConfig
:type platform: str
:type version: str
:rtype: AnsibleCoreCI
"""
core_ci = AnsibleCoreCI(args, platform, version, stage=args.remote_stage, provider=args.remote_provider)
core_ci.start()
return core_ci.save()
def network_run(args, platform, version, config):
"""
:type args: NetworkIntegrationConfig
:type platform: str
:type version: str
:type config: dict[str, str]
:rtype: AnsibleCoreCI
"""
core_ci = AnsibleCoreCI(args, platform, version, stage=args.remote_stage, provider=args.remote_provider, load=False)
core_ci.load(config)
core_ci.wait()
manage = ManageNetworkCI(core_ci)
manage.wait()
return core_ci
def network_inventory(remotes):
"""
:type remotes: list[AnsibleCoreCI]
:rtype: str
"""
groups = dict([(remote.platform, []) for remote in remotes])
net = []
for remote in remotes:
options = dict(
ansible_host=remote.connection.hostname,
ansible_user=remote.connection.username,
ansible_ssh_private_key_file=os.path.abspath(remote.ssh_key.key),
ansible_network_os=remote.platform,
ansible_connection='local'
)
groups[remote.platform].append(
'%s %s' % (
remote.name.replace('.', '-'),
' '.join('%s="%s"' % (k, options[k]) for k in sorted(options)),
)
)
net.append(remote.platform)
groups['net:children'] = net
template = ''
for group in groups:
hosts = '\n'.join(groups[group])
template += textwrap.dedent("""
[%s]
%s
""") % (group, hosts)
inventory = template
return inventory
def command_windows_integration(args):
"""
:type args: WindowsIntegrationConfig
"""
filename = 'test/integration/inventory.winrm'
if not args.explain and not args.windows and not os.path.isfile(filename):
raise ApplicationError('Use the --windows option or provide an inventory file (see %s.template).' % filename)
all_targets = tuple(walk_windows_integration_targets(include_hidden=True))
internal_targets = command_integration_filter(args, all_targets, init_callback=windows_init)
instances = [] # type: list [lib.thread.WrappedThread]
if args.windows:
get_coverage_path(args) # initialize before starting threads
configs = dict((config['platform_version'], config) for config in args.metadata.instance_config)
for version in args.windows:
config = configs['windows/%s' % version]
instance = lib.thread.WrappedThread(functools.partial(windows_run, args, version, config))
instance.daemon = True
instance.start()
instances.append(instance)
while any(instance.is_alive() for instance in instances):
time.sleep(1)
remotes = [instance.wait_for_result() for instance in instances]
inventory = windows_inventory(remotes)
display.info('>>> Inventory: %s\n%s' % (filename, inventory.strip()), verbosity=3)
if not args.explain:
with open(filename, 'w') as inventory_fd:
inventory_fd.write(inventory)
success = False
try:
command_integration_filtered(args, internal_targets, all_targets)
success = True
finally:
if args.remote_terminate == 'always' or (args.remote_terminate == 'success' and success):
for instance in instances:
instance.result.stop()
# noinspection PyUnusedLocal
def windows_init(args, internal_targets): # pylint: disable=locally-disabled, unused-argument
"""
:type args: WindowsIntegrationConfig
:type internal_targets: tuple[IntegrationTarget]
"""
if not args.windows:
return
if args.metadata.instance_config is not None:
return
instances = [] # type: list [lib.thread.WrappedThread]
for version in args.windows:
instance = lib.thread.WrappedThread(functools.partial(windows_start, args, version))
instance.daemon = True
instance.start()
instances.append(instance)
while any(instance.is_alive() for instance in instances):
time.sleep(1)
args.metadata.instance_config = [instance.wait_for_result() for instance in instances]
def windows_start(args, version):
"""
:type args: WindowsIntegrationConfig
:type version: str
:rtype: AnsibleCoreCI
"""
core_ci = AnsibleCoreCI(args, 'windows', version, stage=args.remote_stage, provider=args.remote_provider)
core_ci.start()
return core_ci.save()
def windows_run(args, version, config):
"""
:type args: WindowsIntegrationConfig
:type version: str
:type config: dict[str, str]
:rtype: AnsibleCoreCI
"""
core_ci = AnsibleCoreCI(args, 'windows', version, stage=args.remote_stage, provider=args.remote_provider, load=False)
core_ci.load(config)
core_ci.wait()
manage = ManageWindowsCI(core_ci)
manage.wait()
return core_ci
def windows_inventory(remotes):
"""
:type remotes: list[AnsibleCoreCI]
:rtype: str
"""
hosts = []
for remote in remotes:
options = dict(
ansible_host=remote.connection.hostname,
ansible_user=remote.connection.username,
ansible_password=remote.connection.password,
ansible_port=remote.connection.port,
)
hosts.append(
'%s %s' % (
remote.name.replace('/', '_'),
' '.join('%s="%s"' % (k, options[k]) for k in sorted(options)),
)
)
template = """
[windows]
%s
[windows:vars]
ansible_connection=winrm
ansible_winrm_server_cert_validation=ignore
# support winrm connection tests (temporary solution, does not support testing enable/disable of pipelining)
[winrm:children]
windows
# support winrm binary module tests (temporary solution)
[testhost_binary_modules:children]
windows
"""
template = textwrap.dedent(template)
inventory = template % ('\n'.join(hosts))
return inventory
def command_integration_filter(args, targets, init_callback=None):
"""
:type args: IntegrationConfig
:type targets: collections.Iterable[IntegrationTarget]
:type init_callback: (IntegrationConfig, tuple[IntegrationTarget]) -> None
:rtype: tuple[IntegrationTarget]
"""
targets = tuple(target for target in targets if 'hidden/' not in target.aliases)
changes = get_changes_filter(args)
# special behavior when the --changed-all-target target is selected based on changes
if args.changed_all_target in changes:
# act as though the --changed-all-target target was in the include list
if args.changed_all_mode == 'include' and args.changed_all_target not in args.include:
args.include.append(args.changed_all_target)
args.delegate_args += ['--include', args.changed_all_target]
# act as though the --changed-all-target target was in the exclude list
elif args.changed_all_mode == 'exclude' and args.changed_all_target not in args.exclude:
args.exclude.append(args.changed_all_target)
require = args.require + changes
exclude = args.exclude
internal_targets = walk_internal_targets(targets, args.include, exclude, require)
environment_exclude = get_integration_filter(args, internal_targets)
environment_exclude += cloud_filter(args, internal_targets)
if environment_exclude:
exclude += environment_exclude
internal_targets = walk_internal_targets(targets, args.include, exclude, require)
if not internal_targets:
raise AllTargetsSkipped()
if args.start_at and not any(t.name == args.start_at for t in internal_targets):
raise ApplicationError('Start at target matches nothing: %s' % args.start_at)
if init_callback:
init_callback(args, internal_targets)
cloud_init(args, internal_targets)
if args.delegate:
raise Delegate(require=require, exclude=exclude, integration_targets=internal_targets)
install_command_requirements(args)
return internal_targets
def command_integration_filtered(args, targets, all_targets):
"""
:type args: IntegrationConfig
:type targets: tuple[IntegrationTarget]
:type all_targets: tuple[IntegrationTarget]
"""
found = False
passed = []
failed = []
targets_iter = iter(targets)
all_targets_dict = dict((target.name, target) for target in all_targets)
setup_errors = []
setup_targets_executed = set()
for target in all_targets:
for setup_target in target.setup_once + target.setup_always:
if setup_target not in all_targets_dict:
setup_errors.append('Target "%s" contains invalid setup target: %s' % (target.name, setup_target))
if setup_errors:
raise ApplicationError('Found %d invalid setup aliases:\n%s' % (len(setup_errors), '\n'.join(setup_errors)))
test_dir = os.path.expanduser('~/ansible_testing')
if not args.explain and any('needs/ssh/' in target.aliases for target in targets):
max_tries = 20
display.info('SSH service required for tests. Checking to make sure we can connect.')
for i in range(1, max_tries + 1):
try:
run_command(args, ['ssh', '-o', 'BatchMode=yes', 'localhost', 'id'], capture=True)
display.info('SSH service responded.')
break
except SubprocessError:
if i == max_tries:
raise
seconds = 3
display.warning('SSH service not responding. Waiting %d second(s) before checking again.' % seconds)
time.sleep(seconds)
if args.inject_httptester:
inject_httptester(args)
start_at_task = args.start_at_task
results = {}
current_environment = None # type: EnvironmentDescription | None
for target in targets_iter:
if args.start_at and not found:
found = target.name == args.start_at
if not found:
continue
if args.list_targets:
print(target.name)
continue
tries = 2 if args.retry_on_error else 1
verbosity = args.verbosity
cloud_environment = get_cloud_environment(args, target)
original_environment = current_environment if current_environment else EnvironmentDescription(args)
current_environment = None
display.info('>>> Environment Description\n%s' % original_environment, verbosity=3)
try:
while tries:
tries -= 1
try:
if cloud_environment:
cloud_environment.setup_once()
run_setup_targets(args, test_dir, target.setup_once, all_targets_dict, setup_targets_executed, False)
start_time = time.time()
run_setup_targets(args, test_dir, target.setup_always, all_targets_dict, setup_targets_executed, True)
if not args.explain:
# create a fresh test directory for each test target
remove_tree(test_dir)
make_dirs(test_dir)
if target.script_path:
command_integration_script(args, target)
else:
command_integration_role(args, target, start_at_task)
start_at_task = None
end_time = time.time()
results[target.name] = dict(
name=target.name,
type=target.type,
aliases=target.aliases,
modules=target.modules,
run_time_seconds=int(end_time - start_time),
setup_once=target.setup_once,
setup_always=target.setup_always,
coverage=args.coverage,
coverage_label=args.coverage_label,
python_version=args.python_version,
)
break
except SubprocessError:
if cloud_environment:
cloud_environment.on_failure(target, tries)
if not original_environment.validate(target.name, throw=False):
raise
if not tries:
raise
display.warning('Retrying test target "%s" with maximum verbosity.' % target.name)
display.verbosity = args.verbosity = 6
start_time = time.time()
current_environment = EnvironmentDescription(args)
end_time = time.time()
EnvironmentDescription.check(original_environment, current_environment, target.name, throw=True)
results[target.name]['validation_seconds'] = int(end_time - start_time)
passed.append(target)
except Exception as ex:
failed.append(target)
if args.continue_on_error:
display.error(ex)
continue
display.notice('To resume at this test target, use the option: --start-at %s' % target.name)
next_target = next(targets_iter, None)
if next_target:
display.notice('To resume after this test target, use the option: --start-at %s' % next_target.name)
raise
finally:
display.verbosity = args.verbosity = verbosity
if not args.explain:
results_path = 'test/results/data/%s-%s.json' % (args.command, re.sub(r'[^0-9]', '-', str(datetime.datetime.utcnow().replace(microsecond=0))))
data = dict(
targets=results,
)
with open(results_path, 'w') as results_fd:
results_fd.write(json.dumps(data, sort_keys=True, indent=4))
if failed:
raise ApplicationError('The %d integration test(s) listed below (out of %d) failed. See error output above for details:\n%s' % (
len(failed), len(passed) + len(failed), '\n'.join(target.name for target in failed)))
def start_httptester(args):
"""
:type args: EnvironmentConfig
:rtype: str, list[str]
"""
# map ports from remote -> localhost -> container
# passing through localhost is only used when ansible-test is not already running inside a docker container
ports = [
dict(
remote=8080,
container=80,
),
dict(
remote=8443,
container=443,
),
]
container_id = get_docker_container_id()
if container_id:
display.info('Running in docker container: %s' % container_id, verbosity=1)
else:
for item in ports:
item['localhost'] = get_available_port()
docker_pull(args, args.httptester)
httptester_id = run_httptester(args, dict((port['localhost'], port['container']) for port in ports if 'localhost' in port))
if container_id:
container_host = get_docker_container_ip(args, httptester_id)
display.info('Found httptester container address: %s' % container_host, verbosity=1)
else:
container_host = 'localhost'
ssh_options = []
for port in ports:
ssh_options += ['-R', '%d:%s:%d' % (port['remote'], container_host, port.get('localhost', port['container']))]
return httptester_id, ssh_options
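# Illustrative shapes of the returned ssh_options (added; the forwarded local
# ports are whatever get_available_port() picked, shown here as placeholders):
# inside a docker container the fixed remote ports map straight to the
# httptester container, e.g. ['-R', '8080:<container ip>:80', '-R', '8443:<container ip>:443'];
# outside a container they pass through localhost first, e.g.
# ['-R', '8080:localhost:<free port>', '-R', '8443:localhost:<free port>'].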
def run_httptester(args, ports=None):
"""
:type args: EnvironmentConfig
:type ports: dict[int, int] | None
:rtype: str
"""
options = [
'--detach',
]
if ports:
for localhost_port, container_port in ports.items():
options += ['-p', '%d:%d' % (localhost_port, container_port)]
httptester_id, _ = docker_run(args, args.httptester, options=options)
if args.explain:
httptester_id = 'httptester_id'
else:
httptester_id = httptester_id.strip()
return httptester_id
def inject_httptester(args):
"""
:type args: CommonConfig
"""
comment = ' # ansible-test httptester\n'
append_lines = ['127.0.0.1 %s%s' % (host, comment) for host in HTTPTESTER_HOSTS]
with open('/etc/hosts', 'r+') as hosts_fd:
original_lines = hosts_fd.readlines()
if not any(line.endswith(comment) for line in original_lines):
hosts_fd.writelines(append_lines)
# determine which forwarding mechanism to use
pfctl = find_executable('pfctl', required=False)
iptables = find_executable('iptables', required=False)
if pfctl:
kldload = find_executable('kldload', required=False)
if kldload:
try:
run_command(args, ['kldload', 'pf'], capture=True)
except SubprocessError:
pass # already loaded
rules = '''
rdr pass inet proto tcp from any to any port 80 -> 127.0.0.1 port 8080
rdr pass inet proto tcp from any to any port 443 -> 127.0.0.1 port 8443
'''
cmd = ['pfctl', '-ef', '-']
try:
run_command(args, cmd, capture=True, data=rules)
except SubprocessError:
pass # non-zero exit status on success
elif iptables:
ports = [
(80, 8080),
(443, 8443),
]
for src, dst in ports:
rule = ['-o', 'lo', '-p', 'tcp', '--dport', str(src), '-j', 'REDIRECT', '--to-port', str(dst)]
try:
# check for existing rule
cmd = ['iptables', '-t', 'nat', '-C', 'OUTPUT'] + rule
run_command(args, cmd, capture=True)
except SubprocessError:
# append rule when it does not exist
cmd = ['iptables', '-t', 'nat', '-A', 'OUTPUT'] + rule
run_command(args, cmd, capture=True)
else:
raise ApplicationError('No supported port forwarding mechanism detected.')
def run_setup_targets(args, test_dir, target_names, targets_dict, targets_executed, always):
"""
:type args: IntegrationConfig
:type test_dir: str
:type target_names: list[str]
:type targets_dict: dict[str, IntegrationTarget]
:type targets_executed: set[str]
:type always: bool
"""
for target_name in target_names:
if not always and target_name in targets_executed:
continue
target = targets_dict[target_name]
if not args.explain:
# create a fresh test directory for each test target
remove_tree(test_dir)
make_dirs(test_dir)
if target.script_path:
command_integration_script(args, target)
else:
command_integration_role(args, target, None)
targets_executed.add(target_name)
def integration_environment(args, target, cmd):
"""
:type args: IntegrationConfig
:type target: IntegrationTarget
:type cmd: list[str]
:rtype: dict[str, str]
"""
env = ansible_environment(args)
if args.inject_httptester:
env.update(dict(
HTTPTESTER='1',
))
integration = dict(
JUNIT_OUTPUT_DIR=os.path.abspath('test/results/junit'),
ANSIBLE_CALLBACK_WHITELIST='junit',
ANSIBLE_TEST_CI=args.metadata.ci_provider,
)
if args.debug_strategy:
env.update(dict(ANSIBLE_STRATEGY='debug'))
if 'non_local/' in target.aliases:
if args.coverage:
display.warning('Skipping coverage reporting for non-local test: %s' % target.name)
env.update(dict(ANSIBLE_TEST_REMOTE_INTERPRETER=''))
env.update(integration)
cloud_environment = get_cloud_environment(args, target)
if cloud_environment:
cloud_environment.configure_environment(env, cmd)
return env
def command_integration_script(args, target):
"""
:type args: IntegrationConfig
:type target: IntegrationTarget
"""
display.info('Running %s integration test script' % target.name)
cmd = ['./%s' % os.path.basename(target.script_path)]
if args.verbosity:
cmd.append('-' + ('v' * args.verbosity))
env = integration_environment(args, target, cmd)
cwd = target.path
intercept_command(args, cmd, target_name=target.name, env=env, cwd=cwd)
def command_integration_role(args, target, start_at_task):
"""
:type args: IntegrationConfig
:type target: IntegrationTarget
:type start_at_task: str | None
"""
display.info('Running %s integration test role' % target.name)
vars_file = 'integration_config.yml'
if isinstance(args, WindowsIntegrationConfig):
inventory = 'inventory.winrm'
hosts = 'windows'
gather_facts = False
elif isinstance(args, NetworkIntegrationConfig):
inventory = args.inventory or 'inventory.networking'
hosts = target.name[:target.name.find('_')]
gather_facts = False
else:
inventory = 'inventory'
hosts = 'testhost'
gather_facts = True
cloud_environment = get_cloud_environment(args, target)
if cloud_environment:
hosts = cloud_environment.inventory_hosts or hosts
playbook = '''
- hosts: %s
gather_facts: %s
roles:
- { role: %s }
''' % (hosts, gather_facts, target.name)
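    # Example of the rendered playbook for the default (non-Windows, non-network)
    # case (added; the role name "ping" is hypothetical):
    #     - hosts: testhost
    #       gather_facts: True
    #       roles:
    #         - { role: ping }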
with tempfile.NamedTemporaryFile(dir='test/integration', prefix='%s-' % target.name, suffix='.yml') as pb_fd:
pb_fd.write(playbook.encode('utf-8'))
pb_fd.flush()
filename = os.path.basename(pb_fd.name)
display.info('>>> Playbook: %s\n%s' % (filename, playbook.strip()), verbosity=3)
cmd = ['ansible-playbook', filename, '-i', inventory, '-e', '@%s' % vars_file]
if start_at_task:
cmd += ['--start-at-task', start_at_task]
if args.tags:
cmd += ['--tags', args.tags]
if args.skip_tags:
cmd += ['--skip-tags', args.skip_tags]
if args.diff:
cmd += ['--diff']
if isinstance(args, NetworkIntegrationConfig):
if args.testcase:
cmd += ['-e', 'testcase=%s' % args.testcase]
if args.verbosity:
cmd.append('-' + ('v' * args.verbosity))
env = integration_environment(args, target, cmd)
cwd = 'test/integration'
env['ANSIBLE_ROLES_PATH'] = os.path.abspath('test/integration/targets')
intercept_command(args, cmd, target_name=target.name, env=env, cwd=cwd)
def command_units(args):
"""
:type args: UnitsConfig
"""
changes = get_changes_filter(args)
require = args.require + changes
include, exclude = walk_external_targets(walk_units_targets(), args.include, args.exclude, require)
if not include:
raise AllTargetsSkipped()
if args.delegate:
raise Delegate(require=changes)
version_commands = []
for version in SUPPORTED_PYTHON_VERSIONS:
# run all versions unless version given, in which case run only that version
if args.python and version != args.python_version:
continue
if args.requirements_mode != 'skip':
install_command_requirements(args, version)
env = ansible_environment(args)
cmd = [
'pytest',
'--boxed',
'-r', 'a',
'-n', 'auto',
'--color',
'yes' if args.color else 'no',
'--junit-xml',
'test/results/junit/python%s-units.xml' % version,
]
if args.collect_only:
cmd.append('--collect-only')
if args.verbosity:
cmd.append('-' + ('v' * args.verbosity))
if exclude:
cmd += ['--ignore=%s' % target.path for target in exclude]
cmd += [target.path for target in include]
version_commands.append((version, cmd, env))
if args.requirements_mode == 'only':
sys.exit()
for version, command, env in version_commands:
display.info('Unit test with Python %s' % version)
try:
intercept_command(args, command, target_name='units', env=env, python_version=version)
except SubprocessError as ex:
# pytest exits with status code 5 when all tests are skipped, which isn't an error for our use case
if ex.status != 5:
raise
def get_changes_filter(args):
"""
:type args: TestConfig
:rtype: list[str]
"""
paths = detect_changes(args)
if not args.metadata.change_description:
if paths:
changes = categorize_changes(args, paths, args.command)
else:
changes = ChangeDescription()
args.metadata.change_description = changes
if paths is None:
return [] # change detection not enabled, do not filter targets
if not paths:
raise NoChangesDetected()
if args.metadata.change_description.targets is None:
raise NoTestsForChanges()
return args.metadata.change_description.targets
def detect_changes(args):
"""
:type args: TestConfig
:rtype: list[str] | None
"""
if args.changed and is_shippable():
display.info('Shippable detected, collecting parameters from environment.')
paths = detect_changes_shippable(args)
elif args.changed_from or args.changed_path:
paths = args.changed_path or []
if args.changed_from:
with open(args.changed_from, 'r') as changes_fd:
paths += changes_fd.read().splitlines()
elif args.changed:
paths = detect_changes_local(args)
else:
return None # change detection not enabled
if paths is None:
return None # act as though change detection not enabled, do not filter targets
display.info('Detected changes in %d file(s).' % len(paths))
for path in paths:
display.info(path, verbosity=1)
return paths
def detect_changes_shippable(args):
"""Initialize change detection on Shippable.
:type args: TestConfig
:rtype: list[str] | None
"""
git = Git(args)
result = ShippableChanges(args, git)
if result.is_pr:
job_type = 'pull request'
elif result.is_tag:
job_type = 'tag'
else:
job_type = 'merge commit'
display.info('Processing %s for branch %s commit %s' % (job_type, result.branch, result.commit))
if not args.metadata.changes:
args.metadata.populate_changes(result.diff)
return result.paths
def detect_changes_local(args):
"""
:type args: TestConfig
:rtype: list[str]
"""
git = Git(args)
result = LocalChanges(args, git)
display.info('Detected branch %s forked from %s at commit %s' % (
result.current_branch, result.fork_branch, result.fork_point))
if result.untracked and not args.untracked:
display.warning('Ignored %s untracked file(s). Use --untracked to include them.' %
len(result.untracked))
if result.committed and not args.committed:
display.warning('Ignored %s committed change(s). Omit --ignore-committed to include them.' %
len(result.committed))
if result.staged and not args.staged:
display.warning('Ignored %s staged change(s). Omit --ignore-staged to include them.' %
len(result.staged))
if result.unstaged and not args.unstaged:
display.warning('Ignored %s unstaged change(s). Omit --ignore-unstaged to include them.' %
len(result.unstaged))
names = set()
if args.tracked:
names |= set(result.tracked)
if args.untracked:
names |= set(result.untracked)
if args.committed:
names |= set(result.committed)
if args.staged:
names |= set(result.staged)
if args.unstaged:
names |= set(result.unstaged)
if not args.metadata.changes:
args.metadata.populate_changes(result.diff)
for path in result.untracked:
if is_binary_file(path):
args.metadata.changes[path] = ((0, 0),)
continue
with open(path, 'r') as source_fd:
line_count = len(source_fd.read().splitlines())
args.metadata.changes[path] = ((1, line_count),)
return sorted(names)
def get_integration_filter(args, targets):
"""
:type args: IntegrationConfig
:type targets: tuple[IntegrationTarget]
:rtype: list[str]
"""
if args.tox:
# tox has the same exclusions as the local environment
return get_integration_local_filter(args, targets)
if args.docker:
return get_integration_docker_filter(args, targets)
if args.remote:
return get_integration_remote_filter(args, targets)
return get_integration_local_filter(args, targets)
def common_integration_filter(args, targets, exclude):
"""
:type args: IntegrationConfig
:type targets: tuple[IntegrationTarget]
:type exclude: list[str]
"""
override_disabled = set(target for target in args.include if target.startswith('disabled/'))
if not args.allow_disabled:
skip = 'disabled/'
override = [target.name for target in targets if override_disabled & set(target.aliases)]
skipped = [target.name for target in targets if skip in target.aliases and target.name not in override]
if skipped:
exclude.extend(skipped)
display.warning('Excluding tests marked "%s" which require --allow-disabled or prefixing with "disabled/": %s'
% (skip.rstrip('/'), ', '.join(skipped)))
override_unsupported = set(target for target in args.include if target.startswith('unsupported/'))
if not args.allow_unsupported:
skip = 'unsupported/'
override = [target.name for target in targets if override_unsupported & set(target.aliases)]
skipped = [target.name for target in targets if skip in target.aliases and target.name not in override]
if skipped:
exclude.extend(skipped)
display.warning('Excluding tests marked "%s" which require --allow-unsupported or prefixing with "unsupported/": %s'
% (skip.rstrip('/'), ', '.join(skipped)))
override_unstable = set(target for target in args.include if target.startswith('unstable/'))
if args.allow_unstable_changed:
override_unstable |= set(args.metadata.change_description.focused_targets or [])
if not args.allow_unstable:
skip = 'unstable/'
override = [target.name for target in targets if override_unstable & set(target.aliases)]
skipped = [target.name for target in targets if skip in target.aliases and target.name not in override]
if skipped:
exclude.extend(skipped)
display.warning('Excluding tests marked "%s" which require --allow-unstable or prefixing with "unstable/": %s'
% (skip.rstrip('/'), ', '.join(skipped)))
def get_integration_local_filter(args, targets):
"""
:type args: IntegrationConfig
:type targets: tuple[IntegrationTarget]
:rtype: list[str]
"""
exclude = []
common_integration_filter(args, targets, exclude)
if not args.allow_root and os.getuid() != 0:
skip = 'needs/root/'
skipped = [target.name for target in targets if skip in target.aliases]
if skipped:
exclude.append(skip)
display.warning('Excluding tests marked "%s" which require --allow-root or running as root: %s'
% (skip.rstrip('/'), ', '.join(skipped)))
override_destructive = set(target for target in args.include if target.startswith('destructive/'))
if not args.allow_destructive:
skip = 'destructive/'
override = [target.name for target in targets if override_destructive & set(target.aliases)]
skipped = [target.name for target in targets if skip in target.aliases and target.name not in override]
if skipped:
exclude.extend(skipped)
display.warning('Excluding tests marked "%s" which require --allow-destructive or prefixing with "destructive/" to run locally: %s'
% (skip.rstrip('/'), ', '.join(skipped)))
if args.python_version.startswith('3'):
python_version = 3
else:
python_version = 2
skip = 'skip/python%d/' % python_version
skipped = [target.name for target in targets if skip in target.aliases]
if skipped:
exclude.append(skip)
display.warning('Excluding tests marked "%s" which are not supported on python %d: %s'
% (skip.rstrip('/'), python_version, ', '.join(skipped)))
return exclude
def get_integration_docker_filter(args, targets):
"""
:type args: IntegrationConfig
:type targets: tuple[IntegrationTarget]
:rtype: list[str]
"""
exclude = []
common_integration_filter(args, targets, exclude)
skip = 'skip/docker/'
skipped = [target.name for target in targets if skip in target.aliases]
if skipped:
exclude.append(skip)
display.warning('Excluding tests marked "%s" which cannot run under docker: %s'
% (skip.rstrip('/'), ', '.join(skipped)))
if not args.docker_privileged:
skip = 'needs/privileged/'
skipped = [target.name for target in targets if skip in target.aliases]
if skipped:
exclude.append(skip)
display.warning('Excluding tests marked "%s" which require --docker-privileged to run under docker: %s'
% (skip.rstrip('/'), ', '.join(skipped)))
python_version = 2 # images are expected to default to python 2 unless otherwise specified
python_version = int(get_docker_completion().get(args.docker_raw).get('python', str(python_version)))
if args.python: # specifying a numeric --python option overrides the default python
if args.python.startswith('3'):
python_version = 3
elif args.python.startswith('2'):
python_version = 2
skip = 'skip/python%d/' % python_version
skipped = [target.name for target in targets if skip in target.aliases]
if skipped:
exclude.append(skip)
display.warning('Excluding tests marked "%s" which are not supported on python %d: %s'
% (skip.rstrip('/'), python_version, ', '.join(skipped)))
return exclude
def get_integration_remote_filter(args, targets):
"""
:type args: IntegrationConfig
:type targets: tuple[IntegrationTarget]
:rtype: list[str]
"""
parts = args.remote.split('/', 1)
platform = parts[0]
exclude = []
common_integration_filter(args, targets, exclude)
skip = 'skip/%s/' % platform
skipped = [target.name for target in targets if skip in target.aliases]
if skipped:
exclude.append(skip)
display.warning('Excluding tests marked "%s" which are not supported on %s: %s'
% (skip.rstrip('/'), platform, ', '.join(skipped)))
python_version = 2 # remotes are expected to default to python 2
skip = 'skip/python%d/' % python_version
skipped = [target.name for target in targets if skip in target.aliases]
if skipped:
exclude.append(skip)
display.warning('Excluding tests marked "%s" which are not supported on python %d: %s'
% (skip.rstrip('/'), python_version, ', '.join(skipped)))
return exclude
class EnvironmentDescription(object):
"""Description of current running environment."""
def __init__(self, args):
"""Initialize snapshot of environment configuration.
:type args: IntegrationConfig
"""
self.args = args
if self.args.explain:
self.data = {}
return
warnings = []
versions = ['']
versions += SUPPORTED_PYTHON_VERSIONS
versions += list(set(v.split('.')[0] for v in SUPPORTED_PYTHON_VERSIONS))
python_paths = dict((v, find_executable('python%s' % v, required=False)) for v in sorted(versions))
pip_paths = dict((v, find_executable('pip%s' % v, required=False)) for v in sorted(versions))
program_versions = dict((v, self.get_version([python_paths[v], 'test/runner/versions.py'], warnings)) for v in sorted(python_paths) if python_paths[v])
pip_interpreters = dict((v, self.get_shebang(pip_paths[v])) for v in sorted(pip_paths) if pip_paths[v])
known_hosts_hash = self.get_hash(os.path.expanduser('~/.ssh/known_hosts'))
for version in sorted(versions):
self.check_python_pip_association(version, python_paths, pip_paths, pip_interpreters, warnings)
for warning in warnings:
display.warning(warning, unique=True)
self.data = dict(
python_paths=python_paths,
pip_paths=pip_paths,
program_versions=program_versions,
pip_interpreters=pip_interpreters,
known_hosts_hash=known_hosts_hash,
warnings=warnings,
)
@staticmethod
def check_python_pip_association(version, python_paths, pip_paths, pip_interpreters, warnings):
"""
:type version: str
:param python_paths: dict[str, str]
:param pip_paths: dict[str, str]
:param pip_interpreters: dict[str, str]
:param warnings: list[str]
"""
python_label = 'Python%s' % (' %s' % version if version else '')
pip_path = pip_paths.get(version)
python_path = python_paths.get(version)
if not python_path and not pip_path:
            # neither python nor pip is present for this version
return
if not python_path:
warnings.append('A %s interpreter was not found, yet a matching pip was found at "%s".' % (python_label, pip_path))
return
if not pip_path:
warnings.append('A %s interpreter was found at "%s", yet a matching pip was not found.' % (python_label, python_path))
return
pip_shebang = pip_interpreters.get(version)
match = re.search(r'#!\s*(?P<command>[^\s]+)', pip_shebang)
if not match:
warnings.append('A %s pip was found at "%s", but it does not have a valid shebang: %s' % (python_label, pip_path, pip_shebang))
return
pip_interpreter = os.path.realpath(match.group('command'))
python_interpreter = os.path.realpath(python_path)
if pip_interpreter == python_interpreter:
return
try:
identical = filecmp.cmp(pip_interpreter, python_interpreter)
except OSError:
identical = False
if identical:
return
warnings.append('A %s pip was found at "%s", but it uses interpreter "%s" instead of "%s".' % (
python_label, pip_path, pip_interpreter, python_interpreter))
def __str__(self):
"""
:rtype: str
"""
return json.dumps(self.data, sort_keys=True, indent=4)
def validate(self, target_name, throw):
"""
:type target_name: str
:type throw: bool
:rtype: bool
"""
current = EnvironmentDescription(self.args)
return self.check(self, current, target_name, throw)
@staticmethod
def check(original, current, target_name, throw):
"""
:type original: EnvironmentDescription
:type current: EnvironmentDescription
:type target_name: str
:type throw: bool
:rtype: bool
"""
original_json = str(original)
current_json = str(current)
if original_json == current_json:
return True
unified_diff = '\n'.join(difflib.unified_diff(
a=original_json.splitlines(),
b=current_json.splitlines(),
fromfile='original.json',
tofile='current.json',
lineterm='',
))
message = ('Test target "%s" has changed the test environment!\n'
'If these changes are necessary, they must be reverted before the test finishes.\n'
'>>> Original Environment\n'
'%s\n'
'>>> Current Environment\n'
'%s\n'
'>>> Environment Diff\n'
'%s'
% (target_name, original_json, current_json, unified_diff))
if throw:
raise ApplicationError(message)
display.error(message)
return False
@staticmethod
def get_version(command, warnings):
"""
:type command: list[str]
:type warnings: list[str]
        :rtype: list[str] | None
"""
try:
stdout, stderr = raw_command(command, capture=True, cmd_verbosity=2)
except SubprocessError as ex:
warnings.append(u'%s' % ex)
return None # all failures are equal, we don't care why it failed, only that it did
return [line.strip() for line in ((stdout or '').strip() + (stderr or '').strip()).splitlines()]
@staticmethod
def get_shebang(path):
"""
:type path: str
:rtype: str
"""
with open(path) as script_fd:
return script_fd.readline().strip()
@staticmethod
def get_hash(path):
"""
:type path: str
:rtype: str | None
"""
if not os.path.exists(path):
return None
file_hash = hashlib.md5()
with open(path, 'rb') as file_fd:
file_hash.update(file_fd.read())
return file_hash.hexdigest()
class NoChangesDetected(ApplicationWarning):
"""Exception when change detection was performed, but no changes were found."""
def __init__(self):
super(NoChangesDetected, self).__init__('No changes detected.')
class NoTestsForChanges(ApplicationWarning):
"""Exception when changes detected, but no tests trigger as a result."""
def __init__(self):
super(NoTestsForChanges, self).__init__('No tests found for detected changes.')
class Delegate(Exception):
"""Trigger command delegation."""
def __init__(self, exclude=None, require=None, integration_targets=None):
"""
:type exclude: list[str] | None
:type require: list[str] | None
:type integration_targets: tuple[IntegrationTarget] | None
"""
super(Delegate, self).__init__()
self.exclude = exclude or []
self.require = require or []
self.integration_targets = integration_targets or tuple()
class AllTargetsSkipped(ApplicationWarning):
"""All targets skipped."""
def __init__(self):
super(AllTargetsSkipped, self).__init__('All targets skipped.')
|
gpl-3.0
|
andmos/ansible
|
test/units/modules/network/f5/test_bigip_wait.py
|
21
|
3568
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
import sys
if sys.version_info < (2, 7):
pytestmark = pytest.mark.skip("F5 Ansible modules require Python >= 2.7")
from ansible.module_utils.basic import AnsibleModule
try:
from library.modules.bigip_wait import Parameters
from library.modules.bigip_wait import ModuleManager
from library.modules.bigip_wait import ArgumentSpec
# In Ansible 2.8, Ansible changed import paths.
from test.units.compat import unittest
from test.units.compat.mock import Mock
from test.units.compat.mock import patch
from test.units.modules.utils import set_module_args
except ImportError:
from ansible.modules.network.f5.bigip_wait import Parameters
from ansible.modules.network.f5.bigip_wait import ModuleManager
from ansible.modules.network.f5.bigip_wait import ArgumentSpec
# Ansible 2.8 imports
from units.compat import unittest
from units.compat.mock import Mock
from units.compat.mock import patch
from units.modules.utils import set_module_args
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
def load_fixture(name):
path = os.path.join(fixture_path, name)
if path in fixture_data:
return fixture_data[path]
with open(path) as f:
data = f.read()
try:
data = json.loads(data)
except Exception:
pass
fixture_data[path] = data
return data
class TestParameters(unittest.TestCase):
def test_module_parameters(self):
args = dict(
delay=3,
timeout=500,
sleep=10,
msg='We timed out during waiting for BIG-IP :-('
)
p = Parameters(params=args)
assert p.delay == 3
assert p.timeout == 500
assert p.sleep == 10
assert p.msg == 'We timed out during waiting for BIG-IP :-('
def test_module_string_parameters(self):
args = dict(
delay='3',
timeout='500',
sleep='10',
msg='We timed out during waiting for BIG-IP :-('
)
p = Parameters(params=args)
assert p.delay == 3
assert p.timeout == 500
assert p.sleep == 10
assert p.msg == 'We timed out during waiting for BIG-IP :-('
class TestManager(unittest.TestCase):
def setUp(self):
self.spec = ArgumentSpec()
self.patcher1 = patch('time.sleep')
self.patcher1.start()
def tearDown(self):
self.patcher1.stop()
def test_wait_already_available(self, *args):
set_module_args(dict(
password='password',
server='localhost',
user='admin'
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
# Override methods to force specific logic in the module to happen
mm = ModuleManager(module=module)
mm._connect_to_device = Mock(return_value=True)
mm._device_is_rebooting = Mock(return_value=False)
mm._is_mprov_running_on_device = Mock(return_value=False)
mm._get_client_connection = Mock(return_value=True)
results = mm.exec_module()
assert results['changed'] is False
assert results['elapsed'] == 0
|
gpl-3.0
|
Universal-Model-Converter/UMC3.0a
|
data/Python/x86/Lib/site-packages/scipy/stats/tests/test_continuous_basic.py
|
3
|
14981
|
from __future__ import division, print_function, absolute_import
import warnings
import numpy.testing as npt
import numpy as np
import nose
from scipy import stats
"""
Test all continuous distributions.
Parameters were chosen for those distributions that pass the
Kolmogorov-Smirnov test. This provides safe parameters for each
distribution so that we can perform further testing of class methods.
These tests currently check only/mostly for serious errors and exceptions,
not for numerically exact results.
TODO:
* make functioning test for skew and kurtosis
still known failures - skip for now
"""
#currently not used
DECIMAL = 5 # specify the precision of the tests # increased from 0 to 5
DECIMAL_kurt = 0
distcont = [
['alpha', (3.5704770516650459,)],
['anglit', ()],
['arcsine', ()],
['beta', (2.3098496451481823, 0.62687954300963677)],
['betaprime', (5, 6)], # avoid unbound error in entropy with (100, 86)],
['bradford', (0.29891359763170633,)],
['burr', (10.5, 4.3)], #incorrect mean and var for(0.94839838075366045, 4.3820284068855795)],
['cauchy', ()],
['chi', (78,)],
['chi2', (55,)],
['cosine', ()],
['dgamma', (1.1023326088288166,)],
['dweibull', (2.0685080649914673,)],
['erlang', (20,)], #correction numargs = 1
['expon', ()],
['exponpow', (2.697119160358469,)],
['exponweib', (2.8923945291034436, 1.9505288745913174)],
['f', (29, 18)],
['fatiguelife', (29,)], #correction numargs = 1
['fisk', (3.0857548622253179,)],
['foldcauchy', (4.7164673455831894,)],
['foldnorm', (1.9521253373555869,)],
['frechet_l', (3.6279911255583239,)],
['frechet_r', (1.8928171603534227,)],
['gamma', (1.9932305483800778,)],
['gausshyper', (13.763771604130699, 3.1189636648681431,
2.5145980350183019, 5.1811649903971615)], #veryslow
['genexpon', (9.1325976465418908, 16.231956600590632, 3.2819552690843983)],
['genextreme', (-0.1,)], # sample mean test fails for (3.3184017469423535,)],
['gengamma', (4.4162385429431925, 3.1193091679242761)],
['genhalflogistic', (0.77274727809929322,)],
['genlogistic', (0.41192440799679475,)],
['genpareto', (0.1,)], # use case with finite moments
['gilbrat', ()],
['gompertz', (0.94743713075105251,)],
['gumbel_l', ()],
['gumbel_r', ()],
['halfcauchy', ()],
['halflogistic', ()],
['halfnorm', ()],
['hypsecant', ()],
['invgamma', (2.0668996136993067,)],
['invgauss', (0.14546264555347513,)],
['invweibull', (10.58,)], # sample mean test fails at(0.58847112119264788,)]
['johnsonsb', (4.3172675099141058, 3.1837781130785063)],
['johnsonsu', (2.554395574161155, 2.2482281679651965)],
['ksone', (1000,)], #replace 22 by 100 to avoid failing range, ticket 956
['kstwobign', ()],
['laplace', ()],
['levy', ()],
['levy_l', ()],
# ['levy_stable', (0.35667405469844993,
# -0.67450531578494011)], #NotImplementedError
# rvs not tested
['loggamma', (0.41411931826052117,)],
['logistic', ()],
['loglaplace', (3.2505926592051435,)],
['lognorm', (0.95368226960575331,)],
['lomax', (1.8771398388773268,)],
['maxwell', ()],
['mielke', (10.4, 3.6)], # sample mean test fails for (4.6420495492121487, 0.59707419545516938)],
# mielke: good results if 2nd parameter >2, weird mean or var below
['nakagami', (4.9673794866666237,)],
['ncf', (27, 27, 0.41578441799226107)],
['nct', (14, 0.24045031331198066)],
['ncx2', (21, 1.0560465975116415)],
['norm', ()],
['pareto', (2.621716532144454,)],
['pearson3', (0.1,)],
['powerlaw', (1.6591133289905851,)],
['powerlognorm', (2.1413923530064087, 0.44639540782048337)],
['powernorm', (4.4453652254590779,)],
['rayleigh', ()],
['rdist', (0.9,)], # feels also slow
# ['rdist', (3.8266985793976525,)], #veryslow, especially rvs
#['rdist', (541.0,)], # from ticket #758 #veryslow
['recipinvgauss', (0.63004267809369119,)],
['reciprocal', (0.0062309367010521255, 1.0062309367010522)],
['rice', (0.7749725210111873,)],
['semicircular', ()],
['t', (2.7433514990818093,)],
['triang', (0.15785029824528218,)],
['truncexpon', (4.6907725456810478,)],
['truncnorm', (-1.0978730080013919, 2.7306754109031979)],
['tukeylambda', (3.1321477856738267,)],
['uniform', ()],
['vonmises', (3.9939042581071398,)],
['wald', ()],
['weibull_max', (2.8687961709100187,)],
['weibull_min', (1.7866166930421596,)],
['wrapcauchy', (0.031071279018614728,)]]
# for testing only specific functions
##distcont = [
## ['erlang', (20,)], #correction numargs = 1
## ['fatiguelife', (29,)], #correction numargs = 1
## ['loggamma', (0.41411931826052117,)]]
# for testing ticket:767
##distcont = [
## ['genextreme', (3.3184017469423535,)],
## ['genextreme', (0.01,)],
## ['genextreme', (0.00001,)],
## ['genextreme', (0.0,)],
## ['genextreme', (-0.01,)]
## ]
##distcont = [['gumbel_l', ()],
## ['gumbel_r', ()],
## ['norm', ()]
## ]
##distcont = [['norm', ()]]
distmissing = ['wald', 'gausshyper', 'genexpon', 'rv_continuous',
'loglaplace', 'rdist', 'semicircular', 'invweibull', 'ksone',
'cosine', 'kstwobign', 'truncnorm', 'mielke', 'recipinvgauss', 'levy',
'johnsonsu', 'levy_l', 'powernorm', 'wrapcauchy',
'johnsonsb', 'truncexpon', 'rice', 'invgauss', 'invgamma',
'powerlognorm']
distmiss = [[dist,args] for dist,args in distcont if dist in distmissing]
distslow = ['rdist', 'gausshyper', 'recipinvgauss', 'ksone', 'genexpon',
'vonmises', 'rice', 'mielke', 'semicircular', 'cosine', 'invweibull',
'powerlognorm', 'johnsonsu', 'kstwobign']
#distslow are sorted by speed (very slow to slow)
def _silence_fp_errors(func):
def wrap(*a, **kw):
olderr = np.seterr(all='ignore')
try:
return func(*a, **kw)
finally:
np.seterr(**olderr)
wrap.__name__ = func.__name__
return wrap
@_silence_fp_errors
def test_cont_basic():
# this test skips slow distributions
for distname, arg in distcont[:]:
if distname in distslow:
continue
distfn = getattr(stats, distname)
np.random.seed(765456)
sn = 1000
rvs = distfn.rvs(size=sn,*arg)
sm = rvs.mean()
sv = rvs.var()
skurt = stats.kurtosis(rvs)
sskew = stats.skew(rvs)
m,v = distfn.stats(*arg)
        yield check_sample_meanvar_, distfn, arg, m, v, sm, sv, sn, distname + \
            ' sample mean test'
# the sample skew kurtosis test has known failures, not very good distance measure
#yield check_sample_skew_kurt, distfn, arg, sskew, skurt, distname
yield check_moment, distfn, arg, m, v, distname
yield check_cdf_ppf, distfn, arg, distname
yield check_sf_isf, distfn, arg, distname
yield check_pdf, distfn, arg, distname
if distname in ['wald']:
continue
yield check_pdf_logpdf, distfn, arg, distname
yield check_cdf_logcdf, distfn, arg, distname
yield check_sf_logsf, distfn, arg, distname
if distname in distmissing:
alpha = 0.01
yield check_distribution_rvs, distname, arg, alpha, rvs
@npt.dec.slow
def test_cont_basic_slow():
# same as above for slow distributions
for distname, arg in distcont[:]:
if distname not in distslow: continue
distfn = getattr(stats, distname)
np.random.seed(765456)
sn = 1000
rvs = distfn.rvs(size=sn,*arg)
sm = rvs.mean()
sv = rvs.var()
skurt = stats.kurtosis(rvs)
sskew = stats.skew(rvs)
m,v = distfn.stats(*arg)
        yield check_sample_meanvar_, distfn, arg, m, v, sm, sv, sn, distname + \
            ' sample mean test'
# the sample skew kurtosis test has known failures, not very good distance measure
#yield check_sample_skew_kurt, distfn, arg, sskew, skurt, distname
yield check_moment, distfn, arg, m, v, distname
yield check_cdf_ppf, distfn, arg, distname
yield check_sf_isf, distfn, arg, distname
yield check_pdf, distfn, arg, distname
yield check_pdf_logpdf, distfn, arg, distname
yield check_cdf_logcdf, distfn, arg, distname
yield check_sf_logsf, distfn, arg, distname
#yield check_oth, distfn, arg # is still missing
if distname in distmissing:
alpha = 0.01
yield check_distribution_rvs, distname, arg, alpha, rvs
@_silence_fp_errors
def check_moment(distfn, arg, m, v, msg):
m1 = distfn.moment(1,*arg)
m2 = distfn.moment(2,*arg)
if not np.isinf(m):
npt.assert_almost_equal(m1, m, decimal=10, err_msg= msg + \
' - 1st moment')
else: # or np.isnan(m1),
npt.assert_(np.isinf(m1),
msg + ' - 1st moment -infinite, m1=%s' % str(m1))
#np.isnan(m1) temporary special treatment for loggamma
if not np.isinf(v):
npt.assert_almost_equal(m2-m1*m1, v, decimal=10, err_msg= msg + \
            ' - 2nd moment')
else: #or np.isnan(m2),
npt.assert_(np.isinf(m2),
msg + ' - 2nd moment -infinite, m2=%s' % str(m2))
#np.isnan(m2) temporary special treatment for loggamma
@_silence_fp_errors
def check_sample_meanvar_(distfn, arg, m, v, sm, sv, sn, msg):
#this did not work, skipped silently by nose
#check_sample_meanvar, sm, m, msg + 'sample mean test'
#check_sample_meanvar, sv, v, msg + 'sample var test'
if not np.isinf(m):
check_sample_mean(sm, sv, sn, m)
if not np.isinf(v):
check_sample_var(sv, sn, v)
## check_sample_meanvar( sm, m, msg + 'sample mean test')
## check_sample_meanvar( sv, v, msg + 'sample var test')
def check_sample_mean(sm,v,n, popmean):
"""
from stats.stats.ttest_1samp(a, popmean):
Calculates the t-obtained for the independent samples T-test on ONE group
of scores a, given a population mean.
Returns: t-value, two-tailed prob
"""
## a = asarray(a)
## x = np.mean(a)
## v = np.var(a, ddof=1)
## n = len(a)
df = n-1
svar = ((n-1)*v) / float(df) #looks redundant
t = (sm-popmean)/np.sqrt(svar*(1.0/n))
prob = stats.betai(0.5*df,0.5,df/(df+t*t))
#return t,prob
npt.assert_(prob > 0.01, 'mean fail, t,prob = %f, %f, m,sm=%f,%f' % (t,prob,popmean,sm))
def check_sample_var(sv,n, popvar):
'''
two-sided chisquare test for sample variance equal to hypothesized variance
'''
df = n-1
    chi2 = (n-1)*sv/float(popvar)  # chi-square statistic for H0: variance == popvar
pval = stats.chisqprob(chi2,df)*2
npt.assert_(pval > 0.01, 'var fail, t,pval = %f, %f, v,sv=%f,%f' % (chi2,pval,popvar,sv))
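# Illustrative sketch (not collected by nose and never called by the suite) of
# how the two helpers above are meant to be used on a hand-made sample.  The
# distribution, seed and sample size are arbitrary demonstration choices.
def _demo_sample_checks():
    np.random.seed(1234)
    rvs = stats.norm.rvs(size=500)        # draw a sample from N(0, 1)
    sm, sv, n = rvs.mean(), rvs.var(), len(rvs)
    check_sample_mean(sm, sv, n, popmean=0.0)  # one-sample t-test against mu = 0
    check_sample_var(sv, n, popvar=1.0)        # chi-square test against sigma^2 = 1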
def check_sample_skew_kurt(distfn, arg, ss, sk, msg):
skew,kurt = distfn.stats(moments='sk',*arg)
## skew = distfn.stats(moment='s',*arg)[()]
## kurt = distfn.stats(moment='k',*arg)[()]
check_sample_meanvar( sk, kurt, msg + 'sample kurtosis test')
check_sample_meanvar( ss, skew, msg + 'sample skew test')
def check_sample_meanvar(sm,m,msg):
if not np.isinf(m) and not np.isnan(m):
npt.assert_almost_equal(sm, m, decimal=DECIMAL, err_msg= msg + \
' - finite moment')
## else:
## npt.assert_(abs(sm) > 10000), msg='infinite moment, sm = ' + str(sm))
@_silence_fp_errors
def check_cdf_ppf(distfn,arg,msg):
values = [0.001, 0.5, 0.999]
npt.assert_almost_equal(distfn.cdf(distfn.ppf(values, *arg), *arg),
values, decimal=DECIMAL, err_msg= msg + \
' - cdf-ppf roundtrip')
@_silence_fp_errors
def check_sf_isf(distfn,arg,msg):
npt.assert_almost_equal(distfn.sf(distfn.isf([0.1,0.5,0.9], *arg), *arg),
[0.1,0.5,0.9], decimal=DECIMAL, err_msg= msg + \
' - sf-isf roundtrip')
npt.assert_almost_equal(distfn.cdf([0.1,0.9], *arg),
1.0-distfn.sf([0.1,0.9], *arg),
decimal=DECIMAL, err_msg= msg + \
' - cdf-sf relationship')
@_silence_fp_errors
def check_pdf(distfn, arg, msg):
# compares pdf at median with numerical derivative of cdf
median = distfn.ppf(0.5, *arg)
eps = 1e-6
pdfv = distfn.pdf(median, *arg)
if (pdfv < 1e-4) or (pdfv > 1e4):
# avoid checking a case where pdf is close to zero or huge (singularity)
median = median + 0.1
pdfv = distfn.pdf(median, *arg)
cdfdiff = (distfn.cdf(median + eps, *arg) -
distfn.cdf(median - eps, *arg))/eps/2.0
#replace with better diff and better test (more points),
#actually, this works pretty well
npt.assert_almost_equal(pdfv, cdfdiff,
decimal=DECIMAL, err_msg= msg + ' - cdf-pdf relationship')
@_silence_fp_errors
def check_pdf_logpdf(distfn, args, msg):
# compares pdf at several points with the log of the pdf
points = np.array([0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8])
vals = distfn.ppf(points, *args)
pdf = distfn.pdf(vals, *args)
logpdf = distfn.logpdf(vals, *args)
pdf = pdf[pdf != 0]
logpdf = logpdf[np.isfinite(logpdf)]
npt.assert_almost_equal(np.log(pdf), logpdf, decimal=7, err_msg=msg + " - logpdf-log(pdf) relationship")
@_silence_fp_errors
def check_sf_logsf(distfn, args, msg):
# compares sf at several points with the log of the sf
points = np.array([0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8])
vals = distfn.ppf(points, *args)
sf = distfn.sf(vals, *args)
logsf = distfn.logsf(vals, *args)
sf = sf[sf != 0]
logsf = logsf[np.isfinite(logsf)]
npt.assert_almost_equal(np.log(sf), logsf, decimal=7, err_msg=msg + " - logsf-log(sf) relationship")
@_silence_fp_errors
def check_cdf_logcdf(distfn, args, msg):
# compares cdf at several points with the log of the cdf
points = np.array([0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8])
vals = distfn.ppf(points, *args)
cdf = distfn.cdf(vals, *args)
logcdf = distfn.logcdf(vals, *args)
cdf = cdf[cdf != 0]
logcdf = logcdf[np.isfinite(logcdf)]
npt.assert_almost_equal(np.log(cdf), logcdf, decimal=7, err_msg=msg + " - logcdf-log(cdf) relationship")
@_silence_fp_errors
def check_distribution_rvs(dist, args, alpha, rvs):
#test from scipy.stats.tests
#this version reuses existing random variables
D,pval = stats.kstest(rvs, dist, args=args, N=1000)
if (pval < alpha):
D,pval = stats.kstest(dist,'',args=args, N=1000)
npt.assert_(pval > alpha, "D = " + str(D) + "; pval = " + str(pval) +
"; alpha = " + str(alpha) + "\nargs = " + str(args))
if __name__ == "__main__":
#nose.run(argv=['', __file__])
nose.runmodule(argv=[__file__,'-s'], exit=False)
|
mit
|
zerobatu/edx-platform
|
cms/djangoapps/contentstore/views/helpers.py
|
51
|
11507
|
"""
Helper methods for Studio views.
"""
from __future__ import absolute_import
from uuid import uuid4
import urllib
from django.conf import settings
from django.http import HttpResponse
from django.shortcuts import redirect
from django.utils.translation import ugettext as _
from edxmako.shortcuts import render_to_string, render_to_response
from opaque_keys.edx.keys import UsageKey
from xblock.core import XBlock
import dogstats_wrapper as dog_stats_api
from xmodule.modulestore.django import modulestore
from xmodule.x_module import DEPRECATION_VSCOMPAT_EVENT
from xmodule.tabs import StaticTab
from contentstore.utils import reverse_course_url, reverse_library_url, reverse_usage_url
from models.settings.course_grading import CourseGradingModel
__all__ = ['edge', 'event', 'landing']
# Note: Grader types are used throughout the platform but most usages are simply in-line
# strings. In addition, new grader types can be defined on the fly anytime one is needed
# (because they're just strings). This dict is an attempt to constrain the sprawl in Studio.
GRADER_TYPES = {
"HOMEWORK": "Homework",
"LAB": "Lab",
"ENTRANCE_EXAM": "Entrance Exam",
"MIDTERM_EXAM": "Midterm Exam",
"FINAL_EXAM": "Final Exam"
}
# points to the temporary course landing page with log in and sign up
def landing(request, org, course, coursename):
return render_to_response('temp-course-landing.html', {})
# points to the temporary edge page
def edge(request):
return redirect('/')
def event(request):
'''
A noop to swallow the analytics call so that cms methods don't spook and poor developers looking at
console logs don't get distracted :-)
'''
return HttpResponse(status=204)
def render_from_lms(template_name, dictionary, context=None, namespace='main'):
"""
Render a template using the LMS MAKO_TEMPLATES
"""
return render_to_string(template_name, dictionary, context, namespace="lms." + namespace)
def get_parent_xblock(xblock):
"""
Returns the xblock that is the parent of the specified xblock, or None if it has no parent.
"""
locator = xblock.location
parent_location = modulestore().get_parent_location(locator)
if parent_location is None:
return None
return modulestore().get_item(parent_location)
def is_unit(xblock, parent_xblock=None):
"""
Returns true if the specified xblock is a vertical that is treated as a unit.
A unit is a vertical that is a direct child of a sequential (aka a subsection).
"""
if xblock.category == 'vertical':
if parent_xblock is None:
parent_xblock = get_parent_xblock(xblock)
parent_category = parent_xblock.category if parent_xblock else None
return parent_category == 'sequential'
return False
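# For example (illustrative): a 'vertical' whose parent is a 'sequential' is a
# unit; a 'vertical' nested inside another 'vertical' is not.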
def xblock_has_own_studio_page(xblock, parent_xblock=None):
"""
Returns true if the specified xblock has an associated Studio page. Most xblocks do
not have their own page but are instead shown on the page of their parent. There
are a few exceptions:
1. Courses
2. Verticals that are either:
- themselves treated as units
- a direct child of a unit
3. XBlocks that support children
"""
category = xblock.category
if is_unit(xblock, parent_xblock):
return True
elif category == 'vertical':
if parent_xblock is None:
parent_xblock = get_parent_xblock(xblock)
return is_unit(parent_xblock) if parent_xblock else False
# All other xblocks with children have their own page
return xblock.has_children
def xblock_studio_url(xblock, parent_xblock=None):
"""
Returns the Studio editing URL for the specified xblock.
"""
if not xblock_has_own_studio_page(xblock, parent_xblock):
return None
category = xblock.category
if category == 'course':
return reverse_course_url('course_handler', xblock.location.course_key)
elif category in ('chapter', 'sequential'):
return u'{url}?show={usage_key}'.format(
url=reverse_course_url('course_handler', xblock.location.course_key),
usage_key=urllib.quote(unicode(xblock.location))
)
elif category == 'library':
library_key = xblock.location.course_key
return reverse_library_url('library_handler', library_key)
else:
return reverse_usage_url('container_handler', xblock.location)
def xblock_type_display_name(xblock, default_display_name=None):
"""
Returns the display name for the specified type of xblock. Note that an instance can be passed in
for context dependent names, e.g. a vertical beneath a sequential is a Unit.
:param xblock: An xblock instance or the type of xblock.
:param default_display_name: The default value to return if no display name can be found.
:return:
"""
if hasattr(xblock, 'category'):
category = xblock.category
if category == 'vertical' and not is_unit(xblock):
return _('Vertical')
else:
category = xblock
if category == 'chapter':
return _('Section')
elif category == 'sequential':
return _('Subsection')
elif category == 'vertical':
return _('Unit')
component_class = XBlock.load_class(category, select=settings.XBLOCK_SELECT_FUNCTION)
if hasattr(component_class, 'display_name') and component_class.display_name.default:
return _(component_class.display_name.default) # pylint: disable=translation-of-non-string
else:
return default_display_name
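# For example (illustrative):
#   xblock_type_display_name('chapter')    -> _('Section')
#   xblock_type_display_name('sequential') -> _('Subsection')
#   xblock_type_display_name('vertical')   -> _('Unit')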
def xblock_primary_child_category(xblock):
"""
Returns the primary child category for the specified xblock, or None if there is not a primary category.
"""
category = xblock.category
if category == 'course':
return 'chapter'
elif category == 'chapter':
return 'sequential'
elif category == 'sequential':
return 'vertical'
return None
def usage_key_with_run(usage_key_string):
"""
Converts usage_key_string to a UsageKey, adding a course run if necessary
"""
usage_key = UsageKey.from_string(usage_key_string)
usage_key = usage_key.replace(course_key=modulestore().fill_in_run(usage_key.course_key))
return usage_key
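# Illustrative example (the org/course/name values are hypothetical): an
# old-style key such as 'i4x://edX/DemoX/problem/Sample' carries no course run,
# so the modulestore is asked to fill the run in before the key is used.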
def remove_entrance_exam_graders(course_key, user):
"""
Removes existing entrance exam graders attached to the specified course
Typically used when adding/removing an entrance exam.
"""
grading_model = CourseGradingModel.fetch(course_key)
graders = grading_model.graders
for i, grader in enumerate(graders):
if grader['type'] == GRADER_TYPES['ENTRANCE_EXAM']:
CourseGradingModel.delete_grader(course_key, i, user)
def create_xblock(parent_locator, user, category, display_name, boilerplate=None, is_entrance_exam=False):
"""
Performs the actual grunt work of creating items/xblocks -- knows nothing about requests, views, etc.
"""
store = modulestore()
usage_key = usage_key_with_run(parent_locator)
with store.bulk_operations(usage_key.course_key):
parent = store.get_item(usage_key)
dest_usage_key = usage_key.replace(category=category, name=uuid4().hex)
# get the metadata, display_name, and definition from the caller
metadata = {}
data = None
template_id = boilerplate
if template_id:
clz = parent.runtime.load_block_type(category)
if clz is not None:
template = clz.get_template(template_id)
if template is not None:
metadata = template.get('metadata', {})
data = template.get('data')
if display_name is not None:
metadata['display_name'] = display_name
# We should use the 'fields' kwarg for newer module settings/values (vs. metadata or data)
fields = {}
# Entrance Exams: Chapter module positioning
child_position = None
if settings.FEATURES.get('ENTRANCE_EXAMS', False):
if category == 'chapter' and is_entrance_exam:
fields['is_entrance_exam'] = is_entrance_exam
fields['in_entrance_exam'] = True # Inherited metadata, all children will have it
child_position = 0
# TODO need to fix components that are sending definition_data as strings, instead of as dicts
# For now, migrate them into dicts here.
if isinstance(data, basestring):
data = {'data': data}
created_block = store.create_child(
user.id,
usage_key,
dest_usage_key.block_type,
block_id=dest_usage_key.block_id,
fields=fields,
definition_data=data,
metadata=metadata,
runtime=parent.runtime,
position=child_position,
)
# Entrance Exams: Grader assignment
if settings.FEATURES.get('ENTRANCE_EXAMS', False):
course_key = usage_key.course_key
course = store.get_course(course_key)
if hasattr(course, 'entrance_exam_enabled') and course.entrance_exam_enabled:
if category == 'sequential' and parent_locator == course.entrance_exam_id:
# Clean up any pre-existing entrance exam graders
remove_entrance_exam_graders(course_key, user)
grader = {
"type": GRADER_TYPES['ENTRANCE_EXAM'],
"min_count": 0,
"drop_count": 0,
"short_label": "Entrance",
"weight": 0
}
grading_model = CourseGradingModel.update_grader_from_json(
course.id,
grader,
user
)
CourseGradingModel.update_section_grader_type(
created_block,
grading_model['type'],
user
)
# VS[compat] cdodge: This is a hack because static_tabs also have references from the course module, so
# if we add one then we need to also add it to the policy information (i.e. metadata)
# we should remove this once we can break this reference from the course to static tabs
if category == 'static_tab':
dog_stats_api.increment(
DEPRECATION_VSCOMPAT_EVENT,
tags=(
"location:create_xblock_static_tab",
u"course:{}".format(unicode(dest_usage_key.course_key)),
)
)
display_name = display_name or _("Empty") # Prevent name being None
course = store.get_course(dest_usage_key.course_key)
course.tabs.append(
StaticTab(
name=display_name,
url_slug=dest_usage_key.name,
)
)
store.update_item(course, user.id)
return created_block
def is_item_in_course_tree(item):
"""
Check that the item is in the course tree.
It's possible that the item is not in the course tree
if its parent has been deleted and is now an orphan.
"""
ancestor = item.get_parent()
while ancestor is not None and ancestor.location.category != "course":
ancestor = ancestor.get_parent()
return ancestor is not None
|
agpl-3.0
|
mancoast/CPythonPyc_test
|
crash/273_test_ssl.py
|
21
|
59319
|
# Test the support for SSL and sockets
import sys
import unittest
from test import test_support
import asyncore
import socket
import select
import time
import gc
import os
import errno
import pprint
import urllib, urlparse
import traceback
import weakref
import functools
import platform
from BaseHTTPServer import HTTPServer
from SimpleHTTPServer import SimpleHTTPRequestHandler
ssl = test_support.import_module("ssl")
HOST = test_support.HOST
CERTFILE = None
SVN_PYTHON_ORG_ROOT_CERT = None
def handle_error(prefix):
exc_format = ' '.join(traceback.format_exception(*sys.exc_info()))
if test_support.verbose:
sys.stdout.write(prefix + exc_format)
class BasicTests(unittest.TestCase):
def test_sslwrap_simple(self):
# A crude test for the legacy API
try:
ssl.sslwrap_simple(socket.socket(socket.AF_INET))
except IOError, e:
if e.errno == 32: # broken pipe when ssl_sock.do_handshake(), this test doesn't care about that
pass
else:
raise
try:
ssl.sslwrap_simple(socket.socket(socket.AF_INET)._sock)
except IOError, e:
if e.errno == 32: # broken pipe when ssl_sock.do_handshake(), this test doesn't care about that
pass
else:
raise
# Issue #9415: Ubuntu hijacks their OpenSSL and forcefully disables SSLv2
def skip_if_broken_ubuntu_ssl(func):
if hasattr(ssl, 'PROTOCOL_SSLv2'):
# We need to access the lower-level wrapper in order to create an
# implicit SSL context without trying to connect or listen.
try:
import _ssl
except ImportError:
# The returned function won't get executed, just ignore the error
pass
@functools.wraps(func)
def f(*args, **kwargs):
try:
s = socket.socket(socket.AF_INET)
_ssl.sslwrap(s._sock, 0, None, None,
ssl.CERT_NONE, ssl.PROTOCOL_SSLv2, None, None)
except ssl.SSLError as e:
if (ssl.OPENSSL_VERSION_INFO == (0, 9, 8, 15, 15) and
platform.linux_distribution() == ('debian', 'squeeze/sid', '')
and 'Invalid SSL protocol variant specified' in str(e)):
raise unittest.SkipTest("Patched Ubuntu OpenSSL breaks behaviour")
return func(*args, **kwargs)
return f
else:
return func
class BasicSocketTests(unittest.TestCase):
def test_constants(self):
#ssl.PROTOCOL_SSLv2
ssl.PROTOCOL_SSLv23
ssl.PROTOCOL_SSLv3
ssl.PROTOCOL_TLSv1
ssl.CERT_NONE
ssl.CERT_OPTIONAL
ssl.CERT_REQUIRED
def test_random(self):
v = ssl.RAND_status()
if test_support.verbose:
sys.stdout.write("\n RAND_status is %d (%s)\n"
% (v, (v and "sufficient randomness") or
"insufficient randomness"))
try:
ssl.RAND_egd(1)
except TypeError:
pass
else:
print "didn't raise TypeError"
ssl.RAND_add("this is a random string", 75.0)
def test_parse_cert(self):
# note that this uses an 'unofficial' function in _ssl.c,
# provided solely for this test, to exercise the certificate
# parsing code
p = ssl._ssl._test_decode_cert(CERTFILE, False)
if test_support.verbose:
sys.stdout.write("\n" + pprint.pformat(p) + "\n")
self.assertEqual(p['subject'],
((('countryName', u'US'),),
(('stateOrProvinceName', u'Delaware'),),
(('localityName', u'Wilmington'),),
(('organizationName', u'Python Software Foundation'),),
(('organizationalUnitName', u'SSL'),),
(('commonName', u'somemachine.python.org'),)),
)
# Issue #13034: the subjectAltName in some certificates
# (notably projects.developer.nokia.com:443) wasn't parsed
p = ssl._ssl._test_decode_cert(NOKIACERT)
if test_support.verbose:
sys.stdout.write("\n" + pprint.pformat(p) + "\n")
self.assertEqual(p['subjectAltName'],
(('DNS', 'projects.developer.nokia.com'),
('DNS', 'projects.forum.nokia.com'))
)
def test_DER_to_PEM(self):
with open(SVN_PYTHON_ORG_ROOT_CERT, 'r') as f:
pem = f.read()
d1 = ssl.PEM_cert_to_DER_cert(pem)
p2 = ssl.DER_cert_to_PEM_cert(d1)
d2 = ssl.PEM_cert_to_DER_cert(p2)
self.assertEqual(d1, d2)
if not p2.startswith(ssl.PEM_HEADER + '\n'):
self.fail("DER-to-PEM didn't include correct header:\n%r\n" % p2)
if not p2.endswith('\n' + ssl.PEM_FOOTER + '\n'):
self.fail("DER-to-PEM didn't include correct footer:\n%r\n" % p2)
def test_openssl_version(self):
n = ssl.OPENSSL_VERSION_NUMBER
t = ssl.OPENSSL_VERSION_INFO
s = ssl.OPENSSL_VERSION
self.assertIsInstance(n, (int, long))
self.assertIsInstance(t, tuple)
self.assertIsInstance(s, str)
# Some sanity checks follow
# >= 0.9
self.assertGreaterEqual(n, 0x900000)
# < 2.0
self.assertLess(n, 0x20000000)
major, minor, fix, patch, status = t
self.assertGreaterEqual(major, 0)
self.assertLess(major, 2)
self.assertGreaterEqual(minor, 0)
self.assertLess(minor, 256)
self.assertGreaterEqual(fix, 0)
self.assertLess(fix, 256)
self.assertGreaterEqual(patch, 0)
self.assertLessEqual(patch, 26)
self.assertGreaterEqual(status, 0)
self.assertLessEqual(status, 15)
# Version string as returned by OpenSSL, the format might change
self.assertTrue(s.startswith("OpenSSL {:d}.{:d}.{:d}".format(major, minor, fix)),
(s, t))
def test_ciphers(self):
if not test_support.is_resource_enabled('network'):
return
remote = ("svn.python.org", 443)
with test_support.transient_internet(remote[0]):
s = ssl.wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_NONE, ciphers="ALL")
s.connect(remote)
s = ssl.wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_NONE, ciphers="DEFAULT")
s.connect(remote)
# Error checking occurs when connecting, because the SSL context
# isn't created before.
s = ssl.wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_NONE, ciphers="^$:,;?*'dorothyx")
with self.assertRaisesRegexp(ssl.SSLError, "No cipher can be selected"):
s.connect(remote)
@test_support.cpython_only
def test_refcycle(self):
# Issue #7943: an SSL object doesn't create reference cycles with
# itself.
s = socket.socket(socket.AF_INET)
ss = ssl.wrap_socket(s)
wr = weakref.ref(ss)
del ss
self.assertEqual(wr(), None)
def test_wrapped_unconnected(self):
# The _delegate_methods in socket.py are correctly delegated to by an
# unconnected SSLSocket, so they will raise a socket.error rather than
# something unexpected like TypeError.
s = socket.socket(socket.AF_INET)
ss = ssl.wrap_socket(s)
self.assertRaises(socket.error, ss.recv, 1)
self.assertRaises(socket.error, ss.recv_into, bytearray(b'x'))
self.assertRaises(socket.error, ss.recvfrom, 1)
self.assertRaises(socket.error, ss.recvfrom_into, bytearray(b'x'), 1)
self.assertRaises(socket.error, ss.send, b'x')
self.assertRaises(socket.error, ss.sendto, b'x', ('0.0.0.0', 0))
class NetworkedTests(unittest.TestCase):
def test_connect(self):
with test_support.transient_internet("svn.python.org"):
s = ssl.wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_NONE)
s.connect(("svn.python.org", 443))
c = s.getpeercert()
if c:
self.fail("Peer cert %s shouldn't be here!")
s.close()
# this should fail because we have no verification certs
s = ssl.wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_REQUIRED)
try:
s.connect(("svn.python.org", 443))
except ssl.SSLError:
pass
finally:
s.close()
# this should succeed because we specify the root cert
s = ssl.wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_REQUIRED,
ca_certs=SVN_PYTHON_ORG_ROOT_CERT)
try:
s.connect(("svn.python.org", 443))
finally:
s.close()
def test_connect_ex(self):
# Issue #11326: check connect_ex() implementation
with test_support.transient_internet("svn.python.org"):
s = ssl.wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_REQUIRED,
ca_certs=SVN_PYTHON_ORG_ROOT_CERT)
try:
self.assertEqual(0, s.connect_ex(("svn.python.org", 443)))
self.assertTrue(s.getpeercert())
finally:
s.close()
def test_non_blocking_connect_ex(self):
# Issue #11326: non-blocking connect_ex() should allow handshake
# to proceed after the socket gets ready.
with test_support.transient_internet("svn.python.org"):
s = ssl.wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_REQUIRED,
ca_certs=SVN_PYTHON_ORG_ROOT_CERT,
do_handshake_on_connect=False)
try:
s.setblocking(False)
rc = s.connect_ex(('svn.python.org', 443))
# EWOULDBLOCK under Windows, EINPROGRESS elsewhere
self.assertIn(rc, (0, errno.EINPROGRESS, errno.EWOULDBLOCK))
# Wait for connect to finish
select.select([], [s], [], 5.0)
# Non-blocking handshake
while True:
try:
s.do_handshake()
break
except ssl.SSLError as err:
if err.args[0] == ssl.SSL_ERROR_WANT_READ:
select.select([s], [], [], 5.0)
elif err.args[0] == ssl.SSL_ERROR_WANT_WRITE:
select.select([], [s], [], 5.0)
else:
raise
# SSL established
self.assertTrue(s.getpeercert())
finally:
s.close()
@unittest.skipIf(os.name == "nt", "Can't use a socket as a file under Windows")
def test_makefile_close(self):
# Issue #5238: creating a file-like object with makefile() shouldn't
# delay closing the underlying "real socket" (here tested with its
# file descriptor, hence skipping the test under Windows).
with test_support.transient_internet("svn.python.org"):
ss = ssl.wrap_socket(socket.socket(socket.AF_INET))
ss.connect(("svn.python.org", 443))
fd = ss.fileno()
f = ss.makefile()
f.close()
# The fd is still open
os.read(fd, 0)
# Closing the SSL socket should close the fd too
ss.close()
gc.collect()
with self.assertRaises(OSError) as e:
os.read(fd, 0)
self.assertEqual(e.exception.errno, errno.EBADF)
def test_non_blocking_handshake(self):
with test_support.transient_internet("svn.python.org"):
s = socket.socket(socket.AF_INET)
s.connect(("svn.python.org", 443))
s.setblocking(False)
s = ssl.wrap_socket(s,
cert_reqs=ssl.CERT_NONE,
do_handshake_on_connect=False)
count = 0
while True:
try:
count += 1
s.do_handshake()
break
except ssl.SSLError, err:
if err.args[0] == ssl.SSL_ERROR_WANT_READ:
select.select([s], [], [])
elif err.args[0] == ssl.SSL_ERROR_WANT_WRITE:
select.select([], [s], [])
else:
raise
s.close()
if test_support.verbose:
sys.stdout.write("\nNeeded %d calls to do_handshake() to establish session.\n" % count)
def test_get_server_certificate(self):
with test_support.transient_internet("svn.python.org"):
pem = ssl.get_server_certificate(("svn.python.org", 443))
if not pem:
self.fail("No server certificate on svn.python.org:443!")
try:
pem = ssl.get_server_certificate(("svn.python.org", 443), ca_certs=CERTFILE)
except ssl.SSLError:
#should fail
pass
else:
self.fail("Got server certificate %s for svn.python.org!" % pem)
pem = ssl.get_server_certificate(("svn.python.org", 443), ca_certs=SVN_PYTHON_ORG_ROOT_CERT)
if not pem:
self.fail("No server certificate on svn.python.org:443!")
if test_support.verbose:
sys.stdout.write("\nVerified certificate for svn.python.org:443 is\n%s\n" % pem)
def test_algorithms(self):
# Issue #8484: all algorithms should be available when verifying a
# certificate.
# SHA256 was added in OpenSSL 0.9.8
if ssl.OPENSSL_VERSION_INFO < (0, 9, 8, 0, 15):
self.skipTest("SHA256 not available on %r" % ssl.OPENSSL_VERSION)
# NOTE: https://sha256.tbs-internet.com is another possible test host
remote = ("sha256.tbs-internet.com", 443)
sha256_cert = os.path.join(os.path.dirname(__file__), "sha256.pem")
with test_support.transient_internet("sha256.tbs-internet.com"):
s = ssl.wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_REQUIRED,
ca_certs=sha256_cert,)
try:
s.connect(remote)
if test_support.verbose:
sys.stdout.write("\nCipher with %r is %r\n" %
(remote, s.cipher()))
sys.stdout.write("Certificate is:\n%s\n" %
pprint.pformat(s.getpeercert()))
finally:
s.close()
try:
import threading
except ImportError:
_have_threads = False
else:
_have_threads = True
class ThreadedEchoServer(threading.Thread):
class ConnectionHandler(threading.Thread):
"""A mildly complicated class, because we want it to work both
with and without the SSL wrapper around the socket connection, so
that we can test the STARTTLS functionality."""
def __init__(self, server, connsock):
self.server = server
self.running = False
self.sock = connsock
self.sock.setblocking(1)
self.sslconn = None
threading.Thread.__init__(self)
self.daemon = True
def show_conn_details(self):
if self.server.certreqs == ssl.CERT_REQUIRED:
cert = self.sslconn.getpeercert()
if test_support.verbose and self.server.chatty:
sys.stdout.write(" client cert is " + pprint.pformat(cert) + "\n")
cert_binary = self.sslconn.getpeercert(True)
if test_support.verbose and self.server.chatty:
sys.stdout.write(" cert binary is " + str(len(cert_binary)) + " bytes\n")
cipher = self.sslconn.cipher()
if test_support.verbose and self.server.chatty:
sys.stdout.write(" server: connection cipher is now " + str(cipher) + "\n")
def wrap_conn(self):
try:
self.sslconn = ssl.wrap_socket(self.sock, server_side=True,
certfile=self.server.certificate,
ssl_version=self.server.protocol,
ca_certs=self.server.cacerts,
cert_reqs=self.server.certreqs,
ciphers=self.server.ciphers)
except ssl.SSLError as e:
# XXX Various errors can have happened here, for example
# a mismatching protocol version, an invalid certificate,
# or a low-level bug. This should be made more discriminating.
self.server.conn_errors.append(e)
if self.server.chatty:
handle_error("\n server: bad connection attempt from " +
str(self.sock.getpeername()) + ":\n")
self.close()
self.running = False
self.server.stop()
return False
else:
return True
def read(self):
if self.sslconn:
return self.sslconn.read()
else:
return self.sock.recv(1024)
def write(self, bytes):
if self.sslconn:
return self.sslconn.write(bytes)
else:
return self.sock.send(bytes)
def close(self):
if self.sslconn:
self.sslconn.close()
else:
self.sock._sock.close()
def run(self):
self.running = True
if not self.server.starttls_server:
if isinstance(self.sock, ssl.SSLSocket):
self.sslconn = self.sock
elif not self.wrap_conn():
return
self.show_conn_details()
while self.running:
try:
msg = self.read()
if not msg:
# eof, so quit this handler
self.running = False
self.close()
elif msg.strip() == 'over':
if test_support.verbose and self.server.connectionchatty:
sys.stdout.write(" server: client closed connection\n")
self.close()
return
elif self.server.starttls_server and msg.strip() == 'STARTTLS':
if test_support.verbose and self.server.connectionchatty:
sys.stdout.write(" server: read STARTTLS from client, sending OK...\n")
self.write("OK\n")
if not self.wrap_conn():
return
elif self.server.starttls_server and self.sslconn and msg.strip() == 'ENDTLS':
if test_support.verbose and self.server.connectionchatty:
sys.stdout.write(" server: read ENDTLS from client, sending OK...\n")
self.write("OK\n")
self.sslconn.unwrap()
self.sslconn = None
if test_support.verbose and self.server.connectionchatty:
sys.stdout.write(" server: connection is now unencrypted...\n")
else:
if (test_support.verbose and
self.server.connectionchatty):
ctype = (self.sslconn and "encrypted") or "unencrypted"
sys.stdout.write(" server: read %s (%s), sending back %s (%s)...\n"
% (repr(msg), ctype, repr(msg.lower()), ctype))
self.write(msg.lower())
except ssl.SSLError:
if self.server.chatty:
handle_error("Test server failure:\n")
self.close()
self.running = False
# normally, we'd just stop here, but for the test
# harness, we want to stop the server
self.server.stop()
def __init__(self, certificate, ssl_version=None,
certreqs=None, cacerts=None,
chatty=True, connectionchatty=False, starttls_server=False,
wrap_accepting_socket=False, ciphers=None):
if ssl_version is None:
ssl_version = ssl.PROTOCOL_TLSv1
if certreqs is None:
certreqs = ssl.CERT_NONE
self.certificate = certificate
self.protocol = ssl_version
self.certreqs = certreqs
self.cacerts = cacerts
self.ciphers = ciphers
self.chatty = chatty
self.connectionchatty = connectionchatty
self.starttls_server = starttls_server
self.sock = socket.socket()
self.flag = None
if wrap_accepting_socket:
self.sock = ssl.wrap_socket(self.sock, server_side=True,
certfile=self.certificate,
cert_reqs = self.certreqs,
ca_certs = self.cacerts,
ssl_version = self.protocol,
ciphers = self.ciphers)
if test_support.verbose and self.chatty:
sys.stdout.write(' server: wrapped server socket as %s\n' % str(self.sock))
self.port = test_support.bind_port(self.sock)
self.active = False
self.conn_errors = []
threading.Thread.__init__(self)
self.daemon = True
def __enter__(self):
self.start(threading.Event())
self.flag.wait()
return self
def __exit__(self, *args):
self.stop()
self.join()
def start(self, flag=None):
self.flag = flag
threading.Thread.start(self)
def run(self):
self.sock.settimeout(0.05)
self.sock.listen(5)
self.active = True
if self.flag:
# signal an event
self.flag.set()
while self.active:
try:
newconn, connaddr = self.sock.accept()
if test_support.verbose and self.chatty:
sys.stdout.write(' server: new connection from '
+ str(connaddr) + '\n')
handler = self.ConnectionHandler(self, newconn)
handler.start()
handler.join()
except socket.timeout:
pass
except KeyboardInterrupt:
self.stop()
self.sock.close()
def stop(self):
self.active = False
class AsyncoreEchoServer(threading.Thread):
class EchoServer(asyncore.dispatcher):
class ConnectionHandler(asyncore.dispatcher_with_send):
def __init__(self, conn, certfile):
asyncore.dispatcher_with_send.__init__(self, conn)
self.socket = ssl.wrap_socket(conn, server_side=True,
certfile=certfile,
do_handshake_on_connect=False)
self._ssl_accepting = True
def readable(self):
if isinstance(self.socket, ssl.SSLSocket):
while self.socket.pending() > 0:
self.handle_read_event()
return True
def _do_ssl_handshake(self):
try:
self.socket.do_handshake()
except ssl.SSLError, err:
if err.args[0] in (ssl.SSL_ERROR_WANT_READ,
ssl.SSL_ERROR_WANT_WRITE):
return
elif err.args[0] == ssl.SSL_ERROR_EOF:
return self.handle_close()
raise
except socket.error, err:
if err.args[0] == errno.ECONNABORTED:
return self.handle_close()
else:
self._ssl_accepting = False
def handle_read(self):
if self._ssl_accepting:
self._do_ssl_handshake()
else:
data = self.recv(1024)
if data and data.strip() != 'over':
self.send(data.lower())
def handle_close(self):
self.close()
if test_support.verbose:
sys.stdout.write(" server: closed connection %s\n" % self.socket)
def handle_error(self):
raise
def __init__(self, certfile):
self.certfile = certfile
asyncore.dispatcher.__init__(self)
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
self.port = test_support.bind_port(self.socket)
self.listen(5)
def handle_accept(self):
sock_obj, addr = self.accept()
if test_support.verbose:
sys.stdout.write(" server: new connection from %s:%s\n" %addr)
self.ConnectionHandler(sock_obj, self.certfile)
def handle_error(self):
raise
def __init__(self, certfile):
self.flag = None
self.active = False
self.server = self.EchoServer(certfile)
self.port = self.server.port
threading.Thread.__init__(self)
self.daemon = True
def __str__(self):
return "<%s %s>" % (self.__class__.__name__, self.server)
def __enter__(self):
self.start(threading.Event())
self.flag.wait()
return self
def __exit__(self, *args):
if test_support.verbose:
sys.stdout.write(" cleanup: stopping server.\n")
self.stop()
if test_support.verbose:
sys.stdout.write(" cleanup: joining server thread.\n")
self.join()
if test_support.verbose:
sys.stdout.write(" cleanup: successfully joined.\n")
def start(self, flag=None):
self.flag = flag
threading.Thread.start(self)
def run(self):
self.active = True
if self.flag:
self.flag.set()
while self.active:
asyncore.loop(0.05)
def stop(self):
self.active = False
self.server.close()
class SocketServerHTTPSServer(threading.Thread):
class HTTPSServer(HTTPServer):
def __init__(self, server_address, RequestHandlerClass, certfile):
HTTPServer.__init__(self, server_address, RequestHandlerClass)
# we assume the certfile contains both private key and certificate
self.certfile = certfile
self.allow_reuse_address = True
def __str__(self):
return ('<%s %s:%s>' %
(self.__class__.__name__,
self.server_name,
self.server_port))
def get_request(self):
# override this to wrap socket with SSL
sock, addr = self.socket.accept()
sslconn = ssl.wrap_socket(sock, server_side=True,
certfile=self.certfile)
return sslconn, addr
class RootedHTTPRequestHandler(SimpleHTTPRequestHandler):
# need to override translate_path to get a known root,
# instead of using os.curdir, since the test could be
# run from anywhere
server_version = "TestHTTPS/1.0"
root = None
def translate_path(self, path):
"""Translate a /-separated PATH to the local filename syntax.
Components that mean special things to the local file system
(e.g. drive or directory names) are ignored. (XXX They should
probably be diagnosed.)
"""
# abandon query parameters
path = urlparse.urlparse(path)[2]
path = os.path.normpath(urllib.unquote(path))
words = path.split('/')
words = filter(None, words)
path = self.root
for word in words:
drive, word = os.path.splitdrive(word)
head, word = os.path.split(word)
if word in self.root: continue
path = os.path.join(path, word)
return path
def log_message(self, format, *args):
# we override this to suppress logging unless "verbose"
if test_support.verbose:
sys.stdout.write(" server (%s:%d %s):\n [%s] %s\n" %
(self.server.server_address,
self.server.server_port,
self.request.cipher(),
self.log_date_time_string(),
format%args))
def __init__(self, certfile):
self.flag = None
self.RootedHTTPRequestHandler.root = os.path.split(CERTFILE)[0]
self.server = self.HTTPSServer(
(HOST, 0), self.RootedHTTPRequestHandler, certfile)
self.port = self.server.server_port
threading.Thread.__init__(self)
self.daemon = True
def __str__(self):
return "<%s %s>" % (self.__class__.__name__, self.server)
def start(self, flag=None):
self.flag = flag
threading.Thread.start(self)
def run(self):
if self.flag:
self.flag.set()
self.server.serve_forever(0.05)
def stop(self):
self.server.shutdown()
def bad_cert_test(certfile):
"""
Launch a server with CERT_REQUIRED, and check that trying to
connect to it with the given client certificate fails.
"""
server = ThreadedEchoServer(CERTFILE,
certreqs=ssl.CERT_REQUIRED,
cacerts=CERTFILE, chatty=False)
with server:
try:
s = ssl.wrap_socket(socket.socket(),
certfile=certfile,
ssl_version=ssl.PROTOCOL_TLSv1)
s.connect((HOST, server.port))
except ssl.SSLError, x:
if test_support.verbose:
sys.stdout.write("\nSSLError is %s\n" % x[1])
except socket.error, x:
if test_support.verbose:
sys.stdout.write("\nsocket.error is %s\n" % x[1])
else:
raise AssertionError("Use of invalid cert should have failed!")
def server_params_test(certfile, protocol, certreqs, cacertsfile,
client_certfile, client_protocol=None, indata="FOO\n",
ciphers=None, chatty=True, connectionchatty=False,
wrap_accepting_socket=False):
"""
Launch a server, connect a client to it and try various reads
and writes.
"""
server = ThreadedEchoServer(certfile,
certreqs=certreqs,
ssl_version=protocol,
cacerts=cacertsfile,
ciphers=ciphers,
chatty=chatty,
connectionchatty=connectionchatty,
wrap_accepting_socket=wrap_accepting_socket)
with server:
# try to connect
if client_protocol is None:
client_protocol = protocol
s = ssl.wrap_socket(socket.socket(),
certfile=client_certfile,
ca_certs=cacertsfile,
ciphers=ciphers,
cert_reqs=certreqs,
ssl_version=client_protocol)
s.connect((HOST, server.port))
for arg in [indata, bytearray(indata), memoryview(indata)]:
if connectionchatty:
if test_support.verbose:
sys.stdout.write(
" client: sending %s...\n" % (repr(arg)))
s.write(arg)
outdata = s.read()
if connectionchatty:
if test_support.verbose:
sys.stdout.write(" client: read %s\n" % repr(outdata))
if outdata != indata.lower():
raise AssertionError(
"bad data <<%s>> (%d) received; expected <<%s>> (%d)\n"
% (outdata[:min(len(outdata),20)], len(outdata),
indata[:min(len(indata),20)].lower(), len(indata)))
s.write("over\n")
if connectionchatty:
if test_support.verbose:
sys.stdout.write(" client: closing connection.\n")
s.close()
def try_protocol_combo(server_protocol,
client_protocol,
expect_success,
certsreqs=None):
if certsreqs is None:
certsreqs = ssl.CERT_NONE
certtype = {
ssl.CERT_NONE: "CERT_NONE",
ssl.CERT_OPTIONAL: "CERT_OPTIONAL",
ssl.CERT_REQUIRED: "CERT_REQUIRED",
}[certsreqs]
if test_support.verbose:
formatstr = (expect_success and " %s->%s %s\n") or " {%s->%s} %s\n"
sys.stdout.write(formatstr %
(ssl.get_protocol_name(client_protocol),
ssl.get_protocol_name(server_protocol),
certtype))
try:
# NOTE: we must enable "ALL" ciphers, otherwise an SSLv23 client
# will send an SSLv3 hello (rather than SSLv2) starting from
# OpenSSL 1.0.0 (see issue #8322).
server_params_test(CERTFILE, server_protocol, certsreqs,
CERTFILE, CERTFILE, client_protocol,
ciphers="ALL", chatty=False)
# Protocol mismatch can result in either an SSLError, or a
# "Connection reset by peer" error.
except ssl.SSLError:
if expect_success:
raise
except socket.error as e:
if expect_success or e.errno != errno.ECONNRESET:
raise
else:
if not expect_success:
raise AssertionError(
"Client protocol %s succeeded with server protocol %s!"
% (ssl.get_protocol_name(client_protocol),
ssl.get_protocol_name(server_protocol)))
class ThreadedTests(unittest.TestCase):
def test_rude_shutdown(self):
"""A brutal shutdown of an SSL server should raise an IOError
in the client when attempting handshake.
"""
listener_ready = threading.Event()
listener_gone = threading.Event()
s = socket.socket()
port = test_support.bind_port(s, HOST)
# `listener` runs in a thread. It sits in an accept() until
# the main thread connects. Then it rudely closes the socket,
# and sets Event `listener_gone` to let the main thread know
# the socket is gone.
def listener():
s.listen(5)
listener_ready.set()
s.accept()
s.close()
listener_gone.set()
def connector():
listener_ready.wait()
c = socket.socket()
c.connect((HOST, port))
listener_gone.wait()
try:
ssl_sock = ssl.wrap_socket(c)
except IOError:
pass
else:
self.fail('connecting to closed SSL socket should have failed')
t = threading.Thread(target=listener)
t.start()
try:
connector()
finally:
t.join()
@skip_if_broken_ubuntu_ssl
def test_echo(self):
"""Basic test of an SSL client connecting to a server"""
if test_support.verbose:
sys.stdout.write("\n")
server_params_test(CERTFILE, ssl.PROTOCOL_TLSv1, ssl.CERT_NONE,
CERTFILE, CERTFILE, ssl.PROTOCOL_TLSv1,
chatty=True, connectionchatty=True)
def test_getpeercert(self):
if test_support.verbose:
sys.stdout.write("\n")
s2 = socket.socket()
server = ThreadedEchoServer(CERTFILE,
certreqs=ssl.CERT_NONE,
ssl_version=ssl.PROTOCOL_SSLv23,
cacerts=CERTFILE,
chatty=False)
with server:
s = ssl.wrap_socket(socket.socket(),
certfile=CERTFILE,
ca_certs=CERTFILE,
cert_reqs=ssl.CERT_REQUIRED,
ssl_version=ssl.PROTOCOL_SSLv23)
s.connect((HOST, server.port))
cert = s.getpeercert()
self.assertTrue(cert, "Can't get peer certificate.")
cipher = s.cipher()
if test_support.verbose:
sys.stdout.write(pprint.pformat(cert) + '\n')
sys.stdout.write("Connection cipher is " + str(cipher) + '.\n')
if 'subject' not in cert:
self.fail("No subject field in certificate: %s." %
pprint.pformat(cert))
if ((('organizationName', 'Python Software Foundation'),)
not in cert['subject']):
self.fail(
"Missing or invalid 'organizationName' field in certificate subject; "
"should be 'Python Software Foundation'.")
s.close()
def test_empty_cert(self):
"""Connecting with an empty cert file"""
bad_cert_test(os.path.join(os.path.dirname(__file__) or os.curdir,
"nullcert.pem"))
def test_malformed_cert(self):
"""Connecting with a badly formatted certificate (syntax error)"""
bad_cert_test(os.path.join(os.path.dirname(__file__) or os.curdir,
"badcert.pem"))
def test_nonexisting_cert(self):
"""Connecting with a non-existing cert file"""
bad_cert_test(os.path.join(os.path.dirname(__file__) or os.curdir,
"wrongcert.pem"))
def test_malformed_key(self):
"""Connecting with a badly formatted key (syntax error)"""
bad_cert_test(os.path.join(os.path.dirname(__file__) or os.curdir,
"badkey.pem"))
@skip_if_broken_ubuntu_ssl
def test_protocol_sslv2(self):
"""Connecting to an SSLv2 server with various client options"""
if test_support.verbose:
sys.stdout.write("\n")
if not hasattr(ssl, 'PROTOCOL_SSLv2'):
self.skipTest("PROTOCOL_SSLv2 needed")
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv2, True)
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv2, True, ssl.CERT_OPTIONAL)
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv2, True, ssl.CERT_REQUIRED)
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv23, True)
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv3, False)
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_TLSv1, False)
@skip_if_broken_ubuntu_ssl
def test_protocol_sslv23(self):
"""Connecting to an SSLv23 server with various client options"""
if test_support.verbose:
sys.stdout.write("\n")
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, True)
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv23, True)
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, True)
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, True, ssl.CERT_OPTIONAL)
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv23, True, ssl.CERT_OPTIONAL)
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, True, ssl.CERT_OPTIONAL)
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, True, ssl.CERT_REQUIRED)
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv23, True, ssl.CERT_REQUIRED)
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, True, ssl.CERT_REQUIRED)
@skip_if_broken_ubuntu_ssl
def test_protocol_sslv3(self):
"""Connecting to an SSLv3 server with various client options"""
if test_support.verbose:
sys.stdout.write("\n")
try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv3, True)
try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv3, True, ssl.CERT_OPTIONAL)
try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv3, True, ssl.CERT_REQUIRED)
if hasattr(ssl, 'PROTOCOL_SSLv2'):
try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv2, False)
try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_TLSv1, False)
@skip_if_broken_ubuntu_ssl
def test_protocol_tlsv1(self):
"""Connecting to a TLSv1 server with various client options"""
if test_support.verbose:
sys.stdout.write("\n")
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, True)
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, True, ssl.CERT_OPTIONAL)
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, True, ssl.CERT_REQUIRED)
if hasattr(ssl, 'PROTOCOL_SSLv2'):
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_SSLv2, False)
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_SSLv3, False)
def test_starttls(self):
"""Switching from clear text to encrypted and back again."""
msgs = ("msg 1", "MSG 2", "STARTTLS", "MSG 3", "msg 4", "ENDTLS", "msg 5", "msg 6")
server = ThreadedEchoServer(CERTFILE,
ssl_version=ssl.PROTOCOL_TLSv1,
starttls_server=True,
chatty=True,
connectionchatty=True)
wrapped = False
with server:
s = socket.socket()
s.setblocking(1)
s.connect((HOST, server.port))
if test_support.verbose:
sys.stdout.write("\n")
for indata in msgs:
if test_support.verbose:
sys.stdout.write(
" client: sending %s...\n" % repr(indata))
if wrapped:
conn.write(indata)
outdata = conn.read()
else:
s.send(indata)
outdata = s.recv(1024)
if (indata == "STARTTLS" and
outdata.strip().lower().startswith("ok")):
# STARTTLS ok, switch to secure mode
if test_support.verbose:
sys.stdout.write(
" client: read %s from server, starting TLS...\n"
% repr(outdata))
conn = ssl.wrap_socket(s, ssl_version=ssl.PROTOCOL_TLSv1)
wrapped = True
elif (indata == "ENDTLS" and
outdata.strip().lower().startswith("ok")):
# ENDTLS ok, switch back to clear text
if test_support.verbose:
sys.stdout.write(
" client: read %s from server, ending TLS...\n"
% repr(outdata))
s = conn.unwrap()
wrapped = False
else:
if test_support.verbose:
sys.stdout.write(
" client: read %s from server\n" % repr(outdata))
if test_support.verbose:
sys.stdout.write(" client: closing connection.\n")
if wrapped:
conn.write("over\n")
else:
s.send("over\n")
s.close()
def test_socketserver(self):
"""Using a SocketServer to create and manage SSL connections."""
server = SocketServerHTTPSServer(CERTFILE)
flag = threading.Event()
server.start(flag)
# wait for it to start
flag.wait()
# try to connect
try:
if test_support.verbose:
sys.stdout.write('\n')
with open(CERTFILE, 'rb') as f:
d1 = f.read()
d2 = ''
# now fetch the same data from the HTTPS server
url = 'https://127.0.0.1:%d/%s' % (
server.port, os.path.split(CERTFILE)[1])
with test_support.check_py3k_warnings():
f = urllib.urlopen(url)
dlen = f.info().getheader("content-length")
if dlen and (int(dlen) > 0):
d2 = f.read(int(dlen))
if test_support.verbose:
sys.stdout.write(
" client: read %d bytes from remote server '%s'\n"
% (len(d2), server))
f.close()
self.assertEqual(d1, d2)
finally:
server.stop()
server.join()
def test_wrapped_accept(self):
"""Check the accept() method on SSL sockets."""
if test_support.verbose:
sys.stdout.write("\n")
server_params_test(CERTFILE, ssl.PROTOCOL_SSLv23, ssl.CERT_REQUIRED,
CERTFILE, CERTFILE, ssl.PROTOCOL_SSLv23,
chatty=True, connectionchatty=True,
wrap_accepting_socket=True)
def test_asyncore_server(self):
"""Check the example asyncore integration."""
indata = "TEST MESSAGE of mixed case\n"
if test_support.verbose:
sys.stdout.write("\n")
server = AsyncoreEchoServer(CERTFILE)
with server:
s = ssl.wrap_socket(socket.socket())
s.connect(('127.0.0.1', server.port))
if test_support.verbose:
sys.stdout.write(
" client: sending %s...\n" % (repr(indata)))
s.write(indata)
outdata = s.read()
if test_support.verbose:
sys.stdout.write(" client: read %s\n" % repr(outdata))
if outdata != indata.lower():
self.fail(
"bad data <<%s>> (%d) received; expected <<%s>> (%d)\n"
% (outdata[:min(len(outdata),20)], len(outdata),
indata[:min(len(indata),20)].lower(), len(indata)))
s.write("over\n")
if test_support.verbose:
sys.stdout.write(" client: closing connection.\n")
s.close()
def test_recv_send(self):
"""Test recv(), send() and friends."""
if test_support.verbose:
sys.stdout.write("\n")
server = ThreadedEchoServer(CERTFILE,
certreqs=ssl.CERT_NONE,
ssl_version=ssl.PROTOCOL_TLSv1,
cacerts=CERTFILE,
chatty=True,
connectionchatty=False)
with server:
s = ssl.wrap_socket(socket.socket(),
server_side=False,
certfile=CERTFILE,
ca_certs=CERTFILE,
cert_reqs=ssl.CERT_NONE,
ssl_version=ssl.PROTOCOL_TLSv1)
s.connect((HOST, server.port))
# helper methods for standardising recv* method signatures
def _recv_into():
b = bytearray("\0"*100)
count = s.recv_into(b)
return b[:count]
def _recvfrom_into():
b = bytearray("\0"*100)
count, addr = s.recvfrom_into(b)
return b[:count]
# (name, method, whether to expect success, *args)
send_methods = [
('send', s.send, True, []),
('sendto', s.sendto, False, ["some.address"]),
('sendall', s.sendall, True, []),
]
recv_methods = [
('recv', s.recv, True, []),
('recvfrom', s.recvfrom, False, ["some.address"]),
('recv_into', _recv_into, True, []),
('recvfrom_into', _recvfrom_into, False, []),
]
data_prefix = u"PREFIX_"
for meth_name, send_meth, expect_success, args in send_methods:
indata = data_prefix + meth_name
try:
send_meth(indata.encode('ASCII', 'strict'), *args)
outdata = s.read()
outdata = outdata.decode('ASCII', 'strict')
if outdata != indata.lower():
self.fail(
"While sending with <<%s>> bad data "
"<<%r>> (%d) received; "
"expected <<%r>> (%d)\n" % (
meth_name, outdata[:20], len(outdata),
indata[:20], len(indata)
)
)
except ValueError as e:
if expect_success:
self.fail(
"Failed to send with method <<%s>>; "
"expected to succeed.\n" % (meth_name,)
)
if not str(e).startswith(meth_name):
self.fail(
"Method <<%s>> failed with unexpected "
"exception message: %s\n" % (
meth_name, e
)
)
for meth_name, recv_meth, expect_success, args in recv_methods:
indata = data_prefix + meth_name
try:
s.send(indata.encode('ASCII', 'strict'))
outdata = recv_meth(*args)
outdata = outdata.decode('ASCII', 'strict')
if outdata != indata.lower():
self.fail(
"While receiving with <<%s>> bad data "
"<<%r>> (%d) received; "
"expected <<%r>> (%d)\n" % (
meth_name, outdata[:20], len(outdata),
indata[:20], len(indata)
)
)
except ValueError as e:
if expect_success:
self.fail(
"Failed to receive with method <<%s>>; "
"expected to succeed.\n" % (meth_name,)
)
if not str(e).startswith(meth_name):
self.fail(
"Method <<%s>> failed with unexpected "
"exception message: %s\n" % (
meth_name, e
)
)
# consume data
s.read()
s.write("over\n".encode("ASCII", "strict"))
s.close()
def test_handshake_timeout(self):
# Issue #5103: SSL handshake must respect the socket timeout
server = socket.socket(socket.AF_INET)
host = "127.0.0.1"
port = test_support.bind_port(server)
started = threading.Event()
finish = False
def serve():
server.listen(5)
started.set()
conns = []
while not finish:
r, w, e = select.select([server], [], [], 0.1)
if server in r:
# Let the socket hang around rather than having
# it closed by garbage collection.
conns.append(server.accept()[0])
t = threading.Thread(target=serve)
t.start()
started.wait()
try:
try:
c = socket.socket(socket.AF_INET)
c.settimeout(0.2)
c.connect((host, port))
# Will attempt handshake and time out
self.assertRaisesRegexp(ssl.SSLError, "timed out",
ssl.wrap_socket, c)
finally:
c.close()
try:
c = socket.socket(socket.AF_INET)
c.settimeout(0.2)
c = ssl.wrap_socket(c)
# Will attempt handshake and time out
self.assertRaisesRegexp(ssl.SSLError, "timed out",
c.connect, (host, port))
finally:
c.close()
finally:
finish = True
t.join()
server.close()
def test_default_ciphers(self):
with ThreadedEchoServer(CERTFILE,
ssl_version=ssl.PROTOCOL_SSLv23,
chatty=False) as server:
sock = socket.socket()
try:
# Force a set of weak ciphers on our client socket
try:
s = ssl.wrap_socket(sock,
ssl_version=ssl.PROTOCOL_SSLv23,
ciphers="DES")
except ssl.SSLError:
self.skipTest("no DES cipher available")
with self.assertRaises((OSError, ssl.SSLError)):
s.connect((HOST, server.port))
finally:
sock.close()
self.assertIn("no shared cipher", str(server.conn_errors[0]))
def test_main(verbose=False):
global CERTFILE, SVN_PYTHON_ORG_ROOT_CERT, NOKIACERT
CERTFILE = os.path.join(os.path.dirname(__file__) or os.curdir,
"keycert.pem")
SVN_PYTHON_ORG_ROOT_CERT = os.path.join(
os.path.dirname(__file__) or os.curdir,
"https_svn_python_org_root.pem")
NOKIACERT = os.path.join(os.path.dirname(__file__) or os.curdir,
"nokia.pem")
if (not os.path.exists(CERTFILE) or
not os.path.exists(SVN_PYTHON_ORG_ROOT_CERT) or
not os.path.exists(NOKIACERT)):
raise test_support.TestFailed("Can't read certificate files!")
tests = [BasicTests, BasicSocketTests]
if test_support.is_resource_enabled('network'):
tests.append(NetworkedTests)
if _have_threads:
thread_info = test_support.threading_setup()
if thread_info and test_support.is_resource_enabled('network'):
tests.append(ThreadedTests)
try:
test_support.run_unittest(*tests)
finally:
if _have_threads:
test_support.threading_cleanup(*thread_info)
if __name__ == "__main__":
test_main()
|
gpl-3.0
|
bmentges/django-cart
|
cart/cart.py
|
1
|
2554
|
import datetime
from django.db.models import Sum
from django.db.models import F
from . import models
CART_ID = 'CART-ID'
class ItemAlreadyExists(Exception):
pass
class ItemDoesNotExist(Exception):
pass
class Cart:
def __init__(self, request):
cart_id = request.session.get(CART_ID)
if cart_id:
cart = models.Cart.objects.filter(id=cart_id, checked_out=False).first()
if cart is None:
cart = self.new(request)
else:
cart = self.new(request)
self.cart = cart
def __iter__(self):
for item in self.cart.item_set.all():
yield item
def new(self, request):
cart = models.Cart.objects.create(creation_date=datetime.datetime.now())
request.session[CART_ID] = cart.id
return cart
def add(self, product, unit_price, quantity=1):
item = models.Item.objects.filter(cart=self.cart, product=product).first()
if item:
item.unit_price = unit_price
item.quantity += int(quantity)
item.save()
else:
models.Item.objects.create(cart=self.cart, product=product, unit_price=unit_price, quantity=quantity)
def remove(self, product):
item = models.Item.objects.filter(cart=self.cart, product=product).first()
if item:
item.delete()
else:
raise ItemDoesNotExist
def update(self, product, quantity, unit_price=None):
item = models.Item.objects.filter(cart=self.cart, product=product).first()
if item:
if quantity == 0:
item.delete()
else:
item.unit_price = unit_price
item.quantity = int(quantity)
item.save()
else:
raise ItemDoesNotExist
    def count(self):
        # aggregate() returns {'quantity__sum': None} for an empty cart, so fall back to 0
        return self.cart.item_set.all().aggregate(Sum('quantity')).get('quantity__sum') or 0
    def summary(self):
        # likewise, the total is None when the cart has no items
        return self.cart.item_set.all().aggregate(total=Sum(F('quantity')*F('unit_price'))).get('total') or 0
def clear(self):
self.cart.item_set.all().delete()
def is_empty(self):
return self.count() == 0
def cart_serializable(self):
representation = {}
for item in self.cart.item_set.all():
item_id = str(item.object_id)
item_dict = {
'total_price': item.total_price,
'quantity': item.quantity
}
representation[item_id] = item_dict
return representation
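# Illustrative usage sketch (not part of the original module): how a Django view
# might drive this Cart API. `request` is the view's HttpRequest; `product` and
# `unit_price` are assumed to be a saved model instance and a Decimal.
#
#     from cart.cart import Cart
#
#     def add_to_cart(request, product, unit_price):
#         cart = Cart(request)               # loads or creates the session cart
#         cart.add(product, unit_price, 2)   # upserts an Item, adding to its quantity
#         assert not cart.is_empty()
#         return cart.summary()              # Sum(quantity * unit_price), or 0 when empty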
|
lgpl-3.0
|
librallu/cohorte-herald
|
python/herald/remote/herald_xmlrpc.py
|
1
|
10255
|
#!/usr/bin/env python
# -- Content-Encoding: UTF-8 --
"""
Pelix remote services implementation based on Herald messaging and xmlrpclib
:author: Thomas Calmant
:copyright: Copyright 2014, isandlaTech
:license: Apache License 2.0
:version: 0.0.3
:status: Alpha
..
Copyright 2014 isandlaTech
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Module version
__version_info__ = (0, 0, 3)
__version__ = ".".join(str(x) for x in __version_info__)
# Documentation strings format
__docformat__ = "restructuredtext en"
# ------------------------------------------------------------------------------
# Herald
import herald.beans as beans
import herald.remote
# iPOPO decorators
from pelix.ipopo.decorators import ComponentFactory, Requires, Validate, \
Invalidate, Property, Provides, Instantiate
# Pelix constants
import pelix.remote
import pelix.remote.transport.commons as commons
# Standard library
import logging
# XML RPC modules
try:
# Python 3
# pylint: disable=F0401
from xmlrpc.server import SimpleXMLRPCDispatcher
import xmlrpc.client as xmlrpclib
except ImportError:
# Python 2
# pylint: disable=F0401
from SimpleXMLRPCServer import SimpleXMLRPCDispatcher
import xmlrpclib
# ------------------------------------------------------------------------------
HERALDRPC_CONFIGURATION = 'herald-xmlrpc'
""" Remote Service configuration constant """
PROP_HERALDRPC_PEER = "herald.rpc.peer"
""" UID of the peer exporting a service """
PROP_HERALDRPC_SUBJECT = 'herald.rpc.subject'
""" Subject to contact the exporter """
SUBJECT_REQUEST = 'herald/rpc/xmlrpc'
""" Subject to use for requests """
SUBJECT_REPLY = 'herald/rpc/xmlrpc/reply'
""" Subject to use for replies """
_logger = logging.getLogger(__name__)
# ------------------------------------------------------------------------------
class _XmlRpcDispatcher(SimpleXMLRPCDispatcher):
"""
A XML-RPC dispatcher with a custom dispatch method
Calls the dispatch method given in the constructor
"""
def __init__(self, dispatch_method, encoding=None):
"""
Sets up the servlet
"""
SimpleXMLRPCDispatcher.__init__(self, allow_none=True,
encoding=encoding)
# Register the system.* functions
self.register_introspection_functions()
# Make a link to the dispatch method
self._dispatch_method = dispatch_method
def _simple_dispatch(self, name, params):
"""
Dispatch method
"""
try:
# Internal method
return self.funcs[name](*params)
except KeyError:
# Other method
pass
# Call the other method outside the except block, to avoid messy logs
# in case of error
return self._dispatch_method(name, params)
def dispatch(self, data):
"""
        Handles an HTTP POST request
:param data: The string content of the request
:return: The XML-RPC response as a string
"""
# Dispatch
return self._marshaled_dispatch(data, self._simple_dispatch)
@ComponentFactory(herald.remote.FACTORY_HERALD_XMLRPC_EXPORTER)
@Requires('_directory', herald.SERVICE_DIRECTORY)
# SERVICE_EXPORT_PROVIDER is provided by the parent class
@Provides(herald.SERVICE_LISTENER)
@Property('_filters', herald.PROP_FILTERS, [SUBJECT_REQUEST])
@Property('_kinds', pelix.remote.PROP_REMOTE_CONFIGS_SUPPORTED,
(HERALDRPC_CONFIGURATION,))
@Instantiate('herald-rpc-exporter-xmlrpc')
class HeraldRpcServiceExporter(commons.AbstractRpcServiceExporter):
"""
Herald Remote Services exporter
"""
def __init__(self):
"""
Sets up the exporter
"""
# Call parent
super(HeraldRpcServiceExporter, self).__init__()
# Herald directory
self._directory = None
# Herald filters
self._filters = None
# Handled configurations
self._kinds = None
# Dispatcher
self._dispatcher = None
def make_endpoint_properties(self, svc_ref, name, fw_uid):
"""
Prepare properties for the ExportEndpoint to be created
:param svc_ref: Service reference
:param name: Endpoint name
:param fw_uid: Framework UID
:return: A dictionary of extra endpoint properties
"""
return {PROP_HERALDRPC_PEER: self._directory.local_uid,
PROP_HERALDRPC_SUBJECT: SUBJECT_REQUEST}
@Validate
def validate(self, context):
"""
Component validated
"""
# Call parent
super(HeraldRpcServiceExporter, self).validate(context)
# Setup the dispatcher
self._dispatcher = _XmlRpcDispatcher(self.dispatch)
@Invalidate
def invalidate(self, context):
"""
Component invalidated
"""
# Call parent
super(HeraldRpcServiceExporter, self).invalidate(context)
# Clean up
self._dispatcher = None
def herald_message(self, herald_svc, message):
"""
Received a message from Herald
:param herald_svc: The Herald service
:param message: A message bean
"""
result = self._dispatcher.dispatch(message.content)
# answer to the message
reply_msg = beans.Message(SUBJECT_REPLY, result)
reply_msg.add_header('replies-to', message.uid)
origin = message.get_header('original_sender')
if origin is None: # in the case it was not routed
origin = message.sender
herald_svc.fire(origin, reply_msg)
# ------------------------------------------------------------------------------
class _XmlRpcEndpointProxy(object):
"""
Proxy to use XML-RPC over Herald
"""
def __init__(self, name, peer, subject, send_method):
"""
Sets up the endpoint proxy
:param name: End point name
:param peer: UID of the peer to contact
:param subject: Subject to use for RPC
:param send_method: Method to use to send a request
"""
self.__name = name
self.__peer = peer
self.__subject = subject
self.__send = send_method
self.__cache = {}
def __getattr__(self, name):
"""
Prefixes the requested attribute name by the endpoint name
"""
return self.__cache.setdefault(
name, _XmlRpcMethod("{0}.{1}".format(self.__name, name),
self.__peer, self.__subject, self.__send))
class _XmlRpcMethod(object):
"""
Represents a method in a call proxy
"""
def __init__(self, method_name, peer, subject, send_method):
"""
Sets up the method
:param method_name: Full method name
:param peer: UID of the peer to contact
:param subject: Subject to use for RPC
:param send_method: Method to use to send a request
"""
self.__name = method_name
self.__peer = peer
self.__subject = subject
self.__send = send_method
def __call__(self, *args):
"""
Method is being called
"""
# Forge the request
request = xmlrpclib.dumps(args, self.__name, encoding='utf-8',
allow_none=True)
# Send it
reply_message = self.__send(self.__peer, self.__subject, request)
# Parse the reply
parser, unmarshaller = xmlrpclib.getparser()
parser.feed(reply_message.content)
parser.close()
return unmarshaller.close()
@ComponentFactory(herald.remote.FACTORY_HERALD_XMLRPC_IMPORTER)
@Requires('_herald', herald.SERVICE_HERALD)
@Requires('_directory', herald.SERVICE_DIRECTORY)
@Provides(pelix.remote.SERVICE_IMPORT_ENDPOINT_LISTENER)
@Property('_kinds', pelix.remote.PROP_REMOTE_CONFIGS_SUPPORTED,
(HERALDRPC_CONFIGURATION,))
@Instantiate('herald-rpc-importer-xmlrpc')
class HeraldRpcServiceImporter(commons.AbstractRpcServiceImporter):
"""
XML-RPC Remote Services importer
"""
def __init__(self):
"""
        Sets up the importer
"""
# Call parent
super(HeraldRpcServiceImporter, self).__init__()
# Herald service
self._herald = None
# Component properties
self._kinds = None
def __call(self, peer, subject, content):
"""
Method called by the proxy to send a message over Herald
"""
msg = beans.Message(subject, content)
msg.add_header('original_sender', self._directory.local_uid)
return self._herald.send(peer, msg)
def make_service_proxy(self, endpoint):
"""
Creates the proxy for the given ImportEndpoint
:param endpoint: An ImportEndpoint bean
:return: A service proxy
"""
# Get Peer UID information
peer_uid = endpoint.properties.get(PROP_HERALDRPC_PEER)
if not peer_uid:
_logger.warning("Herald-RPC endpoint without peer UID: %s",
endpoint)
return
# Get request subject information
subject = endpoint.properties.get(PROP_HERALDRPC_SUBJECT)
if not subject:
_logger.warning("Herald-RPC endpoint without subject: %s",
endpoint)
return
# Return the proxy
return _XmlRpcEndpointProxy(endpoint.name, peer_uid, subject,
self.__call)
def clear_service_proxy(self, endpoint):
"""
Destroys the proxy made for the given ImportEndpoint
:param endpoint: An ImportEndpoint bean
"""
# Nothing to do
return
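# Rough sketch of the call flow (illustrative only, not original code): the importer
# wraps each ImportEndpoint in an _XmlRpcEndpointProxy, so a remote call looks like a
# plain attribute access. `endpoint` is assumed to be an ImportEndpoint whose
# properties carry PROP_HERALDRPC_PEER and PROP_HERALDRPC_SUBJECT.
#
#     proxy = importer.make_service_proxy(endpoint)
#     result = proxy.some_method(1, 2)
#     # -> xmlrpclib.dumps((1, 2), "<endpoint.name>.some_method") is sent to the
#     #    exporting peer over Herald, and the XML-RPC reply is unmarshalled.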
|
apache-2.0
|
mateon1/servo
|
tests/wpt/css-tests/tools/html5lib/doc/conf.py
|
436
|
9028
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# html5lib documentation build configuration file, created by
# sphinx-quickstart on Wed May 8 00:04:49 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'html5lib'
copyright = '2006 - 2013, James Graham, Geoffrey Sneddon, and contributors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0'
# The full version, including alpha/beta/rc tags.
sys.path.append(os.path.abspath('..'))
from html5lib import __version__
release = __version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = 'en'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build', 'theme']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'html5libdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'html5lib.tex', 'html5lib Documentation',
'James Graham, Geoffrey Sneddon, and contributors', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'html5lib', 'html5lib Documentation',
['James Graham, Geoffrey Sneddon, and contributors'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'html5lib', 'html5lib Documentation',
'James Graham, Geoffrey Sneddon, and contributors', 'html5lib', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
class CExtMock(object):
"""Required for autodoc on readthedocs.org where you cannot build C extensions."""
def __init__(self, *args, **kwargs):
pass
def __call__(self, *args, **kwargs):
return CExtMock()
@classmethod
def __getattr__(cls, name):
if name in ('__file__', '__path__'):
return '/dev/null'
else:
return CExtMock()
try:
import lxml # flake8: noqa
except ImportError:
sys.modules['lxml'] = CExtMock()
sys.modules['lxml.etree'] = CExtMock()
print("warning: lxml modules mocked.")
try:
import genshi # flake8: noqa
except ImportError:
sys.modules['genshi'] = CExtMock()
sys.modules['genshi.core'] = CExtMock()
print("warning: genshi modules mocked.")
|
mpl-2.0
|
mediachain/cccoin
|
node/node_temporal.py
|
1
|
24616
|
#!/usr/bin/env python
"""
Maintains a stable view of / access to state:
- Combines multiple underlying blockchain sources,
- Temporally tracks degrees of confidence based on age, with chain reorg support.
"""
##
## Note: BUG1 refers to,
##
## When you have nested manager.dict()'s, instead of:
## h['a']['b'] = 'c'
##
## You must instead do this:
## y = h['a']
## y['b'] = 'c'
## h['a'] = y
##
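## Written out, the workaround above looks like this (sketch only; `manager` is a
## multiprocessing.Manager() instance, and the update may silently be lost if the
## nested dict is mutated in place instead of being written back):
##
##     h = manager.dict()
##     h['a'] = manager.dict()
##     y = h['a']        # fetch the nested dict/proxy
##     y['b'] = 'c'      # mutate the local handle
##     h['a'] = y        # write it back so the update is visible to other processes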
from sys import maxint
import threading
import multiprocessing
class TemporalTable:
"""
Temporal in-memory database. Update and lookup historical values of a key.
TODO:
- Also create version with SQL backend.
"""
def __init__(self,
process_safe = True,
manager = False,
):
self.process_safe = process_safe
assert process_safe, 'TODO - double check thread-based concurrency support by TemporalForks & TemporalDB'
if self.process_safe:
if manager is not False:
self.manager = manager
else:
self.manager = multiprocessing.Manager()
self.process_safe = True
self.the_lock = self.manager.RLock()
self.hh = self.manager.dict()
self.current_latest = self.manager.dict()
self.all_block_nums = self.manager.dict()
self.largest_pruned = self.manager.Value('i', -maxint)
else:
self.process_safe = False
self.the_lock = threading.RLock()
self.hh = {} ## {key:{block_num:value}}
self.current_latest = {} ## {key:block_num}
self.all_block_nums = {} ## set, but have to use dict
self.largest_pruned = -maxint
def _get_largest_pruned(self):
if self.process_safe:
return self.largest_pruned.value
else:
return self.largest_pruned
def _set_largest_pruned(self, val):
if self.process_safe:
self.largest_pruned.value = val
else:
self.largest_pruned = val
def store(self, key, value, start_block, as_set_op = False, remove_set_op = False):
""" """
print ('TemporalTable.store()', locals())
assert start_block != -maxint, 'ERROR: -MAXINT IS RESERVED'
print ('STORE', key, value)
with self.the_lock:
if key not in self.hh:
if self.process_safe:
self.hh[key] = self.manager.dict()
if not as_set_op:
#self.hh[key][start_block] = value
## BUG1:
tm = self.hh[key]
tm[start_block] = value
self.hh[key] = tm
else:
self.hh[key] = {}
if as_set_op:
if start_block not in self.hh[key]:
if self.process_safe:
if as_set_op:
## copy whole previous in:
tm = self.hh[key]
if key in self.current_latest:
tm[start_block] = self.hh[key][self.current_latest[key]].copy()
else:
tm[start_block] = self.manager.dict()
self.hh[key] = tm
else:
## BUG1:
#self.hh[key][start_block] = self.manager.dict()
tm = self.hh[key]
tm[start_block] = self.manager.dict()
self.hh[key] = tm
else:
self.hh[key][start_block] = {}
## BUG1:
#self.hh[key][start_block][value] = True ## Must have already setup in previous call.
tm = self.hh[key]
tm2 = tm[start_block]
#print 'aa', key ,start_block, self.hh
#print 'hh', self.hh[key][start_block]
if remove_set_op:
tm2[value] = False ## Must have already setup in previous call.
else:
tm2[value] = True ## Must have already setup in previous call.
tm[start_block] = tm2
self.hh[key] = tm
else:
#self.hh[key][start_block] = value
## BUG1:
tm = self.hh[key]
tm[start_block] = value
self.hh[key] = tm
self.current_latest[key] = max(start_block, self.current_latest.get(key, -maxint))
self.all_block_nums[start_block] = True
def remove(self, key, value, start_block, as_set_op = False):
print ('REMOVE', locals())
with self.the_lock:
if as_set_op:
self.store(key, value, start_block, as_set_op = True, remove_set_op = True)
return
if False:
###
if start_block not in self.hh[key]:
if self.process_safe:
if as_set_op:
## copy whole previous in:
tm = self.hh[key]
if key in self.current_latest:
tm[start_block] = self.hh[key][self.current_latest[key]].copy()
else:
tm[start_block] = self.manager.dict()
self.hh[key] = tm
else:
## BUG1:
#self.hh[key][start_block] = self.manager.dict()
tm = self.hh[key]
tm[start_block] = self.manager.dict()
self.hh[key] = tm
else:
self.hh[key][start_block] = {}
###
## BUG1:
#del self.hh[key][start_block][value] ## Must have already setup in previous call.
tm = self.hh[key]
tm2 = tm[start_block]
del tm2[value]
tm[start_block] = tm2
self.hh[key] = tm
else:
## BUG1:
#del self.hh[key][start_block]
tm = self.hh[key]
del tm[start_block]
self.hh[key] = tm
self.current_latest[key] = max(start_block, self.current_latest.get(key, -maxint))
def lookup(self, key, start_block = -maxint, end_block = 'latest', default = KeyError, with_block_num = True):
""" Return only latest, between start_block and end_block. """
assert with_block_num
with self.the_lock:
if (start_block > -maxint) and (start_block <= self._get_largest_pruned()):
assert False, ('PREVIOUSLY_PRUNED_REQUESTED_BLOCK', start_block, self._get_largest_pruned())
if (key not in self.hh) or (not self.hh[key]):
if default is KeyError:
raise KeyError
if with_block_num:
return default, False
else:
return default
## Latest:
if end_block == 'latest':
end_block = max(end_block, self.current_latest[key])
## Exactly end_block:
if start_block == end_block:
if end_block in self.hh[key]:
if with_block_num:
return self.hh[key][end_block], start_block
else:
return self.hh[key][end_block]
## Closest <= block_num:
for xx in sorted(self.hh.get(key,{}).keys(), reverse = True):
if xx > end_block:
continue
if xx < start_block:
continue
if with_block_num:
return self.hh[key][xx], xx
else:
return self.hh[key][xx]
else:
if default is KeyError:
raise KeyError
if with_block_num:
return default, False
else:
return default
assert False,'should not reach'
def iterate_set_depth(self, start_block = -maxint, end_block = 'latest'):
## TODO
## [x.keys() for x in xx.tables['table2'].forks['fork1'].hh['z'].values()]
pass
def iterate_block_items(self, start_block = -maxint, end_block = 'latest'):
""" Iterate latest version of all known keys, between start_block and end_block. """
with self.the_lock:
for kk in self.current_latest.keys():
try:
rr, bn = self.lookup(kk, start_block, end_block)
except:
## not yet present in db
continue
yield (kk, rr)
def prune_historical(self, end_block):
""" Prune ONLY OUTDATED records prior to and including `end_block`, e.g. to clear outdated historical state. """
with self.the_lock:
for key in self.hh.keys():
for bn in sorted(self.hh.get(key,{}).keys()):
if bn > end_block:
break
## BUG1:
#del self.hh[key][bn]
tm = self.hh[key]
del tm[bn]
self.hh[key] = tm
            self._set_largest_pruned(max(end_block, self._get_largest_pruned()))
def wipe_newer(self, start_block):
""" Wipe blocks newer than and and including `start_block` e.g. for blockchain reorganization. """
with self.the_lock:
for key in self.hh.keys():
for bn in sorted(self.hh.get(key,{}).keys(), reverse = True):
if bn < start_block:
break
## BUG1:
#del self.hh[key][bn]
tm = self.hh[key]
del tm[bn]
self.hh[key] = tm
T_ANY_FORK = 'T_ANY_FORK'
class TemporalForks:
"""
A collection of `TemporalTable`s, one for each fork being tracked.
Lookup latest state, resolved from multiple forks.
    Discard keys from non-master forks that never got confirmed within max_non_master_age.
    Use T_ANY_FORK to indicate that an action should be applied to / resolved across all forks.
"""
def __init__(self,
master_fork_name, ## 'name'
fork_names, ## ['name']
max_non_master_age = False,
manager = False,
CONST_ANY_FORK = T_ANY_FORK, ## just in case you really need to change it
):
"""
- master_fork_name: name of master fork
- max_non_master_age: number of blocks before non-master blocks expire.
"""
assert master_fork_name in fork_names
assert CONST_ANY_FORK not in fork_names
self.T_ANY_FORK = CONST_ANY_FORK
if manager is not False:
self.manager = manager
else:
self.manager = multiprocessing.Manager()
self.forks = {} ## Doesn't ever change after this function, so regular dict.
for fork in fork_names:
self.forks[fork] = TemporalTable(process_safe = True, manager = self.manager)
self.master_fork_name = master_fork_name
self.max_non_master_age = max_non_master_age
self.latest_master_block_num = self.manager.Value('i', -maxint)
self.the_lock = self.manager.RLock()
def update_latest_master_block_num(self, block_num):
with self.the_lock:
self.latest_master_block_num.value = max(block_num, self.latest_master_block_num.value)
def store(self, fork_name, *args, **kw):
""" store in specific fork """
print ('TemporalForks.store()', locals())
with self.the_lock:
if True:
## You should still do this manually too:
if 'start_block' in kw:
sb = kw['start_block']
else:
sb = args[2]
self.update_latest_master_block_num(sb)
if fork_name == self.T_ANY_FORK:
assert False, 'really store in all forks?'
else:
assert fork_name in self.forks
self.forks[fork_name].store(*args, **kw)
def remove(self, fork_name, *args, **kw):
""" remove just from specific fork """
with self.the_lock:
if fork_name == self.T_ANY_FORK:
for fork_name, fork in self.forks.items():
fork.remove(*args, **kw)
else:
assert fork_name in self.forks
self.forks[fork_name].remove(*args, **kw)
def lookup(self, fork_name, key, start_block = -maxint, end_block = 'latest', default = KeyError):
""" Lookup latest non-expired from any fork. """
with self.the_lock:
if fork_name != self.T_ANY_FORK:
assert fork_name in self.forks, repr(fork_name)
return self.forks[fork_name].lookup(key = key,
start_block = start_block,
end_block = end_block,
default = default,
)
assert self.latest_master_block_num.value != -maxint, 'Must call self.update_latest_master_block_num() first.'
biggest_num = -maxint
biggest_val = False
start_block_non_master = start_block
if self.max_non_master_age is not False:
start_block_non_master = max(self.latest_master_block_num.value - self.max_non_master_age,
start_block,
)
for fork_name, fork in self.forks.items():
if fork_name != self.master_fork_name:
x_start_block = start_block_non_master
else:
x_start_block = start_block
try:
val, block_num = fork.lookup(key = key,
start_block = x_start_block,
end_block = end_block,
default = KeyError,
)
except KeyError:
continue
if block_num > biggest_num:
biggest_num = block_num
biggest_val = val
if biggest_num == -maxint:
if default is KeyError:
raise KeyError
return default, False
return biggest_val, biggest_num
def iterate_block_items(self, fork_name, start_block = -maxint, end_block = 'latest'):
with self.the_lock:
if fork_name != self.T_ANY_FORK:
assert fork_name in self.forks, repr(fork_name)
for xx in self.forks[fork_name].iterate_block_items(start_block, end_block):
yield xx
return
do_keys = set()
for fork in self.forks.values():
do_keys.update(fork.current_latest.keys())
for kk in do_keys:
try:
rr, bn = self.lookup(fork_name, kk, start_block = start_block, end_block = end_block)
except KeyError:
## not yet present in db
continue
yield (kk, rr)
def prune_historical(self, fork_name, *args, **kw):
with self.the_lock:
if fork_name != self.T_ANY_FORK:
assert fork_name in self.forks, repr(fork_name)
return self.forks[fork_name].prune_historical(*args, **kw)
for fork_name, fork in self.forks.items():
fork.prune_historical(*args, **kw)
def wipe_newer(self, fork_name, *args, **kw):
with self.the_lock:
if fork_name != self.T_ANY_FORK:
assert fork_name in self.forks, repr(fork_name)
return self.forks[fork_name].wipe_newer(*args, **kw)
for fork_name, fork in self.forks.items():
fork.wipe_newer(*args, **kw)
class TemporalDB:
"""
Synchronizes creation / access / updates to a collection of TemporalForks.
"""
def __init__(self,
table_names,
master_fork_name,
fork_names,
):
self.manager = multiprocessing.Manager()
self.the_lock = self.manager.RLock()
self.tables = {}
for table_name in table_names:
self.tables[table_name] = TemporalForks(master_fork_name = master_fork_name,
fork_names = fork_names,
manager = self.manager,
)
def __getattr__(self, func_name):
""" Proxy everything else through to appropriate TemporalForks. """
if func_name.startswith('all_'):
## Apply to all TemporalForks. Note, not for functions with return values:
def handle(*args, **kw):
x_func_name = func_name[4:]
print ('HANDLE_ALL', x_func_name, args, kw)
for table_name, table in self.tables.iteritems():
getattr(self.tables[table_name], x_func_name)(*args, **kw)
elif func_name.startswith('iterate_'):
## Apply to all TemporalForks. Note, not for functions with return values:
def handle(table_name, *args, **kw):
print ('HANDLE_ITER', table_name, func_name, args, kw)
return list(getattr(self.tables[table_name], func_name)(*args, **kw))
else:
## Proxy the rest to individual TemporalForks:
def handle(table_name, *args, **kw):
#print ('HANDLE', func_name, table_name, args, kw)
r = getattr(self.tables[table_name], func_name)(*args, **kw)
#print ('DONE_HANDLE', r)
return r
return handle
def test_temporal_table():
print ('START test_temporal_table()')
xx = TemporalTable()
xx.store('a', 'b', start_block = 1)
assert xx.lookup('a')[0] == 'b', xx.lookup('a')[0]
xx.store('a', 'c', start_block = 3)
assert xx.lookup('a')[0] == 'c', xx.lookup('a')[0]
xx.store('a', 'd', start_block = 2)
assert xx.lookup('a')[0] == 'c', xx.lookup('a')[0]
assert xx.lookup('a', end_block = 2)[0] == 'd', xx.lookup('a', end_block = 2)[0]
xx.store('e','h',1)
xx.store('e','f',2)
xx.store('e','g',3)
assert tuple(xx.iterate_block_items()) == (('a', 'c'), ('e', 'g'))
assert tuple(xx.iterate_block_items(end_block = 1)) == (('a', 'b'), ('e', 'h'))
print ('PASSED')
def test_temporal_forks():
print ('START test_temporal_forks()')
xx = TemporalForks(master_fork_name = 'fork1', fork_names = ['fork1', 'fork2'])
xx.update_latest_master_block_num(1)
xx.store('fork1', 'a', 'b', start_block = 1)
assert xx.lookup('fork1', 'a')[0] == 'b'
xx.update_latest_master_block_num(3)
xx.store('fork1', 'a', 'c', start_block = 3)
assert xx.lookup('fork1', 'a')[0] == 'c'
xx.update_latest_master_block_num(2)
xx.store('fork1', 'a', 'd', start_block = 2)
assert xx.lookup('fork1', 'a')[0] == 'c'
assert xx.lookup('fork1', 'a', end_block = 2)[0] == 'd'
xx.update_latest_master_block_num(1)
xx.store('fork1', 'e','h',1)
xx.update_latest_master_block_num(2)
xx.store('fork1', 'e','f',2)
xx.update_latest_master_block_num(3)
xx.store('fork1', 'e','g',3)
assert tuple(xx.iterate_block_items('fork1')) == (('a', 'c'), ('e', 'g'))
assert tuple(xx.iterate_block_items('fork1', end_block = 1)) == (('a', 'b'), ('e', 'h'))
print ('PASSED_FORKS_BASIC')
xx = TemporalForks(master_fork_name = 'fork1', fork_names = ['fork1', 'fork2'], max_non_master_age = 5)
xx.update_latest_master_block_num(1)
xx.store('fork1', 'z', 'e', start_block = 1)
xx.update_latest_master_block_num(2)
xx.store('fork2', 'z', 'g', start_block = 2)
assert xx.lookup(T_ANY_FORK, 'z')[0] == 'g'
xx.update_latest_master_block_num(50)
assert xx.lookup(T_ANY_FORK, 'z')[0] == 'e'
print ('PASSED_FORKS')
def test_temporal_db():
print ('START test_temporal_db()')
xx = TemporalDB(table_names = ['table1', 'table2'], master_fork_name = 'fork1', fork_names = ['fork1', 'fork2'])
xx.all_update_latest_master_block_num(1)
xx.store('table1', 'fork1', 'a', 'b', start_block = 1)
assert xx.lookup('table1', 'fork1', 'a')[0] == 'b'
xx.all_update_latest_master_block_num(3)
xx.store('table1', 'fork1', 'a', 'c', start_block = 3)
assert xx.lookup('table1', 'fork1', 'a')[0] == 'c'
xx.all_update_latest_master_block_num(2)
xx.store('table1', 'fork1', 'a', 'd', start_block = 2)
assert xx.lookup('table1', 'fork1', 'a')[0] == 'c'
assert xx.lookup('table1', 'fork1', 'a', end_block = 2)[0] == 'd'
xx.all_update_latest_master_block_num(1)
xx.store('table1', 'fork1', 'e','h',1)
xx.all_update_latest_master_block_num(2)
xx.store('table1', 'fork1', 'e','f',2)
xx.all_update_latest_master_block_num(3)
xx.store('table1', 'fork1', 'e','g',3)
assert tuple(xx.iterate_block_items('table1', 'fork1')) == (('a', 'c'), ('e', 'g'))
assert tuple(xx.iterate_block_items('table1', 'fork1', end_block = 1)) == (('a', 'b'), ('e', 'h'))
assert tuple(xx.iterate_block_items('table1', T_ANY_FORK, end_block = 1)) == (('a', 'b'), ('e', 'h'))
xx.store('table2', 'fork1', 'z', '1', start_block = 55, as_set_op = True,)
assert tuple(sorted(xx.lookup('table2', 'fork1', 'z', end_block = 57)[0].keys())) == ('1',)
xx.store('table2', 'fork1', 'z', '2', start_block = 56, as_set_op = True,)
assert tuple(sorted(xx.lookup('table2', 'fork1', 'z', end_block = 57)[0].keys())) == ('1','2',)
xx.store('table2', 'fork1', 'z', '3', start_block = 57, as_set_op = True,)
assert tuple(sorted(xx.lookup('table2', 'fork1', 'z', start_block = 57)[0].keys())) == ('1', '2', '3')
xx.remove('table2', 'fork1', 'z', '3', start_block = 58, as_set_op = True,)
assert tuple([a for a,b in xx.lookup('table2', 'fork1', 'z', start_block = 58)[0].items() if b]) == ('1', '2')
assert tuple([a for a,b in xx.lookup('table2', 'fork1', 'z', start_block = 56)[0].items() if b]) == ('1', '2')
assert tuple([a for a,b in xx.lookup('table2', 'fork1', 'z', end_block = 55)[0].items() if b]) == ('1',)
xx.remove('table2', 'fork1', 'z', '2', start_block = 59, as_set_op = True,)
assert tuple([a for a,b in xx.lookup('table2', 'fork1', 'z', end_block = 59)[0].items() if b]) == ('1',)
xx.remove('table2', 'fork1', 'z', '1', start_block = 60, as_set_op = True,)
assert tuple([a for a,b in xx.lookup('table2', 'fork1', 'z', end_block = 60)[0].items() if b]) == tuple()
xx.all_wipe_newer(T_ANY_FORK, start_block = 58)
assert tuple(sorted([a for a,b in xx.lookup('table2', 'fork1', 'z', end_block = 59)[0].items() if b])) == ('1','2','3')
#print '===FOUR ', list(sorted([(x,[a for a,b in y.items() if b]) for x,y in xx.tables['table2'].forks['fork1'].hh['z'].items()]))
print ('PASSED_DB_BASIC')
if __name__ == '__main__':
test_temporal_table()
test_temporal_forks()
test_temporal_db()
|
mit
|
chevanlol360/android_kernel_lge_vu2u
|
tools/perf/scripts/python/futex-contention.py
|
11261
|
1486
|
# futex contention
# (c) 2010, Arnaldo Carvalho de Melo <acme@redhat.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Translation of:
#
# http://sourceware.org/systemtap/wiki/WSFutexContention
#
# to perf python scripting.
#
# Measures futex contention
import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + '/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Util import *
thread_thislock = {}
thread_blocktime = {}
lock_waits = {} # long-lived stats on (tid,lock) blockage elapsed time
process_names = {} # long-lived pid-to-execname mapping
def syscalls__sys_enter_futex(event, ctxt, cpu, s, ns, tid, comm,
nr, uaddr, op, val, utime, uaddr2, val3):
cmd = op & FUTEX_CMD_MASK
if cmd != FUTEX_WAIT:
return # we don't care about originators of WAKE events
process_names[tid] = comm
thread_thislock[tid] = uaddr
thread_blocktime[tid] = nsecs(s, ns)
def syscalls__sys_exit_futex(event, ctxt, cpu, s, ns, tid, comm,
nr, ret):
if thread_blocktime.has_key(tid):
elapsed = nsecs(s, ns) - thread_blocktime[tid]
add_stats(lock_waits, (tid, thread_thislock[tid]), elapsed)
del thread_blocktime[tid]
del thread_thislock[tid]
def trace_begin():
print "Press control+C to stop and show the summary"
def trace_end():
for (tid, lock) in lock_waits:
min, max, avg, count = lock_waits[tid, lock]
print "%s[%d] lock %x contended %d times, %d avg ns" % \
(process_names[tid], tid, lock, count, avg)
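# Typical invocation (a sketch; exact tracepoint names depend on the kernel build):
#   perf record -e syscalls:sys_enter_futex -e syscalls:sys_exit_futex -a -- sleep 10
#   perf script -s futex-contention.py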
|
gpl-2.0
|
AutorestCI/azure-sdk-for-python
|
azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/storage_insight.py
|
2
|
2686
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .proxy_resource import ProxyResource
class StorageInsight(ProxyResource):
"""The top level storage insight resource container.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Resource ID.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param tags: Resource tags
:type tags: dict
:param containers: The names of the blob containers that the workspace
should read
:type containers: list of str
:param tables: The names of the Azure tables that the workspace should
read
:type tables: list of str
:param storage_account: The storage account connection details
:type storage_account: :class:`StorageAccount
<azure.mgmt.loganalytics.models.StorageAccount>`
:ivar status: The status of the storage insight
:vartype status: :class:`StorageInsightStatus
<azure.mgmt.loganalytics.models.StorageInsightStatus>`
:param e_tag: The ETag of the storage insight.
:type e_tag: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'storage_account': {'required': True},
'status': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'containers': {'key': 'properties.containers', 'type': '[str]'},
'tables': {'key': 'properties.tables', 'type': '[str]'},
'storage_account': {'key': 'properties.storageAccount', 'type': 'StorageAccount'},
'status': {'key': 'properties.status', 'type': 'StorageInsightStatus'},
'e_tag': {'key': 'eTag', 'type': 'str'},
}
def __init__(self, storage_account, tags=None, containers=None, tables=None, e_tag=None):
super(StorageInsight, self).__init__(tags=tags)
self.containers = containers
self.tables = tables
self.storage_account = storage_account
self.status = None
self.e_tag = e_tag
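# Minimal construction sketch (assumptions: a sibling StorageAccount model that takes
# the storage account resource id and shared key; the values below are placeholders):
#
#     from azure.mgmt.loganalytics.models import StorageAccount, StorageInsight
#
#     insight = StorageInsight(
#         storage_account=StorageAccount(id='<resource-id>', key='<access-key>'),
#         containers=['wad-iis-logfiles'],
#         tables=['WADWindowsEventLogsTable'],
#     )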
|
mit
|
nemesiscodex/JukyOS-sugar
|
extensions/deviceicon/touchpad.py
|
1
|
4769
|
# Copyright (C) 2010, Walter Bender, Sugar Labs
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from gettext import gettext as _
import os
import gtk
import gconf
import glib
import logging
from sugar.graphics.tray import TrayIcon
from sugar.graphics.xocolor import XoColor
from sugar.graphics.palette import Palette
from sugar.graphics import style
from jarabe.frame.frameinvoker import FrameWidgetInvoker
TOUCHPAD_MODE_MOUSE = 'mouse'
TOUCHPAD_MODE_PENTABLET = 'pentablet'
TOUCHPAD_MODES = (TOUCHPAD_MODE_MOUSE, TOUCHPAD_MODE_PENTABLET)
STATUS_TEXT = (_('finger'), _('stylus'))
STATUS_ICON = ('touchpad-capacitive', 'touchpad-resistive')
# NODE_PATH is used to communicate with the touchpad device.
NODE_PATH = '/sys/devices/platform/i8042/serio1/hgpk_mode'
class DeviceView(TrayIcon):
""" Manage the touchpad mode from the device palette on the Frame. """
FRAME_POSITION_RELATIVE = 500
def __init__(self):
""" Create the icon that represents the touchpad. """
icon_name = STATUS_ICON[_read_touchpad_mode()]
client = gconf.client_get_default()
color = XoColor(client.get_string('/desktop/sugar/user/color'))
TrayIcon.__init__(self, icon_name=icon_name, xo_color=color)
self.set_palette_invoker(FrameWidgetInvoker(self))
self.connect('button-release-event', self.__button_release_event_cb)
def create_palette(self):
""" Create a palette for this icon; called by the Sugar framework
when a palette needs to be displayed. """
label = glib.markup_escape_text(_('My touchpad'))
self.palette = ResourcePalette(label, self.icon)
self.palette.set_group_id('frame')
return self.palette
def __button_release_event_cb(self, widget, event):
""" Callback for button release event; used to invoke touchpad-mode
change. """
self.palette.toggle_mode()
return True
class ResourcePalette(Palette):
""" Palette attached to the decive icon that represents the touchpas. """
def __init__(self, primary_text, icon):
""" Create the palette and initilize with current touchpad status. """
Palette.__init__(self, label=primary_text)
self._icon = icon
vbox = gtk.VBox()
self.set_content(vbox)
self._status_text = gtk.Label()
vbox.pack_start(self._status_text, padding=style.DEFAULT_PADDING)
self._status_text.show()
vbox.show()
self._mode = _read_touchpad_mode()
self._update()
def _update(self):
""" Update the label and icon based on the current mode. """
self._status_text.set_label(STATUS_TEXT[self._mode])
self._icon.props.icon_name = STATUS_ICON[self._mode]
def toggle_mode(self):
""" Toggle the touchpad mode. """
self._mode = 1 - self._mode
_write_touchpad_mode(self._mode)
self._update()
def setup(tray):
""" Initialize the devic icon; called by the shell when initializing the
Frame. """
if os.path.exists(NODE_PATH):
tray.add_device(DeviceView())
_write_touchpad_mode_str(TOUCHPAD_MODE_MOUSE)
def _read_touchpad_mode_str():
""" Read the touchpad mode string from the node path. """
node_file_handle = open(NODE_PATH, 'r')
text = node_file_handle.read().strip().lower()
node_file_handle.close()
return text
def _read_touchpad_mode():
""" Read the touchpad mode and return the mode index. """
mode_str = _read_touchpad_mode_str()
if mode_str not in TOUCHPAD_MODES:
return None
return TOUCHPAD_MODES.index(mode_str)
def _write_touchpad_mode_str(mode_str):
""" Write the touchpad mode to the node path. """
try:
node_file_handle = open(NODE_PATH, 'w')
except IOError, e:
logging.error('Error opening %s for writing: %s', NODE_PATH, e)
return
node_file_handle.write(mode_str)
node_file_handle.close()
def _write_touchpad_mode(mode_num):
""" Look up the mode (by index) and write to node path. """
return _write_touchpad_mode_str(TOUCHPAD_MODES[mode_num])
|
gpl-2.0
|
janschulz/igraph
|
interfaces/python/igraph/nexus.py
|
1
|
21903
|
# vim:ts=4:sw=4:sts=4:et
# -*- coding: utf-8 -*-
"""Interface to the Nexus online graph repository.
The classes in this file facilitate access to the Nexus online graph
repository at U{http://nexus.igraph.org}.
The main entry point of this package is the C{Nexus} variable, which is
an instance of L{NexusConnection}. Use L{NexusConnection.get} to get a particular
network from Nexus, L{NexusConnection.list} to list networks having a given set of
tags, L{NexusConnection.search} to search in the dataset descriptions, or
L{NexusConnection.info} to show the info sheet of a dataset."""
from cStringIO import StringIO
from gzip import GzipFile
from itertools import izip
from textwrap import TextWrapper
from urllib import urlencode
from urlparse import urlparse, urlunparse
from textwrap import TextWrapper
from igraph.compat import property
from igraph.configuration import Configuration
from igraph.utils import multidict
import re
import urllib2
__all__ = ["Nexus", "NexusConnection"]
__license__ = u"""\
Copyright (C) 2006-2012 Tamás Nepusz <ntamas@gmail.com>
Pázmány Péter sétány 1/a, 1117 Budapest, Hungary
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301 USA
"""
class NexusConnection(object):
"""Connection to a remote Nexus server.
In most cases, you will not have to instantiate this object, just use
the global L{Nexus} variable which is an instance of L{NexusConnection}
and connects to the Nexus repository at U{http://nexus.igraph.org}.
Example:
>>> print Nexus.info("karate") # doctest:+ELLIPSIS
Nexus dataset 'karate' (#1)
vertices/edges: 34/78
name: Zachary's karate club
tags: social network; undirected; weighted
...
>>> karate = Nexus.get("karate")
>>> from igraph import summary
>>> summary(karate)
IGRAPH UNW- 34 78 -- Zachary's karate club network
+ attr: Author (g), Citation (g), name (g), Faction (v), id (v), name (v), weight (e)
@undocumented: _get_response, _parse_dataset_id, _parse_text_response,
_ensure_uncompressed"""
def __init__(self, nexus_url=None):
"""Constructs a connection to a remote Nexus server.
@param nexus_url: the root URL of the remote server. Leave it at its
default value (C{None}) unless you have set up your own Nexus server
and you want to connect to that. C{None} fetches the URL from
igraph's configuration file or uses the default URL if no URL
is specified in the configuration file.
"""
self.debug = False
self.url = nexus_url
self._opener = urllib2.build_opener()
def get(self, id):
"""Retrieves the dataset with the given ID from Nexus.
Dataset IDs are formatted as follows: the name of a dataset on its own
means that a single network should be returned if the dataset contains
a single network, or multiple networks should be returned if the dataset
contains multiple networks. When the name is followed by a dot and a
network ID, only a single network will be returned: the one that has the
given network ID. When the name is followed by a dot and a star, a
dictionary mapping network IDs to networks will be returned even if the
original dataset contains a single network only.
E.g., getting C{"karate"} would return a single network since the
Zachary karate club dataset contains one network only. Getting
C{"karate.*"} on the other hand would return a dictionary with one
entry that contains the Zachary karate club network.
@param id: the ID of the dataset to retrieve.
@return: an instance of L{Graph} (if a single graph has to be returned)
or a dictionary mapping network IDs to instances of L{Graph}.
"""
from igraph import load
dataset_id, network_id = self._parse_dataset_id(id)
params = dict(format="Python-igraph", id=dataset_id)
response = self._get_response("/api/dataset", params, compressed=True)
response = self._ensure_uncompressed(response)
result = load(response, format="pickle")
if network_id is None:
# If result contains a single network only, return that network.
# Otherwise return the whole dictionary
if not isinstance(result, dict):
return result
if len(result) == 1:
return result[result.keys()[0]]
return result
if network_id == "*":
# Return a dict no matter what
if not isinstance(result, dict):
                result = {dataset_id: result}
return result
return result[network_id]
def info(self, id):
"""Retrieves informations about the dataset with the given numeric
or string ID from Nexus.
@param id: the numeric or string ID of the dataset to retrieve.
@return: an instance of L{NexusDatasetInfo}.
"""
params = dict(format="text", id=id)
response = self._get_response("/api/dataset_info", params)
return NexusDatasetInfo.FromMultiDict(self._parse_text_response(response))
def list(self, tags=None, operator="or", order="date"):
"""Retrieves a list of datasets matching a set of tags from Nexus.
@param tags: the tags the returned datasets should have. C{None}
retrieves all the datasets, a single string retrieves datasets
having that given tag. Multiple tags may also be specified as
a list, tuple or any other iterable.
@param operator: when multiple tags are given, this argument
specifies whether the retrieved datasets should match all
the tags (C{"and"}) or any of them (C{"or"}).
@param order: the order of entries; it must be one of C{"date"},
C{"name"} or C{"popularity"}.
@return: a L{NexusDatasetInfoList} object, which basically acts like a
list and yields L{NexusDatasetInfo} objects. The list is populated
lazily; i.e. the requests will be fired only when needed.
"""
params = dict(format="text", order=order)
if tags is not None:
if not hasattr(tags, "__iter__") or isinstance(tags, basestring):
params["tag"] = str(tags)
else:
params["tag"] = "|".join(str(tag) for tag in tags)
params["operator"] = operator
return NexusDatasetInfoList(self, "/api/dataset_info", params)
def search(self, query, order="date"):
"""Retrieves a list of datasets matching a query string from Nexus.
@param query: the query string. Searches are case insensitive and
Nexus searches for complete words only. The special word OR
can be used to find datasets that contain any of the given words
(instead of all of them). Exact phrases must be enclosed in
quotes in the search string. See the Nexus webpage for more
information at U{http://nexus.igraph.org/web/docs#searching}.
@param order: the order of entries; it must be one of C{"date"},
C{"name"} or C{"popularity"}.
@return: a L{NexusDatasetInfoList} object, which basically acts like a
list and yields L{NexusDatasetInfo} objects. The list is populated
lazily; i.e. the requests will be fired only when needed.
"""
params = dict(q=query, order=order, format="text")
return NexusDatasetInfoList(self, "/api/search", params)
@staticmethod
def _ensure_uncompressed(response):
"""Expects an HTTP response object, checks its Content-Encoding header,
decompresses the data and returns an in-memory buffer holding the
uncompressed data."""
compressed = response.headers.get("Content-Encoding") == "gzip"
if not compressed:
content_disp = response.headers.get("Content-Disposition", "")
compressed = bool(re.match(r'attachment; *filename=.*\.gz\"?$',
content_disp))
if compressed:
return GzipFile(fileobj=StringIO(response.read()), mode="rb")
print response.headers
return response
def _get_response(self, path, params={}, compressed=False):
"""Sends a request to Nexus at the given path with the given parameters
and returns a file-like object for the response. `compressed` denotes
whether we accept compressed responses."""
if self.url is None:
url = Configuration.instance()["remote.nexus.url"]
else:
url = self.url
url = "%s%s?%s" % (url, path, urlencode(params))
request = urllib2.Request(url)
if compressed:
request.add_header("Accept-Encoding", "gzip")
if self.debug:
print "[debug] Sending request: %s" % url
return self._opener.open(request)
@staticmethod
def _parse_dataset_id(id):
"""Parses a dataset ID used in the `get` request.
Returns the dataset ID and the network ID (the latter being C{None}
        if the original ID did not contain a network ID).
"""
dataset_id, _, network_id = str(id).partition(".")
if not network_id:
network_id = None
return dataset_id, network_id
@staticmethod
def _parse_text_response(response):
"""Parses a plain text formatted response from Nexus.
Plain text formatted responses consist of key-value pairs, separated
by C{":"}. Values may span multiple lines; in this case, the key is
omitted after the first line and the extra lines start with
whitespace.
Examples:
>>> d = Nexus._parse_text_response("Id: 17\\nName: foo")
>>> sorted(d.items())
[('Id', '17'), ('Name', 'foo')]
>>> d = Nexus._parse_text_response("Id: 42\\nName: foo\\n .\\n bar")
>>> sorted(d.items())
[('Id', '42'), ('Name', 'foo\\n\\nbar')]
"""
if isinstance(response, basestring):
response = response.split("\n")
result = multidict()
key, value = None, []
for line in response:
line = line.rstrip()
if not line:
continue
if key is not None and line[0] in ' \t':
# Line continuation
line = line.lstrip()
if line == '.':
line = ''
value.append(line)
else:
# Key-value pair
if key is not None:
result.add(key, "\n".join(value))
key, value = line.split(":", 1)
value = [value.strip()]
if key is not None:
result.add(key, "\n".join(value))
return result
@property
def url(self):
"""Returns the root URL of the Nexus repository the connection is
communicating with."""
return self._url
@url.setter
def url(self, value):
"""Sets the root URL of the Nexus repository the connection is
communicating with."""
if value is None:
self._url = None
else:
value = str(value)
parts = urlparse(value, "http", False)
self._url = urlunparse(parts)
if self._url and self._url[-1] == "/":
self._url = self._url[:-1]
class NexusDatasetInfo(object):
"""Information about a dataset in the Nexus repository.
@undocumented: _update_from_multidict, vertices_edges"""
def __init__(self, id=None, sid=None, name=None, networks=None,
vertices=None, edges=None, tags=None, attributes=None, rest=None):
self._conn = None
self.id = id
self.sid = sid
self.name = name
self.vertices = vertices
self.edges = edges
self.tags = tags
self.attributes = attributes
if networks is None:
self.networks = []
elif not isinstance(networks, (str, unicode)):
self.networks = list(networks)
else:
self.networks = [networks]
if rest:
self.rest = multidict(rest)
else:
self.rest = None
@property
def vertices_edges(self):
if self.vertices is None or self.edges is None:
return ""
elif isinstance(self.vertices, (list, tuple)) and isinstance(self.edges, (list, tuple)):
return " ".join("%s/%s" % (v,e) for v, e in izip(self.vertices, self.edges))
else:
return "%s/%s" % (self.vertices, self.edges)
@vertices_edges.setter
def vertices_edges(self, value):
if value is None:
self.vertices, self.edges = None, None
return
value = value.strip().split(" ")
if len(value) == 0:
self.vertices, self.edges = None, None
elif len(value) == 1:
self.vertices, self.edges = map(int, value[0].split("/"))
else:
self.vertices = []
self.edges = []
for ve in value:
v, e = ve.split("/", 1)
self.vertices.append(int(v))
self.edges.append(int(e))
def __repr__(self):
params = "(id=%(id)r, sid=%(sid)r, name=%(name)r, networks=%(networks)r, "\
"vertices=%(vertices)r, edges=%(edges)r, tags=%(tags)r, "\
"attributes=%(attributes)r, rest=%(rest)r)" % self.__dict__
return "%s%s" % (self.__class__.__name__, params)
def __str__(self):
if self.networks and len(self.networks) > 1:
lines = ["Nexus dataset '%s' (#%s) with %d networks" % \
(self.sid, self.id, len(self.networks))]
else:
lines = ["Nexus dataset '%(sid)s' (#%(id)s)" % self.__dict__]
lines.append("vertices/edges: %s" % self.vertices_edges)
if self.name:
lines.append("name: %s" % self.name)
if self.tags:
lines.append("tags: %s" % "; ".join(self.tags))
if self.rest:
wrapper = TextWrapper(width=76, subsequent_indent=' ')
keys = sorted(self.rest.iterkeys())
if "attribute" in self.rest:
keys.remove("attribute")
keys.append("attribute")
for key in keys:
for value in self.rest.getlist(key):
paragraphs = str(value).splitlines()
wrapper.initial_indent = "%s: " % key
for paragraph in paragraphs:
ls = wrapper.wrap(paragraph)
if ls:
lines.extend(wrapper.wrap(paragraph))
else:
lines.append(" .")
wrapper.initial_indent = " "
return "\n".join(lines)
def _update_from_multidict(self, params):
"""Updates the dataset object from a multidict representation of
key-value pairs, similar to the ones provided by the Nexus API in
plain text response."""
self.id = params.get("id")
self.sid = params.get("sid")
self.name = params.get("name")
self.vertices = params.get("vertices")
self.edges = params.get("edges")
self.tags = params.get("tags")
networks = params.get("networks")
if networks:
self.networks = networks.split()
keys_to_ignore = set("id sid name vertices edges tags networks".split())
if self.vertices is None and self.edges is None:
# Try "vertices/edges"
self.vertices_edges = params.get("vertices/edges")
keys_to_ignore.add("vertices/edges")
if self.rest is None:
self.rest = multidict()
for k in set(params.iterkeys()) - keys_to_ignore:
for v in params.getlist(k):
self.rest.add(k, v)
if self.id:
self.id = int(self.id)
if self.vertices and not isinstance(self.vertices, (list, tuple)):
self.vertices = int(self.vertices)
if self.edges and not isinstance(self.edges, (list, tuple)):
self.edges = int(self.edges)
if self.tags is not None:
self.tags = self.tags.split(";")
@classmethod
def FromMultiDict(cls, dict):
"""Constructs a Nexus dataset object from a multidict representation
of key-value pairs, similar to the ones provided by the Nexus API in
plain text response."""
result = cls()
result._update_from_multidict(dict)
return result
def download(self, network_id=None):
"""Retrieves the actual dataset from Nexus.
@param network_id: if the dataset contains multiple networks, the ID
of the network to be retrieved. C{None} returns a single network if
the dataset contains a single network, or a dictionary of networks
if the dataset contains more than one network. C{"*"} retrieves
a dictionary even if the dataset contains a single network only.
@return: a L{Graph} instance or a dictionary mapping network names to
L{Graph} instances.
"""
if self.id is None:
raise ValueError("dataset ID is empty")
conn = self._conn or Nexus
if network_id is None:
return conn.get(self.id)
return conn.get("%s.%s" % (self.id, network_id))
get = download
class NexusDatasetInfoList(object):
"""A read-only list-like object that can be used to retrieve the items
from a Nexus search result.
"""
def __init__(self, connection, method, params):
"""Constructs a Nexus dataset list that will use the given connection
and the given parameters to retrieve the search results.
@param connection: a Nexus connection object
@param method: the URL of the Nexus API method to call
@param params: the parameters to pass in the GET requests, in the
form of a Python dictionary.
"""
self._conn = connection
self._method = str(method)
self._params = params
self._length = None
self._datasets = []
self._blocksize = 10
def _fetch_results(self, index):
"""Fetches the results from Nexus such that the result item with the
given index will be available (unless the result list is shorter than
the given index of course)."""
# Calculate the start offset
page = index // self._blocksize
offset = page * self._blocksize
self._params["offset"] = offset
self._params["limit"] = self._blocksize
# Ensure that self._datasets has the necessary length
diff = (page+1) * self._blocksize - len(self._datasets)
if diff > 0:
self._datasets.extend([None] * diff)
response = self._conn._get_response(self._method, self._params)
current_dataset = None
for line in response:
key, value = line.strip().split(": ", 1)
key = key.lower()
if key == "totalsize":
# Total number of items in the search result
self._length = int(value)
elif key == "id":
# Starting a new dataset
if current_dataset:
self._datasets[offset] = current_dataset
offset += 1
current_dataset = NexusDatasetInfo(id=int(value))
current_dataset._conn = self._conn
elif key == "sid":
current_dataset.sid = value
elif key == "name":
current_dataset.name = value
elif key == "vertices":
current_dataset.vertices = int(value)
elif key == "edges":
current_dataset.edges = int(value)
elif key == "vertices/edges":
current_dataset.vertices_edges = value
elif key == "tags":
current_dataset.tags = value.split(";")
if current_dataset:
self._datasets[offset] = current_dataset
def __getitem__(self, index):
if len(self._datasets) <= index:
self._fetch_results(index)
elif self._datasets[index] is None:
self._fetch_results(index)
return self._datasets[index]
def __iter__(self):
for i in xrange(len(self)):
yield self[i]
def __len__(self):
"""Returns the number of result items."""
if self._length is None:
self._fetch_results(0)
return self._length
def __str__(self):
"""Converts the Nexus result list into a nice human-readable format."""
max_index_length = len(str(len(self))) + 2
indent = "\n" + " " * (max_index_length+1)
result = []
for index, item in enumerate(self):
formatted_item = ("[%d]" % index).rjust(max_index_length) + " " + \
str(item).replace("\n", indent)
result.append(formatted_item)
return "\n".join(result)
Nexus = NexusConnection()
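if __name__ == "__main__":
    # Illustrative sketch only, not part of the original module: it needs live
    # network access to the Nexus repository. "karate" is assumed to be a valid
    # dataset ID; get() is the same method NexusDatasetInfo.download() uses above.
    graph = Nexus.get("karate")
    print(graph.summary())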
|
gpl-2.0
|
jhaux/tensorflow
|
tensorflow/contrib/keras/python/keras/applications/xception.py
|
25
|
12314
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=invalid-name
"""Xception V1 model for Keras.
On ImageNet, this model gets to a top-1 validation accuracy of 0.790
and a top-5 validation accuracy of 0.945.
Do note that the input image format for this model is different than for
the VGG16 and ResNet models (299x299 instead of 224x224),
and that the input preprocessing function
is also different (same as Inception V3).
Also do note that this model is only available for the TensorFlow backend,
due to its reliance on `SeparableConvolution` layers.
# Reference
- [Xception: Deep Learning with Depthwise Separable
Convolutions](https://arxiv.org/abs/1610.02357)
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.keras.python.keras import backend as K
from tensorflow.contrib.keras.python.keras import layers
from tensorflow.contrib.keras.python.keras.applications.imagenet_utils import _obtain_input_shape
from tensorflow.contrib.keras.python.keras.applications.imagenet_utils import decode_predictions # pylint: disable=unused-import
from tensorflow.contrib.keras.python.keras.engine.topology import get_source_inputs
from tensorflow.contrib.keras.python.keras.layers import Activation
from tensorflow.contrib.keras.python.keras.layers import BatchNormalization
from tensorflow.contrib.keras.python.keras.layers import Conv2D
from tensorflow.contrib.keras.python.keras.layers import Dense
from tensorflow.contrib.keras.python.keras.layers import GlobalAveragePooling2D
from tensorflow.contrib.keras.python.keras.layers import GlobalMaxPooling2D
from tensorflow.contrib.keras.python.keras.layers import Input
from tensorflow.contrib.keras.python.keras.layers import MaxPooling2D
from tensorflow.contrib.keras.python.keras.layers import SeparableConv2D
from tensorflow.contrib.keras.python.keras.models import Model
from tensorflow.contrib.keras.python.keras.utils.data_utils import get_file
from tensorflow.python.platform import tf_logging as logging
TF_WEIGHTS_PATH = 'https://github.com/fchollet/deep-learning-models/releases/download/v0.4/xception_weights_tf_dim_ordering_tf_kernels.h5'
TF_WEIGHTS_PATH_NO_TOP = 'https://github.com/fchollet/deep-learning-models/releases/download/v0.4/xception_weights_tf_dim_ordering_tf_kernels_notop.h5'
def Xception(include_top=True,
weights='imagenet',
input_tensor=None,
input_shape=None,
pooling=None,
classes=1000):
"""Instantiates the Xception architecture.
Optionally loads weights pre-trained
on ImageNet. This model is available for TensorFlow only,
and can only be used with inputs following the TensorFlow
data format `(width, height, channels)`.
You should set `image_data_format="channels_last"` in your Keras config
located at ~/.keras/keras.json.
Note that the default input image size for this model is 299x299.
Arguments:
include_top: whether to include the fully-connected
layer at the top of the network.
weights: one of `None` (random initialization)
or "imagenet" (pre-training on ImageNet).
input_tensor: optional Keras tensor (i.e. output of `layers.Input()`)
to use as image input for the model.
input_shape: optional shape tuple, only to be specified
if `include_top` is False (otherwise the input shape
          has to be `(299, 299, 3)`).
          It should have exactly 3 input channels,
and width and height should be no smaller than 71.
E.g. `(150, 150, 3)` would be one valid value.
pooling: Optional pooling mode for feature extraction
when `include_top` is `False`.
- `None` means that the output of the model will be
the 4D tensor output of the
last convolutional layer.
- `avg` means that global average pooling
will be applied to the output of the
last convolutional layer, and thus
the output of the model will be a 2D tensor.
- `max` means that global max pooling will
be applied.
classes: optional number of classes to classify images
into, only to be specified if `include_top` is True, and
if no `weights` argument is specified.
Returns:
A Keras model instance.
Raises:
ValueError: in case of invalid argument for `weights`,
or invalid input shape.
RuntimeError: If attempting to run this model with a
backend that does not support separable convolutions.
"""
if weights not in {'imagenet', None}:
raise ValueError('The `weights` argument should be either '
'`None` (random initialization) or `imagenet` '
'(pre-training on ImageNet).')
if weights == 'imagenet' and include_top and classes != 1000:
raise ValueError('If using `weights` as imagenet with `include_top`'
' as true, `classes` should be 1000')
if K.backend() != 'tensorflow':
raise RuntimeError('The Xception model is only available with '
'the TensorFlow backend.')
if K.image_data_format() != 'channels_last':
logging.warning(
'The Xception model is only available for the '
'input data format "channels_last" '
'(width, height, channels). '
'However your settings specify the default '
'data format "channels_first" (channels, width, height). '
'You should set `image_data_format="channels_last"` in your Keras '
'config located at ~/.keras/keras.json. '
'The model being returned right now will expect inputs '
'to follow the "channels_last" data format.')
K.set_image_data_format('channels_last')
old_data_format = 'channels_first'
else:
old_data_format = None
# Determine proper input shape
input_shape = _obtain_input_shape(
input_shape,
default_size=299,
min_size=71,
data_format=K.image_data_format(),
include_top=include_top)
if input_tensor is None:
img_input = Input(shape=input_shape)
else:
img_input = Input(tensor=input_tensor, shape=input_shape)
x = Conv2D(
32, (3, 3), strides=(2, 2), use_bias=False,
name='block1_conv1')(img_input)
x = BatchNormalization(name='block1_conv1_bn')(x)
x = Activation('relu', name='block1_conv1_act')(x)
x = Conv2D(64, (3, 3), use_bias=False, name='block1_conv2')(x)
x = BatchNormalization(name='block1_conv2_bn')(x)
x = Activation('relu', name='block1_conv2_act')(x)
residual = Conv2D(
128, (1, 1), strides=(2, 2), padding='same', use_bias=False)(x)
residual = BatchNormalization()(residual)
x = SeparableConv2D(
128, (3, 3), padding='same', use_bias=False, name='block2_sepconv1')(x)
x = BatchNormalization(name='block2_sepconv1_bn')(x)
x = Activation('relu', name='block2_sepconv2_act')(x)
x = SeparableConv2D(
128, (3, 3), padding='same', use_bias=False, name='block2_sepconv2')(x)
x = BatchNormalization(name='block2_sepconv2_bn')(x)
x = MaxPooling2D(
(3, 3), strides=(2, 2), padding='same', name='block2_pool')(x)
x = layers.add([x, residual])
residual = Conv2D(
256, (1, 1), strides=(2, 2), padding='same', use_bias=False)(x)
residual = BatchNormalization()(residual)
x = Activation('relu', name='block3_sepconv1_act')(x)
x = SeparableConv2D(
256, (3, 3), padding='same', use_bias=False, name='block3_sepconv1')(x)
x = BatchNormalization(name='block3_sepconv1_bn')(x)
x = Activation('relu', name='block3_sepconv2_act')(x)
x = SeparableConv2D(
256, (3, 3), padding='same', use_bias=False, name='block3_sepconv2')(x)
x = BatchNormalization(name='block3_sepconv2_bn')(x)
x = MaxPooling2D(
(3, 3), strides=(2, 2), padding='same', name='block3_pool')(x)
x = layers.add([x, residual])
residual = Conv2D(
728, (1, 1), strides=(2, 2), padding='same', use_bias=False)(x)
residual = BatchNormalization()(residual)
x = Activation('relu', name='block4_sepconv1_act')(x)
x = SeparableConv2D(
728, (3, 3), padding='same', use_bias=False, name='block4_sepconv1')(x)
x = BatchNormalization(name='block4_sepconv1_bn')(x)
x = Activation('relu', name='block4_sepconv2_act')(x)
x = SeparableConv2D(
728, (3, 3), padding='same', use_bias=False, name='block4_sepconv2')(x)
x = BatchNormalization(name='block4_sepconv2_bn')(x)
x = MaxPooling2D(
(3, 3), strides=(2, 2), padding='same', name='block4_pool')(x)
x = layers.add([x, residual])
for i in range(8):
residual = x
prefix = 'block' + str(i + 5)
x = Activation('relu', name=prefix + '_sepconv1_act')(x)
x = SeparableConv2D(
728, (3, 3), padding='same', use_bias=False,
name=prefix + '_sepconv1')(x)
x = BatchNormalization(name=prefix + '_sepconv1_bn')(x)
x = Activation('relu', name=prefix + '_sepconv2_act')(x)
x = SeparableConv2D(
728, (3, 3), padding='same', use_bias=False,
name=prefix + '_sepconv2')(x)
x = BatchNormalization(name=prefix + '_sepconv2_bn')(x)
x = Activation('relu', name=prefix + '_sepconv3_act')(x)
x = SeparableConv2D(
728, (3, 3), padding='same', use_bias=False,
name=prefix + '_sepconv3')(x)
x = BatchNormalization(name=prefix + '_sepconv3_bn')(x)
x = layers.add([x, residual])
residual = Conv2D(
1024, (1, 1), strides=(2, 2), padding='same', use_bias=False)(x)
residual = BatchNormalization()(residual)
x = Activation('relu', name='block13_sepconv1_act')(x)
x = SeparableConv2D(
728, (3, 3), padding='same', use_bias=False, name='block13_sepconv1')(x)
x = BatchNormalization(name='block13_sepconv1_bn')(x)
x = Activation('relu', name='block13_sepconv2_act')(x)
x = SeparableConv2D(
1024, (3, 3), padding='same', use_bias=False, name='block13_sepconv2')(x)
x = BatchNormalization(name='block13_sepconv2_bn')(x)
x = MaxPooling2D(
(3, 3), strides=(2, 2), padding='same', name='block13_pool')(x)
x = layers.add([x, residual])
x = SeparableConv2D(
1536, (3, 3), padding='same', use_bias=False, name='block14_sepconv1')(x)
x = BatchNormalization(name='block14_sepconv1_bn')(x)
x = Activation('relu', name='block14_sepconv1_act')(x)
x = SeparableConv2D(
2048, (3, 3), padding='same', use_bias=False, name='block14_sepconv2')(x)
x = BatchNormalization(name='block14_sepconv2_bn')(x)
x = Activation('relu', name='block14_sepconv2_act')(x)
if include_top:
x = GlobalAveragePooling2D(name='avg_pool')(x)
x = Dense(classes, activation='softmax', name='predictions')(x)
else:
if pooling == 'avg':
x = GlobalAveragePooling2D()(x)
elif pooling == 'max':
x = GlobalMaxPooling2D()(x)
# Ensure that the model takes into account
# any potential predecessors of `input_tensor`.
if input_tensor is not None:
inputs = get_source_inputs(input_tensor)
else:
inputs = img_input
# Create model.
model = Model(inputs, x, name='xception')
# load weights
if weights == 'imagenet':
if include_top:
weights_path = get_file(
'xception_weights_tf_dim_ordering_tf_kernels.h5',
TF_WEIGHTS_PATH,
cache_subdir='models')
else:
weights_path = get_file(
'xception_weights_tf_dim_ordering_tf_kernels_notop.h5',
TF_WEIGHTS_PATH_NO_TOP,
cache_subdir='models')
model.load_weights(weights_path)
if old_data_format:
K.set_image_data_format(old_data_format)
return model
def preprocess_input(x):
x /= 255.
x -= 0.5
x *= 2.
return x
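if __name__ == '__main__':
  # Illustrative sketch, not part of the original module: build the network
  # without the classification head and push a random image through it.
  # `weights=None` avoids the large weight download; swap in 'imagenet' for real use.
  import numpy as np
  model = Xception(include_top=False, weights=None,
                   input_shape=(299, 299, 3), pooling='avg')
  dummy = preprocess_input(
      np.random.uniform(0, 255, (1, 299, 299, 3)).astype('float32'))
  print(model.predict(dummy).shape)  # expected: (1, 2048)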
|
apache-2.0
|
viennacl/pyviennacl-dev
|
pyviennacl/io.py
|
2
|
1546
|
import pyviennacl as p
import numpy as np
def read_mtx(fname, dtype=p.float32, sparse_type=p.CompressedMatrix):
"""
Read a MatrixMarket file. Assume coordinate format. Very crude!
"""
fd = open(fname)
lines = list(map(lambda x: x.strip().split(" "), fd.readlines()))
ln = -1
for line in lines:
ln += 1
        if line[0][0] == "%":  # header / comment lines start with '%'
continue
else:
break
n = int(lines[ln][0])
m = int(lines[ln][1])
try: nnz = int(lines[ln][2])
except: nnz = n * m
if m == 1:
vec_type = np.result_type(dtype).type
values = list(map(lambda x: vec_type(" ".join(x)), lines[ln+1:]))
values = np.array(values)
vec = p.Vector(values, dtype=dtype)
return vec
else:
mat = sparse_type(n, m, nnz, dtype=dtype)
mat_type = p.np_result_type(mat).type
def assign(l):
try:
i, j, v = int(l[0]), int(l[1]), mat_type(l[2])
mat.insert(i-1, j-1, v)
except ValueError:
pass
result = list(map(assign, lines[ln+1:]))
return mat
def read_vector(fname, dtype=np.float32):
fd = open(fname)
lines = list(map(lambda x: x.strip().split(" "), fd.readlines()))
count = int(lines[0][0])
vector = list(map(lambda x: p.np_result_type(dtype).type(x), lines[1]))
vector = p.Vector(vector, dtype = dtype)
if vector.size != count:
raise Exception("Sizes %d and %d do not match!" % (vector.size, count))
return vector
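if __name__ == "__main__":
    # Minimal smoke test, illustrative only (not part of the original module):
    # write a tiny coordinate-format MatrixMarket file and read it back.
    # Assumes a working PyViennaCL installation with an available compute context.
    with open("tiny.mtx", "w") as out:
        out.write("%%MatrixMarket matrix coordinate real general\n"
                  "2 2 2\n"
                  "1 1 1.0\n"
                  "2 2 2.0\n")
    mat = read_mtx("tiny.mtx")
    print(type(mat).__name__)  # expected: the sparse_type default, CompressedMatrix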
|
mit
|
jsj2008/kdegames
|
kajongg/src/modeltest.py
|
2
|
20981
|
"""
##
## Copyright (C) 2007 Trolltech ASA. All rights reserved.
## Copyright (C) 2010-2012 Wolfgang Rohdewald <wolfgang@rohdewald.de>
##
## This file is part of the Qt Concurrent project on Trolltech Labs.
##
## This file may be used under the terms of the GNU General Public
## License version 2.0 as published by the Free Software Foundation
## and appearing in the file LICENSE.GPL included in the packaging of
## this file. Please review the following information to ensure GNU
## General Public Licensing requirements will be met:
## http://www.trolltech.com/products/qt/opensource.html
##
## If you are unsure which license is appropriate for your use, please
## review the following information:
## http://www.trolltech.com/products/qt/licensing.html or contact the
## sales department at sales@trolltech.com.
##
## This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
## WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
##
#############################################################################
"""
import sip
from PyQt4 import QtCore
class ModelTest(QtCore.QObject):
"""tests a model"""
def __init__(self, _model, parent):
"""
        Connect to all of the model's signals. Whenever anything happens, recheck everything.
"""
QtCore.QObject.__init__(self, parent)
self._model = _model
self.model = sip.cast(_model, QtCore.QAbstractItemModel)
self.insert = []
self.remove = []
self.changing = []
self.fetchingMore = False
assert(self.model)
self.model.columnsAboutToBeInserted.connect(self.runAllTests)
self.model.columnsAboutToBeRemoved.connect(self.runAllTests)
self.model.columnsInserted.connect(self.runAllTests)
self.model.columnsRemoved.connect(self.runAllTests)
self.model.dataChanged.connect(self.runAllTests)
self.model.headerDataChanged.connect(self.runAllTests)
self.model.layoutAboutToBeChanged.connect(self.runAllTests)
self.model.layoutChanged.connect(self.runAllTests)
self.model.modelReset.connect(self.runAllTests)
self.model.rowsAboutToBeInserted.connect(self.runAllTests)
self.model.rowsAboutToBeRemoved.connect(self.runAllTests)
self.model.rowsInserted.connect(self.runAllTests)
self.model.rowsRemoved.connect(self.runAllTests)
# Special checks for inserting/removing
self.model.layoutAboutToBeChanged.connect(self.layoutAboutToBeChanged )
self.model.layoutChanged.connect(self.layoutChanged )
self.model.rowsAboutToBeInserted.connect(self.rowsAboutToBeInserted)
self.model.rowsAboutToBeRemoved.connect(self.rowsAboutToBeRemoved)
self.model.rowsInserted.connect(self.rowsInserted)
self.model.rowsRemoved.connect(self.rowsRemoved)
self.runAllTests()
def nonDestructiveBasicTest(self):
"""
nonDestructiveBasicTest tries to call a number of the basic functions (not all)
        to make sure the model doesn't outright segfault, testing the functions that make sense.
"""
assert(self.model.buddy(QtCore.QModelIndex()) == QtCore.QModelIndex())
self.model.canFetchMore(QtCore.QModelIndex())
assert(self.model.columnCount(QtCore.QModelIndex()) >= 0)
assert(self.model.data(QtCore.QModelIndex(), QtCore.Qt.DisplayRole) == QtCore.QVariant())
self.fetchingMore = True
self.model.fetchMore(QtCore.QModelIndex())
self.fetchingMore = False
flags = self.model.flags(QtCore.QModelIndex())
assert( int(flags & QtCore.Qt.ItemIsEnabled) == QtCore.Qt.ItemIsEnabled or \
int(flags & QtCore.Qt.ItemIsEnabled ) == 0 )
self.model.hasChildren(QtCore.QModelIndex())
self.model.hasIndex(0, 0)
self.model.headerData(0, QtCore.Qt.Horizontal, QtCore.Qt.DisplayRole)
self.model.index(0, 0, QtCore.QModelIndex())
self.model.itemData(QtCore.QModelIndex())
cache = QtCore.QVariant()
self.model.match(QtCore.QModelIndex(), -1, cache)
self.model.mimeTypes()
assert(self.model.parent(QtCore.QModelIndex()) == QtCore.QModelIndex())
assert(self.model.rowCount(QtCore.QModelIndex()) >= 0)
variant = QtCore.QVariant()
self.model.setData(QtCore.QModelIndex(), variant, -1)
self.model.setHeaderData(-1, QtCore.Qt.Horizontal, QtCore.QVariant())
self.model.setHeaderData(0, QtCore.Qt.Horizontal, QtCore.QVariant())
self.model.setHeaderData(999999, QtCore.Qt.Horizontal, QtCore.QVariant())
self.model.sibling(0, 0, QtCore.QModelIndex())
self.model.span(QtCore.QModelIndex())
self.model.supportedDropActions()
def rowCount(self):
"""
Tests self.model's implementation of QtCore.QAbstractItemModel::rowCount() and hasChildren()
self.models that are dynamically populated are not as fully tested here.
"""
# check top row
topindex = self.model.index(0, 0, QtCore.QModelIndex())
rows = self.model.rowCount(topindex)
assert(rows >= 0)
if rows > 0:
assert(self.model.hasChildren(topindex) == True )
secondlvl = self.model.index(0, 0, topindex)
if secondlvl.isValid():
# check a row count where parent is valid
rows = self.model.rowCount(secondlvl)
assert(rows >= 0)
if rows > 0:
assert(self.model.hasChildren(secondlvl) == True)
# The self.models rowCount() is tested more extensively in checkChildren,
# but this catches the big mistakes
def columnCount(self):
"""
Tests self.model's implementation of QtCore.QAbstractItemModel::columnCount() and hasChildren()
"""
# check top row
topidx = self.model.index(0, 0, QtCore.QModelIndex())
assert(self.model.columnCount(topidx) >= 0)
# check a column count where parent is valid
childidx = self.model.index(0, 0, topidx)
if childidx.isValid() :
assert(self.model.columnCount(childidx) >= 0)
# columnCount() is tested more extensively in checkChildren,
# but this catches the big mistakes
def hasIndex(self):
"""
Tests self.model's implementation of QtCore.QAbstractItemModel::hasIndex()
"""
# Make sure that invalid values returns an invalid index
assert(self.model.hasIndex(-2, -2) == False)
assert(self.model.hasIndex(-2, 0) == False)
assert(self.model.hasIndex(0, -2) == False)
rows = self.model.rowCount(QtCore.QModelIndex())
cols = self.model.columnCount(QtCore.QModelIndex())
# check out of bounds
assert(self.model.hasIndex(rows, cols) == False)
assert(self.model.hasIndex(rows+1, cols+1) == False)
if rows > 0:
assert(self.model.hasIndex(0, 0) == True)
# hasIndex() is tested more extensively in checkChildren()
# but this catches the big mistakes
def index(self):
"""
Tests self.model's implementation of QtCore.QAbstractItemModel::index()
"""
# Make sure that invalid values returns an invalid index
assert(self.model.index(-2, -2, QtCore.QModelIndex()) == QtCore.QModelIndex())
assert(self.model.index(-2, 0, QtCore.QModelIndex()) == QtCore.QModelIndex())
assert(self.model.index(0, -2, QtCore.QModelIndex()) == QtCore.QModelIndex())
rows = self.model.rowCount(QtCore.QModelIndex())
cols = self.model.columnCount(QtCore.QModelIndex())
if rows == 0:
return
# Catch off by one errors
assert(self.model.index(rows, cols, QtCore.QModelIndex()) == QtCore.QModelIndex())
assert(self.model.index(0, 0, QtCore.QModelIndex()).isValid() == True)
# Make sure that the same index is *always* returned
idx1 = self.model.index(0, 0, QtCore.QModelIndex())
idx2 = self.model.index(0, 0, QtCore.QModelIndex())
assert(idx1==idx2)
# index() is tested more extensively in checkChildren()
# but this catches the big mistakes
def parent(self):
"""
Tests self.model's implementation of QtCore.QAbstractItemModel::parent()
"""
# Make sure the self.model wont crash and will return an invalid QtCore.QModelIndex
# when asked for the parent of an invalid index
assert(self.model.parent(QtCore.QModelIndex()) == QtCore.QModelIndex())
if self.model.rowCount(QtCore.QModelIndex()) == 0:
return
        # Column 0              | Column 1      |
        # QtCore.QModelIndex()  |               |
        #    \- topidx          | topidx1       |
        #         \- childidx   | childidx1     |
        # Common error test #1, make sure that a top level index has a parent
        # that is an invalid QtCore.QModelIndex
topidx = self.model.index(0, 0, QtCore.QModelIndex())
assert(self.model.parent(topidx) == QtCore.QModelIndex())
# Common error test #2, make sure that a second level index has a parent
# that is the first level index
if self.model.rowCount(topidx) > 0 :
childidx = self.model.index(0, 0, topidx)
assert(self.model.parent(childidx) == topidx)
# Common error test #3, the second column should NOT have the same children
# as the first column in a row
# Usually the second column shouldn't have children
topidx1 = self.model.index(0, 1, QtCore.QModelIndex())
if self.model.rowCount(topidx1) > 0:
childidx = self.model.index(0, 0, topidx)
childidx1 = self.model.index(0, 0, topidx1)
assert(childidx != childidx1)
# Full test, walk n levels deep through the self.model making sure that all
# parent's children correctly specify their parent
self.checkChildren(QtCore.QModelIndex())
def data(self):
"""
Tests self.model's implementation of QtCore.QAbstractItemModel::data()
"""
# Invalid index should return an invalid qvariant
assert( not self.model.data(QtCore.QModelIndex(), QtCore.Qt.DisplayRole).isValid())
if self.model.rowCount(QtCore.QModelIndex()) == 0:
return
# A valid index should have a valid QtCore.QVariant data
assert( self.model.index(0, 0, QtCore.QModelIndex()).isValid())
# shouldn't be able to set data on an invalid index
assert( self.model.setData( QtCore.QModelIndex(), QtCore.QVariant("foo"), QtCore.Qt.DisplayRole) == False)
# General Purpose roles that should return a QString
variant = self.model.data(self.model.index(0, 0, QtCore.QModelIndex()), QtCore.Qt.ToolTipRole)
if variant.isValid():
assert( variant.canConvert( QtCore.QVariant.String ) )
variant = self.model.data(self.model.index(0, 0, QtCore.QModelIndex()), QtCore.Qt.StatusTipRole)
if variant.isValid():
assert( variant.canConvert( QtCore.QVariant.String ) )
variant = self.model.data(self.model.index(0, 0, QtCore.QModelIndex()), QtCore.Qt.WhatsThisRole)
if variant.isValid():
assert( variant.canConvert( QtCore.QVariant.String ) )
# General Purpose roles that should return a QSize
variant = self.model.data(self.model.index(0, 0, QtCore.QModelIndex()), QtCore.Qt.SizeHintRole)
if variant.isValid():
assert( variant.canConvert( QtCore.QVariant.Size ) )
# General Purpose roles that should return a QFont
variant = self.model.data(self.model.index(0, 0, QtCore.QModelIndex()), QtCore.Qt.FontRole)
if variant.isValid():
assert( variant.canConvert( QtCore.QVariant.Font ) )
# Check that the alignment is one we know about
variant = self.model.data(self.model.index(0, 0, QtCore.QModelIndex()), QtCore.Qt.TextAlignmentRole)
if variant.isValid():
alignment = variant.toInt()[0]
assert( alignment == (alignment & int(QtCore.Qt.AlignHorizontal_Mask | QtCore.Qt.AlignVertical_Mask)))
# General Purpose roles that should return a QColor
variant = self.model.data(self.model.index(0, 0, QtCore.QModelIndex()), QtCore.Qt.BackgroundColorRole)
if variant.isValid():
assert( variant.canConvert( QtCore.QVariant.Color ) )
variant = self.model.data(self.model.index(0, 0, QtCore.QModelIndex()), QtCore.Qt.TextColorRole)
if variant.isValid():
assert( variant.canConvert( QtCore.QVariant.Color ) )
# Check that the "check state" is one we know about.
variant = self.model.data(self.model.index(0, 0, QtCore.QModelIndex()), QtCore.Qt.CheckStateRole)
if variant.isValid():
state = variant.toInt()[0]
assert( state == QtCore.Qt.Unchecked or
state == QtCore.Qt.PartiallyChecked or
state == QtCore.Qt.Checked )
def runAllTests(self):
"""run all tests after the model changed"""
if self.fetchingMore:
return
self.nonDestructiveBasicTest()
self.rowCount()
self.columnCount()
self.hasIndex()
self.index()
self.parent()
self.data()
def rowsAboutToBeInserted(self, parent, start, dummyEnd):
"""
Store what is about to be inserted to make sure it actually happens
"""
item = {}
item['parent'] = parent
item['oldSize'] = self.model.rowCount(parent)
item['last'] = self.model.data(self.model.index(start-1, 0, parent))
item['next'] = self.model.data(self.model.index(start, 0, parent))
self.insert.append(item)
def rowsInserted(self, parent, start, end):
"""
Confirm that what was said was going to happen actually did
"""
item = self.insert.pop()
assert(item['parent'] == parent)
assert(item['oldSize'] + (end - start + 1) == self.model.rowCount(parent))
assert(item['last'] == self.model.data(self.model.index(start-1, 0, item['parent'])))
# if item['next'] != self.model.data(self.model.index(end+1, 0, item['parent'])):
# qDebug << start << end
# for i in range(0, self.model.rowCount(QtCore.QModelIndex())):
# qDebug << self.model.index(i, 0).data().toString()
# qDebug() << item['next'] << self.model.data(model.index(end+1, 0, item['parent']))
assert(item['next'] == self.model.data(self.model.index(end+1, 0, item['parent'])))
def rowsAboutToBeRemoved(self, parent, start, end):
"""
        Store what is about to be removed to make sure it actually happens
"""
item = {}
item['parent'] = parent
item['oldSize'] = self.model.rowCount(parent)
item['last'] = self.model.data(self.model.index(start-1, 0, parent))
item['next'] = self.model.data(self.model.index(end+1, 0, parent))
self.remove.append(item)
def rowsRemoved(self, parent, start, end):
"""
Confirm that what was said was going to happen actually did
"""
item = self.remove.pop()
assert(item['parent'] == parent)
assert(item['oldSize'] - (end - start + 1) == self.model.rowCount(parent))
assert(item['last'] == self.model.data(self.model.index(start-1, 0, item['parent'])))
assert(item['next'] == self.model.data(self.model.index(start, 0, item['parent'])))
def layoutAboutToBeChanged(self):
"""
Store what is about to be changed
"""
for i in range(0, max(0, min( self.model.rowCount(), 100))):
self.changing.append(QtCore.QPersistentModelIndex( self.model.index( i, 0)))
def layoutChanged(self):
"""
Confirm that what was said was going to happen actually did
"""
for change in self.changing:
assert(change == self.model.index( change.row(), change.column(), change.parent()))
self.changing = []
def checkChildren(self, parent, depth = 0):
"""
Called from parent() test.
A self.model that returns an index of parent X should also return X when asking
for the parent of the index
This recursive function does pretty extensive testing on the whole self.model in an
effort to catch edge cases.
This function assumes that rowCount(QtCore.QModelIndex()), columnCount(QtCore.QModelIndex()) and index() already work.
If they have a bug it will point it out, but the above tests should have already
found the basic bugs because it is easier to figure out the problem in
        those tests than in this one
"""
# First just try walking back up the tree.
parentIdx = parent
while parentIdx.isValid():
parentIdx = parentIdx.parent()
#For self.models that are dynamically populated
if self.model.canFetchMore( parent ):
self.fetchingMore = True
self.model.fetchMore(parent)
self.fetchingMore = False
rows = self.model.rowCount(parent)
cols = self.model.columnCount(parent)
if rows > 0:
assert(self.model.hasChildren(parent))
# Some further testing against rows(), columns, and hasChildren()
assert( rows >= 0 )
assert( cols >= 0 )
if rows > 0:
assert(self.model.hasChildren(parent) == True)
# qDebug() << "parent:" << self.model.data(parent).toString() << "rows:" << rows
# << "columns:" << cols << "parent column:" << parent.column()
assert( self.model.hasIndex( rows+1, 0, parent) == False)
for row in range(0, rows):
if self.model.canFetchMore(parent):
self.fetchingMore = True
self.model.fetchMore(parent)
self.fetchingMore = False
assert(self.model.hasIndex(row, cols+1, parent) == False)
for column in range(0, cols):
assert(self.model.hasIndex(row, column, parent))
index = self.model.index(row, column, parent)
# rowCount(QtCore.QModelIndex()) and columnCount(QtCore.QModelIndex()) said that it existed...
assert(index.isValid() == True)
# index() should always return the same index when called twice in a row
modIdx = self.model.index(row, column, parent)
assert(index == modIdx)
# Make sure we get the same index if we request it twice in a row
idx1 = self.model.index(row, column, parent)
idx2 = self.model.index(row, column, parent)
assert( idx1 == idx2 )
# Some basic checking on the index that is returned
# assert( index.model() == self.model )
# This raises an error that is not part of the qbzr code.
# see http://www.opensubscriber.com/message/pyqt@riverbankcomputing.com/10335500.html
assert( index.row() == row )
assert( index.column() == column )
                # While you can technically return a QtCore.QVariant, usually this is a sign
                # of a bug in data(). Disable this if it really is ok in your self.model
assert( self.model.data(index, QtCore.Qt.DisplayRole).isValid() == True )
                # if the next test fails, here is some somewhat useful debug you can play with
# if self.model.parent(index) != parent:
# qDebug() << row << column << depth << self.model.data(index).toString()
# << self.model.data(parent).toString()
# qDebug() << index << parent << self.model.parent(index)
# # And a view that you can even use to show the self.model
# # view = QtGui.QTreeView()
# # view.setself.model(model)
# # view.show()
#
# Check that we can get back our real parent
parentIdx = self.model.parent( index )
assert( parentIdx.internalId() == parent.internalId() )
assert( parentIdx.row() == parent.row() )
# recursively go down the children
if self.model.hasChildren(index) and depth < 10:
# qDebug() << row << column << "hasChildren" << self.model.rowCount(index)
self.checkChildren(index, depth+1)
#else:
# if depth >= 10:
# qDebug() << "checked 10 deep"
# Make sure that after testing the children that the index doesn't change
newIdx = self.model.index(row, column, parent)
assert(index == newIdx)
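if __name__ == '__main__':
    # Illustrative sketch only (not shipped with kajongg): exercise the checks
    # against a plain QStandardItemModel. Assumes PyQt4 with QtGui available.
    import sys
    from PyQt4 import QtGui
    app = QtGui.QApplication(sys.argv)
    model = QtGui.QStandardItemModel()
    model.appendRow([QtGui.QStandardItem('hello'), QtGui.QStandardItem('world')])
    ModelTest(model, model)  # runAllTests() fires from the constructor
    print('model passed all checks')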
|
gpl-2.0
|
cikelengfeng/HTTPIDL
|
Sources/Compiler/docopt.py
|
94
|
19946
|
"""Pythonic command-line interface parser that will make you smile.
* http://docopt.org
* Repository and issue-tracker: https://github.com/docopt/docopt
* Licensed under terms of MIT license (see LICENSE-MIT)
* Copyright (c) 2013 Vladimir Keleshev, vladimir@keleshev.com
"""
import sys
import re
__all__ = ['docopt']
__version__ = '0.6.2'
class DocoptLanguageError(Exception):
"""Error in construction of usage-message by developer."""
class DocoptExit(SystemExit):
"""Exit in case user invoked program with incorrect arguments."""
usage = ''
def __init__(self, message=''):
SystemExit.__init__(self, (message + '\n' + self.usage).strip())
class Pattern(object):
def __eq__(self, other):
return repr(self) == repr(other)
def __hash__(self):
return hash(repr(self))
def fix(self):
self.fix_identities()
self.fix_repeating_arguments()
return self
def fix_identities(self, uniq=None):
"""Make pattern-tree tips point to same object if they are equal."""
if not hasattr(self, 'children'):
return self
uniq = list(set(self.flat())) if uniq is None else uniq
for i, c in enumerate(self.children):
if not hasattr(c, 'children'):
assert c in uniq
self.children[i] = uniq[uniq.index(c)]
else:
c.fix_identities(uniq)
def fix_repeating_arguments(self):
"""Fix elements that should accumulate/increment values."""
either = [list(c.children) for c in self.either.children]
for case in either:
for e in [c for c in case if case.count(c) > 1]:
if type(e) is Argument or type(e) is Option and e.argcount:
if e.value is None:
e.value = []
elif type(e.value) is not list:
e.value = e.value.split()
if type(e) is Command or type(e) is Option and e.argcount == 0:
e.value = 0
return self
@property
def either(self):
"""Transform pattern into an equivalent, with only top-level Either."""
# Currently the pattern will not be equivalent, but more "narrow",
# although good enough to reason about list arguments.
ret = []
groups = [[self]]
while groups:
children = groups.pop(0)
types = [type(c) for c in children]
if Either in types:
either = [c for c in children if type(c) is Either][0]
children.pop(children.index(either))
for c in either.children:
groups.append([c] + children)
elif Required in types:
required = [c for c in children if type(c) is Required][0]
children.pop(children.index(required))
groups.append(list(required.children) + children)
elif Optional in types:
optional = [c for c in children if type(c) is Optional][0]
children.pop(children.index(optional))
groups.append(list(optional.children) + children)
elif AnyOptions in types:
optional = [c for c in children if type(c) is AnyOptions][0]
children.pop(children.index(optional))
groups.append(list(optional.children) + children)
elif OneOrMore in types:
oneormore = [c for c in children if type(c) is OneOrMore][0]
children.pop(children.index(oneormore))
groups.append(list(oneormore.children) * 2 + children)
else:
ret.append(children)
return Either(*[Required(*e) for e in ret])
class ChildPattern(Pattern):
def __init__(self, name, value=None):
self.name = name
self.value = value
def __repr__(self):
return '%s(%r, %r)' % (self.__class__.__name__, self.name, self.value)
def flat(self, *types):
return [self] if not types or type(self) in types else []
def match(self, left, collected=None):
collected = [] if collected is None else collected
pos, match = self.single_match(left)
if match is None:
return False, left, collected
left_ = left[:pos] + left[pos + 1:]
same_name = [a for a in collected if a.name == self.name]
if type(self.value) in (int, list):
if type(self.value) is int:
increment = 1
else:
increment = ([match.value] if type(match.value) is str
else match.value)
if not same_name:
match.value = increment
return True, left_, collected + [match]
same_name[0].value += increment
return True, left_, collected
return True, left_, collected + [match]
class ParentPattern(Pattern):
def __init__(self, *children):
self.children = list(children)
def __repr__(self):
return '%s(%s)' % (self.__class__.__name__,
', '.join(repr(a) for a in self.children))
def flat(self, *types):
if type(self) in types:
return [self]
return sum([c.flat(*types) for c in self.children], [])
class Argument(ChildPattern):
def single_match(self, left):
for n, p in enumerate(left):
if type(p) is Argument:
return n, Argument(self.name, p.value)
return None, None
@classmethod
def parse(class_, source):
name = re.findall('(<\S*?>)', source)[0]
value = re.findall('\[default: (.*)\]', source, flags=re.I)
return class_(name, value[0] if value else None)
class Command(Argument):
def __init__(self, name, value=False):
self.name = name
self.value = value
def single_match(self, left):
for n, p in enumerate(left):
if type(p) is Argument:
if p.value == self.name:
return n, Command(self.name, True)
else:
break
return None, None
class Option(ChildPattern):
def __init__(self, short=None, long=None, argcount=0, value=False):
assert argcount in (0, 1)
self.short, self.long = short, long
self.argcount, self.value = argcount, value
self.value = None if value is False and argcount else value
@classmethod
def parse(class_, option_description):
short, long, argcount, value = None, None, 0, False
options, _, description = option_description.strip().partition(' ')
options = options.replace(',', ' ').replace('=', ' ')
for s in options.split():
if s.startswith('--'):
long = s
elif s.startswith('-'):
short = s
else:
argcount = 1
if argcount:
matched = re.findall('\[default: (.*)\]', description, flags=re.I)
value = matched[0] if matched else None
return class_(short, long, argcount, value)
def single_match(self, left):
for n, p in enumerate(left):
if self.name == p.name:
return n, p
return None, None
@property
def name(self):
return self.long or self.short
def __repr__(self):
return 'Option(%r, %r, %r, %r)' % (self.short, self.long,
self.argcount, self.value)
class Required(ParentPattern):
def match(self, left, collected=None):
collected = [] if collected is None else collected
l = left
c = collected
for p in self.children:
matched, l, c = p.match(l, c)
if not matched:
return False, left, collected
return True, l, c
class Optional(ParentPattern):
def match(self, left, collected=None):
collected = [] if collected is None else collected
for p in self.children:
m, left, collected = p.match(left, collected)
return True, left, collected
class AnyOptions(Optional):
"""Marker/placeholder for [options] shortcut."""
class OneOrMore(ParentPattern):
def match(self, left, collected=None):
assert len(self.children) == 1
collected = [] if collected is None else collected
l = left
c = collected
l_ = None
matched = True
times = 0
while matched:
# could it be that something didn't match but changed l or c?
matched, l, c = self.children[0].match(l, c)
times += 1 if matched else 0
if l_ == l:
break
l_ = l
if times >= 1:
return True, l, c
return False, left, collected
class Either(ParentPattern):
def match(self, left, collected=None):
collected = [] if collected is None else collected
outcomes = []
for p in self.children:
matched, _, _ = outcome = p.match(left, collected)
if matched:
outcomes.append(outcome)
if outcomes:
return min(outcomes, key=lambda outcome: len(outcome[1]))
return False, left, collected
class TokenStream(list):
def __init__(self, source, error):
self += source.split() if hasattr(source, 'split') else source
self.error = error
def move(self):
return self.pop(0) if len(self) else None
def current(self):
return self[0] if len(self) else None
def parse_long(tokens, options):
"""long ::= '--' chars [ ( ' ' | '=' ) chars ] ;"""
long, eq, value = tokens.move().partition('=')
assert long.startswith('--')
value = None if eq == value == '' else value
similar = [o for o in options if o.long == long]
if tokens.error is DocoptExit and similar == []: # if no exact match
similar = [o for o in options if o.long and o.long.startswith(long)]
if len(similar) > 1: # might be simply specified ambiguously 2+ times?
raise tokens.error('%s is not a unique prefix: %s?' %
(long, ', '.join(o.long for o in similar)))
elif len(similar) < 1:
argcount = 1 if eq == '=' else 0
o = Option(None, long, argcount)
options.append(o)
if tokens.error is DocoptExit:
o = Option(None, long, argcount, value if argcount else True)
else:
o = Option(similar[0].short, similar[0].long,
similar[0].argcount, similar[0].value)
if o.argcount == 0:
if value is not None:
raise tokens.error('%s must not have an argument' % o.long)
else:
if value is None:
if tokens.current() is None:
raise tokens.error('%s requires argument' % o.long)
value = tokens.move()
if tokens.error is DocoptExit:
o.value = value if value is not None else True
return [o]
def parse_shorts(tokens, options):
"""shorts ::= '-' ( chars )* [ [ ' ' ] chars ] ;"""
token = tokens.move()
assert token.startswith('-') and not token.startswith('--')
left = token.lstrip('-')
parsed = []
while left != '':
short, left = '-' + left[0], left[1:]
similar = [o for o in options if o.short == short]
if len(similar) > 1:
raise tokens.error('%s is specified ambiguously %d times' %
(short, len(similar)))
elif len(similar) < 1:
o = Option(short, None, 0)
options.append(o)
if tokens.error is DocoptExit:
o = Option(short, None, 0, True)
else: # why copying is necessary here?
o = Option(short, similar[0].long,
similar[0].argcount, similar[0].value)
value = None
if o.argcount != 0:
if left == '':
if tokens.current() is None:
raise tokens.error('%s requires argument' % short)
value = tokens.move()
else:
value = left
left = ''
if tokens.error is DocoptExit:
o.value = value if value is not None else True
parsed.append(o)
return parsed
def parse_pattern(source, options):
tokens = TokenStream(re.sub(r'([\[\]\(\)\|]|\.\.\.)', r' \1 ', source),
DocoptLanguageError)
result = parse_expr(tokens, options)
if tokens.current() is not None:
raise tokens.error('unexpected ending: %r' % ' '.join(tokens))
return Required(*result)
def parse_expr(tokens, options):
"""expr ::= seq ( '|' seq )* ;"""
seq = parse_seq(tokens, options)
if tokens.current() != '|':
return seq
result = [Required(*seq)] if len(seq) > 1 else seq
while tokens.current() == '|':
tokens.move()
seq = parse_seq(tokens, options)
result += [Required(*seq)] if len(seq) > 1 else seq
return [Either(*result)] if len(result) > 1 else result
def parse_seq(tokens, options):
"""seq ::= ( atom [ '...' ] )* ;"""
result = []
while tokens.current() not in [None, ']', ')', '|']:
atom = parse_atom(tokens, options)
if tokens.current() == '...':
atom = [OneOrMore(*atom)]
tokens.move()
result += atom
return result
def parse_atom(tokens, options):
"""atom ::= '(' expr ')' | '[' expr ']' | 'options'
| long | shorts | argument | command ;
"""
token = tokens.current()
result = []
if token in '([':
tokens.move()
matching, pattern = {'(': [')', Required], '[': [']', Optional]}[token]
result = pattern(*parse_expr(tokens, options))
if tokens.move() != matching:
raise tokens.error("unmatched '%s'" % token)
return [result]
elif token == 'options':
tokens.move()
return [AnyOptions()]
elif token.startswith('--') and token != '--':
return parse_long(tokens, options)
elif token.startswith('-') and token not in ('-', '--'):
return parse_shorts(tokens, options)
elif token.startswith('<') and token.endswith('>') or token.isupper():
return [Argument(tokens.move())]
else:
return [Command(tokens.move())]
def parse_argv(tokens, options, options_first=False):
"""Parse command-line argument vector.
If options_first:
argv ::= [ long | shorts ]* [ argument ]* [ '--' [ argument ]* ] ;
else:
argv ::= [ long | shorts | argument ]* [ '--' [ argument ]* ] ;
"""
parsed = []
while tokens.current() is not None:
if tokens.current() == '--':
return parsed + [Argument(None, v) for v in tokens]
elif tokens.current().startswith('--'):
parsed += parse_long(tokens, options)
elif tokens.current().startswith('-') and tokens.current() != '-':
parsed += parse_shorts(tokens, options)
elif options_first:
return parsed + [Argument(None, v) for v in tokens]
else:
parsed.append(Argument(None, tokens.move()))
return parsed
def parse_defaults(doc):
# in python < 2.7 you can't pass flags=re.MULTILINE
split = re.split('\n *(<\S+?>|-\S+?)', doc)[1:]
split = [s1 + s2 for s1, s2 in zip(split[::2], split[1::2])]
options = [Option.parse(s) for s in split if s.startswith('-')]
#arguments = [Argument.parse(s) for s in split if s.startswith('<')]
#return options, arguments
return options
def printable_usage(doc):
# in python < 2.7 you can't pass flags=re.IGNORECASE
usage_split = re.split(r'([Uu][Ss][Aa][Gg][Ee]:)', doc)
if len(usage_split) < 3:
raise DocoptLanguageError('"usage:" (case-insensitive) not found.')
if len(usage_split) > 3:
raise DocoptLanguageError('More than one "usage:" (case-insensitive).')
return re.split(r'\n\s*\n', ''.join(usage_split[1:]))[0].strip()
def formal_usage(printable_usage):
pu = printable_usage.split()[1:] # split and drop "usage:"
return '( ' + ' '.join(') | (' if s == pu[0] else s for s in pu[1:]) + ' )'
def extras(help, version, options, doc):
if help and any((o.name in ('-h', '--help')) and o.value for o in options):
print(doc.strip("\n"))
sys.exit()
if version and any(o.name == '--version' and o.value for o in options):
print(version)
sys.exit()
class Dict(dict):
def __repr__(self):
return '{%s}' % ',\n '.join('%r: %r' % i for i in sorted(self.items()))
def docopt(doc, argv=None, help=True, version=None, options_first=False):
"""Parse `argv` based on command-line interface described in `doc`.
`docopt` creates your command-line interface based on its
description that you pass as `doc`. Such description can contain
--options, <positional-argument>, commands, which could be
[optional], (required), (mutually | exclusive) or repeated...
Parameters
----------
doc : str
Description of your command-line interface.
argv : list of str, optional
Argument vector to be parsed. sys.argv[1:] is used if not
provided.
help : bool (default: True)
Set to False to disable automatic help on -h or --help
options.
version : any object
If passed, the object will be printed if --version is in
`argv`.
options_first : bool (default: False)
        Set to True to require options precede positional arguments,
        i.e. to forbid intermixing options and positional arguments.
Returns
-------
args : dict
A dictionary, where keys are names of command-line elements
such as e.g. "--verbose" and "<path>", and values are the
parsed values of those elements.
Example
-------
>>> from docopt import docopt
>>> doc = '''
Usage:
my_program tcp <host> <port> [--timeout=<seconds>]
my_program serial <port> [--baud=<n>] [--timeout=<seconds>]
my_program (-h | --help | --version)
Options:
-h, --help Show this screen and exit.
--baud=<n> Baudrate [default: 9600]
'''
>>> argv = ['tcp', '127.0.0.1', '80', '--timeout', '30']
>>> docopt(doc, argv)
{'--baud': '9600',
'--help': False,
'--timeout': '30',
'--version': False,
'<host>': '127.0.0.1',
'<port>': '80',
'serial': False,
'tcp': True}
See also
--------
* For video introduction see http://docopt.org
* Full documentation is available in README.rst as well as online
at https://github.com/docopt/docopt#readme
"""
if argv is None:
argv = sys.argv[1:]
DocoptExit.usage = printable_usage(doc)
options = parse_defaults(doc)
pattern = parse_pattern(formal_usage(DocoptExit.usage), options)
# [default] syntax for argument is disabled
#for a in pattern.flat(Argument):
# same_name = [d for d in arguments if d.name == a.name]
# if same_name:
# a.value = same_name[0].value
argv = parse_argv(TokenStream(argv, DocoptExit), list(options),
options_first)
pattern_options = set(pattern.flat(Option))
for ao in pattern.flat(AnyOptions):
doc_options = parse_defaults(doc)
ao.children = list(set(doc_options) - pattern_options)
#if any_options:
# ao.children += [Option(o.short, o.long, o.argcount)
# for o in argv if type(o) is Option]
extras(help, version, argv, doc)
matched, left, collected = pattern.fix().match(argv)
if matched and left == []: # better error message if left?
return Dict((a.name, a.value) for a in (pattern.flat() + collected))
raise DocoptExit()
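if __name__ == '__main__':
    # Quick self-check, illustrative only (not part of the upstream module):
    # parse a toy usage pattern against a hand-built argv instead of sys.argv.
    toy_doc = """Usage: prog [-v] <name>"""
    print(docopt(toy_doc, argv=['-v', 'world']))
    # expected: {'-v': True, '<name>': 'world'}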
|
mit
|
Joergen/zamboni
|
mkt/fireplace/tests/test_api.py
|
1
|
1450
|
import json
from nose.tools import eq_
import amo
from addons.models import AddonUpsell
from mkt.api.base import get_url, list_url
from mkt.api.tests import BaseAPI
from mkt.api.tests.test_oauth import get_absolute_url
from mkt.webapps.models import Webapp
from mkt.site.fixtures import fixture
class TestAppDetail(BaseAPI):
fixtures = fixture('webapp_337141')
def setUp(self):
super(TestAppDetail, self).setUp()
self.url = get_absolute_url(get_url('app', pk=337141),
api_name='fireplace')
def test_get(self):
res = self.client.get(self.url)
data = json.loads(res.content)
eq_(data['id'], '337141')
def test_get_slug(self):
Webapp.objects.get(pk=337141).update(app_slug='foo')
res = self.client.get(get_absolute_url(('api_dispatch_detail',
{'resource_name': 'app', 'app_slug': 'foo'}),
api_name='fireplace'))
data = json.loads(res.content)
eq_(data['id'], '337141')
def test_others(self):
url = get_absolute_url(list_url('app'), api_name='fireplace')
self._allowed_verbs(self.url, ['get'])
self._allowed_verbs(url, [])
def test_get_no_upsold(self):
free = Webapp.objects.create(status=amo.STATUS_PUBLIC)
AddonUpsell.objects.create(premium_id=337141, free=free)
res = self.client.get(self.url)
assert 'upsold' not in res.content
|
bsd-3-clause
|
jepcastelein/marketopy
|
marketo.py
|
1
|
9277
|
import requests
import logging
import time
class MarketoClient:
"""Basic Marketo Client"""
def __init__(self, identity, client_id, client_secret, api):
self.api_endpoint = api
self.identity_endpoint = identity
self.client_id = client_id
self.client_secret = client_secret
self.api_version = "v1"
self._fields = None
self._session = requests.Session()
self.refresh_auth_token()
def refresh_auth_token(self):
auth_url = "%s/oauth/token?grant_type=client_credentials" % (
self.identity_endpoint)
auth_url += "&client_id=%s&client_secret=%s" % (self.client_id,
self.client_secret)
debug("Calling %s" % auth_url)
r = requests.get(auth_url)
r.raise_for_status()
auth_data = r.json()
log("Access token acquired: %s expiring in %s" %
(auth_data['access_token'], auth_data['expires_in']))
self.auth_token = auth_data['access_token']
@property
def fields(self):
if self._fields is None:
res = "leads/describe.json"
fields = self.auth_get(res)["result"]
fields = [f["rest"]["name"] for f in fields]
self._fields = fields
return self._fields
def get_paging_token(self, since):
"""
Get a paging token.
        Format expected: 2014-10-06.
"""
resource = "activities/pagingtoken.json"
params = {"sinceDatetime": since}
data = self.auth_get(resource, params)
return data["nextPageToken"]
def get_leadchanges(self, since, fields):
"""
Get lead changes.
Params: fields = ["company", "score", "firstName"]
"""
return LeadChangeSet(self, since, fields, page_size=300)
def get_lead_by_id(self, id, fields=None):
"""Get a lead by its ID"""
resource = "lead/%i.json" % id
data = self.auth_get(resource)
return data
def get_leads_by_id(self, ids, fields=None):
params = {"filterType": "id",
"filterValues": ",".join(ids),
"fields": ",".join(fields)
}
resource = "leads.json"
data = self.auth_get(resource, params=params)
return data["result"]
def query_leads(self, query, return_fields=None):
"""Query leads by any parameters.
query: dict of fields / value to query on
        return_fields: array of which fields should be requested from Marketo
"""
resource = "leads.json"
params = {
"filterType": ",".join(query.keys()),
"filterValues": ",".join(query.values())}
if return_fields is not None:
params["fields"] = return_fields
data = self.auth_get(resource, params=params)
return data["result"]
def build_resource_url(self, resource):
res_url = "%s/%s/%s" % (self.api_endpoint, self.api_version, resource)
return res_url
def auth_get(self, resource, params=[], page_size=None):
"""
Make an authenticated GET to Marketo, check success and
return dict from json response.
page_size: page size, max and default 300
"""
headers = {"Authorization": "Bearer %s" % self.auth_token}
if page_size is not None:
params['batchSize'] = page_size
res_url = self.build_resource_url(resource)
r = self._session.get(res_url, headers=headers, params=params)
r.raise_for_status()
data = r.json()
if data["success"] is False:
err = data["errors"][0]
raise Exception("Error %s - %s, calling %s" %
(err["code"], err["message"], r.url))
time.sleep(20/80)
return data
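# Illustrative usage sketch (not part of the original module). The endpoint,
# client id and client secret below are placeholders for a real Marketo
# instance, so the calls are left commented out rather than executable:
#
#   client = MarketoClient("https://123-ABC-456.mktorest.com/identity",
#                          "<client-id>", "<client-secret>",
#                          "https://123-ABC-456.mktorest.com/rest")
#   for change in client.get_leadchanges(since="2014-10-06",
#                                        fields=["firstName", "company"]):
#       print(change)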
class Lead(object):
def __init__(self, client, id):
self._client = client
self._resource = "leads.json"
self.id = id
self._data_cache = None
self._default_fields = None
def __getattr__(self, name):
log("Looking for %s" % name)
if name not in self.fields:
raise AttributeError
if name in self._data:
return self._data[name]
elif name in self.fields:
self._load_data(name)
return self._data[name]
else:
raise AttributeError
@property
def fields(self):
return self._client.fields
@property
def _data(self):
if self._data_cache is None:
if self._default_fields is not None:
self._load_data(self._default_fields)
else:
self._load_data()
return self._data_cache
def _load_data(self, fields=None):
"Load lead data for fields provided, or use default fields."
resource = "leads/%s.json" % (self.id)
params = {}
if fields is not None:
if type(fields) is str:
fields = [fields]
params = {"fields": ",".join(fields)}
result = self._client.auth_get(resource, params)["result"][0]
if self._data_cache is not None:
newdata = self._data_cache.copy()
newdata.update(result)
self._data_cache = newdata
else:
self._data_cache = result
class LeadChangeSet:
"""
REST Resource: activities/leadchanges.json
Represents a set of changed leads; new leads are not taken into account.
TODO: handle new leads
"""
def __init__(self, client, since, fields, page_size):
self.resource = "activities/leadchanges.json"
self.client = client
self.since = since
self.fields = fields
self.page_size = page_size
self.has_more_result = False
self.next_page_token = None
self.changes = []
self.fetch_next_page()
def __iter__(self):
return self
def __next__(self):
if len(self.changes) == 0 and not self.has_more_result:
raise StopIteration
if len(self.changes) == 0 and self.has_more_result:
self.fetch_next_page()
return self.changes.pop(0)
def fetch_next_page(self):
debug("[mkto] Fetching next page for LeadChangeSet")
if self.next_page_token is None:
self.next_page_token = self.client.get_paging_token(
since=self.since)
params = {
"fields": ','.join(self.fields),
"nextPageToken": self.next_page_token}
data = self.client.auth_get(self.resource, params, self.page_size)
# If moreResult is true, set flag on object and next page token, if
# not, reset them
if data["moreResult"]:
self.has_more_result = True
self.next_page_token = data["nextPageToken"]
else:
self.has_more_result = False
self.next_page_token = None
for lead in self.prepare_results(data["result"]):
self.changes.append(lead)
def prepare_results(self, results):
"""
Iterates over change results and output an
array with changed fields and values
"""
for c in results:
changed_fields = {}
changed_fields["id"] = c['leadId']
# if no fields updated -> new lead -> skip
if len(c["fields"]) == 0:
continue
for f in c["fields"]:
changed_fields[f["name"]] = f["newValue"]
yield changed_fields
class PagedMarketoResult:
def __init__(self, client, resource, since, fields, page_size):
self.resource = resource
self.client = client
self.since = since
self.fields = fields
self.page_size = page_size
self.has_more_result = False
self.next_page_token = None
self.changes = []
self.fetch_next_page()
def __iter__(self):
return self
def __next__(self):
if len(self.changes) == 0 and not self.has_more_result:
raise StopIteration
if len(self.changes) == 0 and self.has_more_result:
self.fetch_next_page()
return self.changes.pop(0)
def fetch_next_page(self):
debug("fetching next page")
if self.next_page_token is None:
self.next_page_token = self.client.get_paging_token(
since=self.since)
params = {
"fields": ','.join(self.fields),
"nextPageToken": self.next_page_token}
data = self.client.auth_get(self.resource, params, self.page_size)
# If moreResult is true, set flag on object and next page token, if
# not, reset them
if data["moreResult"]:
self.has_more_result = True
self.next_page_token = data["nextPageToken"]
else:
self.has_more_result = False
self.next_page_token = None
for lead in self.prepare_results(data["result"]):
self.changes.append(lead)
def debug(msg):
logger = logging.getLogger(__name__)
logger.debug(msg)
def log(msg):
logger = logging.getLogger(__name__)
logger.info(msg)
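# ---------------------------------------------------------------------------
# Usage sketch (illustrative only, not part of the library): the endpoint
# URLs, credentials and field names below are hypothetical placeholders.
#
#   client = MarketoClient(
#       identity="https://123-ABC-456.mktorest.com/identity",
#       client_id="my-client-id",
#       client_secret="my-client-secret",
#       api="https://123-ABC-456.mktorest.com/rest")
#   for change in client.get_leadchanges(since="2014-10-06",
#                                        fields=["firstName", "company"]):
#       print(change)
# ---------------------------------------------------------------------------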
|
apache-2.0
|
jcassee/django-analytical
|
tests/unit/test_tag_hubspot.py
|
1
|
1602
|
"""
Tests for the HubSpot template tags and filters.
"""
import pytest
from django.http import HttpRequest
from django.template import Context
from django.test.utils import override_settings
from utils import TagTestCase
from analytical.templatetags.hubspot import HubSpotNode
from analytical.utils import AnalyticalException
@override_settings(HUBSPOT_PORTAL_ID='1234')
class HubSpotTagTestCase(TagTestCase):
"""
Tests for the ``hubspot`` template tag.
"""
def test_tag(self):
r = self.render_tag('hubspot', 'hubspot')
assert (
"n.id=i;n.src='//js.hs-analytics.net/analytics/'"
"+(Math.ceil(new Date()/r)*r)+'/1234.js';"
) in r
def test_node(self):
r = HubSpotNode().render(Context())
assert (
"n.id=i;n.src='//js.hs-analytics.net/analytics/'"
"+(Math.ceil(new Date()/r)*r)+'/1234.js';"
) in r
@override_settings(HUBSPOT_PORTAL_ID=None)
def test_no_portal_id(self):
with pytest.raises(AnalyticalException):
HubSpotNode()
@override_settings(HUBSPOT_PORTAL_ID='wrong')
def test_wrong_portal_id(self):
with pytest.raises(AnalyticalException):
HubSpotNode()
@override_settings(ANALYTICAL_INTERNAL_IPS=['1.1.1.1'])
def test_render_internal_ip(self):
req = HttpRequest()
req.META['REMOTE_ADDR'] = '1.1.1.1'
context = Context({'request': req})
r = HubSpotNode().render(context)
assert r.startswith('<!-- HubSpot disabled on internal IP address')
assert r.endswith('-->')
|
mit
|
Idematica/django-oscar
|
tests/functional/customer/auth_tests.py
|
6
|
4801
|
import re
from django.core import mail
from django.core.urlresolvers import reverse
from django_webtest import WebTest
from oscar.test.testcases import WebTestCase
from oscar.core.compat import get_user_model
User = get_user_model()
class TestAUserWhoseForgottenHerPassword(WebTest):
def test_can_reset_her_password(self):
username, email, password = 'lucy', 'lucy@example.com', 'password'
User.objects.create_user(username, email, password)
# Fill in password reset form
page = self.app.get(reverse('password-reset'))
form = page.forms['password_reset_form']
form['email'] = email
response = form.submit()
# Response should be a redirect and an email should have been sent
self.assertEqual(302, response.status_code)
self.assertEqual(1, len(mail.outbox))
# Extract URL from email
email_body = mail.outbox[0].body
urlfinder = re.compile(r"http://example.com(?P<path>[-A-Za-z0-9\/\._]+)", re.MULTILINE)
matches = urlfinder.search(email_body)
self.assertTrue('path' in matches.groupdict())
path = matches.groupdict()['path']
# Reset password and check we get redirect
reset_page = self.app.get(path)
form = reset_page.forms['password_reset_form']
form['new_password1'] = 'monkey'
form['new_password2'] = 'monkey'
response = form.submit()
self.assertEqual(302, response.status_code)
# Now attempt to login with new password
url = reverse('customer:login')
form = self.app.get(url).forms['login_form']
form['login-username'] = email
form['login-password'] = 'monkey'
response = form.submit('login_submit')
self.assertEqual(302, response.status_code)
class TestAnAuthenticatedUser(WebTestCase):
is_anonymous = False
def test_receives_an_email_when_their_password_is_changed(self):
page = self.get(reverse('customer:change-password'))
form = page.forms['change_password_form']
form['old_password'] = self.password
form['new_password1'] = u'anotherfancypassword'
form['new_password2'] = u'anotherfancypassword'
page = form.submit()
self.assertEquals(len(mail.outbox), 1)
self.assertIn("your password has been changed", mail.outbox[0].body)
def test_cannot_access_reset_password_page(self):
response = self.get(reverse('password-reset'), status=403)
self.assertEqual(403, response.status_code)
def test_does_not_receive_an_email_when_their_profile_is_updated_but_email_address_not_changed(self):
page = self.get(reverse('customer:profile-update'))
form = page.forms['profile_form']
form['first_name'] = "Terry"
form.submit()
self.assertEquals(len(mail.outbox), 0)
def test_receives_an_email_when_their_email_address_is_changed(self):
page = self.get(reverse('customer:profile-update'))
form = page.forms['profile_form']
new_email = 'a.new.email@user.com'
form['email'] = new_email
page = form.submit()
self.assertEquals(len(mail.outbox), 1)
self.assertEquals(mail.outbox[0].to[0], self.email)
self.assertEquals(User.objects.get(id=self.user.id).email, new_email)
self.assertIn("your email address has been changed",
mail.outbox[0].body)
class TestAnAnonymousUser(WebTestCase):
def assertCanLogin(self, email, password):
url = reverse('customer:login')
form = self.app.get(url).forms['login_form']
form['login-username'] = email
form['login-password'] = password
response = form.submit('login_submit')
self.assertRedirectsTo(response, 'customer:summary')
def test_can_login(self):
email, password = 'd@d.com', 'mypassword'
User.objects.create_user('_', email, password)
self.assertCanLogin(email, password)
def test_can_login_with_email_containing_capitals_in_local_part(self):
email, password = 'Andrew.Smith@test.com', 'mypassword'
User.objects.create_user('_', email, password)
self.assertCanLogin(email, password)
def test_can_login_with_email_containing_capitals_in_host(self):
email, password = 'Andrew.Smith@teSt.com', 'mypassword'
User.objects.create_user('_', email, password)
self.assertCanLogin(email, password)
def test_can_register(self):
url = reverse('customer:register')
form = self.app.get(url).forms['register_form']
form['email'] = 'terry@boom.com'
form['password1'] = 'hedgehog'
form['password2'] = 'hedgehog'
response = form.submit()
self.assertRedirectsTo(response, 'customer:summary')
|
bsd-3-clause
|
walty8/trac
|
tracopt/ticket/deleter.py
|
1
|
7165
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2010 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
from genshi.builder import tag
from genshi.filters import Transformer
from genshi.filters.transform import StreamBuffer
from trac.attachment import Attachment
from trac.core import Component, TracError, implements
from trac.ticket.model import Ticket
from trac.ticket.web_ui import TicketModule
from trac.util import get_reporter_id
from trac.util.datefmt import from_utimestamp
from trac.util.presentation import captioned_button
from trac.util.translation import _
from trac.web.api import IRequestFilter, IRequestHandler, ITemplateStreamFilter
from trac.web.chrome import ITemplateProvider, add_notice, add_stylesheet
class TicketDeleter(Component):
"""Ticket and ticket comment deleter.
This component allows deleting ticket comments and complete tickets. For
users having `TICKET_ADMIN` permission, it adds a "Delete" button next to
each "Reply" button on the page. The button in the ticket description
requests deletion of the complete ticket, and the buttons in the change
history request deletion of a single comment.
'''Comment and ticket deletion are irreversible (and therefore
''dangerous'') operations.''' For that reason, a confirmation step is
requested. The confirmation page shows the ticket box (in the case of a
ticket deletion) or the ticket change (in the case of a comment deletion).
"""
implements(ITemplateProvider, ITemplateStreamFilter, IRequestFilter,
IRequestHandler)
# ITemplateProvider methods
def get_htdocs_dirs(self):
return []
def get_templates_dirs(self):
from pkg_resources import resource_filename
return [resource_filename(__name__, 'templates')]
# ITemplateStreamFilter methods
def filter_stream(self, req, method, filename, stream, data):
if filename not in ('ticket.html', 'ticket_preview.html'):
return stream
ticket = data.get('ticket')
if not (ticket and ticket.exists
and 'TICKET_ADMIN' in req.perm(ticket.resource)):
return stream
# Insert "Delete" buttons for ticket description and each comment
def delete_ticket():
return tag.form(
tag.div(
tag.input(type='hidden', name='action', value='delete'),
tag.input(type='submit',
value=captioned_button(req, u'–', # 'EN DASH'
_("Delete")),
title=_('Delete ticket'),
class_="trac-delete"),
class_="inlinebuttons"),
action='#', method='get')
def delete_comment():
for event in buffer:
cnum, cdate = event[1][1].get('id')[12:].split('-', 1)
return tag.form(
tag.div(
tag.input(type='hidden', name='action',
value='delete-comment'),
tag.input(type='hidden', name='cnum', value=cnum),
tag.input(type='hidden', name='cdate', value=cdate),
tag.input(type='submit',
value=captioned_button(req, u'–', # 'EN DASH'
_("Delete")),
title=_('Delete comment %(num)s', num=cnum),
class_="trac-delete"),
class_="inlinebuttons"),
action='#', method='get')
buffer = StreamBuffer()
return stream | Transformer('//div[@class="description"]'
'/h3[@id="comment:description"]') \
.after(delete_ticket).end() \
.select('//div[starts-with(@class, "change")]/@id') \
.copy(buffer).end() \
.select('//div[starts-with(@class, "change") and @id]'
'//div[@class="trac-ticket-buttons"]') \
.append(delete_comment)
# IRequestFilter methods
def pre_process_request(self, req, handler):
if handler is not TicketModule(self.env):
return handler
action = req.args.get('action')
if action in ('delete', 'delete-comment'):
return self
else:
return handler
def post_process_request(self, req, template, data, content_type):
return template, data, content_type
# IRequestHandler methods
def match_request(self, req):
return False
def process_request(self, req):
id = int(req.args.get('id'))
req.perm('ticket', id).require('TICKET_ADMIN')
ticket = Ticket(self.env, id)
action = req.args['action']
cnum = req.args.get('cnum')
if req.method == 'POST':
if 'cancel' in req.args:
href = req.href.ticket(id)
if action == 'delete-comment':
href += '#comment:%s' % cnum
req.redirect(href)
if action == 'delete':
ticket.delete()
add_notice(req, _('The ticket #%(id)s has been deleted.',
id=ticket.id))
req.redirect(req.href())
elif action == 'delete-comment':
cdate = from_utimestamp(long(req.args.get('cdate')))
ticket.delete_change(cdate=cdate)
add_notice(req, _('The ticket comment %(num)s on ticket '
'#%(id)s has been deleted.',
num=cnum, id=ticket.id))
req.redirect(req.href.ticket(id))
tm = TicketModule(self.env)
data = tm._prepare_data(req, ticket)
tm._insert_ticket_data(req, ticket, data,
get_reporter_id(req, 'author'), {})
data.update(action=action, cdate=None)
if action == 'delete-comment':
data['cdate'] = req.args.get('cdate')
cdate = from_utimestamp(long(data['cdate']))
for change in data['changes']:
if change.get('date') == cdate:
data['change'] = change
data['cnum'] = change.get('cnum')
break
else:
raise TracError(_('Comment %(num)s not found', num=cnum))
elif action == 'delete':
attachments = Attachment.select(self.env, ticket.realm, ticket.id)
data.update(attachments=list(attachments))
add_stylesheet(req, 'common/css/ticket.css')
return 'ticket_delete.html', data, None
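# Usage sketch (a hedged note, not part of the original module): as an optional
# component shipped under tracopt/, this deleter is normally disabled and is
# typically switched on through trac.ini, for example:
#
#   [components]
#   tracopt.ticket.deleter.ticketdeleter = enabled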
|
bsd-3-clause
|
k3nnyfr/s2a_fr-nsis
|
s2a/Python/Lib/lib-tk/tkColorChooser.py
|
149
|
1786
|
# tk common colour chooser dialogue
#
# this module provides an interface to the native color dialogue
# available in Tk 4.2 and newer.
#
# written by Fredrik Lundh, May 1997
#
# fixed initialcolor handling in August 1998
#
#
# options (all have default values):
#
# - initialcolor: colour to mark as selected when dialog is displayed
# (given as an RGB triplet or a Tk color string)
#
# - parent: which window to place the dialog on top of
#
# - title: dialog title
#
from tkCommonDialog import Dialog
#
# color chooser class
class Chooser(Dialog):
"Ask for a color"
command = "tk_chooseColor"
def _fixoptions(self):
try:
# make sure initialcolor is a tk color string
color = self.options["initialcolor"]
if isinstance(color, tuple):
# assume an RGB triplet
self.options["initialcolor"] = "#%02x%02x%02x" % color
except KeyError:
pass
def _fixresult(self, widget, result):
# result can be several things: an empty tuple, an empty string or
# a Tcl_Obj, so this somewhat weird check handles that
if not result or not str(result):
return None, None # canceled
# to simplify application code, the color chooser returns
# an RGB tuple together with the Tk color string
r, g, b = widget.winfo_rgb(result)
return (r/256, g/256, b/256), str(result)
#
# convenience stuff
def askcolor(color = None, **options):
"Ask for a color"
if color:
options = options.copy()
options["initialcolor"] = color
return Chooser(**options).show()
# --------------------------------------------------------------------
# test stuff
if __name__ == "__main__":
print "color", askcolor()
|
gpl-3.0
|
pombredanne/algos-urv
|
gdata/youtube/service.py
|
5
|
58040
|
#!/usr/bin/python
#
# Copyright (C) 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""YouTubeService extends GDataService to streamline YouTube operations.
YouTubeService: Provides methods to perform CRUD operations on YouTube feeds.
Extends GDataService.
"""
__author__ = ('api.stephaniel@gmail.com (Stephanie Liu), '
'api.jhartmann@gmail.com (Jochen Hartmann)')
try:
from xml.etree import cElementTree as ElementTree
except ImportError:
try:
import cElementTree as ElementTree
except ImportError:
try:
from xml.etree import ElementTree
except ImportError:
from elementtree import ElementTree
import os
import atom
import gdata
import gdata.service
import gdata.youtube
YOUTUBE_SERVER = 'gdata.youtube.com'
YOUTUBE_SERVICE = 'youtube'
YOUTUBE_CLIENTLOGIN_AUTHENTICATION_URL = 'https://www.google.com/youtube/accounts/ClientLogin'
YOUTUBE_SUPPORTED_UPLOAD_TYPES = ('mov', 'avi', 'wmv', 'mpg', 'quicktime',
'flv', 'mp4', 'x-flv')
YOUTUBE_QUERY_VALID_TIME_PARAMETERS = ('today', 'this_week', 'this_month',
'all_time')
YOUTUBE_QUERY_VALID_ORDERBY_PARAMETERS = ('published', 'viewCount', 'rating',
'relevance')
YOUTUBE_QUERY_VALID_RACY_PARAMETERS = ('include', 'exclude')
YOUTUBE_QUERY_VALID_FORMAT_PARAMETERS = ('1', '5', '6')
YOUTUBE_STANDARDFEEDS = ('most_recent', 'recently_featured',
'top_rated', 'most_viewed','watch_on_mobile')
YOUTUBE_UPLOAD_URI = 'http://uploads.gdata.youtube.com/feeds/api/users'
YOUTUBE_UPLOAD_TOKEN_URI = 'http://gdata.youtube.com/action/GetUploadToken'
YOUTUBE_VIDEO_URI = 'http://gdata.youtube.com/feeds/api/videos'
YOUTUBE_USER_FEED_URI = 'http://gdata.youtube.com/feeds/api/users'
YOUTUBE_PLAYLIST_FEED_URI = 'http://gdata.youtube.com/feeds/api/playlists'
YOUTUBE_STANDARD_FEEDS = 'http://gdata.youtube.com/feeds/api/standardfeeds'
YOUTUBE_STANDARD_TOP_RATED_URI = '%s/%s' % (YOUTUBE_STANDARD_FEEDS, 'top_rated')
YOUTUBE_STANDARD_MOST_VIEWED_URI = '%s/%s' % (YOUTUBE_STANDARD_FEEDS,
'most_viewed')
YOUTUBE_STANDARD_RECENTLY_FEATURED_URI = '%s/%s' % (YOUTUBE_STANDARD_FEEDS,
'recently_featured')
YOUTUBE_STANDARD_WATCH_ON_MOBILE_URI = '%s/%s' % (YOUTUBE_STANDARD_FEEDS,
'watch_on_mobile')
YOUTUBE_STANDARD_TOP_FAVORITES_URI = '%s/%s' % (YOUTUBE_STANDARD_FEEDS,
'top_favorites')
YOUTUBE_STANDARD_MOST_RECENT_URI = '%s/%s' % (YOUTUBE_STANDARD_FEEDS,
'most_recent')
YOUTUBE_STANDARD_MOST_DISCUSSED_URI = '%s/%s' % (YOUTUBE_STANDARD_FEEDS,
'most_discussed')
YOUTUBE_STANDARD_MOST_LINKED_URI = '%s/%s' % (YOUTUBE_STANDARD_FEEDS,
'most_linked')
YOUTUBE_STANDARD_MOST_RESPONDED_URI = '%s/%s' % (YOUTUBE_STANDARD_FEEDS,
'most_responded')
YOUTUBE_SCHEMA = 'http://gdata.youtube.com/schemas'
YOUTUBE_RATING_LINK_REL = '%s#video.ratings' % YOUTUBE_SCHEMA
YOUTUBE_COMPLAINT_CATEGORY_SCHEME = '%s/%s' % (YOUTUBE_SCHEMA,
'complaint-reasons.cat')
YOUTUBE_SUBSCRIPTION_CATEGORY_SCHEME = '%s/%s' % (YOUTUBE_SCHEMA,
'subscriptiontypes.cat')
YOUTUBE_COMPLAINT_CATEGORY_TERMS = ('PORN', 'VIOLENCE', 'HATE', 'DANGEROUS',
'RIGHTS', 'SPAM')
YOUTUBE_CONTACT_STATUS = ('accepted', 'rejected')
YOUTUBE_CONTACT_CATEGORY = ('Friends', 'Family')
UNKOWN_ERROR = 1000
YOUTUBE_BAD_REQUEST = 400
YOUTUBE_CONFLICT = 409
YOUTUBE_INTERNAL_SERVER_ERROR = 500
YOUTUBE_INVALID_ARGUMENT = 601
YOUTUBE_INVALID_CONTENT_TYPE = 602
YOUTUBE_NOT_A_VIDEO = 603
YOUTUBE_INVALID_KIND = 604
class Error(Exception):
"""Base class for errors within the YouTube service."""
pass
class RequestError(Error):
"""Error class that is thrown in response to an invalid HTTP Request."""
pass
class YouTubeError(Error):
"""YouTube service specific error class."""
pass
class YouTubeService(gdata.service.GDataService):
"""Client for the YouTube service.
Performs all documented Google Data YouTube API functions, such as inserting,
updating and deleting videos, comments, playlist, subscriptions etc.
YouTube Service requires authentication for any write, update or delete
actions.
Attributes:
email: An optional string identifying the user. Required only for
authenticated actions.
password: An optional string identifying the user's password.
source: An optional string identifying the name of your application.
server: An optional address of the YouTube API server. gdata.youtube.com
is provided as the default value.
additional_headers: An optional dictionary containing additional headers
to be passed along with each request. Use to store developer key.
client_id: An optional string identifying your application, required for
authenticated requests, along with a developer key.
developer_key: An optional string value. Register your application at
http://code.google.com/apis/youtube/dashboard to obtain a (free) key.
"""
def __init__(self, email=None, password=None, source=None,
server=YOUTUBE_SERVER, additional_headers=None, client_id=None,
developer_key=None, **kwargs):
"""Creates a client for the YouTube service.
Args:
email: string (optional) The user's email address, used for
authentication.
password: string (optional) The user's password.
source: string (optional) The name of the user's application.
server: string (optional) The name of the server to which a connection
will be opened. Default value: 'gdata.youtube.com'.
client_id: string (optional) Identifies your application, required for
authenticated requests, along with a developer key.
developer_key: string (optional) Register your application at
http://code.google.com/apis/youtube/dashboard to obtain a (free) key.
**kwargs: The other parameters to pass to gdata.service.GDataService
constructor.
"""
gdata.service.GDataService.__init__(
self, email=email, password=password, service=YOUTUBE_SERVICE,
source=source, server=server, additional_headers=additional_headers,
**kwargs)
if client_id is not None:
self.additional_headers['X-Gdata-Client'] = client_id
if developer_key is not None:
self.additional_headers['X-GData-Key'] = 'key=%s' % developer_key
self.auth_service_url = YOUTUBE_CLIENTLOGIN_AUTHENTICATION_URL
def GetYouTubeVideoFeed(self, uri):
"""Retrieve a YouTubeVideoFeed.
Args:
uri: A string representing the URI of the feed that is to be retrieved.
Returns:
A YouTubeVideoFeed if successfully retrieved.
"""
return self.Get(uri, converter=gdata.youtube.YouTubeVideoFeedFromString)
def GetYouTubeVideoEntry(self, uri=None, video_id=None):
"""Retrieve a YouTubeVideoEntry.
Either a uri or a video_id must be provided.
Args:
uri: An optional string representing the URI of the entry that is to
be retrieved.
video_id: An optional string representing the ID of the video.
Returns:
A YouTubeVideoEntry if successfully retrieved.
Raises:
YouTubeError: You must provide at least a uri or a video_id to the
GetYouTubeVideoEntry() method.
"""
if uri is None and video_id is None:
raise YouTubeError('You must provide at least a uri or a video_id '
'to the GetYouTubeVideoEntry() method')
elif video_id and not uri:
uri = '%s/%s' % (YOUTUBE_VIDEO_URI, video_id)
return self.Get(uri, converter=gdata.youtube.YouTubeVideoEntryFromString)
def GetYouTubeContactFeed(self, uri=None, username='default'):
"""Retrieve a YouTubeContactFeed.
Either a uri or a username must be provided.
Args:
uri: An optional string representing the URI of the contact feed that
is to be retrieved.
username: An optional string representing the username. Defaults to the
currently authenticated user.
Returns:
A YouTubeContactFeed if successfully retrieved.
Raises:
YouTubeError: You must provide at least a uri or a username to the
GetYouTubeContactFeed() method.
"""
if uri is None:
uri = '%s/%s/%s' % (YOUTUBE_USER_FEED_URI, username, 'contacts')
return self.Get(uri, converter=gdata.youtube.YouTubeContactFeedFromString)
def GetYouTubeContactEntry(self, uri):
"""Retrieve a YouTubeContactEntry.
Args:
uri: A string representing the URI of the contact entry that is to
be retrieved.
Returns:
A YouTubeContactEntry if successfully retrieved.
"""
return self.Get(uri, converter=gdata.youtube.YouTubeContactEntryFromString)
def GetYouTubeVideoCommentFeed(self, uri=None, video_id=None):
"""Retrieve a YouTubeVideoCommentFeed.
Either a uri or a video_id must be provided.
Args:
uri: An optional string representing the URI of the comment feed that
is to be retrieved.
video_id: An optional string representing the ID of the video for which
to retrieve the comment feed.
Returns:
A YouTubeVideoCommentFeed if successfully retrieved.
Raises:
YouTubeError: You must provide at least a uri or a video_id to the
GetYouTubeVideoCommentFeed() method.
"""
if uri is None and video_id is None:
raise YouTubeError('You must provide at least a uri or a video_id '
'to the GetYouTubeVideoCommentFeed() method')
elif video_id and not uri:
uri = '%s/%s/%s' % (YOUTUBE_VIDEO_URI, video_id, 'comments')
return self.Get(
uri, converter=gdata.youtube.YouTubeVideoCommentFeedFromString)
def GetYouTubeVideoCommentEntry(self, uri):
"""Retrieve a YouTubeVideoCommentEntry.
Args:
uri: A string representing the URI of the comment entry that is to
be retrieved.
Returns:
A YouTubeCommentEntry if successfully retrieved.
"""
return self.Get(
uri, converter=gdata.youtube.YouTubeVideoCommentEntryFromString)
def GetYouTubeUserFeed(self, uri=None, username=None):
"""Retrieve a YouTubeVideoFeed of user uploaded videos
Either a uri or a username must be provided. This will retrieve the list
of videos uploaded by the specified user. The uri will be of the format
"http://gdata.youtube.com/feeds/api/users/{username}/uploads".
Args:
uri: An optional string representing the URI of the user feed that is
to be retrieved.
username: An optional string representing the username.
Returns:
A YouTubeUserFeed if successfully retrieved.
Raises:
YouTubeError: You must provide at least a uri or a username to the
GetYouTubeUserFeed() method.
"""
if uri is None and username is None:
raise YouTubeError('You must provide at least a uri or a username '
'to the GetYouTubeUserFeed() method')
elif username and not uri:
uri = '%s/%s/%s' % (YOUTUBE_USER_FEED_URI, username, 'uploads')
return self.Get(uri, converter=gdata.youtube.YouTubeUserFeedFromString)
def GetYouTubeUserEntry(self, uri=None, username=None):
"""Retrieve a YouTubeUserEntry.
Either a uri or a username must be provided.
Args:
uri: An optional string representing the URI of the user entry that is
to be retrieved.
username: An optional string representing the username.
Returns:
A YouTubeUserEntry if successfully retrieved.
Raises:
YouTubeError: You must provide at least a uri or a username to the
GetYouTubeUserEntry() method.
"""
if uri is None and username is None:
raise YouTubeError('You must provide at least a uri or a username '
'to the GetYouTubeUserEntry() method')
elif username and not uri:
uri = '%s/%s' % (YOUTUBE_USER_FEED_URI, username)
return self.Get(uri, converter=gdata.youtube.YouTubeUserEntryFromString)
def GetYouTubePlaylistFeed(self, uri=None, username='default'):
"""Retrieve a YouTubePlaylistFeed (a feed of playlists for a user).
Either a uri or a username must be provided.
Args:
uri: An optional string representing the URI of the playlist feed that
is to be retrieved.
username: An optional string representing the username. Defaults to the
currently authenticated user.
Returns:
A YouTubePlaylistFeed if successfully retrieved.
Raises:
YouTubeError: You must provide at least a uri or a username to the
GetYouTubePlaylistFeed() method.
"""
if uri is None:
uri = '%s/%s/%s' % (YOUTUBE_USER_FEED_URI, username, 'playlists')
return self.Get(uri, converter=gdata.youtube.YouTubePlaylistFeedFromString)
def GetYouTubePlaylistEntry(self, uri):
"""Retrieve a YouTubePlaylistEntry.
Args:
uri: A string representing the URI of the playlist feed that is to
be retrieved.
Returns:
A YouTubePlaylistEntry if successfully retrieved.
"""
return self.Get(uri, converter=gdata.youtube.YouTubePlaylistEntryFromString)
def GetYouTubePlaylistVideoFeed(self, uri=None, playlist_id=None):
"""Retrieve a YouTubePlaylistVideoFeed (a feed of videos on a playlist).
Either a uri or a playlist_id must be provided.
Args:
uri: An optional string representing the URI of the playlist video feed
that is to be retrieved.
playlist_id: An optional string representing the Id of the playlist whose
playlist video feed is to be retrieved.
Returns:
A YouTubePlaylistVideoFeed if successfully retrieved.
Raises:
YouTubeError: You must provide at least a uri or a playlist_id to the
GetYouTubePlaylistVideoFeed() method.
"""
if uri is None and playlist_id is None:
raise YouTubeError('You must provide at least a uri or a playlist_id '
'to the GetYouTubePlaylistVideoFeed() method')
elif playlist_id and not uri:
uri = '%s/%s' % (YOUTUBE_PLAYLIST_FEED_URI, playlist_id)
return self.Get(
uri, converter=gdata.youtube.YouTubePlaylistVideoFeedFromString)
def GetYouTubeVideoResponseFeed(self, uri=None, video_id=None):
"""Retrieve a YouTubeVideoResponseFeed.
Either a uri or a playlist_id must be provided.
Args:
uri: An optional string representing the URI of the video response feed
that is to be retrieved.
video_id: An optional string representing the ID of the video whose
response feed is to be retrieved.
Returns:
A YouTubeVideoResponseFeed if successfully retrieved.
Raises:
YouTubeError: You must provide at least a uri or a video_id to the
GetYouTubeVideoResponseFeed() method.
"""
if uri is None and video_id is None:
raise YouTubeError('You must provide at least a uri or a video_id '
'to the GetYouTubeVideoResponseFeed() method')
elif video_id and not uri:
uri = '%s/%s/%s' % (YOUTUBE_VIDEO_URI, video_id, 'responses')
return self.Get(
uri, converter=gdata.youtube.YouTubeVideoResponseFeedFromString)
def GetYouTubeVideoResponseEntry(self, uri):
"""Retrieve a YouTubeVideoResponseEntry.
Args:
uri: A string representing the URI of the video response entry that
is to be retrieved.
Returns:
A YouTubeVideoResponseEntry if successfully retrieved.
"""
return self.Get(
uri, converter=gdata.youtube.YouTubeVideoResponseEntryFromString)
def GetYouTubeSubscriptionFeed(self, uri=None, username='default'):
"""Retrieve a YouTubeSubscriptionFeed.
Either the uri of the feed or a username must be provided.
Args:
uri: An optional string representing the URI of the feed that is to
be retrieved.
username: An optional string representing the username whose subscription
feed is to be retrieved. Defaults to the currently authenticated user.
Returns:
A YouTubeVideoSubscriptionFeed if successfully retrieved.
"""
if uri is None:
uri = '%s/%s/%s' % (YOUTUBE_USER_FEED_URI, username, 'subscriptions')
return self.Get(
uri, converter=gdata.youtube.YouTubeSubscriptionFeedFromString)
def GetYouTubeSubscriptionEntry(self, uri):
"""Retrieve a YouTubeSubscriptionEntry.
Args:
uri: A string representing the URI of the entry that is to be retrieved.
Returns:
A YouTubeVideoSubscriptionEntry if successfully retrieved.
"""
return self.Get(
uri, converter=gdata.youtube.YouTubeSubscriptionEntryFromString)
def GetYouTubeRelatedVideoFeed(self, uri=None, video_id=None):
"""Retrieve a YouTubeRelatedVideoFeed.
Either a uri for the feed or a video_id is required.
Args:
uri: An optional string representing the URI of the feed that is to
be retrieved.
video_id: An optional string representing the ID of the video for which
to retrieve the related video feed.
Returns:
A YouTubeRelatedVideoFeed if successfully retrieved.
Raises:
YouTubeError: You must provide at least a uri or a video_id to the
GetYouTubeRelatedVideoFeed() method.
"""
if uri is None and video_id is None:
raise YouTubeError('You must provide at least a uri or a video_id '
'to the GetYouTubeRelatedVideoFeed() method')
elif video_id and not uri:
uri = '%s/%s/%s' % (YOUTUBE_VIDEO_URI, video_id, 'related')
return self.Get(
uri, converter=gdata.youtube.YouTubeVideoFeedFromString)
def GetTopRatedVideoFeed(self):
"""Retrieve the 'top_rated' standard video feed.
Returns:
A YouTubeVideoFeed if successfully retrieved.
"""
return self.GetYouTubeVideoFeed(YOUTUBE_STANDARD_TOP_RATED_URI)
def GetMostViewedVideoFeed(self):
"""Retrieve the 'most_viewed' standard video feed.
Returns:
A YouTubeVideoFeed if successfully retrieved.
"""
return self.GetYouTubeVideoFeed(YOUTUBE_STANDARD_MOST_VIEWED_URI)
def GetRecentlyFeaturedVideoFeed(self):
"""Retrieve the 'recently_featured' standard video feed.
Returns:
A YouTubeVideoFeed if successfully retrieved.
"""
return self.GetYouTubeVideoFeed(YOUTUBE_STANDARD_RECENTLY_FEATURED_URI)
def GetWatchOnMobileVideoFeed(self):
"""Retrieve the 'watch_on_mobile' standard video feed.
Returns:
A YouTubeVideoFeed if successfully retrieved.
"""
return self.GetYouTubeVideoFeed(YOUTUBE_STANDARD_WATCH_ON_MOBILE_URI)
def GetTopFavoritesVideoFeed(self):
"""Retrieve the 'top_favorites' standard video feed.
Returns:
A YouTubeVideoFeed if successfully retrieved.
"""
return self.GetYouTubeVideoFeed(YOUTUBE_STANDARD_TOP_FAVORITES_URI)
def GetMostRecentVideoFeed(self):
"""Retrieve the 'most_recent' standard video feed.
Returns:
A YouTubeVideoFeed if successfully retrieved.
"""
return self.GetYouTubeVideoFeed(YOUTUBE_STANDARD_MOST_RECENT_URI)
def GetMostDiscussedVideoFeed(self):
"""Retrieve the 'most_discussed' standard video feed.
Returns:
A YouTubeVideoFeed if successfully retrieved.
"""
return self.GetYouTubeVideoFeed(YOUTUBE_STANDARD_MOST_DISCUSSED_URI)
def GetMostLinkedVideoFeed(self):
"""Retrieve the 'most_linked' standard video feed.
Returns:
A YouTubeVideoFeed if successfully retrieved.
"""
return self.GetYouTubeVideoFeed(YOUTUBE_STANDARD_MOST_LINKED_URI)
def GetMostRespondedVideoFeed(self):
"""Retrieve the 'most_responded' standard video feed.
Returns:
A YouTubeVideoFeed if successfully retrieved.
"""
return self.GetYouTubeVideoFeed(YOUTUBE_STANDARD_MOST_RESPONDED_URI)
def GetUserFavoritesFeed(self, username='default'):
"""Retrieve the favorites feed for a given user.
Args:
username: An optional string representing the username whose favorites
feed is to be retrieved. Defaults to the currently authenticated user.
Returns:
A YouTubeVideoFeed if successfully retrieved.
"""
favorites_feed_uri = '%s/%s/%s' % (YOUTUBE_USER_FEED_URI, username,
'favorites')
return self.GetYouTubeVideoFeed(favorites_feed_uri)
def InsertVideoEntry(self, video_entry, filename_or_handle,
youtube_username='default',
content_type='video/quicktime'):
"""Upload a new video to YouTube using the direct upload mechanism.
Needs authentication.
Args:
video_entry: The YouTubeVideoEntry to upload.
filename_or_handle: A file-like object or file name where the video
will be read from.
youtube_username: An optional string representing the username into whose
account this video is to be uploaded. Defaults to the currently
authenticated user.
content_type: An optional string representing internet media type
(a.k.a. mime type) of the media object. Currently the YouTube API
supports these types:
o video/mpeg
o video/quicktime
o video/x-msvideo
o video/mp4
o video/x-flv
Returns:
The newly created YouTubeVideoEntry if successful.
Raises:
AssertionError: video_entry must be a gdata.youtube.VideoEntry instance.
YouTubeError: An error occurred trying to read the video file provided.
gdata.service.RequestError: An error occurred trying to upload the video
to the API server.
"""
# We need to perform a series of checks on the video_entry and on the
# file that we plan to upload, such as checking whether we have a valid
# video_entry and that the file is the correct type and readable, prior
# to performing the actual POST request.
try:
assert(isinstance(video_entry, gdata.youtube.YouTubeVideoEntry))
except AssertionError:
raise YouTubeError({'status':YOUTUBE_INVALID_ARGUMENT,
'body':'`video_entry` must be a gdata.youtube.VideoEntry instance',
'reason':'Found %s, not VideoEntry' % type(video_entry)
})
#majtype, mintype = content_type.split('/')
#
#try:
# assert(mintype in YOUTUBE_SUPPORTED_UPLOAD_TYPES)
#except (ValueError, AssertionError):
# raise YouTubeError({'status':YOUTUBE_INVALID_CONTENT_TYPE,
# 'body':'This is not a valid content type: %s' % content_type,
# 'reason':'Accepted content types: %s' %
# ['video/%s' % (t) for t in YOUTUBE_SUPPORTED_UPLOAD_TYPES]})
if (isinstance(filename_or_handle, (str, unicode))
and os.path.exists(filename_or_handle)):
mediasource = gdata.MediaSource()
mediasource.setFile(filename_or_handle, content_type)
elif hasattr(filename_or_handle, 'read'):
import StringIO
if hasattr(filename_or_handle, 'seek'):
filename_or_handle.seek(0)
file_handle = filename_or_handle
name = 'video'
if hasattr(filename_or_handle, 'name'):
name = filename_or_handle.name
mediasource = gdata.MediaSource(file_handle, content_type,
content_length=file_handle.len, file_name=name)
else:
raise YouTubeError({'status':YOUTUBE_INVALID_ARGUMENT, 'body':
'`filename_or_handle` must be a path name or a file-like object',
'reason': ('Found %s, not path name or object '
'with a .read() method' % type(filename_or_handle))})
upload_uri = '%s/%s/%s' % (YOUTUBE_UPLOAD_URI, youtube_username,
'uploads')
self.additional_headers['Slug'] = mediasource.file_name
# Using a nested try statement to retain Python 2.4 compatibility
try:
try:
return self.Post(video_entry, uri=upload_uri, media_source=mediasource,
converter=gdata.youtube.YouTubeVideoEntryFromString)
except gdata.service.RequestError, e:
raise YouTubeError(e.args[0])
finally:
del(self.additional_headers['Slug'])
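# Rough upload sketch (metadata construction follows the gdata.media pattern;
# the file path, title and category values below are placeholders):
#
#   my_media_group = gdata.media.Group(
#       title=gdata.media.Title(text='My test movie'),
#       description=gdata.media.Description(description_type='plain',
#                                            text='A short description'),
#       keywords=gdata.media.Keywords(text='test, example'),
#       category=[gdata.media.Category(
#           text='Autos',
#           scheme='http://gdata.youtube.com/schemas/2007/categories.cat',
#           label='Autos')])
#   video_entry = gdata.youtube.YouTubeVideoEntry(media=my_media_group)
#   new_entry = yt_service.InsertVideoEntry(video_entry, '/path/to/movie.mov')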
def CheckUploadStatus(self, video_entry=None, video_id=None):
"""Check upload status on a recently uploaded video entry.
Needs authentication. Either video_entry or video_id must be provided.
Args:
video_entry: An optional YouTubeVideoEntry whose upload status to check
video_id: An optional string representing the ID of the uploaded video
whose status is to be checked.
Returns:
A tuple containing (video_upload_state, detailed_message) or None if
no status information is found.
Raises:
YouTubeError: You must provide at least a video_entry or a video_id to the
CheckUploadStatus() method.
"""
if video_entry is None and video_id is None:
raise YouTubeError('You must provide at least a uri or a video_id '
'to the CheckUploadStatus() method')
elif video_id and not video_entry:
video_entry = self.GetYouTubeVideoEntry(video_id=video_id)
control = video_entry.control
if control is not None:
draft = control.draft
if draft is not None:
if draft.text == 'yes':
yt_state = control.extension_elements[0]
if yt_state is not None:
state_value = yt_state.attributes['name']
message = ''
if yt_state.text is not None:
message = yt_state.text
return (state_value, message)
def GetFormUploadToken(self, video_entry, uri=YOUTUBE_UPLOAD_TOKEN_URI):
"""Receives a YouTube Token and a YouTube PostUrl from a YouTubeVideoEntry.
Needs authentication.
Args:
video_entry: The YouTubeVideoEntry to upload (meta-data only).
uri: An optional string representing the URI from where to fetch the
token information. Defaults to the YOUTUBE_UPLOAD_TOKEN_URI.
Returns:
A tuple containing the URL to which to post your video file, along
with the youtube token that must be included with your upload in the
form of: (post_url, youtube_token).
"""
try:
response = self.Post(video_entry, uri)
except gdata.service.RequestError, e:
raise YouTubeError(e.args[0])
tree = ElementTree.fromstring(response)
for child in tree:
if child.tag == 'url':
post_url = child.text
elif child.tag == 'token':
youtube_token = child.text
return (post_url, youtube_token)
def UpdateVideoEntry(self, video_entry):
"""Updates a video entry's meta-data.
Needs authentication.
Args:
video_entry: The YouTubeVideoEntry to update, containing updated
meta-data.
Returns:
An updated YouTubeVideoEntry on success or None.
"""
for link in video_entry.link:
if link.rel == 'edit':
edit_uri = link.href
return self.Put(video_entry, uri=edit_uri,
converter=gdata.youtube.YouTubeVideoEntryFromString)
def DeleteVideoEntry(self, video_entry):
"""Deletes a video entry.
Needs authentication.
Args:
video_entry: The YouTubeVideoEntry to be deleted.
Returns:
True if entry was deleted successfully.
"""
for link in video_entry.link:
if link.rel == 'edit':
edit_uri = link.href
return self.Delete(edit_uri)
def AddRating(self, rating_value, video_entry):
"""Add a rating to a video entry.
Needs authentication.
Args:
rating_value: The integer value for the rating (between 1 and 5).
video_entry: The YouTubeVideoEntry to be rated.
Returns:
True if the rating was added successfully.
Raises:
YouTubeError: rating_value must be between 1 and 5 in AddRating().
"""
if rating_value < 1 or rating_value > 5:
raise YouTubeError('rating_value must be between 1 and 5 in AddRating()')
entry = gdata.GDataEntry()
rating = gdata.youtube.Rating(min='1', max='5')
rating.extension_attributes['name'] = 'value'
rating.extension_attributes['value'] = str(rating_value)
entry.extension_elements.append(rating)
for link in video_entry.link:
if link.rel == YOUTUBE_RATING_LINK_REL:
rating_uri = link.href
return self.Post(entry, uri=rating_uri)
def AddComment(self, comment_text, video_entry):
"""Add a comment to a video entry.
Needs authentication. Note that each comment that is posted must contain
the video entry that it is to be posted to.
Args:
comment_text: A string representing the text of the comment.
video_entry: The YouTubeVideoEntry to be commented on.
Returns:
True if the comment was added successfully.
"""
content = atom.Content(text=comment_text)
comment_entry = gdata.youtube.YouTubeVideoCommentEntry(content=content)
comment_post_uri = video_entry.comments.feed_link[0].href
return self.Post(comment_entry, uri=comment_post_uri)
def AddVideoResponse(self, video_id_to_respond_to, video_response):
"""Add a video response.
Needs authentication.
Args:
video_id_to_respond_to: A string representing the ID of the video to be
responded to.
video_response: YouTubeVideoEntry to be posted as a response.
Returns:
True if video response was posted successfully.
"""
post_uri = '%s/%s/%s' % (YOUTUBE_VIDEO_URI, video_id_to_respond_to,
'responses')
return self.Post(video_response, uri=post_uri)
def DeleteVideoResponse(self, video_id, response_video_id):
"""Delete a video response.
Needs authentication.
Args:
video_id: A string representing the ID of video that contains the
response.
response_video_id: A string representing the ID of the video that was
posted as a response.
Returns:
True if video response was deleted successfully.
"""
delete_uri = '%s/%s/%s/%s' % (YOUTUBE_VIDEO_URI, video_id, 'responses',
response_video_id)
return self.Delete(delete_uri)
def AddComplaint(self, complaint_text, complaint_term, video_id):
"""Add a complaint for a particular video entry.
Needs authentication.
Args:
complaint_text: A string representing the complaint text.
complaint_term: A string representing the complaint category term.
video_id: A string representing the ID of YouTubeVideoEntry to
complain about.
Returns:
True if posted successfully.
Raises:
YouTubeError: Your complaint_term is not valid.
"""
if complaint_term not in YOUTUBE_COMPLAINT_CATEGORY_TERMS:
raise YouTubeError('Your complaint_term is not valid')
content = atom.Content(text=complaint_text)
category = atom.Category(term=complaint_term,
scheme=YOUTUBE_COMPLAINT_CATEGORY_SCHEME)
complaint_entry = gdata.GDataEntry(content=content, category=[category])
post_uri = '%s/%s/%s' % (YOUTUBE_VIDEO_URI, video_id, 'complaints')
return self.Post(complaint_entry, post_uri)
def AddVideoEntryToFavorites(self, video_entry, username='default'):
"""Add a video entry to a users favorite feed.
Needs authentication.
Args:
video_entry: The YouTubeVideoEntry to add.
username: An optional string representing the username to whose favorite
feed you wish to add the entry. Defaults to the currently
authenticated user.
Returns:
The posted YouTubeVideoEntry if successfully posted.
"""
post_uri = '%s/%s/%s' % (YOUTUBE_USER_FEED_URI, username, 'favorites')
return self.Post(video_entry, post_uri,
converter=gdata.youtube.YouTubeVideoEntryFromString)
def DeleteVideoEntryFromFavorites(self, video_id, username='default'):
"""Delete a video entry from the users favorite feed.
Needs authentication.
Args:
video_id: A string representing the ID of the video that is to be removed
username: An optional string representing the username of the user's
favorite feed. Defaults to the currently authenticated user.
Returns:
True if entry was successfully deleted.
"""
edit_link = '%s/%s/%s/%s' % (YOUTUBE_USER_FEED_URI, username, 'favorites',
video_id)
return self.Delete(edit_link)
def AddPlaylist(self, playlist_title, playlist_description,
playlist_private=None):
"""Add a new playlist to the currently authenticated users account.
Needs authentication.
Args:
playlist_title: A string representing the title for the new playlist.
playlist_description: A string representing the description of the
playlist.
playlist_private: An optional boolean, set to True if the playlist is
to be private.
Returns:
The YouTubePlaylistEntry if successfully posted.
"""
playlist_entry = gdata.youtube.YouTubePlaylistEntry(
title=atom.Title(text=playlist_title),
description=gdata.youtube.Description(text=playlist_description))
if playlist_private:
playlist_entry.private = gdata.youtube.Private()
playlist_post_uri = '%s/%s/%s' % (YOUTUBE_USER_FEED_URI, 'default',
'playlists')
return self.Post(playlist_entry, playlist_post_uri,
converter=gdata.youtube.YouTubePlaylistEntryFromString)
def UpdatePlaylist(self, playlist_id, new_playlist_title,
new_playlist_description, playlist_private=None,
username='default'):
"""Update a playlist with new meta-data.
Needs authentication.
Args:
playlist_id: A string representing the ID of the playlist to be updated.
new_playlist_title: A string representing a new title for the playlist.
new_playlist_description: A string representing a new description for the
playlist.
playlist_private: An optional boolean, set to True if the playlist is
to be private.
username: An optional string representing the username whose playlist is
to be updated. Defaults to the currently authenticated user.
Returns:
A YouTubePlaylistEntry if the update was successful.
"""
updated_playlist = gdata.youtube.YouTubePlaylistEntry(
title=atom.Title(text=new_playlist_title),
description=gdata.youtube.Description(text=new_playlist_description))
if playlist_private:
updated_playlist.private = gdata.youtube.Private()
playlist_put_uri = '%s/%s/playlists/%s' % (YOUTUBE_USER_FEED_URI, username,
playlist_id)
return self.Put(updated_playlist, playlist_put_uri,
converter=gdata.youtube.YouTubePlaylistEntryFromString)
def DeletePlaylist(self, playlist_uri):
"""Delete a playlist from the currently authenticated users playlists.
Needs authentication.
Args:
playlist_uri: A string representing the URI of the playlist that is
to be deleted.
Returns:
True if successfully deleted.
"""
return self.Delete(playlist_uri)
def AddPlaylistVideoEntryToPlaylist(
self, playlist_uri, video_id, custom_video_title=None,
custom_video_description=None):
"""Add a video entry to a playlist, optionally providing a custom title
and description.
Needs authentication.
Args:
playlist_uri: A string representing the URI of the playlist to which this
video entry is to be added.
video_id: A string representing the ID of the video entry to add.
custom_video_title: An optional string representing a custom title for
the video (only shown on the playlist).
custom_video_description: An optional string representing a custom
description for the video (only shown on the playlist).
Returns:
A YouTubePlaylistVideoEntry if successfully posted.
"""
playlist_video_entry = gdata.youtube.YouTubePlaylistVideoEntry(
atom_id=atom.Id(text=video_id))
if custom_video_title:
playlist_video_entry.title = atom.Title(text=custom_video_title)
if custom_video_description:
playlist_video_entry.description = gdata.youtube.Description(
text=custom_video_description)
return self.Post(playlist_video_entry, playlist_uri,
converter=gdata.youtube.YouTubePlaylistVideoEntryFromString)
def UpdatePlaylistVideoEntryMetaData(
self, playlist_uri, playlist_entry_id, new_video_title,
new_video_description, new_video_position):
"""Update the meta data for a YouTubePlaylistVideoEntry.
Needs authentication.
Args:
playlist_uri: A string representing the URI of the playlist that contains
the entry to be updated.
playlist_entry_id: A string representing the ID of the entry to be
updated.
new_video_title: A string representing the new title for the video entry.
new_video_description: A string representing the new description for
the video entry.
new_video_position: An integer representing the new position on the
playlist for the video.
Returns:
A YouTubePlaylistVideoEntry if the update was successful.
"""
playlist_video_entry = gdata.youtube.YouTubePlaylistVideoEntry(
title=atom.Title(text=new_video_title),
description=gdata.youtube.Description(text=new_video_description),
position=gdata.youtube.Position(text=str(new_video_position)))
playlist_put_uri = playlist_uri + '/' + playlist_entry_id
return self.Put(playlist_video_entry, playlist_put_uri,
converter=gdata.youtube.YouTubePlaylistVideoEntryFromString)
def DeletePlaylistVideoEntry(self, playlist_uri, playlist_video_entry_id):
"""Delete a playlist video entry from a playlist.
Needs authentication.
Args:
playlist_uri: A URI representing the playlist from which the playlist
video entry is to be removed.
playlist_video_entry_id: A string representing id of the playlist video
entry that is to be removed.
Returns:
True if entry was successfully deleted.
"""
delete_uri = '%s/%s' % (playlist_uri, playlist_video_entry_id)
return self.Delete(delete_uri)
def AddSubscriptionToChannel(self, username_to_subscribe_to,
my_username = 'default'):
"""Add a new channel subscription to the currently authenticated users
account.
Needs authentication.
Args:
username_to_subscribe_to: A string representing the username of the
channel to which we want to subscribe to.
my_username: An optional string representing the name of the user which
we want to subscribe. Defaults to currently authenticated user.
Returns:
A new YouTubeSubscriptionEntry if successfully posted.
"""
subscription_category = atom.Category(
scheme=YOUTUBE_SUBSCRIPTION_CATEGORY_SCHEME,
term='channel')
subscription_username = gdata.youtube.Username(
text=username_to_subscribe_to)
subscription_entry = gdata.youtube.YouTubeSubscriptionEntry(
category=subscription_category,
username=subscription_username)
post_uri = '%s/%s/%s' % (YOUTUBE_USER_FEED_URI, my_username,
'subscriptions')
return self.Post(subscription_entry, post_uri,
converter=gdata.youtube.YouTubeSubscriptionEntryFromString)
def AddSubscriptionToFavorites(self, username, my_username = 'default'):
"""Add a new subscription to a users favorites to the currently
authenticated user's account.
Needs authentication
Args:
username: A string representing the username of the user's favorite feed
to subscribe to.
my_username: An optional string representing the username of the user
that is to be subscribed. Defaults to currently authenticated user.
Returns:
A new YouTubeSubscriptionEntry if successful.
"""
subscription_category = atom.Category(
scheme=YOUTUBE_SUBSCRIPTION_CATEGORY_SCHEME,
term='favorites')
subscription_username = gdata.youtube.Username(text=username)
subscription_entry = gdata.youtube.YouTubeSubscriptionEntry(
category=subscription_category,
username=subscription_username)
post_uri = '%s/%s/%s' % (YOUTUBE_USER_FEED_URI, my_username,
'subscriptions')
return self.Post(subscription_entry, post_uri,
converter=gdata.youtube.YouTubeSubscriptionEntryFromString)
def AddSubscriptionToQuery(self, query, my_username = 'default'):
"""Add a new subscription to a specific keyword query to the currently
authenticated user's account.
Needs authentication
Args:
query: A string representing the keyword query to subscribe to.
my_username: An optional string representing the username of the user
that is to be subscribed. Defaults to currently authenticated user.
Returns:
A new YouTubeSubscriptionEntry if successful.
"""
subscription_category = atom.Category(
scheme=YOUTUBE_SUBSCRIPTION_CATEGORY_SCHEME,
term='query')
subscription_query_string = gdata.youtube.QueryString(text=query)
subscription_entry = gdata.youtube.YouTubeSubscriptionEntry(
category=subscription_category,
query_string=subscription_query_string)
post_uri = '%s/%s/%s' % (YOUTUBE_USER_FEED_URI, my_username,
'subscriptions')
return self.Post(subscription_entry, post_uri,
converter=gdata.youtube.YouTubeSubscriptionEntryFromString)
def DeleteSubscription(self, subscription_uri):
"""Delete a subscription from the currently authenticated user's account.
Needs authentication.
Args:
subscription_uri: A string representing the URI of the subscription that
is to be deleted.
Returns:
True if deleted successfully.
"""
return self.Delete(subscription_uri)
def AddContact(self, contact_username, my_username='default'):
"""Add a new contact to the currently authenticated user's contact feed.
Needs authentication.
Args:
contact_username: A string representing the username of the contact
that you wish to add.
my_username: An optional string representing the username to whose
contact the new contact is to be added.
Returns:
A YouTubeContactEntry if added successfully.
"""
contact_category = atom.Category(
scheme = 'http://gdata.youtube.com/schemas/2007/contact.cat',
term = 'Friends')
contact_username = gdata.youtube.Username(text=contact_username)
contact_entry = gdata.youtube.YouTubeContactEntry(
category=contact_category,
username=contact_username)
contact_post_uri = '%s/%s/%s' % (YOUTUBE_USER_FEED_URI, my_username,
'contacts')
return self.Post(contact_entry, contact_post_uri,
converter=gdata.youtube.YouTubeContactEntryFromString)
def UpdateContact(self, contact_username, new_contact_status,
new_contact_category, my_username='default'):
"""Update a contact, providing a new status and a new category.
Needs authentication.
Args:
contact_username: A string representing the username of the contact
that is to be updated.
new_contact_status: A string representing the new status of the contact.
This can either be set to 'accepted' or 'rejected'.
new_contact_category: A string representing the new category for the
contact, either 'Friends' or 'Family'.
my_username: An optional string representing the username of the user
whose contact feed we are modifying. Defaults to the currently
authenticated user.
Returns:
A YouTubeContactEntry if updated successfully.
Raises:
YouTubeError: New contact status must be within the accepted values. Or
new contact category must be within the accepted categories.
"""
if new_contact_status not in YOUTUBE_CONTACT_STATUS:
raise YouTubeError('New contact status must be one of %s' %
(' '.join(YOUTUBE_CONTACT_STATUS)))
if new_contact_category not in YOUTUBE_CONTACT_CATEGORY:
raise YouTubeError('New contact category must be one of %s' %
(' '.join(YOUTUBE_CONTACT_CATEGORY)))
contact_category = atom.Category(
scheme='http://gdata.youtube.com/schemas/2007/contact.cat',
term=new_contact_category)
contact_status = gdata.youtube.Status(text=new_contact_status)
contact_entry = gdata.youtube.YouTubeContactEntry(
category=contact_category,
status=contact_status)
contact_put_uri = '%s/%s/%s/%s' % (YOUTUBE_USER_FEED_URI, my_username,
'contacts', contact_username)
return self.Put(contact_entry, contact_put_uri,
converter=gdata.youtube.YouTubeContactEntryFromString)
def DeleteContact(self, contact_username, my_username='default'):
"""Delete a contact from a users contact feed.
Needs authentication.
Args:
contact_username: A string representing the username of the contact
that is to be deleted.
my_username: An optional string representing the username of the user's
contact feed from which to delete the contact. Defaults to the
currently authenticated user.
Returns:
True if the contact was deleted successfully
"""
contact_edit_uri = '%s/%s/%s/%s' % (YOUTUBE_USER_FEED_URI, my_username,
'contacts', contact_username)
return self.Delete(contact_edit_uri)
def _GetDeveloperKey(self):
"""Getter for Developer Key property.
Returns:
The developer key as a string if it has been set, otherwise None.
"""
if 'X-GData-Key' in self.additional_headers:
return self.additional_headers['X-GData-Key'][4:]
else:
return None
def _SetDeveloperKey(self, developer_key):
"""Setter for Developer Key property.
Sets the developer key in the 'X-GData-Key' header. The actual value that
is set is 'key=' plus the developer_key that was passed.
"""
self.additional_headers['X-GData-Key'] = 'key=' + developer_key
developer_key = property(_GetDeveloperKey, _SetDeveloperKey,
doc="""The Developer Key property""")
def _GetClientId(self):
"""Getter for Client Id property.
Returns:
The client id as a string if it has been set, otherwise None.
"""
if 'X-Gdata-Client' in self.additional_headers:
return self.additional_headers['X-Gdata-Client']
else:
return None
def _SetClientId(self, client_id):
"""Setter for Client Id property.
Sets the 'X-Gdata-Client' header.
"""
self.additional_headers['X-Gdata-Client'] = client_id
client_id = property(_GetClientId, _SetClientId,
doc="""The ClientId property""")
def Query(self, uri):
"""Performs a query and returns a resulting feed or entry.
Args:
uri: A string representing the URI of the feed that is to be queried.
Returns:
On success, a tuple in the form:
(boolean succeeded=True, ElementTree._Element result)
On failure, a tuple in the form:
(boolean succeeded=False, {'status': HTTP status code from server,
'reason': HTTP reason from the server,
'body': HTTP body of the server's response})
"""
result = self.Get(uri)
return result
def YouTubeQuery(self, query):
"""Performs a YouTube specific query and returns a resulting feed or entry.
Args:
query: A Query object or one if its sub-classes (YouTubeVideoQuery,
YouTubeUserQuery or YouTubePlaylistQuery).
Returns:
Depending on the type of Query object submitted returns either a
YouTubeVideoFeed, a YouTubeUserFeed, or a YouTubePlaylistFeed. If the
Query object provided was not YouTube-related, a tuple is returned.
On success the tuple will be in this form:
(boolean succeeded=True, ElementTree._Element result)
On failure, the tuple will be in this form:
(boolean succeeded=False, {'status': HTTP status code from server,
'reason': HTTP reason from the server,
'body': HTTP body of the server response})
"""
result = self.Query(query.ToUri())
if isinstance(query, YouTubeUserQuery):
return gdata.youtube.YouTubeUserFeedFromString(result.ToString())
elif isinstance(query, YouTubePlaylistQuery):
return gdata.youtube.YouTubePlaylistFeedFromString(result.ToString())
elif isinstance(query, YouTubeVideoQuery):
return gdata.youtube.YouTubeVideoFeedFromString(result.ToString())
else:
return result
class YouTubeVideoQuery(gdata.service.Query):
"""Subclasses gdata.service.Query to represent a YouTube Data API query.
Attributes are set dynamically via properties. Properties correspond to
the standard Google Data API query parameters with YouTube Data API
extensions. Please refer to the API documentation for details.
Attributes:
vq: The vq parameter, which is only supported for video feeds, specifies a
search query term. Refer to API documentation for further details.
orderby: The orderby parameter, which is only supported for video feeds,
specifies the value that will be used to sort videos in the search
result set. Valid values for this parameter are relevance, published,
viewCount and rating.
time: The time parameter, which is only available for the top_rated,
top_favorites, most_viewed, most_discussed, most_linked and
most_responded standard feeds, restricts the search to videos uploaded
within the specified time. Valid values for this parameter are today
(1 day), this_week (7 days), this_month (1 month) and all_time.
The default value for this parameter is all_time.
format: The format parameter specifies that videos must be available in a
particular video format. Refer to the API documentation for details.
racy: The racy parameter allows a search result set to include restricted
content as well as standard content. Valid values for this parameter
are include and exclude. By default, restricted content is excluded.
lr: The lr parameter restricts the search to videos that have a title,
description or keywords in a specific language. Valid values for the lr
parameter are ISO 639-1 two-letter language codes.
restriction: The restriction parameter identifies the IP address that
should be used to filter videos that can only be played in specific
countries.
location: A string of geo coordinates. Note that this is not used when the
search is performed but rather to filter the returned videos for ones
that match the location entered.
feed: str (optional) The base URL which is the beginning of the query URL.
defaults to 'http://%s/feeds/videos' % (YOUTUBE_SERVER)
"""
def __init__(self, video_id=None, feed_type=None, text_query=None,
params=None, categories=None, feed=None):
if feed_type in YOUTUBE_STANDARDFEEDS and feed is None:
feed = 'http://%s/feeds/standardfeeds/%s' % (YOUTUBE_SERVER, feed_type)
elif (feed_type in ('responses', 'comments') and video_id
and feed is None):
feed = 'http://%s/feeds/videos/%s/%s' % (YOUTUBE_SERVER, video_id,
feed_type)
elif feed is None:
feed = 'http://%s/feeds/videos' % (YOUTUBE_SERVER)
gdata.service.Query.__init__(self, feed, text_query=text_query,
params=params, categories=categories)
def _GetVideoQuery(self):
if 'vq' in self:
return self['vq']
else:
return None
def _SetVideoQuery(self, val):
self['vq'] = val
vq = property(_GetVideoQuery, _SetVideoQuery,
doc="""The video query (vq) query parameter""")
def _GetOrderBy(self):
if 'orderby' in self:
return self['orderby']
else:
return None
def _SetOrderBy(self, val):
if val not in YOUTUBE_QUERY_VALID_ORDERBY_PARAMETERS:
if not val.startswith('relevance_lang_'):
raise YouTubeError('OrderBy must be one of: %s ' %
' '.join(YOUTUBE_QUERY_VALID_ORDERBY_PARAMETERS))
self['orderby'] = val
orderby = property(_GetOrderBy, _SetOrderBy,
doc="""The orderby query parameter""")
def _GetTime(self):
if 'time' in self:
return self['time']
else:
return None
def _SetTime(self, val):
if val not in YOUTUBE_QUERY_VALID_TIME_PARAMETERS:
raise YouTubeError('Time must be one of: %s ' %
' '.join(YOUTUBE_QUERY_VALID_TIME_PARAMETERS))
self['time'] = val
time = property(_GetTime, _SetTime,
doc="""The time query parameter""")
def _GetFormat(self):
if 'format' in self:
return self['format']
else:
return None
def _SetFormat(self, val):
if val not in YOUTUBE_QUERY_VALID_FORMAT_PARAMETERS:
raise YouTubeError('Format must be one of: %s ' %
' '.join(YOUTUBE_QUERY_VALID_FORMAT_PARAMETERS))
self['format'] = val
format = property(_GetFormat, _SetFormat,
doc="""The format query parameter""")
def _GetRacy(self):
if 'racy' in self:
return self['racy']
else:
return None
def _SetRacy(self, val):
if val not in YOUTUBE_QUERY_VALID_RACY_PARAMETERS:
raise YouTubeError('Racy must be one of: %s ' %
' '.join(YOUTUBE_QUERY_VALID_RACY_PARAMETERS))
self['racy'] = val
racy = property(_GetRacy, _SetRacy,
doc="""The racy query parameter""")
def _GetLanguageRestriction(self):
if 'lr' in self:
return self['lr']
else:
return None
def _SetLanguageRestriction(self, val):
self['lr'] = val
lr = property(_GetLanguageRestriction, _SetLanguageRestriction,
doc="""The lr (language restriction) query parameter""")
def _GetIPRestriction(self):
if 'restriction' in self:
return self['restriction']
else:
return None
def _SetIPRestriction(self, val):
self['restriction'] = val
restriction = property(_GetIPRestriction, _SetIPRestriction,
doc="""The restriction query parameter""")
def _GetLocation(self):
if 'location' in self:
return self['location']
else:
return None
def _SetLocation(self, val):
self['location'] = val
location = property(_GetLocation, _SetLocation,
doc="""The location query parameter""")
class YouTubeUserQuery(YouTubeVideoQuery):
"""Subclasses YouTubeVideoQuery to perform user-specific queries.
Attributes are set dynamically via properties. Properties correspond to
the standard Google Data API query parameters with YouTube Data API
extensions.
"""
def __init__(self, username=None, feed_type=None, subscription_id=None,
text_query=None, params=None, categories=None):
uploads_favorites_playlists = ('uploads', 'favorites', 'playlists')
if feed_type == 'subscriptions' and subscription_id and username:
feed = "http://%s/feeds/users/%s/%s/%s" % (YOUTUBE_SERVER, username,
feed_type, subscription_id)
elif feed_type == 'subscriptions' and not subscription_id and username:
feed = "http://%s/feeds/users/%s/%s" % (YOUTUBE_SERVER, username,
feed_type)
elif feed_type in uploads_favorites_playlists:
feed = "http://%s/feeds/users/%s/%s" % (YOUTUBE_SERVER, username,
feed_type)
else:
feed = "http://%s/feeds/users" % (YOUTUBE_SERVER)
YouTubeVideoQuery.__init__(self, feed=feed, text_query=text_query,
params=params, categories=categories)
class YouTubePlaylistQuery(YouTubeVideoQuery):
"""Subclasses YouTubeVideoQuery to perform playlist-specific queries.
Attributes are set dynamically via properties. Properties correspond to
the standard Google Data API query parameters with YouTube Data API
extensions.
"""
def __init__(self, playlist_id, text_query=None, params=None,
categories=None):
if playlist_id:
feed = "http://%s/feeds/playlists/%s" % (YOUTUBE_SERVER, playlist_id)
else:
feed = "http://%s/feeds/playlists" % (YOUTUBE_SERVER)
YouTubeVideoQuery.__init__(self, feed=feed, text_query=text_query,
params=params, categories=categories)
|
bsd-3-clause
|
ggaughan/dee
|
darwen.py
|
1
|
3332
|
from Dee import Relation, Key, Tuple, QUOTA, MAX, MIN, IS_EMPTY, COUNT, GENERATE
from DeeDatabase import Database
class darwen_Database(Database):
def __init__(self, name):
"""Define initial relvars and their initial values here
(Called once on database creation)"""
Database.__init__(self, name)
if 'IS_CALLED' not in self:
print "Adding IS_CALLED..."
self.IS_CALLED = Relation(['StudentId', 'Name'],
[('S1', 'Anne'),
('S2', 'Boris'),
('S3', 'Cindy'),
('S4', 'Devinder'),
('S5', 'Boris'),
]
)
if 'IS_ENROLLED_ON' not in self:
print "Adding IS_ENROLLED_ON..."
self.IS_ENROLLED_ON = Relation(['StudentId', 'CourseId'],
[('S1', 'C1'),
('S1', 'C2'),
('S2', 'C1'),
('S3', 'C3'),
('S4', 'C1'),
]
)
if 'COURSE' not in self:
print "Adding COURSE..."
self.COURSE = Relation(['CourseId', 'Title'],
[('C1', 'Database'),
('C2', 'HCI'),
('C3', 'Op Systems'),
('C4', 'Programming'),
]
)
if 'EXAM_MARK' not in self:
print "Adding EXAM_MARK..."
self.EXAM_MARK = Relation(['StudentId', 'CourseId', 'Mark'],
[('S1', 'C1', 85),
('S1', 'C2', 49),
('S2', 'C1', 49),
('S3', 'C3', 66),
('S4', 'C1', 93),
]
)
def _vinit(self):
"""Define virtual relvars/relconsts
(Called repeatedly, e.g. after database load from disk or commit)
"""
Database._vinit(self)
if 'C_ER' not in self:
print "Defining C_ER..."
#this will always be the case, even when re-loading: we don't store relations with callable bodies
self.C_ER = Relation(['CourseId', 'Exam_Result'],
self.vC_ER,
{'pk':(Key,['CourseId'])})
def vC_ER(self):
return self.COURSE.extend(['Exam_Result'], lambda t:{'Exam_Result':
(self.EXAM_MARK & GENERATE({'CourseId':t.CourseId})
)(['StudentId', 'Mark'])}
)(['CourseId', 'Exam_Result']) #fixed
#Load or create the database
darwen = Database.open(darwen_Database, "darwen")
###################################
if __name__=="__main__":
print darwen.relations
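#Illustrative sketch (not part of the original example database).
#Assumption: as in vC_ER above, '&' performs a natural join on the common
#attributes of two relations and calling a relation with a column list
#projects it onto those columns.
def example_names_with_marks():
    """Each student's name alongside the marks they scored."""
    return (darwen.IS_CALLED & darwen.EXAM_MARK)(['Name', 'Mark'])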
|
mit
|
chirilo/ana-ka-todo-app
|
node_modules/laravel-elixir/node_modules/npm/node_modules/node-gyp/gyp/tools/pretty_gyp.py
|
2618
|
4756
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Pretty-prints the contents of a GYP file."""
import sys
import re
# Regex to remove comments when we're counting braces.
COMMENT_RE = re.compile(r'\s*#.*')
# Regex to remove quoted strings when we're counting braces.
# It takes into account quoted quotes, and makes sure that the quotes match.
# NOTE: It does not handle quotes that span more than one line, or
# cases where an escaped quote is preceded by an escaped backslash.
QUOTE_RE_STR = r'(?P<q>[\'"])(.*?)(?<![^\\][\\])(?P=q)'
QUOTE_RE = re.compile(QUOTE_RE_STR)
def comment_replace(matchobj):
return matchobj.group(1) + matchobj.group(2) + '#' * len(matchobj.group(3))
def mask_comments(input):
"""Mask the quoted strings so we skip braces inside quoted strings."""
search_re = re.compile(r'(.*?)(#)(.*)')
return [search_re.sub(comment_replace, line) for line in input]
def quote_replace(matchobj):
return "%s%s%s%s" % (matchobj.group(1),
matchobj.group(2),
'x'*len(matchobj.group(3)),
matchobj.group(2))
def mask_quotes(input):
"""Mask the quoted strings so we skip braces inside quoted strings."""
search_re = re.compile(r'(.*?)' + QUOTE_RE_STR)
return [search_re.sub(quote_replace, line) for line in input]
def do_split(input, masked_input, search_re):
output = []
mask_output = []
for (line, masked_line) in zip(input, masked_input):
m = search_re.match(masked_line)
while m:
split = len(m.group(1))
line = line[:split] + r'\n' + line[split:]
masked_line = masked_line[:split] + r'\n' + masked_line[split:]
m = search_re.match(masked_line)
output.extend(line.split(r'\n'))
mask_output.extend(masked_line.split(r'\n'))
return (output, mask_output)
def split_double_braces(input):
"""Masks out the quotes and comments, and then splits appropriate
lines (lines that match the double_*_brace regexes below) before
indenting them below.
These are used to split lines which have multiple braces on them, so
that the indentation looks prettier when all laid out (e.g. closing
braces make a nice diagonal line).
"""
double_open_brace_re = re.compile(r'(.*?[\[\{\(,])(\s*)([\[\{\(])')
double_close_brace_re = re.compile(r'(.*?[\]\}\)],?)(\s*)([\]\}\)])')
masked_input = mask_quotes(input)
masked_input = mask_comments(masked_input)
(output, mask_output) = do_split(input, masked_input, double_open_brace_re)
(output, mask_output) = do_split(output, mask_output, double_close_brace_re)
return output
def count_braces(line):
"""keeps track of the number of braces on a given line and returns the result.
It starts at zero and subtracts for closed braces, and adds for open braces.
"""
open_braces = ['[', '(', '{']
close_braces = [']', ')', '}']
closing_prefix_re = re.compile(r'(.*?[^\s\]\}\)]+.*?)([\]\}\)],?)\s*$')
cnt = 0
stripline = COMMENT_RE.sub(r'', line)
stripline = QUOTE_RE.sub(r"''", stripline)
for char in stripline:
for brace in open_braces:
if char == brace:
cnt += 1
for brace in close_braces:
if char == brace:
cnt -= 1
after = False
if cnt > 0:
after = True
# This catches the special case of a closing brace having something
# other than just whitespace ahead of it -- we don't want to
# unindent that until after this line is printed so it stays with
# the previous indentation level.
if cnt < 0 and closing_prefix_re.match(stripline):
after = True
return (cnt, after)
def prettyprint_input(lines):
"""Does the main work of indenting the input based on the brace counts."""
indent = 0
basic_offset = 2
last_line = ""
for line in lines:
if COMMENT_RE.match(line):
print line
else:
line = line.strip('\r\n\t ') # Otherwise doesn't strip \r on Unix.
if len(line) > 0:
(brace_diff, after) = count_braces(line)
if brace_diff != 0:
if after:
print " " * (basic_offset * indent) + line
indent += brace_diff
else:
indent += brace_diff
print " " * (basic_offset * indent) + line
else:
print " " * (basic_offset * indent) + line
else:
print ""
last_line = line
def main():
if len(sys.argv) > 1:
data = open(sys.argv[1]).read().splitlines()
else:
data = sys.stdin.read().splitlines()
# Split up the double braces.
lines = split_double_braces(data)
# Indent and print the output.
prettyprint_input(lines)
return 0
if __name__ == '__main__':
sys.exit(main())
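# --- Illustrative usage sketch (not part of the original tool) ---
# The script is normally run as `python pretty_gyp.py some_file.gyp`; the
# helper below shows the same pipeline applied to an in-memory fragment.
# Assumption: the sample GYP text is hypothetical.
def _example():
  sample = ["{'targets': [{'target_name': 'demo', 'type': 'none'}]}"]
  lines = split_double_braces(sample)  # split lines with multiple braces
  prettyprint_input(lines)             # indent and print the result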
|
mit
|
scottmcmaster/catapult
|
third_party/mapreduce/mapreduce/status.py
|
29
|
12789
|
#!/usr/bin/env python
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Status page handler for mapreduce framework."""
__author__ = ("aizatsky@google.com (Mike Aizatsky)",
"bslatkin@google.com (Brett Slatkin)")
import os
import pkgutil
import time
import zipfile
from google.appengine.api import validation
from google.appengine.api import yaml_builder
from google.appengine.api import yaml_errors
from google.appengine.api import yaml_listener
from google.appengine.api import yaml_object
from google.appengine.ext import db
from google.appengine.ext import webapp
from mapreduce import base_handler
from mapreduce import errors
from mapreduce import model
# TODO(user): a list of features we'd like to have in status page:
# - show sparklet of entities/sec on index page
# - shard bar chart should color finished shards differently
# mapreduce.yaml file names
MR_YAML_NAMES = ["mapreduce.yaml", "mapreduce.yml"]
class BadStatusParameterError(Exception):
"""A parameter passed to a status handler was invalid."""
class UserParam(validation.Validated):
"""A user-supplied parameter to a mapreduce job."""
ATTRIBUTES = {
"name": r"[a-zA-Z0-9_\.]+",
"default": validation.Optional(r".*"),
"value": validation.Optional(r".*"),
}
class MapperInfo(validation.Validated):
"""Configuration parameters for the mapper part of the job."""
ATTRIBUTES = {
"handler": r".+",
"input_reader": r".+",
"output_writer": validation.Optional(r".+"),
"params": validation.Optional(validation.Repeated(UserParam)),
"params_validator": validation.Optional(r".+"),
}
class MapreduceInfo(validation.Validated):
"""Mapreduce description in mapreduce.yaml."""
ATTRIBUTES = {
"name": r".+",
"mapper": MapperInfo,
"params": validation.Optional(validation.Repeated(UserParam)),
"params_validator": validation.Optional(r".+"),
}
class MapReduceYaml(validation.Validated):
"""Root class for mapreduce.yaml.
File format:
mapreduce:
- name: <mapreduce_name>
mapper:
- input_reader: google.appengine.ext.mapreduce.DatastoreInputReader
- handler: path_to_my.MapperFunction
- params:
- name: foo
default: bar
- name: blah
default: stuff
- params_validator: path_to_my.ValidatorFunction
Where
mapreduce_name: The name of the mapreduce. Used for UI purposes.
mapper_handler_spec: Full <module_name>.<function_name/class_name> of
mapper handler. See MapreduceSpec class documentation for full handler
specification.
input_reader: Full <module_name>.<function_name/class_name> of the
InputReader sub-class to use for the mapper job.
params: A list of optional parameter names and optional default values
that may be supplied or overridden by the user running the job.
params_validator is full <module_name>.<function_name/class_name> of
a callable to validate the mapper_params after they are input by the
user running the job.
"""
ATTRIBUTES = {
"mapreduce": validation.Optional(validation.Repeated(MapreduceInfo))
}
@staticmethod
def to_dict(mapreduce_yaml):
"""Converts a MapReduceYaml file into a JSON-encodable dictionary.
For use in user-visible UI and internal methods for interfacing with
user code (like param validation).
Args:
mapreduce_yaml: The Python representation of the mapreduce.yaml document.
Returns:
A list of configuration dictionaries.
"""
all_configs = []
for config in mapreduce_yaml.mapreduce:
out = {
"name": config.name,
"mapper_input_reader": config.mapper.input_reader,
"mapper_handler": config.mapper.handler,
}
if config.mapper.params_validator:
out["mapper_params_validator"] = config.mapper.params_validator
if config.mapper.params:
param_defaults = {}
for param in config.mapper.params:
param_defaults[param.name] = param.default or param.value
out["mapper_params"] = param_defaults
if config.params:
param_defaults = {}
for param in config.params:
param_defaults[param.name] = param.default or param.value
out["params"] = param_defaults
if config.mapper.output_writer:
out["mapper_output_writer"] = config.mapper.output_writer
all_configs.append(out)
return all_configs
# N.B. Sadly, we currently don't have an ability to determine
# application root dir at run time. We need to walk up the directory structure
# to find it.
def find_mapreduce_yaml(status_file=__file__):
"""Traverse directory trees to find mapreduce.yaml file.
Begins with the location of status.py and then moves on to check the working
directory.
Args:
status_file: location of status.py, overridable for testing purposes.
Returns:
the path of mapreduce.yaml file or None if not found.
"""
checked = set()
yaml = _find_mapreduce_yaml(os.path.dirname(status_file), checked)
if not yaml:
yaml = _find_mapreduce_yaml(os.getcwd(), checked)
return yaml
def _find_mapreduce_yaml(start, checked):
"""Traverse the directory tree identified by start until a directory already
in checked is encountered or the path of mapreduce.yaml is found.
Checked is present both to make loop termination easy to reason about and so
that the same directories do not get rechecked.
Args:
start: the path to start in and work upward from
checked: the set of already examined directories
Returns:
the path of mapreduce.yaml file or None if not found.
"""
dir = start
while dir not in checked:
checked.add(dir)
for mr_yaml_name in MR_YAML_NAMES:
yaml_path = os.path.join(dir, mr_yaml_name)
if os.path.exists(yaml_path):
return yaml_path
dir = os.path.dirname(dir)
return None
def parse_mapreduce_yaml(contents):
"""Parses mapreduce.yaml file contents.
Args:
contents: mapreduce.yaml file contents.
Returns:
MapReduceYaml object with all the data from original file.
Raises:
errors.BadYamlError: when contents is not a valid mapreduce.yaml file.
"""
try:
builder = yaml_object.ObjectBuilder(MapReduceYaml)
handler = yaml_builder.BuilderHandler(builder)
listener = yaml_listener.EventListener(handler)
listener.Parse(contents)
mr_info = handler.GetResults()
except (ValueError, yaml_errors.EventError), e:
raise errors.BadYamlError(e)
if len(mr_info) < 1:
raise errors.BadYamlError("No configs found in mapreduce.yaml")
if len(mr_info) > 1:
raise errors.MultipleDocumentsInMrYaml("Found %d YAML documents" %
len(mr_info))
jobs = mr_info[0]
job_names = set(j.name for j in jobs.mapreduce)
if len(jobs.mapreduce) != len(job_names):
raise errors.BadYamlError(
"Overlapping mapreduce names; names must be unique")
return jobs
def get_mapreduce_yaml(parse=parse_mapreduce_yaml):
"""Locates mapreduce.yaml, loads and parses its info.
Args:
parse: Used for testing.
Returns:
MapReduceYaml object.
Raises:
errors.BadYamlError: when contents is not a valid mapreduce.yaml file or the
file is missing.
"""
mr_yaml_path = find_mapreduce_yaml()
if not mr_yaml_path:
raise errors.MissingYamlError()
mr_yaml_file = open(mr_yaml_path)
try:
return parse(mr_yaml_file.read())
finally:
mr_yaml_file.close()
class ResourceHandler(webapp.RequestHandler):
"""Handler for static resources."""
_RESOURCE_MAP = {
"status": ("overview.html", "text/html"),
"detail": ("detail.html", "text/html"),
"base.css": ("base.css", "text/css"),
"jquery.js": ("jquery-1.6.1.min.js", "text/javascript"),
"jquery-json.js": ("jquery.json-2.2.min.js", "text/javascript"),
"jquery-url.js": ("jquery.url.js", "text/javascript"),
"status.js": ("status.js", "text/javascript"),
}
def get(self, relative):
if relative not in self._RESOURCE_MAP:
self.response.set_status(404)
self.response.out.write("Resource not found.")
return
real_path, content_type = self._RESOURCE_MAP[relative]
path = os.path.join(os.path.dirname(__file__), "static", real_path)
# It's possible we're inside a zipfile (zipimport). If so, path
# will include 'something.zip'.
if ('.zip' + os.sep) in path:
(zip_file, zip_path) = os.path.relpath(path).split('.zip' + os.sep, 1)
content = zipfile.ZipFile(zip_file + '.zip').read(zip_path)
else:
try:
data = pkgutil.get_data(__name__, "static/" + real_path)
except AttributeError: # Python < 2.6.
data = None
content = data or open(path, 'rb').read()
self.response.headers["Cache-Control"] = "public; max-age=300"
self.response.headers["Content-Type"] = content_type
self.response.out.write(content)
class ListConfigsHandler(base_handler.GetJsonHandler):
"""Lists mapreduce configs as JSON for users to start jobs."""
def handle(self):
self.json_response["configs"] = MapReduceYaml.to_dict(get_mapreduce_yaml())
class ListJobsHandler(base_handler.GetJsonHandler):
"""Lists running and completed mapreduce jobs for an overview as JSON."""
def handle(self):
cursor = self.request.get("cursor")
count = int(self.request.get("count", "50"))
query = model.MapreduceState.all()
if cursor:
query.filter("__key__ >=", db.Key(cursor))
query.order("__key__")
jobs_list = query.fetch(count + 1)
if len(jobs_list) == (count + 1):
self.json_response["cursor"] = str(jobs_list[-1].key())
jobs_list = jobs_list[:-1]
all_jobs = []
for job in jobs_list:
out = {
# Data shared between overview and detail pages.
"name": job.mapreduce_spec.name,
"mapreduce_id": job.mapreduce_spec.mapreduce_id,
"active": job.active,
"start_timestamp_ms":
int(time.mktime(job.start_time.utctimetuple()) * 1000),
"updated_timestamp_ms":
int(time.mktime(job.last_poll_time.utctimetuple()) * 1000),
# Specific to overview page.
"chart_url": job.sparkline_url,
"chart_width": job.chart_width,
"active_shards": job.active_shards,
"shards": job.mapreduce_spec.mapper.shard_count,
}
if job.result_status:
out["result_status"] = job.result_status
all_jobs.append(out)
self.json_response["jobs"] = all_jobs
class GetJobDetailHandler(base_handler.GetJsonHandler):
"""Retrieves the details of a mapreduce job as JSON."""
def handle(self):
mapreduce_id = self.request.get("mapreduce_id")
if not mapreduce_id:
raise BadStatusParameterError("'mapreduce_id' was invalid")
job = model.MapreduceState.get_by_key_name(mapreduce_id)
if job is None:
raise KeyError("Could not find job with ID %r" % mapreduce_id)
self.json_response.update(job.mapreduce_spec.to_json())
self.json_response.update(job.counters_map.to_json())
self.json_response.update({
# Shared with overview page.
"active": job.active,
"start_timestamp_ms":
int(time.mktime(job.start_time.utctimetuple()) * 1000),
"updated_timestamp_ms":
int(time.mktime(job.last_poll_time.utctimetuple()) * 1000),
# Specific to detail page.
"chart_url": job.chart_url,
"chart_width": job.chart_width,
})
self.json_response["result_status"] = job.result_status
all_shards = []
for shard in model.ShardState.find_all_by_mapreduce_state(job):
out = {
"active": shard.active,
"result_status": shard.result_status,
"shard_number": shard.shard_number,
"shard_id": shard.shard_id,
"updated_timestamp_ms":
int(time.mktime(shard.update_time.utctimetuple()) * 1000),
"shard_description": shard.shard_description,
"last_work_item": shard.last_work_item,
}
out.update(shard.counters_map.to_json())
all_shards.append(out)
all_shards.sort(key=lambda x: x["shard_number"])
self.json_response["shards"] = all_shards
|
bsd-3-clause
|
diofeher/django-nfa
|
django/contrib/admin/widgets.py
|
1
|
8956
|
"""
Form Widget classes specific to the Django admin site.
"""
import copy
from django import newforms as forms
from django.newforms.widgets import RadioFieldRenderer
from django.newforms.util import flatatt
from django.utils.datastructures import MultiValueDict
from django.utils.text import capfirst, truncate_words
from django.utils.translation import ugettext as _
from django.utils.safestring import mark_safe
from django.utils.encoding import force_unicode
from django.conf import settings
class FilteredSelectMultiple(forms.SelectMultiple):
"""
A SelectMultiple with a JavaScript filter interface.
Note that the resulting JavaScript assumes that the SelectFilter2.js
library and its dependencies have been loaded in the HTML page.
"""
def __init__(self, verbose_name, is_stacked, attrs=None, choices=()):
self.verbose_name = verbose_name
self.is_stacked = is_stacked
super(FilteredSelectMultiple, self).__init__(attrs, choices)
def render(self, name, value, attrs=None, choices=()):
from django.conf import settings
output = [super(FilteredSelectMultiple, self).render(name, value, attrs, choices)]
output.append(u'<script type="text/javascript">addEvent(window, "load", function(e) {')
# TODO: "id_" is hard-coded here. This should instead use the correct
# API to determine the ID dynamically.
output.append(u'SelectFilter.init("id_%s", "%s", %s, "%s"); });</script>\n' % \
(name, self.verbose_name.replace('"', '\\"'), int(self.is_stacked), settings.ADMIN_MEDIA_PREFIX))
return mark_safe(u''.join(output))
class AdminDateWidget(forms.TextInput):
class Media:
js = (settings.ADMIN_MEDIA_PREFIX + "js/calendar.js",
settings.ADMIN_MEDIA_PREFIX + "js/admin/DateTimeShortcuts.js")
def __init__(self, attrs={}):
super(AdminDateWidget, self).__init__(attrs={'class': 'vDateField', 'size': '10'})
class AdminTimeWidget(forms.TextInput):
class Media:
js = (settings.ADMIN_MEDIA_PREFIX + "js/calendar.js",
settings.ADMIN_MEDIA_PREFIX + "js/admin/DateTimeShortcuts.js")
def __init__(self, attrs={}):
super(AdminTimeWidget, self).__init__(attrs={'class': 'vTimeField', 'size': '8'})
class AdminSplitDateTime(forms.SplitDateTimeWidget):
"""
A SplitDateTime Widget that has some admin-specific styling.
"""
def __init__(self, attrs=None):
widgets = [AdminDateWidget, AdminTimeWidget]
# Note that we're calling MultiWidget, not SplitDateTimeWidget, because
# we want to define widgets.
forms.MultiWidget.__init__(self, widgets, attrs)
def format_output(self, rendered_widgets):
return mark_safe(u'<p class="datetime">%s %s<br />%s %s</p>' % \
(_('Date:'), rendered_widgets[0], _('Time:'), rendered_widgets[1]))
class AdminRadioFieldRenderer(RadioFieldRenderer):
def render(self):
"""Outputs a <ul> for this set of radio fields."""
return mark_safe(u'<ul%s>\n%s\n</ul>' % (
flatatt(self.attrs),
u'\n'.join([u'<li>%s</li>' % force_unicode(w) for w in self]))
)
class AdminRadioSelect(forms.RadioSelect):
renderer = AdminRadioFieldRenderer
class AdminFileWidget(forms.FileInput):
"""
A FileField Widget that shows its current value if it has one.
"""
def __init__(self, attrs={}):
super(AdminFileWidget, self).__init__(attrs)
def render(self, name, value, attrs=None):
from django.conf import settings
output = []
if value:
output.append('%s <a target="_blank" href="%s%s">%s</a> <br />%s ' % \
(_('Currently:'), settings.MEDIA_URL, value, value, _('Change:')))
output.append(super(AdminFileWidget, self).render(name, value, attrs))
return mark_safe(u''.join(output))
class ForeignKeyRawIdWidget(forms.TextInput):
"""
A Widget for displaying ForeignKeys in the "raw_id" interface rather than
in a <select> box.
"""
def __init__(self, rel, attrs=None):
self.rel = rel
super(ForeignKeyRawIdWidget, self).__init__(attrs)
def render(self, name, value, attrs=None):
from django.conf import settings
related_url = '../../../%s/%s/' % (self.rel.to._meta.app_label, self.rel.to._meta.object_name.lower())
if self.rel.limit_choices_to:
url = '?' + '&'.join(['%s=%s' % (k, v) for k, v in self.rel.limit_choices_to.items()])
else:
url = ''
if not attrs.has_key('class'):
attrs['class'] = 'vForeignKeyRawIdAdminField' # The JavaScript looks for this hook.
output = [super(ForeignKeyRawIdWidget, self).render(name, value, attrs)]
# TODO: "id_" is hard-coded here. This should instead use the correct
# API to determine the ID dynamically.
output.append('<a href="%s%s" class="related-lookup" id="lookup_id_%s" onclick="return showRelatedObjectLookupPopup(this);"> ' % \
(related_url, url, name))
output.append('<img src="%simg/admin/selector-search.gif" width="16" height="16" alt="Lookup" /></a>' % settings.ADMIN_MEDIA_PREFIX)
if value:
output.append(self.label_for_value(value))
return mark_safe(u''.join(output))
def label_for_value(self, value):
return ' <strong>%s</strong>' % \
truncate_words(self.rel.to.objects.get(pk=value), 14)
class ManyToManyRawIdWidget(ForeignKeyRawIdWidget):
"""
A Widget for displaying ManyToMany ids in the "raw_id" interface rather than
in a <select multiple> box.
"""
def __init__(self, rel, attrs=None):
super(ManyToManyRawIdWidget, self).__init__(rel, attrs)
def render(self, name, value, attrs=None):
attrs['class'] = 'vManyToManyRawIdAdminField'
if value:
value = ','.join([str(v) for v in value])
else:
value = ''
return super(ManyToManyRawIdWidget, self).render(name, value, attrs)
def label_for_value(self, value):
return ''
def value_from_datadict(self, data, files, name):
value = data.get(name, None)
if value and ',' in value:
return data[name].split(',')
if value:
return [value]
return None
def _has_changed(self, initial, data):
if initial is None:
initial = []
if data is None:
data = []
if len(initial) != len(data):
return True
for pk1, pk2 in zip(initial, data):
if force_unicode(pk1) != force_unicode(pk2):
return True
return False
class RelatedFieldWidgetWrapper(forms.Widget):
"""
This class is a wrapper to a given widget to add the add icon for the
admin interface.
"""
def __init__(self, widget, rel, admin_site):
self.is_hidden = widget.is_hidden
self.needs_multipart_form = widget.needs_multipart_form
self.attrs = widget.attrs
self.choices = widget.choices
self.widget = widget
self.rel = rel
# so we can check if the related object is registered with this AdminSite
self.admin_site = admin_site
def __deepcopy__(self, memo):
obj = copy.copy(self)
obj.widget = copy.deepcopy(self.widget, memo)
obj.attrs = self.widget.attrs
memo[id(self)] = obj
return obj
def render(self, name, value, *args, **kwargs):
from django.conf import settings
rel_to = self.rel.to
related_url = '../../../%s/%s/' % (rel_to._meta.app_label, rel_to._meta.object_name.lower())
self.widget.choices = self.choices
output = [self.widget.render(name, value, *args, **kwargs)]
if rel_to in self.admin_site._registry: # If the related object has an admin interface:
# TODO: "id_" is hard-coded here. This should instead use the correct
# API to determine the ID dynamically.
output.append(u'<a href="%sadd/" class="add-another" id="add_id_%s" onclick="return showAddAnotherPopup(this);"> ' % \
(related_url, name))
output.append(u'<img src="%simg/admin/icon_addlink.gif" width="10" height="10" alt="Add Another"/></a>' % settings.ADMIN_MEDIA_PREFIX)
return mark_safe(u''.join(output))
def build_attrs(self, extra_attrs=None, **kwargs):
"Helper function for building an attribute dictionary."
self.attrs = self.widget.build_attrs(extra_attrs=None, **kwargs)
return self.attrs
def value_from_datadict(self, data, files, name):
return self.widget.value_from_datadict(data, files, name)
def _has_changed(self, initial, data):
return self.widget._has_changed(initial, data)
def id_for_label(self, id_):
return self.widget.id_for_label(id_)
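# --- Illustrative usage sketch (not part of the original module) ---
# Shows how one of the widgets above might be attached to a form field.
# Assumptions: this would normally live in application form code, and the
# field name and choices below are hypothetical.
class ExampleAuthorsForm(forms.Form):
    authors = forms.MultipleChoiceField(
        choices=[('1', 'Author One'), ('2', 'Author Two')],  # hypothetical
        required=False,
        widget=FilteredSelectMultiple(verbose_name='authors', is_stacked=False))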
|
bsd-3-clause
|
fxia22/ASM_xf
|
PythonD/site_python/numarray/image/combine.py
|
2
|
9369
|
import numarray as num
from _combine import combine as _comb
import operator as _operator
def _combine_f(funcstr, arrays, output=None, outtype=None, nlow=0, nhigh=0, badmasks=None):
arrays = [ num.asarray(a) for a in arrays ]
shape = arrays[0].shape
if output is None:
if outtype is not None:
out = arrays[0].astype(outtype)
else:
out = arrays[0].copy()
else:
out = output
for a in tuple(arrays[1:])+(out,):
if a.shape != shape:
raise ValueError("all arrays must have identical shapes")
_comb(arrays, out, nlow, nhigh, badmasks, funcstr)
if output is None:
return out
def median( arrays, output=None, outtype=None, nlow=0, nhigh=0, badmasks=None):
"""median() nominally computes the median pixels for a stack of
identically shaped images.
arrays specifies a sequence of inputs arrays, which are nominally a
stack of identically shaped images.
output may be used to specify the output array. If none is specified,
either arrays[0] is copied or a new array of type 'outtype'
is created.
outtype specifies the type of the output array when no 'output' is
specified.
nlow specifies the number of pixels to be excluded from median
on the low end of the pixel stack.
nhigh specifies the number of pixels to be excluded from median
on the high end of the pixel stack.
badmasks specifies boolean arrays corresponding to 'arrays', where true
indicates that a particular pixel is not to be included in the
median calculation.
>>> a = num.arange(4, shape=(2,2))
>>> arrays = [a*16, a*4, a*2, a*8]
>>> median(arrays)
array([[ 0, 6],
[12, 18]])
>>> median(arrays, nhigh=1)
array([[ 0, 4],
[ 8, 12]])
>>> median(arrays, nlow=1)
array([[ 0, 8],
[16, 24]])
>>> median(arrays, outtype=num.Float32)
array([[ 0., 6.],
[ 12., 18.]], type=Float32)
>>> bm = num.zeros((4,2,2), type=num.Bool)
>>> bm[2,...] = 1
>>> median(arrays, badmasks=bm)
array([[ 0, 8],
[16, 24]])
>>> median(arrays, badmasks=threshhold(arrays, high=25))
array([[ 0, 6],
[ 8, 12]])
"""
return _combine_f("median", arrays, output, outtype, nlow, nhigh, badmasks)
def average( arrays, output=None, outtype=None, nlow=0, nhigh=0, badmasks=None):
"""average() nominally computes the average pixel value for a stack of
identically shaped images.
arrays specifies a sequence of inputs arrays, which are nominally a
stack of identically shaped images.
output may be used to specify the output array. If none is specified,
either arrays[0] is copied or a new array of type 'outtype'
is created.
outtype specifies the type of the output array when no 'output' is
specified.
nlow specifies the number of pixels to be excluded from average
on the low end of the pixel stack.
nhigh specifies the number of pixels to be excluded from average
on the high end of the pixel stack.
badmasks specifies boolean arrays corresponding to 'arrays', where true
indicates that a particular pixel is not to be included in the
average calculation.
>>> a = num.arange(4, shape=(2,2))
>>> arrays = [a*16, a*4, a*2, a*8]
>>> average(arrays)
array([[ 0, 7],
[15, 22]])
>>> average(arrays, nhigh=1)
array([[ 0, 4],
[ 9, 14]])
>>> average(arrays, nlow=1)
array([[ 0, 9],
[18, 28]])
>>> average(arrays, outtype=num.Float32)
array([[ 0. , 7.5],
[ 15. , 22.5]], type=Float32)
>>> bm = num.zeros((4,2,2), type=num.Bool)
>>> bm[2,...] = 1
>>> average(arrays, badmasks=bm)
array([[ 0, 9],
[18, 28]])
>>> average(arrays, badmasks=threshhold(arrays, high=25))
array([[ 0, 7],
[ 9, 14]])
"""
return _combine_f("average", arrays, output, outtype, nlow, nhigh, badmasks)
def minimum( arrays, output=None, outtype=None, nlow=0, nhigh=0, badmasks=None):
"""minimum() nominally computes the minimum pixel value for a stack of
identically shaped images.
arrays specifies a sequence of inputs arrays, which are nominally a
stack of identically shaped images.
output may be used to specify the output array. If none is specified,
either arrays[0] is copied or a new array of type 'outtype'
is created.
outtype specifies the type of the output array when no 'output' is
specified.
nlow specifies the number of pixels to be excluded from minimum
on the low end of the pixel stack.
nhigh specifies the number of pixels to be excluded from minimum
on the high end of the pixel stack.
badmasks specifies boolean arrays corresponding to 'arrays', where true
indicates that a particular pixel is not to be included in the
minimum calculation.
>>> a = num.arange(4, shape=(2,2))
>>> arrays = [a*16, a*4, a*2, a*8]
>>> minimum(arrays)
array([[0, 2],
[4, 6]])
>>> minimum(arrays, nhigh=1)
array([[0, 2],
[4, 6]])
>>> minimum(arrays, nlow=1)
array([[ 0, 4],
[ 8, 12]])
>>> minimum(arrays, outtype=num.Float32)
array([[ 0., 2.],
[ 4., 6.]], type=Float32)
>>> bm = num.zeros((4,2,2), type=num.Bool)
>>> bm[2,...] = 1
>>> minimum(arrays, badmasks=bm)
array([[ 0, 4],
[ 8, 12]])
>>> minimum(arrays, badmasks=threshhold(arrays, low=10))
array([[ 0, 16],
[16, 12]])
"""
return _combine_f("minimum", arrays, output, outtype, nlow, nhigh, badmasks)
def threshhold(arrays, low=None, high=None, outputs=None):
"""threshhold() computes a boolean array 'outputs' with
corresponding elements for each element of arrays. The
boolean value is true where each of the arrays' values
is < the low or >= the high threshholds.
>>> a=num.arange(100, shape=(10,10))
>>> threshhold(a, 1, 50)
array([[1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1]], type=Bool)
>>> threshhold([ range(10)]*10, 3, 7)
array([[1, 1, 1, 0, 0, 0, 0, 1, 1, 1],
[1, 1, 1, 0, 0, 0, 0, 1, 1, 1],
[1, 1, 1, 0, 0, 0, 0, 1, 1, 1],
[1, 1, 1, 0, 0, 0, 0, 1, 1, 1],
[1, 1, 1, 0, 0, 0, 0, 1, 1, 1],
[1, 1, 1, 0, 0, 0, 0, 1, 1, 1],
[1, 1, 1, 0, 0, 0, 0, 1, 1, 1],
[1, 1, 1, 0, 0, 0, 0, 1, 1, 1],
[1, 1, 1, 0, 0, 0, 0, 1, 1, 1],
[1, 1, 1, 0, 0, 0, 0, 1, 1, 1]], type=Bool)
>>> threshhold(a, high=50)
array([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1]], type=Bool)
>>> threshhold(a, low=50)
array([[1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], type=Bool)
"""
if not isinstance(arrays[0], num.NumArray):
return threshhold( num.inputarray(arrays), low, high, outputs)
if outputs is None:
outs = num.zeros(shape=(len(arrays),)+arrays[0].shape,
type=num.Bool)
else:
outs = outputs
for i in range(len(arrays)):
a, out = arrays[i], outs[i]
out[:] = 0
if high is not None:
num.greater_equal(a, high, out)
if low is not None:
num.logical_or(out, a < low, out)
else:
if low is not None:
num.less(a, low, out)
if outputs is None:
return outs
def _bench():
"""time a 10**6 element median"""
import time
a = num.arange(10**6, shape=(1000, 1000))
arrays = [a*2, a*64, a*16, a*8]
t0 = time.clock()
median(arrays)
print "maskless:", time.clock()-t0
a = num.arange(10**6, shape=(1000, 1000))
arrays = [a*2, a*64, a*16, a*8]
t0 = time.clock()
median(arrays, badmasks=num.zeros((1000,1000), type=num.Bool))
print "masked:", time.clock()-t0
|
gpl-2.0
|
mahak/neutron
|
neutron/tests/unit/plugins/ml2/drivers/openvswitch/agent/openflow/native/test_br_tun.py
|
2
|
21858
|
# Copyright (C) 2014,2015 VA Linux Systems Japan K.K.
# Copyright (C) 2014,2015 YAMAMOTO Takashi <yamamoto at valinux co jp>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import mock
import neutron.plugins.ml2.drivers.openvswitch.agent.common.constants \
as ovs_const
from neutron.tests.unit.plugins.ml2.drivers.openvswitch.agent.openflow.native \
import ovs_bridge_test_base
call = mock.call # short hand
class OVSTunnelBridgeTest(ovs_bridge_test_base.OVSBridgeTestBase,
ovs_bridge_test_base.OVSDVRProcessTestMixin):
dvr_process_table_id = ovs_const.DVR_PROCESS
dvr_process_next_table_id = ovs_const.PATCH_LV_TO_TUN
def setUp(self):
conn_patcher = mock.patch(
'neutron.agent.ovsdb.impl_idl._connection')
conn_patcher.start()
super(OVSTunnelBridgeTest, self).setUp()
# NOTE(ivasilevskaya) The behaviour of oslotest.base.addCleanup()
# according to https://review.opendev.org/#/c/119201/4 guarantees
# that all started mocks will be stopped even without direct call to
# patcher.stop().
# If any individual mocks should be stopped by other than default
# mechanism, their cleanup has to be added after
# oslotest.BaseTestCase.setUp() not to be included in the stopall set
# that will be cleaned up by mock.patch.stopall. This way the mock
# won't be attempted to be stopped twice.
self.addCleanup(conn_patcher.stop)
self.setup_bridge_mock('br-tun', self.br_tun_cls)
self.stamp = self.br.default_cookie
def test_setup_default_table(self):
patch_int_ofport = 5555
arp_responder_enabled = False
self.br.setup_default_table(patch_int_ofport=patch_int_ofport,
arp_responder_enabled=arp_responder_enabled)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[ofpp.OFPInstructionGotoTable(table_id=2)],
match=ofpp.OFPMatch(in_port=patch_int_ofport),
priority=1, table_id=0),
active_bundle=None),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[],
match=ofpp.OFPMatch(),
priority=0, table_id=0),
active_bundle=None),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[ofpp.OFPInstructionGotoTable(table_id=20)],
match=ofpp.OFPMatch(
eth_dst=('00:00:00:00:00:00', '01:00:00:00:00:00')),
priority=0,
table_id=2),
active_bundle=None),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[ofpp.OFPInstructionGotoTable(table_id=22)],
match=ofpp.OFPMatch(
eth_dst=('01:00:00:00:00:00', '01:00:00:00:00:00')),
priority=0,
table_id=2),
active_bundle=None),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[],
match=ofpp.OFPMatch(),
priority=0, table_id=3),
active_bundle=None),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[],
match=ofpp.OFPMatch(),
priority=0, table_id=4),
active_bundle=None),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[],
match=ofpp.OFPMatch(),
priority=0, table_id=6),
active_bundle=None),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[
ofpp.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS, [
ofpp.NXActionLearn(
cookie=self.stamp,
hard_timeout=300,
priority=1,
specs=[
ofpp.NXFlowSpecMatch(
dst=('vlan_tci', 0),
n_bits=12,
src=('vlan_tci', 0)),
ofpp.NXFlowSpecMatch(
dst=('eth_dst', 0),
n_bits=48,
src=('eth_src', 0)),
ofpp.NXFlowSpecLoad(
dst=('vlan_tci', 0),
n_bits=16,
src=0),
ofpp.NXFlowSpecLoad(
dst=('tunnel_id', 0),
n_bits=64,
src=('tunnel_id', 0)),
ofpp.NXFlowSpecOutput(
dst='',
n_bits=32,
src=('in_port', 0)),
],
table_id=20),
ofpp.OFPActionOutput(patch_int_ofport, 0),
]),
],
match=ofpp.OFPMatch(),
priority=1,
table_id=10),
active_bundle=None),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[ofpp.OFPInstructionGotoTable(table_id=22)],
match=ofpp.OFPMatch(),
priority=0,
table_id=20),
active_bundle=None),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[],
match=ofpp.OFPMatch(),
priority=0,
table_id=22),
active_bundle=None)
]
self.assertEqual(expected, self.mock.mock_calls)
def test_setup_default_table_arp_responder_enabled(self):
patch_int_ofport = 5555
arp_responder_enabled = True
self.br.setup_default_table(patch_int_ofport=patch_int_ofport,
arp_responder_enabled=arp_responder_enabled)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[ofpp.OFPInstructionGotoTable(table_id=2)],
match=ofpp.OFPMatch(in_port=patch_int_ofport),
priority=1, table_id=0),
active_bundle=None),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[],
match=ofpp.OFPMatch(),
priority=0, table_id=0),
active_bundle=None),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[ofpp.OFPInstructionGotoTable(table_id=21)],
match=ofpp.OFPMatch(
eth_dst='ff:ff:ff:ff:ff:ff',
eth_type=self.ether_types.ETH_TYPE_ARP),
priority=1,
table_id=2),
active_bundle=None),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[ofpp.OFPInstructionGotoTable(table_id=20)],
match=ofpp.OFPMatch(
eth_dst=('00:00:00:00:00:00', '01:00:00:00:00:00')),
priority=0,
table_id=2),
active_bundle=None),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[ofpp.OFPInstructionGotoTable(table_id=22)],
match=ofpp.OFPMatch(
eth_dst=('01:00:00:00:00:00', '01:00:00:00:00:00')),
priority=0,
table_id=2),
active_bundle=None),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[],
match=ofpp.OFPMatch(),
priority=0, table_id=3),
active_bundle=None),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[],
match=ofpp.OFPMatch(),
priority=0, table_id=4),
active_bundle=None),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[],
match=ofpp.OFPMatch(),
priority=0, table_id=6),
active_bundle=None),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[
ofpp.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS, [
ofpp.NXActionLearn(
cookie=self.stamp,
hard_timeout=300,
priority=1,
specs=[
ofpp.NXFlowSpecMatch(
dst=('vlan_tci', 0),
n_bits=12,
src=('vlan_tci', 0)),
ofpp.NXFlowSpecMatch(
dst=('eth_dst', 0),
n_bits=48,
src=('eth_src', 0)),
ofpp.NXFlowSpecLoad(
dst=('vlan_tci', 0),
n_bits=16,
src=0),
ofpp.NXFlowSpecLoad(
dst=('tunnel_id', 0),
n_bits=64,
src=('tunnel_id', 0)),
ofpp.NXFlowSpecOutput(
dst='',
n_bits=32,
src=('in_port', 0)),
],
table_id=20),
ofpp.OFPActionOutput(patch_int_ofport, 0),
]),
],
match=ofpp.OFPMatch(),
priority=1,
table_id=10),
active_bundle=None),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[ofpp.OFPInstructionGotoTable(table_id=22)],
match=ofpp.OFPMatch(),
priority=0,
table_id=20),
active_bundle=None),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[ofpp.OFPInstructionGotoTable(table_id=22)],
match=ofpp.OFPMatch(),
priority=0,
table_id=21),
active_bundle=None),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[],
match=ofpp.OFPMatch(),
priority=0,
table_id=22),
active_bundle=None)
]
self.assertEqual(expected, self.mock.mock_calls)
def test_provision_local_vlan(self):
network_type = 'vxlan'
lvid = 888
segmentation_id = 777
distributed = False
self.br.provision_local_vlan(network_type=network_type, lvid=lvid,
segmentation_id=segmentation_id,
distributed=distributed)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[
ofpp.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS, [
ofpp.OFPActionPushVlan(),
ofpp.OFPActionSetField(
vlan_vid=lvid | ofp.OFPVID_PRESENT)
]),
ofpp.OFPInstructionGotoTable(table_id=10),
],
match=ofpp.OFPMatch(tunnel_id=segmentation_id),
priority=1,
table_id=4),
active_bundle=None),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_reclaim_local_vlan(self):
network_type = 'vxlan'
segmentation_id = 777
self.br.reclaim_local_vlan(network_type=network_type,
segmentation_id=segmentation_id)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call.uninstall_flows(
table_id=4,
match=ofpp.OFPMatch(tunnel_id=segmentation_id)),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_install_flood_to_tun(self):
vlan = 3333
tun_id = 2222
ports = [11, 44, 22, 33]
self.br.install_flood_to_tun(vlan=vlan,
tun_id=tun_id,
ports=ports)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[
ofpp.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS, [
ofpp.OFPActionPopVlan(),
ofpp.OFPActionSetField(tunnel_id=tun_id),
] + [ofpp.OFPActionOutput(p, 0) for p in ports]),
],
match=ofpp.OFPMatch(vlan_vid=vlan | ofp.OFPVID_PRESENT),
priority=1,
table_id=22),
active_bundle=None),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_delete_flood_to_tun(self):
vlan = 3333
self.br.delete_flood_to_tun(vlan=vlan)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call.uninstall_flows(table_id=22,
match=ofpp.OFPMatch(vlan_vid=vlan | ofp.OFPVID_PRESENT)),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_install_unicast_to_tun(self):
vlan = 3333
port = 55
mac = '08:60:6e:7f:74:e7'
tun_id = 2222
self.br.install_unicast_to_tun(vlan=vlan,
tun_id=tun_id,
port=port,
mac=mac)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[
ofpp.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS, [
ofpp.OFPActionPopVlan(),
ofpp.OFPActionSetField(tunnel_id=tun_id),
ofpp.OFPActionOutput(port, 0),
]),
],
match=ofpp.OFPMatch(
eth_dst=mac, vlan_vid=vlan | ofp.OFPVID_PRESENT),
priority=2,
table_id=20),
active_bundle=None),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_delete_unicast_to_tun(self):
vlan = 3333
mac = '08:60:6e:7f:74:e7'
self.br.delete_unicast_to_tun(vlan=vlan, mac=mac)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call.uninstall_flows(table_id=20,
match=ofpp.OFPMatch(
eth_dst=mac, vlan_vid=vlan | ofp.OFPVID_PRESENT)),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_delete_unicast_to_tun_without_mac(self):
vlan = 3333
mac = None
self.br.delete_unicast_to_tun(vlan=vlan, mac=mac)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call.uninstall_flows(table_id=20,
match=ofpp.OFPMatch(vlan_vid=vlan | ofp.OFPVID_PRESENT)),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_install_arp_responder(self):
vlan = 3333
ip = '192.0.2.1'
mac = '08:60:6e:7f:74:e7'
self.br.install_arp_responder(vlan=vlan, ip=ip, mac=mac)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[
ofpp.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS, [
ofpp.OFPActionSetField(arp_op=self.arp.ARP_REPLY),
ofpp.NXActionRegMove(
dst_field='arp_tha',
n_bits=48,
src_field='arp_sha'),
ofpp.NXActionRegMove(
dst_field='arp_tpa',
n_bits=32,
src_field='arp_spa'),
ofpp.OFPActionSetField(arp_sha=mac),
ofpp.OFPActionSetField(arp_spa=ip),
ofpp.NXActionRegMove(src_field='eth_src',
dst_field='eth_dst',
n_bits=48),
ofpp.OFPActionSetField(eth_src=mac),
ofpp.OFPActionOutput(ofp.OFPP_IN_PORT, 0),
]),
],
match=ofpp.OFPMatch(
eth_type=self.ether_types.ETH_TYPE_ARP,
arp_tpa=ip,
vlan_vid=vlan | ofp.OFPVID_PRESENT),
priority=1,
table_id=21),
active_bundle=None),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_delete_arp_responder(self):
vlan = 3333
ip = '192.0.2.1'
self.br.delete_arp_responder(vlan=vlan, ip=ip)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call.uninstall_flows(
match=ofpp.OFPMatch(
eth_type=self.ether_types.ETH_TYPE_ARP,
arp_tpa=ip,
vlan_vid=vlan | ofp.OFPVID_PRESENT),
table_id=21),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_delete_arp_responder_without_ip(self):
vlan = 3333
ip = None
self.br.delete_arp_responder(vlan=vlan, ip=ip)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call.uninstall_flows(
match=ofpp.OFPMatch(
eth_type=self.ether_types.ETH_TYPE_ARP,
vlan_vid=vlan | ofp.OFPVID_PRESENT),
table_id=21),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_setup_tunnel_port(self):
network_type = 'vxlan'
port = 11111
self.br.setup_tunnel_port(network_type=network_type, port=port)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[
ofpp.OFPInstructionGotoTable(table_id=4),
],
match=ofpp.OFPMatch(in_port=port),
priority=1,
table_id=0),
active_bundle=None),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_cleanup_tunnel_port(self):
port = 11111
self.br.cleanup_tunnel_port(port=port)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call.uninstall_flows(in_port=port),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_add_dvr_mac_tun(self):
mac = '00:02:b3:13:fe:3d'
port = 8888
self.br.add_dvr_mac_tun(mac=mac, port=port)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[
ofpp.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS, [
ofpp.OFPActionOutput(port, 0),
]),
],
match=ofpp.OFPMatch(eth_src=mac),
priority=1,
table_id=9),
active_bundle=None),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_remove_dvr_mac_tun(self):
mac = '00:02:b3:13:fe:3d'
self.br.remove_dvr_mac_tun(mac=mac)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call.uninstall_flows(eth_src=mac, table_id=9),
]
self.assertEqual(expected, self.mock.mock_calls)
|
apache-2.0
|
Russell-IO/ansible
|
lib/ansible/executor/task_result.py
|
23
|
4602
|
# Copyright: (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from copy import deepcopy
from ansible.parsing.dataloader import DataLoader
from ansible.vars.clean import strip_internal_keys
_IGNORE = ('failed', 'skipped')
_PRESERVE = ('attempts', 'changed', 'retries')
class TaskResult:
'''
This class is responsible for interpreting the resulting data
from an executed task, and provides helper methods for determining
the result of a given task.
'''
def __init__(self, host, task, return_data, task_fields=None):
self._host = host
self._task = task
if isinstance(return_data, dict):
self._result = return_data.copy()
else:
self._result = DataLoader().load(return_data)
if task_fields is None:
self._task_fields = dict()
else:
self._task_fields = task_fields
@property
def task_name(self):
return self._task_fields.get('name', None) or self._task.get_name()
def is_changed(self):
return self._check_key('changed')
def is_skipped(self):
# loop results
if 'results' in self._result:
results = self._result['results']
# Loop tasks are only considered skipped if all items were skipped.
# some squashed results (eg, yum) are not dicts and can't be skipped individually
if results and all(isinstance(res, dict) and res.get('skipped', False) for res in results):
return True
# regular tasks and squashed non-dict results
return self._result.get('skipped', False)
def is_failed(self):
if 'failed_when_result' in self._result or \
'results' in self._result and True in [True for x in self._result['results'] if 'failed_when_result' in x]:
return self._check_key('failed_when_result')
else:
return self._check_key('failed')
def is_unreachable(self):
return self._check_key('unreachable')
def needs_debugger(self, globally_enabled=False):
_debugger = self._task_fields.get('debugger')
ret = False
if globally_enabled and (self.is_failed() or self.is_unreachable()):
ret = True
if _debugger in ('always',):
ret = True
elif _debugger in ('never',):
ret = False
elif _debugger in ('on_failed',) and self.is_failed():
ret = True
elif _debugger in ('on_unreachable',) and self.is_unreachable():
ret = True
        elif _debugger in ('on_skipped',) and self.is_skipped():
ret = True
return ret
def _check_key(self, key):
'''get a specific key from the result or its items'''
if isinstance(self._result, dict) and key in self._result:
return self._result.get(key, False)
else:
flag = False
for res in self._result.get('results', []):
if isinstance(res, dict):
flag |= res.get(key, False)
return flag
def clean_copy(self):
''' returns 'clean' taskresult object '''
# FIXME: clean task_fields, _task and _host copies
result = TaskResult(self._host, self._task, {}, self._task_fields)
# statuses are already reflected on the event type
if result._task and result._task.action in ['debug']:
# debug is verbose by default to display vars, no need to add invocation
ignore = _IGNORE + ('invocation',)
else:
ignore = _IGNORE
if self._result.get('_ansible_no_log', False):
x = {"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result"}
for preserve in _PRESERVE:
if preserve in self._result:
x[preserve] = self._result[preserve]
result._result = x
elif self._result:
result._result = deepcopy(self._result)
            # actually remove
for remove_key in ignore:
if remove_key in result._result:
del result._result[remove_key]
# remove almost ALL internal keys, keep ones relevant to callback
strip_internal_keys(result._result, exceptions=('_ansible_verbose_always', '_ansible_item_label', '_ansible_no_log'))
return result
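# --- Usage sketch (not part of the original module) ---
# A minimal illustration of how TaskResult interprets module output; the stub
# task below is hypothetical and only provides the attributes TaskResult reads.
if __name__ == '__main__':
    class _StubTask(object):
        action = 'command'
        def get_name(self):
            return 'stub task'
    res = TaskResult('localhost', _StubTask(), {'changed': True, 'rc': 0},
                     task_fields={'name': 'stub task'})
    print(res.is_changed())   # True
    print(res.is_failed())    # False
    print(res.is_skipped())   # False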
|
gpl-3.0
|
izapolsk/integration_tests
|
cfme/containers/project.py
|
3
|
5378
|
import attr
from navmazing import NavigateToAttribute
from navmazing import NavigateToSibling
from cfme.common import Taggable
from cfme.common import TaggableCollection
from cfme.common import TagPageView
from cfme.containers.provider import ContainerObjectAllBaseView
from cfme.containers.provider import ContainerObjectDetailsBaseView
from cfme.containers.provider import GetRandomInstancesMixin
from cfme.containers.provider import Labelable
from cfme.modeling.base import BaseCollection
from cfme.modeling.base import BaseEntity
from cfme.utils.appliance.implementations.ui import CFMENavigateStep
from cfme.utils.appliance.implementations.ui import navigator
from cfme.utils.providers import get_crud_by_name
class ProjectAllView(ContainerObjectAllBaseView):
"""Container Projects All view"""
SUMMARY_TEXT = 'Container Projects'
class ProjectDetailsView(ContainerObjectDetailsBaseView):
"""Container Projects Detail view"""
SUMMARY_TEXT = 'Container Projects'
class ProjectDashboardView(ProjectDetailsView):
@property
def is_displayed(self):
return(
self.breadcrumb.is_displayed and
'{} (Dashboard)'.format(self.context['object'].name) in self.breadcrumb.active_location)
@attr.s
class Project(BaseEntity, Taggable, Labelable):
PLURAL = 'Projects'
all_view = ProjectAllView
details_view = ProjectDetailsView
name = attr.ib()
provider = attr.ib()
@attr.s
class ProjectCollection(GetRandomInstancesMixin, BaseCollection, TaggableCollection):
"""Collection object for :py:class:`Project`."""
ENTITY = Project
def all(self):
        # container_projects table has ems_id, join with ext_management_systems on id for provider name
# TODO Update to use REST API instead of DB queries
project_table = self.appliance.db.client['container_projects']
ems_table = self.appliance.db.client['ext_management_systems']
project_query = (
self.appliance.db.client.session
.query(project_table.name, ems_table.name)
.join(ems_table, project_table.ems_id == ems_table.id))
if self.filters.get('archived'):
project_query = project_query.filter(project_table.deleted_on.isnot(None))
if self.filters.get('active'):
project_query = project_query.filter(project_table.deleted_on.is_(None))
provider = None
# filtered
if self.filters.get('provider'):
provider = self.filters.get('provider')
project_query = project_query.filter(ems_table.name == provider.name)
projects = []
for name, ems_name in project_query.all():
projects.append(self.instantiate(name=name,
provider=provider or get_crud_by_name(ems_name)))
return projects
@navigator.register(ProjectCollection, 'All')
class All(CFMENavigateStep):
prerequisite = NavigateToAttribute('appliance.server', 'LoggedIn')
VIEW = ProjectAllView
def step(self, *args, **kwargs):
self.prerequisite_view.navigation.select('Compute', 'Containers', 'Projects')
def resetter(self, *args, **kwargs):
# Reset view and selection
if self.view.toolbar.view_selector.is_displayed:
self.view.toolbar.view_selector.select("List View")
if self.view.paginator.is_displayed:
self.view.paginator.reset_selection()
@navigator.register(Project, 'Details')
class Details(CFMENavigateStep):
VIEW = ProjectDetailsView
prerequisite = NavigateToAttribute('parent', 'All')
def step(self, *args, **kwargs):
search_visible = self.prerequisite_view.entities.search.is_displayed
self.prerequisite_view.entities.get_entity(name=self.obj.name,
surf_pages=not search_visible,
use_search=search_visible).click()
def resetter(self, *args, **kwargs):
if self.view.toolbar.view_selector.is_displayed:
self.view.toolbar.view_selector.select("Summary View")
@navigator.register(Project, 'Dashboard')
class Dashboard(CFMENavigateStep):
VIEW = ProjectDashboardView
prerequisite = NavigateToAttribute('parent', 'All')
def step(self, *args, **kwargs):
search_visible = self.prerequisite_view.entities.search.is_displayed
self.prerequisite_view.entities.get_entity(name=self.obj.name,
surf_pages=not search_visible,
use_search=search_visible).click()
def resetter(self, *args, **kwargs):
if self.view.toolbar.view_selector.is_displayed:
self.view.toolbar.view_selector.select("Dashboard View")
@navigator.register(Project, 'EditTagsFromDetails')
class EditTagsFromDetails(CFMENavigateStep):
VIEW = TagPageView
prerequisite = NavigateToSibling('Details')
def step(self, *args, **kwargs):
self.prerequisite_view.toolbar.policy.item_select('Edit Tags')
@navigator.register(Project, 'EditTagsFromDashboard')
class EditTagsFromDashboard(CFMENavigateStep):
VIEW = TagPageView
prerequisite = NavigateToSibling('Dashboard')
def step(self, *args, **kwargs):
self.prerequisite_view.toolbar.policy.item_select('Edit Tags')
|
gpl-2.0
|
vivianli32/TravelConnect
|
flask/lib/python3.4/site-packages/babel/messages/jslexer.py
|
162
|
5317
|
# -*- coding: utf-8 -*-
"""
babel.messages.jslexer
~~~~~~~~~~~~~~~~~~~~~~
A simple JavaScript 1.5 lexer which is used for the JavaScript
extractor.
:copyright: (c) 2013 by the Babel Team.
:license: BSD, see LICENSE for more details.
"""
from operator import itemgetter
import re
from babel._compat import unichr
operators = [
'+', '-', '*', '%', '!=', '==', '<', '>', '<=', '>=', '=',
'+=', '-=', '*=', '%=', '<<', '>>', '>>>', '<<=', '>>=',
'>>>=', '&', '&=', '|', '|=', '&&', '||', '^', '^=', '(', ')',
'[', ']', '{', '}', '!', '--', '++', '~', ',', ';', '.', ':'
]
operators.sort(key=lambda a: -len(a))
escapes = {'b': '\b', 'f': '\f', 'n': '\n', 'r': '\r', 't': '\t'}
rules = [
(None, re.compile(r'\s+(?u)')),
(None, re.compile(r'<!--.*')),
('linecomment', re.compile(r'//.*')),
('multilinecomment', re.compile(r'/\*.*?\*/(?us)')),
('name', re.compile(r'(\$+\w*|[^\W\d]\w*)(?u)')),
('number', re.compile(r'''(?x)(
(?:0|[1-9]\d*)
(\.\d+)?
([eE][-+]?\d+)? |
(0x[a-fA-F0-9]+)
)''')),
('operator', re.compile(r'(%s)' % '|'.join(map(re.escape, operators)))),
('string', re.compile(r'''(?xs)(
'(?:[^'\\]*(?:\\.[^'\\]*)*)' |
"(?:[^"\\]*(?:\\.[^"\\]*)*)"
)'''))
]
division_re = re.compile(r'/=?')
regex_re = re.compile(r'/(?:[^/\\]*(?:\\.[^/\\]*)*)/[a-zA-Z]*(?s)')
line_re = re.compile(r'(\r\n|\n|\r)')
line_join_re = re.compile(r'\\' + line_re.pattern)
uni_escape_re = re.compile(r'[a-fA-F0-9]{1,4}')
class Token(tuple):
"""Represents a token as returned by `tokenize`."""
__slots__ = ()
def __new__(cls, type, value, lineno):
return tuple.__new__(cls, (type, value, lineno))
type = property(itemgetter(0))
value = property(itemgetter(1))
lineno = property(itemgetter(2))
def indicates_division(token):
"""A helper function that helps the tokenizer to decide if the current
token may be followed by a division operator.
"""
if token.type == 'operator':
return token.value in (')', ']', '}', '++', '--')
return token.type in ('name', 'number', 'string', 'regexp')
def unquote_string(string):
"""Unquote a string with JavaScript rules. The string has to start with
string delimiters (``'`` or ``"``.)
"""
assert string and string[0] == string[-1] and string[0] in '"\'', \
'string provided is not properly delimited'
string = line_join_re.sub('\\1', string[1:-1])
result = []
add = result.append
pos = 0
while 1:
# scan for the next escape
escape_pos = string.find('\\', pos)
if escape_pos < 0:
break
add(string[pos:escape_pos])
# check which character is escaped
next_char = string[escape_pos + 1]
if next_char in escapes:
add(escapes[next_char])
        # unicode escapes. tries to consume up to four hexadecimal
        # characters and interpret them as a unicode code point. If
        # there is no such code point, put all the consumed
        # characters into the string.
elif next_char in 'uU':
escaped = uni_escape_re.match(string, escape_pos + 2)
if escaped is not None:
escaped_value = escaped.group()
if len(escaped_value) == 4:
try:
add(unichr(int(escaped_value, 16)))
except ValueError:
pass
else:
pos = escape_pos + 6
continue
add(next_char + escaped_value)
pos = escaped.end()
continue
else:
add(next_char)
# bogus escape. Just remove the backslash.
else:
add(next_char)
pos = escape_pos + 2
if pos < len(string):
add(string[pos:])
return u''.join(result)
def tokenize(source):
"""Tokenize a JavaScript source. Returns a generator of tokens.
"""
may_divide = False
pos = 0
lineno = 1
end = len(source)
while pos < end:
# handle regular rules first
for token_type, rule in rules:
match = rule.match(source, pos)
if match is not None:
break
# if we don't have a match we don't give up yet, but check for
# division operators or regular expression literals, based on
# the status of `may_divide` which is determined by the last
# processed non-whitespace token using `indicates_division`.
else:
if may_divide:
match = division_re.match(source, pos)
token_type = 'operator'
else:
match = regex_re.match(source, pos)
token_type = 'regexp'
if match is None:
# woops. invalid syntax. jump one char ahead and try again.
pos += 1
continue
token_value = match.group()
if token_type is not None:
token = Token(token_type, token_value, lineno)
may_divide = indicates_division(token)
yield token
lineno += len(line_re.findall(token_value))
pos = match.end()
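# --- Usage sketch (not part of the original module) ---
# Demonstrates the tokenize() generator and unquote_string() defined above;
# the sample JavaScript snippet is made up for illustration.
if __name__ == '__main__':
    for tok in tokenize(u'msg = gettext("Hello\\nworld"); // greet'):
        print('%s %r (line %d)' % (tok.type, tok.value, tok.lineno))
    # unquote_string applies the JavaScript escape rules implemented above:
    print(unquote_string(u'"Hello\\nworld"'))  # prints Hello and world on two lines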
|
mit
|
chaitu2289/xray_scattering
|
liblinear-multicore-2.1-2/python/liblinearutil.py
|
4
|
8712
|
#!/usr/bin/env python
import os, sys
sys.path = [os.path.dirname(os.path.abspath(__file__))] + sys.path
from liblinear import *
from liblinear import __all__ as liblinear_all
from ctypes import c_double
__all__ = ['svm_read_problem', 'load_model', 'save_model', 'evaluations',
'train', 'predict'] + liblinear_all
def svm_read_problem(data_file_name):
"""
svm_read_problem(data_file_name) -> [y, x]
Read LIBSVM-format data from data_file_name and return labels y
and data instances x.
"""
prob_y = []
prob_x = []
for line in open(data_file_name):
line = line.split(None, 1)
# In case an instance with all zero features
if len(line) == 1: line += ['']
label, features = line
xi = {}
for e in features.split():
ind, val = e.split(":")
xi[int(ind)] = float(val)
prob_y += [float(label)]
prob_x += [xi]
return (prob_y, prob_x)
def load_model(model_file_name):
"""
load_model(model_file_name) -> model
Load a LIBLINEAR model from model_file_name and return.
"""
model = liblinear.load_model(model_file_name.encode())
if not model:
print("can't open model file %s" % model_file_name)
return None
model = toPyModel(model)
return model
def save_model(model_file_name, model):
"""
save_model(model_file_name, model) -> None
Save a LIBLINEAR model to the file model_file_name.
"""
liblinear.save_model(model_file_name.encode(), model)
def evaluations(ty, pv):
"""
evaluations(ty, pv) -> (ACC, MSE, SCC)
Calculate accuracy, mean squared error and squared correlation coefficient
using the true values (ty) and predicted values (pv).
"""
if len(ty) != len(pv):
raise ValueError("len(ty) must equal to len(pv)")
total_correct = total_error = 0
sumv = sumy = sumvv = sumyy = sumvy = 0
for v, y in zip(pv, ty):
if y == v:
total_correct += 1
total_error += (v-y)*(v-y)
sumv += v
sumy += y
sumvv += v*v
sumyy += y*y
sumvy += v*y
l = len(ty)
ACC = 100.0*total_correct/l
MSE = total_error/l
try:
SCC = ((l*sumvy-sumv*sumy)*(l*sumvy-sumv*sumy))/((l*sumvv-sumv*sumv)*(l*sumyy-sumy*sumy))
except:
SCC = float('nan')
return (ACC, MSE, SCC)
def train(arg1, arg2=None, arg3=None):
"""
train(y, x [, options]) -> model | ACC
train(prob [, options]) -> model | ACC
train(prob, param) -> model | ACC
Train a model from data (y, x) or a problem prob using
'options' or a parameter param.
If '-v' is specified in 'options' (i.e., cross validation)
either accuracy (ACC) or mean-squared error (MSE) is returned.
options:
-s type : set type of solver (default 1)
for multi-class classification
0 -- L2-regularized logistic regression (primal)
1 -- L2-regularized L2-loss support vector classification (dual)
2 -- L2-regularized L2-loss support vector classification (primal)
3 -- L2-regularized L1-loss support vector classification (dual)
4 -- support vector classification by Crammer and Singer
5 -- L1-regularized L2-loss support vector classification
6 -- L1-regularized logistic regression
7 -- L2-regularized logistic regression (dual)
for regression
11 -- L2-regularized L2-loss support vector regression (primal)
12 -- L2-regularized L2-loss support vector regression (dual)
13 -- L2-regularized L1-loss support vector regression (dual)
-c cost : set the parameter C (default 1)
-p epsilon : set the epsilon in loss function of SVR (default 0.1)
-e epsilon : set tolerance of termination criterion
-s 0 and 2
|f'(w)|_2 <= eps*min(pos,neg)/l*|f'(w0)|_2,
where f is the primal function, (default 0.01)
-s 11
|f'(w)|_2 <= eps*|f'(w0)|_2 (default 0.001)
-s 1, 3, 4, and 7
Dual maximal violation <= eps; similar to liblinear (default 0.)
-s 5 and 6
|f'(w)|_inf <= eps*min(pos,neg)/l*|f'(w0)|_inf,
where f is the primal function (default 0.01)
-s 12 and 13
|f'(alpha)|_1 <= eps |f'(alpha0)|,
where f is the dual function (default 0.1)
-B bias : if bias >= 0, instance x becomes [x; bias]; if < 0, no bias term added (default -1)
-wi weight: weights adjust the parameter C of different classes (see README for details)
-v n: n-fold cross validation mode
-n nr_thread : parallel version with [nr_thread] threads (default 1; only for -s 0, 2, 11)
-q : quiet mode (no outputs)
"""
prob, param = None, None
if isinstance(arg1, (list, tuple)):
assert isinstance(arg2, (list, tuple))
y, x, options = arg1, arg2, arg3
prob = problem(y, x)
param = parameter(options)
elif isinstance(arg1, problem):
prob = arg1
if isinstance(arg2, parameter):
param = arg2
else :
param = parameter(arg2)
if prob == None or param == None :
raise TypeError("Wrong types for the arguments")
prob.set_bias(param.bias)
liblinear.set_print_string_function(param.print_func)
err_msg = liblinear.check_parameter(prob, param)
if err_msg :
raise ValueError('Error: %s' % err_msg)
if param.flag_find_C:
nr_fold = param.nr_fold
best_C = c_double()
best_rate = c_double()
max_C = 1024
if param.flag_C_specified:
start_C = param.C
else:
start_C = -1.0
liblinear.find_parameter_C(prob, param, nr_fold, start_C, max_C, best_C, best_rate)
print("Best C = %lf CV accuracy = %g%%\n"% (best_C.value, 100.0*best_rate.value))
return best_C.value,best_rate.value
elif param.flag_cross_validation:
l, nr_fold = prob.l, param.nr_fold
target = (c_double * l)()
liblinear.cross_validation(prob, param, nr_fold, target)
ACC, MSE, SCC = evaluations(prob.y[:l], target[:l])
if param.solver_type in [L2R_L2LOSS_SVR, L2R_L2LOSS_SVR_DUAL, L2R_L1LOSS_SVR_DUAL]:
print("Cross Validation Mean squared error = %g" % MSE)
print("Cross Validation Squared correlation coefficient = %g" % SCC)
return MSE
else:
print("Cross Validation Accuracy = %g%%" % ACC)
return ACC
else :
m = liblinear.train(prob, param)
m = toPyModel(m)
return m
def predict(y, x, m, options=""):
"""
predict(y, x, m [, options]) -> (p_labels, p_acc, p_vals)
Predict data (y, x) with the SVM model m.
options:
-b probability_estimates: whether to output probability estimates, 0 or 1 (default 0); currently for logistic regression only
-q quiet mode (no outputs)
The return tuple contains
p_labels: a list of predicted labels
p_acc: a tuple including accuracy (for classification), mean-squared
error, and squared correlation coefficient (for regression).
p_vals: a list of decision values or probability estimates (if '-b 1'
is specified). If k is the number of classes, for decision values,
each element includes results of predicting k binary-class
SVMs. if k = 2 and solver is not MCSVM_CS, only one decision value
is returned. For probabilities, each element contains k values
indicating the probability that the testing instance is in each class.
Note that the order of classes here is the same as 'model.label'
field in the model structure.
"""
def info(s):
print(s)
predict_probability = 0
argv = options.split()
i = 0
while i < len(argv):
if argv[i] == '-b':
i += 1
predict_probability = int(argv[i])
elif argv[i] == '-q':
info = print_null
else:
raise ValueError("Wrong options")
i+=1
solver_type = m.param.solver_type
nr_class = m.get_nr_class()
nr_feature = m.get_nr_feature()
is_prob_model = m.is_probability_model()
bias = m.bias
if bias >= 0:
biasterm = feature_node(nr_feature+1, bias)
else:
biasterm = feature_node(-1, bias)
pred_labels = []
pred_values = []
if predict_probability:
if not is_prob_model:
raise TypeError('probability output is only supported for logistic regression')
prob_estimates = (c_double * nr_class)()
for xi in x:
xi, idx = gen_feature_nodearray(xi, feature_max=nr_feature)
xi[-2] = biasterm
label = liblinear.predict_probability(m, xi, prob_estimates)
values = prob_estimates[:nr_class]
pred_labels += [label]
pred_values += [values]
else:
if nr_class <= 2:
nr_classifier = 1
else:
nr_classifier = nr_class
dec_values = (c_double * nr_classifier)()
for xi in x:
xi, idx = gen_feature_nodearray(xi, feature_max=nr_feature)
xi[-2] = biasterm
label = liblinear.predict_values(m, xi, dec_values)
values = dec_values[:nr_classifier]
pred_labels += [label]
pred_values += [values]
if len(y) == 0:
y = [0] * len(x)
ACC, MSE, SCC = evaluations(y, pred_labels)
l = len(y)
if m.is_regression_model():
info("Mean squared error = %g (regression)" % MSE)
info("Squared correlation coefficient = %g (regression)" % SCC)
else:
info("Accuracy = %g%% (%d/%d) (classification)" % (ACC, int(l*ACC/100), l))
return pred_labels, (ACC, MSE, SCC), pred_values
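# --- Usage sketch (not part of the original file) ---
# Ties together the helpers documented above. It assumes the liblinear shared
# library has been built and that 'heart_scale' (a LIBSVM-format data file
# shipped with the upstream distribution) is present in the working directory.
if __name__ == '__main__':
    y, x = svm_read_problem('heart_scale')
    m = train(y[:200], x[:200], '-s 2 -c 4')    # L2-regularized L2-loss SVC (primal)
    p_labels, p_acc, p_vals = predict(y[200:], x[200:], m)
    save_model('heart_scale.model', m)
    m = load_model('heart_scale.model')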
|
bsd-3-clause
|
chdb/DhammaMap
|
app/cryptoken.py
|
1
|
6464
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#from __future__ import unicode_literals
import hashlib
import hmac
import os
import json
import utils as u
import widget as W
import logging
from base64 import urlsafe_b64encode\
, urlsafe_b64decode
class Base64Error (Exception):
'''invalid Base64 character or incorrect padding'''
def decodeToken (token, expected):
try:
td = _decode (token)
valid, expired = td.valid (expected)
if valid:
if expected == 'session':
td.data['_ts'] = td.timeStamp
return td.data, expired
except Base64Error:
        logging.warning ('invalid Base64 in %s Token: %r', expected, token)
except:
        logging.exception('unexpected exception decoding %s token : %r', expected, token)
return None, False
def encodeVerifyToken (data, tt):
# tt = _tokenType (tt)
assert tt in ['signUp'
,'pw1'
,'pw2'
], 'invalid TokenType: %s' % tt
return _encode (tt, data)
def encodeSessionToken (ssn):#, user=None):
data = dict(ssn)
if '_userID' in ssn:
return _encode ('auth', data)
return _encode ('anon', data)
TokenTypes = ( 'anon'
, 'auth'
, 'signUp'
, 'pw1'
)
def _tokenTypeCode (tt): return TokenTypes.index(tt)
def _tokenType (code): return TokenTypes [code]
#.........................................
class _TokenData (object):
def __init__ (_s, token, tt, obj, bM, ts):
_s.badMac = bM
_s.tokenType = tt
_s.timeStamp = ts
_s.token = token
_s.data = obj
def maxAge (_s):
if _s.tokenType =='auth' : return u.config('maxIdleAuth')
elif _s.tokenType =='signUp': return u.config('maxAgeSignUpTok')
elif _s.tokenType =='pw1' : return u.config('maxAgePasswordTok')
else: raise RuntimeError ('invalid token type')
def valid (_s, expected):
""" Checks encryption validity and expiry: whether the token is younger than maxAge seconds.
Use neutral evaluation pathways to beat timing attacks.
NB: return only success or failure - log shows why it failed but user mustn't know !
"""
if expected == 'session':
badType = (_s.tokenType != 'anon'
and _s.tokenType != 'auth')
else:
badType = _s.tokenType != expected
if _s.tokenType == 'anon':
expired = False
else:
expired = not u.validTimeStamp (_s.timeStamp, _s.maxAge())
badData = _s.data is None # and (type(_s.data) == dict)
isValid = False
# check booleans in order of their initialisation
if _s.badMac: x ='Invalid MAC'
elif badType: x ='Invalid token type:{} expected:{}'.format(_s.tokenType, expected)
elif badData: x ='Invalid data object'
else:
isValid = True
if expired:
logging.debug ('Token expired: %r', _s.token) #no warning log if merely expired
if not isValid:
logging.warning ('%s in Token: %r', x, _s.token)
return isValid, expired
#.........................................
# Some global constants to hold the lengths of component substrings of the token
CH = 1
TS = 4
UID = 8
MAC = 20
def _hash (msg, ts):
"""hmac output of sha1 is 20 bytes irrespective of msg length"""
k = W.W.keys (ts)
return hmac.new (k, msg, hashlib.sha1).digest()
def _serialize (data):
'''Generic data is stored in the token. The data could be a dict or any other serialisable type.
However the data size is limited because currently it all goes into one cookie and
there is a max cookie size for some browsers so we place a limit in session.save()
'''
# ToDo: replace json with binary protocol cpickle
# ToDo compression of data thats too long to fit otherwise:
# data = json.encode (data)
# if len(data) > data_max: # 4K minus the other fields
# level = (len(data) - data_max) * K # experiment! or use level = 9
# data = zlib.compress( data, level)
# if len(data) > data_max:
# assert False, 'oh dear!' todo - save some? data in datastore
# return data, True
# return data, False # todo: encode a boolean in kch to indicate whether compressed
#logging.debug ('serializing data = %r', data)
s = json.dumps (data, separators=(',',':'))
#logging.debug('serialized data: %r', s)
return s.encode('utf-8') #byte str
def _deserialize (data):
try:
# logging.debug('data1: %r', data)
obj = json.loads (data)
# logging.debug('obj: %r', obj)
return obj # byteify(obj)
except Exception, e:
logging.exception(e)
return None
def _encode (tokentype, obj):
""" obj is serializable session data
    returns a urlsafe-base64 token string containing the timestamp, token-type code, serialized data and a MAC
"""
tt = _tokenTypeCode (tokentype)
logging.debug ('encode tokentype = %r tt = %r',tokentype, tt)
now = u.sNow()
#logging.debug ('encode tokentype = %r tt = %r',tokentype, tt)
data = W._iB.pack (now, tt) # ts + tt
data += _serialize (obj) # ts + tt + data
h20 = _hash (data, now)
return urlsafe_b64encode (data + h20) # ts + tt + data + mac
def _decode (token):
"""inverse of encode: return _TokenData"""
try:
bytes = urlsafe_b64decode (token) # ts + tt + data + mac
except TypeError:
logging.warning('Base64 Error: token = %r', token)
logging.exception('Base64 Error: ')
raise Base64Error
ts, tt = W._iB.unpack_from (bytes)
ttype = _tokenType (tt)
#logging.debug ('decode tokentype = %r tt = %r token = %s',ttype, tt, token)
preDataLen = TS+CH
data = bytes[ :-MAC]
mac1 = bytes[-MAC: ]
mac2 = _hash (data, ts)
badMac = not u.sameStr (mac1, mac2)
data = _deserialize (data [preDataLen: ])
# logging.debug('data: %r', data)
return _TokenData (token, ttype, data, badMac, ts)
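# --- Layout note (not part of the original module) ---
# Sketch of the token layout implied by the constants above, assuming W._iB
# packs a 4-byte timestamp (TS) and a 1-byte token-type code (CH):
#   urlsafe_b64encode( [timestamp][type code][JSON payload][20-byte HMAC-SHA1] )
# _decode recomputes the HMAC over everything before the MAC field and compares
# it with u.sameStr before deserializing the JSON payload.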
|
mit
|
slowfranklin/samba
|
source4/heimdal/lib/wind/util.py
|
88
|
1978
|
#!/usr/local/bin/python
# -*- coding: iso-8859-1 -*-
# $Id$
# Copyright (c) 2004 Kungliga Tekniska Högskolan
# (Royal Institute of Technology, Stockholm, Sweden).
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the Institute nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE INSTITUTE AND CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE INSTITUTE OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
def subList(l, sl) :
"""return the index of sl in l or None"""
lLen = len(l)
slLen = len(sl)
for i in range(lLen - slLen + 1):
j = 0
while j < slLen and l[i + j] == sl[j]:
j += 1
if j == slLen:
return i
return None
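# --- Usage sketch (not part of the original file) ---
# Illustrates subList(): it returns the index of the first occurrence of the
# sub-sequence sl inside l, or None when sl does not occur.
if __name__ == '__main__':
    print(subList([1, 2, 3, 4], [2, 3]))    # 1
    print(subList([1, 2, 3, 4], [4, 5]))    # None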
|
gpl-3.0
|
moylop260/odoo-dev
|
addons/report/models/report_paperformat.py
|
64
|
7912
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2014-Today OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from functools import partial
from openerp import SUPERUSER_ID
from openerp.osv import osv, fields
class report_paperformat(osv.Model):
_name = "report.paperformat"
_description = "Allows customization of a report."
_columns = {'name': fields.char('Name', required=True),
'default': fields.boolean('Default paper format ?'),
'format': fields.selection([('A0', 'A0 5 841 x 1189 mm'),
('A1', 'A1 6 594 x 841 mm'),
('A2', 'A2 7 420 x 594 mm'),
('A3', 'A3 8 297 x 420 mm'),
('A4', 'A4 0 210 x 297 mm, 8.26 x 11.69 inches'),
('A5', 'A5 9 148 x 210 mm'),
('A6', 'A6 10 105 x 148 mm'),
('A7', 'A7 11 74 x 105 mm'),
('A8', 'A8 12 52 x 74 mm'),
('A9', 'A9 13 37 x 52 mm'),
('B0', 'B0 14 1000 x 1414 mm'),
('B1', 'B1 15 707 x 1000 mm'),
('B2', 'B2 17 500 x 707 mm'),
('B3', 'B3 18 353 x 500 mm'),
('B4', 'B4 19 250 x 353 mm'),
('B5', 'B5 1 176 x 250 mm, 6.93 x 9.84 inches'),
('B6', 'B6 20 125 x 176 mm'),
('B7', 'B7 21 88 x 125 mm'),
('B8', 'B8 22 62 x 88 mm'),
('B9', 'B9 23 33 x 62 mm'),
                                            ('B10', 'B10 16 31 x 44 mm'),
('C5E', 'C5E 24 163 x 229 mm'),
('Comm10E', 'Comm10E 25 105 x 241 mm, U.S. '
'Common 10 Envelope'),
('DLE', 'DLE 26 110 x 220 mm'),
('Executive', 'Executive 4 7.5 x 10 inches, '
'190.5 x 254 mm'),
('Folio', 'Folio 27 210 x 330 mm'),
('Ledger', 'Ledger 28 431.8 x 279.4 mm'),
('Legal', 'Legal 3 8.5 x 14 inches, '
'215.9 x 355.6 mm'),
('Letter', 'Letter 2 8.5 x 11 inches, '
'215.9 x 279.4 mm'),
('Tabloid', 'Tabloid 29 279.4 x 431.8 mm'),
('custom', 'Custom')],
'Paper size',
help="Select Proper Paper size"),
'margin_top': fields.integer('Top Margin (mm)'),
'margin_bottom': fields.integer('Bottom Margin (mm)'),
'margin_left': fields.integer('Left Margin (mm)'),
'margin_right': fields.integer('Right Margin (mm)'),
'page_height': fields.integer('Page height (mm)'),
'page_width': fields.integer('Page width (mm)'),
'orientation': fields.selection([('Landscape', 'Landscape'),
('Portrait', 'Portrait')],
'Orientation'),
'header_line': fields.boolean('Display a header line'),
'header_spacing': fields.integer('Header spacing'),
'dpi': fields.integer('Output DPI', required=True),
'report_ids': fields.one2many('ir.actions.report.xml',
'paperformat_id',
'Associated reports',
help="Explicitly associated reports")
}
def _check_format_or_page(self, cr, uid, ids, context=None):
for paperformat in self.browse(cr, uid, ids, context=context):
if paperformat.format != 'custom' and (paperformat.page_width or paperformat.page_height):
return False
return True
_constraints = [
        (_check_format_or_page, 'Error ! You cannot select a format AND specific '
'page width/height.', ['format']),
]
_defaults = {
'format': 'A4',
'margin_top': 40,
'margin_bottom': 20,
'margin_left': 7,
'margin_right': 7,
'page_height': False,
'page_width': False,
'orientation': 'Landscape',
'header_line': False,
'header_spacing': 35,
'dpi': 90,
}
class res_company(osv.Model):
_inherit = 'res.company'
_columns = {'paperformat_id': fields.many2one('report.paperformat', 'Paper format')}
def init(self, cr):
# set a default paperformat based on rml one.
ref = partial(self.pool['ir.model.data'].xmlid_to_res_id, cr, SUPERUSER_ID)
ids = self.search(cr, SUPERUSER_ID, [('paperformat_id', '=', False)])
for company in self.browse(cr, SUPERUSER_ID, ids):
paperformat_id = {
'a4': ref('report.paperformat_euro'),
'us_letter': ref('report.paperformat_us'),
}.get(company.rml_paper_format) or ref('report.paperformat_euro')
if paperformat_id:
company.write({'paperformat_id': paperformat_id})
sup = super(res_company, self)
if hasattr(sup, 'init'):
sup.init(cr)
class ir_actions_report(osv.Model):
_inherit = 'ir.actions.report.xml'
def associated_view(self, cr, uid, ids, context):
"""Used in the ir.actions.report.xml form view in order to search naively after the view(s)
used in the rendering.
"""
if context is None:
context = {}
try:
report_name = self.browse(cr, uid, ids[0], context).report_name
act_window_obj = self.pool.get('ir.actions.act_window')
view_action = act_window_obj.for_xml_id(cr, uid, 'base', 'action_ui_view', context=context)
view_action['context'] = context
view_action['domain'] = [('name', 'ilike', report_name.split('.')[1]), ('type', '=', 'qweb')]
return view_action
except:
return False
_columns = {'paperformat_id': fields.many2one('report.paperformat', 'Paper format')}
|
agpl-3.0
|
SciLifeLab/bcbio-nextgen
|
bcbio/rnaseq/count.py
|
1
|
12286
|
"""
count number of reads mapping to features of transcripts
"""
import os
import sys
import itertools
# soft imports
try:
import HTSeq
import pandas as pd
import gffutils
except ImportError:
HTSeq, pd, gffutils = None, None, None
from bcbio.utils import file_exists
from bcbio.distributed.transaction import file_transaction
from bcbio.log import logger
from bcbio import bam
import bcbio.pipeline.datadict as dd
def _get_files(data):
mapped = bam.mapped(data["work_bam"], data["config"])
in_file = bam.sort(mapped, data["config"], order="queryname")
gtf_file = dd.get_gtf_file(data)
work_dir = dd.get_work_dir(data)
out_dir = os.path.join(work_dir, "htseq-count")
sample_name = dd.get_sample_name(data)
out_file = os.path.join(out_dir, sample_name + ".counts")
stats_file = os.path.join(out_dir, sample_name + ".stats")
return in_file, gtf_file, out_file, stats_file
def invert_strand(iv):
iv2 = iv.copy()
if iv2.strand == "+":
iv2.strand = "-"
elif iv2.strand == "-":
iv2.strand = "+"
else:
raise ValueError("Illegal strand")
return iv2
class UnknownChrom(Exception):
pass
def _get_stranded_flag(data):
strand_flag = {"unstranded": "no",
"firststrand": "reverse",
"secondstrand": "yes"}
stranded = dd.get_strandedness(data, "unstranded").lower()
    assert stranded in strand_flag, ("%s is not a valid strandedness value. "
                                     "Valid values are 'firststrand', 'secondstrand', "
                                     "and 'unstranded'." % stranded)
return strand_flag[stranded]
def htseq_count(data):
""" adapted from Simon Anders htseq-count.py script
http://www-huber.embl.de/users/anders/HTSeq/doc/count.html
"""
sam_filename, gff_filename, out_file, stats_file = _get_files(data)
stranded = _get_stranded_flag(data["config"])
overlap_mode = "union"
feature_type = "exon"
id_attribute = "gene_id"
minaqual = 0
if file_exists(out_file):
return out_file
logger.info("Counting reads mapping to exons in %s using %s as the "
"annotation and strandedness as %s." %
(os.path.basename(sam_filename), os.path.basename(gff_filename), dd.get_strandedness(data)))
features = HTSeq.GenomicArrayOfSets("auto", stranded != "no")
counts = {}
# Try to open samfile to fail early in case it is not there
open(sam_filename).close()
gff = HTSeq.GFF_Reader(gff_filename)
i = 0
try:
for f in gff:
if f.type == feature_type:
try:
feature_id = f.attr[id_attribute]
except KeyError:
sys.exit("Feature %s does not contain a '%s' attribute" %
(f.name, id_attribute))
if stranded != "no" and f.iv.strand == ".":
sys.exit("Feature %s at %s does not have strand "
"information but you are running htseq-count "
"in stranded mode. Use '--stranded=no'." %
(f.name, f.iv))
features[f.iv] += feature_id
counts[f.attr[id_attribute]] = 0
i += 1
if i % 100000 == 0:
sys.stderr.write("%d GFF lines processed.\n" % i)
except:
sys.stderr.write("Error occured in %s.\n"
% gff.get_line_number_string())
raise
sys.stderr.write("%d GFF lines processed.\n" % i)
if len(counts) == 0:
sys.stderr.write("Warning: No features of type '%s' found.\n"
% feature_type)
try:
align_reader = htseq_reader(sam_filename)
first_read = iter(align_reader).next()
pe_mode = first_read.paired_end
except:
sys.stderr.write("Error occured when reading first line of sam "
"file.\n")
raise
try:
if pe_mode:
read_seq_pe_file = align_reader
read_seq = HTSeq.pair_SAM_alignments(align_reader)
empty = 0
ambiguous = 0
notaligned = 0
lowqual = 0
nonunique = 0
i = 0
for r in read_seq:
i += 1
if not pe_mode:
if not r.aligned:
notaligned += 1
continue
try:
if r.optional_field("NH") > 1:
nonunique += 1
continue
except KeyError:
pass
if r.aQual < minaqual:
lowqual += 1
continue
if stranded != "reverse":
iv_seq = (co.ref_iv for co in r.cigar if co.type == "M"
and co.size > 0)
else:
iv_seq = (invert_strand(co.ref_iv) for co in r.cigar if
co.type == "M" and co.size > 0)
else:
if r[0] is not None and r[0].aligned:
if stranded != "reverse":
iv_seq = (co.ref_iv for co in r[0].cigar if
co.type == "M" and co.size > 0)
else:
iv_seq = (invert_strand(co.ref_iv) for co in r[0].cigar if
co.type == "M" and co.size > 0)
else:
iv_seq = tuple()
if r[1] is not None and r[1].aligned:
if stranded != "reverse":
iv_seq = itertools.chain(iv_seq,
(invert_strand(co.ref_iv) for co
in r[1].cigar if co.type == "M"
and co.size > 0))
else:
iv_seq = itertools.chain(iv_seq,
(co.ref_iv for co in r[1].cigar
if co.type == "M" and co.size
> 0))
else:
if (r[0] is None) or not (r[0].aligned):
notaligned += 1
continue
try:
if (r[0] is not None and r[0].optional_field("NH") > 1) or \
(r[1] is not None and r[1].optional_field("NH") > 1):
nonunique += 1
continue
except KeyError:
pass
if (r[0] and r[0].aQual < minaqual) or (r[1] and
r[1].aQual < minaqual):
lowqual += 1
continue
try:
if overlap_mode == "union":
fs = set()
for iv in iv_seq:
if iv.chrom not in features.chrom_vectors:
raise UnknownChrom
for iv2, fs2 in features[iv].steps():
fs = fs.union(fs2)
elif (overlap_mode == "intersection-strict" or
overlap_mode == "intersection-nonempty"):
fs = None
for iv in iv_seq:
if iv.chrom not in features.chrom_vectors:
raise UnknownChrom
for iv2, fs2 in features[iv].steps():
if (len(fs2) > 0 or overlap_mode == "intersection-strict"):
if fs is None:
fs = fs2.copy()
else:
fs = fs.intersection(fs2)
else:
sys.exit("Illegal overlap mode.")
if fs is None or len(fs) == 0:
empty += 1
elif len(fs) > 1:
ambiguous += 1
else:
counts[list(fs)[0]] += 1
except UnknownChrom:
if not pe_mode:
rr = r
else:
rr = r[0] if r[0] is not None else r[1]
empty += 1
if i % 100000 == 0:
sys.stderr.write("%d sam %s processed.\n" %
(i, "lines " if not pe_mode else "line pairs"))
except:
if not pe_mode:
sys.stderr.write("Error occured in %s.\n"
% read_seq.get_line_number_string())
else:
sys.stderr.write("Error occured in %s.\n"
% read_seq_pe_file.get_line_number_string())
raise
sys.stderr.write("%d sam %s processed.\n" %
(i, "lines " if not pe_mode else "line pairs"))
with file_transaction(data, out_file) as tmp_out_file:
with open(tmp_out_file, "w") as out_handle:
on_feature = 0
for fn in sorted(counts.keys()):
on_feature += counts[fn]
out_handle.write("%s\t%d\n" % (fn, counts[fn]))
with file_transaction(data, stats_file) as tmp_stats_file:
with open(tmp_stats_file, "w") as out_handle:
out_handle.write("on_feature\t%d\n" % on_feature)
out_handle.write("no_feature\t%d\n" % empty)
out_handle.write("ambiguous\t%d\n" % ambiguous)
out_handle.write("too_low_aQual\t%d\n" % lowqual)
out_handle.write("not_aligned\t%d\n" % notaligned)
out_handle.write("alignment_not_unique\t%d\n" % nonunique)
return out_file
def combine_count_files(files, out_file=None, ext=".fpkm"):
"""
combine a set of count files into a single combined file
"""
assert all([file_exists(x) for x in files]), \
"Some count files in %s do not exist." % files
for f in files:
assert file_exists(f), "%s does not exist or is empty." % f
col_names = [os.path.basename(x.split(ext)[0]) for x in files]
if not out_file:
out_dir = os.path.join(os.path.dirname(files[0]))
out_file = os.path.join(out_dir, "combined.counts")
if file_exists(out_file):
return out_file
for i, f in enumerate(files):
if i == 0:
df = pd.io.parsers.read_table(f, sep="\t", index_col=0, header=None,
names=[col_names[0]])
else:
df = df.join(pd.io.parsers.read_table(f, sep="\t", index_col=0,
header=None,
names=[col_names[i]]))
df.to_csv(out_file, sep="\t", index_label="id")
return out_file
def annotate_combined_count_file(count_file, gtf_file, out_file=None):
dbfn = gtf_file + ".db"
if not file_exists(dbfn):
return None
if not gffutils:
return None
db = gffutils.FeatureDB(dbfn, keep_order=True)
if not out_file:
out_dir = os.path.dirname(count_file)
out_file = os.path.join(out_dir, "annotated_combined.counts")
# if the genes don't have a gene_id or gene_name set, bail out
try:
symbol_lookup = {f['gene_id'][0]: f['gene_name'][0] for f in
db.features_of_type('exon')}
except KeyError:
return None
df = pd.io.parsers.read_table(count_file, sep="\t", index_col=0, header=0)
df['symbol'] = df.apply(lambda x: symbol_lookup.get(x.name, ""), axis=1)
df.to_csv(out_file, sep="\t", index_label="id")
return out_file
def htseq_reader(align_file):
"""
returns a read-by-read sequence reader for a BAM or SAM file
"""
if bam.is_sam(align_file):
read_seq = HTSeq.SAM_Reader(align_file)
elif bam.is_bam(align_file):
read_seq = HTSeq.BAM_Reader(align_file)
else:
logger.error("%s is not a SAM or BAM file" % (align_file))
sys.exit(1)
return read_seq
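# --- Usage sketch (not part of the original module) ---
# Shows how the per-sample outputs of htseq_count() are typically merged and
# annotated; the file names below are hypothetical, the count files must exist,
# and annotation needs a pre-built gffutils database ('<gtf>.db') next to the GTF.
if __name__ == "__main__":
    counts = ["sample1.counts", "sample2.counts"]
    combined = combine_count_files(counts, ext=".counts")
    annotated = annotate_combined_count_file(combined, "annotation.gtf")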
|
mit
|
hotdoc/hotdoc_gi_extension
|
setup.py
|
1
|
1887
|
# -*- coding: utf-8 -*-
#
# Copyright © 2015,2016 Mathieu Duponchelle <mathieu.duponchelle@opencreed.com>
# Copyright © 2015,2016 Collabora Ltd
#
# This library is free software; you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this library. If not, see <http://www.gnu.org/licenses/>.
import os
from setuptools import setup, find_packages
with open(os.path.join('hotdoc_gi_extension', 'VERSION.txt'), 'r') as _:
VERSION = _.read().strip()
setup(
name = "hotdoc_gi_extension",
version = VERSION,
keywords = "gobject-introspection C hotdoc",
url='https://github.com/hotdoc/hotdoc_gi_extension',
author_email = 'mathieu.duponchelle@opencreed.com',
license = 'LGPLv2.1+',
description = "An extension for hotdoc that parses gir files",
author = "Mathieu Duponchelle",
packages = find_packages(),
package_data = {
'': ['*.html'],
'hotdoc_gi_extension': ['VERSION.txt'],
'hotdoc_gi_extension.transition_scripts': ['translate_sections.sh'],
},
scripts=['hotdoc_gi_extension/transition_scripts/hotdoc_gtk_doc_porter',
'hotdoc_gi_extension/transition_scripts/hotdoc_gtk_doc_scan_parser'],
entry_points = {'hotdoc.extensions': 'get_extension_classes = hotdoc_gi_extension.gi_extension:get_extension_classes'},
install_requires = [
'lxml',
'pyyaml',
],
)
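# --- Usage note (not part of the original file) ---
# Installing from a source checkout (assumes pip and setuptools are available):
#   pip install .
# This registers the 'hotdoc.extensions' entry point declared above so hotdoc
# can discover the GI extension at run time.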
|
lgpl-2.1
|
aroche/django
|
tests/serializers_regress/tests.py
|
144
|
23114
|
"""
A test spanning all the capabilities of all the serializers.
This class defines sample data and a dynamically generated
test case that is capable of testing the capabilities of
the serializers. This includes all valid data values, plus
forward, backwards and self references.
"""
from __future__ import unicode_literals
import datetime
import decimal
import uuid
from unittest import skipUnless
from django.core import serializers
from django.core.serializers import SerializerDoesNotExist
from django.core.serializers.base import DeserializationError
from django.core.serializers.xml_serializer import DTDForbidden
from django.db import connection, models
from django.http import HttpResponse
from django.test import SimpleTestCase, TestCase, skipUnlessDBFeature
from django.utils import six
from django.utils.functional import curry
from .models import (
Anchor, AutoNowDateTimeData, BaseModel, BigIntegerData, BinaryData,
BooleanData, BooleanPKData, CharData, CharPKData, ComplexModel, DateData,
DateTimeData, DecimalData, DecimalPKData, EmailData, EmailPKData,
ExplicitInheritBaseModel, FileData, FilePathData, FilePathPKData, FKData,
FKDataNaturalKey, FKDataToField, FKDataToO2O, FKSelfData, FKToUUID,
FloatData, FloatPKData, GenericData, GenericIPAddressData,
GenericIPAddressPKData, InheritAbstractModel, InheritBaseModel,
IntegerData, IntegerPKData, Intermediate, LengthModel, M2MData,
M2MIntermediateData, M2MSelfData, ModifyingSaveData, NaturalKeyAnchor,
NullBooleanData, O2OData, PositiveIntegerData, PositiveIntegerPKData,
PositiveSmallIntegerData, PositiveSmallIntegerPKData, ProxyBaseModel,
ProxyProxyBaseModel, SlugData, SlugPKData, SmallData, SmallPKData, Tag,
TextData, TimeData, UniqueAnchor, UUIDData,
)
try:
import yaml
except ImportError:
yaml = None
# A set of functions that can be used to recreate
# test data objects of various kinds.
# The save method is a raw base model save, to make
# sure that the data in the database matches the
# exact test case.
def data_create(pk, klass, data):
instance = klass(id=pk)
instance.data = data
models.Model.save_base(instance, raw=True)
return [instance]
def generic_create(pk, klass, data):
instance = klass(id=pk)
instance.data = data[0]
models.Model.save_base(instance, raw=True)
for tag in data[1:]:
instance.tags.create(data=tag)
return [instance]
def fk_create(pk, klass, data):
instance = klass(id=pk)
setattr(instance, 'data_id', data)
models.Model.save_base(instance, raw=True)
return [instance]
def m2m_create(pk, klass, data):
instance = klass(id=pk)
models.Model.save_base(instance, raw=True)
instance.data = data
return [instance]
def im2m_create(pk, klass, data):
instance = klass(id=pk)
models.Model.save_base(instance, raw=True)
return [instance]
def im_create(pk, klass, data):
instance = klass(id=pk)
instance.right_id = data['right']
instance.left_id = data['left']
if 'extra' in data:
instance.extra = data['extra']
models.Model.save_base(instance, raw=True)
return [instance]
def o2o_create(pk, klass, data):
instance = klass()
instance.data_id = data
models.Model.save_base(instance, raw=True)
return [instance]
def pk_create(pk, klass, data):
instance = klass()
instance.data = data
models.Model.save_base(instance, raw=True)
return [instance]
def inherited_create(pk, klass, data):
instance = klass(id=pk, **data)
# This isn't a raw save because:
# 1) we're testing inheritance, not field behavior, so none
# of the field values need to be protected.
# 2) saving the child class and having the parent created
# automatically is easier than manually creating both.
models.Model.save(instance)
created = [instance]
for klass, field in instance._meta.parents.items():
created.append(klass.objects.get(id=pk))
return created
# A set of functions that can be used to compare
# test data objects of various kinds
def data_compare(testcase, pk, klass, data):
instance = klass.objects.get(id=pk)
if klass == BinaryData and data is not None:
testcase.assertEqual(bytes(data), bytes(instance.data),
"Objects with PK=%d not equal; expected '%s' (%s), got '%s' (%s)" % (
pk, repr(bytes(data)), type(data), repr(bytes(instance.data)),
type(instance.data))
)
else:
testcase.assertEqual(data, instance.data,
"Objects with PK=%d not equal; expected '%s' (%s), got '%s' (%s)" % (
pk, data, type(data), instance, type(instance.data))
)
def generic_compare(testcase, pk, klass, data):
instance = klass.objects.get(id=pk)
testcase.assertEqual(data[0], instance.data)
testcase.assertEqual(data[1:], [t.data for t in instance.tags.order_by('id')])
def fk_compare(testcase, pk, klass, data):
instance = klass.objects.get(id=pk)
testcase.assertEqual(data, instance.data_id)
def m2m_compare(testcase, pk, klass, data):
instance = klass.objects.get(id=pk)
testcase.assertEqual(data, [obj.id for obj in instance.data.order_by('id')])
def im2m_compare(testcase, pk, klass, data):
klass.objects.get(id=pk)
# actually nothing else to check, the instance just should exist
def im_compare(testcase, pk, klass, data):
instance = klass.objects.get(id=pk)
testcase.assertEqual(data['left'], instance.left_id)
testcase.assertEqual(data['right'], instance.right_id)
if 'extra' in data:
testcase.assertEqual(data['extra'], instance.extra)
else:
testcase.assertEqual("doesn't matter", instance.extra)
def o2o_compare(testcase, pk, klass, data):
instance = klass.objects.get(data=data)
testcase.assertEqual(data, instance.data_id)
def pk_compare(testcase, pk, klass, data):
instance = klass.objects.get(data=data)
testcase.assertEqual(data, instance.data)
def inherited_compare(testcase, pk, klass, data):
instance = klass.objects.get(id=pk)
for key, value in data.items():
testcase.assertEqual(value, getattr(instance, key))
# Define some data types. Each data type is
# actually a pair of functions; one to create
# and one to compare objects of that type
data_obj = (data_create, data_compare)
generic_obj = (generic_create, generic_compare)
fk_obj = (fk_create, fk_compare)
m2m_obj = (m2m_create, m2m_compare)
im2m_obj = (im2m_create, im2m_compare)
im_obj = (im_create, im_compare)
o2o_obj = (o2o_create, o2o_compare)
pk_obj = (pk_create, pk_compare)
inherited_obj = (inherited_create, inherited_compare)
uuid_obj = uuid.uuid4()
test_data = [
# Format: (data type, PK value, Model Class, data)
(data_obj, 1, BinaryData, six.memoryview(b"\x05\xFD\x00")),
(data_obj, 2, BinaryData, None),
(data_obj, 5, BooleanData, True),
(data_obj, 6, BooleanData, False),
(data_obj, 10, CharData, "Test Char Data"),
(data_obj, 11, CharData, ""),
(data_obj, 12, CharData, "None"),
(data_obj, 13, CharData, "null"),
(data_obj, 14, CharData, "NULL"),
(data_obj, 15, CharData, None),
# (We use something that will fit into a latin1 database encoding here,
# because that is still the default used on many system setups.)
(data_obj, 16, CharData, '\xa5'),
(data_obj, 20, DateData, datetime.date(2006, 6, 16)),
(data_obj, 21, DateData, None),
(data_obj, 30, DateTimeData, datetime.datetime(2006, 6, 16, 10, 42, 37)),
(data_obj, 31, DateTimeData, None),
(data_obj, 40, EmailData, "hovercraft@example.com"),
(data_obj, 41, EmailData, None),
(data_obj, 42, EmailData, ""),
(data_obj, 50, FileData, 'file:///foo/bar/whiz.txt'),
# (data_obj, 51, FileData, None),
(data_obj, 52, FileData, ""),
(data_obj, 60, FilePathData, "/foo/bar/whiz.txt"),
(data_obj, 61, FilePathData, None),
(data_obj, 62, FilePathData, ""),
(data_obj, 70, DecimalData, decimal.Decimal('12.345')),
(data_obj, 71, DecimalData, decimal.Decimal('-12.345')),
(data_obj, 72, DecimalData, decimal.Decimal('0.0')),
(data_obj, 73, DecimalData, None),
(data_obj, 74, FloatData, 12.345),
(data_obj, 75, FloatData, -12.345),
(data_obj, 76, FloatData, 0.0),
(data_obj, 77, FloatData, None),
(data_obj, 80, IntegerData, 123456789),
(data_obj, 81, IntegerData, -123456789),
(data_obj, 82, IntegerData, 0),
(data_obj, 83, IntegerData, None),
# (XX, ImageData
(data_obj, 95, GenericIPAddressData, "fe80:1424:2223:6cff:fe8a:2e8a:2151:abcd"),
(data_obj, 96, GenericIPAddressData, None),
(data_obj, 100, NullBooleanData, True),
(data_obj, 101, NullBooleanData, False),
(data_obj, 102, NullBooleanData, None),
(data_obj, 120, PositiveIntegerData, 123456789),
(data_obj, 121, PositiveIntegerData, None),
(data_obj, 130, PositiveSmallIntegerData, 12),
(data_obj, 131, PositiveSmallIntegerData, None),
(data_obj, 140, SlugData, "this-is-a-slug"),
(data_obj, 141, SlugData, None),
(data_obj, 142, SlugData, ""),
(data_obj, 150, SmallData, 12),
(data_obj, 151, SmallData, -12),
(data_obj, 152, SmallData, 0),
(data_obj, 153, SmallData, None),
(data_obj, 160, TextData, """This is a long piece of text.
It contains line breaks.
Several of them.
The end."""),
(data_obj, 161, TextData, ""),
(data_obj, 162, TextData, None),
(data_obj, 170, TimeData, datetime.time(10, 42, 37)),
(data_obj, 171, TimeData, None),
(generic_obj, 200, GenericData, ['Generic Object 1', 'tag1', 'tag2']),
(generic_obj, 201, GenericData, ['Generic Object 2', 'tag2', 'tag3']),
(data_obj, 300, Anchor, "Anchor 1"),
(data_obj, 301, Anchor, "Anchor 2"),
(data_obj, 302, UniqueAnchor, "UAnchor 1"),
(fk_obj, 400, FKData, 300), # Post reference
(fk_obj, 401, FKData, 500), # Pre reference
(fk_obj, 402, FKData, None), # Empty reference
(m2m_obj, 410, M2MData, []), # Empty set
(m2m_obj, 411, M2MData, [300, 301]), # Post reference
(m2m_obj, 412, M2MData, [500, 501]), # Pre reference
(m2m_obj, 413, M2MData, [300, 301, 500, 501]), # Pre and Post reference
(o2o_obj, None, O2OData, 300), # Post reference
(o2o_obj, None, O2OData, 500), # Pre reference
(fk_obj, 430, FKSelfData, 431), # Pre reference
(fk_obj, 431, FKSelfData, 430), # Post reference
(fk_obj, 432, FKSelfData, None), # Empty reference
(m2m_obj, 440, M2MSelfData, []),
(m2m_obj, 441, M2MSelfData, []),
(m2m_obj, 442, M2MSelfData, [440, 441]),
(m2m_obj, 443, M2MSelfData, [445, 446]),
(m2m_obj, 444, M2MSelfData, [440, 441, 445, 446]),
(m2m_obj, 445, M2MSelfData, []),
(m2m_obj, 446, M2MSelfData, []),
(fk_obj, 450, FKDataToField, "UAnchor 1"),
(fk_obj, 451, FKDataToField, "UAnchor 2"),
(fk_obj, 452, FKDataToField, None),
(fk_obj, 460, FKDataToO2O, 300),
(im2m_obj, 470, M2MIntermediateData, None),
# testing post- and prereferences and extra fields
(im_obj, 480, Intermediate, {'right': 300, 'left': 470}),
(im_obj, 481, Intermediate, {'right': 300, 'left': 490}),
(im_obj, 482, Intermediate, {'right': 500, 'left': 470}),
(im_obj, 483, Intermediate, {'right': 500, 'left': 490}),
(im_obj, 484, Intermediate, {'right': 300, 'left': 470, 'extra': "extra"}),
(im_obj, 485, Intermediate, {'right': 300, 'left': 490, 'extra': "extra"}),
(im_obj, 486, Intermediate, {'right': 500, 'left': 470, 'extra': "extra"}),
(im_obj, 487, Intermediate, {'right': 500, 'left': 490, 'extra': "extra"}),
(im2m_obj, 490, M2MIntermediateData, []),
(data_obj, 500, Anchor, "Anchor 3"),
(data_obj, 501, Anchor, "Anchor 4"),
(data_obj, 502, UniqueAnchor, "UAnchor 2"),
(pk_obj, 601, BooleanPKData, True),
(pk_obj, 602, BooleanPKData, False),
(pk_obj, 610, CharPKData, "Test Char PKData"),
# (pk_obj, 620, DatePKData, datetime.date(2006, 6, 16)),
# (pk_obj, 630, DateTimePKData, datetime.datetime(2006, 6, 16, 10, 42, 37)),
(pk_obj, 640, EmailPKData, "hovercraft@example.com"),
# (pk_obj, 650, FilePKData, 'file:///foo/bar/whiz.txt'),
(pk_obj, 660, FilePathPKData, "/foo/bar/whiz.txt"),
(pk_obj, 670, DecimalPKData, decimal.Decimal('12.345')),
(pk_obj, 671, DecimalPKData, decimal.Decimal('-12.345')),
(pk_obj, 672, DecimalPKData, decimal.Decimal('0.0')),
(pk_obj, 673, FloatPKData, 12.345),
(pk_obj, 674, FloatPKData, -12.345),
(pk_obj, 675, FloatPKData, 0.0),
(pk_obj, 680, IntegerPKData, 123456789),
(pk_obj, 681, IntegerPKData, -123456789),
(pk_obj, 682, IntegerPKData, 0),
# (XX, ImagePKData
(pk_obj, 695, GenericIPAddressPKData, "fe80:1424:2223:6cff:fe8a:2e8a:2151:abcd"),
# (pk_obj, 700, NullBooleanPKData, True),
# (pk_obj, 701, NullBooleanPKData, False),
(pk_obj, 720, PositiveIntegerPKData, 123456789),
(pk_obj, 730, PositiveSmallIntegerPKData, 12),
(pk_obj, 740, SlugPKData, "this-is-a-slug"),
(pk_obj, 750, SmallPKData, 12),
(pk_obj, 751, SmallPKData, -12),
(pk_obj, 752, SmallPKData, 0),
# (pk_obj, 760, TextPKData, """This is a long piece of text.
# It contains line breaks.
# Several of them.
# The end."""),
# (pk_obj, 770, TimePKData, datetime.time(10, 42, 37)),
# (pk_obj, 790, XMLPKData, "<foo></foo>"),
(pk_obj, 791, UUIDData, uuid_obj),
(fk_obj, 792, FKToUUID, uuid_obj),
(data_obj, 800, AutoNowDateTimeData, datetime.datetime(2006, 6, 16, 10, 42, 37)),
(data_obj, 810, ModifyingSaveData, 42),
(inherited_obj, 900, InheritAbstractModel, {'child_data': 37, 'parent_data': 42}),
(inherited_obj, 910, ExplicitInheritBaseModel, {'child_data': 37, 'parent_data': 42}),
(inherited_obj, 920, InheritBaseModel, {'child_data': 37, 'parent_data': 42}),
(data_obj, 1000, BigIntegerData, 9223372036854775807),
(data_obj, 1001, BigIntegerData, -9223372036854775808),
(data_obj, 1002, BigIntegerData, 0),
(data_obj, 1003, BigIntegerData, None),
(data_obj, 1004, LengthModel, 0),
(data_obj, 1005, LengthModel, 1),
]
natural_key_test_data = [
    (data_obj, 1100, NaturalKeyAnchor, "Natural Key Anchor"),
(fk_obj, 1101, FKDataNaturalKey, 1100),
(fk_obj, 1102, FKDataNaturalKey, None),
]
# Because Oracle treats the empty string as NULL, Oracle is expected to fail
# when field.empty_strings_allowed is True and the value is None; skip these
# tests.
if connection.features.interprets_empty_strings_as_nulls:
test_data = [data for data in test_data
if not (data[0] == data_obj and
data[2]._meta.get_field('data').empty_strings_allowed and
data[3] is None)]
# Regression test for #8651 -- a FK to an object with PK of 0
# This won't work on MySQL since it won't let you create an object
# with an autoincrement primary key of 0,
if connection.features.allows_auto_pk_0:
test_data.extend([
(data_obj, 0, Anchor, "Anchor 0"),
(fk_obj, 465, FKData, 0),
])
# Dynamically create serializer tests to ensure that all
# registered serializers are automatically tested.
@skipUnlessDBFeature('can_defer_constraint_checks')
class SerializerTests(TestCase):
def test_get_unknown_serializer(self):
"""
#15889: get_serializer('nonsense') raises a SerializerDoesNotExist
"""
with self.assertRaises(SerializerDoesNotExist):
serializers.get_serializer("nonsense")
with self.assertRaises(KeyError):
serializers.get_serializer("nonsense")
# SerializerDoesNotExist is instantiated with the nonexistent format
with self.assertRaises(SerializerDoesNotExist) as cm:
serializers.get_serializer("nonsense")
self.assertEqual(cm.exception.args, ("nonsense",))
def test_unregister_unknown_serializer(self):
with self.assertRaises(SerializerDoesNotExist):
serializers.unregister_serializer("nonsense")
def test_get_unknown_deserializer(self):
with self.assertRaises(SerializerDoesNotExist):
serializers.get_deserializer("nonsense")
def test_json_deserializer_exception(self):
with self.assertRaises(DeserializationError):
for obj in serializers.deserialize("json", """[{"pk":1}"""):
pass
@skipUnless(yaml, "PyYAML not installed")
def test_yaml_deserializer_exception(self):
with self.assertRaises(DeserializationError):
for obj in serializers.deserialize("yaml", "{"):
pass
def test_serialize_proxy_model(self):
BaseModel.objects.create(parent_data=1)
base_objects = BaseModel.objects.all()
proxy_objects = ProxyBaseModel.objects.all()
proxy_proxy_objects = ProxyProxyBaseModel.objects.all()
base_data = serializers.serialize("json", base_objects)
proxy_data = serializers.serialize("json", proxy_objects)
proxy_proxy_data = serializers.serialize("json", proxy_proxy_objects)
self.assertEqual(base_data, proxy_data.replace('proxy', ''))
self.assertEqual(base_data, proxy_proxy_data.replace('proxy', ''))
def serializerTest(format, self):
# Create all the objects defined in the test data
objects = []
instance_count = {}
for (func, pk, klass, datum) in test_data:
with connection.constraint_checks_disabled():
objects.extend(func[0](pk, klass, datum))
# Get a count of the number of objects created for each class
for klass in instance_count:
instance_count[klass] = klass.objects.count()
# Add the generic tagged objects to the object list
objects.extend(Tag.objects.all())
# Serialize the test database
serialized_data = serializers.serialize(format, objects, indent=2)
for obj in serializers.deserialize(format, serialized_data):
obj.save()
# Assert that the deserialized data is the same
# as the original source
for (func, pk, klass, datum) in test_data:
func[1](self, pk, klass, datum)
# Assert that the number of objects deserialized is the
# same as the number that was serialized.
for klass, count in instance_count.items():
self.assertEqual(count, klass.objects.count())
def naturalKeySerializerTest(format, self):
# Create all the objects defined in the test data
objects = []
instance_count = {}
for (func, pk, klass, datum) in natural_key_test_data:
with connection.constraint_checks_disabled():
objects.extend(func[0](pk, klass, datum))
# Get a count of the number of objects created for each class
for klass in instance_count:
instance_count[klass] = klass.objects.count()
# Serialize the test database
serialized_data = serializers.serialize(format, objects, indent=2,
use_natural_foreign_keys=True)
for obj in serializers.deserialize(format, serialized_data):
obj.save()
# Assert that the deserialized data is the same
# as the original source
for (func, pk, klass, datum) in natural_key_test_data:
func[1](self, pk, klass, datum)
# Assert that the number of objects deserialized is the
# same as the number that was serialized.
for klass, count in instance_count.items():
self.assertEqual(count, klass.objects.count())
def fieldsTest(format, self):
obj = ComplexModel(field1='first', field2='second', field3='third')
obj.save_base(raw=True)
# Serialize then deserialize the test database
serialized_data = serializers.serialize(format, [obj], indent=2, fields=('field1', 'field3'))
result = next(serializers.deserialize(format, serialized_data))
# Check that the deserialized object contains data in only the serialized fields.
self.assertEqual(result.object.field1, 'first')
self.assertEqual(result.object.field2, '')
self.assertEqual(result.object.field3, 'third')
def streamTest(format, self):
obj = ComplexModel(field1='first', field2='second', field3='third')
obj.save_base(raw=True)
# Serialize the test database to a stream
for stream in (six.StringIO(), HttpResponse()):
serializers.serialize(format, [obj], indent=2, stream=stream)
# Serialize normally for a comparison
string_data = serializers.serialize(format, [obj], indent=2)
# Check that the two are the same
if isinstance(stream, six.StringIO):
self.assertEqual(string_data, stream.getvalue())
else:
self.assertEqual(string_data, stream.content.decode('utf-8'))
def naturalKeyTest(format, self):
book1 = {'data': '978-1590597255', 'title': 'The Definitive Guide to '
'Django: Web Development Done Right'}
book2 = {'data': '978-1590599969', 'title': 'Practical Django Projects'}
# Create the books.
adrian = NaturalKeyAnchor.objects.create(**book1)
james = NaturalKeyAnchor.objects.create(**book2)
# Serialize the books.
string_data = serializers.serialize(format, NaturalKeyAnchor.objects.all(),
indent=2, use_natural_foreign_keys=True,
use_natural_primary_keys=True)
# Delete one book (to prove that the natural key generation will only
# restore the primary keys of books found in the database via the
# get_natural_key manager method).
james.delete()
# Deserialize and test.
books = list(serializers.deserialize(format, string_data))
self.assertEqual(len(books), 2)
self.assertEqual(books[0].object.title, book1['title'])
self.assertEqual(books[0].object.pk, adrian.pk)
self.assertEqual(books[1].object.title, book2['title'])
self.assertEqual(books[1].object.pk, None)
for format in [f for f in serializers.get_serializer_formats()
if not isinstance(serializers.get_serializer(f), serializers.BadSerializer) and not f == 'geojson']:
setattr(SerializerTests, 'test_' + format + '_serializer', curry(serializerTest, format))
setattr(SerializerTests, 'test_' + format + '_natural_key_serializer', curry(naturalKeySerializerTest, format))
setattr(SerializerTests, 'test_' + format + '_serializer_fields', curry(fieldsTest, format))
setattr(SerializerTests, 'test_' + format + '_serializer_natural_keys', curry(naturalKeyTest, format))
if format != 'python':
setattr(SerializerTests, 'test_' + format + '_serializer_stream', curry(streamTest, format))
class XmlDeserializerSecurityTests(SimpleTestCase):
def test_no_dtd(self):
"""
The XML deserializer shouldn't allow a DTD.
This is the most straightforward way to prevent all entity definitions
and avoid both external entities and entity-expansion attacks.
"""
xml = '<?xml version="1.0" standalone="no"?><!DOCTYPE example SYSTEM "http://example.com/example.dtd">'
with self.assertRaises(DTDForbidden):
next(serializers.deserialize('xml', xml))
|
bsd-3-clause
|
lepricon49/headphones
|
lib/html5lib/filters/inject_meta_charset.py
|
1730
|
2746
|
from __future__ import absolute_import, division, unicode_literals
from . import _base
class Filter(_base.Filter):
def __init__(self, source, encoding):
_base.Filter.__init__(self, source)
self.encoding = encoding
def __iter__(self):
state = "pre_head"
meta_found = (self.encoding is None)
pending = []
for token in _base.Filter.__iter__(self):
type = token["type"]
if type == "StartTag":
if token["name"].lower() == "head":
state = "in_head"
elif type == "EmptyTag":
if token["name"].lower() == "meta":
# replace charset with actual encoding
has_http_equiv_content_type = False
for (namespace, name), value in token["data"].items():
if namespace is not None:
continue
elif name.lower() == 'charset':
token["data"][(namespace, name)] = self.encoding
meta_found = True
break
elif name == 'http-equiv' and value.lower() == 'content-type':
has_http_equiv_content_type = True
else:
if has_http_equiv_content_type and (None, "content") in token["data"]:
token["data"][(None, "content")] = 'text/html; charset=%s' % self.encoding
meta_found = True
elif token["name"].lower() == "head" and not meta_found:
# insert meta into empty head
yield {"type": "StartTag", "name": "head",
"data": token["data"]}
yield {"type": "EmptyTag", "name": "meta",
"data": {(None, "charset"): self.encoding}}
yield {"type": "EndTag", "name": "head"}
meta_found = True
continue
elif type == "EndTag":
if token["name"].lower() == "head" and pending:
# insert meta into head (if necessary) and flush pending queue
yield pending.pop(0)
if not meta_found:
yield {"type": "EmptyTag", "name": "meta",
"data": {(None, "charset"): self.encoding}}
while pending:
yield pending.pop(0)
meta_found = True
state = "post_head"
if state == "in_head":
pending.append(token)
else:
yield token
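# Illustrative usage sketch (not part of the original module): the filter
# wraps any iterable of treewalker-style token dicts and yields the same
# stream with a charset <meta> injected into <head>.  The hand-written
# tokens below are a minimal assumption of what a treewalker would emit.
if __name__ == "__main__":
    _tokens = [
        {"type": "StartTag", "name": "head", "data": {}},
        {"type": "EndTag", "name": "head"},
    ]
    for _tok in Filter(_tokens, "utf-8"):
        print(_tok)
    # expected: the head StartTag, an EmptyTag meta with charset=utf-8,
    # then the head EndTag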
|
gpl-3.0
|
gsehub/edx-platform
|
lms/djangoapps/bulk_email/tests/test_email.py
|
9
|
28165
|
# -*- coding: utf-8 -*-
"""
Unit tests for sending course email
"""
import json
import os
from unittest import skipIf
import ddt
from django.conf import settings
from django.core import mail
from django.core.mail.message import forbid_multi_line_headers
from django.core.management import call_command
from django.urls import reverse
from django.test.utils import override_settings
from django.utils.translation import get_language
from markupsafe import escape
from mock import Mock, patch
from nose.plugins.attrib import attr
from bulk_email.models import BulkEmailFlag, Optout
from bulk_email.tasks import _get_course_email_context, _get_source_address
from course_modes.models import CourseMode
from courseware.tests.factories import InstructorFactory, StaffFactory
from enrollment.api import update_enrollment
from lms.djangoapps.instructor_task.subtasks import update_subtask_status
from openedx.core.djangoapps.course_groups.cohorts import add_user_to_cohort
from openedx.core.djangoapps.course_groups.models import CourseCohort
from student.models import CourseEnrollment
from student.roles import CourseStaffRole
from student.tests.factories import CourseEnrollmentFactory, UserFactory
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
STAFF_COUNT = 3
STUDENT_COUNT = 10
LARGE_NUM_EMAILS = 137
class MockCourseEmailResult(object):
"""
A small closure-like class to keep count of emails sent over all tasks, recorded
by mock object side effects
"""
emails_sent = 0
def get_mock_update_subtask_status(self):
"""Wrapper for mock email function."""
def mock_update_subtask_status(entry_id, current_task_id, new_subtask_status):
"""Increments count of number of emails sent."""
self.emails_sent += new_subtask_status.succeeded
return update_subtask_status(entry_id, current_task_id, new_subtask_status)
return mock_update_subtask_status
class EmailSendFromDashboardTestCase(SharedModuleStoreTestCase):
"""
Test that emails send correctly.
"""
def create_staff_and_instructor(self):
"""
Creates one instructor and several course staff for self.course. Assigns
them to self.instructor (single user) and self.staff (list of users),
respectively.
"""
self.instructor = InstructorFactory(course_key=self.course.id)
self.staff = [
StaffFactory(course_key=self.course.id) for __ in xrange(STAFF_COUNT)
]
def create_students(self):
"""
Creates users and enrolls them in self.course. Assigns these users to
self.students.
"""
self.students = [UserFactory() for _ in xrange(STUDENT_COUNT)]
for student in self.students:
CourseEnrollmentFactory.create(user=student, course_id=self.course.id)
def login_as_user(self, user):
"""
Log in self.client as user.
"""
self.client.login(username=user.username, password="test")
def goto_instructor_dash_email_view(self):
"""
Goes to the instructor dashboard to verify that the email section is
there.
"""
url = reverse('instructor_dashboard', kwargs={'course_id': unicode(self.course.id)})
# Response loads the whole instructor dashboard, so no need to explicitly
# navigate to a particular email section
response = self.client.get(url)
email_section = '<div class="vert-left send-email" id="section-send-email">'
# If this fails, it is likely because BulkEmailFlag.is_enabled() returns False
self.assertIn(email_section, response.content)
@classmethod
def setUpClass(cls):
super(EmailSendFromDashboardTestCase, cls).setUpClass()
course_title = u"ẗëṡẗ title イ乇丂イ ᄊ乇丂丂ムg乇 キo尺 ムレレ тэѕт мэѕѕаБэ"
cls.course = CourseFactory.create(
display_name=course_title,
default_store=ModuleStoreEnum.Type.split
)
def setUp(self):
super(EmailSendFromDashboardTestCase, self).setUp()
BulkEmailFlag.objects.create(enabled=True, require_course_email_auth=False)
self.create_staff_and_instructor()
self.create_students()
# load initial content (since we don't run migrations as part of tests):
call_command("loaddata", "course_email_template.json")
self.login_as_user(self.instructor)
# Pulling up the instructor dash email view here allows us to test sending emails in tests
self.goto_instructor_dash_email_view()
self.send_mail_url = reverse(
'send_email', kwargs={'course_id': unicode(self.course.id)}
)
self.success_content = {
'course_id': unicode(self.course.id),
'success': True,
}
def tearDown(self):
super(EmailSendFromDashboardTestCase, self).tearDown()
BulkEmailFlag.objects.all().delete()
class SendEmailWithMockedUgettextMixin(object):
"""
Mock ugettext for EmailSendFromDashboardTestCase.
"""
def send_email(self):
"""
Sends a dummy email to check the `from_addr` translation.
"""
test_email = {
'action': 'send',
'send_to': '["myself"]',
'subject': 'test subject for myself',
'message': 'test message for myself'
}
def mock_ugettext(text):
"""
Mocks ugettext to return the lang code with the original string.
e.g.
>>> mock_ugettext('Hello') == '@AR Hello@'
"""
return u'@{lang} {text}@'.format(
lang=get_language().upper(),
text=text,
)
with patch('bulk_email.tasks._', side_effect=mock_ugettext):
self.client.post(self.send_mail_url, test_email)
return mail.outbox[0]
@attr(shard=1)
@patch.dict(settings.FEATURES, {'ENABLE_INSTRUCTOR_EMAIL': True, 'REQUIRE_COURSE_EMAIL_AUTH': False})
@ddt.ddt
class LocalizedFromAddressPlatformLangTestCase(SendEmailWithMockedUgettextMixin, EmailSendFromDashboardTestCase):
"""
Tests to ensure that the bulk email has the "From" address localized according to LANGUAGE_CODE.
"""
@override_settings(LANGUAGE_CODE='en')
def test_english_platform(self):
"""
Ensures that the source-code language (English) works well.
"""
self.assertIsNone(self.course.language) # Sanity check
message = self.send_email()
self.assertRegexpMatches(message.from_email, '.*Course Staff.*')
@override_settings(LANGUAGE_CODE='eo')
def test_esperanto_platform(self):
"""
Tests the fake Esperanto language to ensure proper gettext calls.
"""
self.assertIsNone(self.course.language) # Sanity check
message = self.send_email()
self.assertRegexpMatches(message.from_email, '@EO .* Course Staff@')
@attr(shard=1)
@patch.dict(settings.FEATURES, {'ENABLE_INSTRUCTOR_EMAIL': True, 'REQUIRE_COURSE_EMAIL_AUTH': False})
@ddt.ddt
class LocalizedFromAddressCourseLangTestCase(SendEmailWithMockedUgettextMixin, EmailSendFromDashboardTestCase):
"""
Test if the bulk email "From" address uses the course.language if present instead of LANGUAGE_CODE.
This is similar to LocalizedFromAddressTestCase, but creates a different test case to allow
changing the class-wide course object.
"""
@classmethod
def setUpClass(cls):
"""
Creates a different course.
"""
super(LocalizedFromAddressCourseLangTestCase, cls).setUpClass()
course_title = u"ẗëṡẗ イэ"
cls.course = CourseFactory.create(
display_name=course_title,
language='ar',
default_store=ModuleStoreEnum.Type.split
)
@override_settings(LANGUAGE_CODE='eo')
def test_esperanto_platform_arabic_course(self):
"""
The course language should override the platform's.
"""
message = self.send_email()
self.assertRegexpMatches(message.from_email, '@AR .* Course Staff@')
@attr(shard=1)
@patch('bulk_email.models.html_to_text', Mock(return_value='Mocking CourseEmail.text_message', autospec=True))
class TestEmailSendFromDashboardMockedHtmlToText(EmailSendFromDashboardTestCase):
"""
Tests email sending with mocked html_to_text.
"""
def test_email_disabled(self):
"""
Test response when email is disabled for course.
"""
BulkEmailFlag.objects.create(enabled=True, require_course_email_auth=True)
test_email = {
'action': 'Send email',
'send_to': '["myself"]',
'subject': 'test subject for myself',
'message': 'test message for myself'
}
response = self.client.post(self.send_mail_url, test_email)
# We should get back a HttpResponseForbidden (status code 403)
self.assertContains(response, "Email is not enabled for this course.", status_code=403)
@patch('bulk_email.models.html_to_text', Mock(return_value='Mocking CourseEmail.text_message', autospec=True))
def test_send_to_self(self):
"""
Make sure email send to myself goes to myself.
"""
test_email = {
'action': 'send',
'send_to': '["myself"]',
'subject': 'test subject for myself',
'message': 'test message for myself'
}
response = self.client.post(self.send_mail_url, test_email)
self.assertEquals(json.loads(response.content), self.success_content)
# Check that outbox is as expected
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(len(mail.outbox[0].to), 1)
self.assertEquals(mail.outbox[0].to[0], self.instructor.email)
self.assertEquals(mail.outbox[0].subject, 'test subject for myself')
self.assertEquals(
mail.outbox[0].from_email,
u'"{course_display_name}" Course Staff <{course_name}-no-reply@example.com>'.format(
course_display_name=self.course.display_name,
course_name=self.course.id.course
)
)
def test_send_to_staff(self):
"""
Make sure email send to staff and instructors goes there.
"""
test_email = {
'action': 'Send email',
'send_to': '["staff"]',
'subject': 'test subject for staff',
'message': 'test message for subject'
}
response = self.client.post(self.send_mail_url, test_email)
self.assertEquals(json.loads(response.content), self.success_content)
# the 1 is for the instructor in this test and others
self.assertEquals(len(mail.outbox), 1 + len(self.staff))
self.assertItemsEqual(
[e.to[0] for e in mail.outbox],
[self.instructor.email] + [s.email for s in self.staff]
)
def test_send_to_cohort(self):
"""
Make sure email sent to a cohort goes there.
"""
cohort = CourseCohort.create(cohort_name='test cohort', course_id=self.course.id)
for student in self.students:
add_user_to_cohort(cohort.course_user_group, student.username)
test_email = {
'action': 'Send email',
'send_to': '["cohort:{}"]'.format(cohort.course_user_group.name),
'subject': 'test subject for cohort',
'message': 'test message for cohort'
}
response = self.client.post(self.send_mail_url, test_email)
self.assertEquals(json.loads(response.content), self.success_content)
self.assertItemsEqual(
[e.to[0] for e in mail.outbox],
[s.email for s in self.students]
)
def test_send_to_cohort_unenrolled(self):
"""
Make sure email sent to a cohort does not go to unenrolled members of the cohort.
"""
self.students.append(UserFactory()) # user will be added to cohort, but not enrolled in course
cohort = CourseCohort.create(cohort_name='test cohort', course_id=self.course.id)
for student in self.students:
add_user_to_cohort(cohort.course_user_group, student.username)
test_email = {
'action': 'Send email',
'send_to': '["cohort:{}"]'.format(cohort.course_user_group.name),
'subject': 'test subject for cohort',
'message': 'test message for cohort'
}
response = self.client.post(self.send_mail_url, test_email)
self.assertEquals(json.loads(response.content), self.success_content)
self.assertEquals(len(mail.outbox), len(self.students) - 1)
self.assertNotIn(self.students[-1].email, [e.to[0] for e in mail.outbox])
def test_send_to_track(self):
"""
Make sure email sent to a registration track goes there.
"""
CourseMode.objects.create(mode_slug='test', course_id=self.course.id)
for student in self.students:
update_enrollment(student, unicode(self.course.id), 'test')
test_email = {
'action': 'Send email',
'send_to': '["track:test"]',
'subject': 'test subject for test track',
'message': 'test message for test track',
}
response = self.client.post(self.send_mail_url, test_email)
self.assertEquals(json.loads(response.content), self.success_content)
self.assertItemsEqual(
[e.to[0] for e in mail.outbox],
[s.email for s in self.students]
)
def test_send_to_track_other_enrollments(self):
"""
Failing test for EDUCATOR-217: verifies that emails are only sent to
users in a specific track if they're in that track in the course the
email is being sent from.
"""
# Create a mode and designate an enrolled user to be placed in that mode
CourseMode.objects.create(mode_slug='test_mode', course_id=self.course.id)
test_mode_student = self.students[0]
update_enrollment(test_mode_student, unicode(self.course.id), 'test_mode')
# Take another user already enrolled in the course, then enroll them in
# another course but in that same test mode
test_mode_student_other_course = self.students[1]
other_course = CourseFactory.create()
CourseMode.objects.create(mode_slug='test_mode', course_id=other_course.id)
CourseEnrollmentFactory.create(
user=test_mode_student_other_course,
course_id=other_course.id
)
update_enrollment(test_mode_student_other_course, unicode(other_course.id), 'test_mode')
# Send the emails...
test_email = {
'action': 'Send email',
'send_to': '["track:test_mode"]',
'subject': 'test subject for test_mode track',
'message': 'test message for test_mode track',
}
response = self.client.post(self.send_mail_url, test_email)
self.assertEquals(json.loads(response.content), self.success_content)
# Only the student in the test mode in the course the email was
# sent from should receive an email
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to[0], test_mode_student.email)
def test_send_to_all(self):
"""
Make sure email send to all goes there.
"""
test_email = {
'action': 'Send email',
'send_to': '["myself", "staff", "learners"]',
'subject': 'test subject for all',
'message': 'test message for all'
}
response = self.client.post(self.send_mail_url, test_email)
self.assertEquals(json.loads(response.content), self.success_content)
# the 1 is for the instructor
self.assertEquals(len(mail.outbox), 1 + len(self.staff) + len(self.students))
self.assertItemsEqual(
[e.to[0] for e in mail.outbox],
[self.instructor.email] + [s.email for s in self.staff] + [s.email for s in self.students]
)
@override_settings(BULK_EMAIL_JOB_SIZE_THRESHOLD=1)
def test_send_to_all_high_queue(self):
"""
Test that email is still sent when the high priority queue is used
"""
self.test_send_to_all()
def test_no_duplicate_emails_staff_instructor(self):
"""
Test that no duplicate emails are sent to a course instructor that is
also course staff
"""
CourseStaffRole(self.course.id).add_users(self.instructor)
self.test_send_to_all()
def test_no_duplicate_emails_enrolled_staff(self):
"""
Test that no duplicate emails are sent to a course instructor that is
also enrolled in the course
"""
CourseEnrollment.enroll(self.instructor, self.course.id)
self.test_send_to_all()
def test_no_duplicate_emails_unenrolled_staff(self):
"""
Test that no duplicate emails are sent to a course staff that is
not enrolled in the course, but is enrolled in other courses
"""
course_1 = CourseFactory.create()
course_2 = CourseFactory.create()
# make sure self.instructor isn't enrolled in the course
self.assertFalse(CourseEnrollment.is_enrolled(self.instructor, self.course.id))
CourseEnrollment.enroll(self.instructor, course_1.id)
CourseEnrollment.enroll(self.instructor, course_2.id)
self.test_send_to_all()
def test_unicode_subject_send_to_all(self):
"""
Make sure email (with Unicode characters) send to all goes there.
"""
uni_subject = u'téśt śúbjéćt főŕ áĺĺ'
test_email = {
'action': 'Send email',
'send_to': '["myself", "staff", "learners"]',
'subject': uni_subject,
'message': 'test message for all'
}
response = self.client.post(self.send_mail_url, test_email)
self.assertEquals(json.loads(response.content), self.success_content)
self.assertEquals(len(mail.outbox), 1 + len(self.staff) + len(self.students))
self.assertItemsEqual(
[e.to[0] for e in mail.outbox],
[self.instructor.email] + [s.email for s in self.staff] + [s.email for s in self.students]
)
self.assertEquals(mail.outbox[0].subject, uni_subject)
def test_unicode_students_send_to_all(self):
"""
Make sure email (with Unicode characters) send to all goes there.
"""
# Create a student with Unicode in their first & last names
unicode_user = UserFactory(first_name=u'Ⓡⓞⓑⓞⓣ', last_name=u'ՇﻉรՇ')
CourseEnrollmentFactory.create(user=unicode_user, course_id=self.course.id)
self.students.append(unicode_user)
test_email = {
'action': 'Send email',
'send_to': '["myself", "staff", "learners"]',
'subject': 'test subject for all',
'message': 'test message for all'
}
response = self.client.post(self.send_mail_url, test_email)
self.assertEquals(json.loads(response.content), self.success_content)
self.assertEquals(len(mail.outbox), 1 + len(self.staff) + len(self.students))
self.assertItemsEqual(
[e.to[0] for e in mail.outbox],
[self.instructor.email] + [s.email for s in self.staff] + [s.email for s in self.students]
)
@override_settings(BULK_EMAIL_DEFAULT_FROM_EMAIL="no-reply@courseupdates.edx.org")
def test_long_course_display_name(self):
"""
This test verifies that courses with exorbitantly large display names
can still send emails, since 320 appears to be the character length
limit of from addresses for Amazon SES.
"""
test_email = {
'action': 'Send email',
'send_to': '["myself", "staff", "learners"]',
'subject': 'test subject for self',
'message': 'test message for self'
}
# make display_name that's longer than 320 characters when encoded
# to ascii and escaped, but shorter than 320 unicode characters
long_name = u"Финансовое программирование и политика, часть 1: макроэкономические счета и анализ"
course = CourseFactory.create(
display_name=long_name,
org="IMF",
number="FPP.1x",
run="2016",
)
instructor = InstructorFactory(course_key=course.id)
unexpected_from_addr = _get_source_address(
course.id, course.display_name, course_language=None, truncate=False
)
__, encoded_unexpected_from_addr = forbid_multi_line_headers(
"from", unexpected_from_addr, 'utf-8'
)
escaped_encoded_unexpected_from_addr = escape(encoded_unexpected_from_addr)
# it's shorter than 320 characters when just encoded
self.assertEqual(len(encoded_unexpected_from_addr), 318)
# escaping it brings it over that limit
self.assertEqual(len(escaped_encoded_unexpected_from_addr), 324)
# when not escaped or encoded, it's well below 320 characters
self.assertEqual(len(unexpected_from_addr), 137)
self.login_as_user(instructor)
send_mail_url = reverse('send_email', kwargs={'course_id': unicode(course.id)})
response = self.client.post(send_mail_url, test_email)
self.assertTrue(json.loads(response.content)['success'])
self.assertEqual(len(mail.outbox), 1)
from_email = mail.outbox[0].from_email
expected_from_addr = (
u'"{course_name}" Course Staff <{course_name}-no-reply@courseupdates.edx.org>'
).format(course_name=course.id.course)
self.assertEqual(
from_email,
expected_from_addr
)
self.assertEqual(len(from_email), 61)
@override_settings(BULK_EMAIL_EMAILS_PER_TASK=3)
@patch('bulk_email.tasks.update_subtask_status')
def test_chunked_queries_send_numerous_emails(self, email_mock):
"""
Test sending a large number of emails, to test the chunked querying
"""
mock_factory = MockCourseEmailResult()
email_mock.side_effect = mock_factory.get_mock_update_subtask_status()
added_users = []
for _ in xrange(LARGE_NUM_EMAILS):
user = UserFactory()
added_users.append(user)
CourseEnrollmentFactory.create(user=user, course_id=self.course.id)
optouts = []
for i in [1, 3, 9, 10, 18]: # 5 random optouts
user = added_users[i]
optouts.append(user)
optout = Optout(user=user, course_id=self.course.id)
optout.save()
test_email = {
'action': 'Send email',
'send_to': '["myself", "staff", "learners"]',
'subject': 'test subject for all',
'message': 'test message for all'
}
response = self.client.post(self.send_mail_url, test_email)
self.assertEquals(json.loads(response.content), self.success_content)
self.assertEquals(mock_factory.emails_sent,
1 + len(self.staff) + len(self.students) + LARGE_NUM_EMAILS - len(optouts))
outbox_contents = [e.to[0] for e in mail.outbox]
should_send_contents = ([self.instructor.email] +
[s.email for s in self.staff] +
[s.email for s in self.students] +
[s.email for s in added_users if s not in optouts])
self.assertItemsEqual(outbox_contents, should_send_contents)
@attr(shard=1)
@skipIf(os.environ.get("TRAVIS") == 'true', "Skip this test in Travis CI.")
class TestEmailSendFromDashboard(EmailSendFromDashboardTestCase):
"""
Tests email sending without mocked html_to_text.
Note that these tests are skipped on Travis because we can't use the
function `html_to_text` as it is currently implemented on Travis.
"""
def test_unicode_message_send_to_all(self):
"""
Make sure email (with Unicode characters) send to all goes there.
"""
uni_message = u'ẗëṡẗ ṁëṡṡäġë ḟöṛ äḷḷ イ乇丂イ ᄊ乇丂丂ムg乇 キo尺 ムレレ тэѕт мэѕѕаБэ fоѓ аll'
test_email = {
'action': 'Send email',
'send_to': '["myself", "staff", "learners"]',
'subject': 'test subject for all',
'message': uni_message
}
response = self.client.post(self.send_mail_url, test_email)
self.assertEquals(json.loads(response.content), self.success_content)
self.assertEquals(len(mail.outbox), 1 + len(self.staff) + len(self.students))
self.assertItemsEqual(
[e.to[0] for e in mail.outbox],
[self.instructor.email] + [s.email for s in self.staff] + [s.email for s in self.students]
)
message_body = mail.outbox[0].body
self.assertIn(uni_message, message_body)
class TestCourseEmailContext(SharedModuleStoreTestCase):
"""
Test the course email context hash used to send bulk emails.
"""
@classmethod
def setUpClass(cls):
"""
Create a course shared by all tests.
"""
super(TestCourseEmailContext, cls).setUpClass()
cls.course_title = u"Финансовое программирование и политика, часть 1: макроэкономические счета и анализ"
cls.course_org = 'IMF'
cls.course_number = "FPP.1x"
cls.course_run = "2016"
cls.course = CourseFactory.create(
display_name=cls.course_title,
org=cls.course_org,
number=cls.course_number,
run=cls.course_run,
)
def verify_email_context(self, email_context, scheme):
"""
Verifies that the bulk email context uses http or https URLs as appropriate.
"""
self.assertEquals(email_context['platform_name'], settings.PLATFORM_NAME)
self.assertEquals(email_context['course_title'], self.course_title)
self.assertEquals(email_context['course_url'],
'{}://edx.org/courses/{}/{}/{}/'.format(scheme,
self.course_org,
self.course_number,
self.course_run))
self.assertEquals(email_context['course_image_url'],
'{}://edx.org/c4x/{}/{}/asset/images_course_image.jpg'.format(scheme,
self.course_org,
self.course_number))
self.assertEquals(email_context['email_settings_url'], '{}://edx.org/dashboard'.format(scheme))
self.assertEquals(email_context['account_settings_url'], '{}://edx.org/account/settings'.format(scheme))
@override_settings(LMS_ROOT_URL="http://edx.org")
def test_insecure_email_context(self):
"""
Verifies that the bulk email context uses http URLs
"""
email_context = _get_course_email_context(self.course)
self.verify_email_context(email_context, 'http')
@override_settings(LMS_ROOT_URL="https://edx.org")
def test_secure_email_context(self):
"""
Verifies that the bulk email context uses https URLs
"""
email_context = _get_course_email_context(self.course)
self.verify_email_context(email_context, 'https')
|
agpl-3.0
|
patochectp/navitia
|
source/navitiacommon/navitiacommon/__init__.py
|
53
|
1193
|
# Copyright (c) 2001-2014, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# Hope you'll enjoy and contribute to this project,
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# IRC #navitia on freenode
# https://groups.google.com/d/forum/navitia
# www.navitia.io
|
agpl-3.0
|
apurvbhartia/gnuradio-routing
|
gnuradio-core/src/python/gnuradio/gruimpl/sdr_1000.py
|
18
|
2582
|
#
# Copyright 2003,2004 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr
class sdr_1000 (gr.sdr_1000_base):
"Control the DDS on the SDR-1000"
def __init__(self, pport = 0):
gr.sdr_1000_base.__init__(self, pport)
self.write_latch (3, 0x00, 0xC0) # Reset low, WRS/ low
self.write_reg (0x20, 0x40)
def write_reg(self, addr, data):
self.write_latch (3, addr & 0x3f, 0x3f)
self.write_latch (2, data, 0xff)
self.write_latch (3, 0x40, 0x40)
self.write_latch (3, 0x00, 0x40)
def set_freq(self, freq):
self.set_band (freq)
ftw = freq / 200e6;
for i in xrange(6):
word = int(ftw * 256)
ftw = ftw*256 - word
# print (('%d [%02x]') % (i, word))
self.write_reg (4+i, word)
def set_band (self, freq):
if freq <= 2.25e6:
band = 0
elif freq <= 5.5e6:
band = 1
elif freq <= 11e6:
band = 3 # due to wiring mistake on board
elif freq <= 22e6:
band = 2 # due to wiring mistake on board
elif freq <= 37.5e6:
band = 4
else:
band = 5
self.write_latch (1, 1 << band, 0x3f)
def set_bit (self, reg, bit, state):
val = 0x00
if state: val = 1<<bit
self.write_latch (reg, val, 1<<bit)
def set_tx (self, on = 1):
self.set_bit(1, 6, on)
def set_rx (self):
self.set_bit(1, 6, 0)
def set_gain (self, high):
self.set_bit(0, 7, high)
def set_mute (self, mute = 1):
self.set_bit(1, 7, mute)
def set_unmute (self):
self.set_bit(1, 7, 0)
def set_external_pin (self, pin, on = 1):
assert (pin < 8 and pin > 0), "Out of range 1..7"
self.set_bit(0, pin-1, on)
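# Illustrative sketch (not part of the original class): the byte-splitting
# that set_freq performs on the DDS frequency tuning word, reproduced as a
# standalone helper so it can be checked without SDR-1000 hardware.  The
# 200e6 reference clock is the value assumed by set_freq above.
def _ftw_bytes(freq, ref_clock=200e6):
    """Return the six tuning-word bytes set_freq would write to regs 4..9."""
    ftw = freq / ref_clock
    words = []
    for _ in range(6):
        word = int(ftw * 256)
        ftw = ftw * 256 - word
        words.append(word)
    return words
# e.g. _ftw_bytes(7.05e6) gives the byte sequence for a 7.05 MHz carrier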
|
gpl-3.0
|
dagwieers/ansible
|
lib/ansible/plugins/callback/__init__.py
|
13
|
16626
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import difflib
import json
import os
import sys
import warnings
from copy import deepcopy
from ansible import constants as C
from ansible.module_utils.common._collections_compat import MutableMapping
from ansible.module_utils.six import PY3
from ansible.module_utils._text import to_text
from ansible.parsing.ajson import AnsibleJSONEncoder
from ansible.plugins import AnsiblePlugin, get_plugin_class
from ansible.utils.color import stringc
from ansible.utils.display import Display
from ansible.vars.clean import strip_internal_keys, module_response_deepcopy
if PY3:
# OrderedDict is needed for a backwards compat shim on Python3.x only
# https://github.com/ansible/ansible/pull/49512
from collections import OrderedDict
else:
OrderedDict = None
global_display = Display()
__all__ = ["CallbackBase"]
_DEBUG_ALLOWED_KEYS = frozenset(('msg', 'exception', 'warnings', 'deprecations'))
class CallbackBase(AnsiblePlugin):
'''
This is a base ansible callback class that does nothing. New callbacks should
use this class as a base and override any callback methods they wish to execute
custom actions.
'''
def __init__(self, display=None, options=None):
if display:
self._display = display
else:
self._display = global_display
if self._display.verbosity >= 4:
name = getattr(self, 'CALLBACK_NAME', 'unnamed')
ctype = getattr(self, 'CALLBACK_TYPE', 'old')
version = getattr(self, 'CALLBACK_VERSION', '1.0')
self._display.vvvv('Loading callback plugin %s of type %s, v%s from %s' % (name, ctype, version, sys.modules[self.__module__].__file__))
self.disabled = False
self._plugin_options = {}
if options is not None:
self.set_options(options)
self._hide_in_debug = ('changed', 'failed', 'skipped', 'invocation', 'skip_reason')
''' helper for callbacks, so they don't all have to include deepcopy '''
_copy_result = deepcopy
def set_option(self, k, v):
self._plugin_options[k] = v
def get_option(self, k):
return self._plugin_options[k]
def set_options(self, task_keys=None, var_options=None, direct=None):
''' This is different from the normal plugin method as callbacks get called early and really don't accept keywords.
Also _options was already taken for CLI args and callbacks use _plugin_options instead.
'''
# load from config
self._plugin_options = C.config.get_plugin_options(get_plugin_class(self), self._load_name, keys=task_keys, variables=var_options, direct=direct)
def _dump_results(self, result, indent=None, sort_keys=True, keep_invocation=False):
if not indent and (result.get('_ansible_verbose_always') or self._display.verbosity > 2):
indent = 4
# All result keys starting with _ansible_ are internal, so remove them from the result before we output anything.
abridged_result = strip_internal_keys(module_response_deepcopy(result))
# remove invocation unless specifically wanting it
if not keep_invocation and self._display.verbosity < 3 and 'invocation' in result:
del abridged_result['invocation']
# remove diff information from screen output
if self._display.verbosity < 3 and 'diff' in result:
del abridged_result['diff']
# remove exception from screen output
if 'exception' in abridged_result:
del abridged_result['exception']
try:
jsonified_results = json.dumps(abridged_result, cls=AnsibleJSONEncoder, indent=indent, ensure_ascii=False, sort_keys=sort_keys)
except TypeError:
# Python3 bug: throws an exception when keys are non-homogeneous types:
# https://bugs.python.org/issue25457
# sort into an OrderedDict and then json.dumps() that instead
if not OrderedDict:
raise
jsonified_results = json.dumps(OrderedDict(sorted(abridged_result.items(), key=to_text)),
cls=AnsibleJSONEncoder, indent=indent,
ensure_ascii=False, sort_keys=False)
return jsonified_results
def _handle_warnings(self, res):
''' display warnings, if enabled and any exist in the result '''
if C.ACTION_WARNINGS:
if 'warnings' in res and res['warnings']:
for warning in res['warnings']:
self._display.warning(warning)
del res['warnings']
if 'deprecations' in res and res['deprecations']:
for warning in res['deprecations']:
self._display.deprecated(**warning)
del res['deprecations']
def _handle_exception(self, result, use_stderr=False):
if 'exception' in result:
msg = "An exception occurred during task execution. "
if self._display.verbosity < 3:
# extract just the actual error message from the exception text
error = result['exception'].strip().split('\n')[-1]
msg += "To see the full traceback, use -vvv. The error was: %s" % error
else:
msg = "The full traceback is:\n" + result['exception']
del result['exception']
self._display.display(msg, color=C.COLOR_ERROR, stderr=use_stderr)
def _serialize_diff(self, diff):
return json.dumps(diff, sort_keys=True, indent=4, separators=(u',', u': ')) + u'\n'
def _get_diff(self, difflist):
if not isinstance(difflist, list):
difflist = [difflist]
ret = []
for diff in difflist:
if 'dst_binary' in diff:
ret.append(u"diff skipped: destination file appears to be binary\n")
if 'src_binary' in diff:
ret.append(u"diff skipped: source file appears to be binary\n")
if 'dst_larger' in diff:
ret.append(u"diff skipped: destination file size is greater than %d\n" % diff['dst_larger'])
if 'src_larger' in diff:
ret.append(u"diff skipped: source file size is greater than %d\n" % diff['src_larger'])
if 'before' in diff and 'after' in diff:
# format complex structures into 'files'
for x in ['before', 'after']:
if isinstance(diff[x], MutableMapping):
diff[x] = self._serialize_diff(diff[x])
if 'before_header' in diff:
before_header = u"before: %s" % diff['before_header']
else:
before_header = u'before'
if 'after_header' in diff:
after_header = u"after: %s" % diff['after_header']
else:
after_header = u'after'
before_lines = diff['before'].splitlines(True)
after_lines = diff['after'].splitlines(True)
if before_lines and not before_lines[-1].endswith(u'\n'):
before_lines[-1] += u'\n\\ No newline at end of file\n'
if after_lines and not after_lines[-1].endswith('\n'):
after_lines[-1] += u'\n\\ No newline at end of file\n'
differ = difflib.unified_diff(before_lines,
after_lines,
fromfile=before_header,
tofile=after_header,
fromfiledate=u'',
tofiledate=u'',
n=C.DIFF_CONTEXT)
difflines = list(differ)
if len(difflines) >= 3 and sys.version_info[:2] == (2, 6):
# difflib in Python 2.6 adds trailing spaces after
# filenames in the -- before/++ after headers.
difflines[0] = difflines[0].replace(u' \n', u'\n')
difflines[1] = difflines[1].replace(u' \n', u'\n')
# it also treats empty files differently
difflines[2] = difflines[2].replace(u'-1,0', u'-0,0').replace(u'+1,0', u'+0,0')
has_diff = False
for line in difflines:
has_diff = True
if line.startswith(u'+'):
line = stringc(line, C.COLOR_DIFF_ADD)
elif line.startswith(u'-'):
line = stringc(line, C.COLOR_DIFF_REMOVE)
elif line.startswith(u'@@'):
line = stringc(line, C.COLOR_DIFF_LINES)
ret.append(line)
if has_diff:
ret.append('\n')
if 'prepared' in diff:
ret.append(diff['prepared'])
return u''.join(ret)
def _get_item_label(self, result):
''' retrieves the value to be displayed as a label for an item entry from a result object'''
if result.get('_ansible_no_log', False):
item = "(censored due to no_log)"
else:
item = result.get('_ansible_item_label', result.get('item'))
return item
def _get_item(self, result):
''' here for backwards compat, really should have always been named: _get_item_label'''
cback = getattr(self, 'NAME', os.path.basename(__file__))
self._display.deprecated("The %s callback plugin should be updated to use the _get_item_label method instead" % cback, version="2.11")
return self._get_item_label(result)
def _process_items(self, result):
# just remove them as now they get handled by individual callbacks
del result._result['results']
def _clean_results(self, result, task_name):
''' removes data from results for display '''
# mostly controls that debug only outputs what it was meant to
if task_name == 'debug':
if 'msg' in result:
# msg should be alone
for key in list(result.keys()):
if key not in _DEBUG_ALLOWED_KEYS and not key.startswith('_'):
result.pop(key)
else:
# 'var' value as field, so eliminate others and what is left should be varname
for hidme in self._hide_in_debug:
result.pop(hidme, None)
def set_play_context(self, play_context):
pass
def on_any(self, *args, **kwargs):
pass
def runner_on_failed(self, host, res, ignore_errors=False):
pass
def runner_on_ok(self, host, res):
pass
def runner_on_skipped(self, host, item=None):
pass
def runner_on_unreachable(self, host, res):
pass
def runner_on_no_hosts(self):
pass
def runner_on_async_poll(self, host, res, jid, clock):
pass
def runner_on_async_ok(self, host, res, jid):
pass
def runner_on_async_failed(self, host, res, jid):
pass
def playbook_on_start(self):
pass
def playbook_on_notify(self, host, handler):
pass
def playbook_on_no_hosts_matched(self):
pass
def playbook_on_no_hosts_remaining(self):
pass
def playbook_on_task_start(self, name, is_conditional):
pass
def playbook_on_vars_prompt(self, varname, private=True, prompt=None, encrypt=None, confirm=False, salt_size=None, salt=None, default=None, unsafe=None):
pass
def playbook_on_setup(self):
pass
def playbook_on_import_for_host(self, host, imported_file):
pass
def playbook_on_not_import_for_host(self, host, missing_file):
pass
def playbook_on_play_start(self, name):
pass
def playbook_on_stats(self, stats):
pass
def on_file_diff(self, host, diff):
pass
# V2 METHODS, by default they call v1 counterparts if possible
def v2_on_any(self, *args, **kwargs):
self.on_any(args, kwargs)
def v2_runner_on_failed(self, result, ignore_errors=False):
host = result._host.get_name()
self.runner_on_failed(host, result._result, ignore_errors)
def v2_runner_on_ok(self, result):
host = result._host.get_name()
self.runner_on_ok(host, result._result)
def v2_runner_on_skipped(self, result):
if C.DISPLAY_SKIPPED_HOSTS:
host = result._host.get_name()
self.runner_on_skipped(host, self._get_item_label(getattr(result._result, 'results', {})))
def v2_runner_on_unreachable(self, result):
host = result._host.get_name()
self.runner_on_unreachable(host, result._result)
# FIXME: not called
def v2_runner_on_async_poll(self, result):
host = result._host.get_name()
jid = result._result.get('ansible_job_id')
# FIXME, get real clock
clock = 0
self.runner_on_async_poll(host, result._result, jid, clock)
# FIXME: not called
def v2_runner_on_async_ok(self, result):
host = result._host.get_name()
jid = result._result.get('ansible_job_id')
self.runner_on_async_ok(host, result._result, jid)
# FIXME: not called
def v2_runner_on_async_failed(self, result):
host = result._host.get_name()
jid = result._result.get('ansible_job_id')
self.runner_on_async_failed(host, result._result, jid)
def v2_playbook_on_start(self, playbook):
self.playbook_on_start()
def v2_playbook_on_notify(self, handler, host):
self.playbook_on_notify(host, handler)
def v2_playbook_on_no_hosts_matched(self):
self.playbook_on_no_hosts_matched()
def v2_playbook_on_no_hosts_remaining(self):
self.playbook_on_no_hosts_remaining()
def v2_playbook_on_task_start(self, task, is_conditional):
self.playbook_on_task_start(task.name, is_conditional)
# FIXME: not called
def v2_playbook_on_cleanup_task_start(self, task):
pass # no v1 correspondence
def v2_playbook_on_handler_task_start(self, task):
pass # no v1 correspondence
def v2_playbook_on_vars_prompt(self, varname, private=True, prompt=None, encrypt=None, confirm=False, salt_size=None, salt=None, default=None, unsafe=None):
self.playbook_on_vars_prompt(varname, private, prompt, encrypt, confirm, salt_size, salt, default, unsafe)
# FIXME: not called
def v2_playbook_on_import_for_host(self, result, imported_file):
host = result._host.get_name()
self.playbook_on_import_for_host(host, imported_file)
# FIXME: not called
def v2_playbook_on_not_import_for_host(self, result, missing_file):
host = result._host.get_name()
self.playbook_on_not_import_for_host(host, missing_file)
def v2_playbook_on_play_start(self, play):
self.playbook_on_play_start(play.name)
def v2_playbook_on_stats(self, stats):
self.playbook_on_stats(stats)
def v2_on_file_diff(self, result):
if 'diff' in result._result:
host = result._host.get_name()
self.on_file_diff(host, result._result['diff'])
def v2_playbook_on_include(self, included_file):
pass # no v1 correspondence
def v2_runner_item_on_ok(self, result):
pass
def v2_runner_item_on_failed(self, result):
pass
def v2_runner_item_on_skipped(self, result):
pass
def v2_runner_retry(self, result):
pass
def v2_runner_on_start(self, host, task):
"""Event used when host begins execution of a task
.. versionadded:: 2.8
"""
pass
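# Illustrative sketch (not part of the original module): a minimal callback
# plugin built on CallbackBase.  It overrides only the v2 hooks it cares
# about; the CALLBACK_* values are conventional metadata and the name
# 'minimal' is made up for this example.
class MinimalCallback(CallbackBase):
    CALLBACK_VERSION = 2.0
    CALLBACK_TYPE = 'aggregate'
    CALLBACK_NAME = 'minimal'
    def v2_runner_on_ok(self, result):
        self._display.display('ok: %s' % result._host.get_name())
    def v2_runner_on_failed(self, result, ignore_errors=False):
        self._display.display('failed: %s' % result._host.get_name(),
                              color=C.COLOR_ERROR)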
|
gpl-3.0
|
pugpe/pugpe
|
apps/cert/management/commands/send_certificates.py
|
1
|
2215
|
# -*- coding: utf-8 -*-
import traceback
from datetime import timedelta
from django.core import mail
from django.core.mail import EmailMultiAlternatives, mail_admins
from django.core.management.base import BaseCommand
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _
from django.contrib.sites.models import Site
from django.conf import settings
from django.utils import translation
from django.utils import timezone
from cert.models import Attendee
class Command(BaseCommand):
help = u'Send certificate e-mails'
def get_email(self, attendee):
translation.activate(settings.LANGUAGE_CODE)
subject = _(u'Certificado de participação | PUG-PE')
from_email = settings.DEFAULT_FROM_EMAIL
ctx = {
'site': Site.objects.get_current().domain,
'event': attendee.event,
'attendee': attendee,
}
text_content = render_to_string('cert/cert_email.txt', ctx)
html_content = render_to_string('cert/cert_email.html', ctx)
msg = EmailMultiAlternatives(
subject, text_content, from_email, [attendee.email],
)
msg.attach_alternative(html_content, "text/html")
return msg
def handle(self, *args, **options):
connection = mail.get_connection()
num_emails = 0
attendees = Attendee.objects.filter(sent_date__isnull=True)
# Avoid sending for events that are too old
attendees = attendees.filter(
pub_date__gte=timezone.now() - timedelta(days=10),
)
for attendee in attendees:
msg = self.get_email(attendee)
try:
num_emails += connection.send_messages([msg])
except Exception as exc:
subject = _(u'PUG-PE: Problema envio certificado')
body = 'except: {0}'.format(exc)
body += traceback.format_exc()
mail_admins(subject, body)
else:
attendee.sent_date = timezone.now()
attendee.save()
self.stdout.write(
unicode(_(u'Foram enviados {0} emails\n'.format(num_emails))),
)
|
mit
|
wgcv/SWW-Crashphone
|
lib/python2.7/site-packages/pip/_vendor/distlib/scripts.py
|
190
|
12307
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2013 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
from io import BytesIO
import logging
import os
import re
import struct
import sys
from .compat import sysconfig, fsencode, detect_encoding, ZipFile
from .resources import finder
from .util import (FileOperator, get_export_entry, convert_path,
get_executable, in_venv)
logger = logging.getLogger(__name__)
_DEFAULT_MANIFEST = '''
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
<assemblyIdentity version="1.0.0.0"
processorArchitecture="X86"
name="%s"
type="win32"/>
<!-- Identify the application security requirements. -->
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
<requestedExecutionLevel level="asInvoker" uiAccess="false"/>
</requestedPrivileges>
</security>
</trustInfo>
</assembly>'''.strip()
# check if Python is called on the first line with this expression
FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$')
SCRIPT_TEMPLATE = '''# -*- coding: utf-8 -*-
if __name__ == '__main__':
import sys, re
def _resolve(module, func):
__import__(module)
mod = sys.modules[module]
parts = func.split('.')
result = getattr(mod, parts.pop(0))
for p in parts:
result = getattr(result, p)
return result
try:
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
func = _resolve('%(module)s', '%(func)s')
rc = func() # None interpreted as 0
except Exception as e: # only supporting Python >= 2.6
sys.stderr.write('%%s\\n' %% e)
rc = 1
sys.exit(rc)
'''
class ScriptMaker(object):
"""
A class to copy or create scripts from source scripts or callable
specifications.
"""
script_template = SCRIPT_TEMPLATE
executable = None # for shebangs
def __init__(self, source_dir, target_dir, add_launchers=True,
dry_run=False, fileop=None):
self.source_dir = source_dir
self.target_dir = target_dir
self.add_launchers = add_launchers
self.force = False
self.clobber = False
# It only makes sense to set mode bits on POSIX.
self.set_mode = (os.name == 'posix')
self.variants = set(('', 'X.Y'))
self._fileop = fileop or FileOperator(dry_run)
def _get_alternate_executable(self, executable, options):
if options.get('gui', False) and os.name == 'nt':
dn, fn = os.path.split(executable)
fn = fn.replace('python', 'pythonw')
executable = os.path.join(dn, fn)
return executable
def _get_shebang(self, encoding, post_interp=b'', options=None):
enquote = True
if self.executable:
executable = self.executable
enquote = False # assume this will be taken care of
elif not sysconfig.is_python_build():
executable = get_executable()
elif in_venv():
executable = os.path.join(sysconfig.get_path('scripts'),
'python%s' % sysconfig.get_config_var('EXE'))
else:
executable = os.path.join(
sysconfig.get_config_var('BINDIR'),
'python%s%s' % (sysconfig.get_config_var('VERSION'),
sysconfig.get_config_var('EXE')))
if options:
executable = self._get_alternate_executable(executable, options)
# If the user didn't specify an executable, it may be necessary to
# cater for executable paths with spaces (not uncommon on Windows)
if enquote and ' ' in executable:
executable = '"%s"' % executable
executable = fsencode(executable)
shebang = b'#!' + executable + post_interp + b'\n'
# Python parser starts to read a script using UTF-8 until
# it gets a #coding:xxx cookie. The shebang has to be the
# first line of a file, the #coding:xxx cookie cannot be
# written before. So the shebang has to be decodable from
# UTF-8.
try:
shebang.decode('utf-8')
except UnicodeDecodeError:
raise ValueError(
'The shebang (%r) is not decodable from utf-8' % shebang)
# If the script is encoded to a custom encoding (use a
# #coding:xxx cookie), the shebang has to be decodable from
# the script encoding too.
if encoding != 'utf-8':
try:
shebang.decode(encoding)
except UnicodeDecodeError:
raise ValueError(
'The shebang (%r) is not decodable '
'from the script encoding (%r)' % (shebang, encoding))
return shebang
def _get_script_text(self, entry):
return self.script_template % dict(module=entry.prefix,
func=entry.suffix)
manifest = _DEFAULT_MANIFEST
def get_manifest(self, exename):
base = os.path.basename(exename)
return self.manifest % base
def _write_script(self, names, shebang, script_bytes, filenames, ext):
use_launcher = self.add_launchers and os.name == 'nt'
linesep = os.linesep.encode('utf-8')
if not use_launcher:
script_bytes = shebang + linesep + script_bytes
else:
if ext == 'py':
launcher = self._get_launcher('t')
else:
launcher = self._get_launcher('w')
stream = BytesIO()
with ZipFile(stream, 'w') as zf:
zf.writestr('__main__.py', script_bytes)
zip_data = stream.getvalue()
script_bytes = launcher + shebang + linesep + zip_data
for name in names:
outname = os.path.join(self.target_dir, name)
if use_launcher:
n, e = os.path.splitext(outname)
if e.startswith('.py'):
outname = n
outname = '%s.exe' % outname
try:
self._fileop.write_binary_file(outname, script_bytes)
except Exception:
# Failed writing an executable - it might be in use.
logger.warning('Failed to write executable - trying to '
'use .deleteme logic')
dfname = '%s.deleteme' % outname
if os.path.exists(dfname):
os.remove(dfname) # Not allowed to fail here
os.rename(outname, dfname) # nor here
self._fileop.write_binary_file(outname, script_bytes)
logger.debug('Able to replace executable using '
'.deleteme logic')
try:
os.remove(dfname)
except Exception:
pass # still in use - ignore error
else:
if os.name == 'nt' and not outname.endswith('.' + ext):
outname = '%s.%s' % (outname, ext)
if os.path.exists(outname) and not self.clobber:
logger.warning('Skipping existing file %s', outname)
continue
self._fileop.write_binary_file(outname, script_bytes)
if self.set_mode:
self._fileop.set_executable_mode([outname])
filenames.append(outname)
def _make_script(self, entry, filenames, options=None):
shebang = self._get_shebang('utf-8', options=options)
script = self._get_script_text(entry).encode('utf-8')
name = entry.name
scriptnames = set()
if '' in self.variants:
scriptnames.add(name)
if 'X' in self.variants:
scriptnames.add('%s%s' % (name, sys.version[0]))
if 'X.Y' in self.variants:
scriptnames.add('%s-%s' % (name, sys.version[:3]))
if options and options.get('gui', False):
ext = 'pyw'
else:
ext = 'py'
self._write_script(scriptnames, shebang, script, filenames, ext)
def _copy_script(self, script, filenames):
adjust = False
script = os.path.join(self.source_dir, convert_path(script))
outname = os.path.join(self.target_dir, os.path.basename(script))
if not self.force and not self._fileop.newer(script, outname):
logger.debug('not copying %s (up-to-date)', script)
return
# Always open the file, but ignore failures in dry-run mode --
# that way, we'll get accurate feedback if we can read the
# script.
try:
f = open(script, 'rb')
except IOError:
if not self.dry_run:
raise
f = None
else:
encoding, lines = detect_encoding(f.readline)
f.seek(0)
first_line = f.readline()
if not first_line:
logger.warning('%s: %s is an empty file (skipping)',
self.get_command_name(), script)
return
match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n'))
if match:
adjust = True
post_interp = match.group(1) or b''
if not adjust:
if f:
f.close()
self._fileop.copy_file(script, outname)
if self.set_mode:
self._fileop.set_executable_mode([outname])
filenames.append(outname)
else:
logger.info('copying and adjusting %s -> %s', script,
self.target_dir)
if not self._fileop.dry_run:
shebang = self._get_shebang(encoding, post_interp)
if b'pythonw' in first_line:
ext = 'pyw'
else:
ext = 'py'
n = os.path.basename(outname)
self._write_script([n], shebang, f.read(), filenames, ext)
if f:
f.close()
@property
def dry_run(self):
return self._fileop.dry_run
@dry_run.setter
def dry_run(self, value):
self._fileop.dry_run = value
if os.name == 'nt':
# Executable launcher support.
# Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/
def _get_launcher(self, kind):
if struct.calcsize('P') == 8: # 64-bit
bits = '64'
else:
bits = '32'
name = '%s%s.exe' % (kind, bits)
# Issue 31: don't hardcode an absolute package name, but
# determine it relative to the current package
distlib_package = __name__.rsplit('.', 1)[0]
result = finder(distlib_package).find(name).bytes
return result
# Public API follows
def make(self, specification, options=None):
"""
Make a script.
:param specification: The specification, which is either a valid export
entry specification (to make a script from a
callable) or a filename (to make a script by
copying from a source location).
:param options: A dictionary of options controlling script generation.
:return: A list of all absolute pathnames written to.
"""
filenames = []
entry = get_export_entry(specification)
if entry is None:
self._copy_script(specification, filenames)
else:
self._make_script(entry, filenames, options=options)
return filenames
def make_multiple(self, specifications, options=None):
"""
        Take a list of specifications and make scripts from them.
        :param specifications: A list of specifications.
        :return: A list of all absolute pathnames written to.
"""
filenames = []
for specification in specifications:
filenames.extend(self.make(specification, options))
return filenames
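# A minimal usage sketch for the public API above, assuming this class is
# distlib's ScriptMaker (the class statement is outside this excerpt, so the
# name and constructor arguments below are assumptions, not part of this file):
#
#     from distlib.scripts import ScriptMaker
#     maker = ScriptMaker(source_dir='scripts', target_dir='build/bin')
#     # Export-entry form: wraps a callable in a generated script.
#     written = maker.make('mytool = mypkg.cli:main')
#     # Filename form: copies (and, if needed, adjusts the shebang of) a script.
#     written += maker.make('legacy-tool.py')
#     # written now holds the absolute paths of everything that was created.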
|
apache-2.0
|
kodat/odoo-module-template
|
odoo_module_template/model.py
|
1
|
1936
|
# -*- coding: utf-8 -*-
# Bashir Idirs (Alsuty)
# Copyright (C) 2016.
#
# This Code is free: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from openerp import models,fields
class FirstModel(models.Model):
_name= 'template.firstmodel'
image = fields.Binary('Image')
name = fields.Char('Name', required=True)
select_field = fields.Selection(string="Type",
selection=[('type1', 'Type1'),
('type2', 'Type2'),
('type3', 'Type3'),], required=True)
boolean_field = fields.Boolean('Check')
integer_field = fields.Integer('Integer Number')
float_field = fields.Float('Float Value')
many2one_field = fields.Many2one('template.secondmodel', 'Many2one')
many2many_ids = fields.Many2many('template.thirdmodel',
'many2many_relation', 'firstmodel_id',
'thirdmodel_id', string='Many2many')
ony2many_fields = fields.One2many('template.forthmodel',
'firstmodel_id', string='One2many')
class SecondModel(models.Model):
_name = 'template.secondmodel'
name = fields.Char('Name')
class ThirdModel(models.Model):
_name = 'template.thirdmodel'
name = fields.Char('Name')
class ForthModel(models.Model):
_name = 'template.forthmodel'
name = fields.Char('Name')
firstmodel_id= fields.Many2one('template.firstmodel')
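# A hedged usage sketch (not part of the module): creating linked records for
# the models above through the ORM, e.g. from a method or test that has access
# to self.env. All record values here are illustrative only.
#
#     second = self.env['template.secondmodel'].create({'name': 'Parent'})
#     third = self.env['template.thirdmodel'].create({'name': 'Tag'})
#     first = self.env['template.firstmodel'].create({
#         'name': 'Example',
#         'select_field': 'type1',
#         'boolean_field': True,
#         'integer_field': 3,
#         'float_field': 1.5,
#         'many2one_field': second.id,
#         'many2many_ids': [(6, 0, [third.id])],           # replace the set
#         'ony2many_fields': [(0, 0, {'name': 'Child'})],  # field name as declared above
#     })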
|
gpl-3.0
|
XiaosongWei/chromium-crosswalk
|
third_party/WebKit/Source/build/scripts/make_media_features.py
|
78
|
1324
|
#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import media_feature_symbol
import in_generator
import template_expander
import name_utilities
import sys
class MakeMediaFeaturesWriter(in_generator.Writer):
defaults = {
'Conditional': None, # FIXME: Add support for Conditional.
'RuntimeEnabled': None,
'ImplementedAs': None,
}
filters = {
'symbol': media_feature_symbol.getMediaFeatureSymbolWithSuffix(''),
'to_macro_style': name_utilities.to_macro_style,
}
default_parameters = {
'namespace': '',
'export': '',
}
def __init__(self, in_file_path):
super(MakeMediaFeaturesWriter, self).__init__(in_file_path)
self._outputs = {
('MediaFeatures.h'): self.generate_header,
}
self._template_context = {
'namespace': '',
'export': '',
'entries': self.in_file.name_dictionaries,
}
@template_expander.use_jinja('MediaFeatures.h.tmpl', filters=filters)
def generate_header(self):
return self._template_context
if __name__ == '__main__':
in_generator.Maker(MakeMediaFeaturesWriter).main(sys.argv)
|
bsd-3-clause
|
Commonists/wm_metrics
|
app/wmmetrics.py
|
2
|
4057
|
#!/usr/bin/env python
# -*- coding: latin-1 -*-
"""wm_metrics flask webapp module."""
import os
import sys
from flask import Flask, render_template, request
current_dir = os.path.abspath(os.path.dirname(__file__))
sys.path.insert(0, os.path.join(current_dir, '..'))
from wm_metrics.fdc.round import Round
from wm_metrics import wmfr_photography
from wm_metrics import category_induced
from wm_metrics import mw_util
app = Flask(__name__)
@app.route("/")
def index():
"""Home page of the wm_metrics tool."""
return render_template('index.html')
@app.route("/fdc")
def fdc_report_page():
"""Form to create your FDC report."""
return render_template('fdc-report.html')
@app.route("/category-induced")
def category_induced_page():
"""Form to compute the Category induced."""
return render_template('category-induced.html')
@app.route("/fdc/submit", methods=["POST"])
def compute_fdc_report():
"""FDC report generation."""
category = request.form['category']
fdc_year = int(request.form['year'])
round_num = int(request.form['round'])
prefix = "Category:"
if not category:
return render_template('error.html', message='No category provided')
if category.startswith(prefix):
category = category[len(prefix):]
app.logger.info('FDC report %s-%s on %s' % (fdc_year, round_num, category))
nb_uploaders_on = 'indicator-uploaders' in request.form
nb_files_on = 'indicator-files' in request.form
nb_labels_on = 'indicator-highlighted' in request.form
pct_labels_on = 'indicator-highlighted_percentage' in request.form
pixel_count_on = 'indicator-pixel-count' in request.form
fdc_round = Round(fdc_year - 1, fdc_year, round_num)
results = 'Nothing'
try:
results = wmfr_photography.make_example_report(fdc_round, category,
nb_files_on=nb_files_on,
nb_labels_on=nb_labels_on,
nb_uploaders_on=nb_uploaders_on,
pct_labels_on=pct_labels_on,
pixel_count_on=pixel_count_on)
except wmfr_photography.WMmetricsException, e:
message = 'Something went wrong in Wm_metrics: ' + e.message
return render_template('error.html', message=message)
try:
return render_template('fdc-report-results.html',
category=category,
fdc_round=fdc_round,
contents=results.decode('utf-8'))
except UnicodeDecodeError, e:
return render_template('error.html', message='Unicode error')
except Exception, e:
return render_template('error.html', message=e)
@app.route("/category-induced/submit", methods=["POST"])
def compute_category_induced():
"""Compute the induced categories."""
category = mw_util.str2cat(request.form['category'])
app.logger.info('CategoryInduced on %s' % (category))
try:
ci = category_induced.CategoryInduced(category)
ci.induce_categories()
contents = '\n'.join(ci.result)
return render_template('category-induced-result.html',
category=category,
images_count=ci.images_count,
categories_count=ci.categories_traversed_count,
results_count=ci.results_count,
contents=contents)
except Exception, e:
return render_template('error.html', message=e)
@app.errorhandler(404)
def page_not_found(error):
message = "Page does not exist."
return render_template('error.html', message=message), 404
@app.errorhandler(500)
def error_500(error):
message = "Internal wm-metrics error (aka 500)"
return render_template('error.html', message=message), 500
if __name__ == "__main__":
app.run(debug=True)
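# Running the app locally (a sketch; assumes Flask and the wm_metrics package
# are importable from the repository root):
#
#     python app/wmmetrics.py
#
# app.run() without host/port arguments serves on http://127.0.0.1:5000/.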
|
mit
|
susuchina/ERPNEXT
|
erpnext/manufacturing/doctype/bom/bom.py
|
11
|
14754
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import cint, cstr, flt
from frappe import _
from frappe.model.document import Document
from operator import itemgetter
class BOM(Document):
def autoname(self):
last_name = frappe.db.sql("""select max(name) from `tabBOM`
where name like "BOM/%s/%%" """ % frappe.db.escape(self.item))
if last_name:
idx = cint(cstr(last_name[0][0]).split('/')[-1].split('-')[0]) + 1
else:
idx = 1
self.name = 'BOM/' + self.item + ('/%.3i' % idx)
def validate(self):
self.clear_operations()
self.validate_main_item()
from erpnext.utilities.transaction_base import validate_uom_is_integer
validate_uom_is_integer(self, "stock_uom", "qty", "BOM Item")
self.validate_materials()
self.set_bom_material_details()
self.calculate_cost()
self.validate_operations()
def on_update(self):
self.check_recursion()
self.update_exploded_items()
def on_submit(self):
self.manage_default_bom()
def on_cancel(self):
frappe.db.set(self, "is_active", 0)
frappe.db.set(self, "is_default", 0)
# check if used in any other bom
self.validate_bom_links()
self.manage_default_bom()
def on_update_after_submit(self):
self.validate_bom_links()
self.manage_default_bom()
def get_item_det(self, item_code):
item = frappe.db.sql("""select name, item_name, is_asset_item, is_purchase_item,
docstatus, description, image, is_sub_contracted_item, stock_uom, default_bom,
last_purchase_rate
from `tabItem` where name=%s""", item_code, as_dict = 1)
if not item:
frappe.throw(_("Item: {0} does not exist in the system").format(item_code))
return item
def validate_rm_item(self, item):
if item[0]['name'] == self.item:
frappe.throw(_("Raw material cannot be same as main Item"))
def set_bom_material_details(self):
for item in self.get("items"):
ret = self.get_bom_material_detail({"item_code": item.item_code, "item_name": item.item_name, "bom_no": item.bom_no,
"qty": item.qty})
for r in ret:
if not item.get(r):
item.set(r, ret[r])
def get_bom_material_detail(self, args=None):
""" Get raw material details like uom, desc and rate"""
if not args:
args = frappe.form_dict.get('args')
if isinstance(args, basestring):
import json
args = json.loads(args)
item = self.get_item_det(args['item_code'])
self.validate_rm_item(item)
args['bom_no'] = args['bom_no'] or item and cstr(item[0]['default_bom']) or ''
args.update(item[0])
rate = self.get_rm_rate(args)
ret_item = {
'item_name' : item and args['item_name'] or '',
'description' : item and args['description'] or '',
'image' : item and args['image'] or '',
'stock_uom' : item and args['stock_uom'] or '',
'bom_no' : args['bom_no'],
'rate' : rate
}
return ret_item
def get_rm_rate(self, arg):
""" Get raw material rate as per selected method, if bom exists takes bom cost """
rate = 0
if arg['bom_no']:
rate = self.get_bom_unitcost(arg['bom_no'])
elif arg and (arg['is_purchase_item'] == 1 or arg['is_sub_contracted_item'] == 1):
if self.rm_cost_as_per == 'Valuation Rate':
rate = self.get_valuation_rate(arg)
elif self.rm_cost_as_per == 'Last Purchase Rate':
rate = arg['last_purchase_rate']
elif self.rm_cost_as_per == "Price List":
if not self.buying_price_list:
frappe.throw(_("Please select Price List"))
rate = frappe.db.get_value("Item Price", {"price_list": self.buying_price_list,
"item_code": arg["item_code"]}, "price_list_rate") or 0
return rate
def update_cost(self):
if self.docstatus == 2:
return
items_rate = frappe._dict()
for d in self.get("items"):
rate = self.get_bom_material_detail({'item_code': d.item_code, 'bom_no': d.bom_no,
'qty': d.qty})["rate"]
if rate:
d.rate = rate
items_rate.setdefault(d.item_code, d.rate)
for e in self.get("exploded_items"):
if items_rate.get(e.item_code):
e.rate = items_rate.get(e.item_code)
if self.docstatus == 1:
self.flags.ignore_validate_update_after_submit = True
self.calculate_cost()
self.save()
frappe.msgprint(_("Cost Updated"))
def get_bom_unitcost(self, bom_no):
bom = frappe.db.sql("""select name, total_cost/quantity as unit_cost from `tabBOM`
where is_active = 1 and name = %s""", bom_no, as_dict=1)
return bom and bom[0]['unit_cost'] or 0
def get_valuation_rate(self, args):
""" Get weighted average of valuation rate from all warehouses """
total_qty, total_value, valuation_rate = 0.0, 0.0, 0.0
for d in frappe.db.sql("""select actual_qty, stock_value from `tabBin`
where item_code=%s""", args['item_code'], as_dict=1):
total_qty += flt(d.actual_qty)
total_value += flt(d.stock_value)
if total_qty:
valuation_rate = total_value / total_qty
if valuation_rate <= 0:
last_valuation_rate = frappe.db.sql("""select valuation_rate
from `tabStock Ledger Entry`
where item_code = %s and ifnull(valuation_rate, 0) > 0
order by posting_date desc, posting_time desc, name desc limit 1""", args['item_code'])
valuation_rate = flt(last_valuation_rate[0][0]) if last_valuation_rate else 0
return valuation_rate
def manage_default_bom(self):
""" Uncheck others if current one is selected as default,
update default bom in item master
"""
if self.is_default and self.is_active:
from frappe.model.utils import set_default
set_default(self, "item")
item = frappe.get_doc("Item", self.item)
if item.default_bom != self.name:
item.default_bom = self.name
item.save()
else:
frappe.db.set(self, "is_default", 0)
item = frappe.get_doc("Item", self.item)
if item.default_bom == self.name:
item.default_bom = None
item.save()
def clear_operations(self):
if not self.with_operations:
self.set('operations', [])
def validate_main_item(self):
""" Validate main FG item"""
item = self.get_item_det(self.item)
if not item:
frappe.throw(_("Item {0} does not exist in the system or has expired").format(self.item))
else:
ret = frappe.db.get_value("Item", self.item, ["description", "stock_uom", "item_name"])
self.description = ret[0]
self.uom = ret[1]
self.item_name= ret[2]
def validate_materials(self):
""" Validate raw material entries """
if not self.get('items'):
frappe.throw(_("Raw Materials cannot be blank."))
check_list = []
for m in self.get('items'):
if m.bom_no:
validate_bom_no(m.item_code, m.bom_no)
if flt(m.qty) <= 0:
frappe.throw(_("Quantity required for Item {0} in row {1}").format(m.item_code, m.idx))
check_list.append(cstr(m.item_code))
unique_chk_list = set(check_list)
if len(unique_chk_list) != len(check_list):
frappe.throw(_("Same item has been entered multiple times."))
def check_recursion(self):
""" Check whether recursion occurs in any bom"""
check_list = [['parent', 'bom_no', 'parent'], ['bom_no', 'parent', 'child']]
for d in check_list:
bom_list, count = [self.name], 0
while (len(bom_list) > count ):
boms = frappe.db.sql(" select %s from `tabBOM Item` where %s = %s " %
(d[0], d[1], '%s'), cstr(bom_list[count]))
count = count + 1
for b in boms:
if b[0] == self.name:
frappe.throw(_("BOM recursion: {0} cannot be parent or child of {2}").format(b[0], self.name))
if b[0]:
bom_list.append(b[0])
def update_cost_and_exploded_items(self, bom_list=[]):
bom_list = self.traverse_tree(bom_list)
for bom in bom_list:
bom_obj = frappe.get_doc("BOM", bom)
bom_obj.on_update()
return bom_list
def traverse_tree(self, bom_list=[]):
def _get_children(bom_no):
return [cstr(d[0]) for d in frappe.db.sql("""select bom_no from `tabBOM Item`
where parent = %s and ifnull(bom_no, '') != ''""", bom_no)]
count = 0
if self.name not in bom_list:
bom_list.append(self.name)
while(count < len(bom_list)):
for child_bom in _get_children(bom_list[count]):
if child_bom not in bom_list:
bom_list.append(child_bom)
count += 1
bom_list.reverse()
return bom_list
def calculate_cost(self):
"""Calculate bom totals"""
self.calculate_op_cost()
self.calculate_rm_cost()
self.total_cost = self.operating_cost + self.raw_material_cost
def calculate_op_cost(self):
"""Update workstation rate and calculates totals"""
self.operating_cost = 0
for d in self.get('operations'):
if d.workstation:
if not d.hour_rate:
d.hour_rate = flt(frappe.db.get_value("Workstation", d.workstation, "hour_rate"))
if d.hour_rate and d.time_in_mins:
d.operating_cost = flt(d.hour_rate) * flt(d.time_in_mins) / 60.0
self.operating_cost += flt(d.operating_cost)
def calculate_rm_cost(self):
"""Fetch RM rate as per today's valuation rate and calculate totals"""
total_rm_cost = 0
for d in self.get('items'):
if d.bom_no:
d.rate = self.get_bom_unitcost(d.bom_no)
d.amount = flt(d.rate, self.precision("rate", d)) * flt(d.qty, self.precision("qty", d))
d.qty_consumed_per_unit = flt(d.qty, self.precision("qty", d)) / flt(self.quantity, self.precision("quantity"))
total_rm_cost += d.amount
self.raw_material_cost = total_rm_cost
def update_exploded_items(self):
""" Update Flat BOM, following will be correct data"""
self.get_exploded_items()
self.add_exploded_items()
def get_exploded_items(self):
""" Get all raw materials including items from child bom"""
self.cur_exploded_items = {}
for d in self.get('items'):
if d.bom_no:
self.get_child_exploded_items(d.bom_no, d.qty)
else:
self.add_to_cur_exploded_items(frappe._dict({
'item_code' : d.item_code,
'item_name' : d.item_name,
'description' : d.description,
'image' : d.image,
'stock_uom' : d.stock_uom,
'qty' : flt(d.qty),
'rate' : flt(d.rate),
}))
def add_to_cur_exploded_items(self, args):
if self.cur_exploded_items.get(args.item_code):
self.cur_exploded_items[args.item_code]["qty"] += args.qty
else:
self.cur_exploded_items[args.item_code] = args
def get_child_exploded_items(self, bom_no, qty):
""" Add all items from Flat BOM of child BOM"""
# Did not use qty_consumed_per_unit in the query, as it leads to rounding loss
child_fb_items = frappe.db.sql("""select bom_item.item_code, bom_item.item_name, bom_item.description,
bom_item.stock_uom, bom_item.qty, bom_item.rate,
ifnull(bom_item.qty, 0 ) / ifnull(bom.quantity, 1) as qty_consumed_per_unit
from `tabBOM Explosion Item` bom_item, tabBOM bom
where bom_item.parent = bom.name and bom.name = %s and bom.docstatus = 1""", bom_no, as_dict = 1)
for d in child_fb_items:
self.add_to_cur_exploded_items(frappe._dict({
'item_code' : d['item_code'],
'item_name' : d['item_name'],
'description' : d['description'],
'stock_uom' : d['stock_uom'],
'qty' : d['qty_consumed_per_unit']*qty,
'rate' : flt(d['rate']),
}))
def add_exploded_items(self):
"Add items to Flat BOM table"
frappe.db.sql("""delete from `tabBOM Explosion Item` where parent=%s""", self.name)
self.set('exploded_items', [])
for d in sorted(self.cur_exploded_items, key=itemgetter(0)):
ch = self.append('exploded_items', {})
for i in self.cur_exploded_items[d].keys():
ch.set(i, self.cur_exploded_items[d][i])
ch.amount = flt(ch.qty) * flt(ch.rate)
ch.qty_consumed_per_unit = flt(ch.qty) / flt(self.quantity)
ch.docstatus = self.docstatus
ch.db_insert()
def validate_bom_links(self):
if not self.is_active:
act_pbom = frappe.db.sql("""select distinct bom_item.parent from `tabBOM Item` bom_item
where bom_item.bom_no = %s and bom_item.docstatus = 1
and exists (select * from `tabBOM` where name = bom_item.parent
and docstatus = 1 and is_active = 1)""", self.name)
if act_pbom and act_pbom[0][0]:
frappe.throw(_("Cannot deactivate or cancel BOM as it is linked with other BOMs"))
def validate_operations(self):
if self.with_operations and not self.get('operations'):
frappe.throw(_("Operations cannot be left blank."))
def get_bom_items_as_dict(bom, company, qty=1, fetch_exploded=1):
item_dict = {}
# Did not use qty_consumed_per_unit in the query, as it leads to rounding loss
query = """select
bom_item.item_code,
item.item_name,
sum(ifnull(bom_item.qty, 0)/ifnull(bom.quantity, 1)) * %(qty)s as qty,
item.description,
item.image,
item.stock_uom,
item.default_warehouse,
item.expense_account as expense_account,
item.buying_cost_center as cost_center
from
`tab{table}` bom_item, `tabBOM` bom, `tabItem` item
where
bom_item.parent = bom.name
and bom_item.docstatus < 2
and bom_item.parent = %(bom)s
and item.name = bom_item.item_code
and is_stock_item = 1
{conditions}
group by item_code, stock_uom"""
if fetch_exploded:
query = query.format(table="BOM Explosion Item",
conditions="""and item.is_sub_contracted_item = 0""")
items = frappe.db.sql(query, { "qty": qty, "bom": bom }, as_dict=True)
else:
query = query.format(table="BOM Item", conditions="")
items = frappe.db.sql(query, { "qty": qty, "bom": bom }, as_dict=True)
# make unique
for item in items:
if item_dict.has_key(item.item_code):
item_dict[item.item_code]["qty"] += flt(item.qty)
else:
item_dict[item.item_code] = item
for item, item_details in item_dict.items():
for d in [["Account", "expense_account", "default_expense_account"],
["Cost Center", "cost_center", "cost_center"], ["Warehouse", "default_warehouse", ""]]:
company_in_record = frappe.db.get_value(d[0], item_details.get(d[1]), "company")
if not item_details.get(d[1]) or (company_in_record and company != company_in_record):
item_dict[item][d[1]] = frappe.db.get_value("Company", company, d[2]) if d[2] else None
return item_dict
@frappe.whitelist()
def get_bom_items(bom, company, qty=1, fetch_exploded=1):
items = get_bom_items_as_dict(bom, company, qty, fetch_exploded).values()
items.sort(lambda a, b: a.item_code > b.item_code and 1 or -1)
return items
def validate_bom_no(item, bom_no):
"""Validate BOM No of sub-contracted items"""
bom = frappe.get_doc("BOM", bom_no)
if not bom.is_active:
frappe.throw(_("BOM {0} must be active").format(bom_no))
if bom.docstatus != 1:
if not getattr(frappe.flags, "in_test", False):
frappe.throw(_("BOM {0} must be submitted").format(bom_no))
if item and not (bom.item.lower() == item.lower() or \
bom.item.lower() == cstr(frappe.db.get_value("Item", item, "variant_of")).lower()):
frappe.throw(_("BOM {0} does not belong to Item {1}").format(bom_no, item))
|
agpl-3.0
|
svenstaro/ansible
|
lib/ansible/modules/network/f5/bigip_device_dns.py
|
78
|
12525
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2016 F5 Networks Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: bigip_device_dns
short_description: Manage BIG-IP device DNS settings
description:
- Manage BIG-IP device DNS settings
version_added: "2.2"
options:
cache:
description:
- Specifies whether the system caches DNS lookups or performs the
operation each time a lookup is needed. Please note that this applies
only to Access Policy Manager features, such as ACLs, web application
rewrites, and authentication.
required: false
default: disable
choices:
- enable
- disable
name_servers:
description:
      - A list of name servers that the system uses to validate DNS lookups
forwarders:
description:
- A list of BIND servers that the system can use to perform DNS lookups
search:
description:
- A list of domains that the system searches for local domain lookups,
to resolve local host names.
ip_version:
description:
- Specifies whether the DNS specifies IP addresses using IPv4 or IPv6.
required: false
choices:
- 4
- 6
state:
description:
      - The state of the DNS settings on the system. When C(present), ensures
        that the given values are present in the configuration; when C(absent),
        removes them from it.
required: false
default: present
choices:
- absent
- present
notes:
- Requires the f5-sdk Python package on the host. This is as easy as pip
    install f5-sdk
extends_documentation_fragment: f5
requirements:
- f5-sdk
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = '''
- name: Set the DNS settings on the BIG-IP
bigip_device_dns:
name_servers:
- 208.67.222.222
- 208.67.220.220
search:
- localdomain
- lab.local
state: present
password: "secret"
server: "lb.mydomain.com"
user: "admin"
validate_certs: "no"
delegate_to: localhost
'''
RETURN = '''
cache:
description: The new value of the DNS caching
returned: changed
type: string
sample: "enabled"
name_servers:
description: List of name servers that were added or removed
returned: changed
type: list
sample: "['192.0.2.10', '172.17.12.10']"
forwarders:
description: List of forwarders that were added or removed
returned: changed
type: list
sample: "['192.0.2.10', '172.17.12.10']"
search:
description: List of search domains that were added or removed
returned: changed
type: list
sample: "['192.0.2.10', '172.17.12.10']"
ip_version:
  description: IP version that DNS was configured to use when specifying IP addresses
returned: changed
type: int
sample: 4
'''
try:
from f5.bigip.contexts import TransactionContextManager
from f5.bigip import ManagementRoot
HAS_F5SDK = True
except ImportError:
HAS_F5SDK = False
REQUIRED = ['name_servers', 'search', 'forwarders', 'ip_version', 'cache']
CACHE = ['disable', 'enable']
IP = [4, 6]
class BigIpDeviceDns(object):
def __init__(self, *args, **kwargs):
if not HAS_F5SDK:
raise F5ModuleError("The python f5-sdk module is required")
# The params that change in the module
self.cparams = dict()
# Stores the params that are sent to the module
self.params = kwargs
self.api = ManagementRoot(kwargs['server'],
kwargs['user'],
kwargs['password'],
port=kwargs['server_port'])
def flush(self):
result = dict()
changed = False
state = self.params['state']
if self.dhcp_enabled():
raise F5ModuleError(
"DHCP on the mgmt interface must be disabled to make use of " +
"this module"
)
if state == 'absent':
changed = self.absent()
else:
changed = self.present()
result.update(**self.cparams)
result.update(dict(changed=changed))
return result
def dhcp_enabled(self):
r = self.api.tm.sys.dbs.db.load(name='dhclient.mgmt')
if r.value == 'enable':
return True
else:
return False
def read(self):
result = dict()
cache = self.api.tm.sys.dbs.db.load(name='dns.cache')
proxy = self.api.tm.sys.dbs.db.load(name='dns.proxy.__iter__')
dns = self.api.tm.sys.dns.load()
result['cache'] = str(cache.value)
result['forwarders'] = str(proxy.value).split(' ')
if hasattr(dns, 'nameServers'):
result['name_servers'] = dns.nameServers
if hasattr(dns, 'search'):
result['search'] = dns.search
if hasattr(dns, 'include') and 'options inet6' in dns.include:
result['ip_version'] = 6
else:
result['ip_version'] = 4
return result
def present(self):
params = dict()
current = self.read()
# Temporary locations to hold the changed params
update = dict(
dns=None,
forwarders=None,
cache=None
)
nameservers = self.params['name_servers']
search_domains = self.params['search']
ip_version = self.params['ip_version']
forwarders = self.params['forwarders']
cache = self.params['cache']
check_mode = self.params['check_mode']
if nameservers:
if 'name_servers' in current:
if nameservers != current['name_servers']:
params['nameServers'] = nameservers
else:
params['nameServers'] = nameservers
if search_domains:
if 'search' in current:
if search_domains != current['search']:
params['search'] = search_domains
else:
params['search'] = search_domains
if ip_version:
if 'ip_version' in current:
if ip_version != int(current['ip_version']):
if ip_version == 6:
params['include'] = 'options inet6'
elif ip_version == 4:
params['include'] = ''
else:
if ip_version == 6:
params['include'] = 'options inet6'
elif ip_version == 4:
params['include'] = ''
if params:
self.cparams.update(camel_dict_to_snake_dict(params))
if 'include' in params:
del self.cparams['include']
if params['include'] == '':
self.cparams['ip_version'] = 4
else:
self.cparams['ip_version'] = 6
update['dns'] = params.copy()
params = dict()
if forwarders:
if 'forwarders' in current:
if forwarders != current['forwarders']:
params['forwarders'] = forwarders
else:
params['forwarders'] = forwarders
if params:
self.cparams.update(camel_dict_to_snake_dict(params))
update['forwarders'] = ' '.join(params['forwarders'])
params = dict()
if cache:
if 'cache' in current:
if cache != current['cache']:
params['cache'] = cache
if params:
self.cparams.update(camel_dict_to_snake_dict(params))
update['cache'] = params['cache']
params = dict()
if self.cparams:
changed = True
if check_mode:
return changed
else:
return False
tx = self.api.tm.transactions.transaction
with TransactionContextManager(tx) as api:
cache = api.tm.sys.dbs.db.load(name='dns.cache')
proxy = api.tm.sys.dbs.db.load(name='dns.proxy.__iter__')
dns = api.tm.sys.dns.load()
# Empty values can be supplied, but you cannot supply the
# None value, so we check for that specifically
if update['cache'] is not None:
cache.update(value=update['cache'])
if update['forwarders'] is not None:
proxy.update(value=update['forwarders'])
if update['dns'] is not None:
dns.update(**update['dns'])
return changed
def absent(self):
params = dict()
current = self.read()
# Temporary locations to hold the changed params
update = dict(
dns=None,
forwarders=None
)
nameservers = self.params['name_servers']
search_domains = self.params['search']
forwarders = self.params['forwarders']
check_mode = self.params['check_mode']
if forwarders and 'forwarders' in current:
set_current = set(current['forwarders'])
set_new = set(forwarders)
forwarders = set_current - set_new
if forwarders != set_current:
forwarders = list(forwarders)
params['forwarders'] = ' '.join(forwarders)
if params:
changed = True
self.cparams.update(camel_dict_to_snake_dict(params))
update['forwarders'] = params['forwarders']
params = dict()
if nameservers and 'name_servers' in current:
set_current = set(current['name_servers'])
set_new = set(nameservers)
nameservers = set_current - set_new
if nameservers != set_current:
params['nameServers'] = list(nameservers)
if search_domains and 'search' in current:
set_current = set(current['search'])
set_new = set(search_domains)
search_domains = set_current - set_new
if search_domains != set_current:
params['search'] = list(search_domains)
if params:
changed = True
self.cparams.update(camel_dict_to_snake_dict(params))
update['dns'] = params.copy()
params = dict()
if not self.cparams:
return False
if check_mode:
return changed
tx = self.api.tm.transactions.transaction
with TransactionContextManager(tx) as api:
proxy = api.tm.sys.dbs.db.load(name='dns.proxy.__iter__')
dns = api.tm.sys.dns.load()
if update['forwarders'] is not None:
proxy.update(value=update['forwarders'])
if update['dns'] is not None:
dns.update(**update['dns'])
return changed
def main():
argument_spec = f5_argument_spec()
meta_args = dict(
cache=dict(required=False, choices=CACHE, default=None),
name_servers=dict(required=False, default=None, type='list'),
forwarders=dict(required=False, default=None, type='list'),
search=dict(required=False, default=None, type='list'),
ip_version=dict(required=False, default=None, choices=IP, type='int')
)
argument_spec.update(meta_args)
module = AnsibleModule(
argument_spec=argument_spec,
required_one_of=[REQUIRED],
supports_check_mode=True
)
try:
obj = BigIpDeviceDns(check_mode=module.check_mode, **module.params)
result = obj.flush()
module.exit_json(**result)
except F5ModuleError as e:
module.fail_json(msg=str(e))
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import camel_dict_to_snake_dict
from ansible.module_utils.f5_utils import *
if __name__ == '__main__':
main()
|
gpl-3.0
|
joopert/home-assistant
|
tests/components/homekit/test_type_thermostats.py
|
2
|
22963
|
"""Test different accessory types: Thermostats."""
from collections import namedtuple
from unittest.mock import patch
import pytest
from homeassistant.components.climate.const import (
ATTR_CURRENT_TEMPERATURE,
ATTR_HVAC_ACTION,
ATTR_HVAC_MODE,
ATTR_HVAC_MODES,
ATTR_MAX_TEMP,
ATTR_MIN_TEMP,
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
ATTR_TARGET_TEMP_STEP,
CURRENT_HVAC_COOL,
CURRENT_HVAC_HEAT,
CURRENT_HVAC_IDLE,
DEFAULT_MAX_TEMP,
DEFAULT_MIN_TEMP,
DOMAIN as DOMAIN_CLIMATE,
HVAC_MODE_COOL,
HVAC_MODE_HEAT,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_OFF,
)
from homeassistant.components.homekit.const import (
ATTR_VALUE,
DEFAULT_MAX_TEMP_WATER_HEATER,
DEFAULT_MIN_TEMP_WATER_HEATER,
PROP_MAX_VALUE,
PROP_MIN_STEP,
PROP_MIN_VALUE,
)
from homeassistant.components.water_heater import DOMAIN as DOMAIN_WATER_HEATER
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_SUPPORTED_FEATURES,
ATTR_TEMPERATURE,
CONF_TEMPERATURE_UNIT,
TEMP_FAHRENHEIT,
)
from tests.common import async_mock_service
from tests.components.homekit.common import patch_debounce
@pytest.fixture(scope="module")
def cls():
"""Patch debounce decorator during import of type_thermostats."""
patcher = patch_debounce()
patcher.start()
_import = __import__(
"homeassistant.components.homekit.type_thermostats",
fromlist=["Thermostat", "WaterHeater"],
)
patcher_tuple = namedtuple("Cls", ["thermostat", "water_heater"])
yield patcher_tuple(thermostat=_import.Thermostat, water_heater=_import.WaterHeater)
patcher.stop()
async def test_thermostat(hass, hk_driver, cls, events):
"""Test if accessory and HA are updated accordingly."""
entity_id = "climate.test"
hass.states.async_set(entity_id, HVAC_MODE_OFF)
await hass.async_block_till_done()
acc = cls.thermostat(hass, hk_driver, "Climate", entity_id, 2, None)
await hass.async_add_job(acc.run)
await hass.async_block_till_done()
assert acc.aid == 2
assert acc.category == 9 # Thermostat
assert acc.get_temperature_range() == (7.0, 35.0)
assert acc.char_current_heat_cool.value == 0
assert acc.char_target_heat_cool.value == 0
assert acc.char_current_temp.value == 21.0
assert acc.char_target_temp.value == 21.0
assert acc.char_display_units.value == 0
assert acc.char_cooling_thresh_temp is None
assert acc.char_heating_thresh_temp is None
assert acc.char_target_temp.properties[PROP_MAX_VALUE] == DEFAULT_MAX_TEMP
assert acc.char_target_temp.properties[PROP_MIN_VALUE] == DEFAULT_MIN_TEMP
assert acc.char_target_temp.properties[PROP_MIN_STEP] == 0.5
hass.states.async_set(
entity_id,
HVAC_MODE_HEAT,
{
ATTR_TEMPERATURE: 22.2,
ATTR_CURRENT_TEMPERATURE: 17.8,
ATTR_HVAC_ACTION: CURRENT_HVAC_HEAT,
},
)
await hass.async_block_till_done()
assert acc.char_target_temp.value == 22.2
assert acc.char_current_heat_cool.value == 1
assert acc.char_target_heat_cool.value == 1
assert acc.char_current_temp.value == 17.8
assert acc.char_display_units.value == 0
hass.states.async_set(
entity_id,
HVAC_MODE_HEAT,
{
ATTR_TEMPERATURE: 22.0,
ATTR_CURRENT_TEMPERATURE: 23.0,
ATTR_HVAC_ACTION: CURRENT_HVAC_IDLE,
},
)
await hass.async_block_till_done()
assert acc.char_target_temp.value == 22.0
assert acc.char_current_heat_cool.value == 0
assert acc.char_target_heat_cool.value == 1
assert acc.char_current_temp.value == 23.0
assert acc.char_display_units.value == 0
hass.states.async_set(
entity_id,
HVAC_MODE_COOL,
{
ATTR_TEMPERATURE: 20.0,
ATTR_CURRENT_TEMPERATURE: 25.0,
ATTR_HVAC_ACTION: CURRENT_HVAC_COOL,
},
)
await hass.async_block_till_done()
assert acc.char_target_temp.value == 20.0
assert acc.char_current_heat_cool.value == 2
assert acc.char_target_heat_cool.value == 2
assert acc.char_current_temp.value == 25.0
assert acc.char_display_units.value == 0
hass.states.async_set(
entity_id,
HVAC_MODE_COOL,
{
ATTR_TEMPERATURE: 20.0,
ATTR_CURRENT_TEMPERATURE: 19.0,
ATTR_HVAC_ACTION: CURRENT_HVAC_IDLE,
},
)
await hass.async_block_till_done()
assert acc.char_target_temp.value == 20.0
assert acc.char_current_heat_cool.value == 0
assert acc.char_target_heat_cool.value == 2
assert acc.char_current_temp.value == 19.0
assert acc.char_display_units.value == 0
hass.states.async_set(
entity_id,
HVAC_MODE_OFF,
{ATTR_TEMPERATURE: 22.0, ATTR_CURRENT_TEMPERATURE: 18.0},
)
await hass.async_block_till_done()
assert acc.char_target_temp.value == 22.0
assert acc.char_current_heat_cool.value == 0
assert acc.char_target_heat_cool.value == 0
assert acc.char_current_temp.value == 18.0
assert acc.char_display_units.value == 0
hass.states.async_set(
entity_id,
HVAC_MODE_HEAT_COOL,
{
ATTR_HVAC_MODES: [HVAC_MODE_HEAT, HVAC_MODE_COOL],
ATTR_TEMPERATURE: 22.0,
ATTR_CURRENT_TEMPERATURE: 18.0,
ATTR_HVAC_ACTION: CURRENT_HVAC_HEAT,
},
)
await hass.async_block_till_done()
assert acc.char_target_temp.value == 22.0
assert acc.char_current_heat_cool.value == 1
assert acc.char_target_heat_cool.value == 3
assert acc.char_current_temp.value == 18.0
assert acc.char_display_units.value == 0
hass.states.async_set(
entity_id,
HVAC_MODE_HEAT_COOL,
{
ATTR_HVAC_MODES: [HVAC_MODE_HEAT, HVAC_MODE_COOL],
ATTR_TEMPERATURE: 22.0,
ATTR_CURRENT_TEMPERATURE: 25.0,
ATTR_HVAC_ACTION: CURRENT_HVAC_COOL,
},
)
await hass.async_block_till_done()
assert acc.char_target_temp.value == 22.0
assert acc.char_current_heat_cool.value == 2
assert acc.char_target_heat_cool.value == 3
assert acc.char_current_temp.value == 25.0
assert acc.char_display_units.value == 0
hass.states.async_set(
entity_id,
HVAC_MODE_HEAT_COOL,
{
ATTR_HVAC_MODES: [HVAC_MODE_HEAT, HVAC_MODE_COOL],
ATTR_TEMPERATURE: 22.0,
ATTR_CURRENT_TEMPERATURE: 22.0,
ATTR_HVAC_ACTION: CURRENT_HVAC_IDLE,
},
)
await hass.async_block_till_done()
assert acc.char_target_temp.value == 22.0
assert acc.char_current_heat_cool.value == 0
assert acc.char_target_heat_cool.value == 3
assert acc.char_current_temp.value == 22.0
assert acc.char_display_units.value == 0
# Set from HomeKit
call_set_temperature = async_mock_service(hass, DOMAIN_CLIMATE, "set_temperature")
call_set_hvac_mode = async_mock_service(hass, DOMAIN_CLIMATE, "set_hvac_mode")
await hass.async_add_job(acc.char_target_temp.client_update_value, 19.0)
await hass.async_block_till_done()
assert call_set_temperature
assert call_set_temperature[0].data[ATTR_ENTITY_ID] == entity_id
assert call_set_temperature[0].data[ATTR_TEMPERATURE] == 19.0
assert acc.char_target_temp.value == 19.0
assert len(events) == 1
assert events[-1].data[ATTR_VALUE] == "19.0°C"
await hass.async_add_job(acc.char_target_heat_cool.client_update_value, 1)
await hass.async_block_till_done()
assert call_set_hvac_mode
assert call_set_hvac_mode[0].data[ATTR_ENTITY_ID] == entity_id
assert call_set_hvac_mode[0].data[ATTR_HVAC_MODE] == HVAC_MODE_HEAT
assert acc.char_target_heat_cool.value == 1
assert len(events) == 2
assert events[-1].data[ATTR_VALUE] == HVAC_MODE_HEAT
async def test_thermostat_auto(hass, hk_driver, cls, events):
"""Test if accessory and HA are updated accordingly."""
entity_id = "climate.test"
# support_auto = True
hass.states.async_set(entity_id, HVAC_MODE_OFF, {ATTR_SUPPORTED_FEATURES: 6})
await hass.async_block_till_done()
acc = cls.thermostat(hass, hk_driver, "Climate", entity_id, 2, None)
await hass.async_add_job(acc.run)
await hass.async_block_till_done()
assert acc.char_cooling_thresh_temp.value == 23.0
assert acc.char_heating_thresh_temp.value == 19.0
assert acc.char_cooling_thresh_temp.properties[PROP_MAX_VALUE] == DEFAULT_MAX_TEMP
assert acc.char_cooling_thresh_temp.properties[PROP_MIN_VALUE] == DEFAULT_MIN_TEMP
assert acc.char_cooling_thresh_temp.properties[PROP_MIN_STEP] == 0.5
assert acc.char_heating_thresh_temp.properties[PROP_MAX_VALUE] == DEFAULT_MAX_TEMP
assert acc.char_heating_thresh_temp.properties[PROP_MIN_VALUE] == DEFAULT_MIN_TEMP
assert acc.char_heating_thresh_temp.properties[PROP_MIN_STEP] == 0.5
hass.states.async_set(
entity_id,
HVAC_MODE_HEAT_COOL,
{
ATTR_HVAC_MODE: HVAC_MODE_HEAT_COOL,
ATTR_TARGET_TEMP_HIGH: 22.0,
ATTR_TARGET_TEMP_LOW: 20.0,
ATTR_CURRENT_TEMPERATURE: 18.0,
ATTR_HVAC_ACTION: CURRENT_HVAC_HEAT,
},
)
await hass.async_block_till_done()
assert acc.char_heating_thresh_temp.value == 20.0
assert acc.char_cooling_thresh_temp.value == 22.0
assert acc.char_current_heat_cool.value == 1
assert acc.char_target_heat_cool.value == 3
assert acc.char_current_temp.value == 18.0
assert acc.char_display_units.value == 0
hass.states.async_set(
entity_id,
HVAC_MODE_HEAT_COOL,
{
ATTR_HVAC_MODE: HVAC_MODE_HEAT_COOL,
ATTR_TARGET_TEMP_HIGH: 23.0,
ATTR_TARGET_TEMP_LOW: 19.0,
ATTR_CURRENT_TEMPERATURE: 24.0,
ATTR_HVAC_ACTION: CURRENT_HVAC_COOL,
},
)
await hass.async_block_till_done()
assert acc.char_heating_thresh_temp.value == 19.0
assert acc.char_cooling_thresh_temp.value == 23.0
assert acc.char_current_heat_cool.value == 2
assert acc.char_target_heat_cool.value == 3
assert acc.char_current_temp.value == 24.0
assert acc.char_display_units.value == 0
hass.states.async_set(
entity_id,
HVAC_MODE_HEAT_COOL,
{
ATTR_HVAC_MODE: HVAC_MODE_HEAT_COOL,
ATTR_TARGET_TEMP_HIGH: 23.0,
ATTR_TARGET_TEMP_LOW: 19.0,
ATTR_CURRENT_TEMPERATURE: 21.0,
ATTR_HVAC_ACTION: CURRENT_HVAC_IDLE,
},
)
await hass.async_block_till_done()
assert acc.char_heating_thresh_temp.value == 19.0
assert acc.char_cooling_thresh_temp.value == 23.0
assert acc.char_current_heat_cool.value == 0
assert acc.char_target_heat_cool.value == 3
assert acc.char_current_temp.value == 21.0
assert acc.char_display_units.value == 0
# Set from HomeKit
call_set_temperature = async_mock_service(hass, DOMAIN_CLIMATE, "set_temperature")
await hass.async_add_job(acc.char_heating_thresh_temp.client_update_value, 20.0)
await hass.async_block_till_done()
assert call_set_temperature[0]
assert call_set_temperature[0].data[ATTR_ENTITY_ID] == entity_id
assert call_set_temperature[0].data[ATTR_TARGET_TEMP_LOW] == 20.0
assert acc.char_heating_thresh_temp.value == 20.0
assert len(events) == 1
assert events[-1].data[ATTR_VALUE] == "heating threshold 20.0°C"
await hass.async_add_job(acc.char_cooling_thresh_temp.client_update_value, 25.0)
await hass.async_block_till_done()
assert call_set_temperature[1]
assert call_set_temperature[1].data[ATTR_ENTITY_ID] == entity_id
assert call_set_temperature[1].data[ATTR_TARGET_TEMP_HIGH] == 25.0
assert acc.char_cooling_thresh_temp.value == 25.0
assert len(events) == 2
assert events[-1].data[ATTR_VALUE] == "cooling threshold 25.0°C"
async def test_thermostat_power_state(hass, hk_driver, cls, events):
"""Test if accessory and HA are updated accordingly."""
entity_id = "climate.test"
# SUPPORT_ON_OFF = True
hass.states.async_set(
entity_id,
HVAC_MODE_HEAT,
{
ATTR_SUPPORTED_FEATURES: 4096,
ATTR_HVAC_MODE: HVAC_MODE_HEAT,
ATTR_TEMPERATURE: 23.0,
ATTR_CURRENT_TEMPERATURE: 18.0,
ATTR_HVAC_ACTION: CURRENT_HVAC_HEAT,
},
)
await hass.async_block_till_done()
acc = cls.thermostat(hass, hk_driver, "Climate", entity_id, 2, None)
await hass.async_add_job(acc.run)
await hass.async_block_till_done()
assert acc.char_current_heat_cool.value == 1
assert acc.char_target_heat_cool.value == 1
hass.states.async_set(
entity_id,
HVAC_MODE_OFF,
{
ATTR_HVAC_MODE: HVAC_MODE_HEAT,
ATTR_TEMPERATURE: 23.0,
ATTR_CURRENT_TEMPERATURE: 18.0,
ATTR_HVAC_ACTION: CURRENT_HVAC_IDLE,
},
)
await hass.async_block_till_done()
assert acc.char_current_heat_cool.value == 0
assert acc.char_target_heat_cool.value == 0
hass.states.async_set(
entity_id,
HVAC_MODE_OFF,
{
ATTR_HVAC_MODE: HVAC_MODE_OFF,
ATTR_TEMPERATURE: 23.0,
ATTR_CURRENT_TEMPERATURE: 18.0,
ATTR_HVAC_ACTION: CURRENT_HVAC_IDLE,
},
)
await hass.async_block_till_done()
assert acc.char_current_heat_cool.value == 0
assert acc.char_target_heat_cool.value == 0
# Set from HomeKit
call_set_hvac_mode = async_mock_service(hass, DOMAIN_CLIMATE, "set_hvac_mode")
await hass.async_add_job(acc.char_target_heat_cool.client_update_value, 1)
await hass.async_block_till_done()
assert call_set_hvac_mode
assert call_set_hvac_mode[0].data[ATTR_ENTITY_ID] == entity_id
assert call_set_hvac_mode[0].data[ATTR_HVAC_MODE] == HVAC_MODE_HEAT
assert acc.char_target_heat_cool.value == 1
assert len(events) == 1
assert events[-1].data[ATTR_VALUE] == HVAC_MODE_HEAT
await hass.async_add_job(acc.char_target_heat_cool.client_update_value, 0)
await hass.async_block_till_done()
assert acc.char_target_heat_cool.value == 0
assert len(events) == 2
assert events[-1].data[ATTR_VALUE] == HVAC_MODE_OFF
async def test_thermostat_fahrenheit(hass, hk_driver, cls, events):
"""Test if accessory and HA are updated accordingly."""
entity_id = "climate.test"
# support_ = True
hass.states.async_set(entity_id, HVAC_MODE_OFF, {ATTR_SUPPORTED_FEATURES: 6})
await hass.async_block_till_done()
with patch.object(hass.config.units, CONF_TEMPERATURE_UNIT, new=TEMP_FAHRENHEIT):
acc = cls.thermostat(hass, hk_driver, "Climate", entity_id, 2, None)
await hass.async_add_job(acc.run)
await hass.async_block_till_done()
hass.states.async_set(
entity_id,
HVAC_MODE_HEAT_COOL,
{
ATTR_HVAC_MODE: HVAC_MODE_HEAT_COOL,
ATTR_TARGET_TEMP_HIGH: 75.2,
ATTR_TARGET_TEMP_LOW: 68.1,
ATTR_TEMPERATURE: 71.6,
ATTR_CURRENT_TEMPERATURE: 73.4,
},
)
await hass.async_block_till_done()
assert acc.get_temperature_range() == (7.0, 35.0)
assert acc.char_heating_thresh_temp.value == 20.1
assert acc.char_cooling_thresh_temp.value == 24.0
assert acc.char_current_temp.value == 23.0
assert acc.char_target_temp.value == 22.0
assert acc.char_display_units.value == 1
# Set from HomeKit
call_set_temperature = async_mock_service(hass, DOMAIN_CLIMATE, "set_temperature")
await hass.async_add_job(acc.char_cooling_thresh_temp.client_update_value, 23)
await hass.async_block_till_done()
assert call_set_temperature[0]
assert call_set_temperature[0].data[ATTR_ENTITY_ID] == entity_id
assert call_set_temperature[0].data[ATTR_TARGET_TEMP_HIGH] == 73.5
assert call_set_temperature[0].data[ATTR_TARGET_TEMP_LOW] == 68
assert len(events) == 1
assert events[-1].data[ATTR_VALUE] == "cooling threshold 73.5°F"
await hass.async_add_job(acc.char_heating_thresh_temp.client_update_value, 22)
await hass.async_block_till_done()
assert call_set_temperature[1]
assert call_set_temperature[1].data[ATTR_ENTITY_ID] == entity_id
assert call_set_temperature[1].data[ATTR_TARGET_TEMP_HIGH] == 73.5
assert call_set_temperature[1].data[ATTR_TARGET_TEMP_LOW] == 71.5
assert len(events) == 2
assert events[-1].data[ATTR_VALUE] == "heating threshold 71.5°F"
await hass.async_add_job(acc.char_target_temp.client_update_value, 24.0)
await hass.async_block_till_done()
assert call_set_temperature[2]
assert call_set_temperature[2].data[ATTR_ENTITY_ID] == entity_id
assert call_set_temperature[2].data[ATTR_TEMPERATURE] == 75.0
assert len(events) == 3
assert events[-1].data[ATTR_VALUE] == "75.0°F"
async def test_thermostat_get_temperature_range(hass, hk_driver, cls):
"""Test if temperature range is evaluated correctly."""
entity_id = "climate.test"
hass.states.async_set(entity_id, HVAC_MODE_OFF)
await hass.async_block_till_done()
acc = cls.thermostat(hass, hk_driver, "Climate", entity_id, 2, None)
hass.states.async_set(
entity_id, HVAC_MODE_OFF, {ATTR_MIN_TEMP: 20, ATTR_MAX_TEMP: 25}
)
await hass.async_block_till_done()
assert acc.get_temperature_range() == (20, 25)
acc._unit = TEMP_FAHRENHEIT
hass.states.async_set(
entity_id, HVAC_MODE_OFF, {ATTR_MIN_TEMP: 60, ATTR_MAX_TEMP: 70}
)
await hass.async_block_till_done()
assert acc.get_temperature_range() == (15.5, 21.0)
async def test_thermostat_temperature_step_whole(hass, hk_driver, cls):
"""Test climate device with single digit precision."""
entity_id = "climate.test"
hass.states.async_set(entity_id, HVAC_MODE_OFF, {ATTR_TARGET_TEMP_STEP: 1})
await hass.async_block_till_done()
acc = cls.thermostat(hass, hk_driver, "Climate", entity_id, 2, None)
await hass.async_add_job(acc.run)
await hass.async_block_till_done()
assert acc.char_target_temp.properties[PROP_MIN_STEP] == 1.0
async def test_water_heater(hass, hk_driver, cls, events):
"""Test if accessory and HA are updated accordingly."""
entity_id = "water_heater.test"
hass.states.async_set(entity_id, HVAC_MODE_HEAT)
await hass.async_block_till_done()
acc = cls.water_heater(hass, hk_driver, "WaterHeater", entity_id, 2, None)
await hass.async_add_job(acc.run)
await hass.async_block_till_done()
assert acc.aid == 2
assert acc.category == 9 # Thermostat
assert acc.char_current_heat_cool.value == 1 # Heat
assert acc.char_target_heat_cool.value == 1 # Heat
assert acc.char_current_temp.value == 50.0
assert acc.char_target_temp.value == 50.0
assert acc.char_display_units.value == 0
assert (
acc.char_target_temp.properties[PROP_MAX_VALUE] == DEFAULT_MAX_TEMP_WATER_HEATER
)
assert (
acc.char_target_temp.properties[PROP_MIN_VALUE] == DEFAULT_MIN_TEMP_WATER_HEATER
)
assert acc.char_target_temp.properties[PROP_MIN_STEP] == 0.5
hass.states.async_set(
entity_id,
HVAC_MODE_HEAT,
{ATTR_HVAC_MODE: HVAC_MODE_HEAT, ATTR_TEMPERATURE: 56.0},
)
await hass.async_block_till_done()
assert acc.char_target_temp.value == 56.0
assert acc.char_current_temp.value == 56.0
assert acc.char_target_heat_cool.value == 1
assert acc.char_current_heat_cool.value == 1
assert acc.char_display_units.value == 0
hass.states.async_set(
entity_id, HVAC_MODE_HEAT_COOL, {ATTR_HVAC_MODE: HVAC_MODE_HEAT_COOL}
)
await hass.async_block_till_done()
assert acc.char_target_heat_cool.value == 1
assert acc.char_current_heat_cool.value == 1
# Set from HomeKit
call_set_temperature = async_mock_service(
hass, DOMAIN_WATER_HEATER, "set_temperature"
)
await hass.async_add_job(acc.char_target_temp.client_update_value, 52.0)
await hass.async_block_till_done()
assert call_set_temperature
assert call_set_temperature[0].data[ATTR_ENTITY_ID] == entity_id
assert call_set_temperature[0].data[ATTR_TEMPERATURE] == 52.0
assert acc.char_target_temp.value == 52.0
assert len(events) == 1
assert events[-1].data[ATTR_VALUE] == "52.0°C"
await hass.async_add_job(acc.char_target_heat_cool.client_update_value, 0)
await hass.async_block_till_done()
assert acc.char_target_heat_cool.value == 1
await hass.async_add_job(acc.char_target_heat_cool.client_update_value, 2)
await hass.async_block_till_done()
assert acc.char_target_heat_cool.value == 1
await hass.async_add_job(acc.char_target_heat_cool.client_update_value, 3)
await hass.async_block_till_done()
assert acc.char_target_heat_cool.value == 1
async def test_water_heater_fahrenheit(hass, hk_driver, cls, events):
"""Test if accessory and HA are update accordingly."""
entity_id = "water_heater.test"
hass.states.async_set(entity_id, HVAC_MODE_HEAT)
await hass.async_block_till_done()
with patch.object(hass.config.units, CONF_TEMPERATURE_UNIT, new=TEMP_FAHRENHEIT):
acc = cls.water_heater(hass, hk_driver, "WaterHeater", entity_id, 2, None)
await hass.async_add_job(acc.run)
await hass.async_block_till_done()
hass.states.async_set(entity_id, HVAC_MODE_HEAT, {ATTR_TEMPERATURE: 131})
await hass.async_block_till_done()
assert acc.char_target_temp.value == 55.0
assert acc.char_current_temp.value == 55.0
assert acc.char_display_units.value == 1
# Set from HomeKit
call_set_temperature = async_mock_service(
hass, DOMAIN_WATER_HEATER, "set_temperature"
)
await hass.async_add_job(acc.char_target_temp.client_update_value, 60)
await hass.async_block_till_done()
assert call_set_temperature
assert call_set_temperature[0].data[ATTR_ENTITY_ID] == entity_id
assert call_set_temperature[0].data[ATTR_TEMPERATURE] == 140.0
assert acc.char_target_temp.value == 60.0
assert len(events) == 1
assert events[-1].data[ATTR_VALUE] == "140.0°F"
async def test_water_heater_get_temperature_range(hass, hk_driver, cls):
"""Test if temperature range is evaluated correctly."""
entity_id = "water_heater.test"
hass.states.async_set(entity_id, HVAC_MODE_HEAT)
await hass.async_block_till_done()
acc = cls.thermostat(hass, hk_driver, "WaterHeater", entity_id, 2, None)
hass.states.async_set(
entity_id, HVAC_MODE_HEAT, {ATTR_MIN_TEMP: 20, ATTR_MAX_TEMP: 25}
)
await hass.async_block_till_done()
assert acc.get_temperature_range() == (20, 25)
acc._unit = TEMP_FAHRENHEIT
hass.states.async_set(
entity_id, HVAC_MODE_OFF, {ATTR_MIN_TEMP: 60, ATTR_MAX_TEMP: 70}
)
await hass.async_block_till_done()
assert acc.get_temperature_range() == (15.5, 21.0)
|
apache-2.0
|
mmnelemane/neutron
|
neutron/tests/unit/notifiers/test_nova.py
|
18
|
14302
|
# Copyright 2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from novaclient import exceptions as nova_exceptions
from oslo_utils import uuidutils
from sqlalchemy.orm import attributes as sql_attr
from oslo_config import cfg
from neutron.common import constants
from neutron.db import models_v2
from neutron.notifiers import nova
from neutron.tests import base
class TestNovaNotify(base.BaseTestCase):
def setUp(self, plugin=None):
super(TestNovaNotify, self).setUp()
class FakePlugin(object):
def get_port(self, context, port_id):
device_id = '32102d7b-1cf4-404d-b50a-97aae1f55f87'
return {'device_id': device_id,
'device_owner': 'compute:None'}
self.nova_notifier = nova.Notifier()
self.nova_notifier._plugin_ref = FakePlugin()
def test_notify_port_status_all_values(self):
states = [constants.PORT_STATUS_ACTIVE, constants.PORT_STATUS_DOWN,
constants.PORT_STATUS_ERROR, constants.PORT_STATUS_BUILD,
sql_attr.NO_VALUE]
device_id = '32102d7b-1cf4-404d-b50a-97aae1f55f87'
# test all combinations
for previous_port_status in states:
for current_port_status in states:
port = models_v2.Port(id='port-uuid', device_id=device_id,
device_owner="compute:",
status=current_port_status)
self._record_port_status_changed_helper(current_port_status,
previous_port_status,
port)
def test_port_without_uuid_device_id_no_notify(self):
port = models_v2.Port(id='port-uuid', device_id='compute_probe:',
device_owner='compute:',
status=constants.PORT_STATUS_ACTIVE)
self._record_port_status_changed_helper(constants.PORT_STATUS_ACTIVE,
sql_attr.NO_VALUE,
port)
def test_port_without_device_owner_no_notify(self):
device_id = '32102d7b-1cf4-404d-b50a-97aae1f55f87'
port = models_v2.Port(id='port-uuid', device_id=device_id,
status=constants.PORT_STATUS_ACTIVE)
self._record_port_status_changed_helper(constants.PORT_STATUS_ACTIVE,
sql_attr.NO_VALUE,
port)
def test_port_without_device_id_no_notify(self):
port = models_v2.Port(id='port-uuid', device_owner="network:dhcp",
status=constants.PORT_STATUS_ACTIVE)
self._record_port_status_changed_helper(constants.PORT_STATUS_ACTIVE,
sql_attr.NO_VALUE,
port)
def test_port_without_id_no_notify(self):
device_id = '32102d7b-1cf4-404d-b50a-97aae1f55f87'
port = models_v2.Port(device_id=device_id,
device_owner="compute:",
status=constants.PORT_STATUS_ACTIVE)
self._record_port_status_changed_helper(constants.PORT_STATUS_ACTIVE,
sql_attr.NO_VALUE,
port)
def test_non_compute_instances_no_notify(self):
device_id = '32102d7b-1cf4-404d-b50a-97aae1f55f87'
port = models_v2.Port(id='port-uuid', device_id=device_id,
device_owner="network:dhcp",
status=constants.PORT_STATUS_ACTIVE)
self._record_port_status_changed_helper(constants.PORT_STATUS_ACTIVE,
sql_attr.NO_VALUE,
port)
def _record_port_status_changed_helper(self, current_port_status,
previous_port_status, port):
if not (port.device_id and port.id and port.device_owner and
port.device_owner.startswith('compute:') and
uuidutils.is_uuid_like(port.device_id)):
return
if (previous_port_status == constants.PORT_STATUS_ACTIVE and
current_port_status == constants.PORT_STATUS_DOWN):
event_name = nova.VIF_UNPLUGGED
elif (previous_port_status in [sql_attr.NO_VALUE,
constants.PORT_STATUS_DOWN,
constants.PORT_STATUS_BUILD]
and current_port_status in [constants.PORT_STATUS_ACTIVE,
constants.PORT_STATUS_ERROR]):
event_name = nova.VIF_PLUGGED
else:
return
status = nova.NEUTRON_NOVA_EVENT_STATUS_MAP.get(current_port_status)
self.nova_notifier.record_port_status_changed(port,
current_port_status,
previous_port_status,
None)
event = {'server_uuid': port.device_id, 'status': status,
'name': event_name, 'tag': 'port-uuid'}
self.assertEqual(event, port._notify_event)
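    # Illustrative note (not part of the original test module): the helper above
    # mirrors the notifier's own mapping from port status transitions to Nova
    # server external events. Assuming the constants keep their current meaning,
    # the expectation is roughly:
    #
    #   ACTIVE -> DOWN                        => nova.VIF_UNPLUGGED
    #   NO_VALUE/DOWN/BUILD -> ACTIVE/ERROR   => nova.VIF_PLUGGED
    #   any other transition                  => no event is recorded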
def test_update_fixed_ip_changed(self):
device_id = '32102d7b-1cf4-404d-b50a-97aae1f55f87'
returned_obj = {'port':
{'device_owner': u'compute:dfd',
'id': u'bee50827-bcee-4cc8-91c1-a27b0ce54222',
'device_id': device_id}}
expected_event = {'server_uuid': device_id,
'name': 'network-changed'}
event = self.nova_notifier.create_port_changed_event('update_port',
{}, returned_obj)
self.assertEqual(event, expected_event)
def test_create_floatingip_notify(self):
device_id = '32102d7b-1cf4-404d-b50a-97aae1f55f87'
returned_obj = {'floatingip':
{'port_id': u'bee50827-bcee-4cc8-91c1-a27b0ce54222'}}
expected_event = {'server_uuid': device_id,
'name': 'network-changed'}
event = self.nova_notifier.create_port_changed_event(
'create_floatingip', {}, returned_obj)
self.assertEqual(event, expected_event)
def test_create_floatingip_no_port_id_no_notify(self):
returned_obj = {'floatingip':
{'port_id': None}}
event = self.nova_notifier.create_port_changed_event(
'create_floatingip', {}, returned_obj)
        self.assertIsNone(event)
def test_delete_floatingip_notify(self):
device_id = '32102d7b-1cf4-404d-b50a-97aae1f55f87'
returned_obj = {'floatingip':
{'port_id': u'bee50827-bcee-4cc8-91c1-a27b0ce54222'}}
expected_event = {'server_uuid': device_id,
'name': 'network-changed'}
event = self.nova_notifier.create_port_changed_event(
'delete_floatingip', {}, returned_obj)
self.assertEqual(expected_event, event)
def test_delete_floatingip_no_port_id_no_notify(self):
returned_obj = {'floatingip':
{'port_id': None}}
event = self.nova_notifier.create_port_changed_event(
'delete_floatingip', {}, returned_obj)
self.assertEqual(event, None)
def test_associate_floatingip_notify(self):
device_id = '32102d7b-1cf4-404d-b50a-97aae1f55f87'
returned_obj = {'floatingip':
{'port_id': u'5a39def4-3d3f-473d-9ff4-8e90064b9cc1'}}
original_obj = {'port_id': None}
expected_event = {'server_uuid': device_id,
'name': 'network-changed'}
event = self.nova_notifier.create_port_changed_event(
'update_floatingip', original_obj, returned_obj)
self.assertEqual(expected_event, event)
def test_disassociate_floatingip_notify(self):
device_id = '32102d7b-1cf4-404d-b50a-97aae1f55f87'
returned_obj = {'floatingip': {'port_id': None}}
original_obj = {'port_id': '5a39def4-3d3f-473d-9ff4-8e90064b9cc1'}
expected_event = {'server_uuid': device_id,
'name': 'network-changed'}
event = self.nova_notifier.create_port_changed_event(
'update_floatingip', original_obj, returned_obj)
self.assertEqual(expected_event, event)
def test_no_notification_notify_nova_on_port_data_changes_false(self):
cfg.CONF.set_override('notify_nova_on_port_data_changes', False)
with mock.patch.object(self.nova_notifier,
'send_events') as send_events:
self.nova_notifier.send_network_change('update_floatingip',
{}, {})
            self.assertFalse(send_events.called)
def test_nova_send_events_returns_bad_list(self):
with mock.patch.object(
self.nova_notifier.nclient.server_external_events,
'create') as nclient_create:
nclient_create.return_value = 'i am a string!'
self.nova_notifier.send_events([])
    def test_nova_send_event_raises_404(self):
with mock.patch.object(
self.nova_notifier.nclient.server_external_events,
'create') as nclient_create:
nclient_create.side_effect = nova_exceptions.NotFound
self.nova_notifier.send_events([])
def test_nova_send_events_raises(self):
with mock.patch.object(
self.nova_notifier.nclient.server_external_events,
'create') as nclient_create:
nclient_create.side_effect = Exception
self.nova_notifier.send_events([])
def test_nova_send_events_returns_non_200(self):
device_id = '32102d7b-1cf4-404d-b50a-97aae1f55f87'
with mock.patch.object(
self.nova_notifier.nclient.server_external_events,
'create') as nclient_create:
nclient_create.return_value = [{'code': 404,
'name': 'network-changed',
'server_uuid': device_id}]
self.nova_notifier.send_events(
[{'name': 'network-changed', 'server_uuid': device_id}])
def test_nova_send_events_return_200(self):
device_id = '32102d7b-1cf4-404d-b50a-97aae1f55f87'
with mock.patch.object(
self.nova_notifier.nclient.server_external_events,
'create') as nclient_create:
nclient_create.return_value = [{'code': 200,
'name': 'network-changed',
'server_uuid': device_id}]
self.nova_notifier.send_events(
[{'name': 'network-changed', 'server_uuid': device_id}])
def test_nova_send_events_multiple(self):
device_id = '32102d7b-1cf4-404d-b50a-97aae1f55f87'
with mock.patch.object(
self.nova_notifier.nclient.server_external_events,
'create') as nclient_create:
nclient_create.return_value = [{'code': 200,
'name': 'network-changed',
'server_uuid': device_id},
{'code': 200,
'name': 'network-changed',
'server_uuid': device_id}]
self.nova_notifier.send_events([
{'name': 'network-changed', 'server_uuid': device_id},
{'name': 'network-changed', 'server_uuid': device_id}])
def test_reassociate_floatingip_without_disassociate_event(self):
returned_obj = {'floatingip':
{'port_id': 'f5348a16-609a-4971-b0f0-4b8def5235fb'}}
original_obj = {'port_id': '5a39def4-3d3f-473d-9ff4-8e90064b9cc1'}
self.nova_notifier._waiting_to_send = True
self.nova_notifier.send_network_change(
'update_floatingip', original_obj, returned_obj)
self.assertEqual(
2, len(self.nova_notifier.batch_notifier.pending_events))
returned_obj_non = {'floatingip': {'port_id': None}}
event_dis = self.nova_notifier.create_port_changed_event(
'update_floatingip', original_obj, returned_obj_non)
event_assoc = self.nova_notifier.create_port_changed_event(
'update_floatingip', original_obj, returned_obj)
self.assertEqual(
self.nova_notifier.batch_notifier.pending_events[0], event_dis)
self.assertEqual(
self.nova_notifier.batch_notifier.pending_events[1], event_assoc)
def test_delete_port_notify(self):
device_id = '32102d7b-1cf4-404d-b50a-97aae1f55f87'
port_id = 'bee50827-bcee-4cc8-91c1-a27b0ce54222'
returned_obj = {'port':
{'device_owner': 'compute:dfd',
'id': port_id,
'device_id': device_id}}
expected_event = {'server_uuid': device_id,
'name': nova.VIF_DELETED,
'tag': port_id}
event = self.nova_notifier.create_port_changed_event('delete_port',
{}, returned_obj)
self.assertEqual(expected_event, event)
|
apache-2.0
|
switchboardOp/ansible
|
lib/ansible/playbook/taggable.py
|
62
|
3447
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import itertools
from ansible.errors import AnsibleError
from ansible.module_utils.six import string_types
from ansible.playbook.attribute import FieldAttribute
from ansible.template import Templar
class Taggable:
untagged = frozenset(['untagged'])
_tags = FieldAttribute(isa='list', default=[], listof=(string_types, int))
def __init__(self):
super(Taggable, self).__init__()
def _load_tags(self, attr, ds):
if isinstance(ds, list):
return ds
elif isinstance(ds, string_types):
value = ds.split(',')
if isinstance(value, list):
return [x.strip() for x in value]
else:
return [ds]
else:
raise AnsibleError('tags must be specified as a list', obj=ds)
def _get_attr_tags(self):
'''
Override for the 'tags' getattr fetcher, used from Base.
'''
tags = self._attributes['tags']
if tags is None:
tags = []
if hasattr(self, '_get_parent_attribute'):
tags = self._get_parent_attribute('tags', extend=True)
return tags
def evaluate_tags(self, only_tags, skip_tags, all_vars):
''' this checks if the current item should be executed depending on tag options '''
should_run = True
if self.tags:
templar = Templar(loader=self._loader, variables=all_vars)
tags = templar.template(self.tags)
if not isinstance(tags, list):
if tags.find(',') != -1:
tags = set(tags.split(','))
else:
tags = set([tags])
else:
tags = set([i for i, _ in itertools.groupby(tags)])
else:
# this makes isdisjoint work for untagged
tags = self.untagged
if only_tags:
should_run = False
if 'always' in tags or 'all' in only_tags:
should_run = True
elif not tags.isdisjoint(only_tags):
should_run = True
elif 'tagged' in only_tags and tags != self.untagged:
should_run = True
if should_run and skip_tags:
# Check for tags that we need to skip
if 'all' in skip_tags:
if 'always' not in tags or 'always' in skip_tags:
should_run = False
elif not tags.isdisjoint(skip_tags):
should_run = False
elif 'tagged' in skip_tags and tags != self.untagged:
should_run = False
return should_run
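# Illustrative sketch (not part of the original module): assuming a fully
# initialised task object tagged ['setup', 'debug'], evaluate_tags() resolves
# --tags/--skip-tags roughly like this:
#
#   task.evaluate_tags(only_tags=['setup'], skip_tags=[], all_vars={})       # True
#   task.evaluate_tags(only_tags=['deploy'], skip_tags=[], all_vars={})      # False
#   task.evaluate_tags(only_tags=[], skip_tags=['debug'], all_vars={})       # False
#   task.evaluate_tags(only_tags=['all'], skip_tags=['other'], all_vars={})  # True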
|
gpl-3.0
|
kurtdawg24/robotframework
|
utest/utils/test_markuputils.py
|
24
|
28370
|
import unittest
from robot.utils.asserts import assert_equals
from robot.utils.markuputils import html_escape, html_format, attribute_escape
from robot.utils.htmlformatters import TableFormatter
_format_table = TableFormatter()._format_table
def assert_escape_and_format(inp, exp_escape=None, exp_format=None):
if exp_escape is None:
exp_escape = str(inp)
if exp_format is None:
exp_format = exp_escape
exp_format = '<p>%s</p>' % exp_format.replace('\n', ' ')
escape = html_escape(inp)
format = html_format(inp)
assert_equals(escape, exp_escape,
'ESCAPE:\n%r =!\n%r' % (escape, exp_escape), values=False)
assert_equals(format, exp_format,
'FORMAT:\n%r =!\n%r' % (format, exp_format), values=False)
def assert_format(inp, exp=None, p=False):
exp = exp if exp is not None else inp
if p:
exp = '<p>%s</p>' % exp
assert_equals(html_format(inp), exp)
def assert_escape(inp, exp=None):
exp = exp if exp is not None else inp
assert_equals(html_escape(inp), exp)
class TestHtmlEscape(unittest.TestCase):
def test_no_changes(self):
for inp in ['', 'nothing to change']:
assert_escape(inp)
def test_newlines_and_paragraphs(self):
for inp in ['Text on first line.\nText on second line.',
'1 line\n2 line\n3 line\n4 line\n5 line\n',
'Para 1 line 1\nP1 L2\n\nP2 L1\nP2 L1\n\nP3 L1\nP3 L2',
'Multiple empty lines\n\n\n\n\nbetween these lines']:
assert_escape(inp)
class TestEntities(unittest.TestCase):
def test_entities(self):
        for char, entity in [('<','&lt;'), ('>','&gt;'), ('&','&amp;')]:
for inp, exp in [(char, entity),
('text %s' % char, 'text %s' % entity),
('-%s-%s-' % (char, char),
'-%s-%s-' % (entity, entity)),
('"%s&%s"' % (char, char),
'"%s&%s"' % (entity, entity))]:
assert_escape_and_format(inp, exp)
class TestUrlsToLinks(unittest.TestCase):
def test_not_urls(self):
for no_url in ['http no link', 'http:/no', '123://no',
'1a://no', 'http://', 'http:// no']:
assert_escape_and_format(no_url)
def test_simple_urls(self):
for link in ['http://robot.fi', 'https://r.fi/', 'FTP://x.y.z/p/f.txt',
'a23456://link', 'file:///c:/temp/xxx.yyy']:
exp = '<a href="%s">%s</a>' % (link, link)
assert_escape_and_format(link, exp)
for end in [',', '.', ';', ':', '!', '?', '...', '!?!', ' hello' ]:
assert_escape_and_format(link+end, exp+end)
assert_escape_and_format('xxx '+link+end, 'xxx '+exp+end)
for start, end in [('(',')'), ('[',']'), ('"','"'), ("'","'")]:
assert_escape_and_format(start+link+end, start+exp+end)
def test_complex_urls_and_surrounding_content(self):
for inp, exp in [
('hello http://link world',
'hello <a href="http://link">http://link</a> world'),
('multi\nhttp://link\nline',
'multi\n<a href="http://link">http://link</a>\nline'),
('http://link, ftp://link2.',
'<a href="http://link">http://link</a>, '
'<a href="ftp://link2">ftp://link2</a>.'),
('x (git+ssh://yy, z)',
'x (<a href="git+ssh://yy">git+ssh://yy</a>, z)'),
('(http://x.com/blah_(wikipedia)#cite-1)',
'(<a href="http://x.com/blah_(wikipedia)#cite-1">http://x.com/blah_(wikipedia)#cite-1</a>)'),
('x-yojimbo-item://6303,E4C1,6A6E, FOO',
'<a href="x-yojimbo-item://6303,E4C1,6A6E">x-yojimbo-item://6303,E4C1,6A6E</a>, FOO'),
('Hello http://one, ftp://kaksi/; "gopher://3.0"',
'Hello <a href="http://one">http://one</a>, '
'<a href="ftp://kaksi/">ftp://kaksi/</a>; '
'"<a href="gopher://3.0">gopher://3.0</a>"')]:
assert_escape_and_format(inp, exp)
def test_image_urls(self):
link = '(<a href="%s">%s</a>)'
img = '(<img src="%s" title="%s">)'
for ext in ['jpg', 'jpeg', 'png', 'gif', 'bmp']:
url = 'foo://bar/zap.%s' % ext
uprl = url.upper()
inp = '(%s)' % url
assert_escape_and_format(inp, link % (url, url), img % (url, url))
assert_escape_and_format(inp.upper(), link % (uprl, uprl),
img % (uprl, uprl))
def test_url_with_chars_needing_escaping(self):
for items in [
('http://foo"bar',
'<a href="http://foo"bar">http://foo"bar</a>'),
('ftp://<&>/',
'<a href="ftp://<&>/">ftp://<&>/</a>'),
('http://x&".png',
'<a href="http://x&".png">http://x&".png</a>',
'<img src="http://x&".png" title="http://x&".png">')
]:
assert_escape_and_format(*items)
class TestFormatParagraph(unittest.TestCase):
def test_empty(self):
assert_format('', '')
def test_single_line(self):
assert_format('foo', '<p>foo</p>')
def test_multi_line(self):
assert_format('foo\nbar', '<p>foo bar</p>')
def test_leading_and_trailing_spaces(self):
assert_format(' foo \n bar', '<p>foo bar</p>')
def test_multiple_paragraphs(self):
assert_format('P\n1\n\nP 2', '<p>P 1</p>\n<p>P 2</p>')
def test_leading_empty_line(self):
assert_format('\nP', '<p>P</p>')
def test_other_formatted_content_before_paragraph(self):
assert_format('---\nP', '<hr>\n<p>P</p>')
assert_format('| PRE \nP', '<pre>\nPRE\n</pre>\n<p>P</p>')
def test_other_formatted_content_after_paragraph(self):
assert_format('P\n---', '<p>P</p>\n<hr>')
assert_format('P\n| PRE \n', '<p>P</p>\n<pre>\nPRE\n</pre>')
class TestHtmlFormatInlineStyles(unittest.TestCase):
def test_bold_once(self):
for inp, exp in [('*bold*', '<b>bold</b>'),
('*b*', '<b>b</b>'),
('*many bold words*', '<b>many bold words</b>'),
(' *bold*', '<b>bold</b>'),
('*bold* ', '<b>bold</b>'),
('xx *bold*', 'xx <b>bold</b>'),
('*bold* xx', '<b>bold</b> xx'),
('***', '<b>*</b>'),
('****', '<b>**</b>'),
('*****', '<b>***</b>')]:
assert_format(inp, exp, p=True)
def test_bold_multiple_times(self):
for inp, exp in [('*bold* *b* not bold *b3* not',
'<b>bold</b> <b>b</b> not bold <b>b3</b> not'),
('not b *this is b* *more b words here*',
'not b <b>this is b</b> <b>more b words here</b>'),
('*** not *b* ***',
'<b>*</b> not <b>b</b> <b>*</b>')]:
assert_format(inp, exp, p=True)
def test_bold_on_multiple_lines(self):
inp = 'this is *bold*\nand *this*\nand *that*'
exp = 'this is <b>bold</b> and <b>this</b> and <b>that</b>'
assert_format(inp, exp, p=True)
assert_format('this *works\ntoo!*', 'this <b>works too!</b>', p=True)
def test_not_bolded_if_no_content(self):
assert_format('**', p=True)
def test_asterisk_in_the_middle_of_word_is_ignored(self):
for inp, exp in [('aa*notbold*bbb', None),
('*bold*still bold*', '<b>bold*still bold</b>'),
('a*not*b c*still not*d', None),
('*b*b2* -*n*- *b3*', '<b>b*b2</b> -*n*- <b>b3</b>')]:
assert_format(inp, exp, p=True)
def test_asterisk_alone_does_not_start_bolding(self):
for inp, exp in [('*', None),
(' * ', '*'),
('* not *', None),
(' * not * ', '* not *'),
('* not*', None),
('*bold *', '<b>bold </b>'),
('* *b* *', '* <b>b</b> *'),
('*bold * not*', '<b>bold </b> not*'),
('*bold * not*not* *b*',
'<b>bold </b> not*not* <b>b</b>')]:
assert_format(inp, exp, p=True)
def test_italic_once(self):
for inp, exp in [('_italic_', '<i>italic</i>'),
('_i_', '<i>i</i>'),
('_many italic words_', '<i>many italic words</i>'),
(' _italic_', '<i>italic</i>'),
('_italic_ ', '<i>italic</i>'),
('xx _italic_', 'xx <i>italic</i>'),
('_italic_ xx', '<i>italic</i> xx')]:
assert_format(inp, exp, p=True)
def test_italic_multiple_times(self):
for inp, exp in [('_italic_ _i_ not italic _i3_ not',
'<i>italic</i> <i>i</i> not italic <i>i3</i> not'),
('not i _this is i_ _more i words here_',
'not i <i>this is i</i> <i>more i words here</i>')]:
assert_format(inp, exp, p=True)
def test_not_italiced_if_no_content(self):
assert_format('__', p=True)
def test_not_italiced_many_underlines(self):
for inp in ['___', '____', '_________', '__len__']:
assert_format(inp, p=True)
def test_underscore_in_the_middle_of_word_is_ignored(self):
for inp, exp in [('aa_notitalic_bbb', None),
('_ital_still ital_', '<i>ital_still ital</i>'),
('a_not_b c_still not_d', None),
('_i_i2_ -_n_- _i3_', '<i>i_i2</i> -_n_- <i>i3</i>')]:
assert_format(inp, exp, p=True)
def test_underscore_alone_does_not_start_italicing(self):
for inp, exp in [('_', None),
(' _ ', '_'),
('_ not _', None),
(' _ not _ ', '_ not _'),
('_ not_', None),
('_italic _', '<i>italic </i>'),
('_ _i_ _', '_ <i>i</i> _'),
('_italic _ not_', '<i>italic </i> not_'),
('_italic _ not_not_ _i_',
'<i>italic </i> not_not_ <i>i</i>')]:
assert_format(inp, exp, p=True)
def test_bold_and_italic(self):
for inp, exp in [('*b* _i_', '<b>b</b> <i>i</i>')]:
assert_format(inp, exp, p=True)
def test_bold_and_italic_works_with_punctuation_marks(self):
for bef, aft in [('(',''), ('"',''), ("'",''), ('(\'"(',''),
('',')'), ('','"'), ('',','), ('','"\').,!?!?:;'),
('(',')'), ('"','"'), ('("\'','\'";)'), ('"','..."')]:
for inp, exp in [('*bold*','<b>bold</b>'),
('_ital_','<i>ital</i>'),
('*b* _i_','<b>b</b> <i>i</i>')]:
assert_format(bef + inp + aft, bef + exp + aft, p=True)
def test_bold_italic(self):
for inp, exp in [('_*bi*_', '<i><b>bi</b></i>'),
('_*bold ital*_', '<i><b>bold ital</b></i>'),
('_*bi* i_', '<i><b>bi</b> i</i>'),
('_*bi_ b*', '<i><b>bi</i> b</b>'),
('_i *bi*_', '<i>i <b>bi</b></i>'),
('*b _bi*_', '<b>b <i>bi</b></i>')]:
assert_format(inp, exp, p=True)
def test_code_once(self):
for inp, exp in [('``code``', '<code>code</code>'),
('``c``', '<code>c</code>'),
('``many code words``', '<code>many code words</code>'),
(' ``leading space``', '<code>leading space</code>'),
('``trailing space`` ', '<code>trailing space</code>'),
('xx ``code``', 'xx <code>code</code>'),
('``code`` xx', '<code>code</code> xx')]:
assert_format(inp, exp, p=True)
def test_code_multiple_times(self):
for inp, exp in [('``code`` ``c`` not ``c3`` not',
'<code>code</code> <code>c</code> not <code>c3</code> not'),
('not c ``this is c`` ``more c words here``',
'not c <code>this is c</code> <code>more c words here</code>')]:
assert_format(inp, exp, p=True)
def test_not_coded_if_no_content(self):
assert_format('````', p=True)
def test_not_codeed_many_underlines(self):
for inp in ['``````', '````````', '``````````````````', '````len````']:
assert_format(inp, p=True)
def test_backtics_in_the_middle_of_word_are_ignored(self):
for inp, exp in [('aa``notcode``bbb', None),
('``code``still code``', '<code>code``still code</code>'),
('a``not``b c``still not``d', None),
('``c``c2`` -``n``- ``c3``', '<code>c``c2</code> -``n``- <code>c3</code>')]:
assert_format(inp, exp, p=True)
def test_backtics_alone_do_not_start_codeing(self):
for inp, exp in [('``', None),
(' `` ', '``'),
('`` not ``', None),
(' `` not `` ', '`` not ``'),
('`` not``', None),
('``code ``', '<code>code </code>'),
('`` ``b`` ``', '`` <code>b</code> ``'),
('``code `` not``', '<code>code </code> not``'),
('``code `` not``not`` ``c``',
'<code>code </code> not``not`` <code>c</code>')]:
assert_format(inp, exp, p=True)
class TestHtmlFormatCustomLinks(unittest.TestCase):
def test_text_with_text(self):
assert_format('[link.html|title]', '<a href="link.html">title</a>', p=True)
assert_format('[link|t|i|t|l|e]', '<a href="link">t|i|t|l|e</a>', p=True)
def test_text_with_image(self):
assert_format('[link|img.png]',
'<a href="link"><img src="img.png" title="link"></a>',
p=True)
def test_image_with_text(self):
assert_format('[img.png|title]', '<img src="img.png" title="title">', p=True)
assert_format('[img.png|]', '<img src="img.png" title="img.png">', p=True)
def test_image_with_image(self):
assert_format('[x.png|thumb.png]',
'<a href="x.png"><img src="thumb.png" title="x.png"></a>',
p=True)
def test_link_is_required(self):
assert_format('[|]', '[|]', p=True)
def test_spaces_are_stripped(self):
assert_format('[ link.html | title words ]',
'<a href="link.html">title words</a>', p=True)
def test_newlines_inside_text(self):
assert_format('[http://url|text\non\nmany\nlines]',
'<a href="http://url">text on many lines</a>', p=True)
def test_newline_after_pipe(self):
assert_format('[http://url|\nwrapping was needed]',
'<a href="http://url">wrapping was needed</a>', p=True)
def test_url_and_link(self):
assert_format('http://url [link|title]',
'<a href="http://url">http://url</a> <a href="link">title</a>',
p=True)
def test_link_as_url(self):
assert_format('[http://url|title]', '<a href="http://url">title</a>', p=True)
def test_multiple_links(self):
assert_format('start [link|img.png] middle [link.html|title] end',
'start <a href="link"><img src="img.png" title="link"></a> '
'middle <a href="link.html">title</a> end', p=True)
def test_multiple_links_and_urls(self):
assert_format('[L|T]ftp://url[X|Y][http://u2]',
'<a href="L">T</a><a href="ftp://url">ftp://url</a>'
'<a href="X">Y</a>[<a href="http://u2">http://u2</a>]', p=True)
def test_escaping(self):
assert_format('["|<&>]', '<a href="""><&></a>', p=True)
assert_format('[<".jpg|">]', '<img src="<".jpg" title="">">', p=True)
def test_formatted_link(self):
assert_format('*[link.html|title]*', '<b><a href="link.html">title</a></b>', p=True)
def test_link_in_table(self):
assert_format('| [link.html|title] |', '''\
<table border="1">
<tr>
<td><a href="link.html">title</a></td>
</tr>
</table>''')
class TestHtmlFormatTable(unittest.TestCase):
def test_one_row_table(self):
inp = '| one | two |'
exp = _format_table([['one','two']])
assert_format(inp, exp)
def test_multi_row_table(self):
inp = '| 1.1 | 1.2 | 1.3 |\n| 2.1 | 2.2 |\n| 3.1 | 3.2 | 3.3 |\n'
exp = _format_table([['1.1','1.2','1.3'],
['2.1','2.2'],
['3.1','3.2','3.3']])
assert_format(inp, exp)
def test_table_with_extra_spaces(self):
inp = ' | 1.1 | 1.2 | \n | 2.1 | 2.2 | '
exp = _format_table([['1.1','1.2',],['2.1','2.2']])
assert_format(inp, exp)
def test_table_with_one_space_empty_cells(self):
inp = '''
| 1.1 | 1.2 | |
| 2.1 | | 2.3 |
| | 3.2 | 3.3 |
| 4.1 | | |
| | 5.2 | |
| | | 6.3 |
| | | |
'''[1:-1]
exp = _format_table([['1.1','1.2',''],
['2.1','','2.3'],
['','3.2','3.3'],
['4.1','',''],
['','5.2',''],
['','','6.3'],
['','','']])
assert_format(inp, exp)
def test_one_column_table(self):
inp = '| one column |\n| |\n | | \n| 2 | col |\n| |'
exp = _format_table([['one column'],[''],[''],['2','col'],['']])
assert_format(inp, exp)
def test_table_with_other_content_around(self):
inp = '''before table
| in | table |
| still | in |
after table
'''
exp = '<p>before table</p>\n' \
+ _format_table([['in','table'],['still','in']]) \
+ '\n<p>after table</p>'
assert_format(inp, exp)
def test_multiple_tables(self):
inp = '''before tables
| table | 1 |
| still | 1 |
between
| table | 2 |
between
| 3.1.1 | 3.1.2 | 3.1.3 |
| 3.2.1 | 3.2.2 | 3.2.3 |
| 3.3.1 | 3.3.2 | 3.3.3 |
| t | 4 |
| | |
after
'''
exp = '<p>before tables</p>\n' \
+ _format_table([['table','1'],['still','1']]) \
+ '\n<p>between</p>\n' \
+ _format_table([['table','2']]) \
+ '\n<p>between</p>\n' \
+ _format_table([['3.1.1','3.1.2','3.1.3'],
['3.2.1','3.2.2','3.2.3'],
['3.3.1','3.3.2','3.3.3']]) \
+ '\n' \
+ _format_table([['t','4'],['','']]) \
+ '\n<p>after</p>'
assert_format(inp, exp)
def test_ragged_table(self):
inp = '''
| 1.1 | 1.2 | 1.3 |
| 2.1 |
| 3.1 | 3.2 |
'''
exp = _format_table([['1.1','1.2','1.3'],
['2.1','',''],
['3.1','3.2','']])
assert_format(inp, exp)
def test_th(self):
inp = '''
| =a= | = b = | = = c = = |
| = = | = _e_ = | =_*f*_= |
'''
exp = '''
<table border="1">
<tr>
<th>a</th>
<th>b</th>
<th>= c =</th>
</tr>
<tr>
<th></th>
<th><i>e</i></th>
<th><i><b>f</b></i></th>
</tr>
</table>
'''
assert_format(inp, exp.strip())
def test_bold_in_table_cells(self):
inp = '''
| *a* | *b* | *c* |
| *b* | x | y |
| *c* | z | |
| a | x *b* y | *b* *c* |
| *a | b* | |
'''
exp = _format_table([['<b>a</b>','<b>b</b>','<b>c</b>'],
['<b>b</b>','x','y'],
['<b>c</b>','z','']]) + '\n' \
+ _format_table([['a','x <b>b</b> y','<b>b</b> <b>c</b>'],
['*a','b*','']])
assert_format(inp, exp)
def test_italic_in_table_cells(self):
inp = '''
| _a_ | _b_ | _c_ |
| _b_ | x | y |
| _c_ | z | |
| a | x _b_ y | _b_ _c_ |
| _a | b_ | |
'''
exp = _format_table([['<i>a</i>','<i>b</i>','<i>c</i>'],
['<i>b</i>','x','y'],
['<i>c</i>','z','']]) + '\n' \
+ _format_table([['a','x <i>b</i> y','<i>b</i> <i>c</i>'],
['_a','b_','']])
assert_format(inp, exp)
def test_bold_and_italic_in_table_cells(self):
inp = '''
| *a* | *b* | *c* |
| _b_ | x | y |
| _c_ | z | *b* _i_ |
'''
exp = _format_table([['<b>a</b>','<b>b</b>','<b>c</b>'],
['<i>b</i>','x','y'],
['<i>c</i>','z','<b>b</b> <i>i</i>']])
assert_format(inp, exp)
def test_link_in_table_cell(self):
inp = '''
| 1 | http://one |
| 2 | ftp://two/ |
'''
exp = _format_table([['1','FIRST'],
['2','SECOND']]) \
.replace('FIRST', '<a href="http://one">http://one</a>') \
.replace('SECOND', '<a href="ftp://two/">ftp://two/</a>')
assert_format(inp, exp)
class TestHtmlFormatHr(unittest.TestCase):
def test_hr_is_three_or_more_hyphens(self):
for i in range(3, 10):
hr = '-' * i
spaces = ' ' * i
assert_format(hr, '<hr>')
assert_format(spaces + hr + spaces, '<hr>')
def test_hr_with_other_stuff_around(self):
for inp, exp in [('---\n-', '<hr>\n<p>-</p>'),
('xx\n---\nxx', '<p>xx</p>\n<hr>\n<p>xx</p>'),
('xx\n\n------\n\nxx', '<p>xx</p>\n<hr>\n<p>xx</p>')]:
assert_format(inp, exp)
def test_multiple_hrs(self):
assert_format('---\n---\n\n---', '<hr>\n<hr>\n<hr>')
def test_not_hr(self):
for inp in ['-', '--', '-- --', '...---...', '===']:
assert_format(inp, p=True)
def test_hr_before_and_after_table(self):
inp = '''
---
| t | a | b | l | e |
---'''
exp = '<hr>\n' + _format_table([['t','a','b','l','e']]) + '\n<hr>'
assert_format(inp, exp)
class TestHtmlFormatList(unittest.TestCase):
def test_not_a_list(self):
for inp in ('-- item', '+ item', '* item', '-item'):
assert_format(inp, inp, p=True)
def test_one_item_list(self):
assert_format('- item', '<ul>\n<li>item</li>\n</ul>')
assert_format(' - item', '<ul>\n<li>item</li>\n</ul>')
def test_multi_item_list(self):
assert_format('- 1\n - 2\n- 3',
'<ul>\n<li>1</li>\n<li>2</li>\n<li>3</li>\n</ul>')
def test_list_with_formatted_content(self):
assert_format('- *bold* text\n- _italic_\n- [http://url|link]',
'<ul>\n<li><b>bold</b> text</li>\n<li><i>italic</i></li>\n'
'<li><a href="http://url">link</a></li>\n</ul>')
def test_indentation_can_be_used_to_continue_list_item(self):
assert_format('''
outside list
- this item
continues
- 2nd item
continues
twice
''', '''\
<p>outside list</p>
<ul>
<li>this item continues</li>
<li>2nd item continues twice</li>
</ul>''')
def test_lists_with_other_content_around(self):
assert_format('''
before
- a
- *b*
between
- c
- d
e
f
---
''', '''\
<p>before</p>
<ul>
<li>a</li>
<li><b>b</b></li>
</ul>
<p>between</p>
<ul>
<li>c</li>
<li>d e f</li>
</ul>
<hr>''')
class TestHtmlFormatPreformatted(unittest.TestCase):
def test_single_line_block(self):
self._assert_preformatted('| some', 'some')
def test_block_without_any_content(self):
self._assert_preformatted('|', '')
def test_first_char_after_pipe_must_be_space(self):
assert_format('|x', p=True)
def test_multi_line_block(self):
self._assert_preformatted('| some\n|\n| quote', 'some\n\nquote')
def test_internal_whitespace_is_preserved(self):
self._assert_preformatted('| so\t\tme ', ' so\t\tme')
def test_spaces_before_leading_pipe_are_ignored(self):
self._assert_preformatted(' | some', 'some')
def test_block_mixed_with_other_content(self):
assert_format('before block:\n| some\n| quote\nafter block',
'<p>before block:</p>\n<pre>\nsome\nquote\n</pre>\n<p>after block</p>')
def test_multiple_blocks(self):
assert_format('| some\n| quote\nbetween\n| other block\n\nafter', '''\
<pre>
some
quote
</pre>
<p>between</p>
<pre>
other block
</pre>
<p>after</p>''')
def test_block_line_with_other_formatting(self):
self._assert_preformatted('| _some_ formatted\n| text *here*',
'<i>some</i> formatted\ntext <b>here</b>')
def _assert_preformatted(self, inp, exp):
assert_format(inp, '<pre>\n' + exp + '\n</pre>')
class TestHtmlFormatHeaders(unittest.TestCase):
def test_no_header(self):
for line in ['', 'hello', '=', '==', '====', '= =', '= =', '== ==',
'= inconsistent levels ==', '==== 4 is too many ====',
'=no spaces=', '=no spaces =', '= no spaces=']:
assert_format(line, p=bool(line))
def test_header(self):
for line, expected in [('= My Header =', '<h2>My Header</h2>'),
('== my == header ==', '<h3>my == header</h3>'),
(' === === === ', '<h4>===</h4>')]:
assert_format(line, expected)
class TestFormatTable(unittest.TestCase):
# RIDE needs border="1" because its HTML view doesn't support CSS
_table_start = '<table border="1">'
def test_one_row_table(self):
inp = [['1','2','3']]
exp = self._table_start + '''
<tr>
<td>1</td>
<td>2</td>
<td>3</td>
</tr>
</table>'''
assert_equals(_format_table(inp), exp)
def test_multi_row_table(self):
inp = [['1.1','1.2'], ['2.1','2.2'], ['3.1','3.2']]
exp = self._table_start + '''
<tr>
<td>1.1</td>
<td>1.2</td>
</tr>
<tr>
<td>2.1</td>
<td>2.2</td>
</tr>
<tr>
<td>3.1</td>
<td>3.2</td>
</tr>
</table>'''
assert_equals(_format_table(inp), exp)
def test_fix_ragged_table(self):
inp = [['1.1','1.2','1.3'], ['2.1'], ['3.1','3.2']]
exp = self._table_start + '''
<tr>
<td>1.1</td>
<td>1.2</td>
<td>1.3</td>
</tr>
<tr>
<td>2.1</td>
<td></td>
<td></td>
</tr>
<tr>
<td>3.1</td>
<td>3.2</td>
<td></td>
</tr>
</table>'''
assert_equals(_format_table(inp), exp)
def test_th(self):
inp = [['=h1.1=', '= h 1.2 ='], ['== _h2.1_ =', '= not h 2.2']]
exp = self._table_start + '''
<tr>
<th>h1.1</th>
<th>h 1.2</th>
</tr>
<tr>
<th>= <i>h2.1</i></th>
<td>= not h 2.2</td>
</tr>
</table>'''
assert_equals(_format_table(inp), exp)
class TestAttributeEscape(unittest.TestCase):
def test_nothing_to_escape(self):
for inp in ['', 'whatever', 'nothing here, move along']:
assert_equals(attribute_escape(inp), inp)
def test_html_entities(self):
for inp, exp in [('"', '"'), ('<', '<'), ('>', '>'),
('&', '&'), ('&<">&', '&<">&'),
('Sanity < "check"', 'Sanity < "check"')]:
assert_equals(attribute_escape(inp), exp)
def test_newlines_and_tabs(self):
        for inp, exp in [('\n', '&#10;'), ('\t', '&#09;'), ('"\n\t"', '&quot;&#10;&#09;&quot;'),
                         ('N1\nN2\n\nT1\tT3\t\t\t', 'N1&#10;N2&#10;&#10;T1&#09;T3&#09;&#09;&#09;')]:
assert_equals(attribute_escape(inp), exp)
def test_illegal_chars_in_xml(self):
for c in u'\x00\x08\x0B\x0C\x0E\x1F\uFFFE\uFFFF':
assert_equals(attribute_escape(c), '')
if __name__ == '__main__':
unittest.main()
|
apache-2.0
|
mavit/ansible
|
lib/ansible/plugins/connection/__init__.py
|
14
|
15940
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
# (c) 2015 Toshio Kuratomi <tkuratomi@ansible.com>
# (c) 2017, Peter Sprygada <psprygad@redhat.com>
# (c) 2017 Ansible Project
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import fcntl
import gettext
import os
import shlex
from abc import abstractmethod, abstractproperty
from functools import wraps
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.module_utils.six import string_types
from ansible.module_utils._text import to_bytes, to_text
from ansible.plugins import AnsiblePlugin
from ansible.plugins.loader import shell_loader, connection_loader
from ansible.utils.path import unfrackpath
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
__all__ = ['ConnectionBase', 'ensure_connect']
BUFSIZE = 65536
def ensure_connect(func):
@wraps(func)
def wrapped(self, *args, **kwargs):
if not self._connected:
self._connect()
return func(self, *args, **kwargs)
return wrapped
class ConnectionBase(AnsiblePlugin):
'''
A base class for connections to contain common code.
'''
has_pipelining = False
has_native_async = False # eg, winrm
always_pipeline_modules = False # eg, winrm
become_methods = C.BECOME_METHODS
# When running over this connection type, prefer modules written in a certain language
# as discovered by the specified file extension. An empty string as the
# language means any language.
module_implementation_preferences = ('',)
allow_executable = True
# the following control whether or not the connection supports the
# persistent connection framework or not
supports_persistence = False
force_persistence = False
default_user = None
def __init__(self, play_context, new_stdin, shell=None, *args, **kwargs):
super(ConnectionBase, self).__init__()
# All these hasattrs allow subclasses to override these parameters
if not hasattr(self, '_play_context'):
self._play_context = play_context
if not hasattr(self, '_new_stdin'):
self._new_stdin = new_stdin
# Backwards compat: self._display isn't really needed, just import the global display and use that.
if not hasattr(self, '_display'):
self._display = display
if not hasattr(self, '_connected'):
self._connected = False
self.success_key = None
self.prompt = None
self._connected = False
self._socket_path = None
if shell is not None:
self._shell = shell
# load the shell plugin for this action/connection
if play_context.shell:
shell_type = play_context.shell
elif hasattr(self, '_shell_type'):
shell_type = getattr(self, '_shell_type')
else:
shell_type = 'sh'
shell_filename = os.path.basename(self._play_context.executable)
try:
shell = shell_loader.get(shell_filename)
except Exception:
shell = None
if shell is None:
for shell in shell_loader.all():
if shell_filename in shell.COMPATIBLE_SHELLS:
break
shell_type = shell.SHELL_FAMILY
self._shell = shell_loader.get(shell_type)
if not self._shell:
raise AnsibleError("Invalid shell type specified (%s), or the plugin for that shell type is missing." % shell_type)
@property
def connected(self):
'''Read-only property holding whether the connection to the remote host is active or closed.'''
return self._connected
@property
def socket_path(self):
'''Read-only property holding the connection socket path for this remote host'''
return self._socket_path
def _become_method_supported(self):
''' Checks if the current class supports this privilege escalation method '''
if self._play_context.become_method in self.become_methods:
return True
raise AnsibleError("Internal Error: this connection module does not support running commands via %s" % self._play_context.become_method)
@staticmethod
def _split_ssh_args(argstring):
"""
Takes a string like '-o Foo=1 -o Bar="foo bar"' and returns a
list ['-o', 'Foo=1', '-o', 'Bar=foo bar'] that can be added to
the argument list. The list will not contain any empty elements.
"""
try:
# Python 2.6.x shlex doesn't handle unicode type so we have to
# convert args to byte string for that case. More efficient to
# try without conversion first but python2.6 doesn't throw an
# exception, it merely mangles the output:
# >>> shlex.split(u't e')
# ['t\x00\x00\x00', '\x00\x00\x00e\x00\x00\x00']
return [to_text(x.strip()) for x in shlex.split(to_bytes(argstring)) if x.strip()]
except AttributeError:
# In Python3, shlex.split doesn't work on a byte string.
return [to_text(x.strip()) for x in shlex.split(argstring) if x.strip()]
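    # Illustrative note (not part of the original module): expected behaviour of
    # _split_ssh_args on a typical ssh_args string:
    #
    #   ConnectionBase._split_ssh_args('-o Foo=1 -o Bar="foo bar"')
    #   # -> ['-o', 'Foo=1', '-o', 'Bar=foo bar']
    #   ConnectionBase._split_ssh_args('')
    #   # -> []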
@abstractproperty
def transport(self):
"""String used to identify this Connection class from other classes"""
pass
@abstractmethod
def _connect(self):
"""Connect to the host we've been initialized with"""
# Check if PE is supported
if self._play_context.become:
self._become_method_supported()
@ensure_connect
@abstractmethod
def exec_command(self, cmd, in_data=None, sudoable=True):
"""Run a command on the remote host.
:arg cmd: byte string containing the command
:kwarg in_data: If set, this data is passed to the command's stdin.
This is used to implement pipelining. Currently not all
connection plugins implement pipelining.
:kwarg sudoable: Tell the connection plugin if we're executing
a command via a privilege escalation mechanism. This may affect
how the connection plugin returns data. Note that not all
connections can handle privilege escalation.
:returns: a tuple of (return code, stdout, stderr) The return code is
an int while stdout and stderr are both byte strings.
When a command is executed, it goes through multiple commands to get
there. It looks approximately like this::
[LocalShell] ConnectionCommand [UsersLoginShell (*)] ANSIBLE_SHELL_EXECUTABLE [(BecomeCommand ANSIBLE_SHELL_EXECUTABLE)] Command
:LocalShell: Is optional. It is run locally to invoke the
``Connection Command``. In most instances, the
``ConnectionCommand`` can be invoked directly instead. The ssh
connection plugin which can have values that need expanding
locally specified via ssh_args is the sole known exception to
this. Shell metacharacters in the command itself should be
processed on the remote machine, not on the local machine so no
shell is needed on the local machine. (Example, ``/bin/sh``)
:ConnectionCommand: This is the command that connects us to the remote
machine to run the rest of the command. ``ansible_ssh_user``,
``ansible_ssh_host`` and so forth are fed to this piece of the
command to connect to the correct host (Examples ``ssh``,
``chroot``)
:UsersLoginShell: This shell may or may not be created depending on
the ConnectionCommand used by the connection plugin. This is the
shell that the ``ansible_ssh_user`` has configured as their login
shell. In traditional UNIX parlance, this is the last field of
            a user's ``/etc/passwd`` entry. We do not specifically try to run
the ``UsersLoginShell`` when we connect. Instead it is implicit
in the actions that the ``ConnectionCommand`` takes when it
connects to a remote machine. ``ansible_shell_type`` may be set
to inform ansible of differences in how the ``UsersLoginShell``
handles things like quoting if a shell has different semantics
than the Bourne shell.
:ANSIBLE_SHELL_EXECUTABLE: This is the shell set via the inventory var
``ansible_shell_executable`` or via
``constants.DEFAULT_EXECUTABLE`` if the inventory var is not set.
We explicitly invoke this shell so that we have predictable
quoting rules at this point. ``ANSIBLE_SHELL_EXECUTABLE`` is only
settable by the user because some sudo setups may only allow
invoking a specific shell. (For instance, ``/bin/bash`` may be
allowed but ``/bin/sh``, our default, may not). We invoke this
twice, once after the ``ConnectionCommand`` and once after the
``BecomeCommand``. After the ConnectionCommand, this is run by
the ``UsersLoginShell``. After the ``BecomeCommand`` we specify
that the ``ANSIBLE_SHELL_EXECUTABLE`` is being invoked directly.
        :BecomeCommand ANSIBLE_SHELL_EXECUTABLE: Is the command that performs
privilege escalation. Setting this up is performed by the action
plugin prior to running ``exec_command``. So we just get passed
:param:`cmd` which has the BecomeCommand already added.
(Examples: sudo, su) If we have a BecomeCommand then we will
invoke a ANSIBLE_SHELL_EXECUTABLE shell inside of it so that we
have a consistent view of quoting.
:Command: Is the command we're actually trying to run remotely.
(Examples: mkdir -p $HOME/.ansible, python $HOME/.ansible/tmp-script-file)
"""
pass
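    # Illustrative note (not part of the original module; every path and option
    # below is only a placeholder): for the ssh connection plugin with become
    # enabled, the layering described in the docstring above typically expands to
    # something like
    #
    #   ssh <ssh_args> user@host '/bin/sh -c "sudo -H -S -n /bin/sh -c \"python <remote_tmp>/module.py\""'
    #
    # i.e. ConnectionCommand -> ANSIBLE_SHELL_EXECUTABLE -> BecomeCommand
    #      -> ANSIBLE_SHELL_EXECUTABLE -> Command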
@ensure_connect
@abstractmethod
def put_file(self, in_path, out_path):
"""Transfer a file from local to remote"""
pass
@ensure_connect
@abstractmethod
def fetch_file(self, in_path, out_path):
"""Fetch a file from remote to local"""
pass
@abstractmethod
def close(self):
"""Terminate the connection"""
pass
def check_become_success(self, b_output):
b_success_key = to_bytes(self._play_context.success_key)
for b_line in b_output.splitlines(True):
if b_success_key == b_line.rstrip():
return True
return False
def check_password_prompt(self, b_output):
if self._play_context.prompt is None:
return False
elif isinstance(self._play_context.prompt, string_types):
b_prompt = to_bytes(self._play_context.prompt).strip()
b_lines = b_output.splitlines()
return any(l.strip().startswith(b_prompt) for l in b_lines)
else:
return self._play_context.prompt(b_output)
def check_incorrect_password(self, b_output):
b_incorrect_password = to_bytes(gettext.dgettext(self._play_context.become_method, C.BECOME_ERROR_STRINGS[self._play_context.become_method]))
return b_incorrect_password and b_incorrect_password in b_output
def check_missing_password(self, b_output):
b_missing_password = to_bytes(gettext.dgettext(self._play_context.become_method, C.BECOME_MISSING_STRINGS[self._play_context.become_method]))
return b_missing_password and b_missing_password in b_output
def connection_lock(self):
f = self._play_context.connection_lockfd
display.vvvv('CONNECTION: pid %d waiting for lock on %d' % (os.getpid(), f), host=self._play_context.remote_addr)
fcntl.lockf(f, fcntl.LOCK_EX)
display.vvvv('CONNECTION: pid %d acquired lock on %d' % (os.getpid(), f), host=self._play_context.remote_addr)
def connection_unlock(self):
f = self._play_context.connection_lockfd
fcntl.lockf(f, fcntl.LOCK_UN)
display.vvvv('CONNECTION: pid %d released lock on %d' % (os.getpid(), f), host=self._play_context.remote_addr)
def reset(self):
display.warning("Reset is not implemented for this connection")
class NetworkConnectionBase(ConnectionBase):
"""
A base class for network-style connections.
"""
force_persistence = True
# Do not use _remote_is_local in other connections
_remote_is_local = True
def __init__(self, play_context, new_stdin, *args, **kwargs):
super(NetworkConnectionBase, self).__init__(play_context, new_stdin, *args, **kwargs)
self._network_os = self._play_context.network_os
self._local = connection_loader.get('local', play_context, '/dev/null')
self._local.set_options()
self._sub_plugins = []
self._cached_variables = (None, None, None)
# reconstruct the socket_path and set instance values accordingly
self._ansible_playbook_pid = kwargs.get('ansible_playbook_pid')
self._update_connection_state()
def __getattr__(self, name):
try:
return self.__dict__[name]
except KeyError:
if not name.startswith('_'):
for plugin in self._sub_plugins:
method = getattr(plugin['obj'], name, None)
if method is not None:
return method
raise AttributeError("'%s' object has no attribute '%s'" % (self.__class__.__name__, name))
def exec_command(self, cmd, in_data=None, sudoable=True):
return self._local.exec_command(cmd, in_data, sudoable)
def put_file(self, in_path, out_path):
"""Transfer a file from local to remote"""
return self._local.put_file(in_path, out_path)
def fetch_file(self, in_path, out_path):
"""Fetch a file from remote to local"""
return self._local.fetch_file(in_path, out_path)
def reset(self):
'''
Reset the connection
'''
if self._socket_path:
display.vvvv('resetting persistent connection for socket_path %s' % self._socket_path, host=self._play_context.remote_addr)
self.close()
display.vvvv('reset call on connection instance', host=self._play_context.remote_addr)
def close(self):
if self._connected:
self._connected = False
def set_options(self, task_keys=None, var_options=None, direct=None):
super(NetworkConnectionBase, self).set_options(task_keys=task_keys, var_options=var_options, direct=direct)
for plugin in self._sub_plugins:
if plugin['type'] != 'external':
try:
plugin['obj'].set_options(task_keys=task_keys, var_options=var_options, direct=direct)
except AttributeError:
pass
def _update_connection_state(self):
'''
Reconstruct the connection socket_path and check if it exists
If the socket path exists then the connection is active and set
both the _socket_path value to the path and the _connected value
to True. If the socket path doesn't exist, leave the socket path
value to None and the _connected value to False
'''
ssh = connection_loader.get('ssh', class_only=True)
control_path = ssh._create_control_path(
self._play_context.remote_addr, self._play_context.port,
self._play_context.remote_user, self._play_context.connection,
self._ansible_playbook_pid
)
tmp_path = unfrackpath(C.PERSISTENT_CONTROL_PATH_DIR)
socket_path = unfrackpath(control_path % dict(directory=tmp_path))
if os.path.exists(socket_path):
self._connected = True
self._socket_path = socket_path
|
gpl-3.0
|
saurabh6790/med_app_rels
|
patches/may_2013/p08_change_item_wise_tax.py
|
30
|
1085
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
import webnotes
import json
from webnotes.utils import flt
def execute():
webnotes.conn.auto_commit_on_many_writes = 1
for doctype in ["Purchase Taxes and Charges", "Sales Taxes and Charges"]:
for tax_name, item_wise_tax_detail in \
webnotes.conn.sql("""select name, item_wise_tax_detail from `tab%s`""" % doctype):
if not item_wise_tax_detail or not isinstance(item_wise_tax_detail, basestring):
continue
try:
json.loads(item_wise_tax_detail)
except ValueError:
out = {}
for t in item_wise_tax_detail.split("\n"):
if " : " in t:
split_index = t.rfind(" : ")
account_head, amount = t[:split_index], t[split_index+3:]
out[account_head.strip()] = flt(amount.strip())
if out:
webnotes.conn.sql("""update `tab%s` set item_wise_tax_detail=%s
where name=%s""" % (doctype, "%s", "%s"), (json.dumps(out), tax_name))
webnotes.conn.auto_commit_on_many_writes = 0
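# Illustrative note (not part of the original patch): the conversion above turns
# the legacy newline-separated format into JSON, e.g. a stored value of
#   "VAT - Company : 120.50\nService Tax - Company : 35"
# becomes
#   {"VAT - Company": 120.5, "Service Tax - Company": 35.0}
# Rows whose value already parses as JSON are left untouched.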
|
agpl-3.0
|
sixfeetup/cloud-custodian
|
c7n/resources/waf.py
|
1
|
1475
|
# Copyright 2016-2017 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function, unicode_literals
from c7n.manager import resources
from c7n.query import QueryResourceManager
@resources.register('waf')
class WAF(QueryResourceManager):
class resource_type(object):
service = "waf"
enum_spec = ("list_web_acls", "WebACLs", None)
detail_spec = ("get_web_acl", "WebACLId", "WebACLId", "WebACL")
name = "Name"
id = "WebACLId"
dimension = "WebACL"
filter_name = None
@resources.register('waf-regional')
class RegionalWAF(QueryResourceManager):
class resource_type(object):
service = "waf-regional"
enum_spec = ("list_web_acls", "WebACLs", None)
detail_spec = ("get_web_acl", "WebACLId", "WebACLId", "WebACL")
name = "Name"
id = "WebACLId"
dimension = "WebACL"
filter_name = None
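# Illustrative note (not part of the original module): with these resource_type
# declarations, a policy such as
#
#   policies:
#     - name: waf-acls
#       resource: waf
#
# enumerates web ACLs with list_web_acls and enriches each record through
# get_web_acl, keyed on WebACLId.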
|
apache-2.0
|
gusai-francelabs/datafari
|
debian7/zookeeper/src/contrib/huebrowser/zkui/src/zkui/utils.py
|
114
|
1120
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from zkui import settings
from django.http import Http404
def get_cluster_or_404(id):
try:
id = int(id)
if not (0 <= id < len(settings.CLUSTERS)):
raise ValueError, 'Undefined cluster id.'
except (TypeError, ValueError):
raise Http404()
cluster = settings.CLUSTERS[id]
cluster['id'] = id
return cluster
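# Illustrative note (not part of the original module): with two entries in
# settings.CLUSTERS, get_cluster_or_404('1') returns CLUSTERS[1] augmented with
# {'id': 1}, while get_cluster_or_404('5') or get_cluster_or_404('abc') raises
# Http404.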
|
apache-2.0
|
BT-astauder/odoo
|
addons/lunch/__openerp__.py
|
267
|
2542
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2012 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Lunch Orders',
'author': 'OpenERP SA',
'version': '0.2',
'depends': ['base', 'report'],
'category' : 'Tools',
'summary': 'Lunch Order, Meal, Food',
'description': """
The base module to manage lunch.
================================
Many companies order sandwiches, pizzas and other food from their usual suppliers to make life easier for their employees.
However, managing lunches within the company requires proper administration, especially when the number of employees or suppliers is large.
The “Lunch Order” module has been developed to make this management easier and to offer employees more tools and better usability.
In addition to full meal and supplier management, this module can display warnings and provides quick order selection based on each employee's preferences.
If you want to save your employees' time and spare them from always having to carry coins, this module is essential.
""",
'data': [
'security/lunch_security.xml',
'lunch_view.xml',
'wizard/lunch_order_view.xml',
'wizard/lunch_validation_view.xml',
'wizard/lunch_cancel_view.xml',
'lunch_report.xml',
'report/report_lunch_order_view.xml',
'security/ir.model.access.csv',
'views/report_lunchorder.xml',
'views/lunch.xml',
],
'demo': ['lunch_demo.xml',],
'installable': True,
'website': 'https://www.odoo.com/page/employees',
'application' : True,
'certificate' : '001292377792581874189',
}
|
agpl-3.0
|
JoseBlanca/vcf_crumbs
|
test/test_utils.py
|
1
|
4369
|
# Copyright 2013 Jose Blanca, Peio Ziarsolo, COMAV-Univ. Politecnica Valencia
# This file is part of seq_crumbs.
# vcf_crumbs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# vcf_crumbs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with vcf_crumbs. If not, see <http://www.gnu.org/licenses/>.
import unittest
from tempfile import NamedTemporaryFile
from os.path import exists
from os.path import join as pjoin
from os import remove
from StringIO import StringIO
import gzip
from vcf_crumbs.utils.file_utils import (compress_with_bgzip, uncompress_gzip,
index_vcf_with_tabix, TEST_DATA_DIR,
_build_template_fhand)
# Method could be a function
# pylint: disable=R0201
# Too many public methods
# pylint: disable=R0904
# Missing docstring
# pylint: disable=C0111
VCF = '''##fileformat=VCFv4.1
##fileDate=20090805
##source=myImputationProgramV3.1
##reference=file:///seq/references/1000GenomesPilot-NCBI36.fasta
##contig=<ID=20,length=62435964,assembly=B36>
##phasing=partial
##INFO=<ID=NS,Number=1,Type=Integer,Description="Number_of_Samples_With_Data">
##INFO=<ID=DP,Number=1,Type=Integer,Description="Total_Depth">
##INFO=<ID=AF,Number=A,Type=Float,Description="Allele_Frequency">
##INFO=<ID=AA,Number=1,Type=String,Description="Ancestral_Allele">
##INFO=<ID=DB,Number=0,Type=Flag,Description="dbSNP membership,build_129">
##INFO=<ID=H2,Number=0,Type=Flag,Description="HapMap2_membership">
##FILTER=<ID=q10,Description="Quality_below_10">
##FILTER=<ID=s50,Description="Less_than_50%_of_samples_have_data">
##FORMAT=<ID=GT,Number=1,Type=String,Description="Genotype">
##FORMAT=<ID=GQ,Number=1,Type=Integer,Description="Genotype_Quality">
##FORMAT=<ID=DP,Number=1,Type=Integer,Description="Read_Depth">
##FORMAT=<ID=HQ,Number=2,Type=Integer,Description="Haplotype_Quality">
#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT NA00001 NA00002 NA00003
20 14370 rs6054257 G A 29 PASS NS=3;DP=14;AF=0.5;DB;H2 GT:GQ:DP:HQ 0|0:48:1:51,51 1|0:48:8:51,51 1/1:43:5:.,.
20 17330 . T A 3 q10 NS=3;DP=11;AF=0.017 GT:GQ:DP:HQ 0|0:49:3:58,50 0|1:3:5:65,3 0/0:41:3
'''
class CompressTest(unittest.TestCase):
def test_bgzip_compression(self):
orig = 'hola\ncaracola\n'
orig_fhand = NamedTemporaryFile()
orig_fhand.write(orig)
orig_fhand.flush()
compressed_fhand = NamedTemporaryFile(suffix='.gz')
compress_with_bgzip(orig_fhand, compressed_fhand)
compressed_fhand.seek(0)
compressed = compressed_fhand.read()
orig_fhand.seek(0)
assert orig_fhand.read() == orig
uncompressed_fhand = NamedTemporaryFile()
uncompress_gzip(compressed_fhand, uncompressed_fhand)
compressed_fhand.seek(0)
assert compressed_fhand.read() == compressed
uncompressed_fhand.seek(0)
assert uncompressed_fhand.read() == orig
def test_vcf_index(self):
vcf = VCF.replace(' ', '\t')
vcf_fhand = NamedTemporaryFile(suffix='.vcf')
vcf_fhand.write(vcf)
vcf_fhand.flush()
compressed_fhand = NamedTemporaryFile(suffix='.vcf.gz')
compress_with_bgzip(vcf_fhand, compressed_fhand)
index_vcf_with_tabix(compressed_fhand.name)
assert exists(compressed_fhand.name + '.tbi')
remove(compressed_fhand.name + '.tbi')
class BuildTemplateTest(unittest.TestCase):
def test_build_template(self):
vcf = VCF.replace(' ', '\t')
in_fhand = StringIO(vcf)
t_fhand = _build_template_fhand(in_fhand)
assert 'NA00002\tNA00003\n' in t_fhand.read()
# compressed file
in_fhand = open(pjoin(TEST_DATA_DIR, 'freebayes_multisample.vcf.gz'))
t_fhand = _build_template_fhand(in_fhand)
assert 'sample12_gbs\n' in t_fhand.read()
if __name__ == "__main__":
# import sys;sys.argv = ['', 'FilterTest.test_close_to_filter']
unittest.main()
|
gpl-3.0
|
saradbowman/osf.io
|
api/base/filters.py
|
2
|
24828
|
import datetime
import functools
import operator
import re
import pytz
from api.base import utils
from api.base.exceptions import (
InvalidFilterComparisonType,
InvalidFilterError, InvalidFilterFieldError,
InvalidFilterMatchType, InvalidFilterOperator,
InvalidFilterValue,
)
from api.base.serializers import RelationshipField, ShowIfVersion, TargetField
from dateutil import parser as date_parser
from django.core.exceptions import ValidationError
from django.db.models import QuerySet as DjangoQuerySet
from django.db.models import Q
from rest_framework import serializers as ser
from rest_framework.filters import OrderingFilter
from osf.models import Subject, Preprint
from osf.models.base import GuidMixin
from functools import cmp_to_key
def lowercase(lower):
if hasattr(lower, '__call__'):
return lower()
return lower
def sort_multiple(fields):
fields = list(fields)
def sort_fn(a, b):
sort_direction = 1
for field in fields:
if field[0] == '-':
sort_direction = -1
field = field[1:]
a_field = getattr(a, field)
b_field = getattr(b, field)
if a_field > b_field:
return 1 * sort_direction
elif a_field < b_field:
return -1 * sort_direction
return 0
return sort_fn
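# Illustrative note (not part of the original module): sort_multiple() returns an
# old-style comparison function, so callers wrap it with functools.cmp_to_key, e.g.
#
#   sorted(nodes, key=cmp_to_key(sort_multiple(['title'])))    # title ascending
#   sorted(nodes, key=cmp_to_key(sort_multiple(['-title'])))   # title descending
#
# Note that a '-' prefix also carries over to the fields checked after it,
# because sort_direction is never reset inside the loop.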
class OSFOrderingFilter(OrderingFilter):
"""Adaptation of rest_framework.filters.OrderingFilter to work with modular-odm."""
# override
def filter_queryset(self, request, queryset, view):
ordering = self.get_ordering(request, queryset, view)
if isinstance(queryset, DjangoQuerySet):
if queryset.ordered:
return queryset
elif ordering and getattr(queryset.query, 'distinct_fields', None):
order_fields = tuple([field.lstrip('-') for field in ordering])
distinct_fields = queryset.query.distinct_fields
queryset.query.distinct_fields = tuple(set(distinct_fields + order_fields))
return super(OSFOrderingFilter, self).filter_queryset(request, queryset, view)
if ordering:
if isinstance(ordering, (list, tuple)):
sorted_list = sorted(queryset, key=cmp_to_key(sort_multiple(ordering)))
return sorted_list
return queryset.sort(*ordering)
return queryset
def get_serializer_source_field(self, view, request):
"""
Returns a dictionary of serializer fields and source names. i.e. {'date_created': 'created'}
Logic borrowed from OrderingFilter.get_default_valid_fields with modifications to retrieve
source fields for serializer field names.
        :param view: api view
        :returns: dictionary of serializer field names mapped to their source names
"""
field_to_source_mapping = {}
if hasattr(view, 'get_serializer_class'):
serializer_class = view.get_serializer_class()
else:
serializer_class = getattr(view, 'serializer_class', None)
# This will not allow any serializer fields with nested related fields to be sorted on
for field_name, field in serializer_class(context={'request': request}).fields.items():
if not getattr(field, 'write_only', False) and not field.source == '*' and field_name != field.source:
field_to_source_mapping[field_name] = field.source.replace('.', '_')
return field_to_source_mapping
# Overrides OrderingFilter
def remove_invalid_fields(self, queryset, fields, view, request):
"""
Returns an array of valid fields to be used for ordering.
Any valid source fields which are input remain in the valid fields list using the super method.
Serializer fields are mapped to their source fields and returned.
        :param fields: array, input sort fields
        :returns: array of source fields for sorting.
"""
valid_fields = super(OSFOrderingFilter, self).remove_invalid_fields(queryset, fields, view, request)
if not valid_fields:
for invalid_field in fields:
ordering_sign = '-' if invalid_field[0] == '-' else ''
invalid_field = invalid_field.lstrip('-')
field_source_mapping = self.get_serializer_source_field(view, request)
source_field = field_source_mapping.get(invalid_field, None)
if source_field:
valid_fields.append(ordering_sign + source_field)
return valid_fields
class FilterMixin(object):
""" View mixin with helper functions for filtering. """
QUERY_PATTERN = re.compile(r'^filter\[(?P<fields>((?:,*\s*\w+)*))\](\[(?P<op>\w+)\])?$')
FILTER_FIELDS = re.compile(r'(?:,*\s*(\w+)+)')
MATCH_OPERATORS = ('contains', 'icontains')
MATCHABLE_FIELDS = (ser.CharField, ser.ListField)
DEFAULT_OPERATORS = ('eq', 'ne')
DEFAULT_OPERATOR_OVERRIDES = {
ser.CharField: 'icontains',
ser.ListField: 'contains',
}
NUMERIC_FIELDS = (ser.IntegerField, ser.DecimalField, ser.FloatField)
DATE_FIELDS = (ser.DateTimeField, ser.DateField)
DATETIME_PATTERN = re.compile(r'^\d{4}\-\d{2}\-\d{2}(?P<time>T\d{2}:\d{2}(:\d{2}(\.\d{1,6})?)?)$')
COMPARISON_OPERATORS = ('gt', 'gte', 'lt', 'lte')
COMPARABLE_FIELDS = NUMERIC_FIELDS + DATE_FIELDS
LIST_FIELDS = (ser.ListField, )
RELATIONSHIP_FIELDS = (RelationshipField, TargetField)
def __init__(self, *args, **kwargs):
super(FilterMixin, self).__init__(*args, **kwargs)
if not self.serializer_class:
raise NotImplementedError()
def _get_default_operator(self, field):
return self.DEFAULT_OPERATOR_OVERRIDES.get(type(field), 'eq')
def _get_valid_operators(self, field):
if isinstance(field, self.COMPARABLE_FIELDS):
return self.COMPARISON_OPERATORS + self.DEFAULT_OPERATORS
elif isinstance(field, self.MATCHABLE_FIELDS):
return self.MATCH_OPERATORS + self.DEFAULT_OPERATORS
else:
return self.DEFAULT_OPERATORS
def _get_field_or_error(self, field_name):
"""
Check that the attempted filter field is valid
:raises InvalidFilterError: If the filter field is not valid
"""
predeclared_fields = self.serializer_class._declared_fields
initialized_fields = self.get_serializer().fields if hasattr(self, 'get_serializer') else {}
serializer_fields = predeclared_fields.copy()
# Merges fields that were declared on serializer with fields that may have been dynamically added
serializer_fields.update(initialized_fields)
if field_name not in serializer_fields:
raise InvalidFilterError(detail="'{0}' is not a valid field for this endpoint.".format(field_name))
if field_name not in getattr(self.serializer_class, 'filterable_fields', set()):
raise InvalidFilterFieldError(parameter='filter', value=field_name)
field = serializer_fields[field_name]
# You cannot filter on deprecated fields.
if isinstance(field, ShowIfVersion) and utils.is_deprecated(self.request.version, field.min_version, field.max_version):
raise InvalidFilterFieldError(parameter='filter', value=field_name)
return serializer_fields[field_name]
def _validate_operator(self, field, field_name, op):
"""
Check that the operator and field combination is valid
:raises InvalidFilterComparisonType: If the query contains comparisons against non-date or non-numeric fields
:raises InvalidFilterMatchType: If the query contains comparisons against non-string or non-list fields
:raises InvalidFilterOperator: If the filter operator is not a member of self.COMPARISON_OPERATORS
"""
if op not in set(self.MATCH_OPERATORS + self.COMPARISON_OPERATORS + self.DEFAULT_OPERATORS):
valid_operators = self._get_valid_operators(field)
raise InvalidFilterOperator(value=op, valid_operators=valid_operators)
if op in self.COMPARISON_OPERATORS:
if not isinstance(field, self.COMPARABLE_FIELDS):
raise InvalidFilterComparisonType(
parameter='filter',
detail="Field '{0}' does not support comparison operators in a filter.".format(field_name),
)
if op in self.MATCH_OPERATORS:
if not isinstance(field, self.MATCHABLE_FIELDS):
raise InvalidFilterMatchType(
parameter='filter',
detail="Field '{0}' does not support match operators in a filter.".format(field_name),
)
def _parse_date_param(self, field, source_field_name, op, value):
"""
Allow for ambiguous date filters. This supports operations like finding Nodes created on a given day
even though Node.created is a specific datetime.
:return list<dict>: list of one (specific datetime) or more (date range) parsed query params
"""
time_match = self.DATETIME_PATTERN.match(value)
if op != 'eq' or time_match:
return {
'op': op,
'value': self.convert_value(value, field),
'source_field_name': source_field_name,
}
else: # TODO: let times be as generic as possible (i.e. whole month, whole year)
start = self.convert_value(value, field)
stop = start + datetime.timedelta(days=1)
return [
{
'op': 'gte',
'value': start,
'source_field_name': source_field_name,
}, {
'op': 'lt',
'value': stop,
'source_field_name': source_field_name,
},
]
def bulk_get_values(self, value, field):
"""
Returns list of values from query_param for IN query
If url contained `/nodes/?filter[id]=12345, abcde`, the returned values would be:
[u'12345', u'abcde']
"""
value = value.lstrip('[').rstrip(']')
separated_values = value.split(',')
values = [self.convert_value(val.strip(), field) for val in separated_values]
return values
def parse_query_params(self, query_params):
"""Maps query params to a dict usable for filtering
:param dict query_params:
:return dict: of the format {
<resolved_field_name>: {
'op': <comparison_operator>,
'value': <resolved_value>,
'source_field_name': <model_field_source_of_serializer_field>
}
}
"""
query = {}
for key, value in query_params.items():
match = self.QUERY_PATTERN.match(key)
if match:
match_dict = match.groupdict()
fields = match_dict['fields']
field_names = re.findall(self.FILTER_FIELDS, fields.strip())
query.update({key: {}})
for field_name in field_names:
field = self._get_field_or_error(field_name)
op = match_dict.get('op') or self._get_default_operator(field)
self._validate_operator(field, field_name, op)
source_field_name = field_name
if not isinstance(field, ser.SerializerMethodField):
source_field_name = self.convert_key(field_name, field)
# Special case date(time)s to allow for ambiguous date matches
if isinstance(field, self.DATE_FIELDS):
query.get(key).update({
field_name: self._parse_date_param(field, source_field_name, op, value),
})
elif not isinstance(value, int) and source_field_name in ['_id', 'guid._id', 'journal_id']:
query.get(key).update({
field_name: {
'op': 'in',
'value': self.bulk_get_values(value, field),
'source_field_name': source_field_name,
},
})
elif not isinstance(value, int) and source_field_name == 'root':
query.get(key).update({
field_name: {
'op': op,
'value': self.bulk_get_values(value, field),
'source_field_name': source_field_name,
},
})
elif self.should_parse_special_query_params(field_name):
query = self.parse_special_query_params(field_name, key, value, query)
else:
query.get(key).update({
field_name: {
'op': op,
'value': self.convert_value(value, field),
'source_field_name': source_field_name,
},
})
self.postprocess_query_param(key, field_name, query[key][field_name])
return query
def postprocess_query_param(self, key, field_name, operation):
"""Hook to update parsed query parameters. Overrides of this method should either
update ``operation`` in-place or do nothing.
"""
pass
def should_parse_special_query_params(self, field_name):
""" This should be overridden in subclasses for custom filtering behavior
"""
return False
def parse_special_query_params(self, field_name, key, value, query):
""" This should be overridden in subclasses for custom filtering behavior
"""
pass
def convert_key(self, field_name, field):
"""Used so that that queries on fields with the source attribute set will work
:param basestring field_name: text representation of the field name
:param rest_framework.fields.Field field: Field instance
"""
field = utils.decompose_field(field)
source = field.source
if source == '*':
source = getattr(field, 'filter_key', None)
return source or field_name
def convert_value(self, value, field):
"""Used to convert incoming values from query params to the appropriate types for filter comparisons
:param basestring value: value to be resolved
:param rest_framework.fields.Field field: Field instance
"""
field = utils.decompose_field(field)
if isinstance(field, ShowIfVersion):
field = field.field
if isinstance(field, ser.BooleanField):
if utils.is_truthy(value):
return True
elif utils.is_falsy(value):
return False
else:
raise InvalidFilterValue(
value=value,
field_type='bool',
)
elif isinstance(field, self.DATE_FIELDS):
try:
ret = date_parser.parse(value, ignoretz=False)
if not ret.tzinfo:
ret = ret.replace(tzinfo=pytz.utc)
return ret
except ValueError:
raise InvalidFilterValue(
value=value,
field_type='date',
)
elif isinstance(field, (self.RELATIONSHIP_FIELDS, ser.SerializerMethodField, ser.ManyRelatedField)):
if value == 'null':
value = None
return value
elif isinstance(field, self.LIST_FIELDS) or isinstance((getattr(field, 'field', None)), self.LIST_FIELDS):
if value == 'null':
value = []
return value
else:
try:
return field.to_internal_value(value)
except ValidationError:
raise InvalidFilterValue(
value=value,
)
class ListFilterMixin(FilterMixin):
"""View mixin that adds a get_queryset_from_request method which uses query params
of the form `filter[field_name]=value` to filter a list of objects.
Subclasses must define `get_default_queryset()`.
Serializers that want to restrict which fields are used for filtering need to have a variable called
filterable_fields which is a frozenset of strings representing the field names as they appear in the serialization.
"""
FILTERS = {
'eq': operator.eq,
'lt': operator.lt,
'lte': operator.le,
'gt': operator.gt,
'gte': operator.ge,
}
def __init__(self, *args, **kwargs):
super(FilterMixin, self).__init__(*args, **kwargs)
if not self.serializer_class:
raise NotImplementedError()
def get_default_queryset(self):
raise NotImplementedError('Must define get_default_queryset')
def get_queryset_from_request(self):
default_queryset = self.get_default_queryset()
if not self.kwargs.get('is_embedded') and self.request.query_params:
param_queryset = self.param_queryset(self.request.query_params, default_queryset)
return param_queryset
else:
return default_queryset
def param_queryset(self, query_params, default_queryset):
"""filters default queryset based on query parameters"""
filters = self.parse_query_params(query_params)
queryset = default_queryset
query_parts = []
if filters:
for key, field_names in filters.items():
sub_query_parts = []
for field_name, data in field_names.items():
operations = data if isinstance(data, list) else [data]
if isinstance(queryset, list):
for operation in operations:
queryset = self.get_filtered_queryset(field_name, operation, queryset)
else:
sub_query_parts.append(
functools.reduce(
operator.and_, [
self.build_query_from_field(field_name, operation)
for operation in operations
],
),
)
if not isinstance(queryset, list):
sub_query = functools.reduce(operator.or_, sub_query_parts)
query_parts.append(sub_query)
if not isinstance(queryset, list):
for query in query_parts:
queryset = queryset.filter(query)
return queryset
def build_query_from_field(self, field_name, operation):
query_field_name = operation['source_field_name']
if operation['op'] == 'ne':
return ~Q(**{query_field_name: operation['value']})
elif operation['op'] != 'eq':
query_field_name = '{}__{}'.format(query_field_name, operation['op'])
return Q(**{query_field_name: operation['value']})
return Q(**{query_field_name: operation['value']})
def postprocess_query_param(self, key, field_name, operation):
# tag queries will usually be on Tag.name,
# ?filter[tags]=foo should be translated to MQ('tags__name', 'eq', 'foo')
# But queries on lists should be tags, e.g.
# ?filter[tags]=foo,bar should be translated to MQ('tags', 'isnull', True)
# ?filter[tags]=[] should be translated to MQ('tags', 'isnull', True)
if field_name == 'tags':
if operation['value'] not in (list(), tuple()):
operation['source_field_name'] = 'tags__name'
operation['op'] = 'iexact'
elif operation['value'] == []:
operation['source_field_name'] = 'tags__isnull'
operation['value'] = True
operation['op'] = 'eq'
# contributors iexact because guid matching
if field_name == 'contributors':
if operation['value'] not in (list(), tuple()):
operation['source_field_name'] = '_contributors__guids___id'
operation['op'] = 'iexact'
if field_name == 'kind':
operation['source_field_name'] = 'is_file'
# The value should be boolean
operation['value'] = operation['value'] == 'file'
if field_name == 'bibliographic':
operation['op'] = 'exact'
if field_name == 'permission':
operation['op'] = 'exact'
if field_name == 'id':
operation['source_field_name'] = (
'guids___id'
if issubclass(self.model_class, GuidMixin)
else self.model_class.primary_identifier_name
)
operation['op'] = 'in'
if field_name == 'subjects':
self.postprocess_subject_query_param(operation)
def postprocess_subject_query_param(self, operation):
if Subject.objects.filter(_id=operation['value']).exists():
operation['source_field_name'] = 'subjects___id'
else:
operation['source_field_name'] = 'subjects__text'
operation['op'] = 'iexact'
def get_filtered_queryset(self, field_name, params, default_queryset):
"""filters default queryset based on the serializer field type"""
field = self.serializer_class._declared_fields[field_name]
source_field_name = params['source_field_name']
if isinstance(field, ser.SerializerMethodField):
return_val = [
item for item in default_queryset
if self.FILTERS[params['op']](self.get_serializer_method(field_name)(item), params['value'])
]
elif isinstance(field, ser.CharField):
if source_field_name in ('_id', 'root'):
# Param parser treats certain ID fields as bulk queries: a list of options, instead of just one
# Respect special-case behavior, and enforce exact match for these list fields.
options = set(item.lower() for item in params['value'])
return_val = [
item for item in default_queryset
if getattr(item, source_field_name, '') in options
]
else:
# TODO: What is {}.lower()? Possible bug
return_val = [
item for item in default_queryset
if params['value'].lower() in getattr(item, source_field_name, {}).lower()
]
elif isinstance(field, ser.ListField):
return_val = [
item for item in default_queryset
if params['value'].lower() in [
lowercase(i.lower) for i in getattr(item, source_field_name, [])
]
]
else:
try:
return_val = [
item for item in default_queryset
if self.FILTERS[params['op']](getattr(item, source_field_name, None), params['value'])
]
except TypeError:
raise InvalidFilterValue(detail='Could not apply filter to specified field')
return return_val
def get_serializer_method(self, field_name):
"""
:param field_name: The name of a SerializerMethodField
:return: The function attached to the SerializerMethodField to get its value
"""
serializer = self.get_serializer()
serializer_method_name = 'get_' + field_name
return getattr(serializer, serializer_method_name)
class PreprintFilterMixin(ListFilterMixin):
"""View mixin that uses ListFilterMixin, adding postprocessing for preprint querying
Subclasses must define `get_default_queryset()`.
"""
def postprocess_query_param(self, key, field_name, operation):
if field_name == 'provider':
operation['source_field_name'] = 'provider___id'
if field_name == 'id':
operation['source_field_name'] = 'guids___id'
if field_name == 'subjects':
self.postprocess_subject_query_param(operation)
def preprints_queryset(self, base_queryset, auth_user, allow_contribs=True, public_only=False):
return Preprint.objects.can_view(
base_queryset=base_queryset,
user=auth_user,
allow_contribs=allow_contribs,
public_only=public_only,
)
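# Minimal, self-contained sketch (not part of the original module) of how the
# FilterMixin.QUERY_PATTERN regex above decomposes a ?filter[...] query key.
# The field and operator names used here are hypothetical.
if __name__ == '__main__':
    _demo_pattern = re.compile(r'^filter\[(?P<fields>((?:,*\s*\w+)*))\](\[(?P<op>\w+)\])?$')
    _match = _demo_pattern.match('filter[date_created][gte]')
    assert _match.groupdict()['fields'] == 'date_created'
    assert _match.groupdict()['op'] == 'gte'
    # Without an explicit operator the 'op' group is None; the mixin then falls
    # back to a per-field default such as 'icontains' for CharField.
    assert _demo_pattern.match('filter[title]').groupdict()['op'] is None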
|
apache-2.0
|
johnstyle/ghostium
|
assets/_components/highlight.js.origin/docs/conf.py
|
6
|
7771
|
# -*- coding: utf-8 -*-
#
# highlight.js documentation build configuration file, created by
# sphinx-quickstart on Wed Sep 12 23:48:27 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'highlight.js'
copyright = u'2012, Ivan Sagalaev'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '8.4'
# The full version, including alpha/beta/rc tags.
release = '8.4'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'highlightjsdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'highlightjs.tex', u'highlight.js Documentation',
u'Ivan Sagalaev', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'highlightjs', u'highlight.js Documentation',
[u'Ivan Sagalaev'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'highlightjs', u'highlight.js Documentation',
u'Ivan Sagalaev', 'highlightjs', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
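# Illustrative note (not part of the generated config): with this conf.py in place,
# the HTML documentation would typically be built from this directory with:
#   sphinx-build -b html . _build/html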
|
mit
|
guru-digital/CouchPotatoServer
|
couchpotato/core/media/movie/providers/metadata/base.py
|
48
|
7263
|
import os
import shutil
import traceback
from couchpotato.core.event import addEvent, fireEvent
from couchpotato.core.helpers.encoding import sp
from couchpotato.core.helpers.variable import getIdentifier, underscoreToCamel
from couchpotato.core.logger import CPLog
from couchpotato.core.media._base.providers.metadata.base import MetaDataBase
from couchpotato.environment import Env
log = CPLog(__name__)
class MovieMetaData(MetaDataBase):
enabled_option = 'meta_enabled'
def __init__(self):
addEvent('renamer.after', self.create)
def create(self, message = None, group = None):
if self.isDisabled(): return
if not group: group = {}
log.info('Creating %s metadata.', self.getName())
# Update library to get latest info
try:
group['media'] = fireEvent('movie.update', group['media'].get('_id'), identifier = getIdentifier(group['media']), extended = True, single = True)
except:
log.error('Failed to update movie, before creating metadata: %s', traceback.format_exc())
root_name = self.getRootName(group)
meta_name = os.path.basename(root_name)
root = os.path.dirname(root_name)
movie_info = group['media'].get('info')
for file_type in ['nfo']:
try:
self._createType(meta_name, root, movie_info, group, file_type, 0)
except:
log.error('Unable to create %s file: %s', ('nfo', traceback.format_exc()))
for file_type in ['thumbnail', 'fanart', 'banner', 'disc_art', 'logo', 'clear_art', 'landscape', 'extra_thumbs', 'extra_fanart']:
try:
if file_type == 'thumbnail':
num_images = len(movie_info['images']['poster_original'])
elif file_type == 'fanart':
num_images = len(movie_info['images']['backdrop_original'])
else:
num_images = len(movie_info['images'][file_type])
for i in range(num_images):
self._createType(meta_name, root, movie_info, group, file_type, i)
except:
log.error('Unable to create %s file: %s', (file_type, traceback.format_exc()))
    def _createType(self, meta_name, root, movie_info, group, file_type, i):
        # Get file path
camelcase_method = underscoreToCamel(file_type.capitalize())
name = getattr(self, 'get' + camelcase_method + 'Name')(meta_name, root, i)
if name and (self.conf('meta_' + file_type) or self.conf('meta_' + file_type) is None):
# Get file content
content = getattr(self, 'get' + camelcase_method)(movie_info = movie_info, data = group, i = i)
if content:
log.debug('Creating %s file: %s', (file_type, name))
if os.path.isfile(content):
content = sp(content)
name = sp(name)
if not os.path.exists(os.path.dirname(name)):
os.makedirs(os.path.dirname(name))
shutil.copy2(content, name)
shutil.copyfile(content, name)
                    # Try and copy stats separately
try: shutil.copystat(content, name)
except: pass
else:
self.createFile(name, content)
group['renamed_files'].append(name)
try:
os.chmod(sp(name), Env.getPermission('file'))
except:
log.debug('Failed setting permissions for %s: %s', (name, traceback.format_exc()))
def getRootName(self, data = None):
if not data: data = {}
return os.path.join(data['destination_dir'], data['filename'])
def getFanartName(self, name, root, i):
return
def getThumbnailName(self, name, root, i):
return
def getBannerName(self, name, root, i):
return
def getClearArtName(self, name, root, i):
return
def getLogoName(self, name, root, i):
return
def getDiscArtName(self, name, root, i):
return
def getLandscapeName(self, name, root, i):
return
def getExtraThumbsName(self, name, root, i):
return
def getExtraFanartName(self, name, root, i):
return
def getNfoName(self, name, root, i):
return
def getNfo(self, movie_info = None, data = None, i = 0):
if not data: data = {}
if not movie_info: movie_info = {}
def getThumbnail(self, movie_info = None, data = None, wanted_file_type = 'poster_original', i = 0):
if not data: data = {}
if not movie_info: movie_info = {}
# See if it is in current files
files = data['media'].get('files')
if files.get('image_' + wanted_file_type):
if os.path.isfile(files['image_' + wanted_file_type][i]):
return files['image_' + wanted_file_type][i]
# Download using existing info
try:
images = movie_info['images'][wanted_file_type]
file_path = fireEvent('file.download', url = images[i], single = True)
return file_path
except:
pass
def getFanart(self, movie_info = None, data = None, i = 0):
if not data: data = {}
if not movie_info: movie_info = {}
return self.getThumbnail(movie_info = movie_info, data = data, wanted_file_type = 'backdrop_original', i = i)
def getBanner(self, movie_info = None, data = None, i = 0):
if not data: data = {}
if not movie_info: movie_info = {}
return self.getThumbnail(movie_info = movie_info, data = data, wanted_file_type = 'banner', i = i)
def getClearArt(self, movie_info = None, data = None, i = 0):
if not data: data = {}
if not movie_info: movie_info = {}
return self.getThumbnail(movie_info = movie_info, data = data, wanted_file_type = 'clear_art', i = i)
def getLogo(self, movie_info = None, data = None, i = 0):
if not data: data = {}
if not movie_info: movie_info = {}
return self.getThumbnail(movie_info = movie_info, data = data, wanted_file_type = 'logo', i = i)
def getDiscArt(self, movie_info = None, data = None, i = 0):
if not data: data = {}
if not movie_info: movie_info = {}
return self.getThumbnail(movie_info = movie_info, data = data, wanted_file_type = 'disc_art', i = i)
def getLandscape(self, movie_info = None, data = None, i = 0):
if not data: data = {}
if not movie_info: movie_info = {}
return self.getThumbnail(movie_info = movie_info, data= data, wanted_file_type = 'landscape', i = i)
def getExtraThumbs(self, movie_info = None, data = None, i = 0):
if not data: data = {}
if not movie_info: movie_info = {}
return self.getThumbnail(movie_info = movie_info, data = data, wanted_file_type = 'extra_thumbs', i = i)
def getExtraFanart(self, movie_info = None, data = None, i = 0):
if not data: data = {}
if not movie_info: movie_info = {}
return self.getThumbnail(movie_info = movie_info, data = data, wanted_file_type = 'extra_fanart', i = i)
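# Illustrative sketch (not part of this base class): concrete metadata providers are
# expected to override the get*Name/get* hooks above. A hypothetical Kodi-style
# provider might look like:
#
#   class Kodi(MovieMetaData):
#       def getNfoName(self, name, root, i):
#           return os.path.join(root, '%s.nfo' % name)
#       def getThumbnailName(self, name, root, i):
#           return os.path.join(root, '%s-poster.jpg' % name)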
|
gpl-3.0
|
Mozta/pagina-diagnostijuego
|
venv/lib/python2.7/site-packages/pip/commands/uninstall.py
|
798
|
2884
|
from __future__ import absolute_import
import pip
from pip.wheel import WheelCache
from pip.req import InstallRequirement, RequirementSet, parse_requirements
from pip.basecommand import Command
from pip.exceptions import InstallationError
class UninstallCommand(Command):
"""
Uninstall packages.
pip is able to uninstall most installed packages. Known exceptions are:
- Pure distutils packages installed with ``python setup.py install``, which
leave behind no metadata to determine what files were installed.
- Script wrappers installed by ``python setup.py develop``.
"""
name = 'uninstall'
usage = """
%prog [options] <package> ...
%prog [options] -r <requirements file> ..."""
summary = 'Uninstall packages.'
def __init__(self, *args, **kw):
super(UninstallCommand, self).__init__(*args, **kw)
self.cmd_opts.add_option(
'-r', '--requirement',
dest='requirements',
action='append',
default=[],
metavar='file',
help='Uninstall all the packages listed in the given requirements '
'file. This option can be used multiple times.',
)
self.cmd_opts.add_option(
'-y', '--yes',
dest='yes',
action='store_true',
help="Don't ask for confirmation of uninstall deletions.")
self.parser.insert_option_group(0, self.cmd_opts)
def run(self, options, args):
with self._build_session(options) as session:
format_control = pip.index.FormatControl(set(), set())
wheel_cache = WheelCache(options.cache_dir, format_control)
requirement_set = RequirementSet(
build_dir=None,
src_dir=None,
download_dir=None,
isolated=options.isolated_mode,
session=session,
wheel_cache=wheel_cache,
)
for name in args:
requirement_set.add_requirement(
InstallRequirement.from_line(
name, isolated=options.isolated_mode,
wheel_cache=wheel_cache
)
)
for filename in options.requirements:
for req in parse_requirements(
filename,
options=options,
session=session,
wheel_cache=wheel_cache):
requirement_set.add_requirement(req)
if not requirement_set.has_requirements:
raise InstallationError(
'You must give at least one requirement to %(name)s (see '
'"pip help %(name)s")' % dict(name=self.name)
)
requirement_set.uninstall(auto_confirm=options.yes)
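# Illustrative usage (not part of the module): this class backs invocations such as
#   pip uninstall requests
#   pip uninstall -y -r requirements.txt
# where -y skips the confirmation prompt and -r reads package names from a file.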
|
gpl-3.0
|
zstyblik/infernal-twin
|
build/pillow/PIL/ImagePath.py
|
47
|
1239
|
#
# The Python Imaging Library
# $Id$
#
# path interface
#
# History:
# 1996-11-04 fl Created
# 2002-04-14 fl Added documentation stub class
#
# Copyright (c) Secret Labs AB 1997.
# Copyright (c) Fredrik Lundh 1996.
#
# See the README file for information on usage and redistribution.
#
from PIL import Image
# the Python class below is overridden by the C implementation.
class Path(object):
def __init__(self, xy):
pass
##
# Compacts the path, by removing points that are close to each
# other. This method modifies the path in place.
def compact(self, distance=2):
pass
##
# Gets the bounding box.
def getbbox(self):
pass
##
# Maps the path through a function.
def map(self, function):
pass
##
# Converts the path to Python list.
#
# @param flat By default, this function returns a list of 2-tuples
# [(x, y), ...]. If this argument is true, it returns a flat
# list [x, y, ...] instead.
# @return A list of coordinates.
def tolist(self, flat=0):
pass
##
# Transforms the path.
def transform(self, matrix):
pass
# override with C implementation
Path = Image.core.path
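# Minimal sketch (not part of the original module): rough usage of the Path type,
# assuming Pillow is installed so that Image.core is available.
if __name__ == "__main__":
    p = Path([(0, 0), (4, 3)])  # a path made of two points
    print(p.getbbox())          # bounding box, e.g. (0.0, 0.0, 4.0, 3.0)
    print(p.tolist())           # list of 2-tuples
    print(p.tolist(1))          # flat list: [0.0, 0.0, 4.0, 3.0]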
|
gpl-3.0
|
aldenjenkins/foobargamingwebsite
|
paypal/pro/migrations/0001_initial.py
|
12
|
2843
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='PayPalNVP',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('method', models.CharField(max_length=64, blank=True)),
('ack', models.CharField(max_length=32, blank=True)),
('profilestatus', models.CharField(max_length=32, blank=True)),
('timestamp', models.DateTimeField(null=True, blank=True)),
('profileid', models.CharField(max_length=32, blank=True)),
('profilereference', models.CharField(max_length=128, blank=True)),
('correlationid', models.CharField(max_length=32, blank=True)),
('token', models.CharField(max_length=64, blank=True)),
('payerid', models.CharField(max_length=64, blank=True)),
('firstname', models.CharField(max_length=255, verbose_name='First Name', blank=True)),
('lastname', models.CharField(max_length=255, verbose_name='Last Name', blank=True)),
('street', models.CharField(max_length=255, verbose_name='Street Address', blank=True)),
('city', models.CharField(max_length=255, verbose_name='City', blank=True)),
('state', models.CharField(max_length=255, verbose_name='State', blank=True)),
('countrycode', models.CharField(max_length=2, verbose_name='Country', blank=True)),
('zip', models.CharField(max_length=32, verbose_name='Postal / Zip Code', blank=True)),
('invnum', models.CharField(max_length=255, blank=True)),
('custom', models.CharField(max_length=255, blank=True)),
('flag', models.BooleanField(default=False)),
('flag_code', models.CharField(max_length=32, blank=True)),
('flag_info', models.TextField(blank=True)),
('ipaddress', models.IPAddressField(blank=True)),
('query', models.TextField(blank=True)),
('response', models.TextField(blank=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('user', models.ForeignKey(blank=True, to=settings.AUTH_USER_MODEL, null=True)),
],
options={
'db_table': 'paypal_nvp',
'verbose_name': 'PayPal NVP',
},
bases=(models.Model,),
),
]
|
bsd-3-clause
|