| text | repo_name | path | language | license | size | score |
|---|---|---|---|---|---|---|
| stringlengths 6–947k | stringlengths 5–100 | stringlengths 4–231 | stringclasses 1 value | stringclasses 15 values | int64 6–947k | float64 0–0.34 |
# coding=utf-8
"""Definitions for basic report.
"""
from __future__ import absolute_import
from safe.utilities.i18n import tr
__copyright__ = "Copyright 2016, The InaSAFE Project"
__license__ = "GPL version 3"
__email__ = "info@inasafe.org"
__revision__ = '$Format:%H$'
# Meta description about component
# component generation type
jinja2_component_type = {
'key': 'jinja2_component_type',
'name': 'Jinja2',
'description': tr('A component that is generated using Jinja2 API.')
}
qgis_composer_component_type = {
'key': 'qgis_composer_component_type',
'name': 'QGISComposer',
'description': tr('A component that is generated using QGISComposer API.')
}
qt_renderer_component_type = {
'key': 'qt_renderer_component_type',
'name': 'QtRenderer',
'description': tr('A component that is generated using QtRenderer API.')
}
available_component_type = [
jinja2_component_type,
qgis_composer_component_type,
qt_renderer_component_type
]
# Tags
# Tags are a way to categorize different components quickly for easy
# retrieval
final_product_tag = {
'key': 'final_product_tag',
'name': tr('Final Product'),
'description': tr(
'Tag this component as a Final Product of report generation.')
}
infographic_product_tag = {
'key': 'infographic_product_tag',
'name': tr('Infographic'),
'description': tr(
'Tag this component as an Infographic related product.')
}
map_product_tag = {
'key': 'map_product_tag',
'name': tr('Map'),
'description': tr(
'Tag this component as a product mainly to show map.')
}
table_product_tag = {
'key': 'table_product_tag',
'name': tr('Table'),
'description': tr(
'Tag this component as a product mainly with table.')
}
template_product_tag = {
'key': 'template_product_tag',
'name': tr('QGIS Template'),
'description': tr(
'Tag this component as a QGIS Template product.')
}
product_type_tag = [
table_product_tag,
map_product_tag,
template_product_tag,
infographic_product_tag
]
html_product_tag = {
'key': 'html_product_tag',
'name': tr('HTML'),
'description': tr('Tag this product as HTML output.')
}
pdf_product_tag = {
'key': 'pdf_product_tag',
'name': tr('PDF'),
'description': tr('Tag this product as PDF output.')
}
qpt_product_tag = {
'key': 'qpt_product_tag',
'name': tr('QPT'),
'description': tr('Tag this product as QPT output.')
}
png_product_tag = {
'key': 'png_product_tag',
'name': tr('PNG'),
'description': tr('Tag this product as PNG output.')
}
svg_product_tag = {
'key': 'svg_product_tag',
'name': tr('SVG'),
'description': tr('Tag this product as SVG output.')
}
product_output_type_tag = [
html_product_tag,
pdf_product_tag,
qpt_product_tag,
png_product_tag,
]
| Gustry/inasafe | safe/definitions/reports/__init__.py | Python | gpl-3.0 | 2,816 | 0 |
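The definitions in this record are plain dicts and lists; a minimal sketch of how a consumer might resolve one by its `'key'` (the helper below is hypothetical, not part of the InaSAFE API):

```python
# Hypothetical helper: resolve a tag/component definition by its 'key'.
def definition_for(key, definitions):
    return next((d for d in definitions if d['key'] == key), None)

pdf = definition_for('pdf_product_tag', product_output_type_tag)
print(pdf['name'])  # the translated 'PDF' label
```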
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import sys
if (sys.version_info > (3,)):
import http.client
from http.client import BAD_REQUEST, CONFLICT, NOT_FOUND, OK
else:
import httplib
from httplib import BAD_REQUEST, CONFLICT, NOT_FOUND, OK
from flask import session, request, make_response
from flask_restful import Resource
from cairis.data.ConceptReferenceDAO import ConceptReferenceDAO
from cairis.tools.JsonConverter import json_serialize
from cairis.tools.MessageDefinitions import ConceptReferenceMessage
from cairis.tools.ModelDefinitions import ConceptReferenceModel
from cairis.tools.SessionValidator import get_session_id
__author__ = 'Shamal Faily'
class ConceptReferencesAPI(Resource):
def get(self):
session_id = get_session_id(session, request)
constraint_id = request.args.get('constraint_id', -1)
dao = ConceptReferenceDAO(session_id)
crs = dao.get_concept_references(constraint_id=constraint_id)
dao.close()
resp = make_response(json_serialize(crs, session_id=session_id))
resp.headers['Content-Type'] = "application/json"
return resp
def post(self):
session_id = get_session_id(session, request)
dao = ConceptReferenceDAO(session_id)
new_cr = dao.from_json(request)
dao.add_concept_reference(new_cr)
dao.close()
resp_dict = {'message': 'Concept Reference successfully added'}
resp = make_response(json_serialize(resp_dict, session_id=session_id), OK)
resp.headers['Content-Type'] = 'application/json'
return resp
class ConceptReferenceByNameAPI(Resource):
def get(self, name):
session_id = get_session_id(session, request)
dao = ConceptReferenceDAO(session_id)
found_cr = dao.get_concept_reference(name)
dao.close()
resp = make_response(json_serialize(found_cr, session_id=session_id))
resp.headers['Content-Type'] = "application/json"
return resp
def put(self, name):
session_id = get_session_id(session, request)
dao = ConceptReferenceDAO(session_id)
upd_cr = dao.from_json(request)
dao.update_concept_reference(upd_cr, name)
dao.close()
resp_dict = {'message': 'Concept Reference successfully updated'}
resp = make_response(json_serialize(resp_dict), OK)
resp.headers['Content-Type'] = 'application/json'
return resp
def delete(self, name):
session_id = get_session_id(session, request)
dao = ConceptReferenceDAO(session_id)
dao.delete_concept_reference(name)
dao.close()
resp_dict = {'message': 'Concept Reference successfully deleted'}
resp = make_response(json_serialize(resp_dict), OK)
resp.headers['Content-Type'] = 'application/json'
return resp
| nathanbjenx/cairis | cairis/controllers/ConceptReferenceController.py | Python | apache-2.0 | 3,396 | 0.00265 |
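These Resource subclasses only define HTTP handlers; a minimal sketch of mounting them on a Flask app with flask_restful (the URL paths are assumptions, not cairis's actual routing table):

```python
from flask import Flask
from flask_restful import Api

app = Flask(__name__)
api = Api(app)

# Paths are illustrative only; cairis wires these up in its own routing module.
api.add_resource(ConceptReferencesAPI, '/api/concept_references')
api.add_resource(ConceptReferenceByNameAPI,
                 '/api/concept_references/name/<string:name>')
```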
import json
import os
import shutil
import zipfile
from build import cd
def create_template(name, path, **kw):
os.makedirs(os.path.join(path, 'module'))
with open(os.path.join(path, 'module', 'manifest.json'), 'w') as manifest_file:
manifest = {
"name": name,
"version": "0.1",
"description": "My module template"
}
with open(os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'platform_version.txt'))) as platform_version_file:
manifest['platform_version'] = platform_version_file.read()
json.dump(manifest, manifest_file, indent=4, sort_keys=True)
# Copy template module
template_path = os.path.abspath(os.path.join(os.path.split(__file__)[0], 'templatemodule'))
for root, dirnames, filenames in os.walk(template_path):
for filename in filenames:
relative_path = os.path.join(root, filename)[len(template_path)+1:]
with open(os.path.join(root, filename), 'r') as source:
lines = source.readlines()
new_dir = os.path.split(os.path.join(path, 'module', relative_path.replace('templatemodule', name)))[0]
if not os.path.isdir(new_dir):
os.makedirs(new_dir)
with open(os.path.join(path, 'module', relative_path.replace('templatemodule', name)), 'w') as output:
for line in lines:
output.write(line.replace('templatemodule', name))
return load(path, manifest)
def load(path, manifest, **kw):
module_model = {}
module_model['local_path'] = path
module_model['module_dynamic_path'] = os.path.join(path, ".trigger", "module_dynamic")
module_model['files'] = {
'manifest': os.path.join(path, 'module', 'manifest.json'),
'module_structure': os.path.join(path, ".trigger", "schema", "module_structure.json")
}
module_model['rawfiles'] = {
'dynamic_platform_version': os.path.join(path, ".trigger", "platform_version.txt")
}
module_model['directories'] = {
'module_directory': os.path.join(path, 'module')
}
return module_model
def create_upload_zip(path, subdirs = [], **kw):
module_path = os.path.abspath(os.path.join(path, 'module'))
zip_base = os.path.abspath(os.path.join(path, '.trigger', 'upload_tmp'))
if os.path.exists(zip_base+".zip"):
os.unlink(zip_base+".zip")
if len(subdirs):
zip_path = _make_partial_archive(zip_base, subdirs, root_dir=module_path)
else:
zip_path = shutil.make_archive(zip_base, 'zip', root_dir=module_path)
return zip_path
def _make_partial_archive(zip_base, subdirs, root_dir):
zip = zipfile.ZipFile(zip_base + ".zip", "w")
with cd(root_dir):
for subdir in subdirs:
if not os.path.exists(subdir):
continue
for root, dirs, files in os.walk(subdir):
for file in files:
zip.write(os.path.join(root, file))
zip.close()
return zip_base + ".zip"
| mnaughto/trigger-statusbar | .trigger/module_dynamic/module.py | Python | mit | 2,707 | 0.026967 |
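A hedged sketch of how these helpers chain together, assuming a writable scratch directory and an existing `.trigger` folder for the zip step (paths illustrative):

```python
# create_template() writes module/manifest.json plus a renamed copy of
# 'templatemodule', then returns the module model dict from load().
model = create_template('mymodule', '/tmp/mymodule')
print(model['directories']['module_directory'])  # /tmp/mymodule/module

# Zip the whole module, or only selected subdirectories of it.
full_zip = create_upload_zip('/tmp/mymodule')
partial_zip = create_upload_zip('/tmp/mymodule', subdirs=['js', 'native'])
```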
"""
WSGI config for server_admin project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os, sys
sys.path.append('/home/terrywong/server_admin')
sys.path.append('/home/terrywong/server_admin/server_admin')
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "server_admin.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "server_admin.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| KungFuLucky7/server_admin | server_admin/wsgi.py | Python | gpl-2.0 | 1,551 | 0.001289 |
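The commented-out tail of this file shows where middleware wrapping would go; a minimal sketch of a custom WSGI wrapper (class name and header are invented):

```python
class HeaderInjectionMiddleware(object):
    """Hypothetical middleware: append a header to every response."""
    def __init__(self, wrapped):
        self.wrapped = wrapped

    def __call__(self, environ, start_response):
        def _start_response(status, headers, exc_info=None):
            headers.append(('X-Served-By', 'server_admin'))
            return start_response(status, headers, exc_info)
        return self.wrapped(environ, _start_response)

# application = HeaderInjectionMiddleware(application)
```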
from __future__ import absolute_import
import functools
import logging
import six
import time
from datetime import datetime, timedelta
from django.conf import settings
from django.utils.http import urlquote
from django.views.decorators.csrf import csrf_exempt
from enum import Enum
from pytz import utc
from rest_framework.authentication import SessionAuthentication
from rest_framework.parsers import JSONParser
from rest_framework.renderers import JSONRenderer
from rest_framework.response import Response
from rest_framework.views import APIView
from sentry import tsdb
from sentry.app import raven
from sentry.auth import access
from sentry.models import Environment
from sentry.utils.cursors import Cursor
from sentry.utils.dates import to_datetime
from sentry.utils.http import absolute_uri, is_valid_origin
from sentry.utils.audit import create_audit_entry
from .authentication import ApiKeyAuthentication, TokenAuthentication
from .paginator import Paginator
from .permissions import NoPermission
__all__ = ['DocSection', 'Endpoint', 'EnvironmentMixin', 'StatsMixin']
ONE_MINUTE = 60
ONE_HOUR = ONE_MINUTE * 60
ONE_DAY = ONE_HOUR * 24
LINK_HEADER = '<{uri}&cursor={cursor}>; rel="{name}"; results="{has_results}"; cursor="{cursor}"'
DEFAULT_AUTHENTICATION = (
TokenAuthentication, ApiKeyAuthentication, SessionAuthentication, )
logger = logging.getLogger(__name__)
audit_logger = logging.getLogger('sentry.audit.api')
class DocSection(Enum):
ACCOUNTS = 'Accounts'
EVENTS = 'Events'
ORGANIZATIONS = 'Organizations'
PROJECTS = 'Projects'
RELEASES = 'Releases'
TEAMS = 'Teams'
class Endpoint(APIView):
authentication_classes = DEFAULT_AUTHENTICATION
renderer_classes = (JSONRenderer, )
parser_classes = (JSONParser, )
permission_classes = (NoPermission, )
def build_cursor_link(self, request, name, cursor):
querystring = u'&'.join(
u'{0}={1}'.format(urlquote(k), urlquote(v)) for k, v in six.iteritems(request.GET)
if k != 'cursor'
)
base_url = absolute_uri(urlquote(request.path))
if querystring:
base_url = '{0}?{1}'.format(base_url, querystring)
else:
base_url = base_url + '?'
return LINK_HEADER.format(
uri=base_url,
cursor=six.text_type(cursor),
name=name,
has_results='true' if bool(cursor) else 'false',
)
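# Example of a rendered LINK_HEADER value from build_cursor_link
# (URI and cursor are illustrative):
#   <https://sentry.example.com/api/0/issues/?&cursor=1500:0:1>; rel="next"; results="true"; cursor="1500:0:1"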
def convert_args(self, request, *args, **kwargs):
return (args, kwargs)
def handle_exception(self, request, exc):
try:
response = super(Endpoint, self).handle_exception(exc)
except Exception as exc:
import sys
import traceback
sys.stderr.write(traceback.format_exc())
event_id = raven.captureException(request=request)
context = {
'detail': 'Internal Error',
'errorId': event_id,
}
response = Response(context, status=500)
response.exception = True
return response
def create_audit_entry(self, request, transaction_id=None, **kwargs):
return create_audit_entry(request, transaction_id, audit_logger, **kwargs)
def initialize_request(self, request, *args, **kwargs):
rv = super(Endpoint, self).initialize_request(request, *args, **kwargs)
# If our request is being made via our internal API client, we need to
# stitch back on auth and user information
if getattr(request, '__from_api_client__', False):
if rv.auth is None:
rv.auth = getattr(request, 'auth', None)
if rv.user is None:
rv.user = getattr(request, 'user', None)
return rv
@csrf_exempt
def dispatch(self, request, *args, **kwargs):
"""
Identical to rest framework's dispatch except we add the ability
to convert arguments (for common URL params).
"""
self.args = args
self.kwargs = kwargs
request = self.initialize_request(request, *args, **kwargs)
self.request = request
self.headers = self.default_response_headers # deprecate?
if settings.SENTRY_API_RESPONSE_DELAY:
time.sleep(settings.SENTRY_API_RESPONSE_DELAY / 1000.0)
origin = request.META.get('HTTP_ORIGIN', 'null')
# A "null" value should be treated as no Origin for us.
# See RFC6454 for more information on this behavior.
if origin == 'null':
origin = None
try:
if origin and request.auth:
allowed_origins = request.auth.get_allowed_origins()
if not is_valid_origin(origin, allowed=allowed_origins):
response = Response('Invalid origin: %s' %
(origin, ), status=400)
self.response = self.finalize_response(
request, response, *args, **kwargs)
return self.response
self.initial(request, *args, **kwargs)
if getattr(request, 'user', None) and request.user.is_authenticated():
raven.user_context({
'id': request.user.id,
'username': request.user.username,
'email': request.user.email,
})
# Get the appropriate handler method
if request.method.lower() in self.http_method_names:
handler = getattr(self, request.method.lower(),
self.http_method_not_allowed)
(args, kwargs) = self.convert_args(request, *args, **kwargs)
self.args = args
self.kwargs = kwargs
else:
handler = self.http_method_not_allowed
if getattr(request, 'access', None) is None:
# setup default access
request.access = access.from_request(request)
response = handler(request, *args, **kwargs)
except Exception as exc:
response = self.handle_exception(request, exc)
if origin:
self.add_cors_headers(request, response)
self.response = self.finalize_response(
request, response, *args, **kwargs)
return self.response
def add_cors_headers(self, request, response):
response['Access-Control-Allow-Origin'] = request.META['HTTP_ORIGIN']
response['Access-Control-Allow-Methods'] = ', '.join(
self.http_method_names)
def add_cursor_headers(self, request, response, cursor_result):
if cursor_result.hits is not None:
response['X-Hits'] = cursor_result.hits
if cursor_result.max_hits is not None:
response['X-Max-Hits'] = cursor_result.max_hits
response['Link'] = ', '.join(
[
self.build_cursor_link(
request, 'previous', cursor_result.prev),
self.build_cursor_link(request, 'next', cursor_result.next),
]
)
def respond(self, context=None, **kwargs):
return Response(context, **kwargs)
def paginate(
self, request, on_results=None, paginator=None,
paginator_cls=Paginator, default_per_page=100, **paginator_kwargs
):
assert (paginator and not paginator_kwargs) or (paginator_cls and paginator_kwargs)
per_page = int(request.GET.get('per_page', default_per_page))
input_cursor = request.GET.get('cursor')
if input_cursor:
input_cursor = Cursor.from_string(input_cursor)
else:
input_cursor = None
assert per_page <= max(100, default_per_page)
if not paginator:
paginator = paginator_cls(**paginator_kwargs)
cursor_result = paginator.get_result(
limit=per_page,
cursor=input_cursor,
)
# map results based on callback
if on_results:
results = on_results(cursor_result.results)
response = Response(results)
self.add_cursor_headers(request, response, cursor_result)
return response
class EnvironmentMixin(object):
def _get_environment_func(self, request, organization_id):
"""\
Creates a function that when called returns the ``Environment``
associated with a request object, or ``None`` if no environment was
provided. If the environment doesn't exist, an ``Environment.DoesNotExist``
exception will be raised.
This returns as a callable since some objects outside of the API
endpoint need to handle the "environment was provided but does not
exist" state in addition to the two non-exceptional states (the
environment was provided and exists, or the environment was not
provided.)
"""
return functools.partial(
self._get_environment_from_request,
request,
organization_id,
)
def _get_environment_id_from_request(self, request, organization_id):
environment = self._get_environment_from_request(request, organization_id)
return environment and environment.id
def _get_environment_from_request(self, request, organization_id):
if not hasattr(request, '_cached_environment'):
environment_param = request.GET.get('environment')
if environment_param is None:
environment = None
else:
environment = Environment.get_for_organization_id(
name=environment_param,
organization_id=organization_id,
)
request._cached_environment = environment
return request._cached_environment
class StatsMixin(object):
def _parse_args(self, request, environment_id=None):
resolution = request.GET.get('resolution')
if resolution:
resolution = self._parse_resolution(resolution)
assert resolution in tsdb.get_rollups()
end = request.GET.get('until')
if end:
end = to_datetime(float(end))
else:
end = datetime.utcnow().replace(tzinfo=utc)
start = request.GET.get('since')
if start:
start = to_datetime(float(start))
assert start <= end, 'start must be before or equal to end'
else:
start = end - timedelta(days=1, seconds=-1)
return {
'start': start,
'end': end,
'rollup': resolution,
'environment_id': environment_id,
}
def _parse_resolution(self, value):
if value.endswith('h'):
return int(value[:-1]) * ONE_HOUR
elif value.endswith('d'):
return int(value[:-1]) * ONE_DAY
elif value.endswith('m'):
return int(value[:-1]) * ONE_MINUTE
elif value.endswith('s'):
return int(value[:-1])
else:
raise ValueError(value)
| looker/sentry | src/sentry/api/base.py | Python | bsd-3-clause | 11,066 | 0.000633 |
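The `paginate` helper above is the workhorse for list endpoints; a hedged sketch of a subclass using it (the model and serializer are invented, and the extra keyword arguments are simply forwarded to `Paginator`):

```python
from sentry.api.base import Endpoint

class WidgetIndexEndpoint(Endpoint):  # hypothetical endpoint
    def get(self, request):
        # Keyword arguments beyond request/on_results become
        # paginator_kwargs and are passed to Paginator(...).
        return self.paginate(
            request=request,
            queryset=Widget.objects.all(),   # invented model
            order_by='-date_added',
            on_results=lambda results: [serialize(w) for w in results],
        )
```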
import json
from PIL import Image
import collections
with open('../config/nodes.json') as data_file:
nodes = json.load(data_file)
# pre-allocate one slot per node
ordered_nodes = [None] * len(nodes)
# populate slots from the index -> position mapping
for i, pos in nodes.items():
ordered_nodes[int(i)] = [pos['x'], pos['y']]
filename = "04_rgb_vertical_lines"
im = Image.open("../gif_generators/output/"+filename+".gif") #Can be many different formats.
target_size = 400, 400
resize = False
if target_size != im.size:
resize = True
data = []
# To iterate through the entire gif
try:
frame_num = 0
while True:
im.seek(frame_num)
frame_data = []
# do something to im
img = im.convert('RGB')
if resize == True:
print "Resizing"
img.thumbnail(target_size, Image.ANTIALIAS)
for x, y in ordered_nodes:
frame_data.append(img.getpixel((x, y)))
#print r, g, b
data.append(frame_data)
# write to json
print frame_num
frame_num+=1
except EOFError:
pass # end of sequence
#print data
#print r, g, b
with open(filename+'.json', 'w') as outfile:
json.dump({
"meta": {},
"data": data
}, outfile)
print im.size #Get the width and hight of the image for iterating over
#print pix[,y] #Get the RGBA Value of the a pixel of an image
| Ibuprofen/gizehmoviepy | gif_parsers/read_rgb.py | Python | mit | 1,281 | 0.014832 |
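The reader assumes `../config/nodes.json` maps stringified indices to pixel coordinates; a minimal fixture that satisfies the loop over `nodes.items()` (coordinates and output path illustrative):

```python
import json

nodes = {
    "0": {"x": 10, "y": 20},
    "1": {"x": 200, "y": 20},
    "2": {"x": 105, "y": 350},
}
with open('nodes.json', 'w') as f:
    json.dump(nodes, f)
```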
import os.path
from crumbs.utils.bin_utils import create_get_binary_path
from bam_crumbs.settings import get_setting
BIN_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__),
'..', '..', 'bin'))
get_binary_path = create_get_binary_path(os.path.split(__file__)[0],
get_setting)
| pziarsolo/bam_crumbs | bam_crumbs/utils/bin.py | Python | gpl-3.0 | 370 | 0 |
from django.contrib import admin
from .models import Line
# Register your models here.
admin.site.register(Line)
| MirzaBaig715/DjangoURLShortner | urlshortenerapp/admin.py | Python | mit | 114 | 0 |
"""
Runs peaktrough.py, which generates Cooley-Rupert figures for specified
series from FRED.
Execute peaktrough.py first, then run this program.
Written by Dave Backus under the watchful eye of Chase Coleman and Spencer Lyon
Date: July 10, 2014
"""
# import functions from peaktrough.py. * means all of them
# generates the msg "UMD has deleted: peaktrough" which means it reloads
from peaktrough import *
# do plots one at a time
manhandle_freddata("GDPC1", saveshow="show")
print("aaaa")
# do plots all at once with map
fred_series = ["GDPC1", "PCECC96", "GPDIC96", "OPHNFB"]
# uses default saveshow parameter
gdpc1, pcecc96, gpdic96, ophnfb = map(manhandle_freddata, fred_series)
print("xxxx")
# lets us change saveshow parameter
gdpc1, pcecc96, gpdic96, ophnfb = map(lambda s:
manhandle_freddata(s, saveshow="save"), fred_series)
print("yyyy")
# skip lhs (this doesn't seem to work, not sure why)
map(lambda s:
manhandle_freddata(s, saveshow="show"), fred_series)
print("zzzz")
| DaveBackus/Data_Bootcamp | Code/Lab/fred_CooleyRupert_run.py | Python | mit | 1,002 | 0.001996 |
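The closing comment ("this doesn't seem to work, not sure why") has a likely explanation: under Python 3, `map` returns a lazy iterator, so the final call never runs unless it is consumed (the earlier tuple-unpacked `map` calls are consumed by the assignment). A sketch of the fix, assuming Python 3:

```python
# Wrapping in list() forces evaluation, so manhandle_freddata is called:
list(map(lambda s: manhandle_freddata(s, saveshow="show"), fred_series))
```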
# -*- coding: utf-8 -*-
from django.apps import AppConfig
class PostsConfig(AppConfig):
name = 'posts'
verbose_name = '图片列表'
| mittya/duoclub | duoclub/posts/apps.py | Python | mit | 144 | 0 |
#!/usr/bin/env python
#
# PyGab - Python Jabber Framework
# Copyright (c) 2008, Patrick Kennedy
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# - Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# - Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from common import utils
from core.mounts import *
try:
exec(utils.get_import(
mod=utils.get_module(), from_=['mounts'], import_=['*']))
except ImportError, e:
# If the bot module doesn't overwrite anything, no problem.
pass
#raise
| PatrickKennedy/pygab | common/mounts.py | Python | bsd-2-clause | 1,656 | 0.006039 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from google.appengine.api.app_identity.app_identity import get_default_gcs_bucket_name
from google.appengine.ext.blobstore import blobstore
from blob_app import blob_facade
from config.template_middleware import TemplateResponse
from gaecookie.decorator import no_csrf
from tekton import router
from routes.updown import upload, download
from tekton.gae.middleware.redirect import RedirectResponse
@no_csrf
def index(_logged_user):
success_url = router.to_path(upload)
bucket = get_default_gcs_bucket_name()
url = blobstore.create_upload_url(success_url, gs_bucket_name=bucket)
cmd = blob_facade.list_blob_files_cmd(_logged_user)
blob_files = cmd()
delete_path = router.to_path(delete)
download_path = router.to_path(download)
blob_file_form = blob_facade.blob_file_form()
def localize_blob_file(blob_file):
blob_file_dct = blob_file_form.fill_with_model(blob_file, 64)
blob_file_dct['delete_path'] = router.to_path(delete_path, blob_file_dct['id'])
blob_file_dct['download_path'] = router.to_path(download_path,
blob_file.blob_key,
blob_file_dct['filename'])
return blob_file_dct
localized_blob_files = [localize_blob_file(blob_file) for blob_file in blob_files]
context = {'upload_url': url,
'blob_files': localized_blob_files}
return TemplateResponse(context, 'updown/home.html')
def delete(blob_file_id):
blob_facade.delete_blob_file_cmd(blob_file_id).execute()
return RedirectResponse(router.to_path(index))
| renzon/appengineepython | backend/appengine/routes/updown/home.py | Python | mit | 1,716 | 0.002914 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests that all containers are imported correctly."""
import unittest
from tests import test_lib
class ContainersImportTest(test_lib.ImportCheckTestCase):
"""Tests that container classes are imported correctly."""
_IGNORABLE_FILES = frozenset(['manager.py', 'interface.py'])
def testContainersImported(self):
"""Tests that all parsers are imported."""
self._AssertFilesImportedInInit(
test_lib.CONTAINERS_PATH, self._IGNORABLE_FILES)
if __name__ == '__main__':
unittest.main()
| kiddinn/plaso | tests/containers/init_imports.py | Python | apache-2.0 | 558 | 0.007168 |
#!/usr/bin/env python3
import unittest
from gppylib.operations.test.regress.test_package import GppkgTestCase, GppkgSpec, BuildGppkg, RPMSpec, BuildRPM, run_command, run_remote_command
class SimpleGppkgTestCase(GppkgTestCase):
"""Covers simple build/install/remove/update test cases"""
def test00_simple_build(self):
self.build(self.alpha_spec, self.A_spec)
def test01_simple_install(self):
gppkg_file = self.alpha_spec.get_filename()
self.install(gppkg_file)
#Check RPM database
self.check_rpm_install(self.A_spec.get_package_name())
def test02_simple_update(self):
gppkg_file = self.alpha_spec.get_filename()
self.install(gppkg_file)
update_rpm_spec = RPMSpec("A", "1", "2")
update_gppkg_spec = GppkgSpec("alpha", "1.1")
update_gppkg_file = self.build(update_gppkg_spec, update_rpm_spec)
self.update(update_gppkg_file)
#Check for the packages
self.check_rpm_install(update_rpm_spec.get_package_name())
def test03_simple_uninstall(self):
gppkg_file = self.alpha_spec.get_filename()
self.install(gppkg_file)
self.remove(gppkg_file)
results = run_command("gppkg -q --all")
results = results.split('\n')[self.start_output:self.end_output]
self.assertEqual(results, [])
def test04_help(self):
help_options = ["--help", "-h", "-?"]
for opt in help_options:
results = run_command("gppkg " + opt)
self.assertNotEqual(results, "")
def test05_version(self):
results = run_command("gppkg --version")
self.assertNotEqual(results, "")
if __name__ == "__main__":
unittest.main()
| 50wu/gpdb | gpMgmt/bin/gppylib/operations/test/regress/test_package/test_regress_simple_gppkg.py | Python | apache-2.0 | 1,745 | 0.006304 |
"""
Test basic std::vector functionality but with a declaration from
the debug info (the Foo struct) as content.
"""
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestDbgInfoContentVector(TestBase):
mydir = TestBase.compute_mydir(__file__)
# FIXME: This should work on more setups, so remove these
# skipIf's in the future.
@add_test_categories(["libc++"])
@skipIf(compiler=no_match("clang"))
@skipIf(oslist=no_match(["linux"]))
@skipIf(debug_info=no_match(["dwarf"]))
def test(self):
self.build()
lldbutil.run_to_source_breakpoint(self,
"// Set break point at this line.", lldb.SBFileSpec("main.cpp"))
self.runCmd("settings set target.import-std-module true")
self.expect("expr (size_t)a.size()", substrs=['(size_t) $0 = 3'])
self.expect("expr (int)a.front().a", substrs=['(int) $1 = 3'])
self.expect("expr (int)a[1].a", substrs=['(int) $2 = 1'])
self.expect("expr (int)a.back().a", substrs=['(int) $3 = 2'])
self.expect("expr std::reverse(a.begin(), a.end())")
self.expect("expr (int)a.front().a", substrs=['(int) $4 = 2'])
self.expect("expr (int)(a.begin()->a)", substrs=['(int) $5 = 2'])
self.expect("expr (int)(a.rbegin()->a)", substrs=['(int) $6 = 3'])
self.expect("expr a.pop_back()")
self.expect("expr (int)a.back().a", substrs=['(int) $7 = 1'])
self.expect("expr (size_t)a.size()", substrs=['(size_t) $8 = 2'])
self.expect("expr (int)a.at(0).a", substrs=['(int) $9 = 2'])
self.expect("expr a.push_back({4})")
self.expect("expr (int)a.back().a", substrs=['(int) $10 = 4'])
self.expect("expr (size_t)a.size()", substrs=['(size_t) $11 = 3'])
| apple/swift-lldb | packages/Python/lldbsuite/test/commands/expression/import-std-module/vector-dbg-info-content/TestDbgInfoContentVector.py | Python | apache-2.0 | 1,822 | 0.001098 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
import inselect
REQUIREMENTS = [
# TODO How to specify OpenCV? 'cv2>=3.1.0',
'numpy>=1.11.1,<1.12',
'Pillow>=3.4.2,<3.5',
'python-dateutil>=2.6.0,<2.7',
'pytz>=2016.7',
'PyYAML>=3.12,<3.13',
'schematics>=1.1.1,<1.2',
'scikit-learn>=0.18.1,<0.19',
'scipy>=0.18.1,<0.19',
'unicodecsv>=0.14.1,<0.15',
]
SCRIPTS = ('export_metadata', 'ingest', 'read_barcodes', 'save_crops', 'segment')
setup_data = {
'name': 'inselect',
'version': inselect.__version__,
'author': (u'Lawrence Hudson, Alice Heaton, Pieter Holtzhausen, '
u'Stéfan van der Walt'),
'author_email': 'l.hudson@nhm.ac.uk',
'maintainer': 'Lawrence Hudson',
'maintainer_email': 'l.hudson@nhm.ac.uk',
'url': 'https://github.com/NaturalHistoryMuseum/inselect/',
'license': 'Modified BSD',
'description': inselect.__doc__,
'long_description': inselect.__doc__,
'packages': [
'inselect', 'inselect.gui', 'inselect.gui.plugins',
'inselect.gui.views', 'inselect.gui.views.boxes', 'inselect.lib',
'inselect.lib.templates', 'inselect.scripts',
],
'include_package_data': True,
'test_suite': 'inselect.tests',
'scripts': ['inselect/scripts/{0}.py'.format(script) for script in SCRIPTS],
'install_requires': REQUIREMENTS,
'extras_require': {
'gui': [
'ExifRead>=2.1.2', 'humanize>=0.5.1', 'psutil>=5.0.0',
'PyQt5>=5.6.0'
],
'barcodes': ['gouda>=0.1.13', 'pylibdmtx>=0.1.6', 'pyzbar>=0.1.3'],
'windows': ['pywin32>=220'],
'development': ['coveralls>=1.1', 'mock>=2.0.0', 'nose>=1.3.7'],
},
'entry_points': {
'gui_scripts':
['inselect = inselect.gui.app:main'],
'console_scripts':
['{0} = inselect.scripts.{0}:main'.format(script) for script in SCRIPTS],
},
'classifiers': [
'Development Status :: 4 - Beta',
'Topic :: Utilities',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Programming Language :: Python :: 3.5',
],
}
def setuptools_setup():
"""setuptools setup"""
from setuptools import setup
setup(**setup_data)
def _qt_files(site_packages):
"""Returns a list of tuples (src, dest) of Qt dependencies to be installed.
Elements are instances of Path.
site_packages should be an instance of Path to the site-packages directory.
IF we leave cx_Freeze to do its thing then the entirety of PyQt5, Qt5 and
uic are included in the installer. The only way to avoid horrible bloat is
to hand-tune which files we include.
This whole system is fucked beyond belief.
"""
from pathlib import Path
return [
# Qt DLLs
(
site_packages.joinpath('PyQt5/Qt/bin').joinpath(dep),
dep
)
for dep in ('Qt5Core.dll', 'Qt5Gui.dll', 'Qt5Widgets.dll')
] + [
# Qt plugins
(
site_packages.joinpath('PyQt5/Qt/plugins/platforms').joinpath(dep),
Path('platforms').joinpath(dep)
)
for dep in ('qwindows.dll',)
] + [
# PyQt extension modules
(
site_packages.joinpath('PyQt5').joinpath(dep),
Path('PyQt5').joinpath(dep)
)
for dep in ('__init__.py', 'Qt.pyd', 'QtCore.pyd', 'QtGui.pyd', 'QtWidgets.pyd')
]
def cx_setup():
"""cx_Freeze setup. Used for building Windows installers"""
import scipy
from pathlib import Path
from distutils.sysconfig import get_python_lib
from cx_Freeze import setup, Executable
from pylibdmtx import pylibdmtx
from pyzbar import pyzbar
# Useful paths
environment_root = Path(sys.executable).parent
site_packages = Path(get_python_lib())
project_root = Path(__file__).parent
# Files as tuples (source, dest)
include_files = [
# Evil, evil, evil
# cx_Freeze breaks pywintypes and pythoncom on Python 3.5
# https://bitbucket.org/anthony_tuininga/cx_freeze/issues/194/error-with-frozen-executable-using-35-and
(site_packages.joinpath('win32/lib/pywintypes.py'), 'pywintypes.py'),
(site_packages.joinpath('pythoncom.py'), 'pythoncom.py'),
# Binary dependencies that are not detected
(environment_root.joinpath('Library/bin/mkl_core.dll'), 'mkl_core.dll'),
(environment_root.joinpath('Library/bin/mkl_intel_thread.dll'), 'mkl_intel_thread.dll'),
(environment_root.joinpath('Library/bin/libiomp5md.dll'), 'libiomp5md.dll'),
# Stylesheet
(project_root.joinpath('inselect/gui/inselect.qss'), 'inselect.qss'),
] + [
# DLLs that are not detected because they are loaded by ctypes
(dep._name, Path(dep._name).name)
for dep in pylibdmtx.EXTERNAL_DEPENDENCIES + pyzbar.EXTERNAL_DEPENDENCIES
] + _qt_files(site_packages)
# Convert instances of Path to strs
include_files = [(str(source), str(dest)) for source, dest in include_files]
# Directories as strings
include_files += [
# Fixes scipy freeze
# http://stackoverflow.com/a/32822431/1773758
str(Path(scipy.__file__).parent),
]
# Packages to exclude.
exclude_packages = [
str(p.relative_to(site_packages)).replace('\\', '.') for p in
site_packages.rglob('*/tests')
]
setup(
name=setup_data['name'],
version=setup_data['version'],
options={
'build_exe': {
'packages':
setup_data.get('packages', []) + [
'urllib', 'sklearn.neighbors', 'win32com.gen_py',
'win32timezone',
],
'excludes': [
# '_bz2', # Required by sklearn
'_decimal', '_elementtree', '_hashlib', '_lzma',
'_ssl', 'curses',
'distutils', 'email', 'http', 'lib2to3', 'mock', 'nose',
'PyQt5',
# 'pydoc', # Required by sklearn
'tcl', 'Tkinter', 'ttk', 'Tkconstants',
# 'unittest', # Required by numpy.core.multiarray
'win32com.HTML', 'win32com.test', 'win32evtlog', 'win32pdh',
'win32trace', 'win32ui', 'win32wnet',
'xml', 'xmlrpc',
'inselect.tests',
] + exclude_packages,
'includes': [
],
'include_files': include_files,
'include_msvcr': True,
'optimize': 2,
},
'bdist_msi': {
'upgrade_code': '{fe2ed61d-cd5e-45bb-9d16-146f725e522f}'
}
},
executables=[
Executable(
script='inselect/scripts/inselect.py',
targetName='inselect.exe',
icon='icons/inselect.ico',
base='Win32GUI',
shortcutName='Inselect', # See http://stackoverflow.com/a/15736406
shortcutDir='ProgramMenuFolder'
)
] + [
Executable(
script='inselect/scripts/{0}.py'.format(script),
targetName='{0}.exe'.format(script),
icon='icons/inselect.ico',
base='Console'
)
for script in SCRIPTS
],
)
if (3, 5) <= sys.version_info:
if 'bdist_msi' in sys.argv:
cx_setup()
else:
setuptools_setup()
else:
sys.exit('Only Python >= 3.5 is supported')
| NaturalHistoryMuseum/inselect | setup.py | Python | bsd-3-clause | 7,651 | 0.001699 |
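After the `Path`-to-`str` conversion in `cx_setup`, `include_files` is a list of plain `(source, dest)` string pairs; an illustrative pair of entries produced by `_qt_files` (local site-packages paths will differ):

```python
# (source in site-packages, destination relative to the frozen app)
include_files = [
    (r'C:\envs\inselect\Lib\site-packages\PyQt5\Qt\bin\Qt5Core.dll',
     'Qt5Core.dll'),
    (r'C:\envs\inselect\Lib\site-packages\PyQt5\Qt\plugins\platforms\qwindows.dll',
     r'platforms\qwindows.dll'),
]
```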
# -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import datetime
from helpers import unittest
from datetime import timedelta
import luigi
import luigi.date_interval
import luigi.interface
import luigi.notifications
from helpers import with_config
from luigi.mock import MockTarget, MockFileSystem
from luigi.parameter import ParameterException
from worker_test import email_patch
luigi.notifications.DEBUG = True
class A(luigi.Task):
p = luigi.IntParameter()
class WithDefault(luigi.Task):
x = luigi.Parameter(default='xyz')
class Foo(luigi.Task):
bar = luigi.Parameter()
p2 = luigi.IntParameter()
multi = luigi.Parameter(is_list=True)
not_a_param = "lol"
class Bar(luigi.Task):
multibool = luigi.BoolParameter(is_list=True)
def run(self):
Bar._val = self.multibool
class Baz(luigi.Task):
bool = luigi.BoolParameter()
def run(self):
Baz._val = self.bool
class ForgotParam(luigi.Task):
param = luigi.Parameter()
def run(self):
pass
class ForgotParamDep(luigi.Task):
def requires(self):
return ForgotParam()
def run(self):
pass
class HasGlobalParam(luigi.Task):
x = luigi.Parameter()
global_param = luigi.IntParameter(is_global=True, default=123) # global parameters need default values
global_bool_param = luigi.BoolParameter(is_global=True, default=False)
def run(self):
self.complete = lambda: True
def complete(self):
return False
class HasGlobalParamDep(luigi.Task):
x = luigi.Parameter()
def requires(self):
return HasGlobalParam(self.x)
_shared_global_param = luigi.Parameter(is_global=True, default='123')
class SharedGlobalParamA(luigi.Task):
shared_global_param = _shared_global_param
class SharedGlobalParamB(luigi.Task):
shared_global_param = _shared_global_param
class BananaDep(luigi.Task):
x = luigi.Parameter()
y = luigi.Parameter(default='def')
def output(self):
return MockTarget('banana-dep-%s-%s' % (self.x, self.y))
def run(self):
self.output().open('w').close()
class Banana(luigi.Task):
x = luigi.Parameter()
y = luigi.Parameter()
style = luigi.Parameter(default=None)
def requires(self):
if self.style is None:
return BananaDep() # will fail
elif self.style == 'x-arg':
return BananaDep(self.x)
elif self.style == 'y-kwarg':
return BananaDep(y=self.y)
elif self.style == 'x-arg-y-arg':
return BananaDep(self.x, self.y)
else:
raise Exception('unknown style')
def output(self):
return MockTarget('banana-%s-%s' % (self.x, self.y))
def run(self):
self.output().open('w').close()
class MyConfig(luigi.Config):
mc_p = luigi.IntParameter()
mc_q = luigi.IntParameter(default=73)
class MyConfigWithoutSection(luigi.Config):
use_cmdline_section = False
mc_r = luigi.IntParameter()
mc_s = luigi.IntParameter(default=99)
class NoopTask(luigi.Task):
pass
class ParameterTest(unittest.TestCase):
def setUp(self):
super(ParameterTest, self).setUp()
# Need to restore some defaults for the global params since they are overriden
HasGlobalParam.global_param.set_global(123)
HasGlobalParam.global_bool_param.set_global(False)
def test_default_param(self):
self.assertEqual(WithDefault().x, 'xyz')
def test_missing_param(self):
def create_a():
return A()
self.assertRaises(luigi.parameter.MissingParameterException, create_a)
def test_unknown_param(self):
def create_a():
return A(p=5, q=4)
self.assertRaises(luigi.parameter.UnknownParameterException, create_a)
def test_unknown_param_2(self):
def create_a():
return A(1, 2, 3)
self.assertRaises(luigi.parameter.UnknownParameterException, create_a)
def test_duplicated_param(self):
def create_a():
return A(5, p=7)
self.assertRaises(luigi.parameter.DuplicateParameterException, create_a)
def test_parameter_registration(self):
self.assertEqual(len(Foo.get_params()), 3)
def test_task_creation(self):
f = Foo("barval", p2=5, multi=('m1', 'm2'))
self.assertEqual(len(f.get_params()), 3)
self.assertEqual(f.bar, "barval")
self.assertEqual(f.p2, 5)
self.assertEqual(f.multi, ('m1', 'm2'))
self.assertEqual(f.not_a_param, "lol")
def test_multibool(self):
luigi.run(['--local-scheduler', '--no-lock', 'Bar', '--multibool', 'true', '--multibool', 'false'])
self.assertEqual(Bar._val, (True, False))
def test_multibool_empty(self):
luigi.run(['--local-scheduler', '--no-lock', 'Bar'])
self.assertEqual(Bar._val, tuple())
def test_bool_false(self):
luigi.run(['--local-scheduler', '--no-lock', 'Baz'])
self.assertEqual(Baz._val, False)
def test_bool_true(self):
luigi.run(['--local-scheduler', '--no-lock', 'Baz', '--bool'])
self.assertEqual(Baz._val, True)
def test_forgot_param(self):
self.assertRaises(luigi.parameter.MissingParameterException, luigi.run, ['--local-scheduler', '--no-lock', 'ForgotParam'],)
@email_patch
def test_forgot_param_in_dep(self, emails):
# A programmatic missing parameter will cause an error email to be sent
luigi.run(['--local-scheduler', '--no-lock', 'ForgotParamDep'])
self.assertNotEquals(emails, [])
def test_default_param_cmdline(self):
luigi.run(['--local-scheduler', '--no-lock', 'WithDefault'])
self.assertEqual(WithDefault().x, 'xyz')
def test_global_param_defaults(self):
h = HasGlobalParam(x='xyz')
self.assertEqual(h.global_param, 123)
self.assertEqual(h.global_bool_param, False)
def test_global_param_cmdline(self):
luigi.run(['--local-scheduler', '--no-lock', 'HasGlobalParam', '--x', 'xyz', '--global-param', '124'])
h = HasGlobalParam(x='xyz')
self.assertEqual(h.global_param, 124)
self.assertEqual(h.global_bool_param, False)
def test_global_param_cmdline_flipped(self):
luigi.run(['--local-scheduler', '--no-lock', '--global-param', '125', 'HasGlobalParam', '--x', 'xyz'])
h = HasGlobalParam(x='xyz')
self.assertEqual(h.global_param, 125)
self.assertEqual(h.global_bool_param, False)
def test_global_param_override(self):
h1 = HasGlobalParam(x='xyz', global_param=124)
h2 = HasGlobalParam(x='xyz')
self.assertEquals(h1.global_param, 124)
self.assertEquals(h2.global_param, 123)
def test_global_param_dep_cmdline(self):
luigi.run(['--local-scheduler', '--no-lock', 'HasGlobalParamDep', '--x', 'xyz', '--global-param', '124'])
h = HasGlobalParam(x='xyz')
self.assertEqual(h.global_param, 124)
self.assertEqual(h.global_bool_param, False)
def test_global_param_dep_cmdline_optparse(self):
luigi.run(['--local-scheduler', '--no-lock', '--task', 'HasGlobalParamDep', '--x', 'xyz', '--global-param', '124'], use_optparse=True)
h = HasGlobalParam(x='xyz')
self.assertEqual(h.global_param, 124)
self.assertEqual(h.global_bool_param, False)
def test_global_param_dep_cmdline_bool(self):
luigi.run(['--local-scheduler', '--no-lock', 'HasGlobalParamDep', '--x', 'xyz', '--global-bool-param'])
h = HasGlobalParam(x='xyz')
self.assertEqual(h.global_param, 123)
self.assertEqual(h.global_bool_param, True)
def test_global_param_shared(self):
luigi.run(['--local-scheduler', '--no-lock', 'SharedGlobalParamA', '--shared-global-param', 'abc'])
b = SharedGlobalParamB()
self.assertEqual(b.shared_global_param, 'abc')
def test_insignificant_parameter(self):
class InsignificantParameterTask(luigi.Task):
foo = luigi.Parameter(significant=False, default='foo_default')
bar = luigi.Parameter()
t1 = InsignificantParameterTask(foo='x', bar='y')
self.assertEqual(t1.task_id, 'InsignificantParameterTask(bar=y)')
t2 = InsignificantParameterTask('u', 'z')
self.assertEqual(t2.foo, 'u')
self.assertEqual(t2.bar, 'z')
self.assertEqual(t2.task_id, 'InsignificantParameterTask(bar=z)')
def test_local_significant_param(self):
""" Obviously, if anything should be positional, so should local
significant parameters """
class MyTask(luigi.Task):
# This could typically be "--label-company=disney"
x = luigi.Parameter(significant=True)
MyTask('arg')
self.assertRaises(luigi.parameter.MissingParameterException,
lambda: MyTask())
def test_local_insignificant_param(self):
""" Ensure we have the same behavior as in before a78338c """
class MyTask(luigi.Task):
# This could typically be "--num-threads=True"
x = luigi.Parameter(significant=False)
MyTask('arg')
self.assertRaises(luigi.parameter.MissingParameterException,
lambda: MyTask())
class TestNewStyleGlobalParameters(unittest.TestCase):
def setUp(self):
super(TestNewStyleGlobalParameters, self).setUp()
MockTarget.fs.clear()
BananaDep.y.reset_global()
def expect_keys(self, expected):
self.assertEquals(set(MockTarget.fs.get_all_data().keys()), set(expected))
def test_x_arg(self):
luigi.run(['--local-scheduler', '--no-lock', 'Banana', '--x', 'foo', '--y', 'bar', '--style', 'x-arg'])
self.expect_keys(['banana-foo-bar', 'banana-dep-foo-def'])
def test_x_arg_override(self):
luigi.run(['--local-scheduler', '--no-lock', 'Banana', '--x', 'foo', '--y', 'bar', '--style', 'x-arg', '--BananaDep-y', 'xyz'])
self.expect_keys(['banana-foo-bar', 'banana-dep-foo-xyz'])
def test_x_arg_override_stupid(self):
luigi.run(['--local-scheduler', '--no-lock', 'Banana', '--x', 'foo', '--y', 'bar', '--style', 'x-arg', '--BananaDep-x', 'blabla'])
self.expect_keys(['banana-foo-bar', 'banana-dep-foo-def'])
def test_x_arg_y_arg(self):
luigi.run(['--local-scheduler', '--no-lock', 'Banana', '--x', 'foo', '--y', 'bar', '--style', 'x-arg-y-arg'])
self.expect_keys(['banana-foo-bar', 'banana-dep-foo-bar'])
def test_x_arg_y_arg_override(self):
luigi.run(['--local-scheduler', '--no-lock', 'Banana', '--x', 'foo', '--y', 'bar', '--style', 'x-arg-y-arg', '--BananaDep-y', 'xyz'])
self.expect_keys(['banana-foo-bar', 'banana-dep-foo-bar'])
def test_x_arg_y_arg_override_all(self):
luigi.run(['--local-scheduler', '--no-lock', 'Banana', '--x', 'foo', '--y', 'bar', '--style', 'x-arg-y-arg', '--BananaDep-y', 'xyz', '--BananaDep-x', 'blabla'])
self.expect_keys(['banana-foo-bar', 'banana-dep-foo-bar'])
def test_y_arg_override(self):
luigi.run(['--local-scheduler', '--no-lock', 'Banana', '--x', 'foo', '--y', 'bar', '--style', 'y-kwarg', '--BananaDep-x', 'xyz'])
self.expect_keys(['banana-foo-bar', 'banana-dep-xyz-bar'])
def test_y_arg_override_both(self):
luigi.run(['--local-scheduler', '--no-lock', 'Banana', '--x', 'foo', '--y', 'bar', '--style', 'y-kwarg', '--BananaDep-x', 'xyz', '--BananaDep-y', 'blah'])
self.expect_keys(['banana-foo-bar', 'banana-dep-xyz-bar'])
def test_y_arg_override_banana(self):
luigi.run(['--local-scheduler', '--no-lock', 'Banana', '--y', 'bar', '--style', 'y-kwarg', '--BananaDep-x', 'xyz', '--Banana-x', 'baz'])
self.expect_keys(['banana-baz-bar', 'banana-dep-xyz-bar'])
class TestRemoveGlobalParameters(unittest.TestCase):
def setUp(self):
super(TestRemoveGlobalParameters, self).setUp()
MyConfig.mc_p.reset_global()
MyConfig.mc_q.reset_global()
MyConfigWithoutSection.mc_r.reset_global()
MyConfigWithoutSection.mc_s.reset_global()
def run_and_check(self, args):
run_exit_status = luigi.run(['--local-scheduler', '--no-lock'] + args)
self.assertTrue(run_exit_status)
return run_exit_status
def test_use_config_class_1(self):
self.run_and_check(['--MyConfig-mc-p', '99', '--mc-r', '55', 'NoopTask'])
self.assertEqual(MyConfig().mc_p, 99)
self.assertEqual(MyConfig().mc_q, 73)
self.assertEqual(MyConfigWithoutSection().mc_r, 55)
self.assertEqual(MyConfigWithoutSection().mc_s, 99)
def test_use_config_class_2(self):
self.run_and_check(['NoopTask', '--MyConfig-mc-p', '99', '--mc-r', '55'])
self.assertEqual(MyConfig().mc_p, 99)
self.assertEqual(MyConfig().mc_q, 73)
self.assertEqual(MyConfigWithoutSection().mc_r, 55)
self.assertEqual(MyConfigWithoutSection().mc_s, 99)
def test_use_config_class_more_args(self):
self.run_and_check(['--MyConfig-mc-p', '99', '--mc-r', '55', 'NoopTask', '--mc-s', '123', '--MyConfig-mc-q', '42'])
self.assertEqual(MyConfig().mc_p, 99)
self.assertEqual(MyConfig().mc_q, 42)
self.assertEqual(MyConfigWithoutSection().mc_r, 55)
self.assertEqual(MyConfigWithoutSection().mc_s, 123)
@with_config({"MyConfig": {"mc_p": "666", "mc_q": "777"}})
def test_use_config_class_with_configuration(self):
self.run_and_check(['--mc-r', '555', 'NoopTask'])
self.assertEqual(MyConfig().mc_p, 666)
self.assertEqual(MyConfig().mc_q, 777)
self.assertEqual(MyConfigWithoutSection().mc_r, 555)
self.assertEqual(MyConfigWithoutSection().mc_s, 99)
@with_config({"MyConfigWithoutSection": {"mc_r": "999", "mc_s": "888"}})
def test_use_config_class_with_configuration_2(self):
self.run_and_check(['NoopTask', '--MyConfig-mc-p', '222', '--mc-r', '555'])
self.assertEqual(MyConfig().mc_p, 222)
self.assertEqual(MyConfig().mc_q, 73)
self.assertEqual(MyConfigWithoutSection().mc_r, 555)
self.assertEqual(MyConfigWithoutSection().mc_s, 888)
def test_misc_1(self):
class Dogs(luigi.Config):
n_dogs = luigi.IntParameter()
class CatsWithoutSection(luigi.Config):
use_cmdline_section = False
n_cats = luigi.IntParameter()
self.run_and_check(['--n-cats', '123', '--Dogs-n-dogs', '456', 'WithDefault'])
self.assertEqual(Dogs().n_dogs, 456)
self.assertEqual(CatsWithoutSection().n_cats, 123)
self.run_and_check(['WithDefault', '--n-cats', '321', '--Dogs-n-dogs', '654'])
self.assertEqual(Dogs().n_dogs, 654)
self.assertEqual(CatsWithoutSection().n_cats, 321)
def test_global_significant_param(self):
""" We don't want any kind of global param to be positional """
class MyTask(luigi.Task):
# This could typically be called "--test-dry-run"
x_g1 = luigi.Parameter(default='y', is_global=True, significant=True)
self.assertRaises(luigi.parameter.UnknownParameterException,
lambda: MyTask('arg'))
def test_global_insignificant_param(self):
""" We don't want any kind of global param to be positional """
class MyTask(luigi.Task):
# This could typically be "--yarn-pool=development"
x_g2 = luigi.Parameter(default='y', is_global=True, significant=False)
self.assertRaises(luigi.parameter.UnknownParameterException,
lambda: MyTask('arg'))
def test_mixed_params(self):
""" Essentially for what broke in a78338c and was reported in #738 """
class MyTask(luigi.Task):
# This could typically be "--num-threads=True"
x_g3 = luigi.Parameter(default='y', is_global=True)
local_param = luigi.Parameter()
MyTask('setting_local_param')
def test_mixed_params_inheritence(self):
""" A slightly more real-world like test case """
class TaskWithOneGlobalParam(luigi.Task):
non_positional_param = luigi.Parameter(default='y', is_global=True)
class TaskWithOnePositionalParam(TaskWithOneGlobalParam):
""" Try to mess with positional parameters by subclassing """
only_positional_param = luigi.Parameter()
def complete(self):
return True
class PositionalParamsRequirer(luigi.Task):
def requires(self):
return TaskWithOnePositionalParam('only_positional_value')
def run(self):
pass
self.run_and_check(['PositionalParamsRequirer'])
self.run_and_check(['PositionalParamsRequirer', '--non-positional-param', 'z'])
class TestParamWithDefaultFromConfig(unittest.TestCase):
def testNoSection(self):
self.assertRaises(ParameterException, lambda: luigi.Parameter(config_path=dict(section="foo", name="bar")).value)
@with_config({"foo": {}})
def testNoValue(self):
self.assertRaises(ParameterException, lambda: luigi.Parameter(config_path=dict(section="foo", name="bar")).value)
@with_config({"foo": {"bar": "baz"}})
def testDefault(self):
class A(luigi.Task):
p = luigi.Parameter(config_path=dict(section="foo", name="bar"))
self.assertEqual("baz", A().p)
self.assertEqual("boo", A(p="boo").p)
@with_config({"foo": {"bar": "2001-02-03T04"}})
def testDateHour(self):
p = luigi.DateHourParameter(config_path=dict(section="foo", name="bar"))
self.assertEqual(datetime.datetime(2001, 2, 3, 4, 0, 0), p.value)
@with_config({"foo": {"bar": "2001-02-03"}})
def testDate(self):
p = luigi.DateParameter(config_path=dict(section="foo", name="bar"))
self.assertEqual(datetime.date(2001, 2, 3), p.value)
@with_config({"foo": {"bar": "123"}})
def testInt(self):
p = luigi.IntParameter(config_path=dict(section="foo", name="bar"))
self.assertEqual(123, p.value)
@with_config({"foo": {"bar": "true"}})
def testBool(self):
p = luigi.BoolParameter(config_path=dict(section="foo", name="bar"))
self.assertEqual(True, p.value)
@with_config({"foo": {"bar": "2001-02-03-2001-02-28"}})
def testDateInterval(self):
p = luigi.DateIntervalParameter(config_path=dict(section="foo", name="bar"))
expected = luigi.date_interval.Custom.parse("2001-02-03-2001-02-28")
self.assertEqual(expected, p.value)
@with_config({"foo": {"bar": "1 day"}})
def testTimeDelta(self):
p = luigi.TimeDeltaParameter(config_path=dict(section="foo", name="bar"))
self.assertEqual(timedelta(days=1), p.value)
@with_config({"foo": {"bar": "2 seconds"}})
def testTimeDeltaPlural(self):
p = luigi.TimeDeltaParameter(config_path=dict(section="foo", name="bar"))
self.assertEqual(timedelta(seconds=2), p.value)
@with_config({"foo": {"bar": "3w 4h 5m"}})
def testTimeDeltaMultiple(self):
p = luigi.TimeDeltaParameter(config_path=dict(section="foo", name="bar"))
self.assertEqual(timedelta(weeks=3, hours=4, minutes=5), p.value)
@with_config({"foo": {"bar": "P4DT12H30M5S"}})
def testTimeDelta8601(self):
p = luigi.TimeDeltaParameter(config_path=dict(section="foo", name="bar"))
self.assertEqual(timedelta(days=4, hours=12, minutes=30, seconds=5), p.value)
@with_config({"foo": {"bar": "P5D"}})
def testTimeDelta8601NoTimeComponent(self):
p = luigi.TimeDeltaParameter(config_path=dict(section="foo", name="bar"))
self.assertEqual(timedelta(days=5), p.value)
@with_config({"foo": {"bar": "P5W"}})
def testTimeDelta8601Weeks(self):
p = luigi.TimeDeltaParameter(config_path=dict(section="foo", name="bar"))
self.assertEqual(timedelta(weeks=5), p.value)
@with_config({"foo": {"bar": "P3Y6M4DT12H30M5S"}})
def testTimeDelta8601YearMonthNotSupported(self):
def f():
return luigi.TimeDeltaParameter(config_path=dict(section="foo", name="bar")).value
self.assertRaises(luigi.parameter.ParameterException, f) # ISO 8601 durations with years or months are not supported
@with_config({"foo": {"bar": "PT6M"}})
def testTimeDelta8601MAfterT(self):
p = luigi.TimeDeltaParameter(config_path=dict(section="foo", name="bar"))
self.assertEqual(timedelta(minutes=6), p.value)
@with_config({"foo": {"bar": "P6M"}})
def testTimeDelta8601MBeforeT(self):
def f():
return luigi.TimeDeltaParameter(config_path=dict(section="foo", name="bar")).value
self.assertRaises(luigi.parameter.ParameterException, f) # ISO 8601 durations with months are not supported
def testHasDefaultNoSection(self):
luigi.Parameter(config_path=dict(section="foo", name="bar")).has_value
self.assertFalse(luigi.Parameter(config_path=dict(section="foo", name="bar")).has_value)
@with_config({"foo": {}})
def testHasDefaultNoValue(self):
self.assertFalse(luigi.Parameter(config_path=dict(section="foo", name="bar")).has_value)
@with_config({"foo": {"bar": "baz"}})
def testHasDefaultWithBoth(self):
self.assertTrue(luigi.Parameter(config_path=dict(section="foo", name="bar")).has_value)
@with_config({"foo": {"bar": "one\n\ttwo\n\tthree\n"}})
def testDefaultList(self):
p = luigi.Parameter(is_list=True, config_path=dict(section="foo", name="bar"))
self.assertEqual(('one', 'two', 'three'), p.value)
@with_config({"foo": {"bar": "1\n2\n3"}})
def testDefaultIntList(self):
p = luigi.IntParameter(is_list=True, config_path=dict(section="foo", name="bar"))
self.assertEqual((1, 2, 3), p.value)
@with_config({"foo": {"bar": "baz"}})
def testWithDefault(self):
p = luigi.Parameter(config_path=dict(section="foo", name="bar"), default='blah')
self.assertEqual('baz', p.value) # config overrides default
def testWithDefaultAndMissing(self):
p = luigi.Parameter(config_path=dict(section="foo", name="bar"), default='blah')
self.assertEqual('blah', p.value)
@with_config({"foo": {"bar": "baz"}})
def testGlobal(self):
p = luigi.Parameter(config_path=dict(section="foo", name="bar"), is_global=True, default='blah')
self.assertEqual('baz', p.value)
p.set_global('meh')
self.assertEqual('meh', p.value)
def testGlobalAndMissing(self):
p = luigi.Parameter(config_path=dict(section="foo", name="bar"), is_global=True, default='blah')
self.assertEqual('blah', p.value)
p.set_global('meh')
self.assertEqual('meh', p.value)
@with_config({"A": {"p": "p_default"}})
def testDefaultFromTaskName(self):
class A(luigi.Task):
p = luigi.Parameter()
self.assertEqual("p_default", A().p)
self.assertEqual("boo", A(p="boo").p)
@with_config({"A": {"p": "999"}})
def testDefaultFromTaskNameInt(self):
class A(luigi.Task):
p = luigi.IntParameter()
self.assertEqual(999, A().p)
self.assertEqual(777, A(p=777).p)
@with_config({"A": {"p": "p_default"}, "foo": {"bar": "baz"}})
def testDefaultFromConfigWithTaskNameToo(self):
class A(luigi.Task):
p = luigi.Parameter(config_path=dict(section="foo", name="bar"))
self.assertEqual("p_default", A().p)
self.assertEqual("boo", A(p="boo").p)
@with_config({"A": {"p": "p_default_2"}})
def testDefaultFromTaskNameWithDefault(self):
class A(luigi.Task):
p = luigi.Parameter(default="banana")
self.assertEqual("p_default_2", A().p)
self.assertEqual("boo_2", A(p="boo_2").p)
@with_config({"MyClass": {"p_wohoo": "p_default_3"}})
def testWithLongParameterName(self):
class MyClass(luigi.Task):
p_wohoo = luigi.Parameter(default="banana")
self.assertEqual("p_default_3", MyClass().p_wohoo)
self.assertEqual("boo_2", MyClass(p_wohoo="boo_2").p_wohoo)
@with_config({"RangeDaily": {"days_back": "123"}})
def testSettingOtherMember(self):
class A(luigi.Task):
pass
self.assertEqual(123, luigi.tools.range.RangeDaily(of=A).days_back)
self.assertEqual(70, luigi.tools.range.RangeDaily(of=A, days_back=70).days_back)
class OverrideEnvStuff(unittest.TestCase):
def setUp(self):
env_params_cls = luigi.interface.core
env_params_cls.scheduler_port.reset_global()
@with_config({"core": {"default-scheduler-port": '6543'}})
def testOverrideSchedulerPort(self):
env_params = luigi.interface.core()
self.assertEqual(env_params.scheduler_port, 6543)
@with_config({"core": {"scheduler-port": '6544'}})
def testOverrideSchedulerPort2(self):
env_params = luigi.interface.core()
self.assertEqual(env_params.scheduler_port, 6544)
@with_config({"core": {"scheduler_port": '6545'}})
def testOverrideSchedulerPort3(self):
env_params = luigi.interface.core()
self.assertEqual(env_params.scheduler_port, 6545)
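        # Note: the three spellings exercised above (default-scheduler-port,
        # scheduler-port and scheduler_port) all resolve to the same
        # scheduler_port setting; the hyphenated forms appear to be kept for
        # backward compatibility.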
if __name__ == '__main__':
luigi.run(use_optparse=True)
|
17zuoye/luigi
|
test/parameter_test.py
|
Python
|
apache-2.0
| 25,868 | 0.002203 |
# -*- python -*-
# -*- coding: utf-8 -*-
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2011-2012 Serge Noiraud
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#-------------------------------------------------------------------------
#
# Python modules
#
#-------------------------------------------------------------------------
import os
from gi.repository import GObject
#------------------------------------------------------------------------
#
# Set up logging
#
#------------------------------------------------------------------------
import logging
_LOG = logging.getLogger("maps.dummylayer")
#-------------------------------------------------------------------------
#
# Gramps Modules
#
#-------------------------------------------------------------------------
#-------------------------------------------------------------------------
#
# osmGpsMap
#
#-------------------------------------------------------------------------
try:
from gi.repository import OsmGpsMap as osmgpsmap
except:
raise
class DummyLayer(GObject.GObject, osmgpsmap.MapLayer):
def __init__(self):
"""
Initialize the dummy layer
"""
GObject.GObject.__init__(self)
def do_draw(self, gpsmap, gdkdrawable):
"""
Draw the layer
"""
pass
def do_render(self, gpsmap):
"""
Render the layer
"""
pass
def do_busy(self):
"""
The layer is busy
"""
return False
def do_button_press(self, gpsmap, gdkeventbutton):
"""
Someone press a button
"""
return False
GObject.type_register(DummyLayer)
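# Usage sketch (not part of this module): a MapLayer subclass like this one
# is attached to an osmgpsmap.Map widget, which then drives do_draw/do_render.
# `layer_add` is the OsmGpsMap binding's attach method and is an assumption here.
#
#     gpsmap = osmgpsmap.Map()
#     gpsmap.layer_add(DummyLayer())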
|
pmghalvorsen/gramps_branch
|
gramps/plugins/lib/maps/dummylayer.py
|
Python
|
gpl-2.0
| 2,350 | 0.005106 |
import copy
import operator
from functools import wraps, update_wrapper
# You can't trivially replace this with `functools.partial` because this binds
# to classes and returns bound instances, whereas functools.partial (on
# CPython) is a type and its instances don't bind.
def curry(_curried_func, *args, **kwargs):
def _curried(*moreargs, **morekwargs):
return _curried_func(*(args+moreargs), **dict(kwargs, **morekwargs))
return _curried
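# Illustration of the comment above (a sketch, not used elsewhere in this
# module): a plain function stored on a class participates in the descriptor
# protocol and binds, while a functools.partial instance does not, so `self`
# is never passed to it.
#
#     import functools
#
#     class _Demo(object):
#         bound = curry(lambda self, x: x)                # _Demo().bound(1) -> 1
#         unbound = functools.partial(lambda self, x: x)  # _Demo().unbound(1)
#                                                         # raises TypeError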
def memoize(func, cache, num_args):
"""
Wrap a function so that results for any argument tuple are stored in
'cache'. Note that the args to the function must be usable as dictionary
keys.
Only the first num_args are considered when creating the key.
"""
@wraps(func)
def wrapper(*args):
mem_args = args[:num_args]
if mem_args in cache:
return cache[mem_args]
result = func(*args)
cache[mem_args] = result
return result
return wrapper
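# Example usage (sketch): only the first num_args positional arguments form
# the cache key, so any further arguments are ignored on a cache hit.
#
#     _cache = {}
#     def add(a, b):
#         return a + b
#     add = memoize(add, _cache, 2)
#     add(1, 2)   # computed and stored under the key (1, 2)
#     add(1, 2)   # served from _cache without calling the function again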
class cached_property(object):
"""
    Decorator that converts a method with a single self argument into a
    property cached on the instance.
"""
def __init__(self, func):
self.func = func
def __get__(self, instance, type):
res = instance.__dict__[self.func.__name__] = self.func(instance)
return res
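# Example usage (sketch): the first access runs the method and stores the
# result in the instance __dict__ under the same name; because cached_property
# is a non-data descriptor (it defines no __set__), the instance attribute
# then shadows it and the method is never called again for that instance.
#
#     class Person(object):
#         @cached_property
#         def friends(self):
#             return expensive_query()   # hypothetical helper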
class Promise(object):
"""
This is just a base class for the proxy class created in
the closure of the lazy function. It can be used to recognize
promises in code.
"""
pass
def lazy(func, *resultclasses):
"""
Turns any callable into a lazy evaluated callable. You need to give result
classes or types -- at least one is needed so that the automatic forcing of
the lazy evaluation code is triggered. Results are not memoized; the
function is evaluated on every access.
"""
class __proxy__(Promise):
"""
Encapsulate a function call and act as a proxy for methods that are
called on the result of that function. The function is not evaluated
until one of the methods on the result is called.
"""
__dispatch = None
def __init__(self, args, kw):
self.__args = args
self.__kw = kw
if self.__dispatch is None:
self.__prepare_class__()
def __reduce__(self):
return (
_lazy_proxy_unpickle,
(func, self.__args, self.__kw) + resultclasses
)
def __prepare_class__(cls):
cls.__dispatch = {}
for resultclass in resultclasses:
cls.__dispatch[resultclass] = {}
for type_ in reversed(resultclass.mro()):
for (k, v) in type_.__dict__.items():
# All __promise__ return the same wrapper method, but they
# also do setup, inserting the method into the dispatch
# dict.
meth = cls.__promise__(resultclass, k, v)
if hasattr(cls, k):
continue
setattr(cls, k, meth)
cls._delegate_str = str in resultclasses
cls._delegate_unicode = unicode in resultclasses
assert not (cls._delegate_str and cls._delegate_unicode), "Cannot call lazy() with both str and unicode return types."
if cls._delegate_unicode:
cls.__unicode__ = cls.__unicode_cast
elif cls._delegate_str:
cls.__str__ = cls.__str_cast
__prepare_class__ = classmethod(__prepare_class__)
def __promise__(cls, klass, funcname, method):
# Builds a wrapper around some magic method and registers that magic
# method for the given type and method name.
def __wrapper__(self, *args, **kw):
# Automatically triggers the evaluation of a lazy value and
# applies the given magic method of the result type.
res = func(*self.__args, **self.__kw)
for t in type(res).mro():
if t in self.__dispatch:
return self.__dispatch[t][funcname](res, *args, **kw)
raise TypeError("Lazy object returned unexpected type.")
if klass not in cls.__dispatch:
cls.__dispatch[klass] = {}
cls.__dispatch[klass][funcname] = method
return __wrapper__
__promise__ = classmethod(__promise__)
def __unicode_cast(self):
return func(*self.__args, **self.__kw)
def __str_cast(self):
return str(func(*self.__args, **self.__kw))
def __cmp__(self, rhs):
if self._delegate_str:
s = str(func(*self.__args, **self.__kw))
elif self._delegate_unicode:
s = unicode(func(*self.__args, **self.__kw))
else:
s = func(*self.__args, **self.__kw)
if isinstance(rhs, Promise):
return -cmp(rhs, s)
else:
return cmp(s, rhs)
def __mod__(self, rhs):
if self._delegate_str:
return str(self) % rhs
elif self._delegate_unicode:
return unicode(self) % rhs
else:
raise AssertionError('__mod__ not supported for non-string types')
def __deepcopy__(self, memo):
# Instances of this class are effectively immutable. It's just a
# collection of functions. So we don't need to do anything
# complicated for copying.
memo[id(self)] = self
return self
@wraps(func)
def __wrapper__(*args, **kw):
# Creates the proxy object, instead of the actual value.
return __proxy__(args, kw)
return __wrapper__
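# Example usage (sketch): Django's ugettext_lazy is built in roughly this way,
# i.e. lazy(ugettext, unicode). The proxy records the call and only runs the
# wrapped function when the result is coerced to one of the result classes:
#
#     greet = lazy(lambda name: u"hello %s" % name, unicode)
#     msg = greet(u"world")    # nothing evaluated yet; msg is a __proxy__
#     unicode(msg)             # the lambda runs here -> u'hello world'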
def _lazy_proxy_unpickle(func, args, kwargs, *resultclasses):
return lazy(func, *resultclasses)(*args, **kwargs)
def allow_lazy(func, *resultclasses):
"""
A decorator that allows a function to be called with one or more lazy
arguments. If none of the args are lazy, the function is evaluated
immediately, otherwise a __proxy__ is returned that will evaluate the
function when needed.
"""
@wraps(func)
def wrapper(*args, **kwargs):
for arg in list(args) + kwargs.values():
if isinstance(arg, Promise):
break
else:
return func(*args, **kwargs)
return lazy(func, *resultclasses)(*args, **kwargs)
return wrapper
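# Example (sketch): django.utils.text wraps its string helpers this way, e.g.
# capfirst = allow_lazy(capfirst, unicode), so they accept both plain strings
# (evaluated immediately) and lazy translation proxies (a new proxy is
# returned that evaluates when accessed).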
empty = object()
def new_method_proxy(func):
def inner(self, *args):
if self._wrapped is empty:
self._setup()
return func(self._wrapped, *args)
return inner
class LazyObject(object):
"""
A wrapper for another class that can be used to delay instantiation of the
wrapped class.
By subclassing, you have the opportunity to intercept and alter the
instantiation. If you don't need to do that, use SimpleLazyObject.
"""
def __init__(self):
self._wrapped = empty
__getattr__ = new_method_proxy(getattr)
def __setattr__(self, name, value):
if name == "_wrapped":
# Assign to __dict__ to avoid infinite __setattr__ loops.
self.__dict__["_wrapped"] = value
else:
if self._wrapped is empty:
self._setup()
setattr(self._wrapped, name, value)
def __delattr__(self, name):
if name == "_wrapped":
raise TypeError("can't delete _wrapped.")
if self._wrapped is empty:
self._setup()
delattr(self._wrapped, name)
def _setup(self):
"""
Must be implemented by subclasses to initialise the wrapped object.
"""
raise NotImplementedError
# introspection support:
__members__ = property(lambda self: self.__dir__())
__dir__ = new_method_proxy(dir)
class SimpleLazyObject(LazyObject):
"""
A lazy object initialised from any function.
Designed for compound objects of unknown type. For builtins or objects of
known type, use django.utils.functional.lazy.
"""
def __init__(self, func):
"""
Pass in a callable that returns the object to be wrapped.
If copies are made of the resulting SimpleLazyObject, which can happen
in various circumstances within Django, then you must ensure that the
callable can be safely run more than once and will return the same
value.
"""
self.__dict__['_setupfunc'] = func
super(SimpleLazyObject, self).__init__()
def _setup(self):
self._wrapped = self._setupfunc()
__str__ = new_method_proxy(str)
__unicode__ = new_method_proxy(unicode)
def __deepcopy__(self, memo):
if self._wrapped is empty:
# We have to use SimpleLazyObject, not self.__class__, because the
# latter is proxied.
result = SimpleLazyObject(self._setupfunc)
memo[id(self)] = result
return result
else:
return copy.deepcopy(self._wrapped, memo)
# Because we have messed with __class__ below, we confuse pickle as to what
# class we are pickling. It also appears to stop __reduce__ from being
# called. So, we define __getstate__ in a way that cooperates with the way
# that pickle interprets this class. This fails when the wrapped class is a
# builtin, but it is better than nothing.
def __getstate__(self):
if self._wrapped is empty:
self._setup()
return self._wrapped.__dict__
# Need to pretend to be the wrapped class, for the sake of objects that care
# about this (especially in equality tests)
__class__ = property(new_method_proxy(operator.attrgetter("__class__")))
__eq__ = new_method_proxy(operator.eq)
__hash__ = new_method_proxy(hash)
__nonzero__ = new_method_proxy(bool)
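# Example usage (sketch): Django's auth middleware wraps the request user in
# this spirit, e.g. request.user = SimpleLazyObject(lambda: get_user(request)),
# so the session/database lookup only happens on first attribute access:
#
#     heavy = SimpleLazyObject(lambda: load_expensive_object())  # hypothetical
#     heavy.name    # load_expensive_object() runs here; the result is reused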
class lazy_property(property):
"""
A property that works with subclasses by wrapping the decorated
functions of the base class.
"""
def __new__(cls, fget=None, fset=None, fdel=None, doc=None):
if fget is not None:
@wraps(fget)
def fget(instance, instance_type=None, name=fget.__name__):
return getattr(instance, name)()
if fset is not None:
@wraps(fset)
def fset(instance, value, name=fset.__name__):
return getattr(instance, name)(value)
if fdel is not None:
@wraps(fdel)
def fdel(instance, name=fdel.__name__):
return getattr(instance, name)()
return property(fget, fset, fdel, doc)
def partition(predicate, values):
"""
Splits the values into two sets, based on the return value of the function
(True/False). e.g.:
>>> partition(lambda: x > 3, range(5))
[1, 2, 3], [4]
"""
results = ([], [])
for item in values:
results[predicate(item)].append(item)
return results
|
ychen820/microblog
|
y/google-cloud-sdk/platform/google_appengine/lib/django-1.4/django/utils/functional.py
|
Python
|
bsd-3-clause
| 11,110 | 0.00144 |
# -*- coding: utf-8 -*-
## Comments and reviews for records.
## This file is part of Invenio.
## Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""HTML Templates for commenting features """
__revision__ = "$Id$"
import cgi
# Invenio imports
from invenio.urlutils import create_html_link
from invenio.webuser import get_user_info, collect_user_info, isGuestUser, get_email
from invenio.dateutils import convert_datetext_to_dategui
from invenio.webmessage_mailutils import email_quoted_txt2html
from invenio.webcomment_config import \
CFG_WEBCOMMENT_MAX_ATTACHED_FILES, \
CFG_WEBCOMMENT_MAX_ATTACHMENT_SIZE
from invenio.config import CFG_SITE_URL, \
CFG_SITE_SECURE_URL, \
CFG_SITE_LANG, \
CFG_SITE_NAME, \
CFG_SITE_NAME_INTL,\
CFG_SITE_SUPPORT_EMAIL,\
CFG_WEBCOMMENT_ALLOW_REVIEWS, \
CFG_WEBCOMMENT_ALLOW_COMMENTS, \
CFG_WEBCOMMENT_USE_RICH_TEXT_EDITOR, \
CFG_WEBCOMMENT_NB_REPORTS_BEFORE_SEND_EMAIL_TO_ADMIN, \
CFG_WEBCOMMENT_AUTHOR_DELETE_COMMENT_OPTION, \
CFG_CERN_SITE
from invenio.htmlutils import get_html_text_editor
from invenio.messages import gettext_set_language
from invenio.bibformat import format_record
from invenio.access_control_engine import acc_authorize_action
from invenio.websearch_templates import get_fieldvalues
class Template:
"""templating class, refer to webcomment.py for examples of call"""
def tmpl_get_first_comments_without_ranking(self, recID, ln, comments, nb_comments_total, warnings):
"""
@param recID: record id
@param ln: language
@param comments: tuple as returned from webcomment.py/query_retrieve_comments_or_remarks
@param nb_comments_total: total number of comments for this record
@param warnings: list of warning tuples (warning_msg, arg1, arg2, ...)
@return: html of comments
"""
# load the right message language
_ = gettext_set_language(ln)
# naming data fields of comments
c_nickname = 0
c_user_id = 1
c_date_creation = 2
c_body = 3
c_id = 4
warnings = self.tmpl_warnings(warnings, ln)
# comments
comment_rows = ''
max_comment_round_name = comments[-1][0]
for comment_round_name, comments_list in comments:
comment_rows += '<div id="cmtRound%i" class="cmtRound">' % (comment_round_name)
comment_rows += _('%(x_nb)i comments for round "%(x_name)s"') % {'x_nb': len(comments_list), 'x_name': comment_round_name} + "<br/>"
for comment in comments_list:
if comment[c_nickname]:
nickname = comment[c_nickname]
display = nickname
else:
(uid, nickname, display) = get_user_info(comment[c_user_id])
messaging_link = self.create_messaging_link(nickname, display, ln)
comment_rows += """
<tr>
<td>"""
report_link = '%s/record/%s/comments/report?ln=%s&comid=%s' % (CFG_SITE_URL, recID, ln, comment[c_id])
reply_link = '%s/record/%s/comments/add?ln=%s&comid=%s&action=REPLY' % (CFG_SITE_URL, recID, ln, comment[c_id])
comment_rows += self.tmpl_get_comment_without_ranking(req=None, ln=ln, nickname=messaging_link, comment_uid=comment[c_user_id],
date_creation=comment[c_date_creation],
body=comment[c_body], status='', nb_reports=0,
report_link=report_link, reply_link=reply_link, recID=recID)
comment_rows += """
<br />
<br />
</td>
</tr>"""
# Close comment round
comment_rows += '</div>'
# write button
write_button_label = _("Write a comment")
write_button_link = '%s/record/%s/comments/add' % (CFG_SITE_URL, recID)
write_button_form = '<input type="hidden" name="ln" value="%s"/>' % ln
write_button_form = self.createhiddenform(action=write_button_link, method="get", text=write_button_form, button=write_button_label)
# output
if nb_comments_total > 0:
out = warnings
comments_label = len(comments) > 1 and _("Showing the latest %i comments:") % len(comments) \
or ""
out += """
<table>
<tr>
<td class="blocknote">%(comment_title)s</td>
</tr>
</table>
%(comments_label)s<br />
<table border="0" cellspacing="5" cellpadding="5" width="100%%">
%(comment_rows)s
</table>
%(view_all_comments_link)s
<br />
<br />
%(write_button_form)s<br />""" % \
{'comment_title': _("Discuss this document"),
'comments_label': comments_label,
'nb_comments_total' : nb_comments_total,
'recID': recID,
'comment_rows': comment_rows,
'tab': ' '*4,
'siteurl': CFG_SITE_URL,
's': nb_comments_total>1 and 's' or "",
'view_all_comments_link': nb_comments_total>0 and '''<a href="%s/record/%s/comments/display">View all %s comments</a>''' \
% (CFG_SITE_URL, recID, nb_comments_total) or "",
'write_button_form': write_button_form,
'nb_comments': len(comments)
}
else:
out = """
<!-- comments title table -->
<table>
<tr>
<td class="blocknote">%(discuss_label)s:</td>
</tr>
</table>
%(detailed_info)s
<br />
%(form)s
<br />""" % {'form': write_button_form,
'discuss_label': _("Discuss this document"),
'detailed_info': _("Start a discussion about any aspect of this document.")
}
return out
def tmpl_record_not_found(self, status='missing', recID="", ln=CFG_SITE_LANG):
"""
Displays a page when bad or missing record ID was given.
@param status: 'missing' : no recID was given
'inexistant': recID doesn't have an entry in the database
'nan' : recID is not a number
'invalid' : recID is an error code, i.e. in the interval [-99,-1]
        @return: body of the page
"""
_ = gettext_set_language(ln)
if status == 'inexistant':
body = _("Sorry, the record %s does not seem to exist.") % (recID,)
elif status in ('nan', 'invalid'):
body = _("Sorry, %s is not a valid ID value.") % (recID,)
else:
body = _("Sorry, no record ID was provided.")
body += "<br /><br />"
link = "<a href=\"%s?ln=%s\">%s</a>." % (CFG_SITE_URL, ln, CFG_SITE_NAME_INTL.get(ln, CFG_SITE_NAME))
body += _("You may want to start browsing from %s") % link
return body
def tmpl_get_first_comments_with_ranking(self, recID, ln, comments=None, nb_comments_total=None, avg_score=None, warnings=[]):
"""
@param recID: record id
@param ln: language
@param comments: tuple as returned from webcomment.py/query_retrieve_comments_or_remarks
@param nb_comments_total: total number of comments for this record
@param avg_score: average score of all reviews
@param warnings: list of warning tuples (warning_msg, arg1, arg2, ...)
@return: html of comments
"""
# load the right message language
_ = gettext_set_language(ln)
# naming data fields of comments
c_nickname = 0
c_user_id = 1
c_date_creation = 2
c_body = 3
c_nb_votes_yes = 4
c_nb_votes_total = 5
c_star_score = 6
c_title = 7
c_id = 8
warnings = self.tmpl_warnings(warnings, ln)
#stars
if avg_score > 0:
avg_score_img = 'stars-' + str(avg_score).split('.')[0] + '-' + str(avg_score).split('.')[1] + '.png'
else:
avg_score_img = "stars-0-0.png"
# voting links
useful_dict = { 'siteurl' : CFG_SITE_URL,
'recID' : recID,
'ln' : ln,
'yes_img' : 'smchk_gr.gif', #'yes.gif',
'no_img' : 'iconcross.gif' #'no.gif'
}
link = '<a href="%(siteurl)s/record/%(recID)s/reviews/vote?ln=%(ln)s&comid=%%(comid)s' % useful_dict
useful_yes = link + '&com_value=1">' + _("Yes") + '</a>'
useful_no = link + '&com_value=-1">' + _("No") + '</a>'
#comment row
comment_rows = ' '
max_comment_round_name = comments[-1][0]
for comment_round_name, comments_list in comments:
comment_rows += '<div id="cmtRound%i" class="cmtRound">' % (comment_round_name)
comment_rows += _('%(x_nb)i comments for round "%(x_name)s"') % {'x_nb': len(comments_list), 'x_name': comment_round_name} + "<br/>"
for comment in comments_list:
if comment[c_nickname]:
nickname = comment[c_nickname]
display = nickname
else:
(uid, nickname, display) = get_user_info(comment[c_user_id])
messaging_link = self.create_messaging_link(nickname, display, ln)
comment_rows += '''
<tr>
<td>'''
report_link = '%s/record/%s/reviews/report?ln=%s&comid=%s' % (CFG_SITE_URL, recID, ln, comment[c_id])
comment_rows += self.tmpl_get_comment_with_ranking(None, ln=ln, nickname=messaging_link,
comment_uid=comment[c_user_id],
date_creation=comment[c_date_creation],
body=comment[c_body],
status='', nb_reports=0,
nb_votes_total=comment[c_nb_votes_total],
nb_votes_yes=comment[c_nb_votes_yes],
star_score=comment[c_star_score],
title=comment[c_title], report_link=report_link, recID=recID)
comment_rows += '''
%s %s / %s<br />''' % (_("Was this review helpful?"), useful_yes % {'comid':comment[c_id]}, useful_no % {'comid':comment[c_id]})
comment_rows += '''
<br />
</td>
</tr>'''
# Close comment round
comment_rows += '</div>'
# write button
write_button_link = '''%s/record/%s/reviews/add''' % (CFG_SITE_URL, recID)
write_button_form = ' <input type="hidden" name="ln" value="%s"/>' % ln
write_button_form = self.createhiddenform(action=write_button_link, method="get", text=write_button_form, button=_("Write a review"))
if nb_comments_total > 0:
avg_score_img = str(avg_score_img)
avg_score = str(avg_score)
nb_comments_total = str(nb_comments_total)
score = '<b>'
score += _("Average review score: %(x_nb_score)s based on %(x_nb_reviews)s reviews") % \
{'x_nb_score': '</b><img src="' + CFG_SITE_URL + '/img/' + avg_score_img + '" alt="' + avg_score + '" />',
'x_nb_reviews': nb_comments_total}
useful_label = _("Readers found the following %s reviews to be most helpful.")
useful_label %= len(comments) > 1 and len(comments) or ""
view_all_comments_link ='<a href="%s/record/%s/reviews/display?ln=%s&do=hh">' % (CFG_SITE_URL, recID, ln)
view_all_comments_link += _("View all %s reviews") % nb_comments_total
view_all_comments_link += '</a><br />'
out = warnings + """
<!-- review title table -->
<table>
<tr>
<td class="blocknote">%(comment_title)s:</td>
</tr>
</table>
%(score_label)s<br />
%(useful_label)s
<!-- review table -->
<table style="border: 0px; border-collapse: separate; border-spacing: 5px; padding: 5px; width: 100%%">
%(comment_rows)s
</table>
%(view_all_comments_link)s
%(write_button_form)s<br />
""" % \
{ 'comment_title' : _("Rate this document"),
'score_label' : score,
'useful_label' : useful_label,
'recID' : recID,
'view_all_comments' : _("View all %s reviews") % (nb_comments_total,),
'write_comment' : _("Write a review"),
'comment_rows' : comment_rows,
'tab' : ' '*4,
'siteurl' : CFG_SITE_URL,
'view_all_comments_link': nb_comments_total>0 and view_all_comments_link or "",
'write_button_form' : write_button_form
}
else:
out = '''
<!-- review title table -->
<table>
<tr>
<td class="blocknote">%s:</td>
</tr>
</table>
%s<br />
%s
<br />''' % (_("Rate this document"),
_("Be the first to review this document."),
write_button_form)
return out
def tmpl_get_comment_without_ranking(self, req, ln, nickname, comment_uid, date_creation, body, status, nb_reports, reply_link=None, report_link=None, undelete_link=None, delete_links=None, unreport_link=None, recID=-1, com_id='', attached_files=None):
"""
private function
@param req: request object to fetch user info
@param ln: language
@param nickname: nickname
@param date_creation: date comment was written
@param body: comment body
@param status: status of the comment:
da: deleted by author
dm: deleted by moderator
ok: active
@param nb_reports: number of reports the comment has
        @param reply_link: http link to reply to the comment, if wanted
        @param report_link: http link to report the comment, if wanted
        @param undelete_link: http link to undelete the comment
        @param delete_links: http links to delete the comment
@param unreport_link: http link to unreport the comment
@param recID: recID where the comment is posted
@param com_id: ID of the comment displayed
@param attached_files: list of attached files
@return: html table of comment
"""
from invenio.search_engine import guess_primary_collection_of_a_record
# load the right message language
_ = gettext_set_language(ln)
date_creation = convert_datetext_to_dategui(date_creation, ln=ln)
if attached_files is None:
attached_files = []
out = ''
final_body = email_quoted_txt2html(body)
title = _('%(x_name)s wrote on %(x_date)s:') % {'x_name': nickname,
'x_date': '<i>' + date_creation + '</i>'}
title += '<a name=%s></a>' % com_id
links = ''
moderator_links = ''
if reply_link:
links += '<a href="' + reply_link +'">' + _("Reply") +'</a>'
        if report_link and status != 'ap':
            links += ' | '
            links += '<a href="' + report_link +'">' + _("Report abuse") + '</a>'
# Check if user is a comment moderator
record_primary_collection = guess_primary_collection_of_a_record(recID)
user_info = collect_user_info(req)
(auth_code, auth_msg) = acc_authorize_action(user_info, 'moderatecomments', collection=record_primary_collection)
if status in ['dm', 'da'] and req:
if not auth_code:
if status == 'dm':
final_body = '<div style="color:#a3a3a3;font-style:italic;">(Comment deleted by the moderator) - not visible for users<br /><br />' +\
final_body + '</div>'
else:
final_body = '<div style="color:#a3a3a3;font-style:italic;">(Comment deleted by the author) - not visible for users<br /><br />' +\
final_body + '</div>'
links = ''
moderator_links += '<a style="color:#8B0000;" href="' + undelete_link + '">' + _("Undelete comment") + '</a>'
else:
if status == 'dm':
final_body = '<div style="color:#a3a3a3;font-style:italic;">Comment deleted by the moderator</div>'
else:
final_body = '<div style="color:#a3a3a3;font-style:italic;">Comment deleted by the author</div>'
links = ''
else:
if not auth_code:
moderator_links += '<a style="color:#8B0000;" href="' + delete_links['mod'] +'">' + _("Delete comment") + '</a>'
elif (user_info['uid'] == comment_uid) and CFG_WEBCOMMENT_AUTHOR_DELETE_COMMENT_OPTION:
moderator_links += '<a style="color:#8B0000;" href="' + delete_links['auth'] +'">' + _("Delete comment") + '</a>'
if nb_reports >= CFG_WEBCOMMENT_NB_REPORTS_BEFORE_SEND_EMAIL_TO_ADMIN:
if not auth_code:
final_body = '<div style="color:#a3a3a3;font-style:italic;">(Comment reported. Pending approval) - not visible for users<br /><br />' + final_body + '</div>'
links = ''
moderator_links += ' | '
moderator_links += '<a style="color:#8B0000;" href="' + unreport_link +'">' + _("Unreport comment") + '</a>'
else:
final_body = '<div style="color:#a3a3a3;font-style:italic;">This comment is pending approval due to user reports</div>'
links = ''
if links and moderator_links:
links = links + ' || ' + moderator_links
elif not links:
links = moderator_links
attached_files_html = ''
if attached_files:
attached_files_html = '<div class="cmtfilesblock"><b>%s:</b><br/>' % (len(attached_files) == 1 and _("Attached file") or _("Attached files"))
for (filename, filepath, fileurl) in attached_files:
attached_files_html += create_html_link(urlbase=fileurl, urlargd={},
link_label=cgi.escape(filename)) + '<br />'
attached_files_html += '</div>'
out += """
<div style="margin-bottom:20px;background:#F9F9F9;border:1px solid #DDD">%(title)s<br />
<blockquote>
%(body)s
</blockquote>
<br />
%(attached_files_html)s
<div style="float:right">%(links)s</div>
</div>""" % \
{'title' : '<div style="background-color:#EEE;padding:2px;"><img src="%s/img/user-icon-1-24x24.gif" alt="" /> %s</div>' % (CFG_SITE_URL, title),
'body' : final_body,
'links' : links,
'attached_files_html': attached_files_html}
return out
def tmpl_get_comment_with_ranking(self, req, ln, nickname, comment_uid, date_creation, body, status, nb_reports, nb_votes_total, nb_votes_yes, star_score, title, report_link=None, delete_links=None, undelete_link=None, unreport_link=None, recID=-1):
"""
private function
@param req: request object to fetch user info
@param ln: language
@param nickname: nickname
@param date_creation: date comment was written
@param body: comment body
@param status: status of the comment
@param nb_reports: number of reports the comment has
@param nb_votes_total: total number of votes for this review
@param nb_votes_yes: number of positive votes for this record
@param star_score: star score for this record
@param title: title of review
        @param report_link: http link to report the review, if wanted
        @param undelete_link: http link to undelete the review
        @param delete_links: http links to delete the review
        @param unreport_link: http link to unreport the review
@param recID: recID where the comment is posted
@return: html table of review
"""
from invenio.search_engine import guess_primary_collection_of_a_record
# load the right message language
_ = gettext_set_language(ln)
if star_score > 0:
star_score_img = 'stars-' + str(star_score) + '-0.png'
else:
star_score_img = 'stars-0-0.png'
out = ""
date_creation = convert_datetext_to_dategui(date_creation, ln=ln)
reviewed_label = _("Reviewed by %(x_nickname)s on %(x_date)s") % {'x_nickname': nickname, 'x_date':date_creation}
useful_label = _("%(x_nb_people)i out of %(x_nb_total)i people found this review useful") % {'x_nb_people': nb_votes_yes,
'x_nb_total': nb_votes_total}
links = ''
_body = ''
if body != '':
_body = '''
<blockquote>
%s
</blockquote>''' % email_quoted_txt2html(body, linebreak_html='')
# Check if user is a comment moderator
record_primary_collection = guess_primary_collection_of_a_record(recID)
user_info = collect_user_info(req)
(auth_code, auth_msg) = acc_authorize_action(user_info, 'moderatecomments', collection=record_primary_collection)
if status in ['dm', 'da'] and req:
if not auth_code:
if status == 'dm':
_body = '<div style="color:#a3a3a3;font-style:italic;">(Review deleted by moderator) - not visible for users<br /><br />' +\
_body + '</div>'
else:
_body = '<div style="color:#a3a3a3;font-style:italic;">(Review deleted by author) - not visible for users<br /><br />' +\
_body + '</div>'
links = '<a style="color:#8B0000;" href="' + undelete_link + '">' + _("Undelete review") + '</a>'
else:
if status == 'dm':
_body = '<div style="color:#a3a3a3;font-style:italic;">Review deleted by moderator</div>'
else:
_body = '<div style="color:#a3a3a3;font-style:italic;">Review deleted by author</div>'
links = ''
else:
if not auth_code:
links += '<a style="color:#8B0000;" href="' + delete_links['mod'] +'">' + _("Delete review") + '</a>'
if nb_reports >= CFG_WEBCOMMENT_NB_REPORTS_BEFORE_SEND_EMAIL_TO_ADMIN:
if not auth_code:
_body = '<div style="color:#a3a3a3;font-style:italic;">(Review reported. Pending approval) - not visible for users<br /><br />' + _body + '</div>'
links += ' | '
links += '<a style="color:#8B0000;" href="' + unreport_link +'">' + _("Unreport review") + '</a>'
else:
_body = '<div style="color:#a3a3a3;font-style:italic;">This review is pending approval due to user reports.</div>'
links = ''
out += '''
<div style="background:#F9F9F9;border:1px solid #DDD">
<div style="background-color:#EEE;padding:2px;">
<img src="%(siteurl)s/img/%(star_score_img)s" alt="%(star_score)s" style="margin-right:10px;"/><b>%(title)s</b><br />
%(reviewed_label)s<br />
%(useful_label)s
</div>
%(body)s
</div>
%(abuse)s''' % {'siteurl' : CFG_SITE_URL,
'star_score_img': star_score_img,
'star_score' : star_score,
'title' : title,
'reviewed_label': reviewed_label,
'useful_label' : useful_label,
'body' : _body,
'abuse' : links
}
return out
def tmpl_get_comments(self, req, recID, ln,
nb_per_page, page, nb_pages,
display_order, display_since,
CFG_WEBCOMMENT_ALLOW_REVIEWS,
comments, total_nb_comments,
avg_score,
warnings,
border=0, reviews=0,
total_nb_reviews=0,
nickname='', uid=-1, note='',score=5,
can_send_comments=False,
can_attach_files=False,
user_is_subscribed_to_discussion=False,
user_can_unsubscribe_from_discussion=False,
display_comment_rounds=None):
"""
Get table of all comments
@param recID: record id
@param ln: language
@param nb_per_page: number of results per page
@param page: page number
@param display_order: hh = highest helpful score, review only
lh = lowest helpful score, review only
hs = highest star score, review only
ls = lowest star score, review only
od = oldest date
nd = newest date
@param display_since: all= no filtering by date
nd = n days ago
nw = n weeks ago
nm = n months ago
ny = n years ago
where n is a single digit integer between 0 and 9
        @param CFG_WEBCOMMENT_ALLOW_REVIEWS: is ranking enabled, get from config.py/CFG_WEBCOMMENT_ALLOW_REVIEWS
@param comments: tuple as returned from webcomment.py/query_retrieve_comments_or_remarks
@param total_nb_comments: total number of comments for this record
@param avg_score: average score of reviews for this record
@param warnings: list of warning tuples (warning_msg, color)
        @param border: boolean, set to show a border around each comment/review
@param reviews: boolean, enabled for reviews, disabled for comments
@param can_send_comments: boolean, if user can send comments or not
@param can_attach_files: boolean, if user can attach file to comment or not
@param user_is_subscribed_to_discussion: True if user already receives new comments by email
        @param user_can_unsubscribe_from_discussion: True if user is allowed to unsubscribe from discussion
"""
# load the right message language
_ = gettext_set_language(ln)
# CERN hack begins: display full ATLAS user name. Check further below too.
current_user_fullname = ""
override_nickname_p = False
if CFG_CERN_SITE:
from invenio.search_engine import get_all_collections_of_a_record
user_info = collect_user_info(uid)
if 'atlas-readaccess-active-members [CERN]' in user_info['group']:
# An ATLAS member is never anonymous to its colleagues
# when commenting inside ATLAS collections
recid_collections = get_all_collections_of_a_record(recID)
if 'ATLAS' in str(recid_collections):
override_nickname_p = True
current_user_fullname = user_info.get('external_fullname', '')
# CERN hack ends
# naming data fields of comments
if reviews:
c_nickname = 0
c_user_id = 1
c_date_creation = 2
c_body = 3
c_status = 4
c_nb_reports = 5
c_nb_votes_yes = 6
c_nb_votes_total = 7
c_star_score = 8
c_title = 9
c_id = 10
c_round_name = 11
c_restriction = 12
reply_to = 13
discussion = 'reviews'
comments_link = '<a href="%s/record/%s/comments/">%s</a> (%i)' % (CFG_SITE_URL, recID, _('Comments'), total_nb_comments)
reviews_link = '<b>%s (%i)</b>' % (_('Reviews'), total_nb_reviews)
add_comment_or_review = self.tmpl_add_comment_form_with_ranking(recID, uid, current_user_fullname or nickname, ln, '', score, note, warnings, show_title_p=True, can_attach_files=can_attach_files)
else:
c_nickname = 0
c_user_id = 1
c_date_creation = 2
c_body = 3
c_status = 4
c_nb_reports = 5
c_id = 6
c_round_name = 7
c_restriction = 8
reply_to = 9
discussion = 'comments'
comments_link = '<b>%s (%i)</b>' % (_('Comments'), total_nb_comments)
reviews_link = '<a href="%s/record/%s/reviews/">%s</a> (%i)' % (CFG_SITE_URL, recID, _('Reviews'), total_nb_reviews)
add_comment_or_review = self.tmpl_add_comment_form(recID, uid, nickname, ln, note, warnings, can_attach_files=can_attach_files, user_is_subscribed_to_discussion=user_is_subscribed_to_discussion)
# voting links
useful_dict = { 'siteurl' : CFG_SITE_URL,
'recID' : recID,
'ln' : ln,
'do' : display_order,
'ds' : display_since,
'nb' : nb_per_page,
'p' : page,
'reviews' : reviews,
'discussion' : discussion
}
useful_yes = '<a href="%(siteurl)s/record/%(recID)s/%(discussion)s/vote?ln=%(ln)s&comid=%%(comid)s&com_value=1&do=%(do)s&ds=%(ds)s&nb=%(nb)s&p=%(p)s&referer=%(siteurl)s/record/%(recID)s/%(discussion)s/display">' + _("Yes") + '</a>'
useful_yes %= useful_dict
useful_no = '<a href="%(siteurl)s/record/%(recID)s/%(discussion)s/vote?ln=%(ln)s&comid=%%(comid)s&com_value=-1&do=%(do)s&ds=%(ds)s&nb=%(nb)s&p=%(p)s&referer=%(siteurl)s/record/%(recID)s/%(discussion)s/display">' + _("No") + '</a>'
useful_no %= useful_dict
warnings = self.tmpl_warnings(warnings, ln)
link_dic = { 'siteurl' : CFG_SITE_URL,
'module' : 'comments',
'function' : 'index',
'discussion': discussion,
'arguments' : 'do=%s&ds=%s&nb=%s' % (display_order, display_since, nb_per_page),
'arg_page' : '&p=%s' % page,
'page' : page,
'rec_id' : recID}
if not req:
req = None
## comments table
comments_rows = ''
last_comment_round_name = None
comment_round_names = [comment[0] for comment in comments]
if comment_round_names:
last_comment_round_name = comment_round_names[-1]
for comment_round_name, comments_list in comments:
comment_round_style = "display:none;"
comment_round_is_open = False
if comment_round_name in display_comment_rounds:
comment_round_is_open = True
comment_round_style = ""
comments_rows += '<div id="cmtRound%s" class="cmtround">' % (comment_round_name)
if not comment_round_is_open and \
(comment_round_name or len(comment_round_names) > 1):
new_cmtgrp = list(display_comment_rounds)
new_cmtgrp.append(comment_round_name)
comments_rows += '''<img src="/img/right-trans.gif" id="cmtarrowiconright%(grp_id)s" alt="Open group" /><img src="/img/down-trans.gif" id="cmtarrowicondown%(grp_id)s" alt="Close group" style="display:none" />
<a class="cmtgrpswitch" name="cmtgrpLink%(grp_id)s" onclick="var cmtarrowicondown=document.getElementById('cmtarrowicondown%(grp_id)s');var cmtarrowiconright=document.getElementById('cmtarrowiconright%(grp_id)s');var subgrp=document.getElementById('cmtSubRound%(grp_id)s');if (subgrp.style.display==''){subgrp.style.display='none';cmtarrowiconright.style.display='';cmtarrowicondown.style.display='none';}else{subgrp.style.display='';cmtarrowiconright.style.display='none';cmtarrowicondown.style.display='';};return false;"''' % {'grp_id': comment_round_name}
comments_rows += 'href=\"%(siteurl)s/record/%(rec_id)s/%(discussion)s/%(function)s?%(arguments)s&%(arg_page)s' % link_dic
comments_rows += '&' + '&'.join(["cmtgrp=" + grp for grp in new_cmtgrp if grp != 'none']) + \
'#cmtgrpLink%s' % (comment_round_name) + '\">'
comments_rows += _('%(x_nb)i comments for round "%(x_name)s"') % {'x_nb': len(comments_list), 'x_name': comment_round_name} + "</a><br/>"
elif comment_round_name or len(comment_round_names) > 1:
new_cmtgrp = list(display_comment_rounds)
new_cmtgrp.remove(comment_round_name)
comments_rows += '''<img src="/img/right-trans.gif" id="cmtarrowiconright%(grp_id)s" alt="Open group" style="display:none" /><img src="/img/down-trans.gif" id="cmtarrowicondown%(grp_id)s" alt="Close group" />
<a class="cmtgrpswitch" name="cmtgrpLink%(grp_id)s" onclick="var cmtarrowicondown=document.getElementById('cmtarrowicondown%(grp_id)s');var cmtarrowiconright=document.getElementById('cmtarrowiconright%(grp_id)s');var subgrp=document.getElementById('cmtSubRound%(grp_id)s');if (subgrp.style.display==''){subgrp.style.display='none';cmtarrowiconright.style.display='';cmtarrowicondown.style.display='none';}else{subgrp.style.display='';cmtarrowiconright.style.display='none';cmtarrowicondown.style.display='';};return false;"''' % {'grp_id': comment_round_name}
comments_rows += 'href=\"%(siteurl)s/record/%(rec_id)s/%(discussion)s/%(function)s?%(arguments)s&%(arg_page)s' % link_dic
comments_rows += '&' + ('&'.join(["cmtgrp=" + grp for grp in new_cmtgrp if grp != 'none']) or 'cmtgrp=none' ) + \
'#cmtgrpLink%s' % (comment_round_name) + '\">'
comments_rows += _('%(x_nb)i comments for round "%(x_name)s"') % {'x_nb': len(comments_list), 'x_name': comment_round_name}+ "</a><br/>"
comments_rows += '<div id="cmtSubRound%s" class="cmtsubround" style="%s">' % (comment_round_name,
comment_round_style)
thread_history = [0]
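            # thread_history works as a stack of parent comment ids, from the
            # root (0) down to the current nesting level; the index of a
            # comment's parent in this stack gives its indentation depth below.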
for comment in comments_list:
if comment[reply_to] not in thread_history:
# Going one level down in the thread
thread_history.append(comment[reply_to])
depth = thread_history.index(comment[reply_to])
else:
depth = thread_history.index(comment[reply_to])
thread_history = thread_history[:depth + 1]
# CERN hack begins: display full ATLAS user name.
comment_user_fullname = ""
if CFG_CERN_SITE and override_nickname_p:
comment_user_fullname = get_email(comment[c_user_id])
# CERN hack ends
if comment[c_nickname]:
_nickname = comment[c_nickname]
display = _nickname
else:
(uid, _nickname, display) = get_user_info(comment[c_user_id])
messaging_link = self.create_messaging_link(_nickname, comment_user_fullname or display, ln)
from invenio.webcomment import get_attached_files # FIXME
files = get_attached_files(recID, comment[c_id])
# do NOT delete the HTML comment below. It is used for parsing... (I plead unguilty!)
comments_rows += """
<!-- start comment row -->
<div style="margin-left:%spx">""" % (depth*20)
delete_links = {}
if not reviews:
report_link = '%(siteurl)s/record/%(recID)s/comments/report?ln=%(ln)s&comid=%%(comid)s&do=%(do)s&ds=%(ds)s&nb=%(nb)s&p=%(p)s&referer=%(siteurl)s/record/%(recID)s/comments/display' % useful_dict % {'comid':comment[c_id]}
reply_link = '%(siteurl)s/record/%(recID)s/comments/add?ln=%(ln)s&action=REPLY&comid=%%(comid)s' % useful_dict % {'comid':comment[c_id]}
delete_links['mod'] = "%s/admin/webcomment/webcommentadmin.py/del_single_com_mod?ln=%s&id=%s" % (CFG_SITE_URL, ln, comment[c_id])
delete_links['auth'] = "%s/admin/webcomment/webcommentadmin.py/del_single_com_auth?ln=%s&id=%s" % (CFG_SITE_URL, ln, comment[c_id])
undelete_link = "%s/admin/webcomment/webcommentadmin.py/undel_com?ln=%s&id=%s" % (CFG_SITE_URL, ln, comment[c_id])
unreport_link = "%s/admin/webcomment/webcommentadmin.py/unreport_com?ln=%s&id=%s" % (CFG_SITE_URL, ln, comment[c_id])
comments_rows += self.tmpl_get_comment_without_ranking(req, ln, messaging_link, comment[c_user_id], comment[c_date_creation], comment[c_body], comment[c_status], comment[c_nb_reports], reply_link, report_link, undelete_link, delete_links, unreport_link, recID, comment[c_id], files)
else:
report_link = '%(siteurl)s/record/%(recID)s/reviews/report?ln=%(ln)s&comid=%%(comid)s&do=%(do)s&ds=%(ds)s&nb=%(nb)s&p=%(p)s&referer=%(siteurl)s/record/%(recID)s/reviews/display' % useful_dict % {'comid': comment[c_id]}
delete_links['mod'] = "%s/admin/webcomment/webcommentadmin.py/del_single_com_mod?ln=%s&id=%s" % (CFG_SITE_URL, ln, comment[c_id])
delete_links['auth'] = "%s/admin/webcomment/webcommentadmin.py/del_single_com_auth?ln=%s&id=%s" % (CFG_SITE_URL, ln, comment[c_id])
undelete_link = "%s/admin/webcomment/webcommentadmin.py/undel_com?ln=%s&id=%s" % (CFG_SITE_URL, ln, comment[c_id])
unreport_link = "%s/admin/webcomment/webcommentadmin.py/unreport_com?ln=%s&id=%s" % (CFG_SITE_URL, ln, comment[c_id])
comments_rows += self.tmpl_get_comment_with_ranking(req, ln, messaging_link, comment[c_user_id], comment[c_date_creation], comment[c_body], comment[c_status], comment[c_nb_reports], comment[c_nb_votes_total], comment[c_nb_votes_yes], comment[c_star_score], comment[c_title], report_link, delete_links, undelete_link, unreport_link, recID)
helpful_label = _("Was this review helpful?")
report_abuse_label = "(" + _("Report abuse") + ")"
yes_no_separator = '<td> / </td>'
if comment[c_nb_reports] >= CFG_WEBCOMMENT_NB_REPORTS_BEFORE_SEND_EMAIL_TO_ADMIN or comment[c_status] in ['dm', 'da']:
report_abuse_label = ""
helpful_label = ""
useful_yes = ""
useful_no = ""
yes_no_separator = ""
comments_rows += """
<table>
<tr>
<td>%(helpful_label)s %(tab)s</td>
<td> %(yes)s </td>
%(yes_no_separator)s
<td> %(no)s </td>
<td class="reportabuse">%(tab)s%(tab)s<a href="%(report)s">%(report_abuse_label)s</a></td>
</tr>
</table>""" \
% {'helpful_label': helpful_label,
'yes' : useful_yes % {'comid':comment[c_id]},
'yes_no_separator': yes_no_separator,
'no' : useful_no % {'comid':comment[c_id]},
'report' : report_link % {'comid':comment[c_id]},
'report_abuse_label': comment[c_nb_reports] >= CFG_WEBCOMMENT_NB_REPORTS_BEFORE_SEND_EMAIL_TO_ADMIN and '' or report_abuse_label,
'tab' : ' '*2}
# do NOT remove HTML comment below. It is used for parsing...
comments_rows += """
</div>
<!-- end comment row -->"""
comments_rows += '</div></div>'
## page links
page_links = ''
# Previous
if page != 1:
link_dic['arg_page'] = 'p=%s' % (page - 1)
page_links += '<a href=\"%(siteurl)s/record/%(rec_id)s/%(discussion)s/%(function)s?%(arguments)s&%(arg_page)s\"><<</a> ' % link_dic
else:
page_links += ' %s ' % (' '*(len(_('Previous'))+7))
# Page Numbers
for i in range(1, nb_pages+1):
link_dic['arg_page'] = 'p=%s' % i
link_dic['page'] = '%s' % i
if i != page:
page_links += '''
<a href=\"%(siteurl)s/record/%(rec_id)s/%(discussion)s/%(function)s?%(arguments)s&%(arg_page)s\">%(page)s</a> ''' % link_dic
else:
page_links += ''' <b>%s</b> ''' % i
# Next
if page != nb_pages:
link_dic['arg_page'] = 'p=%s' % (page + 1)
page_links += '''
<a href=\"%(siteurl)s/record/%(rec_id)s/%(discussion)s/%(function)s?%(arguments)s&%(arg_page)s\">>></a> ''' % link_dic
else:
page_links += '%s' % (' '*(len(_('Next'))+7))
## stuff for ranking if enabled
if reviews:
if avg_score > 0:
avg_score_img = 'stars-' + str(avg_score).split('.')[0] + '-' + str(avg_score).split('.')[1] + '.png'
else:
avg_score_img = "stars-0-0.png"
ranking_average = '<br /><b>'
ranking_average += _("Average review score: %(x_nb_score)s based on %(x_nb_reviews)s reviews") % \
{'x_nb_score': '</b><img src="' + CFG_SITE_URL + '/img/' + avg_score_img + '" alt="' + str(avg_score) + '" />',
'x_nb_reviews': str(total_nb_reviews)}
ranking_average += '<br />'
else:
ranking_average = ""
write_button_link = '''%s/record/%s/%s/add''' % (CFG_SITE_URL, recID, discussion)
write_button_form = '<input type="hidden" name="ln" value="%s"/>'
write_button_form = self.createhiddenform(action=write_button_link,
method="get",
text=write_button_form,
button = reviews and _('Write a review') or _('Write a comment'))
if reviews:
total_label = _("There is a total of %s reviews")
else:
total_label = _("There is a total of %s comments")
total_label %= total_nb_comments
review_or_comment_first = ''
if reviews == 0 and total_nb_comments == 0 and can_send_comments:
review_or_comment_first = _("Start a discussion about any aspect of this document.") + '<br />'
elif reviews == 1 and total_nb_reviews == 0 and can_send_comments:
review_or_comment_first = _("Be the first to review this document.") + '<br />'
# do NOT remove the HTML comments below. Used for parsing
body = '''
%(comments_and_review_tabs)s
<!-- start comments table -->
<div style="border: %(border)spx solid black; width: 95%%; margin:10px;font-size:small">
%(comments_rows)s
</div>
<!-- end comments table -->
%(review_or_comment_first)s
<br />''' % \
{ 'record_label': _("Record"),
'back_label': _("Back to search results"),
'total_label': total_label,
'write_button_form' : write_button_form,
'write_button_form_again' : total_nb_comments>3 and write_button_form or "",
'comments_rows' : comments_rows,
'total_nb_comments' : total_nb_comments,
'comments_or_reviews' : reviews and _('review') or _('comment'),
'comments_or_reviews_title' : reviews and _('Review') or _('Comment'),
'siteurl' : CFG_SITE_URL,
'module' : "comments",
'recid' : recID,
'ln' : ln,
'border' : border,
'ranking_avg' : ranking_average,
'comments_and_review_tabs' : CFG_WEBCOMMENT_ALLOW_REVIEWS and \
CFG_WEBCOMMENT_ALLOW_COMMENTS and \
'%s | %s <br />' % \
(comments_link, reviews_link) or '',
'review_or_comment_first' : review_or_comment_first
}
# form is not currently used. reserved for an eventual purpose
#form = """
# Display <select name="nb" size="1"> per page
# <option value="all">All</option>
# <option value="10">10</option>
# <option value="25">20</option>
# <option value="50">50</option>
# <option value="100" selected="selected">100</option>
# </select>
# comments per page that are <select name="ds" size="1">
# <option value="all" selected="selected">Any age</option>
# <option value="1d">1 day old</option>
# <option value="3d">3 days old</option>
# <option value="1w">1 week old</option>
# <option value="2w">2 weeks old</option>
# <option value="1m">1 month old</option>
# <option value="3m">3 months old</option>
# <option value="6m">6 months old</option>
# <option value="1y">1 year old</option>
# </select>
# and sorted by <select name="do" size="1">
# <option value="od" selected="selected">Oldest first</option>
# <option value="nd">Newest first</option>
# %s
# </select>
# """ % \
# (reviews==1 and '''
# <option value=\"hh\">most helpful</option>
# <option value=\"lh\">least helpful</option>
# <option value=\"hs\">highest star ranking</option>
# <option value=\"ls\">lowest star ranking</option>
# </select>''' or '''
# </select>''')
#
#form_link = "%(siteurl)s/%(module)s/%(function)s" % link_dic
#form = self.createhiddenform(action=form_link, method="get", text=form, button='Go', recid=recID, p=1)
pages = """
<div>
%(v_label)s %(comments_or_reviews)s %(results_nb_lower)s-%(results_nb_higher)s <br />
%(page_links)s
</div>
""" % \
{'v_label': _("Viewing"),
'page_links': _("Page:") + page_links ,
'comments_or_reviews': reviews and _('review') or _('comment'),
'results_nb_lower': len(comments)>0 and ((page-1) * nb_per_page)+1 or 0,
'results_nb_higher': page == nb_pages and (((page-1) * nb_per_page) + len(comments)) or (page * nb_per_page)}
if nb_pages > 1:
#body = warnings + body + form + pages
body = warnings + body + pages
else:
body = warnings + body
if reviews == 0:
if not user_is_subscribed_to_discussion:
body += '<small>'
body += '<div class="comment-subscribe">' + '<img src="%s/img/mail-icon-12x8.gif" border="0" alt="" />' % CFG_SITE_URL + \
' ' + '<b>' + create_html_link(urlbase=CFG_SITE_URL + '/record/' + \
str(recID) + '/comments/subscribe',
urlargd={},
link_label=_('Subscribe')) + \
'</b>' + ' to this discussion. You will then receive all new comments by email.' + '</div>'
body += '</small><br />'
elif user_can_unsubscribe_from_discussion:
body += '<small>'
body += '<div class="comment-subscribe">' + '<img src="%s/img/mail-icon-12x8.gif" border="0" alt="" />' % CFG_SITE_URL + \
' ' + '<b>' + create_html_link(urlbase=CFG_SITE_URL + '/record/' + \
str(recID) + '/comments/unsubscribe',
urlargd={},
link_label=_('Unsubscribe')) + \
'</b>' + ' from this discussion. You will no longer receive emails about new comments.' + '</div>'
body += '</small><br />'
if can_send_comments:
body += add_comment_or_review
else:
body += '<br/><em>' + _("You are not authorized to comment or review.") + '</em>'
return '<div style="margin-left:10px;margin-right:10px;">' + body + '</div>'
def create_messaging_link(self, to, display_name, ln=CFG_SITE_LANG):
"""prints a link to the messaging system"""
link = "%s/yourmessages/write?msg_to=%s&ln=%s" % (CFG_SITE_URL, to, ln)
if to:
return '<a href="%s" class="maillink">%s</a>' % (link, display_name)
else:
return display_name
def createhiddenform(self, action="", method="get", text="", button="confirm", cnfrm='', **hidden):
"""
        create a form with hidden values and a submit button
        @param action: name of the action to perform on submit
        @param method: 'get' or 'post'
        @param text: additional text, can also be used to add non-hidden input
@param button: value/caption on the submit button
@param cnfrm: if given, must check checkbox to confirm
@param **hidden: dictionary with name=value pairs for hidden input
@return: html form
"""
output = """
<form action="%s" method="%s">""" % (action, method.lower().strip() in ['get', 'post'] and method or 'get')
output += """
<table style="width:90%">
<tr>
<td style="vertical-align: top">
"""
output += text + '\n'
if cnfrm:
output += """
<input type="checkbox" name="confirm" value="1" />"""
for key in hidden.keys():
if type(hidden[key]) is list:
for value in hidden[key]:
output += """
<input type="hidden" name="%s" value="%s" />""" % (key, value)
else:
output += """
<input type="hidden" name="%s" value="%s" />""" % (key, hidden[key])
output += """
</td>
</tr>
<tr>
<td>"""
output += """
<input class="adminbutton" type="submit" value="%s" />""" % (button, )
output += """
</td>
</tr>
</table>
</form>"""
return output
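    # Example call (sketch; the keyword arguments shown are illustrative):
    #   self.createhiddenform(action=CFG_SITE_URL + '/record/1/comments/add',
    #                         method='get', text='', button='Add comment',
    #                         ln='en', comid=5)
    # renders a table-wrapped form in which ln and comid travel as hidden
    # <input> fields.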
def create_write_comment_hiddenform(self, action="", method="get", text="", button="confirm", cnfrm='', enctype='', **hidden):
"""
        create a form with hidden values and a submit button
        @param action: name of the action to perform on submit
        @param method: 'get' or 'post'
        @param text: additional text, can also be used to add non-hidden input
@param button: value/caption on the submit button
@param cnfrm: if given, must check checkbox to confirm
@param **hidden: dictionary with name=value pairs for hidden input
@return: html form
"""
        enctype_attr = ''
        if enctype:
            enctype_attr = 'enctype="%s"' % enctype
output = """
<form action="%s" method="%s" %s>""" % (action, method.lower().strip() in ['get', 'post'] and method or 'get', enctype_attr)
if cnfrm:
output += """
<input type="checkbox" name="confirm" value="1" />"""
for key in hidden.keys():
if type(hidden[key]) is list:
for value in hidden[key]:
output += """
<input type="hidden" name="%s" value="%s" />""" % (key, value)
else:
output += """
<input type="hidden" name="%s" value="%s" />""" % (key, hidden[key])
output += text + '\n'
output += """
</form>"""
return output
def tmpl_warnings(self, warnings, ln=CFG_SITE_LANG):
"""
Prepare the warnings list
@param warnings: list of warning tuples (warning_msg, arg1, arg2, etc)
@return: html string of warnings
"""
red_text_warnings = ['WRN_WEBCOMMENT_FEEDBACK_NOT_RECORDED',
'WRN_WEBCOMMENT_ALREADY_VOTED']
green_text_warnings = ['WRN_WEBCOMMENT_FEEDBACK_RECORDED',
'WRN_WEBCOMMENT_SUBSCRIBED',
'WRN_WEBCOMMENT_UNSUBSCRIBED']
from invenio.errorlib import get_msgs_for_code_list
span_class = 'important'
out = ""
if type(warnings) is not list:
warnings = [warnings]
if len(warnings) > 0:
warnings_parsed = get_msgs_for_code_list(warnings, 'warning', ln)
for (warning_code, warning_text) in warnings_parsed:
if not warning_code.startswith('WRN'):
#display only warnings that begin with WRN to user
continue
if warning_code in red_text_warnings:
span_class = 'important'
elif warning_code in green_text_warnings:
span_class = 'exampleleader'
else:
span_class = 'important'
out += '''
<span class="%(span_class)s">%(warning)s</span><br />''' % \
{ 'span_class' : span_class,
'warning' : warning_text }
return out
else:
return ""
def tmpl_add_comment_form(self, recID, uid, nickname, ln, msg,
warnings, textual_msg=None, can_attach_files=False,
user_is_subscribed_to_discussion=False, reply_to=None):
"""
Add form for comments
@param recID: record id
@param uid: user id
@param ln: language
@param msg: comment body contents for when refreshing due to
warning, or when replying to a comment
@param textual_msg: same as 'msg', but contains the textual
version in case user cannot display FCKeditor
@param warnings: list of warning tuples (warning_msg, color)
@param can_attach_files: if user can upload attach file to record or not
@param user_is_subscribed_to_discussion: True if user already receives new comments by email
@param reply_to: the ID of the comment we are replying to. None if not replying
@return html add comment form
"""
_ = gettext_set_language(ln)
link_dic = { 'siteurl' : CFG_SITE_URL,
'module' : 'comments',
'function' : 'add',
'arguments' : 'ln=%s&action=%s' % (ln, 'SUBMIT'),
'recID' : recID}
if textual_msg is None:
textual_msg = msg
# FIXME a cleaner handling of nicknames is needed.
if not nickname:
(uid, nickname, display) = get_user_info(uid)
if nickname:
note = _("Note: Your nickname, %s, will be displayed as author of this comment.") % ('<i>' + nickname + '</i>')
else:
(uid, nickname, display) = get_user_info(uid)
link = '<a href="%s/youraccount/edit">' % CFG_SITE_SECURE_URL
note = _("Note: you have not %(x_url_open)sdefined your nickname%(x_url_close)s. %(x_nickname)s will be displayed as the author of this comment.") % \
{'x_url_open': link,
'x_url_close': '</a>',
'x_nickname': ' <br /><i>' + display + '</i>'}
if not CFG_WEBCOMMENT_USE_RICH_TEXT_EDITOR:
note += '<br />' + ' '*10 + cgi.escape('You can use some HTML tags: <a href>, <strong>, <blockquote>, <br />, <p>, <em>, <ul>, <li>, <b>, <i>')
#from invenio.search_engine import print_record
#record_details = print_record(recID=recID, format='hb', ln=ln)
warnings = self.tmpl_warnings(warnings, ln)
# Prepare file upload settings. We must enable file upload in
        # the fckeditor + a simple file upload interface (independent from the editor)
file_upload_url = None
simple_attach_file_interface = ''
if isGuestUser(uid):
simple_attach_file_interface = "<small><em>%s</em></small><br/>" % _("Once logged in, authorized users can also attach files.")
if can_attach_files:
# Note that files can be uploaded only when user is logged in
#file_upload_url = '%s/record/%i/comments/attachments/put' % \
# (CFG_SITE_URL, recID)
simple_attach_file_interface = '''
<div id="uploadcommentattachmentsinterface">
<small>%(attach_msg)s: <em>(%(nb_files_limit_msg)s. %(file_size_limit_msg)s)</em></small><br />
<input class="multi max-%(CFG_WEBCOMMENT_MAX_ATTACHED_FILES)s" type="file" name="commentattachment[]"/><br />
<noscript>
<input type="file" name="commentattachment[]" /><br />
</noscript>
</div>
''' % \
{'CFG_WEBCOMMENT_MAX_ATTACHED_FILES': CFG_WEBCOMMENT_MAX_ATTACHED_FILES,
'attach_msg': CFG_WEBCOMMENT_MAX_ATTACHED_FILES == 1 and _("Optionally, attach a file to this comment") or \
_("Optionally, attach files to this comment"),
                 'nb_files_limit_msg': CFG_WEBCOMMENT_MAX_ATTACHED_FILES == 1 and _("Max one file") or \
                                       _("Max %i files") % CFG_WEBCOMMENT_MAX_ATTACHED_FILES,
'file_size_limit_msg': CFG_WEBCOMMENT_MAX_ATTACHMENT_SIZE > 0 and _("Max %(x_nb_bytes)s per file") % {'x_nb_bytes': (CFG_WEBCOMMENT_MAX_ATTACHMENT_SIZE < 1024*1024 and (str(CFG_WEBCOMMENT_MAX_ATTACHMENT_SIZE/1024) + 'KB') or (str(CFG_WEBCOMMENT_MAX_ATTACHMENT_SIZE/(1024*1024)) + 'MB'))} or ''}
editor = get_html_text_editor(name='msg',
content=msg,
textual_content=textual_msg,
width='100%',
height='400px',
enabled=CFG_WEBCOMMENT_USE_RICH_TEXT_EDITOR,
file_upload_url=file_upload_url,
toolbar_set = "WebComment")
subscribe_to_discussion = ''
if not user_is_subscribed_to_discussion:
# Offer to subscribe to discussion
subscribe_to_discussion = '<small><input type="checkbox" name="subscribe" id="subscribe"/><label for="subscribe">%s</label></small>' % _("Send me an email when a new comment is posted")
form = """<div id="comment-write"><h2>%(add_comment)s</h2>
%(editor)s
<br />
%(simple_attach_file_interface)s
<span class="reportabuse">%(note)s</span>
<div class="submit-area">
%(subscribe_to_discussion)s<br />
<input class="adminbutton" type="submit" value="Add comment" />
%(reply_to)s
</div>
""" % {'note': note,
'record_label': _("Article") + ":",
'comment_label': _("Comment") + ":",
'add_comment': _('Add comment'),
'editor': editor,
'subscribe_to_discussion': subscribe_to_discussion,
'reply_to': reply_to and '<input type="hidden" name="comid" value="%s"/>' % reply_to or '',
'simple_attach_file_interface': simple_attach_file_interface}
form_link = "%(siteurl)s/record/%(recID)s/comments/%(function)s?%(arguments)s" % link_dic
form = self.create_write_comment_hiddenform(action=form_link, method="post", text=form, button='Add comment',
enctype='multipart/form-data')
form += '</div>'
return warnings + form
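    # Sketch of the resulting submit URL (recID/ln values are hypothetical):
    # with recID=123 and ln='en', form_link above expands to
    #     <CFG_SITE_URL>/record/123/comments/add?ln=en&action=SUBMIT
    # and the editor content is posted there as the 'msg' field.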
def tmpl_add_comment_form_with_ranking(self, recID, uid, nickname, ln, msg, score, note,
warnings, textual_msg=None, show_title_p=False,
can_attach_files=False):
"""
Add form for reviews
@param recID: record id
@param uid: user id
@param ln: language
@param msg: comment body contents for when refreshing due to warning
@param textual_msg: the textual version of 'msg' when user cannot display FCKeditor
@param score: review score
@param note: review title
@param warnings: list of warning tuples (warning_msg, color)
@param show_title_p: if True, prefix the form with "Add Review" as title
@param can_attach_files: if user can upload attach file to record or not
@return: html add review form
"""
_ = gettext_set_language(ln)
link_dic = { 'siteurl' : CFG_SITE_URL,
'module' : 'comments',
'function' : 'add',
'arguments' : 'ln=%s&action=%s' % (ln, 'SUBMIT'),
'recID' : recID}
warnings = self.tmpl_warnings(warnings, ln)
if textual_msg is None:
textual_msg = msg
#from search_engine import print_record
#record_details = print_record(recID=recID, format='hb', ln=ln)
if nickname:
note_label = _("Note: Your nickname, %s, will be displayed as the author of this review.")
note_label %= ('<i>' + nickname + '</i>')
else:
(uid, nickname, display) = get_user_info(uid)
link = '<a href="%s/youraccount/edit">' % CFG_SITE_SECURE_URL
note_label = _("Note: you have not %(x_url_open)sdefined your nickname%(x_url_close)s. %(x_nickname)s will be displayed as the author of this comment.") % \
{'x_url_open': link,
'x_url_close': '</a>',
'x_nickname': ' <br /><i>' + display + '</i>'}
selected0 = ''
selected1 = ''
selected2 = ''
selected3 = ''
selected4 = ''
selected5 = ''
if score == 0:
selected0 = ' selected="selected"'
elif score == 1:
selected1 = ' selected="selected"'
elif score == 2:
selected2 = ' selected="selected"'
elif score == 3:
selected3 = ' selected="selected"'
elif score == 4:
selected4 = ' selected="selected"'
elif score == 5:
selected5 = ' selected="selected"'
## file_upload_url = None
## if can_attach_files:
## file_upload_url = '%s/record/%i/comments/attachments/put' % \
## (CFG_SITE_URL, recID)
editor = get_html_text_editor(name='msg',
content=msg,
textual_content=msg,
width='90%',
height='400px',
enabled=CFG_WEBCOMMENT_USE_RICH_TEXT_EDITOR,
# file_upload_url=file_upload_url,
toolbar_set = "WebComment")
form = """%(add_review)s
<table style="width: 100%%">
<tr>
<td style="padding-bottom: 10px;">%(rate_label)s:
<select name=\"score\" size=\"1\">
<option value=\"0\"%(selected0)s>-%(select_label)s-</option>
<option value=\"5\"%(selected5)s>***** (best)</option>
<option value=\"4\"%(selected4)s>****</option>
<option value=\"3\"%(selected3)s>***</option>
<option value=\"2\"%(selected2)s>**</option>
<option value=\"1\"%(selected1)s>* (worst)</option>
</select>
</td>
</tr>
<tr>
<td>%(title_label)s:</td>
</tr>
<tr>
<td style="padding-bottom: 10px;">
<input type="text" name="note" maxlength="250" style="width:90%%" value="%(note)s" />
</td>
</tr>
<tr>
<td>%(write_label)s:</td>
</tr>
<tr>
<td>
%(editor)s
</td>
</tr>
<tr>
<td class="reportabuse">%(note_label)s</td></tr>
</table>
""" % {'article_label': _('Article'),
'rate_label': _("Rate this article"),
'select_label': _("Select a score"),
'title_label': _("Give a title to your review"),
'write_label': _("Write your review"),
'note_label': note_label,
'note' : note!='' and note or "",
'msg' : msg!='' and msg or "",
#'record' : record_details
'add_review': show_title_p and ('<h2>'+_('Add review')+'</h2>') or '',
'selected0': selected0,
'selected1': selected1,
'selected2': selected2,
'selected3': selected3,
'selected4': selected4,
'selected5': selected5,
'editor': editor,
}
form_link = "%(siteurl)s/record/%(recID)s/reviews/%(function)s?%(arguments)s" % link_dic
form = self.createhiddenform(action=form_link, method="post", text=form, button=_('Add Review'))
return warnings + form
def tmpl_add_comment_successful(self, recID, ln, reviews, warnings, success):
"""
@param recID: record id
@param ln: language
@return: html page of successfully added comment/review
"""
_ = gettext_set_language(ln)
link_dic = { 'siteurl' : CFG_SITE_URL,
'module' : 'comments',
'function' : 'display',
'arguments' : 'ln=%s&do=od' % ln,
'recID' : recID,
'discussion': reviews == 1 and 'reviews' or 'comments'}
link = "%(siteurl)s/record/%(recID)s/%(discussion)s/%(function)s?%(arguments)s" % link_dic
if warnings:
out = self.tmpl_warnings(warnings, ln) + '<br /><br />'
else:
if reviews:
out = _("Your review was successfully added.") + '<br /><br />'
else:
out = _("Your comment was successfully added.") + '<br /><br />'
link += "#%s" % success
out += '<a href="%s">' % link
out += _('Back to record') + '</a>'
return out
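    # Example of the back-link built above (values are illustrative, and the
    # format of 'success' is assumed to be an HTML anchor id): for recID=123,
    # ln='en', reviews=0 and success='C42', the user is sent to
    #     <CFG_SITE_URL>/record/123/comments/display?ln=en&do=od#C42
    # i.e. the discussion page anchored at the newly added comment.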
def tmpl_create_multiple_actions_form(self,
form_name="",
form_action="",
method="get",
action_display={},
action_field_name="",
button_label="",
button_name="",
content="",
**hidden):
""" Creates an HTML form with a multiple choice of actions and a button to select it.
@param form_action: link to the receiver of the formular
@param form_name: name of the HTML formular
@param method: either 'GET' or 'POST'
@param action_display: dictionary of actions.
action is HTML name (name of action)
display is the string provided in the popup
@param action_field_name: html name of action field
@param button_label: what's written on the button
@param button_name: html name of the button
@param content: what's inside te formular
@param **hidden: dictionary of name/value pairs of hidden fields.
"""
output = """
<form action="%s" method="%s">""" % (form_action, method)
output += """
<table>
<tr>
<td style="vertical-align: top" colspan="2">
"""
output += content + '\n'
for key in hidden.keys():
if type(hidden[key]) is list:
for value in hidden[key]:
output += """
<input type="hidden" name="%s" value="%s" />""" % (key, value)
else:
output += """
<input type="hidden" name="%s" value="%s" />""" % (key, hidden[key])
output += """
</td>
</tr>
<tr>
<td style="text-align:right;">"""
if type(action_display) is dict and len(action_display.keys()):
output += """
<select name="%s">""" % action_field_name
for (key, value) in action_display.items():
output += """
<option value="%s">%s</option>""" % (key, value)
output += """
</select>"""
output += """
</td>
<td style="text-align:left;">
<input class="adminbutton" type="submit" value="%s" name="%s"/>""" % (button_label, button_name)
output += """
</td>
</tr>
</table>
</form>"""
return output
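    # Usage sketch (hypothetical arguments): list-valued hidden fields are
    # expanded into one <input type="hidden"> per value, so a call like
    #     self.tmpl_create_multiple_actions_form(form_action='/x', method='post',
    #                                            action_display={'delete': 'Delete'},
    #                                            action_field_name='action',
    #                                            button_label='OK', button_name='ok',
    #                                            content='...', comid=[1, 2])
    # emits two hidden 'comid' inputs plus the action <select> and the OK button.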
def tmpl_admin_index(self, ln):
"""
Index page
"""
# load the right message language
_ = gettext_set_language(ln)
out = '<ol>'
if CFG_WEBCOMMENT_ALLOW_COMMENTS or CFG_WEBCOMMENT_ALLOW_REVIEWS:
if CFG_WEBCOMMENT_ALLOW_COMMENTS:
out += '<h3>Comments status</h3>'
out += '<li><a href="%(siteurl)s/admin/webcomment/webcommentadmin.py/hot?ln=%(ln)s&comments=1">%(hot_cmt_label)s</a></li>' % \
{'siteurl': CFG_SITE_URL, 'ln': ln, 'hot_cmt_label': _("View most commented records")}
out += '<li><a href="%(siteurl)s/admin/webcomment/webcommentadmin.py/latest?ln=%(ln)s&comments=1">%(latest_cmt_label)s</a></li>' % \
{'siteurl': CFG_SITE_URL, 'ln': ln, 'latest_cmt_label': _("View latest commented records")}
out += '<li><a href="%(siteurl)s/admin/webcomment/webcommentadmin.py/comments?ln=%(ln)s&reviews=0">%(reported_cmt_label)s</a></li>' % \
{'siteurl': CFG_SITE_URL, 'ln': ln, 'reported_cmt_label': _("View all comments reported as abuse")}
if CFG_WEBCOMMENT_ALLOW_REVIEWS:
out += '<h3>Reviews status</h3>'
out += '<li><a href="%(siteurl)s/admin/webcomment/webcommentadmin.py/hot?ln=%(ln)s&comments=0">%(hot_rev_label)s</a></li>' % \
{'siteurl': CFG_SITE_URL, 'ln': ln, 'hot_rev_label': _("View most reviewed records")}
out += '<li><a href="%(siteurl)s/admin/webcomment/webcommentadmin.py/latest?ln=%(ln)s&comments=0">%(latest_rev_label)s</a></li>' % \
{'siteurl': CFG_SITE_URL, 'ln': ln, 'latest_rev_label': _("View latest reviewed records")}
out += '<li><a href="%(siteurl)s/admin/webcomment/webcommentadmin.py/comments?ln=%(ln)s&reviews=1">%(reported_rev_label)s</a></li>' % \
{'siteurl': CFG_SITE_URL, 'ln': ln, 'reported_rev_label': _("View all reviews reported as abuse")}
#<li><a href="%(siteurl)s/admin/webcomment/webcommentadmin.py/delete?ln=%(ln)s&comid=-1">%(delete_label)s</a></li>
out +="""
<h3>General</h3>
<li><a href="%(siteurl)s/admin/webcomment/webcommentadmin.py/users?ln=%(ln)s">%(view_users)s</a></li>
<li><a href="%(siteurl)s/help/admin/webcomment-admin-guide">%(guide)s</a></li>
""" % {'siteurl' : CFG_SITE_URL,
#'delete_label': _("Delete/Undelete comment(s) or suppress abuse report(s)"),
'view_users': _("View all users who have been reported"),
'ln' : ln,
'guide' : _("Guide")}
else:
out += _("Comments and reviews are disabled") + '<br />'
out += '</ol>'
from invenio.bibrankadminlib import addadminbox
return addadminbox('<b>%s</b>'% _("Menu"), [out])
def tmpl_admin_delete_form(self, ln, warnings):
"""
Display admin interface to fetch list of records to delete
@param warnings: list of warning_tuples where warning_tuple is (warning_message, text_color)
see tmpl_warnings, color is optional
"""
# load the right message language
_ = gettext_set_language(ln)
warnings = self.tmpl_warnings(warnings, ln)
out = '''
<br />
%s<br />
<br />'''% _("Please enter the ID of the comment/review so that you can view it before deciding whether to delete it or not")
form = '''
<table>
<tr>
<td>%s</td>
<td><input type=text name="comid" size="10" maxlength="10" value="" /></td>
</tr>
<tr>
<td><br /></td>
<tr>
</table>
<br />
%s <br/>
<br />
<table>
<tr>
<td>%s</td>
<td><input type=text name="recid" size="10" maxlength="10" value="" /></td>
</tr>
<tr>
<td><br /></td>
<tr>
</table>
<br />
''' % (_("Comment ID:"),
_("Or enter a record ID to list all the associated comments/reviews:"),
_("Record ID:"))
form_link = "%s/admin/webcomment/webcommentadmin.py/delete?ln=%s" % (CFG_SITE_URL, ln)
form = self.createhiddenform(action=form_link, method="get", text=form, button=_('View Comment'))
return warnings + out + form
def tmpl_admin_users(self, ln, users_data):
"""
@param users_data: tuple of ct, i.e. (ct, ct, ...)
where ct is a tuple (total_number_reported, total_comments_reported, total_reviews_reported, total_nb_votes_yes_of_reported,
total_nb_votes_total_of_reported, user_id, user_email, user_nickname)
sorted by order of ct having highest total_number_reported
"""
_ = gettext_set_language(ln)
u_reports = 0
u_comment_reports = 1
u_reviews_reports = 2
u_nb_votes_yes = 3
u_nb_votes_total = 4
u_uid = 5
u_email = 6
u_nickname = 7
if not users_data:
            return self.tmpl_warnings([(_("There have been no reports so far."), 'green')], ln)
user_rows = ""
for utuple in users_data:
com_label = _("View all %s reported comments") % utuple[u_comment_reports]
com_link = '''<a href="%s/admin/webcomment/webcommentadmin.py/comments?ln=%s&uid=%s&reviews=0">%s</a><br />''' % \
(CFG_SITE_URL, ln, utuple[u_uid], com_label)
rev_label = _("View all %s reported reviews") % utuple[u_reviews_reports]
rev_link = '''<a href="%s/admin/webcomment/webcommentadmin.py/comments?ln=%s&uid=%s&reviews=1">%s</a>''' % \
(CFG_SITE_URL, ln, utuple[u_uid], rev_label)
if not utuple[u_nickname]:
user_info = get_user_info(utuple[u_uid])
nickname = user_info[2]
else:
nickname = utuple[u_nickname]
if CFG_WEBCOMMENT_ALLOW_REVIEWS:
review_row = """
<td class="admintdleft" style="padding: 5px; border-bottom: 1px solid lightgray;">%s</td>
<td class="admintdleft" style="padding: 5px; border-bottom: 1px solid lightgray;">%s</td>
<td class="admintdleft" style="padding: 5px; border-bottom: 1px solid lightgray;">%s</td>"""
review_row %= (utuple[u_nb_votes_yes],
utuple[u_nb_votes_total] - utuple[u_nb_votes_yes],
utuple[u_nb_votes_total])
else:
review_row = ''
user_rows += """
<tr>
<td class="admintdleft" style="padding: 5px; border-bottom: 1px solid lightgray;">%(nickname)s</td>
<td class="admintdleft" style="padding: 5px; border-bottom: 1px solid lightgray;">%(email)s</td>
<td class="admintdleft" style="padding: 5px; border-bottom: 1px solid lightgray;">%(uid)s</td>%(review_row)s
<td class="admintdleft" style="padding: 5px; border-bottom: 1px solid lightgray; font-weight: bold;">%(reports)s</td>
<td class="admintdleft" style="padding: 5px; border-bottom: 1px solid lightgray;">%(com_link)s%(rev_link)s</td>
</tr>""" % { 'nickname' : nickname,
'email' : utuple[u_email],
'uid' : utuple[u_uid],
'reports' : utuple[u_reports],
'review_row': review_row,
'siteurl' : CFG_SITE_URL,
'ln' : ln,
'com_link' : CFG_WEBCOMMENT_ALLOW_COMMENTS and com_link or "",
'rev_link' : CFG_WEBCOMMENT_ALLOW_REVIEWS and rev_link or ""
}
out = "<br />"
out += _("Here is a list, sorted by total number of reports, of all users who have had a comment reported at least once.")
out += """
<br />
<br />
<table class="admin_wvar" style="width: 100%%;">
<thead>
<tr class="adminheaderleft">
<th>"""
out += _("Nickname") + '</th>\n'
out += '<th>' + _("Email") + '</th>\n'
out += '<th>' + _("User ID") + '</th>\n'
if CFG_WEBCOMMENT_ALLOW_REVIEWS > 0:
out += '<th>' + _("Number positive votes") + '</th>\n'
out += '<th>' + _("Number negative votes") + '</th>\n'
out += '<th>' + _("Total number votes") + '</th>\n'
out += '<th>' + _("Total number of reports") + '</th>\n'
out += '<th>' + _("View all user's reported comments/reviews") + '</th>\n'
out += """
</tr>
</thead>
<tbody>%s
</tbody>
</table>
""" % user_rows
return out
def tmpl_admin_select_comment_checkbox(self, cmt_id):
""" outputs a checkbox named "comidXX" where XX is cmt_id """
return '<input type="checkbox" name="comid%i" />' % int(cmt_id)
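    # e.g. tmpl_admin_select_comment_checkbox(42) returns
    #     <input type="checkbox" name="comid42" />
    # which is presumably parsed back as a 'comidXX' parameter on the admin side.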
def tmpl_admin_user_info(self, ln, nickname, uid, email):
""" prepares informations about a user"""
_ = gettext_set_language(ln)
out = """
%(nickname_label)s: %(messaging)s<br />
%(uid_label)s: %(uid)i<br />
%(email_label)s: <a href="mailto:%(email)s">%(email)s</a>"""
out %= {'nickname_label': _("Nickname"),
'messaging': self.create_messaging_link(uid, nickname, ln),
'uid_label': _("User ID"),
'uid': int(uid),
'email_label': _("Email"),
'email': email}
return out
def tmpl_admin_review_info(self, ln, reviews, nb_reports, cmt_id, rec_id, status):
""" outputs information about a review """
_ = gettext_set_language(ln)
if reviews:
reported_label = _("This review has been reported %i times")
else:
reported_label = _("This comment has been reported %i times")
reported_label %= int(nb_reports)
out = """
%(reported_label)s<br />
<a href="%(siteurl)s/record/%(rec_id)i?ln=%(ln)s">%(rec_id_label)s</a><br />
%(cmt_id_label)s"""
out %= {'reported_label': reported_label,
'rec_id_label': _("Record") + ' #' + str(rec_id),
'siteurl': CFG_SITE_URL,
'rec_id': int(rec_id),
'cmt_id_label': _("Comment") + ' #' + str(cmt_id),
'ln': ln}
if status in ['dm', 'da']:
out += '<br /><div style="color:red;">Marked as deleted</div>'
return out
def tmpl_admin_latest(self, ln, comment_data, comments, error, user_collections, collection):
"""
@param comment_data: same type of tuple as that
        which is returned by webcommentadminlib.py/query_get_latest, i.e.
tuple (nickname, uid, date_creation, body, id) if latest comments or
tuple (nickname, uid, date_creation, body, star_score, id) if latest reviews
"""
_ = gettext_set_language(ln)
out = """
<script type='text/javascript'>
function collectionChange()
{
document.collection_form.submit();
}
</script>
"""
out += '<form method="get" name="collection_form" action="%s/admin/webcomment/webcommentadmin.py/latest?ln=%s&comments=%s">' % (CFG_SITE_URL, ln, comments)
out += '<input type="hidden" name="ln" value=%s>' % ln
out += '<input type="hidden" name="comments" value=%s>' % comments
out += '<div> Filter by collection: <select name="collection" onchange="javascript:collectionChange();">'
for collection_name in user_collections:
if collection_name == collection:
out += '<option "SELECTED" value="%(collection_name)s">%(collection_name)s</option>' % {'collection_name': cgi.escape(collection_name)}
else:
out += '<option value="%(collection_name)s">%(collection_name)s</option>' % {'collection_name': cgi.escape(collection_name)}
out += '</select></div></form><br />'
if error == 1:
out += "<i>User is not authorized to view such collection.</i><br />"
return out
elif error == 2:
out += "<i>There are no %s for this collection.</i><br />" % (comments and 'comments' or 'reviews')
return out
out += """
<ol>
"""
for (cmt_tuple, meta_data) in comment_data:
bibrec_id = meta_data[3]
content = format_record(bibrec_id, "hs")
if not comments:
out += """
<li> %(content)s <br/> <span class="moreinfo"> <a class="moreinfo" href=%(comment_url)s> reviewed by %(user)s</a>
(%(stars)s) \"%(body)s\" on <i> %(date)s </i></li> </span> <br/>
""" % {'content': content,
'comment_url': CFG_SITE_URL + '/record/' + str(bibrec_id) + '/reviews',
'user':cmt_tuple[0] ,
'stars': '*' * int(cmt_tuple[4]) ,
'body': cmt_tuple[3][:20] + '...',
'date': cmt_tuple[2]}
else:
out += """
<li> %(content)s <br/> <span class="moreinfo"> <a class="moreinfo" href=%(comment_url)s> commented by %(user)s</a>,
\"%(body)s\" on <i> %(date)s </i></li> </span> <br/>
""" % {'content': content,
'comment_url': CFG_SITE_URL + '/record/' + str(bibrec_id) + '/comments',
'user':cmt_tuple[0] ,
'body': cmt_tuple[3][:20] + '...',
'date': cmt_tuple[2]}
out += """</ol>"""
return out
def tmpl_admin_hot(self, ln, comment_data, comments, error, user_collections, collection):
"""
@param comment_data: same type of tuple as that
        which is returned by webcommentadminlib.py/query_get_hot, i.e.
tuple (id_bibrec, date_last_comment, users, count)
"""
_ = gettext_set_language(ln)
out = """
<script type='text/javascript'>
function collectionChange()
{
document.collection_form.submit();
}
</script>
"""
out += '<form method="get" name="collection_form" action="%s/admin/webcomment/webcommentadmin.py/hot?ln=%s&comments=%s">' % (CFG_SITE_URL, ln, comments)
out += '<input type="hidden" name="ln" value=%s>' % ln
out += '<input type="hidden" name="comments" value=%s>' % comments
out += '<div> Filter by collection: <select name="collection" onchange="javascript:collectionChange();">'
for collection_name in user_collections:
if collection_name == collection:
out += '<option "SELECTED" value="%(collection_name)s">%(collection_name)s</option>' % {'collection_name': cgi.escape(collection_name)}
else:
out += '<option value="%(collection_name)s">%(collection_name)s</option>' % {'collection_name': cgi.escape(collection_name)}
out += '</select></div></form><br />'
if error == 1:
out += "<i>User is not authorized to view such collection.</i><br />"
return out
elif error == 2:
out += "<i>There are no %s for this collection.</i><br />" % (comments and 'comments' or 'reviews')
return out
        out += """
              <ol>
        """
        for cmt_tuple in comment_data:
bibrec_id = cmt_tuple[0]
content = format_record(bibrec_id, "hs")
last_comment_date = cmt_tuple[1]
total_users = cmt_tuple[2]
total_comments = cmt_tuple[3]
if comments:
comment_url = CFG_SITE_URL + '/record/' + str(bibrec_id) + '/comments'
str_comment = int(total_comments) > 1 and 'comments' or 'comment'
else:
comment_url = CFG_SITE_URL + '/record/' + str(bibrec_id) + '/reviews'
str_comment = int(total_comments) > 1 and 'reviews' or 'review'
out += """
<li> %(content)s <br/> <span class="moreinfo"> <a class="moreinfo" href=%(comment_url)s> %(total_comments)s
%(str_comment)s</a>
(%(total_users)s %(user)s), latest on <i> %(last_comment_date)s </i></li> </span> <br/>
""" % {'content': content,
'comment_url': comment_url ,
'total_comments': total_comments,
'str_comment': str_comment,
'total_users': total_users,
'user': int(total_users) > 1 and 'users' or 'user',
'last_comment_date': last_comment_date}
out += """</ol>"""
return out
def tmpl_admin_comments(self, ln, uid, comID, recID, comment_data, reviews, error, user_collections, collection):
"""
@param comment_data: same type of tuple as that
which is returned by webcomment.py/query_retrieve_comments_or_remarks i.e.
tuple of comment where comment is
tuple (nickname,
date_creation,
body,
id) if ranking disabled or
tuple (nickname,
date_creation,
body,
nb_votes_yes,
nb_votes_total,
star_score,
title,
id)
"""
_ = gettext_set_language(ln)
coll_form = """
<script type='text/javascript'>
function collectionChange()
{
document.collection_form.submit();
}
</script>
"""
coll_form += '<form method="get" name="collection_form" action="%s/admin/webcomment/webcommentadmin.py/comments?ln=%s&reviews=%s">' % (CFG_SITE_URL, ln, reviews)
coll_form += '<input type="hidden" name="ln" value=%s>' % ln
coll_form += '<input type="hidden" name="reviews" value=%s>' % reviews
coll_form += '<div> Filter by collection: <select name="collection" onchange="javascript:collectionChange();">'
for collection_name in user_collections:
if collection_name == collection:
coll_form += '<option "SELECTED" value="%(collection_name)s">%(collection_name)s</option>' % {'collection_name': cgi.escape(collection_name)}
else:
coll_form += '<option value="%(collection_name)s">%(collection_name)s</option>' % {'collection_name': cgi.escape(collection_name)}
coll_form += '</select></div></form><br />'
if error == 1:
coll_form += "<i>User is not authorized to view such collection.</i><br />"
return coll_form
elif error == 2:
coll_form += "<i>There are no %s for this collection.</i><br />" % (reviews and 'reviews' or 'comments')
return coll_form
comments = []
comments_info = []
checkboxes = []
users = []
for (cmt_tuple, meta_data) in comment_data:
if reviews:
comments.append(self.tmpl_get_comment_with_ranking(None,#request object
ln,
cmt_tuple[0],#nickname
cmt_tuple[1],#userid
cmt_tuple[2],#date_creation
cmt_tuple[3],#body
cmt_tuple[9],#status
0,
cmt_tuple[5],#nb_votes_total
cmt_tuple[4],#nb_votes_yes
cmt_tuple[6],#star_score
cmt_tuple[7]))#title
else:
comments.append(self.tmpl_get_comment_without_ranking(None,#request object
ln,
cmt_tuple[0],#nickname
cmt_tuple[1],#userid
cmt_tuple[2],#date_creation
cmt_tuple[3],#body
cmt_tuple[5],#status
0,
None, #reply_link
None, #report_link
None, #undelete_link
None)) #delete_links
users.append(self.tmpl_admin_user_info(ln,
meta_data[0], #nickname
meta_data[1], #uid
meta_data[2]))#email
if reviews:
status = cmt_tuple[9]
else:
status = cmt_tuple[5]
comments_info.append(self.tmpl_admin_review_info(ln,
reviews,
meta_data[5], # nb abuse reports
meta_data[3], # cmt_id
meta_data[4], # rec_id
status)) # status
checkboxes.append(self.tmpl_admin_select_comment_checkbox(meta_data[3]))
form_link = "%s/admin/webcomment/webcommentadmin.py/del_com?ln=%s" % (CFG_SITE_URL, ln)
out = """
<table class="admin_wvar" style="width:100%%;">
<thead>
<tr class="adminheaderleft">
<th>%(review_label)s</th>
<th>%(written_by_label)s</th>
<th>%(review_info_label)s</th>
<th>%(select_label)s</th>
</tr>
</thead>
<tbody>""" % {'review_label': reviews and _("Review") or _("Comment"),
'written_by_label': _("Written by"),
                      'review_info_label': _("General information"),
'select_label': _("Select")}
        for i in range(len(comments)):
out += """
<tr>
<td class="admintdleft" style="padding: 5px; border-bottom: 1px solid lightgray;">%s</td>
<td class="admintdleft" style="padding: 5px; border-bottom: 1px solid lightgray;">%s</td>
<td class="admintdleft" style="padding: 5px; border-bottom: 1px solid lightgray;">%s</td>
<td class="admintd" style="padding: 5px; border-bottom: 1px solid lightgray;">%s</td>
</tr>""" % (comments[i], users[i], comments_info[i], checkboxes[i])
out += """
</tbody>
</table>"""
if reviews:
action_display = {
'delete': _('Delete selected reviews'),
'unreport': _('Suppress selected abuse report'),
'undelete': _('Undelete selected reviews')
}
else:
action_display = {
'undelete': _('Undelete selected comments'),
'delete': _('Delete selected comments'),
'unreport': _('Suppress selected abuse report')
}
form = self.tmpl_create_multiple_actions_form(form_name="admin_comment",
form_action=form_link,
method="post",
action_display=action_display,
action_field_name='action',
button_label=_("OK"),
button_name="okbutton",
content=out)
if uid > 0:
header = '<br />'
if reviews:
header += _("Here are the reported reviews of user %s") % uid
else:
header += _("Here are the reported comments of user %s") % uid
header += '<br /><br />'
if comID > 0 and recID <= 0 and uid <= 0:
if reviews:
header = '<br />' +_("Here is review %s")% comID + '<br /><br />'
else:
header = '<br />' +_("Here is comment %s")% comID + '<br /><br />'
if uid > 0 and comID > 0 and recID <= 0:
if reviews:
header = '<br />' + _("Here is review %(x_cmtID)s written by user %(x_user)s") % {'x_cmtID': comID, 'x_user': uid}
else:
header = '<br />' + _("Here is comment %(x_cmtID)s written by user %(x_user)s") % {'x_cmtID': comID, 'x_user': uid}
            header += '<br /><br />'
if comID <= 0 and recID <= 0 and uid <= 0:
header = '<br />'
if reviews:
header += _("Here are all reported reviews sorted by the most reported")
else:
header += _("Here are all reported comments sorted by the most reported")
header += "<br /><br />"
elif recID > 0:
header = '<br />'
if reviews:
header += _("Here are all reviews for record %i, sorted by the most reported" % recID)
header += '<br /><a href="%s/admin/webcomment/webcommentadmin.py/delete?comid=&recid=%s&reviews=0">%s</a>' % (CFG_SITE_URL, recID, _("Show comments"))
else:
header += _("Here are all comments for record %i, sorted by the most reported" % recID)
header += '<br /><a href="%s/admin/webcomment/webcommentadmin.py/delete?comid=&recid=%s&reviews=1">%s</a>' % (CFG_SITE_URL, recID, _("Show reviews"))
header += "<br /><br />"
return coll_form + header + form
def tmpl_admin_del_com(self, del_res, ln=CFG_SITE_LANG):
"""
@param del_res: list of the following tuple (comment_id, was_successfully_deleted),
                        was_successfully_deleted is boolean (0=false, >0=true)
"""
_ = gettext_set_language(ln)
table_rows = ''
for deltuple in del_res:
table_rows += """
<tr>
<td class="admintdleft" style="padding: 5px; border-bottom: 1px solid lightgray;">%s</td>
<td class="admintdleft" style="padding: 5px; border-bottom: 1px solid lightgray;">%s</td>
</tr>""" % (deltuple[0], deltuple[1]>0 and _("Yes") or "<span class=\"important\">" +_("No") + "</span>")
out = """
<table class="admin_wvar">
<tr class="adminheaderleft">
<td style="padding-right:10px;">%s</td>
<td>%s</td>
</tr>%s
<table>""" % (_("comment ID"), _("successfully deleted"), table_rows)
return out
def tmpl_admin_undel_com(self, del_res, ln=CFG_SITE_LANG):
"""
@param del_res: list of the following tuple (comment_id, was_successfully_undeleted),
                        was_successfully_undeleted is boolean (0=false, >0=true)
"""
_ = gettext_set_language(ln)
table_rows = ''
for deltuple in del_res:
table_rows += """
<tr>
<td class="admintdleft" style="padding: 5px; border-bottom: 1px solid lightgray;">%s</td>
<td class="admintdleft" style="padding: 5px; border-bottom: 1px solid lightgray;">%s</td>
</tr>""" % (deltuple[0], deltuple[1]>0 and _("Yes") or "<span class=\"important\">" +_("No") + "</span>")
out = """
<table class="admin_wvar">
<tr class="adminheaderleft">
<td style="padding-right:10px;">%s</td>
<td>%s</td>
</tr>%s
<table>""" % (_("comment ID"), _("successfully undeleted"), table_rows)
return out
def tmpl_admin_suppress_abuse_report(self, del_res, ln=CFG_SITE_LANG):
"""
@param del_res: list of the following tuple (comment_id, was_successfully_deleted),
                        was_successfully_deleted is boolean (0=false, >0=true)
"""
_ = gettext_set_language(ln)
table_rows = ''
for deltuple in del_res:
table_rows += """
<tr>
<td class="admintdleft" style="padding: 5px; border-bottom: 1px solid lightgray;">%s</td>
<td class="admintdleft" style="padding: 5px; border-bottom: 1px solid lightgray;">%s</td>
</tr>""" % (deltuple[0], deltuple[1]>0 and _("Yes") or "<span class=\"important\">" +_("No") + "</span>")
out = """
<table class="admin_wvar">
<tr class="adminheaderleft">
<td style ="padding-right: 10px;">%s</td>
<td>%s</td>
</tr>%s
<table>""" % (_("comment ID"), _("successfully suppressed abuse report"), table_rows)
return out
def tmpl_mini_review(self, recID, ln=CFG_SITE_LANG, action='SUBMIT',
avg_score=0, nb_comments_total=0):
"""Display the mini version of reviews (only the grading part)"""
_ = gettext_set_language(ln)
url = '%s/record/%s/reviews/add?ln=%s&action=%s' % (CFG_SITE_URL, recID, ln, action)
if avg_score > 0:
score = _("Average review score: %(x_nb_score)s based on %(x_nb_reviews)s reviews") % \
{'x_nb_score': '<b>%.1f</b>' % avg_score,
'x_nb_reviews': nb_comments_total}
else:
            score = '(' + _("Not yet reviewed") + ')'
if avg_score == 5:
s1, s2, s3, s4, s5 = 'full', 'full', 'full', 'full', 'full'
elif avg_score >= 4.5:
s1, s2, s3, s4, s5 = 'full', 'full', 'full', 'full', 'half'
elif avg_score >= 4:
s1, s2, s3, s4, s5 = 'full', 'full', 'full', 'full', ''
elif avg_score >= 3.5:
s1, s2, s3, s4, s5 = 'full', 'full', 'full', 'half', ''
elif avg_score >= 3:
s1, s2, s3, s4, s5 = 'full', 'full', 'full', '', ''
elif avg_score >= 2.5:
s1, s2, s3, s4, s5 = 'full', 'full', 'half', '', ''
elif avg_score >= 2:
s1, s2, s3, s4, s5 = 'full', 'full', '', '', ''
elif avg_score >= 1.5:
s1, s2, s3, s4, s5 = 'full', 'half', '', '', ''
        elif avg_score >= 1:
s1, s2, s3, s4, s5 = 'full', '', '', '', ''
else:
s1, s2, s3, s4, s5 = '', '', '', '', ''
out = '''
<small class="detailedRecordActions">%(rate)s:</small><br /><br />
<div style="margin:auto;width:160px;">
<span style="display:none;">Rate this document:</span>
<div class="star %(s1)s" ><a href="%(url)s&score=1">1</a>
<div class="star %(s2)s" ><a href="%(url)s&score=2">2</a>
<div class="star %(s3)s" ><a href="%(url)s&score=3">3</a>
<div class="star %(s4)s" ><a href="%(url)s&score=4">4</a>
<div class="star %(s5)s" ><a href="%(url)s&score=5">5</a></div></div></div></div></div>
<div style="clear:both"> </div>
</div>
<small>%(score)s</small>
''' % {'url': url,
'score': score,
'rate': _("Rate this document"),
's1': s1,
's2': s2,
's3': s3,
's4': s4,
's5': s5
}
return out
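    # Star-rendering example (derived from the ladder above): an average score
    # of 3.7 falls in the [3.5, 4) bucket, so the classes become
    #     s1..s5 = 'full', 'full', 'full', 'half', ''
    # i.e. three full stars, one half star and one empty star.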
def tmpl_email_new_comment_header(self, recID, title, reviews,
comID, report_numbers,
can_unsubscribe=True,
ln=CFG_SITE_LANG, uid=-1):
"""
Prints the email header used to notify subscribers that a new
comment/review was added.
        @param recID: the ID of the commented/reviewed record
        @param title: the title of the commented/reviewed record
        @param reviews: True if it is a review, False if it is a comment
@param comID: the comment ID
@param report_numbers: the report number(s) of the record
@param can_unsubscribe: True if user can unsubscribe from alert
@param ln: language
"""
# load the right message language
_ = gettext_set_language(ln)
user_info = collect_user_info(uid)
out = _("Hello:") + '\n\n' + \
(reviews and _("The following review was sent to %(CFG_SITE_NAME)s by %(user_nickname)s:") or \
_("The following comment was sent to %(CFG_SITE_NAME)s by %(user_nickname)s:")) % \
{'CFG_SITE_NAME': CFG_SITE_NAME,
'user_nickname': user_info['nickname']}
out += '\n(<%s>)' % (CFG_SITE_URL + '/record/' + str(recID))
out += '\n\n\n'
return out
def tmpl_email_new_comment_footer(self, recID, title, reviews,
comID, report_numbers,
can_unsubscribe=True,
ln=CFG_SITE_LANG):
"""
Prints the email footer used to notify subscribers that a new
comment/review was added.
        @param recID: the ID of the commented/reviewed record
        @param title: the title of the commented/reviewed record
        @param reviews: True if it is a review, False if it is a comment
@param comID: the comment ID
@param report_numbers: the report number(s) of the record
@param can_unsubscribe: True if user can unsubscribe from alert
@param ln: language
"""
# load the right message language
_ = gettext_set_language(ln)
out = '\n\n-- \n'
out += _("This is an automatic message, please don't reply to it.")
out += '\n'
out += _("To post another comment, go to <%(x_url)s> instead.") % \
{'x_url': CFG_SITE_URL + '/record/' + str(recID) + \
(reviews and '/reviews' or '/comments') + '/add'}
out += '\n'
if not reviews:
out += _("To specifically reply to this comment, go to <%(x_url)s>") % \
{'x_url': CFG_SITE_URL + '/record/' + str(recID) + \
'/comments/add?action=REPLY&comid=' + str(comID)}
out += '\n'
if can_unsubscribe:
out += _("To unsubscribe from this discussion, go to <%(x_url)s>") % \
{'x_url': CFG_SITE_URL + '/record/' + str(recID) + \
'/comments/unsubscribe'}
out += '\n'
out += _("For any question, please use <%(CFG_SITE_SUPPORT_EMAIL)s>") % \
{'CFG_SITE_SUPPORT_EMAIL': CFG_SITE_SUPPORT_EMAIL}
return out
def tmpl_email_new_comment_admin(self, recID):
"""
Prints the record information used in the email to notify the
system administrator that a new comment has been posted.
@param recID: the ID of the commented/reviewed record
"""
out = ""
title = get_fieldvalues(recID, "245__a")
authors = ', '.join(get_fieldvalues(recID, "100__a") + get_fieldvalues(recID, "700__a"))
#res_author = ""
#res_rep_num = ""
#for author in authors:
# res_author = res_author + ' ' + author
dates = get_fieldvalues(recID, "260__c")
report_nums = get_fieldvalues(recID, "037__a")
report_nums += get_fieldvalues(recID, "088__a")
report_nums = ', '.join(report_nums)
#for rep_num in report_nums:
# res_rep_num = res_rep_num + ', ' + rep_num
out += " Title = %s \n" % (title and title[0] or "No Title")
out += " Authors = %s \n" % authors
if dates:
out += " Date = %s \n" % dates[0]
out += " Report number = %s" % report_nums
return out
|
kaplun/Invenio-OpenAIRE
|
modules/webcomment/lib/webcomment_templates.py
|
Python
|
gpl-2.0
| 109,494 | 0.006694 |
from sopel import module
from sopel.tools import Identifier
import time
import re
TIMEOUT = 36000
@module.rule('^(</?3)\s+([a-zA-Z0-9\[\]\\`_\^\{\|\}-]{1,32})\s*$')
@module.intent('ACTION')
@module.require_chanmsg("You may only modify someone's rep in a channel.")
def heart_cmd(bot, trigger):
luv_h8(bot, trigger, trigger.group(2), 'h8' if '/' in trigger.group(1) else 'luv')
@module.rule('.*?(?:([a-zA-Z0-9\[\]\\`_\^\{\|\}-]{1,32})(\+{2}|-{2})).*?')
@module.require_chanmsg("You may only modify someone's rep in a channel.")
def karma_cmd(bot, trigger):
if re.match('^({prefix})({cmds})'.format(prefix=bot.config.core.prefix, cmds='|'.join(luv_h8_cmd.commands)),
trigger.group(0)):
return # avoid processing commands if people try to be tricky
for (nick, act) in re.findall('(?:([a-zA-Z0-9\[\]\\`_\^\{\|\}-]{1,32})(\+{2}|-{2}))', trigger.raw):
if luv_h8(bot, trigger, nick, 'luv' if act == '++' else 'h8', warn_nonexistent=False):
break
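# Illustrative behaviour of karma_cmd (a reading of the code, not upstream
# documentation): a message like "thanks Phixion++ and Thaya--" is scanned
# left to right, and the first (nick, act) pair whose luv_h8() call succeeds
# breaks the loop, so at most one rep change happens per message.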
@module.commands('luv', 'h8')
@module.example(".luv Phixion")
@module.example(".h8 Thaya")
@module.require_chanmsg("You may only modify someone's rep in a channel.")
def luv_h8_cmd(bot, trigger):
if not trigger.group(3):
bot.reply("No user specified.")
return
target = Identifier(trigger.group(3))
luv_h8(bot, trigger, target, trigger.group(1))
def luv_h8(bot, trigger, target, which, warn_nonexistent=True):
target = verified_nick(bot, target, trigger.sender)
which = which.lower() # issue #18
pfx = change = selfreply = None # keep PyCharm & other linters happy
if not target:
if warn_nonexistent:
bot.reply("You can only %s someone who is here." % which)
return False
if rep_too_soon(bot, trigger.nick):
return False
if which == 'luv':
selfreply = "No narcissism allowed!"
pfx, change = 'in', 1
if which == 'h8':
selfreply = "Go to 4chan if you really hate yourself!"
pfx, change = 'de', -1
if not (pfx and change and selfreply): # safeguard against leaving something in the above mass-None assignment
bot.say("Logic error! Please report this to %s." % bot.config.core.owner)
return
if is_self(bot, trigger.nick, target):
bot.reply(selfreply)
return False
rep = mod_rep(bot, trigger.nick, target, change)
bot.say("%s has %screased %s's reputation score to %d" % (trigger.nick, pfx, target, rep))
return True
@module.commands('rep')
@module.example(".rep Phixion")
def show_rep(bot, trigger):
target = trigger.group(3) or trigger.nick
rep = get_rep(bot, target)
if rep is None:
bot.say("%s has no reputation score yet." % target)
return
bot.say("%s's current reputation score is %d." % (target, rep))
# helpers
def get_rep(bot, target):
return bot.db.get_nick_value(Identifier(target), 'rep_score')
def set_rep(bot, caller, target, newrep):
bot.db.set_nick_value(Identifier(target), 'rep_score', newrep)
bot.db.set_nick_value(Identifier(caller), 'rep_used', time.time())
def mod_rep(bot, caller, target, change):
rep = get_rep(bot, target) or 0
rep += change
set_rep(bot, caller, target, rep)
return rep
def get_rep_used(bot, nick):
return bot.db.get_nick_value(Identifier(nick), 'rep_used') or 0
def set_rep_used(bot, nick):
bot.db.set_nick_value(Identifier(nick), 'rep_used', time.time())
def rep_used_since(bot, nick):
now = time.time()
last = get_rep_used(bot, nick)
return abs(last - now)
def rep_too_soon(bot, nick):
since = rep_used_since(bot, nick)
if since < TIMEOUT:
bot.notice("You must wait %d more seconds before changing someone's rep again." % (TIMEOUT - since), nick)
return True
else:
return False
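# Timing sketch: TIMEOUT is 36000 seconds (10 hours). If a user last changed
# someone's rep 30000 seconds ago, rep_used_since() returns roughly 30000 and
# the notice tells them to wait the remaining ~6000 seconds.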
def is_self(bot, nick, target):
nick = Identifier(nick)
target = Identifier(target)
if nick == target:
return True # shortcut to catch common goofballs
try:
nick_id = bot.db.get_nick_id(nick, False)
target_id = bot.db.get_nick_id(target, False)
except ValueError:
return False # if either nick doesn't have an ID, they can't be in a group
return nick_id == target_id
def verified_nick(bot, nick, channel):
    match = re.search('([a-zA-Z0-9\[\]\\`_\^\{\|\}-]{1,32})', nick)
    if not match:  # no valid nick characters at all
        return None
    nick = Identifier(match.group(1))
if nick.lower() not in bot.privileges[channel.lower()]:
if nick.endswith('--'):
if Identifier(nick[:-2]).lower() in bot.privileges[channel.lower()]:
return Identifier(nick[:-2])
return None
return nick
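# Note on the '--' special case above (a reading of the code, not documented
# upstream): a decrement like "Thaya--" can arrive here with the trailing
# dashes still attached, since '-' is a legal nick character; if "Thaya--" is
# not present in the channel but "Thaya" is, the dashes are stripped and the
# real nick is returned.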
|
ridelore/sopel-modules
|
rep.py
|
Python
|
apache-2.0
| 4,834 | 0.008068 |
# Copyright (C) 2010 Agorabox. All Rights Reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import utils
import os
from datetime import datetime
import new
from gettext import dgettext
from ufo.debugger import Debugger
from ufo.constants import ShareDoc
from ufo.database import *
from ufo.utils import get_user_infos
from ufo.user import user
class TranslatableText:
def __init__(self, text):
self.text = text
def __repr__(self):
return dgettext("python-ufo", self.text)
def _(message):
return TranslatableText(message)
class action:
def __init__(self, description):
self.description = description
def __call__(self, func):
func.action = True
func.description = self.description
return func
class NotificationDocument(Document, Debugger):
doctype = TextField(default="NotificationDocument")
subtype = TextField(default="")
date = DateTimeField(default=datetime.now)
initiator = TextField()
target = TextField()
by_id = ViewField('notification',
language = 'javascript',
map_fun = "function (doc) {" \
"if (doc.doctype === 'NotificationDocument') {" \
"emit(doc._id, doc);" \
"}" \
"}")
by_subtype_and_initiator = ViewField('notification',
language = 'javascript',
map_fun = "function (doc) {" \
"if (doc.doctype === 'NotificationDocument' && doc.subtype && doc.initiator) {" \
"emit([doc.subtype, doc.initiator], doc);" \
"}" \
"}")
def __init__(self, *args, **fields):
super(NotificationDocument, self).__init__(*args, **fields)
if fields.get('initiator') and fields.get('target'):
self.initiator = fields['initiator']
self.target = fields['target']
@action(_("Dismiss"))
def dismiss(self):
user.dismiss(self)
def __getitem__(self, key):
try:
value = getattr(self, "pretty_" + key)
except:
try:
value = getattr(self, key)
except:
value = super(Document, self).__getitem__(key)
if isinstance(value, TranslatableText):
return repr(value)
else:
return value
@property
def fullname(self):
return get_user_infos(login=self.initiator)['fullname']
@property
def actions(self):
actions = {}
for k, v in self.__class__.__dict__.items():
if type(v) == new.function and getattr(v, "action", False):
actions[k] = repr(v.description)
return actions
@property
def default_action(self):
        for action in self.actions:  # iterate over action names (keys), not descriptions
            if getattr(getattr(self, action), "default", False):
return action
return "dismiss"
class NewFriendshipNotification(NotificationDocument):
subtype = TextField(default="NewFriendship")
title = _('New friendship invitation')
body = _('You have been invited by %(fullname)s to be his/her friend.')
summary = _("%(fullname)s wants to be your friend")
def __init__(self, **fields):
super(NewFriendshipNotification, self).__init__()
if fields.get('initiator') and fields.get('target'):
self.initiator = fields['initiator']
self.target = fields['target']
@action(_("Accept"))
def accept_invitation(self):
self.debug("Accepting the friend invitation from '%s' to '%s'"
% (self.initiator, self.target))
user.accept_friend(self.initiator)
@action(_("Refuse"))
def refuse_invitation(self):
self.debug("Refusing the friend invitation from '%s' to '%s'"
% (self.initiator, self.target))
user.refuse_friend(self.initiator)
@action(_("Block user"))
def block_invitation(self):
self.debug("Blocking the friend invitation from '%s' to '%s'"
% (self.initiator, self.target))
user.block_user(self.initiator)
class FollowRequestNotification(NotificationDocument):
subtype = TextField(default="FollowRequest")
title = _('New file sharing request')
body = _('%(fullname)s would like to be in your followers list.')
summary = _("%(fullname)s wants to follow you")
@action(_("Accept"))
def accept_invitation(self):
self.debug("Accepting the follow request from '%s' to '%s'"
% (self.initiator, self.target))
user.accept_following(self.initiator)
@action(_("Refuse"))
def refuse_invitation(self):
self.debug("Refusing the follow request from '%s' to '%s'"
% (self.initiator, self.target))
user.refuse_following(self.initiator)
@action(_("Block user"))
def block_invitation(self):
self.debug("Blocking the follow request from '%s' to '%s'"
% (self.initiator, self.target))
user.block_user(self.initiator)
class AcceptedFriendshipNotification(NotificationDocument):
subtype = TextField(default="AcceptedFriendship")
title = _('Friendship invitation accepted')
body = _('%(fullname)s has accepted your friendship invitation, '
'you can now share some document with him/her.')
summary = _("%(fullname)s has accepted your invitation")
    @action(_("Accept"))
def accept_friend(self):
self.debug("Proceed pending shares from '%s' to '%s'" % (self.initiator, self.target))
# user.accept_friend(self.initiator)
class CanceledFriendshipNotification(NotificationDocument):
subtype = TextField(default="CanceledFriendship")
title = _('A friendship has been canceled')
body = _('%(fullname)s has removed you from his friend list, '
'you can not access his files any more.')
summary = _("%(fullname)s has canceled his friendship with you")
class RefusedFriendshipNotification(NotificationDocument):
subtype = TextField(default="RefusedFriendship")
title = _('%(fullname)s has refused your friend request')
body = _('%(fullname)s would rather be stranger than friends.')
summary = _("%(fullname)s has refused your friend request")
class NewShareNotification(NotificationDocument):
subtype = TextField(default="NewShare")
files = ListField(TextField())
title = _('Someone has shared some files with you')
body = _('%(fullname)s has shared the following files with you : %(files)s')
summary = _("%(fullname)s has shared some files with you")
def __init__(self, **fields):
super(NewShareNotification, self).__init__(**fields)
if fields.get('files'):
self.files = fields['files']
class CanceledShareNotification(NotificationDocument):
subtype = TextField(default="CanceledShare")
files = ListField(TextField())
title = _('A share has been canceled')
body = _('%(fullname)s has canceled the share of \'%(file)s\', '
'you can\'t access the file any more.')
summary = _("%(fullname)s has canceled a share with you")
def __init__(self, **fields):
        super(CanceledShareNotification, self).__init__(**fields)
if fields.get('files'):
self.files = fields['files']
|
vienin/python-ufo
|
ufo/notify.py
|
Python
|
gpl-2.0
| 8,320 | 0.007452 |
# Copyright The Cloud Custodian Authors.
# SPDX-License-Identifier: Apache-2.0
import base64
from datetime import datetime, timedelta
import functools
import json
import os
import time
import yaml
import jinja2
import jmespath
from dateutil import parser
from dateutil.tz import gettz, tzutc
try:
from botocore.exceptions import ClientError
except ImportError: # pragma: no cover
pass # Azure provider
class Providers:
AWS = 0
Azure = 1
def get_jinja_env(template_folders):
env = jinja2.Environment(trim_blocks=True, autoescape=False) # nosec nosemgrep
env.filters['yaml_safe'] = functools.partial(yaml.safe_dump, default_flow_style=False)
env.filters['date_time_format'] = date_time_format
env.filters['get_date_time_delta'] = get_date_time_delta
env.filters['from_json'] = json.loads
env.filters['get_date_age'] = get_date_age
env.globals['format_resource'] = resource_format
env.globals['format_struct'] = format_struct
env.globals['resource_tag'] = get_resource_tag_value
env.globals['get_resource_tag_value'] = get_resource_tag_value
env.globals['search'] = jmespath.search
env.loader = jinja2.FileSystemLoader(template_folders)
return env
def get_rendered_jinja(
target, sqs_message, resources, logger,
specified_template, default_template, template_folders):
env = get_jinja_env(template_folders)
mail_template = sqs_message['action'].get(specified_template, default_template)
if not os.path.isabs(mail_template):
mail_template = '%s.j2' % mail_template
try:
template = env.get_template(mail_template)
except Exception as error_msg:
logger.error("Invalid template reference %s\n%s" % (mail_template, error_msg))
return
# recast seconds since epoch as utc iso datestring, template
# authors can use date_time_format helper func to convert local
# tz. if no execution start time was passed use current time.
execution_start = datetime.utcfromtimestamp(
sqs_message.get(
'execution_start',
time.mktime(
datetime.utcnow().timetuple())
)).isoformat()
rendered_jinja = template.render(
recipient=target,
resources=resources,
account=sqs_message.get('account', ''),
account_id=sqs_message.get('account_id', ''),
partition=sqs_message.get('partition', ''),
event=sqs_message.get('event', None),
action=sqs_message['action'],
policy=sqs_message['policy'],
execution_start=execution_start,
region=sqs_message.get('region', ''))
return rendered_jinja
# eg, target_tag_keys could be resource-owners ['Owners', 'SupportTeam']
# and this function would go through the resource and look for any tag keys
# that match Owners or SupportTeam, and return those values as targets
def get_resource_tag_targets(resource, target_tag_keys):
if 'Tags' not in resource:
return []
if isinstance(resource['Tags'], dict):
tags = resource['Tags']
else:
tags = {tag['Key']: tag['Value'] for tag in resource['Tags']}
targets = []
for target_tag_key in target_tag_keys:
if target_tag_key in tags:
targets.append(tags[target_tag_key])
return targets
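# Example (hypothetical resource): with target_tag_keys=['Owners', 'SupportTeam']
# and a resource whose Tags are [{'Key': 'Owners', 'Value': 'alice@example.com'}],
# get_resource_tag_targets() returns ['alice@example.com']: the matching tag
# values are treated as notification targets.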
def get_message_subject(sqs_message):
default_subject = 'Custodian notification - %s' % (sqs_message['policy']['name'])
subject = sqs_message['action'].get('subject', default_subject)
jinja_template = jinja2.Template(subject)
subject = jinja_template.render(
account=sqs_message.get('account', ''),
account_id=sqs_message.get('account_id', ''),
partition=sqs_message.get('partition', ''),
event=sqs_message.get('event', None),
action=sqs_message['action'],
policy=sqs_message['policy'],
region=sqs_message.get('region', '')
)
return subject
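# Subject rendering sketch (illustrative message payload): for
#     sqs_message = {'policy': {'name': 'ec2-tag-compliance'},
#                    'action': {'subject': '{{ account }} - {{ policy.name }}'},
#                    'account': 'dev'}
# get_message_subject() renders 'dev - ec2-tag-compliance'; without a custom
# 'subject', it falls back to 'Custodian notification - ec2-tag-compliance'.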
def setup_defaults(config):
config.setdefault('region', 'us-east-1')
config.setdefault('ses_region', config.get('region'))
config.setdefault('memory', 1024)
config.setdefault('runtime', 'python3.7')
config.setdefault('timeout', 300)
config.setdefault('subnets', None)
config.setdefault('security_groups', None)
config.setdefault('contact_tags', [])
config.setdefault('ldap_uri', None)
config.setdefault('ldap_bind_dn', None)
config.setdefault('ldap_bind_user', None)
config.setdefault('ldap_bind_password', None)
config.setdefault('endpoint_url', None)
config.setdefault('datadog_api_key', None)
config.setdefault('slack_token', None)
config.setdefault('slack_webhook', None)
def date_time_format(utc_str, tz_str='US/Eastern', format='%Y %b %d %H:%M %Z'):
return parser.parse(utc_str).astimezone(gettz(tz_str)).strftime(format)
def get_date_time_delta(delta):
return str(datetime.now().replace(tzinfo=gettz('UTC')) + timedelta(delta))
def get_date_age(date):
return (datetime.now(tz=tzutc()) - parser.parse(date)).days
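# Example: get_date_age('2024-01-01T00:00:00Z') evaluated on 2024-01-04 UTC
# returns 3; date_time_format() similarly parses an ISO string and reformats
# it in the requested timezone (US/Eastern by default).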
def format_struct(evt):
return json.dumps(evt, indent=2, ensure_ascii=False)
def get_resource_tag_value(resource, k):
for t in resource.get('Tags', []):
if t['Key'] == k:
return t['Value']
return ''
def strip_prefix(value, prefix):
if value.startswith(prefix):
return value[len(prefix):]
return value
def resource_format(resource, resource_type):
if resource_type.startswith('aws.'):
resource_type = strip_prefix(resource_type, 'aws.')
if resource_type == 'ec2':
tag_map = {t['Key']: t['Value'] for t in resource.get('Tags', ())}
return "%s %s %s %s %s %s" % (
resource['InstanceId'],
resource.get('VpcId', 'NO VPC!'),
resource['InstanceType'],
resource.get('LaunchTime'),
tag_map.get('Name', ''),
resource.get('PrivateIpAddress'))
elif resource_type == 'ami':
return "%s %s %s" % (
resource.get('Name'), resource['ImageId'], resource['CreationDate'])
elif resource_type == 'sagemaker-notebook':
return "%s" % (resource['NotebookInstanceName'])
elif resource_type == 's3':
return "%s" % (resource['Name'])
elif resource_type == 'ebs':
return "%s %s %s %s" % (
resource['VolumeId'],
resource['Size'],
resource['State'],
resource['CreateTime'])
elif resource_type == 'rds':
return "%s %s %s %s" % (
resource['DBInstanceIdentifier'],
"%s-%s" % (
resource['Engine'], resource['EngineVersion']),
resource['DBInstanceClass'],
resource['AllocatedStorage'])
elif resource_type == 'rds-cluster':
return "%s %s %s" % (
resource['DBClusterIdentifier'],
"%s-%s" % (
resource['Engine'], resource['EngineVersion']),
resource['AllocatedStorage'])
elif resource_type == 'asg':
tag_map = {t['Key']: t['Value'] for t in resource.get('Tags', ())}
return "%s %s %s" % (
resource['AutoScalingGroupName'],
tag_map.get('Name', ''),
"instances: %d" % (len(resource.get('Instances', []))))
elif resource_type == 'elb':
tag_map = {t['Key']: t['Value'] for t in resource.get('Tags', ())}
if 'ProhibitedPolicies' in resource:
return "%s %s %s %s" % (
resource['LoadBalancerName'],
"instances: %d" % len(resource['Instances']),
"zones: %d" % len(resource['AvailabilityZones']),
"prohibited_policies: %s" % ','.join(
resource['ProhibitedPolicies']))
return "%s %s %s" % (
resource['LoadBalancerName'],
"instances: %d" % len(resource['Instances']),
"zones: %d" % len(resource['AvailabilityZones']))
elif resource_type == 'redshift':
return "%s %s %s" % (
resource['ClusterIdentifier'],
'nodes:%d' % len(resource['ClusterNodes']),
'encrypted:%s' % resource['Encrypted'])
elif resource_type == 'emr':
return "%s status:%s" % (
resource['Id'],
resource['Status']['State'])
elif resource_type == 'cfn':
return "%s" % (
resource['StackName'])
elif resource_type == 'launch-config':
return "%s" % (
resource['LaunchConfigurationName'])
elif resource_type == 'security-group':
name = resource.get('GroupName', '')
for t in resource.get('Tags', ()):
if t['Key'] == 'Name':
name = t['Value']
return "%s %s %s inrules: %d outrules: %d" % (
name,
resource['GroupId'],
resource.get('VpcId', 'na'),
len(resource.get('IpPermissions', ())),
len(resource.get('IpPermissionsEgress', ())))
elif resource_type == 'log-group':
if 'lastWrite' in resource:
return "name: %s last_write: %s" % (
resource['logGroupName'],
resource['lastWrite'])
return "name: %s" % (resource['logGroupName'])
elif resource_type == 'cache-cluster':
return "name: %s created: %s status: %s" % (
resource['CacheClusterId'],
resource['CacheClusterCreateTime'],
resource['CacheClusterStatus'])
elif resource_type == 'cache-snapshot':
cid = resource.get('CacheClusterId')
if cid is None:
cid = ', '.join([
ns['CacheClusterId'] for ns in resource['NodeSnapshots']])
return "name: %s cluster: %s source: %s" % (
resource['SnapshotName'],
cid,
resource['SnapshotSource'])
elif resource_type == 'redshift-snapshot':
return "name: %s db: %s" % (
resource['SnapshotIdentifier'],
resource['DBName'])
elif resource_type == 'ebs-snapshot':
return "name: %s date: %s" % (
resource['SnapshotId'],
resource['StartTime'])
elif resource_type == 'subnet':
return "%s %s %s %s %s %s" % (
resource['SubnetId'],
resource['VpcId'],
resource['AvailabilityZone'],
resource['State'],
resource['CidrBlock'],
resource['AvailableIpAddressCount'])
elif resource_type == 'account':
return " %s %s" % (
resource['account_id'],
resource['account_name'])
elif resource_type == 'cloudtrail':
return "%s" % (
resource['Name'])
elif resource_type == 'vpc':
return "%s " % (
resource['VpcId'])
elif resource_type == 'iam-group':
return " %s %s %s" % (
resource['GroupName'],
resource['Arn'],
resource['CreateDate'])
elif resource_type == 'rds-snapshot':
return " %s %s %s" % (
resource['DBSnapshotIdentifier'],
resource['DBInstanceIdentifier'],
resource['SnapshotCreateTime'])
elif resource_type == 'iam-user':
return " %s " % (
resource['UserName'])
elif resource_type == 'iam-role':
return " %s %s " % (
resource['RoleName'],
resource['CreateDate'])
elif resource_type == 'iam-policy':
return " %s " % (
resource['PolicyName'])
elif resource_type == 'iam-profile':
return " %s " % (
resource['InstanceProfileId'])
elif resource_type == 'dynamodb-table':
return "name: %s created: %s status: %s" % (
resource['TableName'],
resource['CreationDateTime'],
resource['TableStatus'])
elif resource_type == "sqs":
return "QueueURL: %s QueueArn: %s " % (
resource['QueueUrl'],
resource['QueueArn'])
elif resource_type == "efs":
return "name: %s id: %s state: %s" % (
resource['Name'],
resource['FileSystemId'],
resource['LifeCycleState']
)
elif resource_type == "network-addr":
return "ip: %s id: %s scope: %s" % (
resource['PublicIp'],
resource['AllocationId'],
resource['Domain']
)
elif resource_type == "route-table":
return "id: %s vpc: %s" % (
resource['RouteTableId'],
resource['VpcId']
)
elif resource_type == "app-elb":
return "arn: %s zones: %s scheme: %s" % (
resource['LoadBalancerArn'],
len(resource['AvailabilityZones']),
resource['Scheme'])
elif resource_type == "nat-gateway":
return "id: %s state: %s vpc: %s" % (
resource['NatGatewayId'],
resource['State'],
resource['VpcId'])
elif resource_type == "internet-gateway":
return "id: %s attachments: %s" % (
resource['InternetGatewayId'],
len(resource['Attachments']))
elif resource_type == 'lambda':
return "Name: %s RunTime: %s \n" % (
resource['FunctionName'],
resource['Runtime'])
else:
return "%s" % format_struct(resource)
def get_provider(mailer_config):
if mailer_config.get('queue_url', '').startswith('asq://'):
return Providers.Azure
return Providers.AWS
def kms_decrypt(config, logger, session, encrypted_field):
if config.get(encrypted_field):
try:
kms = session.client('kms')
return kms.decrypt(
CiphertextBlob=base64.b64decode(config[encrypted_field]))[
'Plaintext'].decode('utf8')
except (TypeError, base64.binascii.Error) as e:
logger.warning(
"Error: %s Unable to base64 decode %s, will assume plaintext." %
(e, encrypted_field))
except ClientError as e:
if e.response['Error']['Code'] != 'InvalidCiphertextException':
raise
logger.warning(
"Error: %s Unable to decrypt %s with kms, will assume plaintext." %
(e, encrypted_field))
return config[encrypted_field]
else:
logger.debug("No encrypted value to decrypt.")
return None
def decrypt(config, logger, session, encrypted_field):
if config.get(encrypted_field):
provider = get_provider(config)
if provider == Providers.Azure:
from c7n_mailer.azure_mailer.utils import azure_decrypt
return azure_decrypt(config, logger, session, encrypted_field)
elif provider == Providers.AWS:
return kms_decrypt(config, logger, session, encrypted_field)
else:
raise Exception("Unknown provider")
else:
logger.debug("No encrypted value to decrypt.")
return None
# https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudtrail-event-reference-user-identity.html
def get_aws_username_from_event(logger, event):
if event is None:
return None
identity = event.get('detail', {}).get('userIdentity', {})
if not identity:
logger.warning("Could not get recipient from event \n %s" % (
format_struct(event)))
return None
if identity['type'] == 'AssumedRole':
logger.debug(
            'In some cases there is no ldap uid associated with AssumedRole: %s',
            identity['arn'])
        logger.debug(
            'We will assume the identity is in the AssumedRoleSessionName')
user = identity['arn'].rsplit('/', 1)[-1]
if user is None or user.startswith('i-') or user.startswith('awslambda'):
return None
if ':' in user:
user = user.split(':', 1)[-1]
return user
if identity['type'] == 'IAMUser' or identity['type'] == 'WebIdentityUser':
return identity['userName']
if identity['type'] == 'Root':
return None
# this conditional is left here as a last resort, it should
# be better documented with an example UserIdentity json
if ':' in identity['principalId']:
user_id = identity['principalId'].split(':', 1)[-1]
else:
user_id = identity['principalId']
return user_id
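# A minimal sketch of the AssumedRole branch above, driven by a hypothetical
# CloudTrail-style event (all field values below are made up for illustration):
#
#   event = {'detail': {'userIdentity': {
#       'type': 'AssumedRole',
#       'arn': 'arn:aws:sts::123456789012:assumed-role/admin/jdoe',
#       'principalId': 'AROAEXAMPLE:jdoe'}}}
#   get_aws_username_from_event(logger, event)  # -> 'jdoe'
#
# Session names that look like instance ids ('i-...') or lambda invocations
# ('awslambda...') are rejected, so such events yield None instead.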
|
thisisshi/cloud-custodian
|
tools/c7n_mailer/c7n_mailer/utils.py
|
Python
|
apache-2.0
| 16,298 | 0.000675 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2019-11-24 03:04
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('logger', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='logentry',
name='log_type',
field=models.IntegerField(choices=[(0, 'Debug'), (1, 'Info'), (2, 'Warning'), (3, 'Error'), (4, 'Critical')], default=1),
),
migrations.AlterField(
model_name='logentry',
name='message',
field=models.TextField(default=''),
),
]
|
imvu/bluesteel
|
app/logic/logger/migrations/0002_auto_20191123_1904.py
|
Python
|
mit
| 679 | 0.001473 |
#!/usr/bin/env python
############################################################################
#
# Copyright (C) 2012, 2013 PX4 Development Team. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# 3. Neither the name PX4 nor the names of its contributors may be
# used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
############################################################################
#
# PX4 firmware image generator
#
# The PX4 firmware file is a JSON-encoded Python object, containing
# metadata fields and a zlib-compressed base64-encoded firmware image.
#
import sys
import argparse
import json
import base64
import zlib
import time
import subprocess
#
# Construct a basic firmware description
#
def mkdesc():
proto = {}
proto['magic'] = "PX4FWv1"
proto['board_id'] = 0
proto['board_revision'] = 0
proto['version'] = ""
proto['summary'] = ""
proto['description'] = ""
proto['git_identity'] = ""
proto['build_time'] = 0
proto['image'] = bytes()
proto['image_size'] = 0
return proto
# Parse commandline
parser = argparse.ArgumentParser(description="Firmware generator for the PX4 autopilot system.")
parser.add_argument("--prototype", action="store", help="read a prototype description from a file")
parser.add_argument("--board_id", action="store", help="set the board ID required")
parser.add_argument("--board_revision", action="store", help="set the board revision required")
parser.add_argument("--version", action="store", help="set a version string")
parser.add_argument("--summary", action="store", help="set a brief description")
parser.add_argument("--description", action="store", help="set a longer description")
parser.add_argument("--git_identity", action="store", help="the working directory to check for git identity")
parser.add_argument("--parameter_xml", action="store", help="the parameters.xml file")
parser.add_argument("--airframe_xml", action="store", help="the airframes.xml file")
parser.add_argument("--image", action="store", help="the firmware image")
args = parser.parse_args()
# Fetch the firmware descriptor prototype if specified
if args.prototype != None:
f = open(args.prototype,"r")
desc = json.load(f)
f.close()
else:
desc = mkdesc()
desc['build_time'] = int(time.time())
if args.board_id != None:
desc['board_id'] = int(args.board_id)
if args.board_revision != None:
desc['board_revision'] = int(args.board_revision)
if args.version != None:
desc['version'] = str(args.version)
if args.summary != None:
desc['summary'] = str(args.summary)
if args.description != None:
desc['description'] = str(args.description)
if args.git_identity != None:
cmd = " ".join(["git", "--git-dir", args.git_identity + "/.git", "describe", "--always", "--dirty"])
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE).stdout
desc['git_identity'] = str(p.read().strip())
p.close()
if args.parameter_xml != None:
f = open(args.parameter_xml, "rb")
bytes = f.read()
desc['parameter_xml_size'] = len(bytes)
desc['parameter_xml'] = base64.b64encode(zlib.compress(bytes,9)).decode('utf-8')
if args.airframe_xml != None:
f = open(args.airframe_xml, "rb")
bytes = f.read()
desc['airframe_xml_size'] = len(bytes)
desc['airframe_xml'] = base64.b64encode(zlib.compress(bytes,9)).decode('utf-8')
if args.image != None:
f = open(args.image, "rb")
bytes = f.read()
desc['image_size'] = len(bytes)
desc['image'] = base64.b64encode(zlib.compress(bytes,9)).decode('utf-8')
print(json.dumps(desc, indent=4))
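# Example invocation (hypothetical paths and values), redirecting the JSON
# descriptor printed above into a .px4 firmware file:
#
#   python px_mkfw.py --board_id 9 --git_identity . \
#       --image firmware.bin > firmware.px4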
|
RickHutten/paparazzi
|
sw/tools/px4/px_mkfw.py
|
Python
|
gpl-2.0
| 4,811 | 0.017044 |
# -*- coding: utf-8 -*-
import pytest
import turnstile.models.message as message
from turnstile.checks import CheckIgnore
from turnstile.checks.commit_msg.specification import check
def test_check():
commit_1 = message.CommitMessage('something', 'https://github.com/jmcs/turnstile/issues/42 m€sságe')
result_1 = check(None, {}, commit_1)
assert result_1.successful
assert result_1.details == []
commit_2 = message.CommitMessage('something', 'invalid-1')
result_2 = check(None, {}, commit_2)
assert not result_2.successful
assert result_2.details == ['invalid-1 is not a valid specification.']
# Merge messages are ignored
with pytest.raises(CheckIgnore):
commit_3 = message.CommitMessage('something', 'Merge stuff')
check(None, {}, commit_3)
commit_4 = message.CommitMessage('something', 'ftp://example.com/spec')
result_4 = check(None, {'specification': {'allowed_schemes': ['https']}}, commit_4)
assert not result_4.successful
assert result_4.details == ['ftp://example.com/spec is not a valid specification.']
commit_5 = message.CommitMessage('something', 'ftp://example.com/spec')
result_5 = check(None, {'specification': {'allowed_schemes': ['https', 'ftp']}}, commit_5)
assert result_5.successful
assert result_5.details == []
|
zalando/turnstile
|
tests/checks/test_specification_check.py
|
Python
|
apache-2.0
| 1,332 | 0.00301 |
import logging, time, commands
from autotest.client.shared import error
from virttest import utils_test, aexpect
def run_timedrift(test, params, env):
"""
Time drift test (mainly for Windows guests):
1) Log into a guest.
2) Take a time reading from the guest and host.
3) Run load on the guest and host.
4) Take a second time reading.
5) Stop the load and rest for a while.
6) Take a third time reading.
7) If the drift immediately after load is higher than a user-
specified value (in %), fail.
If the drift after the rest period is higher than a user-specified value,
fail.
@param test: QEMU test object.
@param params: Dictionary with test parameters.
@param env: Dictionary with the test environment.
"""
# Helper functions
def set_cpu_affinity(pid, mask):
"""
Set the CPU affinity of all threads of the process with PID pid.
Do this recursively for all child processes as well.
@param pid: The process ID.
@param mask: The CPU affinity mask.
@return: A dict containing the previous mask for each thread.
"""
tids = commands.getoutput("ps -L --pid=%s -o lwp=" % pid).split()
prev_masks = {}
for tid in tids:
prev_mask = commands.getoutput("taskset -p %s" % tid).split()[-1]
prev_masks[tid] = prev_mask
commands.getoutput("taskset -p %s %s" % (mask, tid))
children = commands.getoutput("ps --ppid=%s -o pid=" % pid).split()
for child in children:
prev_masks.update(set_cpu_affinity(child, mask))
return prev_masks
def restore_cpu_affinity(prev_masks):
"""
Restore the CPU affinity of several threads.
@param prev_masks: A dict containing TIDs as keys and masks as values.
"""
for tid, mask in prev_masks.items():
commands.getoutput("taskset -p %s %s" % (mask, tid))
vm = env.get_vm(params["main_vm"])
vm.verify_alive()
timeout = int(params.get("login_timeout", 360))
session = vm.wait_for_login(timeout=timeout)
# Collect test parameters:
# Command to run to get the current time
time_command = params.get("time_command")
# Filter which should match a string to be passed to time.strptime()
time_filter_re = params.get("time_filter_re")
# Time format for time.strptime()
time_format = params.get("time_format")
guest_load_command = params.get("guest_load_command")
guest_load_stop_command = params.get("guest_load_stop_command")
host_load_command = params.get("host_load_command")
guest_load_instances = int(params.get("guest_load_instances", "1"))
host_load_instances = int(params.get("host_load_instances", "0"))
# CPU affinity mask for taskset
cpu_mask = params.get("cpu_mask", "0xFF")
load_duration = float(params.get("load_duration", "30"))
rest_duration = float(params.get("rest_duration", "10"))
drift_threshold = float(params.get("drift_threshold", "200"))
drift_threshold_after_rest = float(params.get("drift_threshold_after_rest",
"200"))
guest_load_sessions = []
host_load_sessions = []
try:
# Set the VM's CPU affinity
prev_affinity = set_cpu_affinity(vm.get_shell_pid(), cpu_mask)
try:
# Open shell sessions with the guest
logging.info("Starting load on guest...")
for i in range(guest_load_instances):
load_session = vm.login()
# Set output func to None to stop it from being called so we
# can change the callback function and the parameters it takes
# with no problems
load_session.set_output_func(None)
load_session.set_output_params(())
load_session.set_output_prefix("(guest load %d) " % i)
load_session.set_output_func(logging.debug)
guest_load_sessions.append(load_session)
# Get time before load
# (ht stands for host time, gt stands for guest time)
(ht0, gt0) = utils_test.get_time(session,
time_command,
time_filter_re,
time_format)
# Run some load on the guest
for load_session in guest_load_sessions:
load_session.sendline(guest_load_command)
# Run some load on the host
logging.info("Starting load on host...")
for i in range(host_load_instances):
host_load_sessions.append(
aexpect.run_bg(host_load_command,
output_func=logging.debug,
output_prefix="(host load %d) " % i,
timeout=0.5))
# Set the CPU affinity of the load process
pid = host_load_sessions[-1].get_pid()
set_cpu_affinity(pid, cpu_mask)
# Sleep for a while (during load)
logging.info("Sleeping for %s seconds...", load_duration)
time.sleep(load_duration)
# Get time delta after load
(ht1, gt1) = utils_test.get_time(session,
time_command,
time_filter_re,
time_format)
# Report results
host_delta = ht1 - ht0
guest_delta = gt1 - gt0
drift = 100.0 * (host_delta - guest_delta) / host_delta
logging.info("Host duration: %.2f", host_delta)
logging.info("Guest duration: %.2f", guest_delta)
logging.info("Drift: %.2f%%", drift)
finally:
logging.info("Cleaning up...")
# Restore the VM's CPU affinity
restore_cpu_affinity(prev_affinity)
# Stop the guest load
if guest_load_stop_command:
session.cmd_output(guest_load_stop_command)
# Close all load shell sessions
for load_session in guest_load_sessions:
load_session.close()
for load_session in host_load_sessions:
load_session.close()
# Sleep again (rest)
logging.info("Sleeping for %s seconds...", rest_duration)
time.sleep(rest_duration)
# Get time after rest
(ht2, gt2) = utils_test.get_time(session,
time_command,
time_filter_re,
time_format)
finally:
session.close()
# Report results
host_delta_total = ht2 - ht0
guest_delta_total = gt2 - gt0
    drift_total = 100.0 * (host_delta_total - guest_delta_total) / host_delta_total
logging.info("Total host duration including rest: %.2f", host_delta_total)
logging.info("Total guest duration including rest: %.2f", guest_delta_total)
logging.info("Total drift after rest: %.2f%%", drift_total)
# Fail the test if necessary
if abs(drift) > drift_threshold:
raise error.TestFail("Time drift too large: %.2f%%" % drift)
if abs(drift_total) > drift_threshold_after_rest:
raise error.TestFail("Time drift too large after rest period: %.2f%%"
% drift_total)
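# Worked example of the drift formula used above (illustrative numbers only):
# if the host measures 30.0 s of load while the guest clock advances 29.4 s,
# drift = 100.0 * (30.0 - 29.4) / 30.0 = 2.0%, i.e. the guest clock ran 2%
# slow relative to the host during the load period.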
|
ehabkost/virt-test
|
qemu/tests/timedrift.py
|
Python
|
gpl-2.0
| 7,552 | 0.001457 |
#! /usr/bin/env python2
# -*- coding: utf-8 -*-
from __future__ import (unicode_literals, absolute_import, division)
import os as os_module
import xbmc
from lib.constants import *
userdatafolder = os_module.path.join(xbmc.translatePath("special://profile").decode("utf-8"), "addon_data", addonid, "test data")
libpath = os_module.path.join(userdatafolder, "Library")
|
eirki/script.service.koalahbonordic
|
tests/mock_constants.py
|
Python
|
mit
| 370 | 0.002703 |
from mybottle import Bottle, run, ServerAdapter, get, post, request
import KalutServer.conf as myconf
class SSLWSGIRefServer(ServerAdapter):
def run(self, handler, quiet=False):
from wsgiref.simple_server import make_server, WSGIRequestHandler
import ssl
if quiet:
class QuietHandler(WSGIRequestHandler):
def log_request(*args, **kw): pass
self.options['handler_class'] = QuietHandler
srv = make_server(self.host, self.port, handler, **self.options)
srv.socket = ssl.wrap_socket (
srv.socket,
certfile=myconf.certfile, # path to chain file
keyfile=myconf.keyfile, # path to RSA private key
server_side=True)
srv.serve_forever()
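# Minimal usage sketch for this adapter; the host, port, and app below are
# assumptions for illustration (the cert/key paths come from KalutServer.conf):
#
#   app = Bottle()
#
#   @app.get('/ping')
#   def ping():
#       return 'pong'
#
#   run(app, server=SSLWSGIRefServer(host='0.0.0.0', port=8443))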
|
TwoUnderscorez/KalutServer
|
KalutServer/RESTfulAPI/SSLbottle.py
|
Python
|
apache-2.0
| 772 | 0.002591 |
from django.conf.urls import patterns, url
from publicaciones import views
urlpatterns = patterns('',
url(r'^$', views.index, name='index'),
url(r'^(?P<articulo_titulo>[\W\w]+)/$', views.ver_articulo, name='ver_articulo'),
)
|
rickyrish/rickyblog
|
publicaciones/urls.py
|
Python
|
gpl-2.0
| 234 | 0.012821 |
"""
@package mi.dataset.driver.optaa_dj.cspp
@file mi-dataset/mi/dataset/driver/optaa_dj/cspp/optaa_dj_cspp_telemetered_driver.py
@author Joe Padula
@brief Telemetered driver for the optaa_dj_cspp instrument
Release notes:
Initial Release
"""
__author__ = 'jpadula'
from mi.dataset.dataset_driver import SimpleDatasetDriver
from mi.dataset.dataset_parser import DataSetDriverConfigKeys
from mi.dataset.parser.cspp_base import \
DATA_PARTICLE_CLASS_KEY, \
METADATA_PARTICLE_CLASS_KEY
from mi.dataset.parser.optaa_dj_cspp import \
OptaaDjCsppParser, \
OptaaDjCsppMetadataTelemeteredDataParticle, \
OptaaDjCsppInstrumentTelemeteredDataParticle
from mi.core.versioning import version
@version("15.6.1")
def parse(basePythonCodePath, sourceFilePath, particleDataHdlrObj):
"""
This is the method called by Uframe
    :param basePythonCodePath: the file system location of mi-dataset
    :param sourceFilePath: the full path and filename of the file to be parsed
    :param particleDataHdlrObj: Java Object to consume the output of the parser
    :return: particleDataHdlrObj
"""
with open(sourceFilePath, 'rU') as stream_handle:
# create an instance of the concrete driver class defined below
driver = OptaaDjCsppTelemeteredDriver(basePythonCodePath, stream_handle, particleDataHdlrObj)
driver.processFileStream()
return particleDataHdlrObj
class OptaaDjCsppTelemeteredDriver(SimpleDatasetDriver):
"""
The optaa_dj_cspp telemetered driver class extends the SimpleDatasetDriver.
"""
def _build_parser(self, stream_handle):
parser_config = {
DataSetDriverConfigKeys.PARTICLE_MODULE: 'mi.dataset.parser.optaa_dj_cspp',
DataSetDriverConfigKeys.PARTICLE_CLASS: None,
DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT: {
METADATA_PARTICLE_CLASS_KEY: OptaaDjCsppMetadataTelemeteredDataParticle,
DATA_PARTICLE_CLASS_KEY: OptaaDjCsppInstrumentTelemeteredDataParticle
}
}
parser = OptaaDjCsppParser(parser_config,
stream_handle,
self._exception_callback)
return parser
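# Illustrative call mirroring the docstring of parse() above; the paths are
# placeholders and ParticleDataHandler is assumed to be the mi-dataset helper
# normally supplied by Uframe:
#
#   from mi.dataset.dataset_driver import ParticleDataHandler
#   handler = parse('/path/to/mi-dataset',
#                   '/path/to/optaa_dj_cspp_input.txt',
#                   ParticleDataHandler())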
|
JeffRoy/mi-dataset
|
mi/dataset/driver/optaa_dj/cspp/optaa_dj_cspp_telemetered_driver.py
|
Python
|
bsd-2-clause
| 2,238 | 0.002681 |
#!/usr/bin/env python
#
# Copyright (c) 2014 Hamilton Kibbe <ham@hamiltonkib.be>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
""" PyAbleton
A library for creating and editing Ableton Live instrument/effect presets in Python.
"""
__author__ = 'ham@hamiltonkib.be'
__version__ = '1.0'
import presets
|
hamiltonkibbe/PyAbleton
|
pyableton/__init__.py
|
Python
|
mit
| 1,356 | 0.00885 |
from pymongo import MongoClient
import schedule
import time
##############
## This script will be deployed in bluemix with --no-route set to true
##############
con = MongoClient("mongodb://abcd:qwerty@ds111798.mlab.com:11798/have_a_seat")
db = con.have_a_seat
#Bookings is {customerName:"", customerEmail: "", customerPhone: "", Slot: ""}
db.Exploration.delete_many({})
db.Exploitation.delete_many({})
for i in range(4): # Finding for all slots
    # Re-query and reset the tally for each slot: a pymongo cursor is
    # exhausted after one full pass, and counts must not leak across slots.
    cursor = db.Bookings.find()
    counts = {}
    for c in cursor:
        if c['Slot'] == i and c['customerEmail'] not in counts:
            counts[c['customerEmail']] = 1
        elif c['Slot'] == i and c['customerEmail'] in counts:
            counts[c['customerEmail']] += 1
    tuples_list = sorted(counts.items(), key=lambda x: x[1], reverse=True)
    if not tuples_list: # no bookings for this slot
        continue
print tuples_list
print 'Completed for slot ', i
db.Exploitation.insert({'Slot': i, 'customerEmail': tuples_list[0][0],
'customerName': db.Bookings.find_one({'customerEmail': tuples_list[0][0]})['customerName']})
db.Exploration.insert({'Slot': i, 'customerEmail': tuples_list[len(tuples_list) - 1][0], 'customerName':
db.Bookings.find_one({'customerEmail': tuples_list[len(tuples_list) - 1][0]})['customerName']})
|
VimanyuAgg/Have_a_seat
|
dbanalytics tester.py
|
Python
|
apache-2.0
| 1,275 | 0.004706 |
print 28433 * 2**7830457 + 1
|
floppp/programming_challenges
|
project_euler/051-100/97.py
|
Python
|
mit
| 29 | 0 |
from collections import Counter
def unalk_coeff(l):
'''Source: https://ww2.amstat.org/publications/jse/v15n2/kader.html'''
n = len(l)
freq = Counter(l)
freqsum = 0
    for key, count in freq.items():  # avoid rebinding 'freq' inside the loop
        p = count / n
        freqsum += p**2
unalk_coeff = 1 - freqsum
return unalk_coeff
def IQV(l, k):
'''k = number of categories
a value can take
Source: http://sjam.selcuk.edu.tr/sjam/article/view/291'''
IQV = (k / (k - 1)) * unalk_coeff(l)
return IQV
def IQV_var(l, k):
'''k = number of categories
a value can take
Source: https://www.youtube.com/watch?v=oQCqaS1ICwk'''
freq = Counter(l)
freqsum = 0
for k, v in freq.items():
freqsum += v
p2sum = 0
for k, v in freq.items():
p2sum += ((v / freqsum) * 100)**2
IQV = (k * (100**2 - p2sum)) / (((100**2) * (k - 1)))
return IQV
def simpsons_d(l):
freq = Counter(l)
n = 0
for k, v in freq.items():
n += v
s = 0
for k, v in freq.items():
s += v * (v - 1)
    d = s / (n * (n - 1))
return 1 - d
# TEST, Source: https://ww2.amstat.org/publications/jse/v15n2/kader.html
# print(unalk_coeff(['A', 'A', 'A', 'A', 'A', 'A', 'A', 'B', 'B', 'B']))
# print(unalk_coeff(['A', 'A', 'A', 'A', 'A', 'B', 'B', 'B', 'B', 'B']))
# print(unalk_coeff(['A', 'B', 'B', 'B', 'B', 'B', 'B', 'B', 'B', 'B']))
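# Hand-computed IQV example: for l = ['A', 'A', 'B', 'C'] with k = 3 categories,
# unalk_coeff(l) = 1 - (0.5**2 + 0.25**2 + 0.25**2) = 0.625
# and IQV(l, 3) = (3 / 2) * 0.625 = 0.9375.
# print(IQV(['A', 'A', 'B', 'C'], 3))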
|
j4n7/record-q
|
qualitative_variation.py
|
Python
|
gpl-3.0
| 1,382 | 0.003618 |
from flask_admin.contrib.sqla.typefmt import DEFAULT_FORMATTERS as BASE_FORMATTERS
import json
from jinja2 import Markup
from wtforms.widgets import html_params
from geoalchemy2.shape import to_shape
from geoalchemy2.elements import WKBElement
from sqlalchemy import func
from flask import current_app
def geom_formatter(view, value):
params = html_params(**{
"data-role": "leaflet",
"disabled": "disabled",
"data-width": 100,
"data-height": 70,
"data-geometry-type": to_shape(value).geom_type,
"data-zoom": 15,
})
    if value.srid == -1:
        geojson = current_app.extensions['sqlalchemy'].db.session.scalar(func.ST_AsGeoJson(value))
    else:
        geojson = current_app.extensions['sqlalchemy'].db.session.scalar(func.ST_AsGeoJson(value.ST_Transform(4326)))
return Markup('<textarea %s>%s</textarea>' % (params, geojson))
DEFAULT_FORMATTERS = BASE_FORMATTERS.copy()
DEFAULT_FORMATTERS[WKBElement] = geom_formatter
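# A usage sketch: Flask-Admin model views pick these formatters up through the
# column_type_formatters attribute; the geoa ModelView import is assumed here:
#
#   from flask_admin.contrib.geoa import ModelView
#
#   class PlaceView(ModelView):
#       column_type_formatters = DEFAULT_FORMATTERS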
|
hexlism/css_platform
|
sleepyenv/lib/python2.7/site-packages/Flask_Admin-1.2.0-py2.7.egg/flask_admin/contrib/geoa/typefmt.py
|
Python
|
apache-2.0
| 988 | 0.004049 |
__doc__="""An experimental SVG renderer for the ReportLab graphics framework.
This will create SVG code from the ReportLab Graphics API (RLG).
To read existing SVG code and convert it into ReportLab graphics
objects download the svglib module here:
http://python.net/~gherman/#svglib
"""
import math, types, sys, os
from operator import getitem
from reportlab.pdfbase.pdfmetrics import stringWidth # for font info
from reportlab.lib.utils import fp_str
from reportlab.lib.colors import black
from reportlab.graphics.renderbase import StateTracker, getStateDelta, Renderer, renderScaledDrawing
from reportlab.graphics.shapes import STATE_DEFAULTS, Path, UserNode
from reportlab.graphics.shapes import * # (only for test0)
from reportlab import rl_config
from reportlab.lib.utils import getStringIO
from xml.dom import getDOMImplementation
### some constants ###
sin = math.sin
cos = math.cos
pi = math.pi
LINE_STYLES = 'stroke-width stroke-linecap stroke fill stroke-dasharray'
TEXT_STYLES = 'font-family font-size'
### top-level user function ###
def drawToString(d, showBoundary=rl_config.showBoundary):
"Returns a SVG as a string in memory, without touching the disk"
s = getStringIO()
drawToFile(d, s, showBoundary=showBoundary)
return s.getvalue()
def drawToFile(d, fn, showBoundary=rl_config.showBoundary):
d = renderScaledDrawing(d)
c = SVGCanvas((d.width, d.height))
draw(d, c, 0, 0, showBoundary=showBoundary)
c.save(fn)
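# Minimal usage sketch (assumes a Drawing built with the RLG shapes API):
#
#   from reportlab.graphics.shapes import Drawing, Circle
#   d = Drawing(100, 100)
#   d.add(Circle(50, 50, 40, fillColor=black))
#   drawToFile(d, 'circle.svg')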
def draw(drawing, canvas, x=0, y=0, showBoundary=rl_config.showBoundary):
"""As it says."""
r = _SVGRenderer()
r.draw(renderScaledDrawing(drawing), canvas, x, y, showBoundary=showBoundary)
### helper functions ###
def _pointsFromList(L):
"""
given a list of coordinates [x0, y0, x1, y1....]
produce a list of points [(x0,y0), (y1,y0),....]
"""
P=[]
for i in range(0,len(L), 2):
P.append((L[i], L[i+1]))
return P
def transformNode(doc, newTag, node=None, **attrDict):
"""Transform a DOM node into new node and copy selected attributes.
Creates a new DOM node with tag name 'newTag' for document 'doc'
and copies selected attributes from an existing 'node' as provided
in 'attrDict'. The source 'node' can be None. Attribute values will
be converted to strings.
E.g.
n = transformNode(doc, "node1", x="0", y="1")
-> DOM node for <node1 x="0" y="1"/>
n = transformNode(doc, "node1", x=0, y=1+1)
-> DOM node for <node1 x="0" y="2"/>
n = transformNode(doc, "node1", node0, x="x0", y="x0", zoo=bar())
-> DOM node for <node1 x="[node0.x0]" y="[node0.y0]" zoo="[bar()]"/>
"""
newNode = doc.createElement(newTag)
for newAttr, attr in attrDict.items():
sattr = str(attr)
if not node:
newNode.setAttribute(newAttr, sattr)
else:
attrVal = node.getAttribute(sattr)
newNode.setAttribute(newAttr, attrVal or sattr)
return newNode
### classes ###
class SVGCanvas:
def __init__(self, size=(300,300)):
self.verbose = 0
self.width, self.height = self.size = size
# self.height = size[1]
self.code = []
self.style = {}
self.path = ''
self._strokeColor = self._fillColor = self._lineWidth = \
self._font = self._fontSize = self._lineCap = \
self._lineJoin = self._color = None
implementation = getDOMImplementation('minidom')
#Based on official example here http://www.w3.org/TR/SVG10/linking.html want:
#<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 20010904//EN"
# "http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd">
#Thus,
#doctype = implementation.createDocumentType("svg",
# "-//W3C//DTD SVG 20010904//EN",
# "http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd")
#
#However, putting that example through http://validator.w3.org/ recommends:
#<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.0//EN"
# "http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd">
#So we'll use that for our SVG 1.0 output.
doctype = implementation.createDocumentType("svg",
"-//W3C//DTD SVG 1.0//EN",
"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd")
self.doc = implementation.createDocument(None,"svg",doctype)
self.svg = self.doc.documentElement
self.svg.setAttribute("width", str(size[0]))
self.svg.setAttribute("height", str(self.height))
#these suggested by Tim Roberts, as updated by peter@maubp.freeserve.co.uk
self.svg.setAttribute("xmlns", "http://www.w3.org/2000/svg")
self.svg.setAttribute("xmlns:xlink", "http://www.w3.org/1999/xlink")
self.svg.setAttribute("version", "1.0")
#self.svg.setAttribute("baseProfile", "full") #disliked in V 1.0
title = self.doc.createElement('title')
text = self.doc.createTextNode('...')
title.appendChild(text)
self.svg.appendChild(title)
desc = self.doc.createElement('desc')
text = self.doc.createTextNode('...')
desc.appendChild(text)
self.svg.appendChild(desc)
self.setFont(STATE_DEFAULTS['fontName'], STATE_DEFAULTS['fontSize'])
self.setStrokeColor(STATE_DEFAULTS['strokeColor'])
self.setLineCap(2)
self.setLineJoin(0)
self.setLineWidth(1)
# Add a rectangular clipping path identical to view area.
clipPath = transformNode(self.doc, "clipPath", id="clip")
clipRect = transformNode(self.doc, "rect", x=0, y=0,
width=self.width, height=self.height)
clipPath.appendChild(clipRect)
self.svg.appendChild(clipPath)
self.groupTree = transformNode(self.doc, "g",
id="group",
transform="scale(1,-1) translate(0,-%d)" % self.height,
style="clip-path: url(#clip)")
self.svg.appendChild(self.groupTree)
self.currGroup = self.groupTree
def save(self, fn=None):
if isinstance(fn,str):
f = open(fn, 'w')
else:
f = fn
f.write(self.doc.toprettyxml(indent=" "))
if f is not fn:
f.close()
### helpers ###
def NOTUSED_stringWidth(self, s, font=None, fontSize=None):
"""Return the logical width of the string if it were drawn
in the current font (defaults to self.font).
"""
font = font or self._font
fontSize = fontSize or self._fontSize
return stringWidth(s, font, fontSize)
def _formatStyle(self, include=''):
include = include.split()
keys = self.style.keys()
if include:
#2.1-safe version of the line below follows:
#keys = filter(lambda k: k in include, keys)
tmp = []
for word in keys:
if word in include:
tmp.append(word)
keys = tmp
items = []
for k in keys:
items.append((k, self.style[k]))
items = map(lambda i: "%s: %s"%(i[0], i[1]), items)
return '; '.join(items) + ';'
def _escape(self, s):
"""
return a copy of string s with special characters in postscript strings
escaped with backslashes.
Have not handled characters that are converted normally in python strings
i.e. \\n -> newline
"""
return s.replace(chr(0x5C), r'\\' ).replace('(', '\(' ).replace(')', '\)')
def _genArcCode(self, x1, y1, x2, y2, startAng, extent):
"""Calculate the path for an arc inscribed in rectangle defined
by (x1,y1),(x2,y2)."""
return
#calculate semi-minor and semi-major axes of ellipse
xScale = abs((x2-x1)/2.0)
yScale = abs((y2-y1)/2.0)
#calculate centre of ellipse
x, y = (x1+x2)/2.0, (y1+y2)/2.0
codeline = 'matrix currentmatrix %s %s translate %s %s scale 0 0 1 %s %s %s setmatrix'
if extent >= 0:
arc='arc'
else:
arc='arcn'
data = (x,y, xScale, yScale, startAng, startAng+extent, arc)
return codeline % data
def _fillAndStroke(self, code, clip=0, link_info=None):
path = transformNode(self.doc, "path",
d=self.path, style=self._formatStyle(LINE_STYLES))
if link_info :
path = self._add_link(path, link_info)
self.currGroup.appendChild(path)
self.path = ''
### styles ###
def setLineCap(self, v):
vals = {0:'butt', 1:'round', 2:'square'}
if self._lineCap != v:
self._lineCap = v
self.style['stroke-linecap'] = vals[v]
def setLineJoin(self, v):
vals = {0:'miter', 1:'round', 2:'bevel'}
if self._lineJoin != v:
self._lineJoin = v
self.style['stroke-linecap'] = vals[v]
def setDash(self, array=[], phase=0):
"""Two notations. Pass two numbers, or an array and phase."""
if type(array) in (types.IntType, types.FloatType):
self.style['stroke-dasharray'] = ', '.join(map(str, ([array, phase])))
elif type(array) in (types.ListType, types.TupleType) and len(array) > 0:
assert phase >= 0, "phase is a length in user space"
self.style['stroke-dasharray'] = ', '.join(map(str, (array+[phase])))
def setStrokeColor(self, color):
self._strokeColor = color
self.setColor(color)
if color == None:
self.style['stroke'] = 'none'
else:
r, g, b = color.red, color.green, color.blue
self.style['stroke'] = 'rgb(%d%%,%d%%,%d%%)' % (r*100, g*100, b*100)
def setColor(self, color):
if self._color != color:
self._color = color
def setFillColor(self, color):
self._fillColor = color
self.setColor(color)
if color == None:
self.style['fill'] = 'none'
else:
r, g, b = color.red, color.green, color.blue
self.style['fill'] = 'rgb(%d%%,%d%%,%d%%)' % (r*100, g*100, b*100)
def setLineWidth(self, width):
if width != self._lineWidth:
self._lineWidth = width
self.style['stroke-width'] = width
def setFont(self, font, fontSize):
if self._font != font or self._fontSize != fontSize:
self._font, self._fontSize = (font, fontSize)
self.style['font-family'] = font
self.style['font-size'] = '%spx' % fontSize
def _add_link(self, dom_object, link_info) :
assert isinstance(link_info, dict)
link = transformNode(self.doc, "a", **link_info)
link.appendChild(dom_object)
return link
### shapes ###
def rect(self, x1,y1, x2,y2, rx=8, ry=8, link_info=None):
"Draw a rectangle between x1,y1 and x2,y2."
if self.verbose: print "+++ SVGCanvas.rect"
rect = transformNode(self.doc, "rect",
x=x1, y=y1, width=x2-x1, height=y2-y1,
style=self._formatStyle(LINE_STYLES))
if link_info :
rect = self._add_link(rect, link_info)
self.currGroup.appendChild(rect)
def roundRect(self, x1,y1, x2,y2, rx=8, ry=8, link_info=None):
"""Draw a rounded rectangle between x1,y1 and x2,y2.
Corners inset as ellipses with x-radius rx and y-radius ry.
These should have x1<x2, y1<y2, rx>0, and ry>0.
"""
rect = transformNode(self.doc, "rect",
x=x1, y=y1, width=x2-x1, height=y2-y1, rx=rx, ry=ry,
style=self._formatStyle(LINE_STYLES))
if link_info :
rect = self._add_link(rect, link_info)
self.currGroup.appendChild(rect)
def drawString(self, s, x, y, angle=0, link_info=None):
if self.verbose: print "+++ SVGCanvas.drawString"
if self._fillColor != None:
self.setColor(self._fillColor)
s = self._escape(s)
st = self._formatStyle(TEXT_STYLES)
if angle != 0:
st = st + " rotate(%f %f %f);" % (angle, x, y)
st = st + " fill: %s;" % self.style['fill']
text = transformNode(self.doc, "text",
x=x, y=y, style=st,
transform="translate(0,%d) scale(1,-1)" % (2*y))
content = self.doc.createTextNode(s)
text.appendChild(content)
if link_info:
text = self._add_link(text, link_info)
self.currGroup.appendChild(text)
def drawCentredString(self, s, x, y, angle=0, text_anchor='middle', link_info=None):
if self.verbose: print "+++ SVGCanvas.drawCentredString"
if self._fillColor != None:
if not text_anchor in ['start', 'inherited']:
textLen = stringWidth(s,self._font,self._fontSize)
if text_anchor=='end':
x -= textLen
elif text_anchor=='middle':
x -= textLen/2.
elif text_anchor=='numeric':
x -= numericXShift(text_anchor,s,textLen,self._font,self._fontSize)
else:
raise ValueError, 'bad value for text_anchor ' + str(text_anchor)
        self.drawString(s, x, y, angle=angle, link_info=link_info)
def drawRightString(self, text, x, y, angle=0, link_info=None):
self.drawCentredString(text,x,y,angle=angle,text_anchor='end', link_info=link_info)
def comment(self, data):
"Add a comment."
comment = self.doc.createComment(data)
# self.currGroup.appendChild(comment)
def drawImage(self, image, x1, y1, x2=None, y2=None):
pass
def line(self, x1, y1, x2, y2):
if self._strokeColor != None:
if 0: # something is wrong with line in my SVG viewer...
line = transformNode(self.doc, "line",
x=x1, y=y1, x2=x2, y2=y2,
style=self._formatStyle(LINE_STYLES))
self.currGroup.appendChild(line)
path = transformNode(self.doc, "path",
d="M %f,%f L %f,%f Z" % (x1,y1,x2,y2),
style=self._formatStyle(LINE_STYLES))
self.currGroup.appendChild(path)
def ellipse(self, x1, y1, x2, y2, link_info=None):
"""Draw an orthogonal ellipse inscribed within the rectangle x1,y1,x2,y2.
These should have x1<x2 and y1<y2.
"""
ellipse = transformNode(self.doc, "ellipse",
cx=(x1+x2)/2.0, cy=(y1+y2)/2.0, rx=(x2-x1)/2.0, ry=(y2-y1)/2.0,
style=self._formatStyle(LINE_STYLES))
if link_info:
ellipse = self._add_link(ellipse, link_info)
self.currGroup.appendChild(ellipse)
def circle(self, xc, yc, r, link_info=None):
circle = transformNode(self.doc, "circle",
cx=xc, cy=yc, r=r,
style=self._formatStyle(LINE_STYLES))
if link_info:
circle = self._add_link(circle, link_info)
self.currGroup.appendChild(circle)
def drawCurve(self, x1, y1, x2, y2, x3, y3, x4, y4, closed=0):
pass
return
codeline = '%s m %s curveto'
data = (fp_str(x1, y1), fp_str(x2, y2, x3, y3, x4, y4))
if self._fillColor != None:
self.setColor(self._fillColor)
self.code.append((codeline % data) + ' eofill')
if self._strokeColor != None:
self.setColor(self._strokeColor)
self.code.append((codeline % data)
+ ((closed and ' closepath') or '')
+ ' stroke')
def drawArc(self, x1,y1, x2,y2, startAng=0, extent=360, fromcenter=0):
"""Draw a partial ellipse inscribed within the rectangle x1,y1,x2,y2.
Starting at startAng degrees and covering extent degrees. Angles
start with 0 to the right (+x) and increase counter-clockwise.
These should have x1<x2 and y1<y2.
"""
cx, cy = (x1+x2)/2.0, (y1+y2)/2.0
rx, ry = (x2-x1)/2.0, (y2-y1)/2.0
mx = rx * cos(startAng*pi/180) + cx
my = ry * sin(startAng*pi/180) + cy
ax = rx * cos((startAng+extent)*pi/180) + cx
ay = ry * sin((startAng+extent)*pi/180) + cy
str = ''
if fromcenter:
str = str + "M %f, %f L %f, %f " % (cx, cy, ax, ay)
if fromcenter:
str = str + "A %f, %f %d %d %d %f, %f " % \
(rx, ry, 0, extent>=180, 0, mx, my)
else:
str = str + "M %f, %f A %f, %f %d %d %d %f, %f Z " % \
(mx, my, rx, ry, 0, extent>=180, 0, mx, my)
if fromcenter:
str = str + "L %f, %f Z " % (cx, cy)
path = transformNode(self.doc, "path",
d=str, style=self._formatStyle())
self.currGroup.appendChild(path)
def polygon(self, points, closed=0, link_info=None):
assert len(points) >= 2, 'Polygon must have 2 or more points'
if self._strokeColor != None:
self.setColor(self._strokeColor)
pairs = []
for i in xrange(len(points)):
pairs.append("%f %f" % (points[i]))
pts = ', '.join(pairs)
polyline = transformNode(self.doc, "polygon",
points=pts, style=self._formatStyle(LINE_STYLES))
if link_info:
polyline = self._add_link(polyline, link_info)
self.currGroup.appendChild(polyline)
# self._fillAndStroke(polyCode)
def lines(self, lineList, color=None, width=None):
# print "### lineList", lineList
return
if self._strokeColor != None:
self._setColor(self._strokeColor)
codeline = '%s m %s l stroke'
for line in lineList:
self.code.append(codeline % (fp_str(line[0]), fp_str(line[1])))
def polyLine(self, points):
assert len(points) >= 1, 'Polyline must have 1 or more points'
if self._strokeColor != None:
self.setColor(self._strokeColor)
pairs = []
for i in xrange(len(points)):
pairs.append("%f %f" % (points[i]))
pts = ', '.join(pairs)
polyline = transformNode(self.doc, "polyline",
points=pts, style=self._formatStyle(LINE_STYLES))
self.currGroup.appendChild(polyline)
### groups ###
def startGroup(self):
if self.verbose: print "+++ begin SVGCanvas.startGroup"
currGroup, group = self.currGroup, transformNode(self.doc, "g", transform="")
currGroup.appendChild(group)
self.currGroup = group
if self.verbose: print "+++ end SVGCanvas.startGroup"
return currGroup
def endGroup(self,currGroup):
if self.verbose: print "+++ begin SVGCanvas.endGroup"
self.currGroup = currGroup
if self.verbose: print "+++ end SVGCanvas.endGroup"
def transform(self, a, b, c, d, e, f):
if self.verbose: print "!!! begin SVGCanvas.transform", a, b, c, d, e, f
tr = self.currGroup.getAttribute("transform")
t = 'matrix(%f, %f, %f, %f, %f, %f)' % (a,b,c,d,e,f)
if (a, b, c, d, e, f) != (1, 0, 0, 1, 0, 0):
self.currGroup.setAttribute("transform", "%s %s" % (tr, t))
def translate(self, x, y):
# probably never used
print "!!! begin SVGCanvas.translate"
return
tr = self.currGroup.getAttribute("transform")
t = 'translate(%f, %f)' % (x, y)
self.currGroup.setAttribute("transform", "%s %s" % (tr, t))
def scale(self, x, y):
# probably never used
print "!!! begin SVGCanvas.scale"
return
tr = self.groups[-1].getAttribute("transform")
t = 'scale(%f, %f)' % (x, y)
self.currGroup.setAttribute("transform", "%s %s" % (tr, t))
### paths ###
def moveTo(self, x, y):
self.path = self.path + 'M %f %f ' % (x, y)
def lineTo(self, x, y):
self.path = self.path + 'L %f %f ' % (x, y)
def curveTo(self, x1, y1, x2, y2, x3, y3):
self.path = self.path + 'C %f %f %f %f %f %f ' % (x1, y1, x2, y2, x3, y3)
def closePath(self):
self.path = self.path + 'Z '
def saveState(self):
pass
def restoreState(self):
pass
class _SVGRenderer(Renderer):
"""This draws onto an SVG document.
"""
def __init__(self):
self._tracker = StateTracker()
self.verbose = 0
def drawNode(self, node):
"""This is the recursive method called for each node in the tree.
"""
if self.verbose: print "### begin _SVGRenderer.drawNode(%r)" % node
self._canvas.comment('begin node %s'%`node`)
color = self._canvas._color
style = self._canvas.style.copy()
if not (isinstance(node, Path) and node.isClipPath):
pass # self._canvas.saveState()
#apply state changes
deltas = getStateDelta(node)
self._tracker.push(deltas)
self.applyStateChanges(deltas, {})
#draw the object, or recurse
self.drawNodeDispatcher(node)
rDeltas = self._tracker.pop()
if not (isinstance(node, Path) and node.isClipPath):
pass #self._canvas.restoreState()
self._canvas.comment('end node %s'%`node`)
self._canvas._color = color
#restore things we might have lost (without actually doing anything).
for k, v in rDeltas.items():
if self._restores.has_key(k):
setattr(self._canvas,self._restores[k],v)
self._canvas.style = style
if self.verbose: print "### end _SVGRenderer.drawNode(%r)" % node
_restores = {'strokeColor':'_strokeColor','strokeWidth': '_lineWidth','strokeLineCap':'_lineCap',
'strokeLineJoin':'_lineJoin','fillColor':'_fillColor','fontName':'_font',
'fontSize':'_fontSize'}
def _get_link_info_dict(self, obj):
#We do not want None or False as the link, even if it is the
#attribute's value - use the empty string instead.
url = getattr(obj, "hrefURL", "") or ""
title = getattr(obj, "hrefTitle", "") or ""
if url :
#Is it valid to have a link with no href? The XML requires
#the xlink:href to be present, but you might just want a
#tool tip shown (via the xlink:title attribute). Note that
#giving an href of "" is equivalent to "the current page"
#(a relative link saying go nowhere).
return {"xlink:href":url, "xlink:title":title, "target":"_top"}
#Currently of all the mainstream browsers I have tested, only Safari/webkit
#will show SVG images embedded in HTML using a simple <img src="..." /> tag.
#However, the links don't work (Safari 3.2.1 on the Mac).
#
#Therefore I use the following, which also works for Firefox, Opera, and
#IE 6.0 with Adobe SVG Viewer 6 beta:
#<object data="..." type="image/svg+xml" width="430" height="150" class="img">
#
#Once displayed, Firefox and Safari treat the SVG like a frame, and
#by default clicking on links acts "in frame" and replaces the image.
#Opera does what I expect, and replaces the whole page with the link.
#
#Therefore I use target="_top" to force the links to replace the whole page.
#This now works as expected on Safari 3.2.1, Firefox 3.0.6, Opera 9.20.
#Perhaps the target attribute should be an option, perhaps defaulting to
#"_top" as used here?
else :
return None
def drawGroup(self, group):
if self.verbose: print "### begin _SVGRenderer.drawGroup"
currGroup = self._canvas.startGroup()
a, b, c, d, e, f = self._tracker.getState()['transform']
for childNode in group.getContents():
if isinstance(childNode, UserNode):
node2 = childNode.provideNode()
else:
node2 = childNode
self.drawNode(node2)
self._canvas.transform(a, b, c, d, e, f)
self._canvas.endGroup(currGroup)
if self.verbose: print "### end _SVGRenderer.drawGroup"
def drawRect(self, rect):
link_info = self._get_link_info_dict(rect)
if rect.rx == rect.ry == 0:
#plain old rectangle
self._canvas.rect(
rect.x, rect.y,
rect.x+rect.width, rect.y+rect.height, link_info=link_info)
else:
#cheat and assume ry = rx; better to generalize
#pdfgen roundRect function. TODO
self._canvas.roundRect(
rect.x, rect.y,
rect.x+rect.width, rect.y+rect.height,
rect.rx, rect.ry,
link_info=link_info)
def drawString(self, stringObj):
if self._canvas._fillColor:
S = self._tracker.getState()
text_anchor, x, y, text = S['textAnchor'], stringObj.x, stringObj.y, stringObj.text
if not text_anchor in ('start', 'inherited'):
font, fontSize = S['fontName'], S['fontSize']
textLen = stringWidth(text, font,fontSize)
if text_anchor=='end':
x -= textLen
elif text_anchor=='middle':
x -= textLen/2
elif text_anchor=='numeric':
x -= numericXShift(text_anchor,text,textLen,font,fontSize)
else:
raise ValueError, 'bad value for text_anchor ' + str(text_anchor)
self._canvas.drawString(text,x,y,link_info=self._get_link_info_dict(stringObj))
def drawLine(self, line):
if self._canvas._strokeColor:
self._canvas.line(line.x1, line.y1, line.x2, line.y2)
def drawCircle(self, circle):
self._canvas.circle( circle.cx, circle.cy, circle.r, link_info=self._get_link_info_dict(circle))
def drawWedge(self, wedge):
centerx, centery, radius, startangledegrees, endangledegrees = \
wedge.centerx, wedge.centery, wedge.radius, wedge.startangledegrees, wedge.endangledegrees
yradius = wedge.yradius or wedge.radius
(x1, y1) = (centerx-radius, centery-yradius)
(x2, y2) = (centerx+radius, centery+yradius)
extent = endangledegrees - startangledegrees
self._canvas.drawArc(x1, y1, x2, y2, startangledegrees, extent, fromcenter=1)
def drawPolyLine(self, p):
if self._canvas._strokeColor:
self._canvas.polyLine(_pointsFromList(p.points))
def drawEllipse(self, ellipse):
#need to convert to pdfgen's bounding box representation
x1 = ellipse.cx - ellipse.rx
x2 = ellipse.cx + ellipse.rx
y1 = ellipse.cy - ellipse.ry
y2 = ellipse.cy + ellipse.ry
self._canvas.ellipse(x1,y1,x2,y2, link_info=self._get_link_info_dict(ellipse))
def drawPolygon(self, p):
self._canvas.polygon(_pointsFromList(p.points), closed=1, link_info=self._get_link_info_dict(p))
def drawPath(self, path):
# print "### drawPath", path.points
from reportlab.graphics.shapes import _renderPath
c = self._canvas
drawFuncs = (c.moveTo, c.lineTo, c.curveTo, c.closePath)
isClosed = _renderPath(path, drawFuncs)
if isClosed:
#Only try and add links to closed paths...
link_info = self._get_link_info_dict(path)
else :
c._fillColor = None
link_info = None
c._fillAndStroke([], clip=path.isClipPath, link_info=link_info)
def applyStateChanges(self, delta, newState):
"""This takes a set of states, and outputs the operators
needed to set those properties"""
for key, value in delta.items():
if key == 'transform':
pass
#self._canvas.transform(value[0], value[1], value[2], value[3], value[4], value[5])
elif key == 'strokeColor':
self._canvas.setStrokeColor(value)
elif key == 'strokeWidth':
self._canvas.setLineWidth(value)
elif key == 'strokeLineCap': #0,1,2
self._canvas.setLineCap(value)
elif key == 'strokeLineJoin':
self._canvas.setLineJoin(value)
elif key == 'strokeDashArray':
if value:
self._canvas.setDash(value)
else:
self._canvas.setDash()
elif key == 'fillColor':
self._canvas.setFillColor(value)
elif key in ['fontSize', 'fontName']:
fontname = delta.get('fontName', self._canvas._font)
fontsize = delta.get('fontSize', self._canvas._fontSize)
self._canvas.setFont(fontname, fontsize)
def test0(outdir='svgout'):
# print all drawings and their doc strings from the test
# file
if not os.path.isdir(outdir):
os.mkdir(outdir)
#grab all drawings from the test module
from reportlab.graphics import testshapes
drawings = []
for funcname in dir(testshapes):
#if funcname[0:11] == 'getDrawing2':
# print 'hacked to only show drawing 2'
if funcname[0:10] == 'getDrawing':
drawing = eval('testshapes.' + funcname + '()')
docstring = eval('testshapes.' + funcname + '.__doc__')
drawings.append((drawing, docstring))
# return
i = 0
for (d, docstring) in drawings:
filename = outdir + os.sep + 'renderSVG_%d.svg' % i
drawToFile(d, filename)
# print 'saved', filename
i += 1
def test1():
from reportlab.graphics.testshapes import getDrawing01
d = getDrawing01()
drawToFile(d, "svgout/test.svg")
def test2():
from reportlab.lib.corp import RL_CorpLogo
from reportlab.graphics.shapes import Drawing
rl = RL_CorpLogo()
d = Drawing(rl.width,rl.height)
d.add(rl)
drawToFile(d, "svgout/corplogo.svg")
if __name__=='__main__':
test0()
test1()
test2()
|
fergalmoran/Chrome2Kindle
|
server/reportlab/graphics/renderSVG.py
|
Python
|
mit
| 30,282 | 0.009412 |
import struct
from coinpy.lib.serialization.common.serializer import Serializer
from coinpy.lib.serialization.exceptions import MissingDataException
class VarintSerializer(Serializer):
def __init__(self, desc=""):
self.desc = desc
def serialize(self, value):
if (value < 0xfd):
return (struct.pack("<B", value))
if (value <= 0xffff):
return ("\xfd" + struct.pack("<H", value))
if (value <= 0xffffffff):
return ("\xfe" + struct.pack("<I", value))
return ("\xff" + struct.pack("<Q", value))
def get_size(self, value):
if (value < 0xfd):
return (1)
if (value <= 0xffff):
return (3)
if (value <= 0xffffffff):
return (5)
return (9)
def deserialize(self, data, cursor=0):
if (len(data) - cursor < 1):
raise MissingDataException("Decoding error: not enough data for varint")
prefix = struct.unpack_from("<B", data, cursor)[0]
cursor += 1
if (prefix < 0xFD):
return (prefix, cursor)
if (len(data) - cursor < {0xFD: 2, 0xFE: 4, 0xFF: 8}[prefix]):
raise MissingDataException("Decoding error: not enough data for varint of type : %d" % (prefix))
if (prefix == 0xFD):
return (struct.unpack_from("<H", data, cursor)[0], cursor + 2)
if (prefix == 0xFE):
return (struct.unpack_from("<I", data, cursor)[0], cursor + 4)
return (struct.unpack_from("<Q", data, cursor)[0], cursor + 8)
|
sirk390/coinpy
|
coinpy-lib/src/coinpy/lib/serialization/structures/s11n_varint.py
|
Python
|
lgpl-3.0
| 1,579 | 0.005066 |
#!/usr/bin/env python
# ===================================
# Copyright (c) Microsoft Corporation. All rights reserved.
# See license.txt for license information.
# ===================================
import socket
import os
import sys
import imp
import md5
import sha
import codecs
import base64
import platform
import shutil
protocol = imp.load_source('protocol', '../protocol.py')
nxDSCLog = imp.load_source('nxDSCLog', '../nxDSCLog.py')
LG = nxDSCLog.DSCLog
# Paths
CONFIG_PATH = '/etc/opt/microsoft/omsagent/conf/'
SERVER_ADDRESS = '/var/opt/microsoft/omsagent/npm_state/npmdagent.sock'
DEST_FILE_NAME = 'npmd_agent_config.xml'
PLUGIN_PATH = '/opt/microsoft/omsagent/plugin/'
PLUGIN_CONF_PATH = '/etc/opt/microsoft/omsagent/conf/omsagent.d/'
RESOURCE_MODULE_PATH = '/opt/microsoft/omsconfig/modules/nxOMSAgentNPMConfig/DSCResources/MSFT_nxOMSAgentNPMConfigResource/NPM/'
DSC_RESOURCE_VERSION_PATH = '/opt/microsoft/omsconfig/modules/nxOMSAgentNPMConfig/VERSION'
AGENT_RESOURCE_VERSION_PATH = '/var/opt/microsoft/omsagent/npm_state/npm_version'
DSC_X64_AGENT_PATH = 'Agent/64/'
DSC_X86_AGENT_PATH = 'Agent/32/'
DSC_PLUGIN_PATH = 'Plugin/plugin/'
DSC_PLUGIN_CONF_PATH = 'Plugin/conf/'
AGENT_BINARY_PATH = '/opt/microsoft/omsagent/plugin/'
AGENT_SCRIPT_PATH = '/opt/microsoft/omsconfig/Scripts/NPMAgentBinaryCap.sh'
# Constants
X64 = '64bit'
AGENT_BINARY_NAME = 'npmd_agent'
def enum(**enums):
return type('Enum', (), enums)
Commands = enum(LogNPM = 'ErrorLog', StartNPM = 'StartNPM', StopNPM = 'StopNPM', Config = 'Config', Purge = 'Purge')
LogType = enum(Error = 'ERROR', Info = 'INFO')
class INPMDiagnosticLog:
def log(self):
pass
class NPMDiagnosticLogUtil(INPMDiagnosticLog):
def log(self, logType, logString):
# Create a UDS socket
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
try:
try:
# Connect the socket to the port where the server is listening
sock.connect(SERVER_ADDRESS)
# Send data
message = Commands.LogNPM + ':' + '[' + logType + ']' + logString
sock.sendall(message)
except Exception, msg:
LG().Log(LogType.Error, str(msg))
finally:
sock.close()
LOG_ACTION = NPMDiagnosticLogUtil()
class IOMSAgent:
def restart_oms_agent(self):
pass
class OMSAgentUtil(IOMSAgent):
def restart_oms_agent(self):
if os.system('sudo /opt/microsoft/omsagent/bin/service_control restart') == 0:
return True
else:
LOG_ACTION.log(LogType.Error, 'Error restarting omsagent.')
return False
class INPMAgent:
def binary_setcap(self):
pass
class NPMAgentUtil(INPMAgent):
def binary_setcap(self, binaryPath):
if os.path.exists(AGENT_SCRIPT_PATH) and os.system('sudo %s %s' %(AGENT_SCRIPT_PATH, binaryPath)) == 0:
return True
else:
LOG_ACTION.log(LogType.Error, 'Error setting capabilities to npmd agent binary.')
return False
global show_mof
show_mof = False
OMS_ACTION = OMSAgentUtil()
NPM_ACTION = NPMAgentUtil()
# [key] string ConfigType;
# [write] string ConfigID;
# [write] string Contents;
# [write,ValueMap{"Present", "Absent"},Values{"Present", "Absent"}] string Ensure;
# [write] string ContentChecksum;
def init_vars(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
if ConfigType is not None and ConfigType != '':
ConfigType = ConfigType.encode('ascii', 'ignore')
else:
ConfigType = 'UpdatedAgentConfig'
if ConfigID is not None:
ConfigID = ConfigID.encode('ascii', 'ignore')
else:
ConfigID = ''
if Contents is not None:
Contents = base64.b64decode(Contents)#Contents.encode('ascii', 'ignore')
else:
Contents = ''
if Ensure is not None and Ensure != '':
Ensure = Ensure.encode('ascii', 'ignore')
else:
Ensure = 'Present'
if ContentChecksum is not None:
ContentChecksum = ContentChecksum.encode('ascii', 'ignore')
else:
ContentChecksum = ''
return ConfigType, ConfigID, Contents, Ensure, ContentChecksum
def Set_Marshall(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
recvdContentChecksum = md5.md5(Contents).hexdigest().upper()
if recvdContentChecksum != ContentChecksum:
LOG_ACTION.log(LogType.Info, 'Content received did not match checksum with md5, trying with sha1')
# validate with sha1
recvdContentChecksum = sha.sha(Contents).hexdigest().upper()
if recvdContentChecksum != ContentChecksum:
# data is corrupt do not proceed further
LOG_ACTION.log(LogType.Error, 'Content received did not match checksum with sha1, exiting Set')
return [-1]
(ConfigType, ConfigID, Contents, Ensure, ContentChecksum) = init_vars(ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
retval = Set(ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
return retval
def Test_Marshall(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
recvdContentChecksum = md5.md5(Contents).hexdigest().upper()
if recvdContentChecksum != ContentChecksum:
LOG_ACTION.log(LogType.Info, 'Content received did not match checksum with md5, trying with sha1')
# validate with sha1
recvdContentChecksum = sha.sha(Contents).hexdigest().upper()
if recvdContentChecksum != ContentChecksum:
# data is corrupt do not proceed further
LOG_ACTION.log(LogType.Error, 'Content received did not match checksum with sha1, exiting Set')
return [0]
(ConfigType, ConfigID, Contents, Ensure, ContentChecksum) = init_vars(ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
retval = Test(ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
return retval
def Get_Marshall(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
arg_names = list(locals().keys())
(ConfigType, ConfigID, Contents, Ensure, ContentChecksum) = init_vars(ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
retval = 0
retval = Get(ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
ConfigType = protocol.MI_String(ConfigType)
ConfigID = protocol.MI_String(ConfigID)
Ensure = protocol.MI_String(Ensure)
Contents = protocol.MI_String(Contents)
ContentChecksum = protocol.MI_String(ContentChecksum)
retd = {}
ld = locals()
for k in arg_names:
retd[k] = ld[k]
return retval, retd
############################################################
# Begin user defined DSC functions
############################################################
def SetShowMof(a):
global show_mof
show_mof = a
def ShowMof(op, ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
if not show_mof:
return
mof = ''
mof += op + ' nxOMSAgentNPMConfig MyNPMConfig \n'
mof += '{\n'
mof += ' ConfigType = "' + ConfigType + '"\n'
mof += ' ConfigID = "' + ConfigID + '"\n'
mof += ' Contents = "' + Contents + '"\n'
mof += ' Ensure = "' + Ensure + '"\n'
mof += ' ContentChecksum = "' + ContentChecksum + '"\n'
mof += '}\n'
f = open('./test_mofs.log', 'a')
Print(mof, file=f)
LG().Log(LogType.Info, mof)
f.close()
def Set(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
ShowMof('SET', ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
retval = 0
if ConfigType != 'UpdatedAgentConfig':
LOG_ACTION.log(LogType.Error, 'Config type did not match, exiting set')
return [-1]
if Ensure == 'Absent':
if os.path.exists(AGENT_RESOURCE_VERSION_PATH):
LG().Log(LogType.Info, 'Ensure is absent, but resource is present, purging')
success = PurgeSolution()
if not success:
retval = -1
return [retval]
if TestConfigUpdate(Contents) != 0:
retval = SetConfigUpdate(Contents)
version = TestResourceVersion()
if version != 0:
retval = SetFilesUpdate(version)
return [retval]
def Test(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
ShowMof('TEST', ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
retval = 0
if not os.path.exists(AGENT_SCRIPT_PATH):
LG().Log(LogType.Error, 'npmd set cap script does not exist, exiting test')
return [retval]
if ConfigType != 'UpdatedAgentConfig':
LOG_ACTION.log(LogType.Error, 'Config type did not match, exiting test')
return [retval]
if Ensure == 'Absent':
if os.path.exists(AGENT_RESOURCE_VERSION_PATH):
LG().Log(LogType.Info, 'Ensure is absent, resource is present on the agent, set will purge')
retval = -1
return [retval]
if TestResourceVersion() != 0 or TestConfigUpdate(Contents) != 0:
retval = -1
return [retval]
def Get(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
retval = 0
ShowMof('GET', ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
return [retval]
def Print(s, file=sys.stdout):
file.write(s + '\n')
# Compare resource version in DSC and agent machine
# Returns
# 0 if version is same
# dsc version number if there is a mismatch or agent config not present
def TestResourceVersion():
retval = 0
dscVersion = ReadFile(DSC_RESOURCE_VERSION_PATH)
if not os.path.exists(AGENT_RESOURCE_VERSION_PATH):
#npmd agent is not present, copy binaries
retval = dscVersion
else:
agentVersion = ReadFile(AGENT_RESOURCE_VERSION_PATH)
if agentVersion != dscVersion:
#version mismatch, copy binaries
retval = dscVersion
return retval
def TestConfigUpdate(Contents):
retval = 0
    destFileFullPath = CONFIG_PATH + DEST_FILE_NAME
if not os.path.exists(CONFIG_PATH):
LOG_ACTION.log(LogType.Error, 'CONFIG_PATH does not exist')
retval = 0
elif not os.path.exists(destFileFullPath):
# Configuration does not exist, fail
retval = -1
else:
origConfigData = ReadFile(destFileFullPath)
#compare
if origConfigData is None or origConfigData != Contents:
retval = -1
return retval
def SetConfigUpdate(Contents):
    destFileFullPath = CONFIG_PATH + DEST_FILE_NAME
# Update config after checking if directory exists
if not os.path.exists(CONFIG_PATH):
LOG_ACTION.log(LogType.Error, 'CONFIG_PATH does not exist')
retval = -1
else:
retval = WriteFile(destFileFullPath, Contents)
if retval == 0 and os.path.exists(AGENT_RESOURCE_VERSION_PATH): #notify server only if plugin is present
LG().Log(LogType.Info, 'Updated the file, going to notify server')
NotifyServer(Commands.Config)
return retval
def SetFilesUpdate(newVersion):
retval = UpdateAgentBinary(newVersion)
retval &= UpdatePluginFiles()
if retval:
return 0
return -1
def UpdateAgentBinary(newVersion):
retval = True
arch = platform.architecture()
src = ''
if arch is not None and arch[0] == X64:
        src = RESOURCE_MODULE_PATH + DSC_X64_AGENT_PATH
retval &= DeleteAllFiles(src, AGENT_BINARY_PATH)
retval &= CopyAllFiles(src, AGENT_BINARY_PATH)
else:
        src = RESOURCE_MODULE_PATH + DSC_X86_AGENT_PATH
retval &= DeleteAllFiles(src, AGENT_BINARY_PATH)
retval &= CopyAllFiles(src, AGENT_BINARY_PATH)
        LOG_ACTION.log(LogType.Error, 'npmd agent binary does not support 32-bit.')
#Update version number after deleting and copying new agent files
    if retval:
        WriteFile(AGENT_RESOURCE_VERSION_PATH, newVersion)
        # set capabilities to binary; guard against no matching binary file
        src_files = os.listdir(src)
        full_file_name = None
        for file_name in src_files:
            if AGENT_BINARY_NAME in file_name:
                full_file_name = os.path.join(AGENT_BINARY_PATH, file_name)
                break
        if full_file_name is not None:
            NPM_ACTION.binary_setcap(full_file_name)
# Notify ruby plugin
#retval &= NotifyServer(Commands.RestartNPM)
return retval
def UpdatePluginFiles():
retval = True
#replace files
    retval &= DeleteAllFiles(RESOURCE_MODULE_PATH + DSC_PLUGIN_PATH, PLUGIN_PATH)
    retval &= DeleteAllFiles(RESOURCE_MODULE_PATH + DSC_PLUGIN_CONF_PATH, PLUGIN_CONF_PATH)
    retval &= CopyAllFiles(RESOURCE_MODULE_PATH + DSC_PLUGIN_PATH, PLUGIN_PATH)
    retval &= CopyAllFiles(RESOURCE_MODULE_PATH + DSC_PLUGIN_CONF_PATH, PLUGIN_CONF_PATH)
# restart oms agent
retval &= OMS_ACTION.restart_oms_agent()
return retval
def CopyAllFiles(src, dest):
try:
src_files = os.listdir(src)
for file_name in src_files:
full_file_name = os.path.join(src, file_name)
if (os.path.isfile(full_file_name)):
shutil.copy(full_file_name, dest)
except:
LOG_ACTION.log(LogType.Error, 'copy_all_files failed for src: ' + src + ' dest: ' + dest)
return False
return True
# Delete from dest every file whose name also appears in src
def DeleteAllFiles(src, dest):
try:
src_files = os.listdir(src)
for file_name in src_files:
full_file_name = os.path.join(dest, file_name)
if (os.path.isfile(full_file_name)):
os.remove(full_file_name)
except:
LOG_ACTION.log(LogType.Error, 'delete_all_files failed for src: ' + src + ' dest: ' + dest)
return False
return True
def PurgeSolution():
# remove plugin config file so that plugin does not start again
    retval = DeleteAllFiles(RESOURCE_MODULE_PATH + DSC_PLUGIN_CONF_PATH, PLUGIN_CONF_PATH)
# remove resource version file
try:
os.remove(AGENT_RESOURCE_VERSION_PATH)
except:
LOG_ACTION.log(LogType.Error, 'failed to remove version file')
retval = False
# notify ruby plugin to purge agent
NotifyServer(Commands.Purge)
return retval
def NotifyServer(command):
# Create a UDS socket
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
LG().Log(LogType.Info, 'connecting to ' + SERVER_ADDRESS)
try:
try:
# Connect the socket to the port where the server is listening
sock.connect(SERVER_ADDRESS)
# Send data
message = command
LG().Log(LogType.Info, 'sending ' + message)
sock.sendall(message)
except Exception, msg:
LG().Log(LogType.Error, str(msg))
# restart omsagent if command was config update and sock conn failed
if (command == Commands.Config):
OMS_ACTION.restart_oms_agent()
finally:
LG().Log(LogType.Info, 'closing socket')
sock.close()
def WriteFile(path, contents):
retval = 0
try:
dFile = open(path, 'w+')
dFile.write(contents)
dFile.close()
except IOError, error:
LOG_ACTION.log(LogType.Error, "Exception opening file " + path + " Error Code: " + str(error.errno) + " Error: " + error.message + error.strerror)
retval = -1
return retval
def ReadFile(path):
content = None
try:
        dFile = codecs.open(path, encoding='utf8', mode='r')
        content = dFile.read()
        dFile.close()
    except IOError, error:
        LOG_ACTION.log(LogType.Error, "Exception opening file " + path + " Error Code: " + str(error.errno) + " Error: " + error.message + " " + error.strerror)
return content
|
MSFTOSSMgmt/WPSDSCLinux
|
Providers/Scripts/2.4x-2.5x/Scripts/nxOMSAgentNPMConfig.py
|
Python
|
mit
| 15,607 | 0.006023 |
# -*- coding: utf-8 -*-
"""API documentation build configuration file.
This file is :func:`execfile`\\ d with the current directory set to its
containing dir.
Note that not all possible configuration values are present in this
autogenerated file.
All configuration values have a default; values that are commented out
serve to show the default.
"""
from datetime import date as _date
import re as _re
from textwrap import dedent as _dedent
import docutils.parsers.rst as _rst
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.coverage',
'sphinx.ext.doctest',
'sphinx.ext.ifconfig',
'sphinx.ext.intersphinx',
'sphinx.ext.mathjax',
'sphinx.ext.todo',
'sphinx.ext.viewcode',
'sphinxcontrib.cheeseshop',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u''
author = u''
copyright = u'{} {}'.format(_date.today().year, author)
description = u''
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = ''
# The full version, including alpha/beta/rc tags.
release = ''
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
rst_prolog = \
'''\
.. role:: bash(code)
:language: bash
.. role:: python(code)
:language: python
'''
rst_prolog = _dedent(rst_prolog)
nitpicky = True
# FIXME: encapsulate this in a Sphinx extension. make ``rfc_uri_tmpl`` a
# Sphinx config setting
rfc_uri_tmpl = 'https://tools.ietf.org/html/rfc{}.html'
def rfc_role(role, rawtext, text, lineno, inliner, options={}, content=[]):
_rst.roles.set_classes(options)
rfcrefpattern = r'(?:(?P<displaytext>[^<]*)'\
r' <)?(?P<refname>[^>]*)(?(displaytext)>|)'
match = _re.match(rfcrefpattern, _rst.roles.utils.unescape(text))
if match:
rfcnum, anchorsep, anchor = match.group('refname').partition('#')
try:
rfcnum = int(rfcnum)
if rfcnum <= 0:
raise ValueError
except ValueError:
message = \
inliner\
.reporter\
.error('invalid RFC number {!r}; expected a positive integer'
.format(rfcnum),
line=lineno)
problem = inliner.problematic(rawtext, rawtext, message)
return [problem], [message]
uri = rfc_uri_tmpl.format(rfcnum)
if anchor:
uri += anchorsep + anchor
displaytext = match.group('displaytext')
if displaytext:
refnode = _rst.nodes.reference(rawtext, displaytext, refuri=uri,
**options)
else:
displaytext = 'RFC {}'.format(rfcnum)
if anchor:
displaytext += ' ' + anchor.replace('-', ' ')
strongnode = _rst.nodes.strong(rawtext, displaytext)
refnode = _rst.nodes.reference('', '', strongnode, refuri=uri,
**options)
return [refnode], []
else:
message = \
inliner\
.reporter\
.error('invalid RFC reference {!r}'.format(text), line=lineno)
problem = inliner.problematic(rawtext, rawtext, message)
return [problem], [message]
_rst.roles.register_local_role('rfc', rfc_role)
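# Example uses of the custom :rfc: role defined above (illustrative reST only):
#   :rfc:`2616`                 -> bold link reading "RFC 2616"
#   :rfc:`2616#section-9`       -> bold link reading "RFC 2616 section 9"
#   :rfc:`the HTTP spec <2616>` -> plain link with custom display text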
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'nature'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = '{}-doc'.format(project)
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', '{}.tex'.format(project), u'{} documentation'.format(project),
author, 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [('index', project.lower(), u'{} documentation'.format(project),
[author], 1)]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [('index', project, u'{} documentation'.format(project),
author, project, description, 'Miscellaneous')]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# -- Options for Epub output ---------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files shat should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
#epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# -- Options for intersphinx ---------------------------------------------------
intersphinx_mapping = \
{'python': ('http://docs.python.org/2/', None),
'sphinx': ('http://sphinx.readthedocs.org/en/latest/', None),
}
# -- Options for autodoc -------------------------------------------------------
autodoc_default_flags = ['show-inheritance']
|
nisavid/spruce-project
|
doc/conf.tmpl.py
|
Python
|
lgpl-3.0
| 11,930 | 0.006287 |
import os
import sys
import string
filenames = os.listdir(os.getcwd())
for file in filenames:
if os.path.splitext(file)[1] == ".o" or os.path.splitext(file)[1] == ".elf" :
print "objdumparm.exe -D "+file
os.system("C:/WindRiver/gnu/4.1.2-vxworks-6.8/x86-win32/bin/objdumparm.exe -D "+file +" > " +file + ".txt")
os.system("pause")
|
honor6-dev/android_kernel_huawei_h60
|
drivers/vendor/hisi/build/scripts/obj_cmp_tools/vxworks_dassemble.py
|
Python
|
gpl-2.0
| 348 | 0.022989 |
"""
WSGI config for cloudlynt project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "cloudlynt.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
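# A minimal middleware sketch along those lines (the header name below is a
# hypothetical example; uncomment to enable):
# class HeaderInjectionMiddleware(object):
#     """Wrap a WSGI app and append a header to every response."""
#     def __init__(self, app):
#         self.app = app
#     def __call__(self, environ, start_response):
#         def custom_start_response(status, headers, exc_info=None):
#             headers.append(('X-Cloudlynt', '1'))
#             return start_response(status, headers, exc_info)
#         return self.app(environ, custom_start_response)
# application = HeaderInjectionMiddleware(application)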
|
drunken-pypers/cloudlynt
|
cloudlynt/wsgi.py
|
Python
|
mit
| 1,140 | 0.000877 |
# stupid 2.7.2 by peterm, patch by Smack
# http://robotgame.org/viewrobot/5715
import random
import math
import rg
def around(l):
return rg.locs_around(l)
def around2(l):
return [(l[0]+2, l[1]), (l[0]+1, l[1]+1), (l[0], l[1]+2), (l[0]-1, l[1]+1),
(l[0]-2, l[1]), (l[0]-1, l[1]-1), (l[0], l[1]-2), (l[0]+1, l[1]-1)]
def diag(l1, l2):
if rg.wdist(l1, l2) == 2:
if abs(l1[0] - l2[0]) == 1:
return True
return False
def infront(l1, l2):
if rg.wdist(l1, l2) == 2:
if diag(l1, l2):
return False
else:
return True
return False
def mid(l1, l2):
return (int((l1[0]+l2[0]) / 2), int((l1[1]+l2[1]) / 2))
def sign(x):
if x > 0:
return 1
elif x == 0:
return 0
else:
return -1
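# Worked examples for the geometry helpers above (coordinates are (x, y)):
#   diag((5, 5), (6, 6))    -> True   (wdist == 2 and |dx| == 1: diagonal)
#   infront((5, 5), (5, 7)) -> True   (wdist == 2 in a straight line)
#   mid((5, 5), (5, 7))     -> (5, 6) (square between two in-line locations)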
class Robot:
def act(self, game):
robots = game['robots']
##print self.location, "starts thinking"
def isenemy(l):
            if robots.get(l) is not None:
if robots[l]['player_id'] != self.player_id:
return True
return False
def isteammate(l):
            if robots.get(l) is not None:
if robots[l]['player_id'] == self.player_id:
return True
return False
def isempty(l):
if ('normal' in rg.loc_types(l)) and not ('obstacle' in rg.loc_types(l)):
                if robots.get(l) is None:
return True
return False
def isspawn(l):
if 'spawn' in rg.loc_types(l):
return True
return False
# scan the area around
enemies = []
for loc in around(self.location):
if isenemy(loc):
enemies.append(loc)
moveable = []
moveable_safe = []
for loc in around(self.location):
if isempty(loc):
moveable.append(loc)
if isempty(loc) and not isspawn(loc):
moveable_safe.append(loc)
def guard():
return ['guard']
def suicide():
return ['suicide']
def canflee():
return len(moveable) > 0
def flee():
if len(moveable_safe) > 0:
return ['move', random.choice(moveable_safe)]
if len(moveable) > 0:
return ['move', random.choice(moveable)]
return guard()
def canattack():
return len(enemies) > 0
def attack():
r = enemies[0]
for loc in enemies:
if robots[loc]['hp'] > robots[r]['hp']:
r = loc
return ['attack', r]
def panic():
if canflee():
return flee()
elif canattack():
return attack()
else:
return guard()
def imove(to):
f = self.location
d = (to[0]-f[0], to[1]-f[1])
di = (sign(d[0]), sign(d[1]))
good = []
if di[0]*di[1] != 0:
good.append((di[0], 0))
good.append((0, di[1]))
else:
good.append(di)
for dmove in good:
loc = (f[0]+dmove[0], f[1]+dmove[1])
if isempty(loc):
return ['move', loc]
return flee()
##print "There are", len(enemies), "enemies close"
if len(enemies) > 1:
            # are we going to die next turn if we don't move?
if self.hp <= len(enemies)*10:
# it's ok to suicide if you take someone else with you
for loc in enemies:
if robots[loc]['hp'] <= 15:
##print "Suicide!"
                        pass  # return suicide()
##print "Too many enemies around, panic!"
return panic()
elif len(enemies) == 1:
if self.hp <= 10:
if robots[enemies[0]]['hp'] > 15:
##print "Enemy will kill me, panic!"
return panic()
elif robots[enemies[0]]['hp'] <= 10:
##print "I will kill enemy, attack!"
return attack()
#else:
# # might tweak this
# ##print "I'm too low on health, suicide!"
# return suicide()
else:
if robots[enemies[0]]['hp'] <= 10:
if self.hp <= 15:
# avoid suiciders
##print "Avoiding suicider, panic!"
return panic()
else:
##print "Attack!"
return attack()
# if we're at spawn, get out
if isspawn(self.location):
##print "I'm on spawn, panic!"
return panic()
closehelp = None
prediction = None
# are there enemies in 2 squares?
for loc in around2(self.location):
if isenemy(loc):
##print "Enemy in 2 squares:", loc
# try to help teammates
for loc2 in around(loc):
if isteammate(loc2):
##print "And a teammate close to him:", loc2
closehelp = imove(loc)
# predict and attack
if infront(loc, self.location):
prediction = ['attack', mid(loc, self.location)]
elif rg.wdist(rg.toward(loc, rg.CENTER_POINT), self.location) == 1:
prediction = ['attack', rg.toward(loc, rg.CENTER_POINT)]
else:
prediction = ['attack', (self.location[0], loc[1])]
if closehelp != None:
##print "Help teammate fight:", closehelp
return closehelp
if prediction != None:
##print "Predict:", prediction
return prediction
# move randomly
##print "Can't decide, panic!"
return panic()
|
andrewgailey/robogen
|
robogen/rgkit/backup bots/stupid272.py
|
Python
|
unlicense
| 6,333 | 0.011211 |
'''
Given an XML file that describes a text file containing a header and a table, parse
the XML into its descriptive elements.
Created on Feb 27, 2017
@author: cyoung
'''
import xml.etree.ElementTree as ElementTree
from gov.noaa.gmd.table_2_netcdf.Util import Util
class TableDataDesc:
#XML element names
ELEMENT_NAME="name"
ELEMENT_DATA_TYPE="data-type"
ELEMENT_GLOBAL_ATTRIBUTE="global-attribute"
ELEMENT_GLOBAL_ATTRIBUTE_STRATEGY="global-attribute-strategy"
ELEMENT_HEADER_STRATEGY="header-strategy"
ELEMENT_CLASS_NAME="class-name"
ELEMENT_VARIABLE="variable"
ELEMENT_VARIABLE_ATTRIBUTE="variable-attribute"
ELEMENT_VARIABLE_ATTRIBUTE_STRATEGY="variable-attribute-strategy"
ELEMENT_VARIABLE_NAME="variable-name"
ELEMENT_VARIABLE_STRATEGY="variable-strategy"
def __init__ (self, xmlFile):
self.xmlFile=xmlFile
self.tree = ElementTree.parse(xmlFile)
def getAllColumnDesc (self):
pass
def getAllGlobalAttributeDesc(self):
root = self.tree.getroot()
elements=root.findall(".//"+self.ELEMENT_GLOBAL_ATTRIBUTE)
gads=[]
for e in elements:
gads.append(self.__getGlobalAttributeDesc(e))
return gads
def getAllVariableAttributeDesc(self):
root = self.tree.getroot()
elements=root.findall(".//"+self.ELEMENT_VARIABLE_ATTRIBUTE)
gads=[]
for e in elements:
gads.append(self.__getVariableAttributeDesc(e))
return gads
def getAllVariableDesc(self):
root = self.tree.getroot()
elements=root.findall(".//"+self.ELEMENT_VARIABLE)
gads=[]
for e in elements:
gads.append(self.__getVariableDesc(e))
return gads
def getColumnDesc(self, columnName):
pass
def getGlobalAttributeDesc(self, attributeName):
element=self.__getGlobalAttributeElement(attributeName)
return self.__getGlobalAttributeDesc(element)
def getGlobalAttributeStrategyDesc(self, attributeName):
element=self.__getGlobalAttributeStrategyElement(attributeName)
className=element.find(self.ELEMENT_CLASS_NAME).text
return GlobalAttributeStrategyDesc(className)
def getHeaderStrategyDesc(self):
element=self.__getHeaderStrategyElement()
className=element.find(self.ELEMENT_CLASS_NAME).text
return HeaderStrategyDesc(className)
def getVariableAttributeDesc(self, variableName):
pass
def getVariableAttributeStrategyDesc(self, variableName):
pass
    def getVariableDesc(self, variableName):
        root = self.tree.getroot()
        elements=root.findall(".//"+self.ELEMENT_VARIABLE)
        for e in elements:
            if e.find(self.ELEMENT_NAME).text == variableName:
                return self.__getVariableDesc(e)
        raise Exception(self.ELEMENT_VARIABLE+" element with name '"+variableName+
                        "' not found in file '"+self.xmlFile+"'.")
def __getGlobalAttributeDesc(self, element):
name=element.find(self.ELEMENT_NAME).text
dataType=element.find(self.ELEMENT_DATA_TYPE).text
child=element.find(self.ELEMENT_GLOBAL_ATTRIBUTE_STRATEGY)
className=child.find(self.ELEMENT_CLASS_NAME).text
strategyDesc=GlobalAttributeStrategyDesc(className)
return GlobalAttributeDesc(name, dataType, strategyDesc)
def __getGlobalAttributeElement(self, attributeName):
root = self.tree.getroot()
elements=root.findall(".//"+self.ELEMENT_GLOBAL_ATTRIBUTE)
element=None
for e in elements:
if e.find(self.ELEMENT_NAME).text == attributeName:
element=e
break
if element is None:
raise Exception(self.ELEMENT_GLOBAL_ATTRIBUTE+" element with name '"+attributeName+
"' not found in file '"+self.xmlFile+"'.")
return element
def __getGlobalAttributeStrategyElement(self, attributeName):
globalAttributeElement=self.__getGlobalAttributeElement(attributeName)
element=globalAttributeElement.find(self.ELEMENT_GLOBAL_ATTRIBUTE_STRATEGY)
if element is None:
raise Exception(self.ELEMENT_GLOBAL_ATTRIBUTE_STRATEGY+" element with name '"+attributeName+
"' not found in file '"+self.xmlFile+"'.")
return element
def __getVariableAttributeDesc(self, element):
name=element.find(self.ELEMENT_VARIABLE_NAME).text
dataType=element.find(self.ELEMENT_DATA_TYPE).text
child=element.find(self.ELEMENT_VARIABLE_ATTRIBUTE_STRATEGY)
className=child.find(self.ELEMENT_CLASS_NAME).text
strategyDesc=VariableAttributeStrategyDesc(className)
return VariableAttributeDesc(name, dataType, "attributes", strategyDesc)
def __getVariableDesc(self, element):
name=element.find(self.ELEMENT_NAME).text
child=element.find(self.ELEMENT_VARIABLE_STRATEGY)
className=child.find(self.ELEMENT_CLASS_NAME).text
strategyDesc=VariableStrategyDesc(className)
return VariableDesc(name, strategyDesc)
def __getHeaderStrategyElement(self):
root = self.tree.getroot()
elements=root.findall(".//"+self.ELEMENT_HEADER_STRATEGY)
if len(elements) == 0:
raise Exception(self.ELEMENT_HEADER_STRATEGY+" element "+
"' not found in file '"+self.xmlFile+"'.")
return elements[0]
def __eq__(self, other):
if self.xmlFile != other.xmlFile:
return False
return True
class ColumnDesc:
def __init__ (self, columnName, index, dataType):
self.columnName=columnName
self.index=index
self.dataType=dataType
def getColumnName(self):
return self.columnName
def getDataType(self):
return self.dataType
def getIndex(self):
return self.index
def __eq__(self, other):
if self.columnName != other.columnName:
return False
if self.index != other.index:
return False
if self.dataType != other.dataType:
return False
return True
class GlobalAttributeDesc:
def __init__ (self, attributeName, attributeType, globalAttributeStrategyDesc):
self.attributeName=attributeName
self.attributeType=attributeType
self.globalAttributeStrategyDesc=globalAttributeStrategyDesc
def getAttributeName(self):
return self.attributeName
def getAttributeType(self):
return self.attributeType
def getGlobalAttributeStrategyDesc(self):
return self.globalAttributeStrategyDesc
def __eq__(self, other):
if self.attributeName != other.attributeName:
return False
if self.attributeType != other.attributeType:
return False
if self.globalAttributeStrategyDesc != other.globalAttributeStrategyDesc:
return False
return True
#A base class for strategy descriptions.
class StrategyDesc(object):
#Hold the name of the strategy class to be loaded.
def __init__ (self, strategyClassName):
self.strategyClassName=strategyClassName
def getStrategyClassName(self):
return self.strategyClassName
def __eq__(self, other):
if self.strategyClassName != other.strategyClassName:
return False
return True
class GlobalAttributeStrategyDesc(StrategyDesc):
def __init__ (self, strategyClassName):
super().__init__(strategyClassName)
def getStrategyClassName(self):
return self.strategyClassName
#Return the value parsed from the header of the given global attribute
def parse (self, attributeName, header):
#Instantiate the strategy class by name.
c=Util().getClass(self.strategyClassName)
return c.parse(attributeName, header)
class HeaderStrategyDesc(StrategyDesc):
def __init__ (self, strategyClassName):
super().__init__(strategyClassName)
def getStrategyClassName(self):
return self.strategyClassName
#Return the header parsed from the file.
def parse (self, file):
c=Util().getClass(self.strategyClassName)
return c.parse(file)
class VariableAttributeDesc:
def __init__ (self, variableName, variableType, attributes, variableAttributeStrategyDesc):
self.variableName=variableName
self.variableType=variableType
self.attributes=attributes
self.variableAttributeStrategyDesc=variableAttributeStrategyDesc
def getVariableName(self):
return self.variableName
def getVariableType(self):
return self.variableType
def getAttributes(self):
return self.attributes
def getVariableAttributeStrategyDesc(self):
return self.variableAttributeStrategyDesc
def __eq__(self, other):
if self.variableName != other.variableName:
return False
if self.variableType != other.variableType:
return False
if self.attributes != other.attributes:
return False
return True
#A strategy for parsing variable attributes
class VariableAttributeStrategyDesc:
def __init__ (self, strategyClassName):
self.strategyClassName=strategyClassName
#Parse the variable attributes from the header
def parse (self, variableName, header):
#Return the variable attribute
return Util().getClass(self.strategyClassName).parse(variableName, header)
class VariableDesc:
def __init__ (self, variableName, variableStrategyDesc):
self.variableName=variableName
self.variableStrategyDesc=variableStrategyDesc
def getVariableName(self):
return self.variableName
def getVariableStrategyDesc(self):
return self.variableStrategyDesc
def __eq__(self, other):
if self.variableName != other.variableName:
return False
if self.variableStrategyDesc != other.variableStrategyDesc:
return False
return True
#A strategy for parsing variable attributes
class VariableStrategyDesc:
def __init__ (self, strategyClassName):
self.strategyClassName=strategyClassName
#Parse the variable attributes from the header
def parse (self, variableName, header):
#Return the variable
return Util().getClass(self.strategyClassName).parse(variableName, header)
#A variable attribute. Variables may have multiple attributes.
class Attribute:
def __init__ (self, name, value):
self.name=name
self.value=value
def getName(self):
return self.name
def getValue(self):
return self.value
def __eq__(self, other):
if self.name != other.name:
return False
if self.value != other.value:
return False
return True
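# A minimal usage sketch (the XML and data file names are hypothetical):
#   desc = TableDataDesc('table-desc.xml')
#   for gad in desc.getAllGlobalAttributeDesc():
#       print(gad.getAttributeName(), gad.getAttributeType())
#   header = desc.getHeaderStrategyDesc().parse('data.txt')
#   variables = desc.getAllVariableDesc()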
|
charles-g-young/Table2NetCDF
|
gov/noaa/gmd/table_2_netcdf/TableDataDesc.py
|
Python
|
apache-2.0
| 11,062 | 0.013741 |
"""phial's custom errors."""
class ArgumentValidationError(Exception):
"""Exception indicating argument validation has failed."""
pass
class ArgumentTypeValidationError(ArgumentValidationError):
"""Exception indicating argument type validation has failed."""
pass
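# A usage sketch (the validator below is hypothetical, not part of phial):
#   def require_int(value):
#       if not isinstance(value, int):
#           raise ArgumentTypeValidationError('expected an int, got {!r}'
#                                             .format(value))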
|
sedders123/phial
|
phial/errors.py
|
Python
|
mit
| 286 | 0 |
# Copyright (c) 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from troveclient import base
class Flavor(base.Resource):
"""
A Flavor is an Instance type, specifying among other things, RAM size.
"""
def __repr__(self):
return "<Flavor: %s>" % self.name
class Flavors(base.ManagerWithFind):
"""
Manage :class:`Flavor` resources.
"""
resource_class = Flavor
def __repr__(self):
return "<Flavors Manager at %s>" % id(self)
def _list(self, url, response_key):
resp, body = self.api.client.get(url)
if not body:
raise Exception("Call to " + url + " did not return a body.")
return [self.resource_class(self, res) for res in body[response_key]]
def list(self):
"""
Get a list of all flavors.
:rtype: list of :class:`Flavor`.
"""
return self._list("/flavors", "flavors")
def get(self, flavor):
"""
Get a specific flavor.
:rtype: :class:`Flavor`
"""
return self._get("/flavors/%s" % base.getid(flavor),
"flavor")
|
citrix-openstack/build-python-troveclient
|
troveclient/flavors.py
|
Python
|
apache-2.0
| 1,700 | 0 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import os
import wx
import wx.lib.newevent
from threading import Thread, Lock
import signal
import logging
from argparse import ArgumentParser, SUPPRESS
from ConfigParser import ConfigParser
from subprocess import Popen
import subprocess as sb
# CoreEventHandler
import socket
import urllib2
import json
import re
# local libraries
from common import APPNAME
from common import DEFAULT_PORT
from log import LoggingConfiguration
from command import Command
from crossplatform import CrossPlatform
from avahiservice import AvahiService
from streamserver import StreamServer
from streamreceiver import StreamReceiver
from areachooser import FrmAreaChooser
from common import VERSION
SomeNewEvent, EVT_SOME_NEW_EVENT = wx.lib.newevent.NewEvent()
class UiAdvanced(wx.Frame):
def __init__(self, parent, title, core):
super(UiAdvanced, self).__init__(parent, title=title,
size=wx.DefaultSize,
style=wx.DEFAULT_FRAME_STYLE)
self._core = core
self._core.register_listener(self)
self._input = dict()
self.Bind(EVT_SOME_NEW_EVENT, self.handler)
self.Bind(wx.EVT_CLOSE, self.OnCloseWindow)
# ~/Downloads/png2ico/png2ico icon.ico
# desktop-mirror-64.png desktop-mirror-32.png desktop-mirror-16.png
self.SetIcon(wx.Icon(CrossPlatform.get().share_path('icon.ico'),
wx.BITMAP_TYPE_ICO))
self.InitUI()
self.ConfigLoad()
self.OnClickFullScreen(None)
self.Centre()
self.Show()
def ConfigLoad(self):
#filepath = CrossPlatform.get().user_config_path('ui.ini')
#if not os.path.exists(filepath):
filepath = CrossPlatform.get().system_config_path()
logging.info('Loading config from ' + filepath)
config = ConfigParser()
config.read(filepath)
if not config.has_section('input'):
config.add_section('input')
else:
for w in self._input:
if config.has_option('input', w):
self._input[w].SetValue(config.get('input', w))
self.config = config
def ConfigSave(self):
config = self.config
for w in self._input:
config.set('input', w, self._input[w].GetValue())
filepath = CrossPlatform.get().user_config_path('ui.ini')
logging.info('Saving config to ' + filepath)
with open(filepath, 'w') as configfile:
config.write(configfile)
def OnAvahi(self, data):
hosts = self._core.hosts
unique = []
targets = self._core.targets
widget = self._input['address']
val = widget.GetValue()
widget.Clear()
#logging.debug('val: {}'.format(val))
#logging.debug('hosts: {}'.format(hosts))
for f in targets:
for service in targets[f]:
key = service['host']
if key in unique:
continue
unique.append(key)
t = {'host': service['host'],
'service': service['service'],
'port': service['port'],
'ip': hosts[service['host']][0]}
logging.debug('Adding one {}'.format(t))
widget.Append('{} - {}:{}'.format(t['host'],
t['ip'],
t['port']))
widget.SetClientData(widget.GetCount() - 1, t)
# After appending, widget value will be cleared
widget.SetValue(val)
def OnSelection(self, data):
self._input['x'].SetValue(str(data[0]))
self._input['y'].SetValue(str(data[1]))
self._input['w'].SetValue(str(data[2]))
self._input['h'].SetValue(str(data[3]))
self._input_rb_area.SetLabel('Area ({}x{}+{}+{})'.format(
data[2],
data[3],
data[0],
data[1]))
def OnStreamServer(self, data):
#status_str = {StreamServer.S_STOPPED: 'Stopped',
# StreamServer.S_STARTING: 'Start...',
# StreamServer.S_STARTED: 'Started',
# StreamServer.S_STOPPING: 'Stop...'}
#self.statusbar.SetStatusText(status_str[data])
if StreamServer.S_STARTED != data:
return
ip = self._target['ip']
ports = (self._target['port'],)
service = self._target['service']
if service == 'auto':
ports = (8089, DEFAULT_PORT + 1)
for port in ports:
try:
self._core.playme(ip, port, service)
break
except Exception as e:
exc_type, exc_obj, exc_tb = sys.exc_info()
fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)
logging.warn('{} {} {}'.format(exc_type,
fname[1],
exc_tb.tb_lineno))
else:
msg = ('Connection Error\n'
' - IP: {}\n'
' - Port: {}\n'
' - Service: {}').format(ip, ports, service)
wx.MessageBox(msg, APPNAME,
style=wx.OK | wx.CENTRE | wx.ICON_ERROR)
def OnStreamReceiver(self, data):
if data[0] != StreamReceiver.EVENT_ASK_TO_PLAY:
logging.warn('Unknown event: {}'.format(data))
return
dlg = wx.MessageDialog(self,
('Stream Request. Accept?'),
APPNAME,
wx.YES_NO | wx.NO_DEFAULT | wx.ICON_QUESTION)
if dlg.ShowModal() == wx.ID_YES:
if CrossPlatform.get().is_linux():
cmdline = ['ffplay', data[1]]
Popen(cmdline)
else:
startupinfo = sb.STARTUPINFO()
startupinfo.dwFlags |= sb.STARTF_USESHOWWINDOW
startupinfo.wShowWindow = 0
cmdline = ['ffplay', data[1] + ' live=1']
Popen(cmdline, startupinfo=startupinfo)
def handler(self, evt):
logging.debug('UI event {0}: {1}'.format(evt.attr1, evt.attr2))
dispatch = {'avahi': self.OnAvahi,
'selection': self.OnSelection,
'server': self.OnStreamServer,
'srx': self.OnStreamReceiver}
if evt.attr1 in dispatch:
dispatch[evt.attr1](evt.attr2)
def InitUI(self):
def titleBox(hide=True):
font = wx.SystemSettings_GetFont(wx.SYS_SYSTEM_FONT)
font.SetPointSize(16)
hbox = wx.BoxSizer(wx.HORIZONTAL)
text1 = wx.StaticText(panel, label="Desktop Mirror")
text1.SetFont(font)
hbox.Add(text1, flag=wx.TOP | wx.LEFT | wx.BOTTOM, border=15)
#hbox = wx.BoxSizer(wx.HORIZONTAL)
#line = wx.StaticLine(panel)
#hbox.Add(line, 1, flag=wx.EXPAND | wx.ALL, border=10)
#vbox.Add(hbox, 1, wx.ALL, 5)
if hide:
map(lambda w: w.Hide(),
[w.GetWindow() for w in hbox.GetChildren()
if w.GetWindow() is not None])
return hbox
def targetBox():
hbox = wx.BoxSizer(wx.HORIZONTAL)
#hbox.Add(wx.StaticText(panel, label="Target"),
# flag=wx.ALL | wx.ALIGN_CENTER_VERTICAL, border=5)
cb = wx.ComboBox(panel, 500, "127.0.0.1",
style=wx.CB_DROPDOWN | wx.TE_PROCESS_ENTER
)
cb.SetMinSize((250, 0))
button1 = wx.Button(panel, label="Streaming")
hbox.Add(cb, 1, flag=wx.EXPAND | wx.ALL | wx.ALIGN_RIGHT,
border=0)
hbox.Add(button1, 0, flag=wx.EXPAND | wx.LEFT | wx.ALIGN_RIGHT,
border=5)
self._input['address'] = cb
self._input_stream = button1
self.Bind(wx.EVT_COMBOBOX, self.OnTargetChosen, cb)
self.Bind(wx.EVT_TEXT, self.OnTargetKey, cb)
self.Bind(wx.EVT_TEXT_ENTER, self.OnTargetKeyEnter, cb)
self.Bind(wx.EVT_BUTTON, self.OnClickStream, button1)
return hbox
def geometryBox(hide=True):
sb = wx.StaticBox(panel, label="Geometry")
boxsizer = wx.StaticBoxSizer(sb, wx.VERTICAL)
hbox = wx.BoxSizer(wx.HORIZONTAL)
tc1 = wx.TextCtrl(panel)
tc2 = wx.TextCtrl(panel)
tc3 = wx.TextCtrl(panel)
tc4 = wx.TextCtrl(panel)
self._input['x'] = tc1
self._input['y'] = tc2
self._input['w'] = tc3
self._input['h'] = tc4
hbox.Add(wx.StaticText(panel, label="X"),
flag=wx.TOP | wx.LEFT | wx.BOTTOM, border=5)
hbox.AddSpacer(5)
hbox.Add(tc1, 1, flag=wx.EXPAND)
hbox.AddSpacer(10)
hbox.Add(wx.StaticText(panel, label="Y"),
flag=wx.TOP | wx.LEFT | wx.BOTTOM, border=5)
hbox.AddSpacer(5)
hbox.Add(tc2, 1, flag=wx.EXPAND)
hbox.AddSpacer(10)
hbox.Add(wx.StaticText(panel, label="W"),
flag=wx.TOP | wx.LEFT | wx.BOTTOM, border=5)
hbox.AddSpacer(5)
hbox.Add(tc3, 1, flag=wx.EXPAND)
hbox.AddSpacer(10)
hbox.Add(wx.StaticText(panel, label="H"),
flag=wx.TOP | wx.LEFT | wx.BOTTOM, border=5)
hbox.AddSpacer(5)
hbox.Add(tc4, 1, flag=wx.EXPAND)
boxsizer.Add(hbox, flag=wx.LEFT | wx.TOP | wx.EXPAND, border=5)
hbox2 = wx.BoxSizer(wx.HORIZONTAL)
button1 = wx.Button(panel, label="Select Area")
hbox2.Add(button1, 1,
flag=wx.EXPAND | wx.ALL | wx.ALIGN_RIGHT, border=15)
button2 = wx.Button(panel, label="Full Screen")
hbox2.Add(button2, 1, flag=wx.EXPAND | wx.ALL | wx.ALIGN_RIGHT,
border=15)
boxsizer.Add(hbox2, flag=wx.LEFT | wx.TOP | wx.EXPAND, border=5)
self.Bind(wx.EVT_BUTTON, self.OnClickSelectionArea, button1)
self.Bind(wx.EVT_BUTTON, self.OnClickFullScreen, button2)
if hide:
map(lambda w: w.Hide(),
[w.GetWindow() for w in hbox.GetChildren()
if w.GetWindow() is not None])
map(lambda w: w.Hide(),
[w.GetWindow() for w in hbox2.GetChildren()
if w.GetWindow() is not None])
sb.Hide()
return boxsizer
def videoBox(hide=True):
sb = wx.StaticBox(panel, label="Video")
boxsizer = wx.StaticBoxSizer(sb, wx.VERTICAL)
fgs = wx.FlexGridSizer(3, 2, 5, 25)
tc1 = wx.TextCtrl(panel)
tc2 = wx.TextCtrl(panel)
self._input['video_input'] = tc1
self._input['video_output'] = tc2
fgs.AddMany([(wx.StaticText(panel, label="input")),
(tc1, 1, wx.EXPAND),
(wx.StaticText(panel, label="output")),
(tc2, 1, wx.EXPAND)])
fgs.AddGrowableCol(1, 1)
boxsizer.Add(fgs, flag=wx.LEFT | wx.TOP | wx.EXPAND, border=5)
if hide:
map(lambda w: w.Hide(),
[w.GetWindow() for w in fgs.GetChildren()
if w.GetWindow() is not None])
sb.Hide()
return boxsizer
def audioBox(hide=True):
sb = wx.StaticBox(panel, label="Audio")
boxsizer = wx.StaticBoxSizer(sb, wx.VERTICAL)
fgs = wx.FlexGridSizer(3, 2, 5, 25)
tc1 = wx.TextCtrl(panel)
tc2 = wx.TextCtrl(panel)
self._input['audio_input'] = tc1
self._input['audio_output'] = tc2
fgs.AddMany([(wx.StaticText(panel, label="input")),
(tc1, 1, wx.EXPAND),
(wx.StaticText(panel, label="output")),
(tc2, 1, wx.EXPAND)])
fgs.AddGrowableCol(1, 1)
boxsizer.Add(fgs, flag=wx.LEFT | wx.TOP | wx.EXPAND, border=5)
if hide:
map(lambda w: w.Hide(),
[w.GetWindow() for w in fgs.GetChildren()
if w.GetWindow() is not None])
sb.Hide()
return boxsizer
def fullareaBox(hide=True):
hbox = wx.BoxSizer(wx.HORIZONTAL)
rb1 = wx.RadioButton(panel, -1, 'Fullscreen', style=wx.RB_GROUP)
hbox.Add(rb1, flag=wx.ALL | wx.ALIGN_CENTER_VERTICAL, border=5)
rb2 = wx.RadioButton(panel, -1, 'Area')
hbox.Add(rb2, 1, flag=wx.ALL | wx.ALIGN_CENTER_VERTICAL, border=5)
self._input_rb_fullscreen = rb1
self._input_rb_area = rb2
self.Bind(wx.EVT_RADIOBUTTON, self.OnClickFullArea, id=rb1.GetId())
self.Bind(wx.EVT_RADIOBUTTON, self.OnClickFullArea, id=rb2.GetId())
return hbox
panel = wx.Panel(self, -1)
hbox = wx.BoxSizer(wx.HORIZONTAL)
vboxL = wx.BoxSizer(wx.VERTICAL)
vboxR = wx.BoxSizer(wx.VERTICAL)
png = wx.Image(CrossPlatform.get().share_path('desktop-mirror-64.png'),
wx.BITMAP_TYPE_ANY).ConvertToBitmap()
image = wx.StaticBitmap(panel, -1, png, (0, 0),
(png.GetWidth(), png.GetHeight()))
vboxL.Add(image)
flags = wx.EXPAND
#vboxR.Add(titleBox(), 0, wx.ALL, 0)
vboxR.Add(targetBox(), 1, flag=flags | wx.TOP, border=10)
vboxR.Add(fullareaBox(), 0, flag=flags, border=10)
for fn in (titleBox, geometryBox, videoBox, audioBox):
fn()
#vboxR.Add(fn(), 0, flag=flags, border=10)
hbox.Add(vboxL, 0, wx.ALL | wx.EXPAND | wx.ALIGN_CENTER, 10)
hbox.Add(vboxR, 1, wx.ALL | wx.EXPAND | wx.ALIGN_CENTER, 10)
#self.statusbar = self.CreateStatusBar()
panel.SetAutoLayout(True)
panel.SetSizer(hbox)
panel.Layout()
panel.Fit()
self.Fit()
def InitUIFull(self):
def titleBox():
font = wx.SystemSettings_GetFont(wx.SYS_SYSTEM_FONT)
font.SetPointSize(16)
hbox = wx.BoxSizer(wx.HORIZONTAL)
text1 = wx.StaticText(panel, label="Desktop Mirror")
text1.SetFont(font)
hbox.Add(text1, flag=wx.TOP | wx.LEFT | wx.BOTTOM, border=15)
#hbox = wx.BoxSizer(wx.HORIZONTAL)
#line = wx.StaticLine(panel)
#hbox.Add(line, 1, flag=wx.EXPAND | wx.ALL, border=10)
#vbox.Add(hbox, 1, wx.ALL, 5)
return hbox
def targetBox():
hbox = wx.BoxSizer(wx.HORIZONTAL)
hbox.Add(wx.StaticText(panel, label="Target"),
flag=wx.ALL | wx.ALIGN_CENTER_VERTICAL, border=5)
cb = wx.ComboBox(panel, 500, "127.0.0.1",
style=wx.CB_DROPDOWN | wx.TE_PROCESS_ENTER
)
button1 = wx.Button(panel, label="Streaming")
hbox.Add(cb, 1, flag=wx.EXPAND | wx.ALL | wx.ALIGN_RIGHT,
border=15)
hbox.Add(button1, 0, flag=wx.EXPAND | wx.ALL | wx.ALIGN_RIGHT,
border=15)
self._input['address'] = cb
self._input_stream = button1
self.Bind(wx.EVT_COMBOBOX, self.OnTargetChosen, cb)
self.Bind(wx.EVT_TEXT, self.OnTargetKey, cb)
self.Bind(wx.EVT_TEXT_ENTER, self.OnTargetKeyEnter, cb)
self.Bind(wx.EVT_BUTTON, self.OnClickStream, button1)
return hbox
def geometryBox():
sb = wx.StaticBox(panel, label="Geometry")
boxsizer = wx.StaticBoxSizer(sb, wx.VERTICAL)
hbox = wx.BoxSizer(wx.HORIZONTAL)
tc1 = wx.TextCtrl(panel)
tc2 = wx.TextCtrl(panel)
tc3 = wx.TextCtrl(panel)
tc4 = wx.TextCtrl(panel)
self._input['x'] = tc1
self._input['y'] = tc2
self._input['w'] = tc3
self._input['h'] = tc4
hbox.Add(wx.StaticText(panel, label="X"),
flag=wx.TOP | wx.LEFT | wx.BOTTOM, border=5)
hbox.AddSpacer(5)
hbox.Add(tc1, 1, flag=wx.EXPAND)
hbox.AddSpacer(10)
hbox.Add(wx.StaticText(panel, label="Y"),
flag=wx.TOP | wx.LEFT | wx.BOTTOM, border=5)
hbox.AddSpacer(5)
hbox.Add(tc2, 1, flag=wx.EXPAND)
hbox.AddSpacer(10)
hbox.Add(wx.StaticText(panel, label="W"),
flag=wx.TOP | wx.LEFT | wx.BOTTOM, border=5)
hbox.AddSpacer(5)
hbox.Add(tc3, 1, flag=wx.EXPAND)
hbox.AddSpacer(10)
hbox.Add(wx.StaticText(panel, label="H"),
flag=wx.TOP | wx.LEFT | wx.BOTTOM, border=5)
hbox.AddSpacer(5)
hbox.Add(tc4, 1, flag=wx.EXPAND)
boxsizer.Add(hbox, flag=wx.LEFT | wx.TOP | wx.EXPAND, border=5)
hbox = wx.BoxSizer(wx.HORIZONTAL)
button1 = wx.Button(panel, label="Select Area")
hbox.Add(button1, 1,
flag=wx.EXPAND | wx.ALL | wx.ALIGN_RIGHT, border=15)
button2 = wx.Button(panel, label="Full Screen")
hbox.Add(button2, 1, flag=wx.EXPAND | wx.ALL | wx.ALIGN_RIGHT,
border=15)
boxsizer.Add(hbox, flag=wx.LEFT | wx.TOP | wx.EXPAND, border=5)
self.Bind(wx.EVT_BUTTON, self.OnClickSelectionArea, button1)
self.Bind(wx.EVT_BUTTON, self.OnClickFullScreen, button2)
return boxsizer
def videoBox():
sb = wx.StaticBox(panel, label="Video")
boxsizer = wx.StaticBoxSizer(sb, wx.VERTICAL)
fgs = wx.FlexGridSizer(3, 2, 5, 25)
tc1 = wx.TextCtrl(panel)
tc2 = wx.TextCtrl(panel)
self._input['video_input'] = tc1
self._input['video_output'] = tc2
fgs.AddMany([(wx.StaticText(panel, label="input")),
(tc1, 1, wx.EXPAND),
(wx.StaticText(panel, label="output")),
(tc2, 1, wx.EXPAND)])
fgs.AddGrowableCol(1, 1)
boxsizer.Add(fgs, flag=wx.LEFT | wx.TOP | wx.EXPAND, border=5)
#fgs.GetContainingWindow().Hide()
map(lambda w: w.Hide(), [w.GetWindow() for w in fgs.GetChildren()
if w.GetWindow() is not None])
sb.Hide()
return boxsizer
def audioBox():
sb = wx.StaticBox(panel, label="Audio")
boxsizer = wx.StaticBoxSizer(sb, wx.VERTICAL)
fgs = wx.FlexGridSizer(3, 2, 5, 25)
tc1 = wx.TextCtrl(panel)
tc2 = wx.TextCtrl(panel)
self._input['audio_input'] = tc1
self._input['audio_output'] = tc2
fgs.AddMany([(wx.StaticText(panel, label="input")),
(tc1, 1, wx.EXPAND),
(wx.StaticText(panel, label="output")),
(tc2, 1, wx.EXPAND)])
fgs.AddGrowableCol(1, 1)
boxsizer.Add(fgs, flag=wx.LEFT | wx.TOP | wx.EXPAND, border=5)
map(lambda w: w.Hide(), [w.GetWindow() for w in fgs.GetChildren()
if w.GetWindow() is not None])
sb.Hide()
return boxsizer
panel = wx.Panel(self, -1)
vbox = wx.BoxSizer(wx.VERTICAL)
flags = wx.EXPAND | wx.TOP | wx.LEFT | wx.RIGHT
vbox.Add(titleBox(), 0, wx.ALL, 0)
vbox.Add(targetBox(), 0, flag=flags, border=10)
vbox.Add(geometryBox(), 0, flag=flags, border=10)
vbox.Add(videoBox(), 0, flag=flags, border=10)
vbox.Add(audioBox(), 0, flag=flags, border=10)
vbox.AddSpacer(10)
self.statusbar = self.CreateStatusBar()
panel.SetAutoLayout(True)
panel.SetSizer(vbox)
panel.Layout()
panel.Fit()
self.Fit()
def StartStreamServer(self):
def guess_target():
target = None
cb = self._input['address']
hostname = cb.GetValue()
m = re.search('^(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})(:\d{1,5})?$',
hostname)
if target is None and m is not None:
if m.group(2) is not None:
port = m.group(2)[1:]
service = '_xbmc-web._tcp'
if int(port) >= int(DEFAULT_PORT):
service = '_desktop-mirror._tcp'
else:
port = DEFAULT_PORT + 1
service = 'auto'
return {'ip': m.group(1), 'port': port,
'service': service}
for i in xrange(0, cb.GetCount()):
if hostname != cb.GetString(i):
continue
data = cb.GetClientData(i)
return {'ip': data['ip'],
'port': data['port'],
'service': data['service']}
return target
core = self._core
if not hasattr(self, '_target') or self._target is None:
self._target = guess_target()
if self._target is None:
return False
inp = self._input
core.stream_server_start(video_input=inp['video_input'].GetValue(),
audio_input=inp['audio_input'].GetValue(),
video_output=inp['video_output'].GetValue(),
audio_output=inp['audio_output'].GetValue(),
x=inp['x'].GetValue(),
y=inp['y'].GetValue(),
w=inp['w'].GetValue(),
h=inp['h'].GetValue(),
ip=self._target['ip'],
service=self._target['service'])
return True
def OnCloseWindow(self, event):
self.ConfigSave()
self.Destroy()
logging.debug('Quit UiAdvanced')
def OnTargetChosen(self, evt):
cb = evt.GetEventObject()
data = cb.GetClientData(evt.GetSelection())
self._target = {'ip': data['ip'], 'port': data['port'],
'service': data['service']}
logging.info('OnTargetChosen: {} ClientData: {}'.format(
evt.GetString(), data))
self._target_chosen_cache = evt.GetString()
self._input_stream.Enable(True)
def OnTargetKey(self, evt):
logging.info('OnTargetKey: %s' % evt.GetString())
if hasattr(self, '_target_chosen_cache') and \
self._target_chosen_cache == evt.GetString():
return
self._target = None
m = re.search('^(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})(:\d{1,5})?$',
evt.GetString())
try:
if m.group(2) is not None:
port = m.group(2)[1:]
service = '_xbmc-web._tcp'
if int(port) >= int(DEFAULT_PORT):
service = '_desktop-mirror._tcp'
else:
port = DEFAULT_PORT + 1
service = 'auto'
self._target = {'ip': m.group(1), 'port': port,
'service': service}
self._input_stream.Enable(True)
evt.Skip()
except:
## Not available in 2.8
# evt.GetEventObject().SetBackgroundColour(wx.Colour(255, 0, 0,
#128));
self._input_stream.Enable(False)
            evt.Skip()
return
def OnTargetKeyEnter(self, evt):
logging.info('OnTargetKeyEnter: %s' % evt.GetString())
evt.Skip()
def OnClickSelectionArea(self, evt):
self._core.launch_selection_area_process()
def OnClickStream(self, evt):
core = self._core
obj = evt.GetEventObject()
if core.is_streaming():
core.stream_server_stop()
obj.SetLabel('Stream')
return
if self.StartStreamServer():
obj.SetLabel('Stop')
else:
cb = self._input['address']
wx.MessageBox('{} is down'.format(cb.GetValue()),
APPNAME,
style=wx.OK | wx.CENTRE | wx.ICON_ERROR)
def OnClickFullScreen(self, evt):
geometry = wx.Display().GetGeometry()
self._input['x'].SetValue(str(geometry[0]))
self._input['y'].SetValue(str(geometry[1]))
self._input['w'].SetValue(str(geometry[2]))
self._input['h'].SetValue(str(geometry[3]))
def OnClickFullArea(self, evt):
logging.debug('Event: {}'.format(evt.GetId()))
if evt.GetId() == self._input_rb_fullscreen.GetId():
self.OnClickFullScreen(evt)
else:
self.OnClickSelectionArea(evt)
def sync(func):
def wrapper(*args, **kv):
self = args[0]
self._lock.acquire()
try:
return func(*args, **kv)
finally:
self._lock.release()
return wrapper
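# Usage sketch: `sync` serializes calls through the instance's `_lock`, so it
# can wrap any method of a class that exposes one (as CoreEventHandler does
# below); the Counter class is purely illustrative:
#   class Counter(object):
#       def __init__(self):
#           self._lock = Lock()
#           self.n = 0
#       @sync
#       def bump(self):
#           self.n += 1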
class Core(Thread):
def __init__(self, args, extra_args):
Thread.__init__(self)
self._args = args
self._extra_args = extra_args
self._threads = []
self._event_handler = CoreEventHandler()
if CrossPlatform.get().is_linux():
signal.signal(signal.SIGCHLD, self.signal_handler)
signal.signal(signal.SIGTERM, self.signal_handler)
def is_streaming(self):
if hasattr(self, '_stream_server') and self._stream_server is not None:
return True
return False
def stream_server_start(self, *args, **kargs):
if self.is_streaming():
return
logging.info('StreamServer start: {}'.format(kargs))
self._stream_server = StreamServer(kargs, lambda data:
self.handler('server', data))
self._stream_server.start()
def stream_server_stop(self):
if hasattr(self, '_stream_server') and self._stream_server is not None:
self._stream_server.stop()
self._stream_server = None
def playme(self, remote_ip, remote_port, service):
def myip(remote_ip):
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect((remote_ip, 0))
return s.getsockname()[0]
def xbmc():
stream_url = self._stream_server.url.format(ip=myip(remote_ip))
url = 'http://{}:{}/xbmcCmds/xbmcHttp?command=PlayFile({})'.format(
remote_ip, remote_port, stream_url)
req = urllib2.Request(url)
logging.info('url = {}'.format(url))
response = urllib2.urlopen(req, None, 5)
result = response.read()
logging.info('result: {}'.format(result))
def desktop_mirror():
stream_url = self._stream_server.url.format(ip=myip(remote_ip))
data_as_json = json.dumps({'method': 'Player.Open',
'id': 1, 'jsonrpc': '2.0',
'params': {'item': {'file': stream_url}}}
)
url = 'http://{}:{}/jsonrpc'.format(remote_ip, remote_port)
logging.info('url = {}'.format(url))
logging.info(' json = {}'.format(data_as_json))
req = urllib2.Request(url, data_as_json,
{'Content-Type': 'application/json'})
response = urllib2.urlopen(req, None, 5)
result = response.read()
logging.info('result: {}'.format(result))
result = json.loads(result)
#switch back to json with pretty format
logging.debug(json.dumps(result, indent=4))
#logging.info('Got streaming url: {}'.
# format(self._stream_server.url))
#if service == '_desktop-mirror._tcp':
# desktop_mirror()
#else:
# xbmc()
desktop_mirror()
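        # Illustrative JSON-RPC body produced by desktop_mirror() above; the
        # stream URL is a placeholder for self._stream_server.url:
        #   {"jsonrpc": "2.0", "id": 1, "method": "Player.Open",
        #    "params": {"item": {"file": "http://<my-ip>:<port>/<stream>"}}}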
@property
def targets(self):
if not hasattr(self, '_avahi_browse'):
return dict()
return self._avahi_browse.targets
@property
def hosts(self):
if not hasattr(self, '_avahi_browse'):
return dict()
return self._avahi_browse.hosts
def run(self):
self._avahi_browse = AvahiService(lambda data:
self.handler('avahi', data))
self._stream_recever = StreamReceiver(lambda data:
self.handler('srx', data))
self._threads.append(self._avahi_browse)
self._threads.append(self._stream_recever)
for thread in self._threads:
thread.start()
for thread in self._threads:
thread.join()
def stop(self):
for thread in self._threads:
logging.debug('Stopping thread - {}'.format(thread.name))
thread.stop()
self.stream_server_stop()
def launch_selection_area_process(self):
SelectionArea(lambda data:
self.handler('selection', data)).start()
def register_listener(self, ui_window):
self._event_handler.register_listener(ui_window)
def on_event_relay(self, event_name, data):
self._event_handler.on_event_relay(event_name, data)
def on_event_stream_ready(self, event_name, data):
self._event_handler.on_event_stream_ready(event_name, data)
def handler(self, obj_id, data):
self._event_handler.handler(obj_id, data)
def signal_handler(self, signum, frame):
logging.info('signal: ' + str(signum))
if signal.SIGTERM == signum:
self.send_form_destroy()
try:
if CrossPlatform.get().is_linux():
if signal.SIGCHLD == signum:
os.waitpid(-1, os.WNOHANG)
except OSError:
pass
class CoreEventHandler(object):
def __init__(self):
self._lock = Lock()
self._listener = []
def register_listener(self, ui_window):
if ui_window not in self._listener:
self._listener.append(ui_window)
def on_event_relay(self, event_name, data):
evt = SomeNewEvent(attr1=event_name, attr2=data)
for listener in self._listener:
wx.PostEvent(listener, evt)
def on_event_stream_ready(self, event_name, data):
self.on_event_relay(event_name, data)
@sync
def handler(self, obj_id, data):
dispatch_map = {'avahi': self.on_event_relay,
'selection': self.on_event_relay,
'server': self.on_event_stream_ready,
'srx': self.on_event_relay}
if obj_id in dispatch_map:
dispatch_map[obj_id](obj_id, data)
return
        logging.error('event not processed: ' + obj_id)
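# Event-flow sketch (inferred from the code above; names are from this file):
# a worker thread calls Core.handler('avahi', data); CoreEventHandler looks
# the source id up in dispatch_map and relays it through on_event_relay(),
# which posts a wx event to every registered listener window:
#
#     core.register_listener(main_window)
#     core.handler('avahi', {'event': 'added'})
#     # -> wx.PostEvent(main_window, SomeNewEvent(attr1='avahi', attr2=...))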
class SelectionAreaExternalProgram(Thread):
def __init__(self, callback):
Thread.__init__(self)
self._callback = callback
def run(self):
if os.path.isfile('lib/areachooser.py') and \
os.access('lib/areachooser.py', os.X_OK):
execution = 'lib/areachooser.py'
else:
execution = 'areachooser.py'
cmd = Command(execution + ' "%x %y %w %h"', True, True).run()
line = cmd.stdout.split()
self._callback(line[0:4])
class SelectionArea(object):
def __init__(self, callback):
#Thread.__init__(self)
self._callback = callback
def run(self):
frame = FrmAreaChooser(None, -1, 'Live Area', self._callback)
frame.Show(True)
frame.SetTransparent(100)
frame.Center()
def start(self):
self.run()
class MyArgumentParser(object):
"""Command-line argument parser
"""
def __init__(self):
"""Create parser object
"""
        description = 'IBS command line interface.'
        epilog = ''
parser = ArgumentParser(description=description, epilog=epilog)
log_levels = ['notset', 'debug', 'info',
'warning', 'error', 'critical']
parser.add_argument('--log-level', dest='log_level_str',
default='info', choices=log_levels,
help=('Log level. '
'One of {0} or {1} (%(default)s by default)'
.format(', '.join(log_levels[:-1]),
log_levels[-1])))
parser.add_argument('--log-dir', dest='log_dir',
default=os.path.join(wx.StandardPaths_Get().
GetTempDir()),
help=('Path to the directory to store log files'))
parser.add_argument('-z', '--zsync-input', dest='zsync_file',
default=None,
help=('file path of zsync input path'))
parser.add_argument('-g', '--osd', action='store_true', dest="osd",
default=False,
help=('show OSD notify during monitor'))
# Append to log on subsequent startups
parser.add_argument('--append', action='store_true',
default=False, help=SUPPRESS)
self.parser = parser
def parse(self):
"""Parse command-line arguments
"""
args, extra_args = self.parser.parse_known_args()
args.log_level = getattr(logging, args.log_level_str.upper())
        # Compose the log file path: one log file per application name
        # under the chosen log directory
args.log_filename = os.path.join(args.log_dir,
('{0}.log'
.format(APPNAME)))
return args, extra_args
def main():
app = wx.App(redirect=False)
app.SetAppName(APPNAME)
args, extra_args = MyArgumentParser().parse()
LoggingConfiguration.set(args.log_level, args.log_filename, args.append)
logging.debug('Arguments: {0!r}'.format(args))
logging.debug('Extra Arguments: {0!r}'.format(extra_args))
sys.stdout = open(CrossPlatform.get().user_config_path('stdout.log'), 'w')
sys.stderr = open(CrossPlatform.get().user_config_path('stderr.log'), 'w')
core = Core(args, extra_args)
try:
core.start()
UiAdvanced(None, title="Desktop Mirror - " + VERSION, core=core)
app.MainLoop()
except KeyboardInterrupt:
logging.info('^c')
    except Exception:
        import traceback
        exc_type, exc_value, exc_traceback = sys.exc_info()
        lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
        logging.warning(''.join('!! ' + line for line in lines))
finally:
core.stop()
core.join(5)
if __name__ == '__main__':
main()
|
bx5974/desktop-mirror
|
lib/advanced.py
|
Python
|
apache-2.0
| 35,589 | 0.001264 |
"""Basic quilt-like functionality
"""
__copyright__ = """
Copyright (C) 2005, Catalin Marinas <catalin.marinas@gmail.com>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License version 2 as
published by the Free Software Foundation.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
import sys, os, re
from email.Utils import formatdate
from stgit.exception import *
from stgit.utils import *
from stgit.out import *
from stgit.run import *
from stgit import git, basedir, templates
from stgit.config import config
from shutil import copyfile
from stgit.lib import git as libgit, stackupgrade
# stack exception class
class StackException(StgException):
pass
class FilterUntil:
def __init__(self):
self.should_print = True
def __call__(self, x, until_test, prefix):
if until_test(x):
self.should_print = False
if self.should_print:
return x[0:len(prefix)] != prefix
return False
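# Behaviour sketch (not in the original source): FilterUntil keeps lines that
# do not start with ``prefix`` until ``until_test`` first matches; from the
# matching line onwards everything is dropped:
#
#     flt = FilterUntil()
#     until = lambda l: l == 'STG_PATCH:\n'
#     lines = ['keep me\n', 'STG: note\n', 'STG_PATCH:\n', 'diff --git ...\n']
#     [l for l in lines if flt(l, until, 'STG:')]   # -> ['keep me\n']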
#
# Functions
#
__comment_prefix = 'STG:'
__patch_prefix = 'STG_PATCH:'
def __clean_comments(f):
"""Removes lines marked for status in a commit file
"""
f.seek(0)
# remove status-prefixed lines
lines = f.readlines()
patch_filter = FilterUntil()
until_test = lambda t: t == (__patch_prefix + '\n')
lines = [l for l in lines if patch_filter(l, until_test, __comment_prefix)]
# remove empty lines at the end
while len(lines) != 0 and lines[-1] == '\n':
del lines[-1]
f.seek(0); f.truncate()
f.writelines(lines)
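# For example (assumed behaviour, matching the code above), a commit file
# containing
#
#     My patch description
#     STG: Lines prefixed with "STG:" will be automatically removed.
#     STG_PATCH:
#     diff --git a/foo b/foo
#
# is rewritten in place to just 'My patch description', with any trailing
# blank lines stripped as well.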
# TODO: move this out of the stgit.stack module, it is really for
# higher level commands to handle the user interaction
def edit_file(series, line, comment, show_patch = True):
fname = '.stgitmsg.txt'
tmpl = templates.get_template('patchdescr.tmpl')
f = file(fname, 'w+')
if line:
print >> f, line
elif tmpl:
print >> f, tmpl,
else:
print >> f
print >> f, __comment_prefix, comment
print >> f, __comment_prefix, \
'Lines prefixed with "%s" will be automatically removed.' \
% __comment_prefix
print >> f, __comment_prefix, \
'Trailing empty lines will be automatically removed.'
if show_patch:
print >> f, __patch_prefix
# series.get_patch(series.get_current()).get_top()
diff_str = git.diff(rev1 = series.get_patch(series.get_current()).get_bottom())
f.write(diff_str)
#Vim modeline must be near the end.
print >> f, __comment_prefix, 'vi: set textwidth=75 filetype=diff nobackup:'
f.close()
call_editor(fname)
f = file(fname, 'r+')
__clean_comments(f)
f.seek(0)
result = f.read()
f.close()
os.remove(fname)
return result
#
# Classes
#
class StgitObject:
"""An object with stgit-like properties stored as files in a directory
"""
def _set_dir(self, dir):
self.__dir = dir
def _dir(self):
return self.__dir
def create_empty_field(self, name):
create_empty_file(os.path.join(self.__dir, name))
def _get_field(self, name, multiline = False):
id_file = os.path.join(self.__dir, name)
if os.path.isfile(id_file):
line = read_string(id_file, multiline)
if line == '':
return None
else:
return line
else:
return None
def _set_field(self, name, value, multiline = False):
fname = os.path.join(self.__dir, name)
if value and value != '':
write_string(fname, value, multiline)
elif os.path.isfile(fname):
os.remove(fname)
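# Storage sketch (not in the original source): each field lives in a plain
# file named after the field inside the object's directory, so for a Patch p:
#
#     p._set_field('authname', 'Jane Doe')   # writes <dir>/authname
#     p._get_field('authname')               # -> 'Jane Doe'
#     p._set_field('authname', None)         # removes <dir>/authname
#     p._get_field('authname')               # -> None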
class Patch(StgitObject):
"""Basic patch implementation
"""
def __init_refs(self):
self.__top_ref = self.__refs_base + '/' + self.__name
self.__log_ref = self.__top_ref + '.log'
def __init__(self, name, series_dir, refs_base):
self.__series_dir = series_dir
self.__name = name
self._set_dir(os.path.join(self.__series_dir, self.__name))
self.__refs_base = refs_base
self.__init_refs()
def create(self):
os.mkdir(self._dir())
def delete(self, keep_log = False):
if os.path.isdir(self._dir()):
for f in os.listdir(self._dir()):
os.remove(os.path.join(self._dir(), f))
os.rmdir(self._dir())
else:
out.warn('Patch directory "%s" does not exist' % self._dir())
try:
# the reference might not exist if the repository was corrupted
git.delete_ref(self.__top_ref)
except git.GitException, e:
out.warn(str(e))
if not keep_log and git.ref_exists(self.__log_ref):
git.delete_ref(self.__log_ref)
def get_name(self):
return self.__name
def rename(self, newname):
olddir = self._dir()
old_top_ref = self.__top_ref
old_log_ref = self.__log_ref
self.__name = newname
self._set_dir(os.path.join(self.__series_dir, self.__name))
self.__init_refs()
git.rename_ref(old_top_ref, self.__top_ref)
if git.ref_exists(old_log_ref):
git.rename_ref(old_log_ref, self.__log_ref)
os.rename(olddir, self._dir())
def __update_top_ref(self, ref):
git.set_ref(self.__top_ref, ref)
self._set_field('top', ref)
self._set_field('bottom', git.get_commit(ref).get_parent())
def __update_log_ref(self, ref):
git.set_ref(self.__log_ref, ref)
def get_old_bottom(self):
return git.get_commit(self.get_old_top()).get_parent()
def get_bottom(self):
return git.get_commit(self.get_top()).get_parent()
def get_old_top(self):
return self._get_field('top.old')
def get_top(self):
return git.rev_parse(self.__top_ref)
def set_top(self, value, backup = False):
if backup:
curr_top = self.get_top()
self._set_field('top.old', curr_top)
self._set_field('bottom.old', git.get_commit(curr_top).get_parent())
self.__update_top_ref(value)
def restore_old_boundaries(self):
top = self._get_field('top.old')
if top:
self.__update_top_ref(top)
return True
else:
return False
def get_description(self):
return self._get_field('description', True)
def set_description(self, line):
self._set_field('description', line, True)
def get_authname(self):
return self._get_field('authname')
def set_authname(self, name):
self._set_field('authname', name or git.author().name)
def get_authemail(self):
return self._get_field('authemail')
def set_authemail(self, email):
self._set_field('authemail', email or git.author().email)
def get_authdate(self):
date = self._get_field('authdate')
if not date:
return date
        if re.match(r'[0-9]+\s+[+-][0-9]+', date):
# Unix time (seconds) + time zone
secs_tz = date.split()
date = formatdate(int(secs_tz[0]))[:-5] + secs_tz[1]
return date
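    # Example of the conversion above (illustrative): an 'authdate' stored as
    # '1234567890 -0800' becomes formatdate(1234567890)[:-5] + '-0800', i.e.
    # an RFC 2822 date string carrying the original time zone suffix.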
def set_authdate(self, date):
self._set_field('authdate', date or git.author().date)
def get_commname(self):
return self._get_field('commname')
def set_commname(self, name):
self._set_field('commname', name or git.committer().name)
def get_commemail(self):
return self._get_field('commemail')
def set_commemail(self, email):
self._set_field('commemail', email or git.committer().email)
def get_log(self):
return self._get_field('log')
def set_log(self, value, backup = False):
self._set_field('log', value)
self.__update_log_ref(value)
class PatchSet(StgitObject):
def __init__(self, name = None):
try:
if name:
self.set_name (name)
else:
self.set_name (git.get_head_file())
self.__base_dir = basedir.get()
except git.GitException, ex:
raise StackException, 'GIT tree not initialised: %s' % ex
self._set_dir(os.path.join(self.__base_dir, 'patches', self.get_name()))
def get_name(self):
return self.__name
def set_name(self, name):
self.__name = name
def _basedir(self):
return self.__base_dir
def get_head(self):
"""Return the head of the branch
"""
crt = self.get_current_patch()
if crt:
return crt.get_top()
else:
return self.get_base()
def get_protected(self):
return os.path.isfile(os.path.join(self._dir(), 'protected'))
def protect(self):
protect_file = os.path.join(self._dir(), 'protected')
if not os.path.isfile(protect_file):
create_empty_file(protect_file)
def unprotect(self):
protect_file = os.path.join(self._dir(), 'protected')
if os.path.isfile(protect_file):
os.remove(protect_file)
def __branch_descr(self):
return 'branch.%s.description' % self.get_name()
def get_description(self):
return config.get(self.__branch_descr()) or ''
def set_description(self, line):
if line:
config.set(self.__branch_descr(), line)
else:
config.unset(self.__branch_descr())
def head_top_equal(self):
"""Return true if the head and the top are the same
"""
crt = self.get_current_patch()
if not crt:
# we don't care, no patches applied
return True
return git.get_head() == crt.get_top()
def is_initialised(self):
"""Checks if series is already initialised
"""
        return config.get(stackupgrade.format_version_key(self.get_name())
                          ) is not None
def shortlog(patches):
log = ''.join(Run('git', 'log', '--pretty=short',
p.get_top(), '^%s' % p.get_bottom()).raw_output()
for p in patches)
return Run('git', 'shortlog').raw_input(log).raw_output()
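# Usage sketch (assumption): given Patch objects, shortlog() concatenates
# 'git log --pretty=short <top> ^<bottom>' for each patch and pipes the
# result through 'git shortlog' to summarize the commits per author:
#
#     series = Series()
#     print shortlog([series.get_patch(n) for n in series.get_applied()])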
class Series(PatchSet):
"""Class including the operations on series
"""
def __init__(self, name = None):
"""Takes a series name as the parameter.
"""
PatchSet.__init__(self, name)
# Update the branch to the latest format version if it is
# initialized, but don't touch it if it isn't.
stackupgrade.update_to_current_format_version(
libgit.Repository.default(), self.get_name())
self.__refs_base = 'refs/patches/%s' % self.get_name()
self.__applied_file = os.path.join(self._dir(), 'applied')
self.__unapplied_file = os.path.join(self._dir(), 'unapplied')
self.__hidden_file = os.path.join(self._dir(), 'hidden')
# where this series keeps its patches
self.__patch_dir = os.path.join(self._dir(), 'patches')
# trash directory
self.__trash_dir = os.path.join(self._dir(), 'trash')
def __patch_name_valid(self, name):
"""Raise an exception if the patch name is not valid.
"""
        if not name or re.search(r'[^\w.-]', name):
raise StackException, 'Invalid patch name: "%s"' % name
def get_patch(self, name):
"""Return a Patch object for the given name
"""
return Patch(name, self.__patch_dir, self.__refs_base)
def get_current_patch(self):
"""Return a Patch object representing the topmost patch, or
None if there is no such patch."""
crt = self.get_current()
if not crt:
return None
return self.get_patch(crt)
def get_current(self):
"""Return the name of the topmost patch, or None if there is
no such patch."""
try:
applied = self.get_applied()
except StackException:
# No "applied" file: branch is not initialized.
return None
try:
return applied[-1]
except IndexError:
# No patches applied.
return None
def get_applied(self):
if not os.path.isfile(self.__applied_file):
raise StackException, 'Branch "%s" not initialised' % self.get_name()
return read_strings(self.__applied_file)
def set_applied(self, applied):
write_strings(self.__applied_file, applied)
def get_unapplied(self):
if not os.path.isfile(self.__unapplied_file):
raise StackException, 'Branch "%s" not initialised' % self.get_name()
return read_strings(self.__unapplied_file)
def set_unapplied(self, unapplied):
write_strings(self.__unapplied_file, unapplied)
def get_hidden(self):
if not os.path.isfile(self.__hidden_file):
return []
return read_strings(self.__hidden_file)
def get_base(self):
# Return the parent of the bottommost patch, if there is one.
if os.path.isfile(self.__applied_file):
bottommost = file(self.__applied_file).readline().strip()
if bottommost:
return self.get_patch(bottommost).get_bottom()
# No bottommost patch, so just return HEAD
return git.get_head()
def get_parent_remote(self):
value = config.get('branch.%s.remote' % self.get_name())
if value:
return value
elif 'origin' in git.remotes_list():
out.note(('No parent remote declared for stack "%s",'
' defaulting to "origin".' % self.get_name()),
('Consider setting "branch.%s.remote" and'
' "branch.%s.merge" with "git config".'
% (self.get_name(), self.get_name())))
return 'origin'
else:
raise StackException, 'Cannot find a parent remote for "%s"' % self.get_name()
    def __set_parent_remote(self, remote):
        config.set('branch.%s.remote' % self.get_name(), remote)
def get_parent_branch(self):
value = config.get('branch.%s.stgit.parentbranch' % self.get_name())
if value:
return value
elif git.rev_parse('heads/origin'):
out.note(('No parent branch declared for stack "%s",'
' defaulting to "heads/origin".' % self.get_name()),
('Consider setting "branch.%s.stgit.parentbranch"'
' with "git config".' % self.get_name()))
return 'heads/origin'
else:
raise StackException, 'Cannot find a parent branch for "%s"' % self.get_name()
def __set_parent_branch(self, name):
if config.get('branch.%s.remote' % self.get_name()):
# Never set merge if remote is not set to avoid
# possibly-erroneous lookups into 'origin'
config.set('branch.%s.merge' % self.get_name(), name)
config.set('branch.%s.stgit.parentbranch' % self.get_name(), name)
def set_parent(self, remote, localbranch):
if localbranch:
if remote:
self.__set_parent_remote(remote)
self.__set_parent_branch(localbranch)
# We'll enforce this later
# else:
# raise StackException, 'Parent branch (%s) should be specified for %s' % localbranch, self.get_name()
def __patch_is_current(self, patch):
return patch.get_name() == self.get_current()
def patch_applied(self, name):
"""Return true if the patch exists in the applied list
"""
return name in self.get_applied()
def patch_unapplied(self, name):
"""Return true if the patch exists in the unapplied list
"""
return name in self.get_unapplied()
def patch_hidden(self, name):
"""Return true if the patch is hidden.
"""
return name in self.get_hidden()
def patch_exists(self, name):
"""Return true if there is a patch with the given name, false
otherwise."""
return self.patch_applied(name) or self.patch_unapplied(name) \
or self.patch_hidden(name)
def init(self, create_at=False, parent_remote=None, parent_branch=None):
"""Initialises the stgit series
"""
if self.is_initialised():
raise StackException, '%s already initialized' % self.get_name()
for d in [self._dir()]:
if os.path.exists(d):
raise StackException, '%s already exists' % d
        if create_at != False:
git.create_branch(self.get_name(), create_at)
os.makedirs(self.__patch_dir)
self.set_parent(parent_remote, parent_branch)
self.create_empty_field('applied')
self.create_empty_field('unapplied')
config.set(stackupgrade.format_version_key(self.get_name()),
str(stackupgrade.FORMAT_VERSION))
def rename(self, to_name):
"""Renames a series
"""
to_stack = Series(to_name)
if to_stack.is_initialised():
raise StackException, '"%s" already exists' % to_stack.get_name()
patches = self.get_applied() + self.get_unapplied()
git.rename_branch(self.get_name(), to_name)
for patch in patches:
git.rename_ref('refs/patches/%s/%s' % (self.get_name(), patch),
'refs/patches/%s/%s' % (to_name, patch))
git.rename_ref('refs/patches/%s/%s.log' % (self.get_name(), patch),
'refs/patches/%s/%s.log' % (to_name, patch))
if os.path.isdir(self._dir()):
rename(os.path.join(self._basedir(), 'patches'),
self.get_name(), to_stack.get_name())
# Rename the config section
for k in ['branch.%s', 'branch.%s.stgit']:
config.rename_section(k % self.get_name(), k % to_name)
self.__init__(to_name)
def clone(self, target_series):
"""Clones a series
"""
try:
# allow cloning of branches not under StGIT control
base = self.get_base()
except:
base = git.get_head()
Series(target_series).init(create_at = base)
new_series = Series(target_series)
# generate an artificial description file
new_series.set_description('clone of "%s"' % self.get_name())
# clone self's entire series as unapplied patches
try:
# allow cloning of branches not under StGIT control
applied = self.get_applied()
unapplied = self.get_unapplied()
patches = applied + unapplied
patches.reverse()
except:
patches = applied = unapplied = []
for p in patches:
patch = self.get_patch(p)
newpatch = new_series.new_patch(p, message = patch.get_description(),
can_edit = False, unapplied = True,
bottom = patch.get_bottom(),
top = patch.get_top(),
author_name = patch.get_authname(),
author_email = patch.get_authemail(),
author_date = patch.get_authdate())
if patch.get_log():
out.info('Setting log to %s' % patch.get_log())
newpatch.set_log(patch.get_log())
else:
out.info('No log for %s' % p)
# fast forward the cloned series to self's top
new_series.forward_patches(applied)
        # Clone parent information
value = config.get('branch.%s.remote' % self.get_name())
if value:
config.set('branch.%s.remote' % target_series, value)
value = config.get('branch.%s.merge' % self.get_name())
if value:
config.set('branch.%s.merge' % target_series, value)
value = config.get('branch.%s.stgit.parentbranch' % self.get_name())
if value:
config.set('branch.%s.stgit.parentbranch' % target_series, value)
def delete(self, force = False):
"""Deletes an stgit series
"""
if self.is_initialised():
            patches = self.get_unapplied() + self.get_applied() + \
                      self.get_hidden()
if not force and patches:
raise StackException, \
'Cannot delete: the series still contains patches'
for p in patches:
self.get_patch(p).delete()
# remove the trash directory if any
if os.path.exists(self.__trash_dir):
for fname in os.listdir(self.__trash_dir):
os.remove(os.path.join(self.__trash_dir, fname))
os.rmdir(self.__trash_dir)
# FIXME: find a way to get rid of those manual removals
# (move functionality to StgitObject ?)
if os.path.exists(self.__applied_file):
os.remove(self.__applied_file)
if os.path.exists(self.__unapplied_file):
os.remove(self.__unapplied_file)
if os.path.exists(self.__hidden_file):
os.remove(self.__hidden_file)
if os.path.exists(self._dir()+'/orig-base'):
os.remove(self._dir()+'/orig-base')
if not os.listdir(self.__patch_dir):
os.rmdir(self.__patch_dir)
else:
out.warn('Patch directory %s is not empty' % self.__patch_dir)
try:
os.removedirs(self._dir())
except OSError:
raise StackException('Series directory %s is not empty'
% self._dir())
try:
git.delete_branch(self.get_name())
except git.GitException:
out.warn('Could not delete branch "%s"' % self.get_name())
config.remove_section('branch.%s' % self.get_name())
config.remove_section('branch.%s.stgit' % self.get_name())
def refresh_patch(self, files = None, message = None, edit = False,
empty = False,
show_patch = False,
cache_update = True,
author_name = None, author_email = None,
author_date = None,
committer_name = None, committer_email = None,
backup = True, sign_str = None, log = 'refresh',
notes = None, bottom = None):
"""Generates a new commit for the topmost patch
"""
patch = self.get_current_patch()
if not patch:
raise StackException, 'No patches applied'
descr = patch.get_description()
if not (message or descr):
edit = True
descr = ''
elif message:
descr = message
# TODO: move this out of the stgit.stack module, it is really
# for higher level commands to handle the user interaction
if not message and edit:
descr = edit_file(self, descr.rstrip(), \
'Please edit the description for patch "%s" ' \
'above.' % patch.get_name(), show_patch)
if not author_name:
author_name = patch.get_authname()
if not author_email:
author_email = patch.get_authemail()
if not committer_name:
committer_name = patch.get_commname()
if not committer_email:
committer_email = patch.get_commemail()
descr = add_sign_line(descr, sign_str, committer_name, committer_email)
if not bottom:
bottom = patch.get_bottom()
if empty:
tree_id = git.get_commit(bottom).get_tree()
else:
tree_id = None
commit_id = git.commit(files = files,
message = descr, parents = [bottom],
cache_update = cache_update,
tree_id = tree_id,
set_head = True,
allowempty = True,
author_name = author_name,
author_email = author_email,
author_date = author_date,
committer_name = committer_name,
committer_email = committer_email)
patch.set_top(commit_id, backup = backup)
patch.set_description(descr)
patch.set_authname(author_name)
patch.set_authemail(author_email)
patch.set_authdate(author_date)
patch.set_commname(committer_name)
patch.set_commemail(committer_email)
if log:
self.log_patch(patch, log, notes)
return commit_id
def new_patch(self, name, message = None, can_edit = True,
unapplied = False, show_patch = False,
top = None, bottom = None, commit = True,
author_name = None, author_email = None, author_date = None,
committer_name = None, committer_email = None,
before_existing = False, sign_str = None):
"""Creates a new patch, either pointing to an existing commit object,
or by creating a new commit object.
"""
assert commit or (top and bottom)
assert not before_existing or (top and bottom)
assert not (commit and before_existing)
assert (top and bottom) or (not top and not bottom)
assert commit or (not top or (bottom == git.get_commit(top).get_parent()))
        if name is not None:
self.__patch_name_valid(name)
if self.patch_exists(name):
raise StackException, 'Patch "%s" already exists' % name
# TODO: move this out of the stgit.stack module, it is really
# for higher level commands to handle the user interaction
def sign(msg):
return add_sign_line(msg, sign_str,
committer_name or git.committer().name,
committer_email or git.committer().email)
if not message and can_edit:
descr = edit_file(
self, sign(''),
'Please enter the description for the patch above.',
show_patch)
else:
descr = sign(message)
head = git.get_head()
        if name is None:
name = make_patch_name(descr, self.patch_exists)
patch = self.get_patch(name)
patch.create()
patch.set_description(descr)
patch.set_authname(author_name)
patch.set_authemail(author_email)
patch.set_authdate(author_date)
patch.set_commname(committer_name)
patch.set_commemail(committer_email)
if before_existing:
insert_string(self.__applied_file, patch.get_name())
elif unapplied:
patches = [patch.get_name()] + self.get_unapplied()
write_strings(self.__unapplied_file, patches)
set_head = False
else:
append_string(self.__applied_file, patch.get_name())
set_head = True
if commit:
if top:
top_commit = git.get_commit(top)
else:
bottom = head
top_commit = git.get_commit(head)
# create a commit for the patch (may be empty if top == bottom);
# only commit on top of the current branch
assert(unapplied or bottom == head)
commit_id = git.commit(message = descr, parents = [bottom],
cache_update = False,
tree_id = top_commit.get_tree(),
allowempty = True, set_head = set_head,
author_name = author_name,
author_email = author_email,
author_date = author_date,
committer_name = committer_name,
committer_email = committer_email)
# set the patch top to the new commit
patch.set_top(commit_id)
else:
patch.set_top(top)
self.log_patch(patch, 'new')
return patch
def delete_patch(self, name, keep_log = False):
"""Deletes a patch
"""
self.__patch_name_valid(name)
patch = self.get_patch(name)
if self.__patch_is_current(patch):
self.pop_patch(name)
elif self.patch_applied(name):
raise StackException, 'Cannot remove an applied patch, "%s", ' \
'which is not current' % name
elif not name in self.get_unapplied():
raise StackException, 'Unknown patch "%s"' % name
# save the commit id to a trash file
write_string(os.path.join(self.__trash_dir, name), patch.get_top())
patch.delete(keep_log = keep_log)
unapplied = self.get_unapplied()
unapplied.remove(name)
write_strings(self.__unapplied_file, unapplied)
def forward_patches(self, names):
"""Try to fast-forward an array of patches.
On return, patches in names[0:returned_value] have been pushed on the
stack. Apply the rest with push_patch
"""
unapplied = self.get_unapplied()
forwarded = 0
top = git.get_head()
for name in names:
assert(name in unapplied)
patch = self.get_patch(name)
head = top
bottom = patch.get_bottom()
top = patch.get_top()
# top != bottom always since we have a commit for each patch
if head == bottom:
# reset the backup information. No logging since the
# patch hasn't changed
patch.set_top(top, backup = True)
else:
head_tree = git.get_commit(head).get_tree()
bottom_tree = git.get_commit(bottom).get_tree()
if head_tree == bottom_tree:
# We must just reparent this patch and create a new commit
# for it
descr = patch.get_description()
author_name = patch.get_authname()
author_email = patch.get_authemail()
author_date = patch.get_authdate()
committer_name = patch.get_commname()
committer_email = patch.get_commemail()
top_tree = git.get_commit(top).get_tree()
top = git.commit(message = descr, parents = [head],
cache_update = False,
tree_id = top_tree,
allowempty = True,
author_name = author_name,
author_email = author_email,
author_date = author_date,
committer_name = committer_name,
committer_email = committer_email)
patch.set_top(top, backup = True)
self.log_patch(patch, 'push(f)')
else:
top = head
# stop the fast-forwarding, must do a real merge
break
            forwarded += 1
unapplied.remove(name)
if forwarded == 0:
return 0
git.switch(top)
append_strings(self.__applied_file, names[0:forwarded])
write_strings(self.__unapplied_file, unapplied)
return forwarded
def merged_patches(self, names):
"""Test which patches were merged upstream by reverse-applying
them in reverse order. The function returns the list of
patches detected to have been applied. The state of the tree
is restored to the original one
"""
patches = [self.get_patch(name) for name in names]
patches.reverse()
merged = []
for p in patches:
if git.apply_diff(p.get_top(), p.get_bottom()):
merged.append(p.get_name())
merged.reverse()
git.reset()
return merged
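# Detection sketch (inferred from the code above): applying the diff from a
# patch's top down to its bottom is the reverse of the patch itself; if that
# reverse diff applies cleanly to the current tree, the patch's changes are
# already present upstream and the patch is reported as merged.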
def push_empty_patch(self, name):
"""Pushes an empty patch on the stack
"""
unapplied = self.get_unapplied()
assert(name in unapplied)
# patch = self.get_patch(name)
head = git.get_head()
append_string(self.__applied_file, name)
unapplied.remove(name)
write_strings(self.__unapplied_file, unapplied)
self.refresh_patch(bottom = head, cache_update = False, log = 'push(m)')
def push_patch(self, name):
"""Pushes a patch on the stack
"""
unapplied = self.get_unapplied()
assert(name in unapplied)
patch = self.get_patch(name)
head = git.get_head()
bottom = patch.get_bottom()
top = patch.get_top()
# top != bottom always since we have a commit for each patch
if head == bottom:
# A fast-forward push. Just reset the backup
# information. No need for logging
patch.set_top(top, backup = True)
git.switch(top)
append_string(self.__applied_file, name)
unapplied.remove(name)
write_strings(self.__unapplied_file, unapplied)
return False
        # Need to create a new commit and merge in the old patch
ex = None
modified = False
# Try the fast applying first. If this fails, fall back to the
# three-way merge
if not git.apply_diff(bottom, top):
# if git.apply_diff() fails, the patch requires a diff3
# merge and can be reported as modified
modified = True
# merge can fail but the patch needs to be pushed
try:
git.merge_recursive(bottom, head, top)
except git.GitException, ex:
out.error('The merge failed during "push".',
'Revert the operation with "stg undo".')
append_string(self.__applied_file, name)
unapplied.remove(name)
write_strings(self.__unapplied_file, unapplied)
if not ex:
# if the merge was OK and no conflicts, just refresh the patch
# The GIT cache was already updated by the merge operation
if modified:
log = 'push(m)'
else:
log = 'push'
self.refresh_patch(bottom = head, cache_update = False, log = log)
else:
# we make the patch empty, with the merged state in the
# working tree.
self.refresh_patch(bottom = head, cache_update = False,
empty = True, log = 'push(c)')
raise StackException, str(ex)
return modified
def pop_patch(self, name, keep = False):
"""Pops the top patch from the stack
"""
applied = self.get_applied()
applied.reverse()
assert(name in applied)
patch = self.get_patch(name)
if git.get_head_file() == self.get_name():
if keep and not git.apply_diff(git.get_head(), patch.get_bottom(),
check_index = False):
raise StackException(
'Failed to pop patches while preserving the local changes')
git.switch(patch.get_bottom(), keep)
else:
git.set_branch(self.get_name(), patch.get_bottom())
# save the new applied list
idx = applied.index(name) + 1
popped = applied[:idx]
popped.reverse()
unapplied = popped + self.get_unapplied()
write_strings(self.__unapplied_file, unapplied)
del applied[:idx]
applied.reverse()
write_strings(self.__applied_file, applied)
def empty_patch(self, name):
"""Returns True if the patch is empty
"""
self.__patch_name_valid(name)
patch = self.get_patch(name)
bottom = patch.get_bottom()
top = patch.get_top()
if bottom == top:
return True
elif git.get_commit(top).get_tree() \
== git.get_commit(bottom).get_tree():
return True
return False
def rename_patch(self, oldname, newname):
self.__patch_name_valid(newname)
applied = self.get_applied()
unapplied = self.get_unapplied()
if oldname == newname:
raise StackException, '"To" name and "from" name are the same'
if newname in applied or newname in unapplied:
raise StackException, 'Patch "%s" already exists' % newname
if oldname in unapplied:
self.get_patch(oldname).rename(newname)
unapplied[unapplied.index(oldname)] = newname
write_strings(self.__unapplied_file, unapplied)
elif oldname in applied:
self.get_patch(oldname).rename(newname)
applied[applied.index(oldname)] = newname
write_strings(self.__applied_file, applied)
else:
raise StackException, 'Unknown patch "%s"' % oldname
def log_patch(self, patch, message, notes = None):
"""Generate a log commit for a patch
"""
top = git.get_commit(patch.get_top())
old_log = patch.get_log()
if message is None:
# replace the current log entry
if not old_log:
raise StackException, \
'No log entry to annotate for patch "%s"' \
% patch.get_name()
replace = True
log_commit = git.get_commit(old_log)
msg = log_commit.get_log().split('\n')[0]
log_parent = log_commit.get_parent()
if log_parent:
parents = [log_parent]
else:
parents = []
else:
# generate a new log entry
replace = False
msg = '%s\t%s' % (message, top.get_id_hash())
if old_log:
parents = [old_log]
else:
parents = []
if notes:
msg += '\n\n' + notes
log = git.commit(message = msg, parents = parents,
cache_update = False, tree_id = top.get_tree(),
allowempty = True)
patch.set_log(log)
def hide_patch(self, name):
"""Add the patch to the hidden list.
"""
unapplied = self.get_unapplied()
if name not in unapplied:
# keep the checking order for backward compatibility with
# the old hidden patches functionality
if self.patch_applied(name):
raise StackException, 'Cannot hide applied patch "%s"' % name
elif self.patch_hidden(name):
raise StackException, 'Patch "%s" already hidden' % name
else:
raise StackException, 'Unknown patch "%s"' % name
if not self.patch_hidden(name):
# check needed for backward compatibility with the old
# hidden patches functionality
append_string(self.__hidden_file, name)
unapplied.remove(name)
write_strings(self.__unapplied_file, unapplied)
def unhide_patch(self, name):
"""Remove the patch from the hidden list.
"""
hidden = self.get_hidden()
if not name in hidden:
if self.patch_applied(name) or self.patch_unapplied(name):
raise StackException, 'Patch "%s" not hidden' % name
else:
raise StackException, 'Unknown patch "%s"' % name
hidden.remove(name)
write_strings(self.__hidden_file, hidden)
if not self.patch_applied(name) and not self.patch_unapplied(name):
# check needed for backward compatibility with the old
# hidden patches functionality
append_string(self.__unapplied_file, name)
|
miracle2k/stgit
|
stgit/stack.py
|
Python
|
gpl-2.0
| 40,808 | 0.007131 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Training helper that checkpoints models and creates session."""
import time
import numpy as np
from tensorflow.python.client import session
from tensorflow.python.distribute import distribution_strategy_context
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import checkpoint_management
from tensorflow.python.util.tf_export import tf_export
def _maybe_name(obj):
"""Returns object name if it has one, or a message otherwise.
  This is useful for names that appear in error messages.
Args:
obj: Object to get the name of.
Returns:
name, "None", or a "no name" message.
"""
if obj is None:
return "None"
elif hasattr(obj, "name"):
return obj.name
else:
return "<no name for %s>" % type(obj)
def _restore_checkpoint_and_maybe_run_saved_model_initializers(
sess, saver, path):
"""Restores checkpoint values and SavedModel initializers if found."""
# NOTE: All references to SavedModel refer to SavedModels loaded from the
# load_v2 API (which does not require the `sess` argument).
# If the graph contains resources loaded from a SavedModel, they are not
# restored when calling `saver.restore`. Thus, the SavedModel initializer must
# be called with `saver.restore` to properly initialize the model.
# The SavedModel init is stored in the "saved_model_initializers" collection.
# This collection is part of the MetaGraph's default_init_op, so it is already
# called by MonitoredSession as long as the saver doesn't restore any
# checkpoints from the working dir.
saved_model_init_ops = ops.get_collection("saved_model_initializers")
if saved_model_init_ops:
sess.run(saved_model_init_ops)
# The saver must be called *after* the SavedModel init, because the SavedModel
# init will restore the variables from the SavedModel variables directory.
# Initializing/restoring twice is not ideal but there's no other way to do it.
saver.restore(sess, path)
@tf_export(v1=["train.SessionManager"])
class SessionManager(object):
"""Training helper that restores from checkpoint and creates session.
This class is a small wrapper that takes care of session creation and
  checkpoint recovery. It also provides functions to facilitate
coordination among multiple training threads or processes.
* Checkpointing trained variables as the training progresses.
* Initializing variables on startup, restoring them from the most recent
checkpoint after a crash, or wait for checkpoints to become available.
### Usage:
```python
with tf.Graph().as_default():
...add operations to the graph...
# Create a SessionManager that will checkpoint the model in '/tmp/mydir'.
sm = SessionManager()
sess = sm.prepare_session(master, init_op, saver, checkpoint_dir)
# Use the session to train the graph.
while True:
sess.run(<my_train_op>)
```
`prepare_session()` initializes or restores a model. It requires `init_op`
  and `saver` as arguments.
A second process could wait for the model to be ready by doing the following:
```python
with tf.Graph().as_default():
...add operations to the graph...
# Create a SessionManager that will wait for the model to become ready.
sm = SessionManager()
sess = sm.wait_for_session(master)
# Use the session to train the graph.
while True:
sess.run(<my_train_op>)
```
`wait_for_session()` waits for a model to be initialized by other processes.
"""
def __init__(self,
local_init_op=None,
ready_op=None,
ready_for_local_init_op=None,
graph=None,
recovery_wait_secs=30,
local_init_run_options=None,
local_init_feed_dict=None):
"""Creates a SessionManager.
The `local_init_op` is an `Operation` that is run always after a new session
was created. If `None`, this step is skipped.
The `ready_op` is an `Operation` used to check if the model is ready. The
model is considered ready if that operation returns an empty 1D string
tensor. If the operation returns a non empty 1D string tensor, the elements
are concatenated and used to indicate to the user why the model is not
ready.
The `ready_for_local_init_op` is an `Operation` used to check if the model
is ready to run local_init_op. The model is considered ready if that
operation returns an empty 1D string tensor. If the operation returns a non
empty 1D string tensor, the elements are concatenated and used to indicate
to the user why the model is not ready.
If `ready_op` is `None`, the model is not checked for readiness.
`recovery_wait_secs` is the number of seconds between checks that
the model is ready. It is used by processes to wait for a model to
be initialized or restored. Defaults to 30 seconds.
Args:
local_init_op: An `Operation` run immediately after session creation.
Usually used to initialize tables and local variables.
ready_op: An `Operation` to check if the model is initialized.
ready_for_local_init_op: An `Operation` to check if the model is ready
to run local_init_op.
graph: The `Graph` that the model will use.
recovery_wait_secs: Seconds between checks for the model to be ready.
local_init_run_options: RunOptions to be passed to session.run when
executing the local_init_op.
local_init_feed_dict: Optional session feed dictionary to use when running
the local_init_op.
Raises:
ValueError: If ready_for_local_init_op is not None but local_init_op is
None
"""
# Sets default values of arguments.
if graph is None:
graph = ops.get_default_graph()
self._local_init_op = local_init_op
self._ready_op = ready_op
self._ready_for_local_init_op = ready_for_local_init_op
self._graph = graph
self._recovery_wait_secs = recovery_wait_secs
self._target = None
self._local_init_run_options = local_init_run_options
self._local_init_feed_dict = local_init_feed_dict
if ready_for_local_init_op is not None and local_init_op is None:
raise ValueError("If you pass a ready_for_local_init_op "
"you must also pass a local_init_op "
", ready_for_local_init_op [%s]" %
ready_for_local_init_op)
def _restore_checkpoint(self,
master,
saver=None,
checkpoint_dir=None,
checkpoint_filename_with_path=None,
wait_for_checkpoint=False,
max_wait_secs=7200,
config=None):
"""Creates a `Session`, and tries to restore a checkpoint.
Args:
master: `String` representation of the TensorFlow master to use.
saver: A `Saver` object used to restore a model.
checkpoint_dir: Path to the checkpoint files. The latest checkpoint in the
dir will be used to restore.
checkpoint_filename_with_path: Full file name path to the checkpoint file.
wait_for_checkpoint: Whether to wait for checkpoint to become available.
max_wait_secs: Maximum time to wait for checkpoints to become available.
config: Optional `ConfigProto` proto used to configure the session.
Returns:
A pair (sess, is_restored) where 'is_restored' is `True` if
the session could be restored, `False` otherwise.
Raises:
ValueError: If both checkpoint_dir and checkpoint_filename_with_path are
set.
"""
self._target = master
    # This is required so that we initialize the TPU device before
# restoring from checkpoint since we'll be placing variables on the device
# and TPUInitialize wipes out the memory of the device.
strategy = distribution_strategy_context.get_strategy()
if strategy and hasattr(strategy.extended,
"_experimental_initialize_system"):
strategy.extended._experimental_initialize_system() # pylint: disable=protected-access
sess = session.Session(self._target, graph=self._graph, config=config)
if checkpoint_dir and checkpoint_filename_with_path:
raise ValueError("Can not provide both checkpoint_dir and "
"checkpoint_filename_with_path.")
# If either saver or checkpoint_* is not specified, cannot restore. Just
# return.
if not saver or not (checkpoint_dir or checkpoint_filename_with_path):
return sess, False
if checkpoint_filename_with_path:
_restore_checkpoint_and_maybe_run_saved_model_initializers(
sess, saver, checkpoint_filename_with_path)
return sess, True
    # Wait up to max_wait_secs for a checkpoint to become available.
wait_time = 0
ckpt = checkpoint_management.get_checkpoint_state(checkpoint_dir)
while not ckpt or not ckpt.model_checkpoint_path:
if wait_for_checkpoint and wait_time < max_wait_secs:
logging.info("Waiting for checkpoint to be available.")
time.sleep(self._recovery_wait_secs)
wait_time += self._recovery_wait_secs
ckpt = checkpoint_management.get_checkpoint_state(checkpoint_dir)
else:
return sess, False
# Loads the checkpoint.
_restore_checkpoint_and_maybe_run_saved_model_initializers(
sess, saver, ckpt.model_checkpoint_path)
saver.recover_last_checkpoints(ckpt.all_model_checkpoint_paths)
return sess, True
def prepare_session(self,
master,
init_op=None,
saver=None,
checkpoint_dir=None,
checkpoint_filename_with_path=None,
wait_for_checkpoint=False,
max_wait_secs=7200,
config=None,
init_feed_dict=None,
init_fn=None):
"""Creates a `Session`. Makes sure the model is ready to be used.
Creates a `Session` on 'master'. If a `saver` object is passed in, and
`checkpoint_dir` points to a directory containing valid checkpoint
files, then it will try to recover the model from checkpoint. If
no checkpoint files are available, and `wait_for_checkpoint` is
`True`, then the process would check every `recovery_wait_secs`,
up to `max_wait_secs`, for recovery to succeed.
If the model cannot be recovered successfully then it is initialized by
running the `init_op` and calling `init_fn` if they are provided.
The `local_init_op` is also run after init_op and init_fn, regardless of
whether the model was recovered successfully, but only if
`ready_for_local_init_op` passes.
If the model is recovered from a checkpoint it is assumed that all
global variables have been initialized, in particular neither `init_op`
nor `init_fn` will be executed.
It is an error if the model cannot be recovered and no `init_op`
or `init_fn` or `local_init_op` are passed.
Args:
master: `String` representation of the TensorFlow master to use.
init_op: Optional `Operation` used to initialize the model.
saver: A `Saver` object used to restore a model.
checkpoint_dir: Path to the checkpoint files. The latest checkpoint in the
dir will be used to restore.
checkpoint_filename_with_path: Full file name path to the checkpoint file.
wait_for_checkpoint: Whether to wait for checkpoint to become available.
max_wait_secs: Maximum time to wait for checkpoints to become available.
config: Optional `ConfigProto` proto used to configure the session.
init_feed_dict: Optional dictionary that maps `Tensor` objects to feed
values. This feed dictionary is passed to the session `run()` call when
running the init op.
init_fn: Optional callable used to initialize the model. Called after the
optional `init_op` is called. The callable must accept one argument,
the session being initialized.
Returns:
A `Session` object that can be used to drive the model.
Raises:
RuntimeError: If the model cannot be initialized or recovered.
ValueError: If both checkpoint_dir and checkpoint_filename_with_path are
set.
"""
sess, is_loaded_from_checkpoint = self._restore_checkpoint(
master,
saver,
checkpoint_dir=checkpoint_dir,
checkpoint_filename_with_path=checkpoint_filename_with_path,
wait_for_checkpoint=wait_for_checkpoint,
max_wait_secs=max_wait_secs,
config=config)
if not is_loaded_from_checkpoint:
if init_op is None and not init_fn and self._local_init_op is None:
raise RuntimeError("Model is not initialized and no init_op or "
"init_fn or local_init_op was given")
if init_op is not None:
sess.run(init_op, feed_dict=init_feed_dict)
if init_fn:
init_fn(sess)
local_init_success, msg = self._try_run_local_init_op(sess)
if not local_init_success:
raise RuntimeError(
"Init operations did not make model ready for local_init. "
"Init op: %s, init fn: %s, error: %s" % (_maybe_name(init_op),
init_fn,
msg))
is_ready, msg = self._model_ready(sess)
if not is_ready:
raise RuntimeError(
"Init operations did not make model ready. "
"Init op: %s, init fn: %s, local_init_op: %s, error: %s" %
(_maybe_name(init_op), init_fn, self._local_init_op, msg))
return sess
def recover_session(self,
master,
saver=None,
checkpoint_dir=None,
checkpoint_filename_with_path=None,
wait_for_checkpoint=False,
max_wait_secs=7200,
config=None):
"""Creates a `Session`, recovering if possible.
Creates a new session on 'master'. If the session is not initialized
and can be recovered from a checkpoint, recover it.
Args:
master: `String` representation of the TensorFlow master to use.
saver: A `Saver` object used to restore a model.
checkpoint_dir: Path to the checkpoint files. The latest checkpoint in the
dir will be used to restore.
checkpoint_filename_with_path: Full file name path to the checkpoint file.
wait_for_checkpoint: Whether to wait for checkpoint to become available.
max_wait_secs: Maximum time to wait for checkpoints to become available.
config: Optional `ConfigProto` proto used to configure the session.
Returns:
A pair (sess, initialized) where 'initialized' is `True` if
the session could be recovered and initialized, `False` otherwise.
Raises:
ValueError: If both checkpoint_dir and checkpoint_filename_with_path are
set.
"""
sess, is_loaded_from_checkpoint = self._restore_checkpoint(
master,
saver,
checkpoint_dir=checkpoint_dir,
checkpoint_filename_with_path=checkpoint_filename_with_path,
wait_for_checkpoint=wait_for_checkpoint,
max_wait_secs=max_wait_secs,
config=config)
# Always try to run local_init_op
local_init_success, msg = self._try_run_local_init_op(sess)
if not is_loaded_from_checkpoint:
# Do not need to run checks for readiness
return sess, False
restoring_file = checkpoint_dir or checkpoint_filename_with_path
if not local_init_success:
logging.info(
"Restoring model from %s did not make model ready for local init:"
" %s", restoring_file, msg)
return sess, False
is_ready, msg = self._model_ready(sess)
if not is_ready:
logging.info("Restoring model from %s did not make model ready: %s",
restoring_file, msg)
return sess, False
logging.info("Restored model from %s", restoring_file)
return sess, is_loaded_from_checkpoint
def wait_for_session(self, master, config=None, max_wait_secs=float("Inf")):
"""Creates a new `Session` and waits for model to be ready.
Creates a new `Session` on 'master'. Waits for the model to be
initialized or recovered from a checkpoint. It's expected that
another thread or process will make the model ready, and that this
is intended to be used by threads/processes that participate in a
distributed training configuration where a different thread/process
is responsible for initializing or recovering the model being trained.
NB: The amount of time this method waits for the session is bounded
by max_wait_secs. By default, this function will wait indefinitely.
Args:
master: `String` representation of the TensorFlow master to use.
config: Optional ConfigProto proto used to configure the session.
max_wait_secs: Maximum time to wait for the session to become available.
Returns:
A `Session`. May be None if the operation exceeds the timeout
specified by config.operation_timeout_in_ms.
Raises:
tf.DeadlineExceededError: if the session is not available after
max_wait_secs.
"""
self._target = master
if max_wait_secs is None:
max_wait_secs = float("Inf")
timer = _CountDownTimer(max_wait_secs)
while True:
sess = session.Session(self._target, graph=self._graph, config=config)
not_ready_msg = None
not_ready_local_msg = None
local_init_success, not_ready_local_msg = self._try_run_local_init_op(
sess)
if local_init_success:
# Successful if local_init_op is None, or ready_for_local_init_op passes
is_ready, not_ready_msg = self._model_ready(sess)
if is_ready:
return sess
self._safe_close(sess)
# Do we have enough time left to try again?
      remaining_secs_after_wait = (
          timer.secs_remaining() - self._recovery_wait_secs)
      if remaining_secs_after_wait < 0:
raise errors.DeadlineExceededError(
None, None,
"Session was not ready after waiting %d secs." % (max_wait_secs,))
logging.info("Waiting for model to be ready. "
"Ready_for_local_init_op: %s, ready: %s",
not_ready_local_msg, not_ready_msg)
time.sleep(self._recovery_wait_secs)
def _safe_close(self, sess):
"""Closes a session without raising an exception.
Just like sess.close() but ignores exceptions.
Args:
sess: A `Session`.
"""
# pylint: disable=broad-except
try:
sess.close()
except Exception:
# Intentionally not logging to avoid user complaints that
# they get cryptic errors. We really do not care that Close
# fails.
pass
# pylint: enable=broad-except
def _model_ready(self, sess):
"""Checks if the model is ready or not.
Args:
sess: A `Session`.
Returns:
A tuple (is_ready, msg), where is_ready is True if ready and False
otherwise, and msg is `None` if the model is ready, a `String` with the
reason why it is not ready otherwise.
"""
return _ready(self._ready_op, sess, "Model not ready")
def _model_ready_for_local_init(self, sess):
"""Checks if the model is ready to run local_init_op.
Args:
sess: A `Session`.
Returns:
A tuple (is_ready, msg), where is_ready is True if ready to run
local_init_op and False otherwise, and msg is `None` if the model is
ready to run local_init_op, a `String` with the reason why it is not ready
otherwise.
"""
return _ready(self._ready_for_local_init_op, sess,
"Model not ready for local init")
def _try_run_local_init_op(self, sess):
"""Tries to run _local_init_op, if not None, and is ready for local init.
Args:
sess: A `Session`.
Returns:
A tuple (is_successful, msg), where is_successful is True if
_local_init_op is None, or we ran _local_init_op, and False otherwise;
and msg is a `String` with the reason why the model was not ready to run
local init.
"""
if self._local_init_op is not None:
is_ready_for_local_init, msg = self._model_ready_for_local_init(sess)
if is_ready_for_local_init:
logging.info("Running local_init_op.")
sess.run(self._local_init_op, feed_dict=self._local_init_feed_dict,
options=self._local_init_run_options)
logging.info("Done running local_init_op.")
return True, None
else:
return False, msg
return True, None
def _ready(op, sess, msg):
"""Checks if the model is ready or not, as determined by op.
Args:
op: An op, either _ready_op or _ready_for_local_init_op, which defines the
readiness of the model.
sess: A `Session`.
    msg: A message to log as a warning if the model is not ready.
Returns:
A tuple (is_ready, msg), where is_ready is True if ready and False
otherwise, and msg is `None` if the model is ready, a `String` with the
reason why it is not ready otherwise.
"""
if op is None:
return True, None
else:
try:
ready_value = sess.run(op)
# The model is considered ready if ready_op returns an empty 1-D tensor.
# Also compare to `None` and dtype being int32 for backward
# compatibility.
if (ready_value is None or ready_value.dtype == np.int32 or
ready_value.size == 0):
return True, None
else:
# TODO(sherrym): If a custom ready_op returns other types of tensor,
# or strings other than variable names, this message could be
# confusing.
non_initialized_varnames = ", ".join(
[i.decode("utf-8") for i in ready_value])
return False, "Variables not initialized: " + non_initialized_varnames
except errors.FailedPreconditionError as e:
if "uninitialized" not in str(e):
logging.warning("%s : error [%s]", msg, str(e))
raise e
return False, str(e)
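# Typical ready_op construction (illustrative, not part of this module): in
# TF1, tf.compat.v1.report_uninitialized_variables() returns a 1-D string
# tensor naming the uninitialized variables, so an empty result means the
# model is ready:
#
#   ready_op = tf.compat.v1.report_uninitialized_variables()
#   sm = tf.compat.v1.train.SessionManager(ready_op=ready_op)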
class _CountDownTimer(object):
__slots__ = ["_start_time_secs", "_duration_secs"]
def __init__(self, duration_secs):
self._start_time_secs = time.time()
self._duration_secs = duration_secs
def secs_remaining(self):
diff = self._duration_secs - (time.time() - self._start_time_secs)
return max(0, diff)
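# Usage sketch (illustrative): _CountDownTimer measures time left until a
# deadline fixed at construction:
#
#   timer = _CountDownTimer(30.0)
#   time.sleep(1.0)
#   timer.secs_remaining()  # ~29.0; clamped to 0 once the deadline passes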
|
tensorflow/tensorflow
|
tensorflow/python/training/session_manager.py
|
Python
|
apache-2.0
| 23,320 | 0.004417 |
#!/usr/bin/env python
#
# Copyright (c) 2020 by VMware, Inc. ("VMware")
# Used Copyright (c) 2018 by Network Device Education Foundation,
# Inc. ("NetDEF") in this file.
#
# Permission to use, copy, modify, and/or distribute this software
# for any purpose with or without fee is hereby granted, provided
# that the above copyright notice and this permission notice appear
# in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND VMWARE DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL VMWARE BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
#
"""
The following tests are covered to verify BGP Multi-VRF:
FUNC_1:
Within each VRF, each address must be unambiguous on DUT.
FUNC_2:
Different VRFs can have ambiguous/overlapping
addresses on DUT.
FUNC_3:
    Create static routes (IPv4+IPv6) associated with specific VRFs
    and verify on DUT that the same prefixes are present in the
    corresponding routing tables.
FUNC_4_&_5:
    Each VRF should be mapped to a unique VLAN on DUT
    for traffic segregation when using a single physical interface.
FUNC_6:
    Advertise the same set of prefixes from different VRFs
    and verify on the remote router that these prefixes are not
    leaking into each other.
FUNC_7:
    Redistribute static routes and verify on remote routers
    that routes are advertised within the specific VRF instance
    to which those static routes belong.
FUNC_8:
Test end to end traffic isolation based on VRF tables.
FUNC_9:
Use static routes for inter-vrf communication
(route-leaking) on DUT.
FUNC_10:
Verify intra-vrf and inter-vrf communication between
iBGP peers.
FUNC_11:
Verify intra-vrf and inter-vrf communication
between eBGP peers.
FUNC_12_a:
Configure route-maps within a VRF, to alter BGP attributes.
Verify that route-map doesn't affect any other VRF instances'
routing on DUT.
FUNC_12_b:
Configure route-maps within a VRF, to alter BGP attributes.
Verify that route-map doesn't affect any other VRF instances'
routing on DUT.
FUNC_12_c:
Configure route-maps within a VRF, to alter BGP attributes.
Verify that route-map doesn't affect any other VRF instances'
routing on DUT.
FUNC_12_d:
Configure route-maps within a VRF, to alter BGP attributes.
Verify that route-map doesn't affect any other VRF instances'
routing on DUT.
FUNC_12_e:
Configure route-maps within a VRF, to alter BGP attributes.
Verify that route-map doesn't affect any other VRF instances'
routing on DUT.
FUNC_12_f:
Configure route-maps within a VRF, to alter BGP attributes.
Verify that route-map doesn't affect any other VRF instances'
routing on DUT.
FUNC_13:
Configure a route-map on DUT to match traffic based
    on a VRF interface.
FUNC_14:
Test VRF-lite with Static+BGP originated routes.
FUNC_15:
Configure prefix-lists on DUT and apply to BGP peers to
permit/deny prefixes.
FUNC_16_1:
    Configure a route-map on DUT to match traffic based on various
    match/set clauses.
FUNC_16_2:
    Configure a route-map on DUT to match traffic based on various
    match/set clauses.
FUNC_16_3:
    Configure a route-map on DUT to match traffic based on various
    match/set clauses.
"""
import os
import sys
import time
import pytest
# Save the Current Working Directory to find configuration files.
CWD = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(CWD, "../"))
sys.path.append(os.path.join(CWD, "../lib/"))
# Required to instantiate the topology builder class.
# pylint: disable=C0413
# Import topogen and topotest helpers
from lib.topogen import Topogen, get_topogen
from lib.topotest import iproute2_is_vrf_capable
from lib.common_config import (
step,
verify_rib,
start_topology,
write_test_header,
check_address_types,
write_test_footer,
reset_config_on_routers,
create_route_maps,
create_static_routes,
create_prefix_lists,
create_interface_in_kernel,
create_bgp_community_lists,
check_router_status,
apply_raw_config,
required_linux_kernel_version,
)
from lib.topolog import logger
from lib.bgp import (
verify_bgp_rib,
create_router_bgp,
verify_bgp_community,
verify_bgp_convergence,
verify_best_path_as_per_bgp_attribute,
)
from lib.topojson import build_config_from_json
pytestmark = [pytest.mark.bgpd, pytest.mark.staticd]
# Global variables
NETWORK1_1 = {"ipv4": "1.1.1.1/32", "ipv6": "1::1/128"}
NETWORK1_2 = {"ipv4": "1.1.1.2/32", "ipv6": "1::2/128"}
NETWORK2_1 = {"ipv4": "2.1.1.1/32", "ipv6": "2::1/128"}
NETWORK2_2 = {"ipv4": "2.1.1.2/32", "ipv6": "2::2/128"}
NETWORK3_1 = {"ipv4": "3.1.1.1/32", "ipv6": "3::1/128"}
NETWORK3_2 = {"ipv4": "3.1.1.2/32", "ipv6": "3::2/128"}
NETWORK4_1 = {"ipv4": "4.1.1.1/32", "ipv6": "4::1/128"}
NETWORK4_2 = {"ipv4": "4.1.1.2/32", "ipv6": "4::2/128"}
NETWORK5_1 = {"ipv4": "5.1.1.1/32", "ipv6": "5::1/128"}
NETWORK5_2 = {"ipv4": "5.1.1.2/32", "ipv6": "5::2/128"}
NETWORK6_1 = {"ipv4": "6.1.1.1/32", "ipv6": "6::1/128"}
NETWORK6_2 = {"ipv4": "6.1.1.2/32", "ipv6": "6::2/128"}
NETWORK7_1 = {"ipv4": "7.1.1.1/32", "ipv6": "7::1/128"}
NETWORK7_2 = {"ipv4": "7.1.1.2/32", "ipv6": "7::2/128"}
NETWORK8_1 = {"ipv4": "8.1.1.1/32", "ipv6": "8::1/128"}
NETWORK8_2 = {"ipv4": "8.1.1.2/32", "ipv6": "8::2/128"}
NEXT_HOP_IP = {"ipv4": "Null0", "ipv6": "Null0"}
LOOPBACK_1 = {
"ipv4": "10.10.10.10/32",
"ipv6": "10::10:10/128",
}
LOOPBACK_2 = {
"ipv4": "20.20.20.20/32",
"ipv6": "20::20:20/128",
}
def setup_module(mod):
"""
Sets up the pytest environment
* `mod`: module name
"""
# Required linux kernel version for this suite to run.
result = required_linux_kernel_version("4.15")
if result is not True:
pytest.skip("Kernel requirements are not met")
# iproute2 needs to support VRFs for this suite to run.
if not iproute2_is_vrf_capable():
pytest.skip("Installed iproute2 version does not support VRFs")
testsuite_run_time = time.asctime(time.localtime(time.time()))
logger.info("Testsuite start time: {}".format(testsuite_run_time))
logger.info("=" * 40)
logger.info("Running setup_module to create topology")
# This function initiates the topology build with Topogen...
json_file = "{}/bgp_multi_vrf_topo1.json".format(CWD)
tgen = Topogen(json_file, mod.__name__)
global topo
topo = tgen.json_topo
# ... and here it calls Mininet initialization functions.
    # Starting topology, create tmp files which are loaded to routers
    # to start daemons and then start routers
start_topology(tgen)
# Creating configuration from JSON
build_config_from_json(tgen, topo)
global BGP_CONVERGENCE
global ADDR_TYPES
ADDR_TYPES = check_address_types()
BGP_CONVERGENCE = verify_bgp_convergence(tgen, topo)
assert BGP_CONVERGENCE is True, "setup_module : Failed \n Error: {}".format(
BGP_CONVERGENCE
)
logger.info("Running setup_module() done")
def teardown_module():
"""Teardown the pytest environment"""
logger.info("Running teardown_module to delete topology")
tgen = get_topogen()
    # Stop topology and remove tmp files
tgen.stop_topology()
logger.info(
"Testsuite end time: {}".format(time.asctime(time.localtime(time.time())))
)
logger.info("=" * 40)
#####################################################
#
# Testcases
#
#####################################################
def test_address_unambiguous_within_each_vrf_p0(request):
"""
FUNC_1:
Within each VRF, each address must be unambiguous on DUT.
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
if tgen.routers_have_failure():
check_router_status(tgen)
step("Configure a set of static routes(IPv4+IPv6) in " "RED_A on router RED-1")
for addr_type in ADDR_TYPES:
input_dict_1 = {
"red1": {
"static_routes": [
{
"network": NETWORK1_1[addr_type],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_A",
}
]
}
}
result = create_static_routes(tgen, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step(
"Configure the same static routes(IPv4+IPv6) with a TAG value"
"of 500 in RED_A on router RED-1"
)
for addr_type in ADDR_TYPES:
input_dict_2 = {
"red1": {
"static_routes": [
{
"network": NETWORK1_1[addr_type],
"next_hop": NEXT_HOP_IP[addr_type],
"tag": 500,
"vrf": "RED_A",
}
]
}
}
result = create_static_routes(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step("Redistribute static..")
input_dict_3 = {
"red1": {
"bgp": {
"local_as": "500",
"vrf": "RED_A",
"address_family": {
"ipv4": {"unicast": {"redistribute": [{"redist_type": "static"}]}},
"ipv6": {"unicast": {"redistribute": [{"redist_type": "static"}]}},
},
}
}
}
result = create_router_bgp(tgen, topo, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step(
"Verify that static routes(IPv4+IPv6) is overridden and doesn't"
" have duplicate entries within VRF RED_A on router RED-1"
)
for addr_type in ADDR_TYPES:
dut = "red1"
input_dict_2 = {
"red1": {
"static_routes": [
{
"network": NETWORK1_1[addr_type],
"next_hop": NEXT_HOP_IP[addr_type],
"tag": 500,
"vrf": "RED_A",
}
]
}
}
result = verify_rib(tgen, addr_type, dut, input_dict_2, tag=500)
assert result is True, "Testcase {} : Failed \n Error {}".format(
tc_name, result
)
step("Make sure routes are not present in global routing table")
for addr_type in ADDR_TYPES:
dut = "red1"
input_dict_2 = {
"red1": {
"static_routes": [
{
"network": NETWORK1_1[addr_type],
"next_hop": NEXT_HOP_IP[addr_type],
}
]
}
}
result = verify_rib(tgen, addr_type, dut, input_dict_2, expected=False)
assert result is not True, (
"Testcase {} : Failed \n Expected Behaviour: Routes are not "
"present on Global Routing table \n Error {}".format(tc_name, result)
)
write_test_footer(tc_name)
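# Illustrative sketch (not called by the suite): the tag check performed by
# verify_rib(..., tag=500) above can also be done by hand against zebra's
# JSON output. The "tag" key in the parsed JSON is an assumption based on
# zebra's show output; verify_rib() remains the supported helper for this.
def _example_check_route_tag(tgen, router, vrf, prefix, expected_tag):
    output = tgen.gears[router].vtysh_cmd(
        "show ip route vrf {} {} json".format(vrf, prefix), isjson=True
    )
    # zebra keys the JSON by prefix; each prefix maps to a list of routes
    routes = output.get(prefix, [])
    return any(route.get("tag") == expected_tag for route in routes)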
def test_ambiguous_overlapping_addresses_in_different_vrfs_p0(request):
"""
FUNC_2:
Different VRFs can have ambiguous/overlapping
addresses on DUT.
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
reset_config_on_routers(tgen)
if tgen.routers_have_failure():
check_router_status(tgen)
step("Configure a set of static routes(IPv4+IPv6) in vrf RED_A" "on router RED-1")
for addr_type in ADDR_TYPES:
input_dict_1 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_A",
}
]
}
}
result = create_static_routes(tgen, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step(
"Configure the same static routes(IPv4+IPv6) with a"
" TAG value of 500 in vrf RED_B on router RED-1"
)
for addr_type in ADDR_TYPES:
input_dict_2 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"tag": 500,
"vrf": "RED_B",
}
]
}
}
result = create_static_routes(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step("Redistribute static..")
input_dict_3 = {}
for dut in ["red1"]:
temp = {dut: {"bgp": []}}
input_dict_3.update(temp)
if "red" in dut:
VRFS = ["RED_A", "RED_B"]
AS_NUM = [500, 500]
elif "blue" in dut:
VRFS = ["BLUE_A", "BLUE_B"]
AS_NUM = [800, 800]
for vrf, as_num in zip(VRFS, AS_NUM):
temp[dut]["bgp"].append(
{
"local_as": as_num,
"vrf": vrf,
"address_family": {
"ipv4": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
"ipv6": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
},
}
)
result = create_router_bgp(tgen, topo, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Verify that RED_A has the static routes without any" " TAG value")
for addr_type in ADDR_TYPES:
dut = "red1"
input_dict_1 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_A",
}
]
}
}
result = verify_rib(tgen, addr_type, dut, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error {}".format(
tc_name, result
)
result = verify_rib(tgen, addr_type, dut, input_dict_1, tag=500, expected=False)
assert result is not True, (
"Testcase {} : Failed \n "
"Routes are present with tag value 500 \n Error: {}".format(tc_name, result)
)
logger.info("Expected Behavior: {}".format(result))
step(
"Verify that RED_B has the same routes with TAG value "
"500 on same device RED-1"
)
for addr_type in ADDR_TYPES:
dut = "red1"
input_dict_2 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"tag": 500,
"vrf": "RED_B",
}
]
}
}
result = verify_rib(tgen, addr_type, dut, input_dict_2, tag=500)
assert result is True, "Testcase {} : Failed \n Error {}".format(
tc_name, result
)
step("Make sure routes are not present in global routing table")
for addr_type in ADDR_TYPES:
dut = "red1"
input_dict_2 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
}
]
}
}
result = verify_rib(tgen, addr_type, dut, input_dict_2, expected=False)
assert result is not True, (
"Testcase {} : Failed \n Expected Behaviour: Routes are not "
"present on Global Routing table \n Error {}".format(tc_name, result)
)
write_test_footer(tc_name)
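# Illustrative refactor sketch (not used by the suite): the "Redistribute
# static.." blocks repeated throughout these tests could be produced by a
# helper like this one. Router-name prefixes and AS numbers mirror the
# conventions already used above (RED_* routers in AS 500, BLUE_* in AS 800).
def _example_build_redist_static_config(routers):
    input_dict = {}
    for dut in routers:
        if "red" in dut:
            vrfs, as_num = ["RED_A", "RED_B"], 500
        else:
            vrfs, as_num = ["BLUE_A", "BLUE_B"], 800
        input_dict[dut] = {
            "bgp": [
                {
                    "local_as": as_num,
                    "vrf": vrf,
                    "address_family": {
                        af: {"unicast": {"redistribute": [{"redist_type": "static"}]}}
                        for af in ("ipv4", "ipv6")
                    },
                }
                for vrf in vrfs
            ]
        }
    return input_dict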
def test_static_routes_associated_to_specific_vrfs_p0(request):
"""
FUNC_3:
Create static routes(IPv4+IPv6) associated to specific VRFs
and verify on DUT that same prefixes are present in corresponding
routing table.
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
reset_config_on_routers(tgen)
if tgen.routers_have_failure():
check_router_status(tgen)
step(
"Configure a set of unique static(IPv4+IPv6) routes in vrf"
" RED_A on router RED-1"
)
for addr_type in ADDR_TYPES:
input_dict_1 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_B",
},
]
}
}
result = create_static_routes(tgen, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step(
"Configure set of unique static routes(IPv4+IPv6) in vrf "
"RED_B on router RED-1"
)
for addr_type in ADDR_TYPES:
input_dict_2 = {
"blue1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_B",
},
]
}
}
result = create_static_routes(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step("Redistribute static..")
input_dict_3 = {}
for dut in ["red1", "blue1"]:
temp = {dut: {"bgp": []}}
input_dict_3.update(temp)
if "red" in dut:
VRFS = ["RED_A", "RED_B"]
AS_NUM = [500, 500]
elif "blue" in dut:
VRFS = ["BLUE_A", "BLUE_B"]
AS_NUM = [800, 800]
for vrf, as_num in zip(VRFS, AS_NUM):
temp[dut]["bgp"].append(
{
"local_as": as_num,
"vrf": vrf,
"address_family": {
"ipv4": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
"ipv6": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
},
}
)
result = create_router_bgp(tgen, topo, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step(
"Verify that static routes 1.x.x.x/32 and 1::x/128 appear " "in VRF RED_A table"
)
step(
"Verify that static routes 2.x.x.x/32 and 2::x/128 appear " "in VRF RED_B table"
)
for addr_type in ADDR_TYPES:
dut = "red1"
input_dict_1 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_B",
},
]
}
}
result = verify_rib(tgen, addr_type, dut, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error {}".format(
tc_name, result
)
step(
"Verify that static routes 1.x.x.x/32 and 1::x/128 appear "
"in VRF BLUE_A table"
)
step(
"Verify that static routes 2.x.x.x/32 and 2::x/128 appear "
"in VRF BLUE_B table"
)
for addr_type in ADDR_TYPES:
dut = "blue1"
input_dict_2 = {
"blue1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_B",
},
]
}
}
result = verify_rib(tgen, addr_type, dut, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error {}".format(
tc_name, result
)
step("Make sure routes are not present in global routing table")
for addr_type in ADDR_TYPES:
dut = "blue1"
input_dict_2 = {
"blue1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
},
]
}
}
result = verify_rib(tgen, addr_type, dut, input_dict_2, expected=False)
assert result is not True, (
"Testcase {} : Failed \n Expected Behaviour: Routes are not "
"present on Global Routing table \n Error {}".format(tc_name, result)
)
write_test_footer(tc_name)
def test_vrf_with_unique_physical_interface_p0(request):
"""
FUNC_4_&_5:
    Each VRF should be mapped to a unique VLAN on DUT
for traffic segregation, when using a single physical interface.
Each VRF should be mapped to a unique physical
interface(without VLAN tagging) on DUT for traffic segregation.
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
reset_config_on_routers(tgen)
if tgen.routers_have_failure():
check_router_status(tgen)
step(
"R1 is receiving routes in 4 VRFs instances "
"(RED_A, RED_B, BLUE_A, BLUE_B) from RED_1 and BLUE_1."
)
for addr_type in ADDR_TYPES:
input_dict_1 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_B",
},
]
}
}
result = create_static_routes(tgen, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step(
"Advertise a set of unique BGP prefixes(IPv4+IPv6) from "
"routers RED_1 & BLUE_1 in each VRF using static redistribution"
)
for addr_type in ADDR_TYPES:
input_dict_2 = {
"blue1": {
"static_routes": [
{
"network": [NETWORK3_1[addr_type]] + [NETWORK3_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_A",
},
{
"network": [NETWORK4_1[addr_type]] + [NETWORK4_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_B",
},
]
}
}
result = create_static_routes(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step("Redistribute static..")
input_dict_3 = {}
for dut in ["red1", "blue1"]:
temp = {dut: {"bgp": []}}
input_dict_3.update(temp)
if "red" in dut:
VRFS = ["RED_A", "RED_B"]
AS_NUM = [500, 500]
elif "blue" in dut:
VRFS = ["BLUE_A", "BLUE_B"]
AS_NUM = [800, 800]
for vrf, as_num in zip(VRFS, AS_NUM):
temp[dut]["bgp"].append(
{
"local_as": as_num,
"vrf": vrf,
"address_family": {
"ipv4": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
"ipv6": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
},
}
)
result = create_router_bgp(tgen, topo, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step(
"Each VRF table on R2 should maintain it's associated "
"routes and and accordingly install in zebra"
)
for addr_type in ADDR_TYPES:
dut = "r2"
input_dict_1 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_B",
},
]
}
}
input_dict_2 = {
"blue1": {
"static_routes": [
{
"network": [NETWORK3_1[addr_type]] + [NETWORK3_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_A",
},
{
"network": [NETWORK4_1[addr_type]] + [NETWORK4_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_B",
},
]
}
}
result = verify_rib(tgen, addr_type, dut, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error {}".format(
tc_name, result
)
result = verify_rib(tgen, addr_type, dut, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error {}".format(
tc_name, result
)
write_test_footer(tc_name)
def test_prefixes_leaking_p0(request):
"""
FUNC_6:
Advertise same set of prefixes from different VRFs
and verify on remote router that these prefixes are not
leaking to each other
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
reset_config_on_routers(tgen)
if tgen.routers_have_failure():
check_router_status(tgen)
step("Configure a set of static routes(IPv4+IPv6) in vrf " "RED_A on router RED-1")
for addr_type in ADDR_TYPES:
input_dict_1 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_A",
}
]
},
"blue1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_A",
}
]
},
}
result = create_static_routes(tgen, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step(
"Configure a set of static routes(IPv4+IPv6) in vrf " "BLUE_A on router BLUE-1"
)
for addr_type in ADDR_TYPES:
input_dict_2 = {
"red1": {
"static_routes": [
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_B",
}
]
},
"blue1": {
"static_routes": [
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_B",
}
]
},
}
result = create_static_routes(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step(
"Configure the same set of static routes with a "
"metric value of 123 in vrf RED_B on router RED-1"
)
step(
"Configure the same set of static routes with a "
"metric value of 123 in vrf BLUE_B on router BLUE-1"
)
input_dict_3 = {
"red1": {
"bgp": [
{
"local_as": "500",
"vrf": "RED_A",
"address_family": {
"ipv4": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
"ipv6": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
},
},
{
"local_as": "500",
"vrf": "RED_B",
"address_family": {
"ipv4": {
"unicast": {
"redistribute": [
{
"redist_type": "static",
"attribute": {"metric": 123},
}
]
}
},
"ipv6": {
"unicast": {
"redistribute": [
{
"redist_type": "static",
"attribute": {"metric": 123},
}
]
}
},
},
},
]
},
"blue1": {
"bgp": [
{
"local_as": "800",
"vrf": "BLUE_A",
"address_family": {
"ipv4": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
"ipv6": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
},
},
{
"local_as": "800",
"vrf": "BLUE_B",
"address_family": {
"ipv4": {
"unicast": {
"redistribute": [
{
"redist_type": "static",
"attribute": {"metric": 123},
}
]
}
},
"ipv6": {
"unicast": {
"redistribute": [
{
"redist_type": "static",
"attribute": {"metric": 123},
}
]
}
},
},
},
]
},
}
result = create_router_bgp(tgen, topo, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step(
"Verify on R1 that RED_A doesn't receive any static "
"route with metric value 123"
)
for addr_type in ADDR_TYPES:
dut = "r1"
input_dict_1 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_A",
}
]
},
"blue1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_A",
}
]
},
}
input_dict_2 = {
"red1": {
"static_routes": [
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_B",
}
]
},
"blue1": {
"static_routes": [
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_B",
}
]
},
}
result = verify_rib(tgen, addr_type, dut, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error {}".format(
tc_name, result
)
result = verify_rib(
tgen, addr_type, dut, input_dict_1, metric=123, expected=False
)
assert result is not True, (
"Testcase {} : Failed \n "
"Routes are present with metric value 123 \n Error: {}".format(
tc_name, result
)
)
logger.info("Expected Behavior: {}".format(result))
result = verify_rib(tgen, addr_type, dut, input_dict_2, metric=123)
assert result is True, "Testcase {} : Failed \n Error {}".format(
tc_name, result
)
result = verify_rib(
tgen, addr_type, dut, input_dict_2, metric=0, expected=False
)
assert result is not True, (
"Testcase {} : Failed \n "
"Routes are present with metric value 0 \n Error: {}".format(
tc_name, result
)
)
logger.info("Expected Behavior: {}".format(result))
write_test_footer(tc_name)
def test_static_routes_advertised_within_specific_vrf_p0(request):
"""
FUNC_7:
Redistribute Static routes and verify on remote routers
that routes are advertised within specific VRF instance, which
those static routes belong to.
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
reset_config_on_routers(tgen)
if tgen.routers_have_failure():
check_router_status(tgen)
step(
"Advertise a set of unique BGP prefixes(IPv4+IPv6) "
"through static redistribution into VRF RED_A and RED_B"
" from router RED-1."
)
for addr_type in ADDR_TYPES:
input_dict_1 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_B",
},
]
}
}
result = create_static_routes(tgen, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step(
"Advertise same as above set of BGP prefixes(IPv4+IPv6) "
"through static redistribution into VRF BLUE_A and BLUE_B"
" from router BLUE-1."
)
for addr_type in ADDR_TYPES:
input_dict_2 = {
"blue1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_B",
},
]
}
}
result = create_static_routes(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step("Redistribute static..")
input_dict_3 = {}
for dut in ["red1", "blue1"]:
temp = {dut: {"bgp": []}}
input_dict_3.update(temp)
if "red" in dut:
VRFS = ["RED_A", "RED_B"]
AS_NUM = [500, 500]
elif "blue" in dut:
VRFS = ["BLUE_A", "BLUE_B"]
AS_NUM = [800, 800]
for vrf, as_num in zip(VRFS, AS_NUM):
temp[dut]["bgp"].append(
{
"local_as": as_num,
"vrf": vrf,
"address_family": {
"ipv4": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
"ipv6": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
},
}
)
result = create_router_bgp(tgen, topo, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step(
"Verify that static routes are installed into vrfs RED_A"
"and RED_B tables only, not in global routing table of RED_1"
)
for addr_type in ADDR_TYPES:
dut = "red1"
input_dict_1 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_B",
},
]
}
}
result = verify_rib(tgen, addr_type, dut, input_dict_1, protocol="static")
assert result is True, "Testcase {} : Failed \n Error {}".format(
tc_name, result
)
step(
"Verify that static routes are installed into vrfs BLUE_A and"
"BLUE_B tables only, not in global routing table of BLUE_1."
)
for addr_type in ADDR_TYPES:
dut = "blue1"
input_dict_2 = {
"blue1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_B",
},
]
}
}
result = verify_rib(tgen, addr_type, dut, input_dict_2, protocol="static")
assert result is True, "Testcase {} : Failed \n Error {}".format(
tc_name, result
)
step(
"Verify on router R1, that each set of prefixes is received"
" into associated vrf tables only."
)
result = verify_bgp_convergence(tgen, topo)
assert result is True, "Testcase {} : Failed \n Error {}".format(tc_name, result)
for addr_type in ADDR_TYPES:
dut = "r1"
input_dict_1 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_B",
},
]
}
}
input_dict_2 = {
"blue1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_B",
},
]
}
}
result = verify_rib(tgen, addr_type, dut, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error {}".format(
tc_name, result
)
result = verify_rib(tgen, addr_type, dut, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error {}".format(
tc_name, result
)
write_test_footer(tc_name)
def test_end_to_end_traffic_isolation_p0(request):
"""
FUNC_8:
Test end to end traffic isolation based on VRF tables.
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
reset_config_on_routers(tgen)
if tgen.routers_have_failure():
check_router_status(tgen)
step(
"Advertise unique BGP prefixes(IPv4+IPv6) from RED_1 "
"in vrf instances(RED_A and RED_B)."
)
for addr_type in ADDR_TYPES:
input_dict_1 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_B",
},
]
}
}
result = create_static_routes(tgen, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step(
"Advertise unique BGP prefixes(IPv4+IPv6) from from BLUE_1 in"
" vrf instances(BLUE_A and BLUE_B)."
)
for addr_type in ADDR_TYPES:
input_dict_2 = {
"blue1": {
"static_routes": [
{
"network": [NETWORK3_1[addr_type]] + [NETWORK3_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_A",
},
{
"network": [NETWORK4_1[addr_type]] + [NETWORK4_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_B",
},
]
}
}
result = create_static_routes(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step("Redistribute static..")
input_dict_3 = {}
for dut in ["red1", "blue1"]:
temp = {dut: {"bgp": []}}
input_dict_3.update(temp)
if "red" in dut:
VRFS = ["RED_A", "RED_B"]
AS_NUM = [500, 500]
elif "blue" in dut:
VRFS = ["BLUE_A", "BLUE_B"]
AS_NUM = [800, 800]
for vrf, as_num in zip(VRFS, AS_NUM):
temp[dut]["bgp"].append(
{
"local_as": as_num,
"vrf": vrf,
"address_family": {
"ipv4": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
"ipv6": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
},
}
)
result = create_router_bgp(tgen, topo, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step(
"Use below commands to send prefixes with as-path prepend"
"VRF BLUE_A and BLUE_B from router BLUE-1."
)
for addr_type in ADDR_TYPES:
input_dict_4 = {
"blue1": {
"route_maps": {
"ASP_{}".format(addr_type): [
{
"action": "permit",
"set": {"path": {"as_num": 123, "as_action": "prepend"}},
}
]
}
}
}
result = create_route_maps(tgen, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step("Apply route-map to neighbours")
input_dict_5 = {
"blue1": {
"bgp": [
{
"local_as": "800",
"vrf": "BLUE_A",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"blue1-link1": {
"route_maps": [
{
"name": "ASP_ipv4",
"direction": "out",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"blue1-link1": {
"route_maps": [
{
"name": "ASP_ipv6",
"direction": "out",
}
]
}
}
}
}
}
},
},
},
{
"local_as": "800",
"vrf": "BLUE_B",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"blue1-link2": {
"route_maps": [
{
"name": "ASP_ipv4",
"direction": "out",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"blue1-link2": {
"route_maps": [
{
"name": "ASP_ipv6",
"direction": "out",
}
]
}
}
}
}
}
},
},
},
]
}
}
result = create_router_bgp(tgen, topo, input_dict_5)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step(
"Verify on R1 that BLUE_A and BLUE_B VRFs are receiving the"
" prefixes with as-path 123 prepended."
)
for addr_type in ADDR_TYPES:
dut = "r1"
input_dict_6 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_A",
},
{
"network": [NETWORK3_1[addr_type]] + [NETWORK3_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_A",
},
]
}
}
result = verify_bgp_rib(tgen, addr_type, dut, input_dict_6)
assert result is True, "Testcase {} : Failed \n Error {}".format(
tc_name, result
)
result = verify_rib(tgen, addr_type, dut, input_dict_6)
assert result is True, "Testcase {} : Failed \n Error {}".format(
tc_name, result
)
for addr_type in ADDR_TYPES:
dut = "r1"
input_dict_7 = {
"red1": {
"static_routes": [
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_B",
},
{
"network": [NETWORK4_1[addr_type]] + [NETWORK4_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_B",
},
]
}
}
result = verify_bgp_rib(tgen, addr_type, dut, input_dict_7)
assert result is True, "Testcase {} : Failed \n Error {}".format(
tc_name, result
)
result = verify_rib(tgen, addr_type, dut, input_dict_7)
assert result is True, "Testcase {} : Failed \n Error {}".format(
tc_name, result
)
step(
"Use below commands to send prefixes with as-path prepend VRF"
" BLUE_A and BLUE_B from router BLUE-1."
)
input_dict_6 = {
"red2": {
"bgp": [
{
"local_as": "500",
"vrf": "RED_A",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r3": {
"dest_link": {
"red2-link1": {
"allowas-in": {"number_occurences": 2}
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r3": {
"dest_link": {
"red2-link1": {
"allowas-in": {"number_occurences": 2}
}
}
}
}
}
},
},
},
{
"local_as": "500",
"vrf": "RED_B",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r3": {
"dest_link": {
"red2-link2": {
"allowas-in": {"number_occurences": 2}
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r3": {
"dest_link": {
"red2-link2": {
"allowas-in": {"number_occurences": 2}
}
}
}
}
}
},
},
},
]
},
"blue2": {
"bgp": [
{
"local_as": "800",
"vrf": "BLUE_A",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r3": {
"dest_link": {
"blue2-link1": {
"allowas-in": {"number_occurences": 2}
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r3": {
"dest_link": {
"blue2-link1": {
"allowas-in": {"number_occurences": 2}
}
}
}
}
}
},
},
},
{
"local_as": "800",
"vrf": "BLUE_B",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r3": {
"dest_link": {
"blue2-link2": {
"allowas-in": {"number_occurences": 2}
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r3": {
"dest_link": {
"blue2-link2": {
"allowas-in": {"number_occurences": 2}
}
}
}
}
}
},
},
},
]
},
}
result = create_router_bgp(tgen, topo, input_dict_6)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Verify that router RED-2 receives the prefixes in respective" " VRF tables.")
for addr_type in ADDR_TYPES:
dut = "red2"
input_dict_6 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_B",
},
]
}
}
result = verify_bgp_rib(tgen, addr_type, dut, input_dict_6)
assert result is True, "Testcase {} : Failed \n Error {}".format(
tc_name, result
)
        result = verify_rib(tgen, addr_type, dut, input_dict_6)
assert result is True, "Testcase {} : Failed \n Error {}".format(
tc_name, result
)
for addr_type in ADDR_TYPES:
dut = "blue2"
input_dict_7 = {
"red1": {
"static_routes": [
{
"network": [NETWORK3_1[addr_type]] + [NETWORK3_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_A",
},
{
"network": [NETWORK4_1[addr_type]] + [NETWORK4_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_B",
},
]
}
}
result = verify_bgp_rib(tgen, addr_type, dut, input_dict_7)
assert result is True, "Testcase {} : Failed \n Error {}".format(
tc_name, result
)
        result = verify_rib(tgen, addr_type, dut, input_dict_7)
assert result is True, "Testcase {} : Failed \n Error {}".format(
tc_name, result
)
write_test_footer(tc_name)
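# Illustrative sketch (not called by the suite): the as-path prepend
# route-map built above via create_route_maps() corresponds roughly to this
# raw FRR config, pushed with apply_raw_config() (already imported at the top
# of this file). The sequence number 10 is an assumption.
def _example_asp_prepend_route_map(tgen):
    raw_config = {
        "blue1": {
            "raw_config": [
                "route-map ASP_ipv4 permit 10",
                "set as-path prepend 123",
            ]
        }
    }
    return apply_raw_config(tgen, raw_config)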
def test_static_routes_for_inter_vrf_route_leaking_p0(request):
"""
FUNC_9:
Use static routes for inter-vrf communication
(route-leaking) on DUT.
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
reset_config_on_routers(tgen)
if tgen.routers_have_failure():
check_router_status(tgen)
step(
"Configure unique loopback interfaces in VRFs RED_A "
"and RED_B on router RED_1."
)
for addr_type in ADDR_TYPES:
create_interface_in_kernel(
tgen,
"red1",
"loopback1",
LOOPBACK_1[addr_type],
"RED_A",
)
create_interface_in_kernel(
tgen,
"red1",
"loopback2",
LOOPBACK_2[addr_type],
"RED_B",
)
step(
"Create a static routes in vrf RED_B on router RED_1 pointing"
" next-hop as interface's IP in vrf RED_A"
)
intf_red1_r11 = topo["routers"]["red1"]["links"]["r1-link1"]["interface"]
intf_red1_r10 = topo["routers"]["red1"]["links"]["r1-link2"]["interface"]
for addr_type in ADDR_TYPES:
input_dict_1 = {
"red1": {
"static_routes": [
{
"network": LOOPBACK_1[addr_type],
"interface": intf_red1_r10,
"nexthop_vrf": "RED_B",
"vrf": "RED_A",
},
{
"network": LOOPBACK_2[addr_type],
"interface": intf_red1_r11,
"nexthop_vrf": "RED_A",
"vrf": "RED_B",
},
]
}
}
result = create_static_routes(tgen, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step("Redistribute static..")
input_dict_3 = {}
for dut in ["red1"]:
temp = {dut: {"bgp": []}}
input_dict_3.update(temp)
if "red" in dut:
VRFS = ["RED_A", "RED_B"]
AS_NUM = [500, 500]
elif "blue" in dut:
VRFS = ["BLUE_A", "BLUE_B"]
AS_NUM = [800, 800]
for vrf, as_num in zip(VRFS, AS_NUM):
temp[dut]["bgp"].append(
{
"local_as": as_num,
"vrf": vrf,
"address_family": {
"ipv4": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
"ipv6": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
},
}
)
result = create_router_bgp(tgen, topo, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step(
"Verify that static routes are installed into vrfs RED_A"
"and RED_B tables only, not in global routing table of RED_1"
)
for addr_type in ADDR_TYPES:
dut = "red1"
input_dict_1 = {
"red1": {
"static_routes": [
{
"network": LOOPBACK_1[addr_type],
"interface": intf_red1_r10,
"nexthop_vrf": "RED_B",
"vrf": "RED_A",
},
{
"network": LOOPBACK_2[addr_type],
"interface": intf_red1_r11,
"nexthop_vrf": "RED_A",
"vrf": "RED_B",
},
]
}
}
result = verify_rib(tgen, addr_type, dut, input_dict_1, protocol="static")
assert result is True, "Testcase {} : Failed \n Error {}".format(
tc_name, result
)
write_test_footer(tc_name)
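# Illustrative sketch (not called by the suite): the inter-vrf leak routes
# created above, expressed as raw FRR staticd config inside vrf contexts.
# Interface names are parameters here, and the exact staticd syntax is an
# assumption that may vary across FRR releases.
def _example_leak_routes_via_raw_config(tgen, intf_in_red_b, intf_in_red_a):
    raw_config = {
        "red1": {
            "raw_config": [
                "vrf RED_A",
                # route in RED_A resolving through an interface in RED_B
                "ip route 10.10.10.10/32 {} nexthop-vrf RED_B".format(intf_in_red_b),
                "exit-vrf",
                "vrf RED_B",
                # and the reverse leak from RED_B into RED_A
                "ip route 20.20.20.20/32 {} nexthop-vrf RED_A".format(intf_in_red_a),
                "exit-vrf",
            ]
        }
    }
    return apply_raw_config(tgen, raw_config)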
def test_inter_vrf_and_intra_vrf_communication_iBGP_p0(request):
"""
FUNC_10:
Verify intra-vrf and inter-vrf communication between
iBGP peers.
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
reset_config_on_routers(tgen)
if tgen.routers_have_failure():
check_router_status(tgen)
step(
"Configure unique loopback IP(IPv4+IPv6) in vrf RED_A on router"
" R1 and advertise it in BGP process using redistribute "
"connected command."
)
for addr_type in ADDR_TYPES:
create_interface_in_kernel(
tgen,
"r1",
"loopback1",
LOOPBACK_1[addr_type],
"RED_A",
)
create_interface_in_kernel(
tgen,
"r1",
"loopback2",
LOOPBACK_2[addr_type],
"BLUE_A",
)
step(
"Create a static routes in vrf RED_B on router RED_1 pointing"
" next-hop as interface's IP in vrf RED_A"
)
intf_r2_r12 = topo["routers"]["r2"]["links"]["r1-link1"]["interface"]
intf_r2_r10 = topo["routers"]["r2"]["links"]["r1-link3"]["interface"]
for addr_type in ADDR_TYPES:
input_dict_1 = {
"r2": {
"static_routes": [
{
"network": LOOPBACK_2[addr_type],
"interface": intf_r2_r10,
"nexthop_vrf": "BLUE_A",
"vrf": "RED_A",
},
{
"network": LOOPBACK_1[addr_type],
"interface": intf_r2_r12,
"nexthop_vrf": "RED_A",
"vrf": "BLUE_A",
},
]
}
}
result = create_static_routes(tgen, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step("Redistribute connected..")
input_dict_3 = {}
for dut in ["r1"]:
temp = {dut: {"bgp": []}}
input_dict_3.update(temp)
VRFS = ["RED_A", "BLUE_A"]
AS_NUM = [100, 100]
for vrf, as_num in zip(VRFS, AS_NUM):
temp[dut]["bgp"].append(
{
"local_as": as_num,
"vrf": vrf,
"address_family": {
"ipv4": {
"unicast": {"redistribute": [{"redist_type": "connected"}]}
},
"ipv6": {
"unicast": {"redistribute": [{"redist_type": "connected"}]}
},
},
}
)
result = create_router_bgp(tgen, topo, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Redistribute static..")
input_dict_3 = {}
for dut in ["r2"]:
temp = {dut: {"bgp": []}}
input_dict_3.update(temp)
VRFS = ["RED_A", "BLUE_A"]
AS_NUM = [100, 100]
for vrf, as_num in zip(VRFS, AS_NUM):
temp[dut]["bgp"].append(
{
"local_as": as_num,
"vrf": vrf,
"address_family": {
"ipv4": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
"ipv6": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
},
}
)
result = create_router_bgp(tgen, topo, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step(
"Verify that static routes are installed into vrfs RED_A"
"and RED_B tables only, not in global routing table of RED_1"
)
for addr_type in ADDR_TYPES:
dut = "r2"
input_dict = {
"r2": {
"static_routes": [
{
"network": LOOPBACK_2[addr_type],
"interface": intf_r2_r10,
"nexthop_vrf": "BLUE_A",
"vrf": "RED_A",
},
{
"network": LOOPBACK_1[addr_type],
"interface": intf_r2_r12,
"nexthop_vrf": "RED_A",
"vrf": "BLUE_A",
},
]
}
}
result = verify_rib(tgen, addr_type, dut, input_dict)
assert result is True, "Testcase {} : Failed \n Error {}".format(
tc_name, result
)
write_test_footer(tc_name)
def test_inter_vrf_and_intra_vrf_communication_eBGP_p0(request):
"""
FUNC_11:
Verify intra-vrf and inter-vrf communication
between eBGP peers.
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
reset_config_on_routers(tgen)
if tgen.routers_have_failure():
check_router_status(tgen)
step(
"Configure unique loopback IP(IPv4+IPv6) in vrf RED_A on router"
" R2 and advertise it in BGP process using redistribute "
"connected command."
)
step(
"Configure unique loopback IP(IPv4+IPv6) in vrf BLUE_A on router"
" R2 and advertise it in BGP process using redistribute "
"connected command."
)
for addr_type in ADDR_TYPES:
create_interface_in_kernel(
tgen,
"r2",
"loopback1",
LOOPBACK_1[addr_type],
"RED_A",
)
create_interface_in_kernel(
tgen,
"r2",
"loopback2",
LOOPBACK_2[addr_type],
"BLUE_A",
)
step(
"Create a static routes in vrf RED_B on router RED_1 pointing"
" next-hop as interface's IP in vrf RED_A"
)
intf_r3_r21 = topo["routers"]["r3"]["links"]["r2-link1"]["interface"]
intf_r3_r23 = topo["routers"]["r3"]["links"]["r2-link3"]["interface"]
for addr_type in ADDR_TYPES:
input_dict_1 = {
"r3": {
"static_routes": [
{
"network": LOOPBACK_2[addr_type],
"interface": intf_r3_r23,
"nexthop_vrf": "BLUE_A",
"vrf": "RED_A",
},
{
"network": LOOPBACK_1[addr_type],
"interface": intf_r3_r21,
"nexthop_vrf": "RED_A",
"vrf": "BLUE_A",
},
]
}
}
result = create_static_routes(tgen, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step("Redistribute static..")
input_dict_3 = {}
for dut in ["r3"]:
temp = {dut: {"bgp": []}}
input_dict_3.update(temp)
VRFS = ["RED_A", "BLUE_A"]
AS_NUM = [200, 200]
for vrf, as_num in zip(VRFS, AS_NUM):
temp[dut]["bgp"].append(
{
"local_as": as_num,
"vrf": vrf,
"address_family": {
"ipv4": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
"ipv6": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
},
}
)
result = create_router_bgp(tgen, topo, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Redistribute connected..")
input_dict_3 = {}
for dut in ["r2"]:
temp = {dut: {"bgp": []}}
input_dict_3.update(temp)
VRFS = ["RED_A", "BLUE_A"]
AS_NUM = [100, 100]
for vrf, as_num in zip(VRFS, AS_NUM):
temp[dut]["bgp"].append(
{
"local_as": as_num,
"vrf": vrf,
"address_family": {
"ipv4": {
"unicast": {"redistribute": [{"redist_type": "connected"}]}
},
"ipv6": {
"unicast": {"redistribute": [{"redist_type": "connected"}]}
},
},
}
)
result = create_router_bgp(tgen, topo, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step(
"Verify that static routes are installed into vrfs RED_A"
"and RED_B tables only, not in global routing table of RED_1"
)
for addr_type in ADDR_TYPES:
dut = "r3"
input_dict = {
"r3": {
"static_routes": [
{
"network": LOOPBACK_2[addr_type],
"interface": intf_r3_r23,
"nexthop_vrf": "BLUE_A",
"vrf": "RED_A",
},
{
"network": LOOPBACK_1[addr_type],
"interface": intf_r3_r21,
"nexthop_vrf": "RED_A",
"vrf": "BLUE_A",
},
]
}
}
result = verify_rib(tgen, addr_type, dut, input_dict)
assert result is True, "Testcase {} : Failed \n Error {}".format(
tc_name, result
)
write_test_footer(tc_name)
def test_route_map_within_vrf_to_alter_bgp_attribute_nexthop_p0(request):
"""
FUNC_12_a:
Configure route-maps within a VRF, to alter BGP attributes.
Verify that route-map doesn't affect any other VRF instances'
routing on DUT.
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
reset_config_on_routers(tgen)
if tgen.routers_have_failure():
check_router_status(tgen)
step(
"Advertise a set of BGP prefixes(IPv4+IPv6) from RED_1 and"
" RED_2 in vrf instances(RED_A and RED_B)."
)
for addr_type in ADDR_TYPES:
input_dict_1 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_B",
},
]
}
}
result = create_static_routes(tgen, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step(
"Advertise same set of BGP prefixes(IPv4+IPv6) from BLUE_1 and"
"BLUE_2 in vrf instances(BLUE_A and BLUE_B)"
)
for addr_type in ADDR_TYPES:
input_dict_2 = {
"blue1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_B",
},
]
}
}
result = create_static_routes(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step("Redistribute static..")
input_dict_3 = {}
for dut in ["red1", "blue1"]:
temp = {dut: {"bgp": []}}
input_dict_3.update(temp)
if "red" in dut:
VRFS = ["RED_A", "RED_B"]
AS_NUM = [500, 500]
elif "blue" in dut:
VRFS = ["BLUE_A", "BLUE_B"]
AS_NUM = [800, 800]
for vrf, as_num in zip(VRFS, AS_NUM):
temp[dut]["bgp"].append(
{
"local_as": as_num,
"vrf": vrf,
"address_family": {
"ipv4": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
"ipv6": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
},
}
)
result = create_router_bgp(tgen, topo, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step(
"Verify that within vrf instances, BGP best path selection"
" algorithm remains intact and doesn't affect any other VRFs"
" routing decision."
)
for addr_type in ADDR_TYPES:
dut = "r2"
input_dict_1 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_B",
},
]
}
}
input_dict_2 = {
"blue1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_B",
},
]
}
}
result = verify_rib(tgen, addr_type, dut, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error {}".format(
tc_name, result
)
result = verify_rib(tgen, addr_type, dut, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error {}".format(
tc_name, result
)
step("Delete nexthop-self configure from r1")
input_dict_4 = {
"r1": {
"bgp": [
{
"local_as": "100",
"vrf": "RED_A",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r2": {
"dest_link": {
"r1-link1": {"next_hop_self": False}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r2": {
"dest_link": {
"r1-link1": {"next_hop_self": False}
}
}
}
}
},
},
},
{
"local_as": "100",
"vrf": "RED_B",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r2": {
"dest_link": {
"r1-link2": {"next_hop_self": False}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r2": {
"dest_link": {
"r1-link2": {"next_hop_self": False}
}
}
}
}
},
},
},
{
"local_as": "100",
"vrf": "BLUE_A",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r2": {
"dest_link": {
"r1-link3": {"next_hop_self": False}
}
},
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r2": {
"dest_link": {
"r1-link3": {"next_hop_self": False}
}
}
}
}
},
},
},
{
"local_as": "100",
"vrf": "BLUE_B",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r2": {
"dest_link": {
"r1-link4": {"next_hop_self": False}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r2": {
"dest_link": {
"r1-link4": {"next_hop_self": False}
}
}
}
}
},
},
},
]
}
}
result = create_router_bgp(tgen, topo, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step(
"Verify that within vrf instances, BGP best path selection"
" algorithm remains intact and doesn't affect any other VRFs"
" routing decision."
)
for addr_type in ADDR_TYPES:
dut = "r2"
input_dict_1 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_B",
},
]
}
}
input_dict_2 = {
"blue1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_B",
},
]
}
}
result = verify_rib(tgen, addr_type, dut, input_dict_1, expected=False)
assert (
result is not True
), "Testcase {} : Failed \n Expected Behaviour: Routes are rejected because nexthop-self config is deleted \n Error {}".format(
tc_name, result
)
result = verify_rib(tgen, addr_type, dut, input_dict_2, expected=False)
assert (
result is not True
), "Testcase {} : Failed \n Expected Behaviour: Routes are rejected because nexthop-self config is deleted \n Error {}".format(
tc_name, result
)
write_test_footer(tc_name)
@pytest.mark.parametrize("attribute", ["locPrf", "weight", "metric"])
def test_route_map_within_vrf_to_alter_bgp_attribute_p0(request, attribute):
"""
FUNC_12_b/c/d:
Configure route-maps within a VRF, to alter BGP attributes.
Verify that route-map doesn't affect any other VRF instances'
routing on DUT.
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
reset_config_on_routers(tgen)
if tgen.routers_have_failure():
check_router_status(tgen)
step(
"Advertise a set of BGP prefixes(IPv4+IPv6) from RED_1 and"
" RED_2 in vrf instances(RED_A and RED_B)."
)
for addr_type in ADDR_TYPES:
input_dict_1 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_B",
},
]
},
"red2": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_B",
},
]
},
}
result = create_static_routes(tgen, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step(
"Advertise same set of BGP prefixes(IPv4+IPv6) from BLUE_1 and"
"BLUE_2 in vrf instances(BLUE_A and BLUE_B)"
)
for addr_type in ADDR_TYPES:
input_dict_2 = {
"blue1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_B",
},
]
},
"blue2": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_B",
},
]
},
}
result = create_static_routes(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step("Redistribute static..")
input_dict_3 = {}
for dut in ["red1", "red2", "blue1", "blue2"]:
temp = {dut: {"bgp": []}}
input_dict_3.update(temp)
if "red" in dut:
VRFS = ["RED_A", "RED_B"]
AS_NUM = [500, 500]
elif "blue" in dut:
VRFS = ["BLUE_A", "BLUE_B"]
AS_NUM = [800, 800]
for vrf, as_num in zip(VRFS, AS_NUM):
temp[dut]["bgp"].append(
{
"local_as": as_num,
"vrf": vrf,
"address_family": {
"ipv4": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
"ipv6": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
},
}
)
result = create_router_bgp(tgen, topo, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Configure a route-maps to influence BGP parameters - " " Local Preference")
for addr_type in ADDR_TYPES:
input_dict_4 = {
"r2": {
"route_maps": {
"rmap_r1_{}".format(addr_type): [
{"action": "permit", "set": {attribute: 120}}
],
"rmap_r3_{}".format(addr_type): [
{"action": "permit", "set": {attribute: 150}}
],
}
}
}
result = create_route_maps(tgen, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step("Configure neighbor for route map")
input_dict_4 = {
"r2": {
"bgp": [
{
"local_as": "100",
"vrf": "RED_A",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r2-link1": {
"route_maps": [
{
"name": "rmap_r1_ipv4",
"direction": "in",
}
]
}
}
},
"r3": {
"dest_link": {
"r2-link1": {
"route_maps": [
{
"name": "rmap_r3_ipv4",
"direction": "in",
}
]
}
}
},
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r2-link1": {
"route_maps": [
{
"name": "rmap_r1_ipv6",
"direction": "in",
}
]
}
}
},
"r3": {
"dest_link": {
"r2-link1": {
"route_maps": [
{
"name": "rmap_r3_ipv6",
"direction": "in",
}
]
}
}
},
}
}
},
},
},
{
"local_as": "100",
"vrf": "RED_B",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r2-link2": {
"route_maps": [
{
"name": "rmap_r1_ipv4",
"direction": "in",
}
]
}
}
},
"r3": {
"dest_link": {
"r2-link2": {
"route_maps": [
{
"name": "rmap_r3_ipv4",
"direction": "in",
}
]
}
}
},
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r2-link2": {
"route_maps": [
{
"name": "rmap_r1_ipv6",
"direction": "in",
}
]
}
}
},
"r3": {
"dest_link": {
"r2-link2": {
"route_maps": [
{
"name": "rmap_r3_ipv6",
"direction": "in",
}
]
}
}
},
}
}
},
},
},
{
"local_as": "100",
"vrf": "BLUE_A",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r2-link3": {
"route_maps": [
{
"name": "rmap_r1_ipv4",
"direction": "in",
}
]
}
}
},
"r3": {
"dest_link": {
"r2-link3": {
"route_maps": [
{
"name": "rmap_r3_ipv4",
"direction": "in",
}
]
}
}
},
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r2-link3": {
"route_maps": [
{
"name": "rmap_r1_ipv6",
"direction": "in",
}
]
}
}
},
"r3": {
"dest_link": {
"r2-link3": {
"route_maps": [
{
"name": "rmap_r3_ipv6",
"direction": "in",
}
]
}
}
},
}
}
},
},
},
{
"local_as": "100",
"vrf": "BLUE_B",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r2-link4": {
"route_maps": [
{
"name": "rmap_r1_ipv4",
"direction": "in",
}
]
}
}
},
"r3": {
"dest_link": {
"r2-link4": {
"route_maps": [
{
"name": "rmap_r3_ipv4",
"direction": "in",
}
]
}
}
},
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r2-link4": {
"route_maps": [
{
"name": "rmap_r1_ipv6",
"direction": "in",
}
]
}
}
},
"r3": {
"dest_link": {
"r2-link4": {
"route_maps": [
{
"name": "rmap_r3_ipv6",
"direction": "in",
}
]
}
}
},
}
}
},
},
},
]
}
}
result = create_router_bgp(tgen, topo, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step(
"Verify that within vrf instances, BGP best path selection"
" algorithm remains intact and doesn't affect any other VRFs"
" routing decision."
)
dut = "r2"
for addr_type in ADDR_TYPES:
input_dict_1 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_B",
},
]
}
}
input_dict_2 = {
"blue1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_B",
},
]
}
}
result = verify_best_path_as_per_bgp_attribute(
tgen, addr_type, dut, input_dict_1, attribute
)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
result = verify_best_path_as_per_bgp_attribute(
tgen, addr_type, dut, input_dict_2, attribute
)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
write_test_footer(tc_name)
def test_route_map_within_vrf_to_alter_bgp_attribute_aspath_p0(request):
"""
FUNC_12_e:
Configure route-maps within a VRF, to alter BGP attributes.
Verify that route-map doesn't affect any other VRF instances'
routing on DUT.
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
reset_config_on_routers(tgen)
if tgen.routers_have_failure():
check_router_status(tgen)
step(
"Advertise a set of BGP prefixes(IPv4+IPv6) from RED_1 and"
" RED_2 in vrf instances(RED_A and RED_B)."
)
for addr_type in ADDR_TYPES:
input_dict_1 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_B",
},
]
},
"red2": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_B",
},
]
},
}
result = create_static_routes(tgen, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step(
"Advertise same set of BGP prefixes(IPv4+IPv6) from BLUE_1 and"
"BLUE_2 in vrf instances(BLUE_A and BLUE_B)"
)
for addr_type in ADDR_TYPES:
input_dict_2 = {
"blue1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_B",
},
]
},
"blue2": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_B",
},
]
},
}
result = create_static_routes(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step("Redistribute static..")
input_dict_3 = {}
for dut in ["red1", "red2", "blue1", "blue2"]:
temp = {dut: {"bgp": []}}
input_dict_3.update(temp)
if "red" in dut:
VRFS = ["RED_A", "RED_B"]
AS_NUM = [500, 500]
elif "blue" in dut:
VRFS = ["BLUE_A", "BLUE_B"]
AS_NUM = [800, 800]
for vrf, as_num in zip(VRFS, AS_NUM):
temp[dut]["bgp"].append(
{
"local_as": as_num,
"vrf": vrf,
"address_family": {
"ipv4": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
"ipv6": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
},
}
)
result = create_router_bgp(tgen, topo, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Configure a route-maps to influence BGP parameters - " " Local Preference")
for addr_type in ADDR_TYPES:
input_dict_4 = {
"r2": {
"route_maps": {
"rmap_r1_{}".format(addr_type): [
{
"action": "permit",
"set": {
"path": {"as_num": "111 222", "as_action": "prepend"}
},
}
]
}
}
}
result = create_route_maps(tgen, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
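    # The route-map above prepends "111 222" to the AS_PATH of updates
    # received from r1; an illustrative vtysh rendering would be:
    #   route-map rmap_r1_ipv4 permit 10
    #    set as-path prepend 111 222
    # The longer AS_PATH makes r1's paths less preferred, so the paths from
    # r3 (configured below without any route-map) should win best-path.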
step("Configure neighbor for route map")
input_dict_4 = {
"r2": {
"bgp": [
{
"local_as": "100",
"vrf": "RED_A",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r2-link1": {
"route_maps": [
{
"name": "rmap_r1_ipv4",
"direction": "in",
}
]
}
}
},
"r3": {"dest_link": {"r2-link1": {}}},
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r2-link1": {
"route_maps": [
{
"name": "rmap_r1_ipv6",
"direction": "in",
}
]
}
}
},
"r3": {"dest_link": {"r2-link1": {}}},
}
}
},
},
},
{
"local_as": "100",
"vrf": "RED_B",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r2-link2": {
"route_maps": [
{
"name": "rmap_r1_ipv4",
"direction": "in",
}
]
}
}
},
"r3": {"dest_link": {"r2-link2": {}}},
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r2-link2": {
"route_maps": [
{
"name": "rmap_r1_ipv6",
"direction": "in",
}
]
}
}
},
"r3": {"dest_link": {"r2-link2": {}}},
}
}
},
},
},
{
"local_as": "100",
"vrf": "BLUE_A",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r2-link3": {
"route_maps": [
{
"name": "rmap_r1_ipv4",
"direction": "in",
}
]
}
}
},
"r3": {"dest_link": {"r2-link3": {}}},
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r2-link3": {
"route_maps": [
{
"name": "rmap_r1_ipv6",
"direction": "in",
}
]
}
}
},
"r3": {"dest_link": {"r2-link3": {}}},
}
}
},
},
},
{
"local_as": "100",
"vrf": "BLUE_B",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r2-link4": {
"route_maps": [
{
"name": "rmap_r1_ipv4",
"direction": "in",
}
]
}
}
},
"r3": {"dest_link": {"r2-link4": {}}},
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r2-link4": {
"route_maps": [
{
"name": "rmap_r1_ipv6",
"direction": "in",
}
]
}
}
},
"r3": {"dest_link": {"r2-link4": {}}},
}
}
},
},
},
]
}
}
result = create_router_bgp(tgen, topo, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step(
"Verify that within vrf instances, BGP best path selection"
" algorithm remains intact and doesn't affect any other VRFs"
" routing decision."
)
dut = "r2"
attribute = "path"
for addr_type in ADDR_TYPES:
input_dict_1 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_B",
},
]
}
}
input_dict_2 = {
"blue1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_B",
},
]
}
}
result = verify_best_path_as_per_bgp_attribute(
tgen, addr_type, dut, input_dict_1, attribute
)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
result = verify_best_path_as_per_bgp_attribute(
tgen, addr_type, dut, input_dict_2, attribute
)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
write_test_footer(tc_name)
def test_route_map_within_vrf_to_alter_bgp_attribute_lcomm_p0(request):
"""
FUNC_12_f:
Configure route-maps within a VRF, to alter BGP attributes.
Verify that route-map doesn't affect any other VRF instances'
routing on DUT.
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
reset_config_on_routers(tgen)
if tgen.routers_have_failure():
check_router_status(tgen)
step(
"Advertise a set of BGP prefixes(IPv4+IPv6) from RED_1 and"
" RED_2 in vrf instances(RED_A and RED_B)."
)
for addr_type in ADDR_TYPES:
input_dict_1 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_B",
},
]
},
"red2": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_B",
},
]
},
}
result = create_static_routes(tgen, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step(
"Advertise same set of BGP prefixes(IPv4+IPv6) from BLUE_1 and"
"BLUE_2 in vrf instances(BLUE_A and BLUE_B)"
)
for addr_type in ADDR_TYPES:
input_dict_2 = {
"blue1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_B",
},
]
},
"blue2": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_B",
},
]
},
}
result = create_static_routes(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step("Redistribute static..")
input_dict_3 = {}
for dut in ["red1", "red2", "blue1", "blue2"]:
temp = {dut: {"bgp": []}}
input_dict_3.update(temp)
if "red" in dut:
VRFS = ["RED_A", "RED_B"]
AS_NUM = [500, 500]
elif "blue" in dut:
VRFS = ["BLUE_A", "BLUE_B"]
AS_NUM = [800, 800]
for vrf, as_num in zip(VRFS, AS_NUM):
temp[dut]["bgp"].append(
{
"local_as": as_num,
"vrf": vrf,
"address_family": {
"ipv4": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
"ipv6": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
},
}
)
result = create_router_bgp(tgen, topo, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Configure a route-maps to influence BGP parameters - " " Large-community")
step("Create standard large commumity-list in r2")
for addr_type in ADDR_TYPES:
input_dict_1 = {
"r2": {
"bgp_community_lists": [
{
"community_type": "standard",
"action": "permit",
"name": "rmap_lcomm_{}".format(addr_type),
"value": "1:1:1 1:2:3 2:1:1 2:2:2",
"large": True,
}
]
}
}
result = create_bgp_community_lists(tgen, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
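    # Illustrative vtysh rendering of the list above (assumed, not taken
    # from the library's actual output):
    #   bgp large-community-list standard rmap_lcomm_ipv4 permit 1:1:1 1:2:3 2:1:1 2:2:2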
step("Create route-maps in red1 and r1")
for addr_type in ADDR_TYPES:
input_dict_4 = {
"red1": {
"route_maps": {
"rmap_red1_{}".format(addr_type): [
{
"action": "permit",
"set": {
"large_community": {"num": "1:1:1 1:2:3 2:1:1 2:2:2"}
},
}
]
}
},
"r2": {
"route_maps": {
"rmap_r1_{}".format(addr_type): [
{
"action": "permit",
"match": {
"large_community_list": {
"id": "rmap_lcomm_" + addr_type
}
},
}
]
}
},
}
result = create_route_maps(tgen, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
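    # Flow sketch: red1 attaches the large-communities on its outbound
    # updates towards r1, and r2 then matches those same values inbound via
    # the large-community-list created above; prefixes without the expected
    # values fall off the end of rmap_r1_* and are dropped.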
step("Configure neighbor for route map in red1")
input_dict_4 = {
"red1": {
"bgp": [
{
"local_as": "500",
"vrf": "RED_A",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"red1-link1": {
"route_maps": [
{
"name": "rmap_red1_ipv4",
"direction": "out",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"red1-link1": {
"route_maps": [
{
"name": "rmap_red1_ipv6",
"direction": "out",
}
]
}
}
}
}
}
},
},
},
{
"local_as": "500",
"vrf": "RED_B",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"red1-link2": {
"route_maps": [
{
"name": "rmap_red1_ipv4",
"direction": "out",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"red1-link2": {
"route_maps": [
{
"name": "rmap_red1_ipv6",
"direction": "out",
}
]
}
}
}
}
}
},
},
},
]
}
}
result = create_router_bgp(tgen, topo, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Configure neighbor for route map in r2")
input_dict_4 = {
"r2": {
"bgp": [
{
"local_as": "100",
"vrf": "RED_A",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r2-link1": {
"route_maps": [
{
"name": "rmap_r1_ipv4",
"direction": "in",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r2-link1": {
"route_maps": [
{
"name": "rmap_r1_ipv6",
"direction": "in",
}
]
}
}
}
}
}
},
},
},
{
"local_as": "100",
"vrf": "RED_B",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r2-link2": {
"route_maps": [
{
"name": "rmap_r1_ipv4",
"direction": "in",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r2-link2": {
"route_maps": [
{
"name": "rmap_r1_ipv6",
"direction": "in",
}
]
}
}
}
}
}
},
},
},
{
"local_as": "100",
"vrf": "BLUE_A",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r2-link3": {
"route_maps": [
{
"name": "rmap_r1_ipv4",
"direction": "in",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r2-link3": {
"route_maps": [
{
"name": "rmap_r1_ipv6",
"direction": "in",
}
]
}
}
}
}
}
},
},
},
{
"local_as": "100",
"vrf": "BLUE_B",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r2-link4": {
"route_maps": [
{
"name": "rmap_r1_ipv4",
"direction": "in",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"r2-link4": {
"route_maps": [
{
"name": "rmap_r1_ipv6",
"direction": "in",
}
]
}
}
}
}
}
},
},
},
]
}
}
result = create_router_bgp(tgen, topo, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step(
"All the prefixes advertised from RED_1 and BLUE_1 should carry"
" attributes set by outbound route-maps within specific vrfs. "
"Router R1 should be able to match and permit/deny those "
"prefixes based on received attributes. Please use below "
"commands to verify."
)
input_dict = {
"largeCommunity": "1:1:1 1:2:3 2:1:1 2:2:2",
}
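    # Assumption from the helper's name and arguments: verify_bgp_community()
    # inspects each route in "show bgp vrf <vrf> ... json" on r2 and checks
    # that its "largeCommunity" attribute equals the string above.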
for addr_type in ADDR_TYPES:
vrf = "RED_A"
routes = [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]]
result = verify_bgp_community(tgen, addr_type, "r2", routes, input_dict, vrf)
assert result is True, "Test case {} : Failed \n Error: {}".format(
tc_name, result
)
for addr_type in ADDR_TYPES:
vrf = "RED_B"
routes = [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]]
result = verify_bgp_community(tgen, addr_type, "r2", routes, input_dict, vrf)
assert result is True, "Test case {} : Failed \n Error: {}".format(
tc_name, result
)
write_test_footer(tc_name)
def test_route_map_match_traffic_based_on_vrf_p0(request):
"""
FUNC_13:
Configure a route-map on DUT to match traffic based
    on a VRF interface.
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
reset_config_on_routers(tgen)
if tgen.routers_have_failure():
check_router_status(tgen)
step(
"Advertise unique BGP prefixes(IPv4+IPv6) from RED_1 "
"in vrf instances(RED_A and RED_B)."
)
for addr_type in ADDR_TYPES:
input_dict_1 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_B",
},
]
}
}
result = create_static_routes(tgen, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step(
"Advertise unique BGP prefixes(IPv4+IPv6) from from BLUE_1 in"
" vrf instances(BLUE_A and BLUE_B)."
)
for addr_type in ADDR_TYPES:
input_dict_2 = {
"blue1": {
"static_routes": [
{
"network": [NETWORK3_1[addr_type]] + [NETWORK3_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_A",
},
{
"network": [NETWORK4_1[addr_type]] + [NETWORK4_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_B",
},
]
}
}
result = create_static_routes(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step("Redistribute static..")
input_dict_3 = {}
for dut in ["red1", "blue1"]:
temp = {dut: {"bgp": []}}
input_dict_3.update(temp)
if "red" in dut:
VRFS = ["RED_A", "RED_B"]
AS_NUM = [500, 500]
elif "blue" in dut:
VRFS = ["BLUE_A", "BLUE_B"]
AS_NUM = [800, 800]
for vrf, as_num in zip(VRFS, AS_NUM):
temp[dut]["bgp"].append(
{
"local_as": as_num,
"vrf": vrf,
"address_family": {
"ipv4": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
"ipv6": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
},
}
)
result = create_router_bgp(tgen, topo, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step(
"Configure a route-map on R1 to match the prefixes "
"coming from vrf RED_A and set as-prepend to these routes."
)
input_dict_4 = {
"r1": {
"route_maps": {
"ABC": [
{
"action": "permit",
"match": {"source-vrf": "RED_A"},
"set": {"path": {"as_num": 1, "as_action": "prepend"}},
}
]
}
}
}
result = create_route_maps(tgen, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step(
"On R1, import the routes form vrf RED_A and RED_B to BLUE_A and"
" apply the route-map under vrf BLUE_A while importing"
)
raw_config = {
"r1": {
"raw_config": [
"router bgp 100 vrf BLUE_A",
"address-family ipv4 unicast",
"import vrf RED_A",
"import vrf RED_B",
"import vrf route-map ABC",
"address-family ipv6 unicast",
"import vrf RED_A",
"import vrf RED_B",
"import vrf route-map ABC",
]
}
}
result = apply_raw_config(tgen, raw_config)
assert result is True, "Testcase {} : Failed Error: {}".format(tc_name, result)
step(
"All the prefixes advertised from RED_1 and BLUE_1 in vrfs "
"RED_B and BLUE_B must prepend the AS number in as-path on R2."
)
for addr_type in ADDR_TYPES:
input_dict_7 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_A",
},
{
"network": [NETWORK3_1[addr_type]] + [NETWORK3_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_A",
},
]
}
}
result = verify_bgp_rib(tgen, addr_type, "r1", input_dict_7)
assert result is True, "Testcase {} : Failed \n Error {}".format(
tc_name, result
)
write_test_footer(tc_name)
def test_vrf_lite_with_static_bgp_originated_routes_p0(request):
"""
FUNC_14:
Test VRF-lite with Static+BGP originated routes.
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
reset_config_on_routers(tgen)
if tgen.routers_have_failure():
check_router_status(tgen)
step(
"Advertise unique BGP prefixes(IPv4+IPv6) from from RED_1"
" in vrf instances(RED_A and RED_B)."
)
for addr_type in ADDR_TYPES:
input_dict_1 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_B",
},
]
}
}
result = create_static_routes(tgen, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step(
"Advertise unique BGP prefixes(IPv4+IPv6) from from BLUE_1 in"
" vrf instances(BLUE_A and BLUE_B)."
)
for addr_type in ADDR_TYPES:
input_dict_2 = {
"blue1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_B",
},
]
}
}
result = create_static_routes(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
input_dict_3 = {
"red1": {
"bgp": [
{
"local_as": "500",
"vrf": "RED_A",
"address_family": {
"ipv4": {
"unicast": {
"advertise_networks": [
{
"network": [NETWORK5_1["ipv4"]]
+ [NETWORK5_2["ipv4"]]
}
],
"redistribute": [{"redist_type": "static"}],
}
},
"ipv6": {
"unicast": {
"advertise_networks": [
{
"network": [NETWORK5_1["ipv6"]]
+ [NETWORK5_2["ipv6"]]
}
],
"redistribute": [{"redist_type": "static"}],
}
},
},
},
{
"local_as": "500",
"vrf": "RED_B",
"address_family": {
"ipv4": {
"unicast": {
"advertise_networks": [
{
"network": [NETWORK6_1["ipv4"]]
+ [NETWORK6_2["ipv4"]]
}
],
"redistribute": [{"redist_type": "static"}],
}
},
"ipv6": {
"unicast": {
"advertise_networks": [
{
"network": [NETWORK6_1["ipv6"]]
+ [NETWORK6_2["ipv6"]]
}
],
"redistribute": [{"redist_type": "static"}],
}
},
},
},
]
},
"blue1": {
"bgp": [
{
"local_as": "800",
"vrf": "BLUE_A",
"address_family": {
"ipv4": {
"unicast": {
"advertise_networks": [
{
"network": [NETWORK7_1["ipv4"]]
+ [NETWORK7_2["ipv4"]]
}
],
"redistribute": [{"redist_type": "static"}],
}
},
"ipv6": {
"unicast": {
"advertise_networks": [
{
"network": [NETWORK7_1["ipv6"]]
+ [NETWORK7_2["ipv6"]]
}
],
"redistribute": [{"redist_type": "static"}],
}
},
},
},
{
"local_as": "800",
"vrf": "BLUE_B",
"address_family": {
"ipv4": {
"unicast": {
"advertise_networks": [
{
"network": [NETWORK8_1["ipv4"]]
+ [NETWORK8_2["ipv4"]]
}
],
"redistribute": [{"redist_type": "static"}],
}
},
"ipv6": {
"unicast": {
"advertise_networks": [
{
"network": [NETWORK8_1["ipv6"]]
+ [NETWORK8_2["ipv6"]]
}
],
"redistribute": [{"redist_type": "static"}],
}
},
},
},
]
},
}
result = create_router_bgp(tgen, topo, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Static routes must be installed in associated VRF" " table only.")
for addr_type in ADDR_TYPES:
dut = "r1"
result = verify_bgp_rib(tgen, addr_type, dut, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error {}".format(
tc_name, result
)
step(
"All the routers must receive advertised as well as "
"redistributed(static) prefixes in associated VRF tables."
)
for addr_type in ADDR_TYPES:
dut = "r1"
input_dict_1 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_B",
},
]
}
}
input_dict_2 = {
"blue1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_B",
},
]
}
}
result = verify_rib(tgen, addr_type, dut, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error {}".format(
tc_name, result
)
result = verify_rib(tgen, addr_type, dut, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error {}".format(
tc_name, result
)
write_test_footer(tc_name)
def test_prefix_list_to_permit_deny_prefixes_p0(request):
"""
FUNC_15:
Configure prefix-lists on DUT and apply to BGP peers to
permit/deny prefixes.
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
reset_config_on_routers(tgen)
if tgen.routers_have_failure():
check_router_status(tgen)
step(
"Advertise unique BGP prefixes(IPv4+IPv6) from from RED_1"
" in vrf instances(RED_A and RED_B)."
)
for addr_type in ADDR_TYPES:
input_dict_1 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_B",
},
]
}
}
result = create_static_routes(tgen, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step(
"Advertise unique BGP prefixes(IPv4+IPv6) from from BLUE_1 in"
" vrf instances(BLUE_A and BLUE_B)."
)
for addr_type in ADDR_TYPES:
input_dict_2 = {
"blue1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_B",
},
]
}
}
result = create_static_routes(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step("Redistribute static..")
input_dict_3 = {}
for dut in ["red1", "blue1"]:
temp = {dut: {"bgp": []}}
input_dict_3.update(temp)
if "red" in dut:
VRFS = ["RED_A", "RED_B"]
AS_NUM = [500, 500]
elif "blue" in dut:
VRFS = ["BLUE_A", "BLUE_B"]
AS_NUM = [800, 800]
for vrf, as_num in zip(VRFS, AS_NUM):
temp[dut]["bgp"].append(
{
"local_as": as_num,
"vrf": vrf,
"address_family": {
"ipv4": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
"ipv6": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
},
}
)
result = create_router_bgp(tgen, topo, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Verify routes are present before applying prefix-list")
for addr_type in ADDR_TYPES:
dut = "r1"
input_dict_1 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_B",
},
]
}
}
input_dict_2 = {
"blue1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_B",
},
]
}
}
result = verify_rib(tgen, addr_type, dut, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error {}".format(
tc_name, result
)
result = verify_rib(tgen, addr_type, dut, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error {}".format(
tc_name, result
)
step(
"On routers RED_1 and BLUE_1, configure prefix-lists to permit"
" 4 prefixes and deny 1 prefix x.x.x.5. Apply these in outbound"
"direction for each neighbour."
)
for addr_type in ADDR_TYPES:
input_dict_4 = {
"red1": {
"prefix_lists": {
addr_type: {
"pflist_red1_{}".format(addr_type): [
{
"seqid": 10,
"network": NETWORK1_1[addr_type],
"action": "permit",
},
{
"seqid": 11,
"network": NETWORK2_1[addr_type],
"action": "permit",
},
{
"seqid": 12,
"network": NETWORK1_2[addr_type],
"action": "deny",
},
{
"seqid": 13,
"network": NETWORK2_2[addr_type],
"action": "deny",
},
]
}
}
},
"blue1": {
"prefix_lists": {
addr_type: {
"pflist_blue1_{}".format(addr_type): [
{
"seqid": 10,
"network": NETWORK1_1[addr_type],
"action": "permit",
},
{
"seqid": 11,
"network": NETWORK2_1[addr_type],
"action": "permit",
},
{
"seqid": 12,
"network": NETWORK1_2[addr_type],
"action": "deny",
},
{
"seqid": 13,
"network": NETWORK2_2[addr_type],
"action": "deny",
},
]
}
}
},
"r1": {
"prefix_lists": {
addr_type: {
"pflist_r1_{}".format(addr_type): [
{
"seqid": 10,
"network": NETWORK1_1[addr_type],
"action": "permit",
},
{
"seqid": 11,
"network": NETWORK2_1[addr_type],
"action": "deny",
},
]
}
}
},
}
result = create_prefix_lists(tgen, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
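    # Each entry above should render to a single prefix-list line, e.g.
    # (illustrative; the concrete networks come from the NETWORK*_* dicts):
    #   ip prefix-list pflist_red1_ipv4 seq 10 permit <NETWORK1_1>
    #   ip prefix-list pflist_red1_ipv4 seq 12 deny <NETWORK1_2>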
input_dict_5 = {
"red1": {
"bgp": [
{
"local_as": "500",
"vrf": "RED_A",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"red1-link1": {
"prefix_lists": [
{
"name": "pflist_red1_ipv4",
"direction": "out",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"red1-link1": {
"prefix_lists": [
{
"name": "pflist_red1_ipv6",
"direction": "out",
}
]
}
}
}
}
}
},
},
},
{
"local_as": "500",
"vrf": "RED_B",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"red1-link2": {
"prefix_lists": [
{
"name": "pflist_red1_ipv4",
"direction": "out",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"red1-link2": {
"prefix_lists": [
{
"name": "pflist_red1_ipv6",
"direction": "out",
}
]
}
}
}
}
}
},
},
},
]
},
"blue1": {
"bgp": [
{
"local_as": "800",
"vrf": "BLUE_A",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"blue1-link1": {
"prefix_lists": [
{
"name": "pflist_blue1_ipv4",
"direction": "out",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"blue1-link1": {
"prefix_lists": [
{
"name": "pflist_blue1_ipv6",
"direction": "out",
}
]
}
}
}
}
}
},
},
},
{
"local_as": "800",
"vrf": "BLUE_B",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"blue1-link2": {
"prefix_lists": [
{
"name": "pflist_blue1_ipv4",
"direction": "out",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"blue1-link2": {
"prefix_lists": [
{
"name": "pflist_blue1_ipv6",
"direction": "out",
}
]
}
}
}
}
}
},
},
},
]
},
}
result = create_router_bgp(tgen, topo, input_dict_5)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step(
"Verify that within vrf instances, each BGP neighbor receives 1"
" prefixes in routing table and drops (x.x.x.2)."
)
for addr_type in ADDR_TYPES:
dut = "r1"
permitted_routes = {
"red1": {
"static_routes": [
{"network": [NETWORK1_1[addr_type]], "vrf": "RED_A"},
{"network": [NETWORK2_1[addr_type]], "vrf": "RED_B"},
]
}
}
denied_routes = {
"red1": {
"static_routes": [
{"network": [NETWORK1_2[addr_type]], "vrf": "RED_A"},
{"network": [NETWORK2_2[addr_type]], "vrf": "RED_B"},
]
}
}
result = verify_rib(tgen, addr_type, dut, permitted_routes)
assert result is True, "Testcase {} : Failed \n Error {}".format(
tc_name, result
)
result = verify_rib(tgen, addr_type, dut, denied_routes, expected=False)
assert result is not True, "Testcase {} : Failed \n"
"{}:Expected behaviour: Routes are denied by prefix-list \nError {}".format(
tc_name, result
)
step(
"On router R1, configure prefix-lists to permit 2 "
"prefixes(x.x.x.1-2) and deny 2 prefix(x.x.x.3-4). Apply"
" these in inbound direction for each neighbour."
)
input_dict_6 = {
"r1": {
"bgp": [
{
"local_as": "100",
"vrf": "RED_A",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"red1": {
"dest_link": {
"r1-link1": {
"prefix_lists": [
{
"name": "pflist_r1_ipv4",
"direction": "in",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"red1": {
"dest_link": {
"r1-link1": {
"prefix_lists": [
{
"name": "pflist_r1_ipv6",
"direction": "in",
}
]
}
}
}
}
}
},
},
},
{
"local_as": "100",
"vrf": "RED_B",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"red1": {
"dest_link": {
"r1-link2": {
"prefix_lists": [
{
"name": "pflist_r1_ipv4",
"direction": "in",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"red1": {
"dest_link": {
"r1-link2": {
"prefix_lists": [
{
"name": "pflist_r1_ipv6",
"direction": "in",
}
]
}
}
}
}
}
},
},
},
{
"local_as": "100",
"vrf": "BLUE_A",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"blue1": {
"dest_link": {
"r1-link1": {
"prefix_lists": [
{
"name": "pflist_r1_ipv4",
"direction": "in",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"blue1": {
"dest_link": {
"r1-link1": {
"prefix_lists": [
{
"name": "pflist_r1_ipv6",
"direction": "in",
}
]
}
}
}
}
}
},
},
},
{
"local_as": "100",
"vrf": "BLUE_B",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"blue1": {
"dest_link": {
"r1-link2": {
"prefix_lists": [
{
"name": "pflist_r1_ipv4",
"direction": "in",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"blue1": {
"dest_link": {
"r1-link2": {
"prefix_lists": [
{
"name": "pflist_r1_ipv6",
"direction": "in",
}
]
}
}
}
}
}
},
},
},
]
}
}
result = create_router_bgp(tgen, topo, input_dict_6)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step(
"Verify that within vrf instances, each BGP neighbor installs"
" only 1 prefix (x.x.x.1)."
)
for addr_type in ADDR_TYPES:
dut = "r2"
permitted_routes = {
"red1": {
"static_routes": [{"network": [NETWORK1_1[addr_type]], "vrf": "RED_A"}]
}
}
denied_routes = {
"red1": {
"static_routes": [{"network": [NETWORK2_1[addr_type]], "vrf": "RED_A"}]
}
}
result = verify_rib(tgen, addr_type, dut, permitted_routes)
assert result is True, "Testcase {} : Failed \n Error {}".format(
tc_name, result
)
result = verify_rib(tgen, addr_type, dut, denied_routes, expected=False)
assert (
result is not True
), "Testcase {} : Failed \nExpected behaviour: Routes are denied by prefix-list \nError {}".format(
tc_name, result
)
write_test_footer(tc_name)
def test_route_map_set_and_match_tag_p0(request):
"""
FUNC_16_1:
    Configure a route-map on DUT to match traffic based on various
    match/set clauses.
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
reset_config_on_routers(tgen)
if tgen.routers_have_failure():
check_router_status(tgen)
step(
"Advertise unique BGP prefixes(IPv4+IPv6) from RED_1"
" in vrf instances(RED_A and RED_B)."
)
for addr_type in ADDR_TYPES:
input_dict_1 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"tag": 4001,
"vrf": "RED_A",
},
{
"network": [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_B",
},
]
}
}
result = create_static_routes(tgen, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step(
"Advertise same set of BGP prefixes(IPv4+IPv6) from BLUE_1 and"
"BLUE_2 in vrf instances(BLUE_A and BLUE_B)"
)
for addr_type in ADDR_TYPES:
input_dict_2 = {
"blue1": {
"static_routes": [
{
"network": [NETWORK3_1[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"tag": 4001,
"vrf": "BLUE_A",
},
{
"network": [NETWORK3_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_A",
},
{
"network": [NETWORK4_1[addr_type]] + [NETWORK4_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_B",
},
]
}
}
result = create_static_routes(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step("Redistribute static..")
input_dict_3 = {}
for dut in ["red1", "blue1"]:
temp = {dut: {"bgp": []}}
input_dict_3.update(temp)
if "red" in dut:
VRFS = ["RED_A", "RED_B"]
AS_NUM = [500, 500]
elif "blue" in dut:
VRFS = ["BLUE_A", "BLUE_B"]
AS_NUM = [800, 800]
for vrf, as_num in zip(VRFS, AS_NUM):
temp[dut]["bgp"].append(
{
"local_as": as_num,
"vrf": vrf,
"address_family": {
"ipv4": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
"ipv6": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
},
}
)
result = create_router_bgp(tgen, topo, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Configure a route-maps to match tag")
for addr_type in ADDR_TYPES:
input_dict_4 = {
"red1": {
"route_maps": {
"rmap1_{}".format(addr_type): [
{"action": "permit", "match": {addr_type: {"tag": "4001"}}}
]
}
}
}
result = create_route_maps(tgen, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
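    # Illustrative vtysh rendering of the route-map above:
    #   route-map rmap1_ipv4 permit 10
    #    match tag 4001
    # Only the static routes created above with "tag": 4001 match; untagged
    # routes fall off the end of the route-map and are denied.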
step("Configure neighbor for route map")
input_dict_4 = {
"red1": {
"bgp": [
{
"local_as": "500",
"vrf": "RED_A",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"red1-link1": {
"route_maps": [
{
"name": "rmap1_ipv4",
"direction": "out",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"red1-link1": {
"route_maps": [
{
"name": "rmap1_ipv6",
"direction": "out",
}
]
}
}
}
}
}
},
},
},
{
"local_as": "500",
"vrf": "RED_B",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"red1-link2": {
"route_maps": [
{
"name": "rmap1_ipv4",
"direction": "out",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"red1-link2": {
"route_maps": [
{
"name": "rmap1_ipv6",
"direction": "out",
}
]
}
}
}
}
}
},
},
},
]
}
}
result = create_router_bgp(tgen, topo, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step(
"Verify that within vrf instances, BGP best path selection"
" algorithm remains intact and doesn't affect any other VRFs"
" routing decision."
)
dut = "r1"
for addr_type in ADDR_TYPES:
input_dict_1 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"tag": 4001,
"vrf": "RED_A",
}
]
}
}
result = verify_rib(tgen, addr_type, dut, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error {}".format(
tc_name, result
)
for addr_type in ADDR_TYPES:
input_dict_2 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_B",
},
]
}
}
result = verify_rib(tgen, addr_type, dut, input_dict_2, expected=False)
assert (
result is not True
), "Testcase {} : Failed \n Expected Behavior: Routes are denied \nError {}".format(
tc_name, result
)
write_test_footer(tc_name)
def test_route_map_set_and_match_metric_p0(request):
"""
FUNC_16_2:
    Configure a route-map on DUT to match traffic based on various
    match/set clauses.
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
reset_config_on_routers(tgen)
if tgen.routers_have_failure():
check_router_status(tgen)
step(
"Advertise unique BGP prefixes(IPv4+IPv6) from RED_1"
" in vrf instances(RED_A and RED_B)."
)
for addr_type in ADDR_TYPES:
input_dict_1 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_B",
},
]
}
}
result = create_static_routes(tgen, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step(
"Advertise same set of BGP prefixes(IPv4+IPv6) from BLUE_1 and"
"BLUE_2 in vrf instances(BLUE_A and BLUE_B)"
)
for addr_type in ADDR_TYPES:
input_dict_2 = {
"blue1": {
"static_routes": [
{
"network": [NETWORK3_1[addr_type]] + [NETWORK3_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_A",
},
{
"network": [NETWORK4_1[addr_type]] + [NETWORK4_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_B",
},
]
}
}
result = create_static_routes(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step("Redistribute static..")
input_dict_3 = {
"red1": {
"bgp": [
{
"local_as": "500",
"vrf": "RED_A",
"address_family": {
"ipv4": {
"unicast": {
"redistribute": [
{
"redist_type": "static",
"attribute": {"metric": 123},
}
]
}
},
"ipv6": {
"unicast": {
"redistribute": [
{
"redist_type": "static",
"attribute": {"metric": 123},
}
]
}
},
},
},
{
"local_as": "500",
"vrf": "RED_B",
"address_family": {
"ipv4": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
"ipv6": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
},
},
]
},
"blue1": {
"bgp": [
{
"local_as": "800",
"vrf": "BLUE_A",
"address_family": {
"ipv4": {
"unicast": {
"redistribute": [
{
"redist_type": "static",
"attribute": {"metric": 123},
}
]
}
},
"ipv6": {
"unicast": {
"redistribute": [
{
"redist_type": "static",
"attribute": {"metric": 123},
}
]
}
},
},
},
{
"local_as": "800",
"vrf": "BLUE_B",
"address_family": {
"ipv4": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
"ipv6": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
},
},
]
},
}
result = create_router_bgp(tgen, topo, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Configure a route-maps to match tag")
for addr_type in ADDR_TYPES:
input_dict_4 = {
"r1": {
"route_maps": {
"rmap1_{}".format(addr_type): [
{"action": "permit", "match": {"metric": 123}}
]
}
}
}
result = create_route_maps(tgen, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
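    # Illustrative vtysh rendering of the route-map above:
    #   route-map rmap1_ipv4 permit 10
    #    match metric 123
    # Only routes redistributed with metric 123 (RED_A/BLUE_A above) pass;
    # RED_B/BLUE_B routes, redistributed without that metric, are dropped.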
step("Configure neighbor for route map")
input_dict_4 = {
"r1": {
"bgp": [
{
"local_as": "100",
"vrf": "RED_A",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"red1": {
"dest_link": {
"r1-link1": {
"route_maps": [
{
"name": "rmap1_ipv4",
"direction": "in",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"red1": {
"dest_link": {
"r1-link1": {
"route_maps": [
{
"name": "rmap1_ipv6",
"direction": "in",
}
]
}
}
}
}
}
},
},
},
{
"local_as": "100",
"vrf": "RED_B",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"red1": {
"dest_link": {
"r1-link2": {
"route_maps": [
{
"name": "rmap1_ipv4",
"direction": "in",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"red1": {
"dest_link": {
"r1-link2": {
"route_maps": [
{
"name": "rmap1_ipv6",
"direction": "in",
}
]
}
}
}
}
}
},
},
},
{
"local_as": "100",
"vrf": "BLUE_A",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"blue1": {
"dest_link": {
"r1-link1": {
"route_maps": [
{
"name": "rmap1_ipv4",
"direction": "in",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"blue1": {
"dest_link": {
"r1-link1": {
"route_maps": [
{
"name": "rmap1_ipv6",
"direction": "in",
}
]
}
}
}
}
}
},
},
},
{
"local_as": "100",
"vrf": "BLUE_B",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"blue1": {
"dest_link": {
"r1-link2": {
"route_maps": [
{
"name": "rmap1_ipv4",
"direction": "in",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"blue1": {
"dest_link": {
"r1-link2": {
"route_maps": [
{
"name": "rmap1_ipv6",
"direction": "in",
}
]
}
}
}
}
}
},
},
},
]
}
}
result = create_router_bgp(tgen, topo, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step(
"Verify that within vrf instances, BGP best path selection"
" algorithm remains intact and doesn't affect any other VRFs"
" routing decision."
)
dut = "r1"
for addr_type in ADDR_TYPES:
input_dict_1 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_A",
}
]
}
}
result = verify_rib(tgen, addr_type, dut, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error {}".format(
tc_name, result
)
for addr_type in ADDR_TYPES:
input_dict_2 = {
"red1": {
"static_routes": [
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_B",
}
]
}
}
result = verify_rib(tgen, addr_type, dut, input_dict_2, expected=False)
assert (
result is not True
), "Testcase {} : Failed \n Expected Behavior: Routes are denied \nError {}".format(
tc_name, result
)
write_test_footer(tc_name)
def test_route_map_set_and_match_community_p0(request):
"""
FUNC_16_3:
    Configure a route-map on DUT to match traffic based on various
    match/set clauses.
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
reset_config_on_routers(tgen)
if tgen.routers_have_failure():
pytest.skip(tgen.errors)
step(
"Advertise unique BGP prefixes(IPv4+IPv6) from RED_1"
" in vrf instances(RED_A and RED_B)."
)
for addr_type in ADDR_TYPES:
input_dict_1 = {
"red1": {
"static_routes": [
{
"network": [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_A",
},
{
"network": [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "RED_B",
},
]
}
}
result = create_static_routes(tgen, input_dict_1)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step(
"Advertise same set of BGP prefixes(IPv4+IPv6) from BLUE_1 and"
"BLUE_2 in vrf instances(BLUE_A and BLUE_B)"
)
for addr_type in ADDR_TYPES:
input_dict_2 = {
"blue1": {
"static_routes": [
{
"network": [NETWORK3_1[addr_type]] + [NETWORK3_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_A",
},
{
"network": [NETWORK4_1[addr_type]] + [NETWORK4_2[addr_type]],
"next_hop": NEXT_HOP_IP[addr_type],
"vrf": "BLUE_B",
},
]
}
}
result = create_static_routes(tgen, input_dict_2)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step("Redistribute static..")
input_dict_3 = {}
for dut in ["red1", "blue1"]:
temp = {dut: {"bgp": []}}
input_dict_3.update(temp)
if "red" in dut:
VRFS = ["RED_A", "RED_B"]
AS_NUM = [500, 500]
elif "blue" in dut:
VRFS = ["BLUE_A", "BLUE_B"]
AS_NUM = [800, 800]
for vrf, as_num in zip(VRFS, AS_NUM):
temp[dut]["bgp"].append(
{
"local_as": as_num,
"vrf": vrf,
"address_family": {
"ipv4": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
"ipv6": {
"unicast": {"redistribute": [{"redist_type": "static"}]}
},
},
}
)
result = create_router_bgp(tgen, topo, input_dict_3)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step("Create community-list")
for addr_type in ADDR_TYPES:
input_dict_4 = {
"r1": {
"bgp_community_lists": [
{
"community_type": "standard",
"action": "permit",
"name": "rmap_lcomm_{}".format(addr_type),
"value": "1:1 1:2 1:3 1:4 1:5",
}
]
}
}
result = create_bgp_community_lists(tgen, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
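    # Illustrative vtysh rendering of the community-list above:
    #   bgp community-list standard rmap_lcomm_ipv4 permit 1:1 1:2 1:3 1:4 1:5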
step("Configure a route-maps to match tag")
step("Create route-maps in red1 and r1")
for addr_type in ADDR_TYPES:
input_dict_4 = {
"red1": {
"route_maps": {
"rmap_red1_{}".format(addr_type): [
{
"action": "permit",
"set": {"community": {"num": "1:1 1:2 1:3 1:4 1:5"}},
}
]
}
},
"r1": {
"route_maps": {
"rmap1_{}".format(addr_type): [
{
"action": "permit",
"match": {
"community_list": {"id": "rmap_lcomm_" + addr_type}
},
}
]
}
},
}
result = create_route_maps(tgen, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(
tc_name, result
)
step("Configure neighbor for route map")
input_dict_4 = {
"red1": {
"bgp": [
{
"local_as": "500",
"vrf": "RED_A",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"red1-link1": {
"route_maps": [
{
"name": "rmap_red1_ipv4",
"direction": "out",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"red1-link1": {
"route_maps": [
{
"name": "rmap_red1_ipv6",
"direction": "out",
}
]
}
}
}
}
}
},
},
},
{
"local_as": "500",
"vrf": "RED_B",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"red1-link2": {
"route_maps": [
{
"name": "rmap_red1_ipv4",
"direction": "out",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"r1": {
"dest_link": {
"red1-link2": {
"route_maps": [
{
"name": "rmap_red1_ipv6",
"direction": "out",
}
]
}
}
}
}
}
},
},
},
]
},
"r1": {
"bgp": [
{
"local_as": "100",
"vrf": "RED_A",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"red1": {
"dest_link": {
"r1-link1": {
"route_maps": [
{
"name": "rmap1_ipv4",
"direction": "in",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"red1": {
"dest_link": {
"r1-link1": {
"route_maps": [
{
"name": "rmap1_ipv6",
"direction": "in",
}
]
}
}
}
}
}
},
},
},
{
"local_as": "100",
"vrf": "RED_B",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"red1": {
"dest_link": {
"r1-link2": {
"route_maps": [
{
"name": "rmap1_ipv4",
"direction": "in",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"red1": {
"dest_link": {
"r1-link2": {
"route_maps": [
{
"name": "rmap1_ipv6",
"direction": "in",
}
]
}
}
}
}
}
},
},
},
{
"local_as": "100",
"vrf": "BLUE_A",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"blue1": {
"dest_link": {
"r1-link1": {
"route_maps": [
{
"name": "rmap1_ipv4",
"direction": "in",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"blue1": {
"dest_link": {
"r1-link1": {
"route_maps": [
{
"name": "rmap1_ipv6",
"direction": "in",
}
]
}
}
}
}
}
},
},
},
{
"local_as": "100",
"vrf": "BLUE_B",
"address_family": {
"ipv4": {
"unicast": {
"neighbor": {
"blue1": {
"dest_link": {
"r1-link2": {
"route_maps": [
{
"name": "rmap1_ipv4",
"direction": "in",
}
]
}
}
}
}
}
},
"ipv6": {
"unicast": {
"neighbor": {
"blue1": {
"dest_link": {
"r1-link2": {
"route_maps": [
{
"name": "rmap1_ipv6",
"direction": "in",
}
]
}
}
}
}
}
},
},
},
]
},
}
result = create_router_bgp(tgen, topo, input_dict_4)
assert result is True, "Testcase {} : Failed \n Error: {}".format(tc_name, result)
step(
"All the prefixes advertised from RED_1 and BLUE_1 should carry"
" attributes set by outbound route-maps within specific vrfs. "
"Router R1 should be able to match and permit/deny those "
"prefixes based on received attributes. Please use below "
"commands to verify."
)
input_dict = {
"community": "1:1 1:2 1:3 1:4 1:5",
}
for addr_type in ADDR_TYPES:
vrf = "RED_A"
routes = [NETWORK1_1[addr_type]] + [NETWORK1_2[addr_type]]
result = verify_bgp_community(tgen, addr_type, "r1", routes, input_dict, vrf)
assert result is True, "Test case {} : Failed \n Error: {}".format(
tc_name, result
)
for addr_type in ADDR_TYPES:
vrf = "RED_B"
routes = [NETWORK2_1[addr_type]] + [NETWORK2_2[addr_type]]
result = verify_bgp_community(tgen, addr_type, "r1", routes, input_dict, vrf)
assert result is True, "Test case {} : Failed \n Error: {}".format(
tc_name, result
)
write_test_footer(tc_name)
if __name__ == "__main__":
args = ["-s"] + sys.argv[1:]
sys.exit(pytest.main(args))
|
freerangerouting/frr
|
tests/topotests/bgp_multi_vrf_topo1/test_bgp_multi_vrf_topo1.py
|
Python
|
gpl-2.0
| 229,521 | 0.00132 |
import os
import sys
import codecs
from contextlib import contextmanager
from itertools import repeat
from functools import update_wrapper
from .types import convert_type, IntRange, BOOL
from .utils import make_str, make_default_short_help, echo
from .exceptions import ClickException, UsageError, BadParameter, Abort, \
MissingParameter
from .termui import prompt, confirm
from .formatting import HelpFormatter, join_options
from .parser import OptionParser, split_opt
from .globals import push_context, pop_context
from ._compat import PY2, isidentifier, iteritems, _check_for_unicode_literals
_missing = object()
SUBCOMMAND_METAVAR = 'COMMAND [ARGS]...'
SUBCOMMANDS_METAVAR = 'COMMAND1 [ARGS]... [COMMAND2 [ARGS]...]...'
def _bashcomplete(cmd, prog_name, complete_var=None):
"""Internal handler for the bash completion support."""
if complete_var is None:
complete_var = '_%s_COMPLETE' % (prog_name.replace('-', '_')).upper()
complete_instr = os.environ.get(complete_var)
if not complete_instr:
return
from ._bashcomplete import bashcomplete
if bashcomplete(cmd, prog_name, complete_var, complete_instr):
sys.exit(1)
def batch(iterable, batch_size):
return list(zip(*repeat(iter(iterable), batch_size)))
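# Illustrative behaviour: batch(range(6), 2) -> [(0, 1), (2, 3), (4, 5)].
# repeat() yields the *same* iterator object batch_size times, so zip()
# pulls consecutive items into each tuple; trailing items that do not
# fill a complete batch are silently dropped by zip().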
def invoke_param_callback(callback, ctx, param, value):
code = getattr(callback, '__code__', None)
args = getattr(code, 'co_argcount', 3)
if args < 3:
# This will become a warning in Click 3.0:
from warnings import warn
warn(Warning('Invoked legacy parameter callback "%s". The new '
'signature for such callbacks starting with '
'click 2.0 is (ctx, param, value).'
% callback), stacklevel=3)
return callback(ctx, value)
return callback(ctx, param, value)
@contextmanager
def augment_usage_errors(ctx, param=None):
"""Context manager that attaches extra information to exceptions that
fly.
"""
try:
yield
except BadParameter as e:
if e.ctx is None:
e.ctx = ctx
if param is not None and e.param is None:
e.param = param
raise
except UsageError as e:
if e.ctx is None:
e.ctx = ctx
raise
def iter_params_for_processing(invocation_order, declaration_order):
"""Given a sequence of parameters in the order as should be considered
for processing and an iterable of parameters that exist, this returns
a list in the correct order as they should be processed.
"""
def sort_key(item):
try:
idx = invocation_order.index(item)
except ValueError:
idx = float('inf')
return (not item.is_eager, idx)
return sorted(declaration_order, key=sort_key)
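# Example (illustrative): eager parameters (such as --help/--version)
# sort first because the key is (not item.is_eager, idx); among equally
# eager parameters, those that appeared earlier on the command line come
# first, and parameters never invoked sort last via idx = float('inf').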
class Context(object):
"""The context is a special internal object that holds state relevant
for the script execution at every single level. It's normally invisible
to commands unless they opt-in to getting access to it.
The context is useful as it can pass internal objects around and can
control special execution features such as reading data from
environment variables.
A context can be used as context manager in which case it will call
:meth:`close` on teardown.
.. versionadded:: 2.0
Added the `resilient_parsing`, `help_option_names`,
`token_normalize_func` parameters.
.. versionadded:: 3.0
Added the `allow_extra_args` and `allow_interspersed_args`
parameters.
.. versionadded:: 4.0
Added the `color`, `ignore_unknown_options`, and
`max_content_width` parameters.
:param command: the command class for this context.
:param parent: the parent context.
:param info_name: the info name for this invocation. Generally this
is the most descriptive name for the script or
command. For the toplevel script it is usually
the name of the script, for commands below it it's
the name of the script.
:param obj: an arbitrary object of user data.
:param auto_envvar_prefix: the prefix to use for automatic environment
variables. If this is `None` then reading
from environment variables is disabled. This
does not affect manually set environment
variables which are always read.
:param default_map: a dictionary (like object) with default values
for parameters.
:param terminal_width: the width of the terminal. The default is
inherit from parent context. If no context
defines the terminal width then auto
detection will be applied.
:param max_content_width: the maximum width for content rendered by
Click (this currently only affects help
pages). This defaults to 80 characters if
not overridden. In other words: even if the
terminal is larger than that, Click will not
format things wider than 80 characters by
default. In addition to that, formatters might
add some safety mapping on the right.
:param resilient_parsing: if this flag is enabled then Click will
parse without any interactivity or callback
invocation. This is useful for implementing
things such as completion support.
:param allow_extra_args: if this is set to `True` then extra arguments
at the end will not raise an error and will be
kept on the context. The default is to inherit
from the command.
:param allow_interspersed_args: if this is set to `False` then options
and arguments cannot be mixed. The
default is to inherit from the command.
:param ignore_unknown_options: instructs click to ignore options it does
not know and keeps them for later
processing.
:param help_option_names: optionally a list of strings that define how
the default help parameter is named. The
default is ``['--help']``.
:param token_normalize_func: an optional function that is used to
normalize tokens (options, choices,
etc.). This for instance can be used to
implement case insensitive behavior.
:param color: controls if the terminal supports ANSI colors or not. The
default is autodetection. This is only needed if ANSI
codes are used in texts that Click prints which is by
default not the case. This for instance would affect
help output.
"""
def __init__(self, command, parent=None, info_name=None, obj=None,
auto_envvar_prefix=None, default_map=None,
terminal_width=None, max_content_width=None,
resilient_parsing=False, allow_extra_args=None,
allow_interspersed_args=None,
ignore_unknown_options=None, help_option_names=None,
token_normalize_func=None, color=None):
#: the parent context or `None` if none exists.
self.parent = parent
#: the :class:`Command` for this context.
self.command = command
#: the descriptive information name
self.info_name = info_name
#: the parsed parameters except if the value is hidden in which
#: case it's not remembered.
self.params = {}
#: the leftover arguments.
self.args = []
if obj is None and parent is not None:
obj = parent.obj
#: the user object stored.
self.obj = obj
self._meta = getattr(parent, 'meta', {})
#: A dictionary (-like object) with defaults for parameters.
if default_map is None \
and parent is not None \
and parent.default_map is not None:
default_map = parent.default_map.get(info_name)
self.default_map = default_map
#: This flag indicates if a subcommand is going to be executed. A
#: group callback can use this information to figure out if it's
#: being executed directly or because the execution flow passes
#: onwards to a subcommand. By default it's None, but it can be
#: the name of the subcommand to execute.
#:
#: If chaining is enabled this will be set to ``'*'`` in case
#: any commands are executed. It is however not possible to
#: figure out which ones. If you require this knowledge you
#: should use a :func:`resultcallback`.
self.invoked_subcommand = None
if terminal_width is None and parent is not None:
terminal_width = parent.terminal_width
#: The width of the terminal (None is autodetection).
self.terminal_width = terminal_width
if max_content_width is None and parent is not None:
max_content_width = parent.max_content_width
#: The maximum width of formatted content (None implies a sensible
#: default which is 80 for most things).
self.max_content_width = max_content_width
if allow_extra_args is None:
allow_extra_args = command.allow_extra_args
#: Indicates if the context allows extra args or if it should
#: fail on parsing.
#:
#: .. versionadded:: 3.0
self.allow_extra_args = allow_extra_args
if allow_interspersed_args is None:
allow_interspersed_args = command.allow_interspersed_args
#: Indicates if the context allows mixing of arguments and
#: options or not.
#:
#: .. versionadded:: 3.0
self.allow_interspersed_args = allow_interspersed_args
if ignore_unknown_options is None:
ignore_unknown_options = command.ignore_unknown_options
#: Instructs click to ignore options that a command does not
#: understand and will store it on the context for later
#: processing. This is primarily useful for situations where you
#: want to call into external programs. Generally this pattern is
        #: strongly discouraged because it's not possible to losslessly
#: forward all arguments.
#:
#: .. versionadded:: 4.0
self.ignore_unknown_options = ignore_unknown_options
if help_option_names is None:
if parent is not None:
help_option_names = parent.help_option_names
else:
help_option_names = ['--help']
#: The names for the help options.
self.help_option_names = help_option_names
if token_normalize_func is None and parent is not None:
token_normalize_func = parent.token_normalize_func
#: An optional normalization function for tokens. This is
#: options, choices, commands etc.
self.token_normalize_func = token_normalize_func
#: Indicates if resilient parsing is enabled. In that case Click
#: will do its best to not cause any failures.
self.resilient_parsing = resilient_parsing
# If there is no envvar prefix yet, but the parent has one and
# the command on this level has a name, we can expand the envvar
# prefix automatically.
if auto_envvar_prefix is None:
if parent is not None \
and parent.auto_envvar_prefix is not None and \
self.info_name is not None:
auto_envvar_prefix = '%s_%s' % (parent.auto_envvar_prefix,
self.info_name.upper())
else:
            auto_envvar_prefix = auto_envvar_prefix.upper()
self.auto_envvar_prefix = auto_envvar_prefix
if color is None and parent is not None:
color = parent.color
#: Controls if styling output is wanted or not.
self.color = color
self._close_callbacks = []
self._depth = 0
def __enter__(self):
self._depth += 1
push_context(self)
return self
def __exit__(self, exc_type, exc_value, tb):
pop_context()
self._depth -= 1
if self._depth == 0:
self.close()
@contextmanager
def scope(self, cleanup=True):
"""This helper method can be used with the context object to promote
it to the current thread local (see :func:`get_current_context`).
The default behavior of this is to invoke the cleanup functions which
can be disabled by setting `cleanup` to `False`. The cleanup
functions are typically used for things such as closing file handles.
If the cleanup is intended the context object can also be directly
used as a context manager.
Example usage::
with ctx.scope():
assert get_current_context() is ctx
This is equivalent::
with ctx:
assert get_current_context() is ctx
.. versionadded:: 5.0
:param cleanup: controls if the cleanup functions should be run or
not. The default is to run these functions. In
some situations the context only wants to be
temporarily pushed in which case this can be disabled.
Nested pushes automatically defer the cleanup.
"""
if not cleanup:
self._depth += 1
try:
with self as rv:
yield rv
finally:
if not cleanup:
self._depth -= 1
@property
def meta(self):
"""This is a dictionary which is shared with all the contexts
        that are nested. It exists so that click utilities can store some
state here if they need to. It is however the responsibility of
that code to manage this dictionary well.
The keys are supposed to be unique dotted strings. For instance
module paths are a good choice for it. What is stored in there is
irrelevant for the operation of click. However what is important is
that code that places data here adheres to the general semantics of
the system.
Example usage::
LANG_KEY = __name__ + '.lang'
def set_language(value):
ctx = get_current_context()
ctx.meta[LANG_KEY] = value
def get_language():
return get_current_context().meta.get(LANG_KEY, 'en_US')
.. versionadded:: 5.0
"""
return self._meta
def make_formatter(self):
"""Creates the formatter for the help and usage output."""
return HelpFormatter(width=self.terminal_width,
max_width=self.max_content_width)
def call_on_close(self, f):
"""This decorator remembers a function as callback that should be
executed when the context tears down. This is most useful to bind
resource handling to the script execution. For instance, file objects
opened by the :class:`File` type will register their close callbacks
here.
:param f: the function to execute on teardown.
"""
self._close_callbacks.append(f)
return f
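    # Illustrative sketch, not part of the original source: binding a
    # resource to the context teardown. The file object is a hypothetical
    # example.
    #
    #     ctx = get_current_context()
    #     f = open('report.txt', 'w')
    #     ctx.call_on_close(f.close)  # runs when the context tears down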
def close(self):
"""Invokes all close callbacks."""
for cb in self._close_callbacks:
cb()
self._close_callbacks = []
@property
def command_path(self):
"""The computed command path. This is used for the ``usage``
information on the help page. It's automatically created by
combining the info names of the chain of contexts to the root.
"""
rv = ''
if self.info_name is not None:
rv = self.info_name
if self.parent is not None:
rv = self.parent.command_path + ' ' + rv
return rv.lstrip()
def find_root(self):
"""Finds the outermost context."""
node = self
while node.parent is not None:
node = node.parent
return node
def find_object(self, object_type):
"""Finds the closest object of a given type."""
node = self
while node is not None:
if isinstance(node.obj, object_type):
return node.obj
node = node.parent
def ensure_object(self, object_type):
"""Like :meth:`find_object` but sets the innermost object to a
new instance of `object_type` if it does not exist.
"""
rv = self.find_object(object_type)
if rv is None:
self.obj = rv = object_type()
return rv
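    # Illustrative sketch, not part of the original source: a common use
    # of ensure_object is keeping per-invocation state at the root
    # context. ``AppState`` is a hypothetical user-defined class.
    #
    #     state = ctx.ensure_object(AppState)  # created on first call
    #     state = ctx.find_object(AppState)    # later lookups walk parents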
def lookup_default(self, name):
"""Looks up the default for a parameter name. This by default
looks into the :attr:`default_map` if available.
"""
if self.default_map is not None:
rv = self.default_map.get(name)
if callable(rv):
rv = rv()
return rv
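    # Illustrative sketch, not part of the original source: defaults can
    # be injected through the ``default_map`` context setting. The mapping
    # and command names below are hypothetical.
    #
    #     cli(default_map={'runserver': {'port': 5000}})
    #     # within the runserver subcommand's context:
    #     ctx.lookup_default('port')  # -> 5000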
def fail(self, message):
"""Aborts the execution of the program with a specific error
message.
:param message: the error message to fail with.
"""
raise UsageError(message, self)
def abort(self):
"""Aborts the script."""
raise Abort()
def exit(self, code=0):
"""Exits the application with a given exit code."""
sys.exit(code)
def get_usage(self):
"""Helper method to get formatted usage string for the current
context and command.
"""
return self.command.get_usage(self)
def get_help(self):
"""Helper method to get formatted help page for the current
context and command.
"""
return self.command.get_help(self)
def invoke(*args, **kwargs):
"""Invokes a command callback in exactly the way it expects. There
are two ways to invoke this method:
1. the first argument can be a callback and all other arguments and
keyword arguments are forwarded directly to the function.
2. the first argument is a click command object. In that case all
arguments are forwarded as well but proper click parameters
(options and click arguments) must be keyword arguments and Click
will fill in defaults.
Note that before Click 3.2 keyword arguments were not properly filled
in against the intention of this code and no context was created. For
more information about this change and why it was done in a bugfix
release see :ref:`upgrade-to-3.2`.
"""
self, callback = args[:2]
ctx = self
# It's also possible to invoke another command which might or
# might not have a callback. In that case we also fill
# in defaults and make a new context for this command.
if isinstance(callback, Command):
other_cmd = callback
callback = other_cmd.callback
ctx = Context(other_cmd, info_name=other_cmd.name, parent=self)
if callback is None:
raise TypeError('The given command does not have a '
'callback that can be invoked.')
for param in other_cmd.params:
if param.name not in kwargs and param.expose_value:
kwargs[param.name] = param.get_default(ctx)
args = args[2:]
with augment_usage_errors(self):
with ctx:
return callback(*args, **kwargs)
def forward(*args, **kwargs):
"""Similar to :meth:`invoke` but fills in default keyword
arguments from the current context if the other command expects
it. This cannot invoke callbacks directly, only other commands.
"""
self, cmd = args[:2]
# It's also possible to invoke another command which might or
# might not have a callback.
if not isinstance(cmd, Command):
raise TypeError('Callback is not a command.')
for param in self.params:
if param not in kwargs:
kwargs[param] = self.params[param]
return self.invoke(cmd, **kwargs)
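# Illustrative sketch, not part of the original source: the practical
# difference between Context.invoke and Context.forward. The commands
# below are hypothetical and assume the public ``click`` API.
#
#     @click.command()
#     @click.option('--count', default=1)
#     def base(count):
#         click.echo(count)
#
#     @click.command()
#     @click.option('--count', default=1)
#     @click.pass_context
#     def wrapper(ctx, count):
#         ctx.invoke(base, count=42)  # pass parameters explicitly
#         ctx.forward(base)           # reuse this command's own params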
class BaseCommand(object):
"""The base command implements the minimal API contract of commands.
Most code will never use this as it does not implement a lot of useful
functionality but it can act as the direct subclass of alternative
parsing methods that do not depend on the Click parser.
For instance, this can be used to bridge Click and other systems like
argparse or docopt.
Because base commands do not implement a lot of the API that other
parts of Click take for granted, they are not supported for all
    operations. For instance, they usually cannot be used with the
    decorators and they have no built-in callback system.
.. versionchanged:: 2.0
Added the `context_settings` parameter.
:param name: the name of the command to use unless a group overrides it.
:param context_settings: an optional dictionary with defaults that are
passed to the context object.
"""
#: the default for the :attr:`Context.allow_extra_args` flag.
allow_extra_args = False
#: the default for the :attr:`Context.allow_interspersed_args` flag.
allow_interspersed_args = True
#: the default for the :attr:`Context.ignore_unknown_options` flag.
ignore_unknown_options = False
def __init__(self, name, context_settings=None):
#: the name the command thinks it has. Upon registering a command
#: on a :class:`Group` the group will default the command name
#: with this information. You should instead use the
#: :class:`Context`\'s :attr:`~Context.info_name` attribute.
self.name = name
if context_settings is None:
context_settings = {}
#: an optional dictionary with defaults passed to the context.
self.context_settings = context_settings
def get_usage(self, ctx):
raise NotImplementedError('Base commands cannot get usage')
def get_help(self, ctx):
raise NotImplementedError('Base commands cannot get help')
def make_context(self, info_name, args, parent=None, **extra):
"""This function when given an info name and arguments will kick
off the parsing and create a new :class:`Context`. It does not
invoke the actual command callback though.
        :param info_name: the info name for this invocation. Generally this
                          is the most descriptive name for the script or
                          command. For the toplevel script it's usually
                          the name of the script, for commands below it it's
                          the name of the command.
:param args: the arguments to parse as list of strings.
:param parent: the parent context if available.
:param extra: extra keyword arguments forwarded to the context
constructor.
"""
for key, value in iteritems(self.context_settings):
if key not in extra:
extra[key] = value
ctx = Context(self, info_name=info_name, parent=parent, **extra)
with ctx.scope(cleanup=False):
self.parse_args(ctx, args)
return ctx
def parse_args(self, ctx, args):
"""Given a context and a list of arguments this creates the parser
and parses the arguments, then modifies the context as necessary.
This is automatically invoked by :meth:`make_context`.
"""
raise NotImplementedError('Base commands do not know how to parse '
'arguments.')
def invoke(self, ctx):
"""Given a context, this invokes the command. The default
implementation is raising a not implemented error.
"""
raise NotImplementedError('Base commands are not invokable by default')
def main(self, args=None, prog_name=None, complete_var=None,
standalone_mode=True, **extra):
"""This is the way to invoke a script with all the bells and
whistles as a command line application. This will always terminate
the application after a call. If this is not wanted, ``SystemExit``
needs to be caught.
This method is also available by directly calling the instance of
a :class:`Command`.
.. versionadded:: 3.0
Added the `standalone_mode` flag to control the standalone mode.
:param args: the arguments that should be used for parsing. If not
provided, ``sys.argv[1:]`` is used.
:param prog_name: the program name that should be used. By default
the program name is constructed by taking the file
name from ``sys.argv[0]``.
:param complete_var: the environment variable that controls the
bash completion support. The default is
``"_<prog_name>_COMPLETE"`` with prog name in
uppercase.
:param standalone_mode: the default behavior is to invoke the script
in standalone mode. Click will then
handle exceptions and convert them into
error messages and the function will never
return but shut down the interpreter. If
this is set to `False` they will be
propagated to the caller and the return
value of this function is the return value
of :meth:`invoke`.
:param extra: extra keyword arguments are forwarded to the context
constructor. See :class:`Context` for more information.
"""
# If we are in Python 3, we will verify that the environment is
        # sane at this point or reject further execution to avoid a
# broken script.
if not PY2:
try:
import locale
fs_enc = codecs.lookup(locale.getpreferredencoding()).name
except Exception:
fs_enc = 'ascii'
if fs_enc == 'ascii':
raise RuntimeError('Click will abort further execution '
'because Python 3 was configured to use '
'ASCII as encoding for the environment. '
'Either switch to Python 2 or consult '
'http://click.pocoo.org/python3/ '
'for mitigation steps.')
else:
_check_for_unicode_literals()
if args is None:
args = sys.argv[1:]
else:
args = list(args)
if prog_name is None:
prog_name = make_str(os.path.basename(
sys.argv and sys.argv[0] or __file__))
# Hook for the Bash completion. This only activates if the Bash
# completion is actually enabled, otherwise this is quite a fast
# noop.
_bashcomplete(self, prog_name, complete_var)
try:
try:
with self.make_context(prog_name, args, **extra) as ctx:
rv = self.invoke(ctx)
if not standalone_mode:
return rv
ctx.exit()
except (EOFError, KeyboardInterrupt):
echo(file=sys.stderr)
raise Abort()
except ClickException as e:
if not standalone_mode:
raise
e.show()
sys.exit(e.exit_code)
except Abort:
if not standalone_mode:
raise
echo('Aborted!', file=sys.stderr)
sys.exit(1)
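    # Illustrative sketch, not part of the original source: with
    # ``standalone_mode=False`` the return value of :meth:`invoke` is
    # handed back and exceptions propagate to the caller.
    #
    #     try:
    #         rv = cli.main(['--name', 'x'], standalone_mode=False)
    #     except UsageError as exc:
    #         ...  # handle the error yourself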
def __call__(self, *args, **kwargs):
"""Alias for :meth:`main`."""
return self.main(*args, **kwargs)
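# Illustrative sketch, not part of the original source: a minimal
# BaseCommand subclass that bridges to argparse, as the class docstring
# above suggests. The wiring is an assumption, not an official bridge.
#
#     import argparse
#
#     class ArgparseCommand(BaseCommand):
#         def __init__(self, name, parser, callback):
#             BaseCommand.__init__(self, name)
#             self.parser = parser      # a configured ArgumentParser
#             self.callback = callback
#
#         def parse_args(self, ctx, args):
#             ctx.params = vars(self.parser.parse_args(args))
#
#         def invoke(self, ctx):
#             return self.callback(**ctx.params)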
class Command(BaseCommand):
"""Commands are the basic building block of command line interfaces in
Click. A basic command handles command line parsing and might dispatch
more parsing to commands nested below it.
.. versionchanged:: 2.0
Added the `context_settings` parameter.
:param name: the name of the command to use unless a group overrides it.
:param context_settings: an optional dictionary with defaults that are
passed to the context object.
:param callback: the callback to invoke. This is optional.
:param params: the parameters to register with this command. This can
be either :class:`Option` or :class:`Argument` objects.
:param help: the help string to use for this command.
:param epilog: like the help string but it's printed at the end of the
help page after everything else.
:param short_help: the short help to use for this command. This is
shown on the command listing of the parent command.
:param add_help_option: by default each command registers a ``--help``
option. This can be disabled by this parameter.
"""
def __init__(self, name, context_settings=None, callback=None,
params=None, help=None, epilog=None, short_help=None,
options_metavar='[OPTIONS]', add_help_option=True):
BaseCommand.__init__(self, name, context_settings)
#: the callback to execute when the command fires. This might be
#: `None` in which case nothing happens.
self.callback = callback
#: the list of parameters for this command in the order they
#: should show up in the help page and execute. Eager parameters
#: will automatically be handled before non eager ones.
self.params = params or []
self.help = help
self.epilog = epilog
self.options_metavar = options_metavar
if short_help is None and help:
short_help = make_default_short_help(help)
self.short_help = short_help
self.add_help_option = add_help_option
def get_usage(self, ctx):
formatter = ctx.make_formatter()
self.format_usage(ctx, formatter)
return formatter.getvalue().rstrip('\n')
def get_params(self, ctx):
rv = self.params
help_option = self.get_help_option(ctx)
if help_option is not None:
rv = rv + [help_option]
return rv
def format_usage(self, ctx, formatter):
"""Writes the usage line into the formatter."""
pieces = self.collect_usage_pieces(ctx)
formatter.write_usage(ctx.command_path, ' '.join(pieces))
def collect_usage_pieces(self, ctx):
"""Returns all the pieces that go into the usage line and returns
it as a list of strings.
"""
rv = [self.options_metavar]
for param in self.get_params(ctx):
rv.extend(param.get_usage_pieces(ctx))
return rv
def get_help_option_names(self, ctx):
"""Returns the names for the help option."""
all_names = set(ctx.help_option_names)
for param in self.params:
all_names.difference_update(param.opts)
all_names.difference_update(param.secondary_opts)
return all_names
def get_help_option(self, ctx):
"""Returns the help option object."""
help_options = self.get_help_option_names(ctx)
if not help_options or not self.add_help_option:
return
def show_help(ctx, param, value):
if value and not ctx.resilient_parsing:
echo(ctx.get_help(), color=ctx.color)
ctx.exit()
return Option(help_options, is_flag=True,
is_eager=True, expose_value=False,
callback=show_help,
help='Show this message and exit.')
def make_parser(self, ctx):
"""Creates the underlying option parser for this command."""
parser = OptionParser(ctx)
parser.allow_interspersed_args = ctx.allow_interspersed_args
parser.ignore_unknown_options = ctx.ignore_unknown_options
for param in self.get_params(ctx):
param.add_to_parser(parser, ctx)
return parser
def get_help(self, ctx):
"""Formats the help into a string and returns it. This creates a
formatter and will call into the following formatting methods:
"""
formatter = ctx.make_formatter()
self.format_help(ctx, formatter)
return formatter.getvalue().rstrip('\n')
def format_help(self, ctx, formatter):
"""Writes the help into the formatter if it exists.
This calls into the following methods:
- :meth:`format_usage`
- :meth:`format_help_text`
- :meth:`format_options`
- :meth:`format_epilog`
"""
self.format_usage(ctx, formatter)
self.format_help_text(ctx, formatter)
self.format_options(ctx, formatter)
self.format_epilog(ctx, formatter)
def format_help_text(self, ctx, formatter):
"""Writes the help text to the formatter if it exists."""
if self.help:
formatter.write_paragraph()
with formatter.indentation():
formatter.write_text(self.help)
def format_options(self, ctx, formatter):
"""Writes all the options into the formatter if they exist."""
opts = []
for param in self.get_params(ctx):
rv = param.get_help_record(ctx)
if rv is not None:
opts.append(rv)
if opts:
with formatter.section('Options'):
formatter.write_dl(opts)
def format_epilog(self, ctx, formatter):
"""Writes the epilog into the formatter if it exists."""
if self.epilog:
formatter.write_paragraph()
with formatter.indentation():
formatter.write_text(self.epilog)
def parse_args(self, ctx, args):
parser = self.make_parser(ctx)
opts, args, param_order = parser.parse_args(args=args)
for param in iter_params_for_processing(
param_order, self.get_params(ctx)):
value, args = param.handle_parse_result(ctx, opts, args)
if args and not ctx.allow_extra_args and not ctx.resilient_parsing:
ctx.fail('Got unexpected extra argument%s (%s)'
% (len(args) != 1 and 's' or '',
' '.join(map(make_str, args))))
ctx.args = args
return args
def invoke(self, ctx):
"""Given a context, this invokes the attached callback (if it exists)
in the right way.
"""
if self.callback is not None:
return ctx.invoke(self.callback, **ctx.params)
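# Illustrative sketch, not part of the original source: a Command can be
# built directly without decorators; the callback and option below are
# hypothetical.
#
#     def greet(name):
#         click.echo('Hello %s!' % name)
#
#     cmd = Command('greet', callback=greet,
#                   params=[Option(['--name'], default='world')])
#     # cmd.main(['--name', 'click'])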
class MultiCommand(Command):
"""A multi command is the basic implementation of a command that
dispatches to subcommands. The most common version is the
:class:`Group`.
:param invoke_without_command: this controls how the multi command itself
is invoked. By default it's only invoked
if a subcommand is provided.
:param no_args_is_help: this controls what happens if no arguments are
provided. This option is enabled by default if
                            `invoke_without_command` is disabled, and disabled
                            if it's enabled. If enabled this will add
``--help`` as argument if no arguments are
passed.
:param subcommand_metavar: the string that is used in the documentation
to indicate the subcommand place.
:param chain: if this is set to `True` chaining of multiple subcommands
is enabled. This restricts the form of commands in that
they cannot have optional arguments but it allows
multiple commands to be chained together.
:param result_callback: the result callback to attach to this multi
command.
"""
allow_extra_args = True
allow_interspersed_args = False
def __init__(self, name=None, invoke_without_command=False,
no_args_is_help=None, subcommand_metavar=None,
chain=False, result_callback=None, **attrs):
Command.__init__(self, name, **attrs)
if no_args_is_help is None:
no_args_is_help = not invoke_without_command
self.no_args_is_help = no_args_is_help
self.invoke_without_command = invoke_without_command
if subcommand_metavar is None:
if chain:
subcommand_metavar = SUBCOMMANDS_METAVAR
else:
subcommand_metavar = SUBCOMMAND_METAVAR
self.subcommand_metavar = subcommand_metavar
self.chain = chain
#: The result callback that is stored. This can be set or
#: overridden with the :func:`resultcallback` decorator.
self.result_callback = result_callback
def collect_usage_pieces(self, ctx):
rv = Command.collect_usage_pieces(self, ctx)
rv.append(self.subcommand_metavar)
return rv
def format_options(self, ctx, formatter):
Command.format_options(self, ctx, formatter)
self.format_commands(ctx, formatter)
def resultcallback(self, replace=False):
"""Adds a result callback to the chain command. By default if a
result callback is already registered this will chain them but
this can be disabled with the `replace` parameter. The result
callback is invoked with the return value of the subcommand
(or the list of return values from all subcommands if chaining
is enabled) as well as the parameters as they would be passed
to the main callback.
Example::
@click.group()
@click.option('-i', '--input', default=23)
def cli(input):
return 42
@cli.resultcallback()
def process_result(result, input):
return result + input
.. versionadded:: 3.0
:param replace: if set to `True` an already existing result
callback will be removed.
"""
def decorator(f):
old_callback = self.result_callback
if old_callback is None or replace:
self.result_callback = f
return f
def function(__value, *args, **kwargs):
return f(old_callback(__value, *args, **kwargs),
*args, **kwargs)
self.result_callback = rv = update_wrapper(function, f)
return rv
return decorator
def format_commands(self, ctx, formatter):
"""Extra format methods for multi methods that adds all the commands
after the options.
"""
rows = []
for subcommand in self.list_commands(ctx):
cmd = self.get_command(ctx, subcommand)
# What is this, the tool lied about a command. Ignore it
if cmd is None:
continue
help = cmd.short_help or ''
rows.append((subcommand, help))
if rows:
with formatter.section('Commands'):
formatter.write_dl(rows)
def parse_args(self, ctx, args):
if not args and self.no_args_is_help and not ctx.resilient_parsing:
echo(ctx.get_help(), color=ctx.color)
ctx.exit()
return Command.parse_args(self, ctx, args)
def invoke(self, ctx):
def _process_result(value):
if self.result_callback is not None:
value = ctx.invoke(self.result_callback, value,
**ctx.params)
return value
if not ctx.args:
# If we are invoked without command the chain flag controls
# how this happens. If we are not in chain mode, the return
# value here is the return value of the command.
# If however we are in chain mode, the return value is the
# return value of the result processor invoked with an empty
# list (which means that no subcommand actually was executed).
if self.invoke_without_command:
if not self.chain:
return Command.invoke(self, ctx)
with ctx:
Command.invoke(self, ctx)
return _process_result([])
ctx.fail('Missing command.')
args = ctx.args
# If we're not in chain mode, we only allow the invocation of a
# single command but we also inform the current context about the
# name of the command to invoke.
if not self.chain:
# Make sure the context is entered so we do not clean up
# resources until the result processor has worked.
with ctx:
cmd_name, cmd, args = self.resolve_command(ctx, args)
ctx.invoked_subcommand = cmd_name
Command.invoke(self, ctx)
sub_ctx = cmd.make_context(cmd_name, args, parent=ctx)
with sub_ctx:
return _process_result(sub_ctx.command.invoke(sub_ctx))
# In chain mode we create the contexts step by step, but after the
# base command has been invoked. Because at that point we do not
# know the subcommands yet, the invoked subcommand attribute is
# set to ``*`` to inform the command that subcommands are executed
# but nothing else.
with ctx:
ctx.invoked_subcommand = args and '*' or None
Command.invoke(self, ctx)
# Otherwise we make every single context and invoke them in a
# chain. In that case the return value to the result processor
# is the list of all invoked subcommand's results.
contexts = []
while args:
cmd_name, cmd, args = self.resolve_command(ctx, args)
sub_ctx = cmd.make_context(cmd_name, args, parent=ctx,
allow_extra_args=True,
allow_interspersed_args=False)
contexts.append(sub_ctx)
args = sub_ctx.args
rv = []
for sub_ctx in contexts:
with sub_ctx:
rv.append(sub_ctx.command.invoke(sub_ctx))
return _process_result(rv)
def resolve_command(self, ctx, args):
cmd_name = make_str(args[0])
original_cmd_name = cmd_name
# Get the command
cmd = self.get_command(ctx, cmd_name)
# If we can't find the command but there is a normalization
# function available, we try with that one.
if cmd is None and ctx.token_normalize_func is not None:
cmd_name = ctx.token_normalize_func(cmd_name)
cmd = self.get_command(ctx, cmd_name)
# If we don't find the command we want to show an error message
# to the user that it was not provided. However, there is
# something else we should do: if the first argument looks like
# an option we want to kick off parsing again for arguments to
# resolve things like --help which now should go to the main
# place.
if cmd is None:
if split_opt(cmd_name)[0]:
self.parse_args(ctx, ctx.args)
ctx.fail('No such command "%s".' % original_cmd_name)
return cmd_name, cmd, args[1:]
def get_command(self, ctx, cmd_name):
"""Given a context and a command name, this returns a
:class:`Command` object if it exists or returns `None`.
"""
raise NotImplementedError()
def list_commands(self, ctx):
"""Returns a list of subcommand names in the order they should
appear.
"""
return []
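# Illustrative sketch, not part of the original source: with ``chain=True``
# several subcommands run in a single invocation and the result callback
# receives the list of their return values. Names are hypothetical.
#
#     @click.group(chain=True)
#     def pipeline():
#         pass
#
#     @pipeline.resultcallback()
#     def collect(results):
#         click.echo(results)  # e.g. invoked as: pipeline step1 step2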
class Group(MultiCommand):
"""A group allows a command to have subcommands attached. This is the
most common way to implement nesting in Click.
:param commands: a dictionary of commands.
"""
def __init__(self, name=None, commands=None, **attrs):
MultiCommand.__init__(self, name, **attrs)
#: the registered subcommands by their exported names.
self.commands = commands or {}
def add_command(self, cmd, name=None):
"""Registers another :class:`Command` with this group. If the name
is not provided, the name of the command is used.
"""
name = name or cmd.name
if name is None:
raise TypeError('Command has no name.')
self.commands[name] = cmd
def command(self, *args, **kwargs):
"""A shortcut decorator for declaring and attaching a command to
the group. This takes the same arguments as :func:`command` but
immediately registers the created command with this instance by
calling into :meth:`add_command`.
"""
def decorator(f):
cmd = command(*args, **kwargs)(f)
self.add_command(cmd)
return cmd
return decorator
def group(self, *args, **kwargs):
"""A shortcut decorator for declaring and attaching a group to
the group. This takes the same arguments as :func:`group` but
immediately registers the created command with this instance by
calling into :meth:`add_command`.
"""
def decorator(f):
cmd = group(*args, **kwargs)(f)
self.add_command(cmd)
return cmd
return decorator
def get_command(self, ctx, cmd_name):
return self.commands.get(cmd_name)
def list_commands(self, ctx):
return sorted(self.commands)
class CommandCollection(MultiCommand):
"""A command collection is a multi command that merges multiple multi
commands together into one. This is a straightforward implementation
that accepts a list of different multi commands as sources and
provides all the commands for each of them.
"""
def __init__(self, name=None, sources=None, **attrs):
MultiCommand.__init__(self, name, **attrs)
#: The list of registered multi commands.
self.sources = sources or []
def add_source(self, multi_cmd):
"""Adds a new multi command to the chain dispatcher."""
self.sources.append(multi_cmd)
def get_command(self, ctx, cmd_name):
for source in self.sources:
rv = source.get_command(ctx, cmd_name)
if rv is not None:
return rv
def list_commands(self, ctx):
rv = set()
for source in self.sources:
rv.update(source.list_commands(ctx))
return sorted(rv)
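# Illustrative sketch, not part of the original source: merging two
# hypothetical groups so that their commands appear under one CLI.
#
#     base_cli = Group(commands={'init': init_cmd})
#     extra_cli = Group(commands={'sync': sync_cmd})
#     cli = CommandCollection(sources=[base_cli, extra_cli])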
class Parameter(object):
"""A parameter to a command comes in two versions: they are either
:class:`Option`\s or :class:`Argument`\s. Other subclasses are currently
not supported by design as some of the internals for parsing are
intentionally not finalized.
Some settings are supported by both options and arguments.
.. versionchanged:: 2.0
Changed signature for parameter callback to also be passed the
parameter. In Click 2.0, the old callback format will still work,
        but it will raise a warning to give you a chance to migrate the
        code more easily.
:param param_decls: the parameter declarations for this option or
argument. This is a list of flags or argument
names.
:param type: the type that should be used. Either a :class:`ParamType`
                 or a Python type. The latter is converted into the former
automatically if supported.
:param required: controls if this is optional or not.
:param default: the default value if omitted. This can also be a callable,
in which case it's invoked when the default is needed
without any arguments.
:param callback: a callback that should be executed after the parameter
was matched. This is called as ``fn(ctx, param,
value)`` and needs to return the value. Before Click
2.0, the signature was ``(ctx, value)``.
:param nargs: the number of arguments to match. If not ``1`` the return
value is a tuple instead of single value. The default for
nargs is ``1`` (except if the type is a tuple, then it's
the arity of the tuple).
:param metavar: how the value is represented in the help page.
:param expose_value: if this is `True` then the value is passed onwards
to the command callback and stored on the context,
otherwise it's skipped.
:param is_eager: eager values are processed before non eager ones. This
                     should not be set for arguments or it will invert the
order of processing.
:param envvar: a string or list of strings that are environment variables
that should be checked.
"""
param_type_name = 'parameter'
def __init__(self, param_decls=None, type=None, required=False,
default=None, callback=None, nargs=None, metavar=None,
expose_value=True, is_eager=False, envvar=None):
self.name, self.opts, self.secondary_opts = \
self._parse_decls(param_decls or (), expose_value)
self.type = convert_type(type, default)
# Default nargs to what the type tells us if we have that
# information available.
if nargs is None:
if self.type.is_composite:
nargs = self.type.arity
else:
nargs = 1
self.required = required
self.callback = callback
self.nargs = nargs
self.multiple = False
self.expose_value = expose_value
self.default = default
self.is_eager = is_eager
self.metavar = metavar
self.envvar = envvar
@property
def human_readable_name(self):
"""Returns the human readable name of this parameter. This is the
same as the name for options, but the metavar for arguments.
"""
return self.name
def make_metavar(self):
if self.metavar is not None:
return self.metavar
metavar = self.type.get_metavar(self)
if metavar is None:
metavar = self.type.name.upper()
if self.nargs != 1:
metavar += '...'
return metavar
def get_default(self, ctx):
"""Given a context variable this calculates the default value."""
# Otherwise go with the regular default.
if callable(self.default):
rv = self.default()
else:
rv = self.default
return self.type_cast_value(ctx, rv)
def add_to_parser(self, parser, ctx):
pass
def consume_value(self, ctx, opts):
value = opts.get(self.name)
if value is None:
value = ctx.lookup_default(self.name)
if value is None:
value = self.value_from_envvar(ctx)
return value
def type_cast_value(self, ctx, value):
"""Given a value this runs it properly through the type system.
This automatically handles things like `nargs` and `multiple` as
well as composite types.
"""
if self.type.is_composite:
if self.nargs <= 1:
raise TypeError('Attempted to invoke composite type '
'but nargs has been set to %s. This is '
'not supported; nargs needs to be set to '
'a fixed value > 1.' % self.nargs)
if self.multiple:
return tuple(self.type(x or (), self, ctx) for x in value or ())
return self.type(value or (), self, ctx)
def _convert(value, level):
if level == 0:
return self.type(value, self, ctx)
return tuple(_convert(x, level - 1) for x in value or ())
return _convert(value, (self.nargs != 1) + bool(self.multiple))
def process_value(self, ctx, value):
"""Given a value and context this runs the logic to convert the
value as necessary.
"""
# If the value we were given is None we do nothing. This way
# code that calls this can easily figure out if something was
# not provided. Otherwise it would be converted into an empty
# tuple for multiple invocations which is inconvenient.
if value is not None:
return self.type_cast_value(ctx, value)
def value_is_missing(self, value):
if value is None:
return True
if (self.nargs != 1 or self.multiple) and value == ():
return True
return False
def full_process_value(self, ctx, value):
value = self.process_value(ctx, value)
if value is None:
value = self.get_default(ctx)
if self.required and self.value_is_missing(value):
raise MissingParameter(ctx=ctx, param=self)
return value
def resolve_envvar_value(self, ctx):
if self.envvar is None:
return
if isinstance(self.envvar, (tuple, list)):
for envvar in self.envvar:
rv = os.environ.get(envvar)
if rv is not None:
return rv
else:
return os.environ.get(self.envvar)
def value_from_envvar(self, ctx):
rv = self.resolve_envvar_value(ctx)
if rv is not None and self.nargs != 1:
rv = self.type.split_envvar_value(rv)
return rv
def handle_parse_result(self, ctx, opts, args):
with augment_usage_errors(ctx, param=self):
value = self.consume_value(ctx, opts)
try:
value = self.full_process_value(ctx, value)
except Exception:
if not ctx.resilient_parsing:
raise
value = None
if self.callback is not None:
try:
value = invoke_param_callback(
self.callback, ctx, self, value)
except Exception:
if not ctx.resilient_parsing:
raise
if self.expose_value:
ctx.params[self.name] = value
return value, args
def get_help_record(self, ctx):
pass
def get_usage_pieces(self, ctx):
return []
class Option(Parameter):
"""Options are usually optional values on the command line and
have some extra features that arguments don't have.
All other parameters are passed onwards to the parameter constructor.
:param show_default: controls if the default value should be shown on the
help page. Normally, defaults are not shown.
:param prompt: if set to `True` or a non empty string then the user will
be prompted for input if not set. If set to `True` the
prompt will be the option name capitalized.
:param confirmation_prompt: if set then the value will need to be confirmed
if it was prompted for.
:param hide_input: if this is `True` then the input on the prompt will be
hidden from the user. This is useful for password
input.
:param is_flag: forces this option to act as a flag. The default is
auto detection.
:param flag_value: which value should be used for this flag if it's
enabled. This is set to a boolean automatically if
the option string contains a slash to mark two options.
:param multiple: if this is set to `True` then the argument is accepted
multiple times and recorded. This is similar to ``nargs``
                     in how it works but supports an arbitrary number of
arguments.
:param count: this flag makes an option increment an integer.
:param allow_from_autoenv: if this is enabled then the value of this
parameter will be pulled from an environment
variable in case a prefix is defined on the
context.
:param help: the help string.
"""
param_type_name = 'option'
def __init__(self, param_decls=None, show_default=False,
prompt=False, confirmation_prompt=False,
hide_input=False, is_flag=None, flag_value=None,
multiple=False, count=False, allow_from_autoenv=True,
type=None, help=None, **attrs):
default_is_missing = attrs.get('default', _missing) is _missing
Parameter.__init__(self, param_decls, type=type, **attrs)
if prompt is True:
prompt_text = self.name.replace('_', ' ').capitalize()
elif prompt is False:
prompt_text = None
else:
prompt_text = prompt
self.prompt = prompt_text
self.confirmation_prompt = confirmation_prompt
self.hide_input = hide_input
# Flags
if is_flag is None:
if flag_value is not None:
is_flag = True
else:
is_flag = bool(self.secondary_opts)
if is_flag and default_is_missing:
self.default = False
if flag_value is None:
flag_value = not self.default
self.is_flag = is_flag
self.flag_value = flag_value
if self.is_flag and isinstance(self.flag_value, bool) \
and type is None:
self.type = BOOL
self.is_bool_flag = True
else:
self.is_bool_flag = False
# Counting
self.count = count
if count:
if type is None:
self.type = IntRange(min=0)
if default_is_missing:
self.default = 0
self.multiple = multiple
self.allow_from_autoenv = allow_from_autoenv
self.help = help
self.show_default = show_default
# Sanity check for stuff we don't support
if __debug__:
if self.nargs < 0:
raise TypeError('Options cannot have nargs < 0')
if self.prompt and self.is_flag and not self.is_bool_flag:
raise TypeError('Cannot prompt for flags that are not bools.')
if not self.is_bool_flag and self.secondary_opts:
raise TypeError('Got secondary option for non boolean flag.')
if self.is_bool_flag and self.hide_input \
and self.prompt is not None:
raise TypeError('Hidden input does not work with boolean '
'flag prompts.')
if self.count:
if self.multiple:
raise TypeError('Options cannot be multiple and count '
'at the same time.')
elif self.is_flag:
raise TypeError('Options cannot be count and flags at '
'the same time.')
def _parse_decls(self, decls, expose_value):
opts = []
secondary_opts = []
name = None
possible_names = []
for decl in decls:
if isidentifier(decl):
if name is not None:
raise TypeError('Name defined twice')
name = decl
else:
split_char = decl[:1] == '/' and ';' or '/'
if split_char in decl:
first, second = decl.split(split_char, 1)
first = first.rstrip()
possible_names.append(split_opt(first))
opts.append(first)
secondary_opts.append(second.lstrip())
else:
possible_names.append(split_opt(decl))
opts.append(decl)
if name is None and possible_names:
possible_names.sort(key=lambda x: len(x[0]))
name = possible_names[-1][1].replace('-', '_').lower()
if not isidentifier(name):
name = None
if name is None:
if not expose_value:
return None, opts, secondary_opts
raise TypeError('Could not determine name for option')
if not opts and not secondary_opts:
raise TypeError('No options defined but a name was passed (%s). '
'Did you mean to declare an argument instead '
'of an option?' % name)
return name, opts, secondary_opts
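    # Illustrative sketch, not part of the original source: the
    # declaration forms handled above, shown with hypothetical options.
    #
    #     Option(['--verbose', '-v'])     # name becomes ``verbose``
    #     Option(['--shout/--no-shout'])  # boolean flag with secondary opt
    #     Option(['custom_name', '--x'])  # explicit identifier wins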
def add_to_parser(self, parser, ctx):
kwargs = {
'dest': self.name,
'nargs': self.nargs,
'obj': self,
}
if self.multiple:
action = 'append'
elif self.count:
action = 'count'
else:
action = 'store'
if self.is_flag:
kwargs.pop('nargs', None)
if self.is_bool_flag and self.secondary_opts:
parser.add_option(self.opts, action=action + '_const',
const=True, **kwargs)
parser.add_option(self.secondary_opts, action=action +
'_const', const=False, **kwargs)
else:
parser.add_option(self.opts, action=action + '_const',
const=self.flag_value,
**kwargs)
else:
kwargs['action'] = action
parser.add_option(self.opts, **kwargs)
def get_help_record(self, ctx):
any_prefix_is_slash = []
def _write_opts(opts):
rv, any_slashes = join_options(opts)
if any_slashes:
any_prefix_is_slash[:] = [True]
if not self.is_flag and not self.count:
rv += ' ' + self.make_metavar()
return rv
rv = [_write_opts(self.opts)]
if self.secondary_opts:
rv.append(_write_opts(self.secondary_opts))
help = self.help or ''
extra = []
if self.default is not None and self.show_default:
extra.append('default: %s' % (
', '.join('%s' % d for d in self.default)
if isinstance(self.default, (list, tuple))
else self.default, ))
if self.required:
extra.append('required')
if extra:
help = '%s[%s]' % (help and help + ' ' or '', '; '.join(extra))
return ((any_prefix_is_slash and '; ' or ' / ').join(rv), help)
def get_default(self, ctx):
        # If we're a non boolean flag our default is more complex because
        # we need to look at all flags in the same group to figure out
        # if we're the default one in which case we return the flag
# value as default.
if self.is_flag and not self.is_bool_flag:
for param in ctx.command.params:
if param.name == self.name and param.default:
return param.flag_value
return None
return Parameter.get_default(self, ctx)
def prompt_for_value(self, ctx):
"""This is an alternative flow that can be activated in the full
value processing if a value does not exist. It will prompt the
user until a valid value exists and then returns the processed
value as result.
"""
# Calculate the default before prompting anything to be stable.
default = self.get_default(ctx)
# If this is a prompt for a flag we need to handle this
# differently.
if self.is_bool_flag:
return confirm(self.prompt, default)
return prompt(self.prompt, default=default,
hide_input=self.hide_input,
confirmation_prompt=self.confirmation_prompt,
value_proc=lambda x: self.process_value(ctx, x))
def resolve_envvar_value(self, ctx):
rv = Parameter.resolve_envvar_value(self, ctx)
if rv is not None:
return rv
if self.allow_from_autoenv and \
ctx.auto_envvar_prefix is not None:
envvar = '%s_%s' % (ctx.auto_envvar_prefix, self.name.upper())
return os.environ.get(envvar)
def value_from_envvar(self, ctx):
rv = self.resolve_envvar_value(ctx)
if rv is None:
return None
value_depth = (self.nargs != 1) + bool(self.multiple)
if value_depth > 0 and rv is not None:
rv = self.type.split_envvar_value(rv)
if self.multiple and self.nargs != 1:
rv = batch(rv, self.nargs)
return rv
def full_process_value(self, ctx, value):
if value is None and self.prompt is not None \
and not ctx.resilient_parsing:
return self.prompt_for_value(ctx)
return Parameter.full_process_value(self, ctx, value)
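# Illustrative sketch, not part of the original source: the prompt flow
# implemented above kicks in when no value arrives from the command line,
# the default map or an environment variable. Names are hypothetical.
#
#     @click.command()
#     @click.option('--password', prompt=True, hide_input=True,
#                   confirmation_prompt=True)
#     def login(password):
#         ...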
class Argument(Parameter):
"""Arguments are positional parameters to a command. They generally
provide fewer features than options but can have infinite ``nargs``
and are required by default.
All parameters are passed onwards to the parameter constructor.
"""
param_type_name = 'argument'
def __init__(self, param_decls, required=None, **attrs):
if required is None:
if attrs.get('default') is not None:
required = False
else:
required = attrs.get('nargs', 1) > 0
Parameter.__init__(self, param_decls, required=required, **attrs)
@property
def human_readable_name(self):
if self.metavar is not None:
return self.metavar
return self.name.upper()
def make_metavar(self):
if self.metavar is not None:
return self.metavar
var = self.name.upper()
if not self.required:
var = '[%s]' % var
if self.nargs != 1:
var += '...'
return var
def _parse_decls(self, decls, expose_value):
if not decls:
if not expose_value:
return None, [], []
raise TypeError('Could not determine name for argument')
if len(decls) == 1:
name = arg = decls[0]
name = name.replace('-', '_').lower()
elif len(decls) == 2:
name, arg = decls
else:
raise TypeError('Arguments take exactly one or two '
'parameter declarations, got %d' % len(decls))
return name, [arg], []
def get_usage_pieces(self, ctx):
return [self.make_metavar()]
def add_to_parser(self, parser, ctx):
parser.add_argument(dest=self.name, nargs=self.nargs,
obj=self)
# Circular dependency between decorators and core
from .decorators import command, group
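# Illustrative sketch, not part of the original source: variadic
# arguments are optional by default, per Argument.__init__ above
# (``nargs=-1`` makes ``required`` default to False).
#
#     @click.command()
#     @click.argument('files', nargs=-1)
#     def process(files):
#         for name in files:
#             click.echo(name)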
|
gameduell/duell
|
pylib/click/core.py
|
Python
|
bsd-2-clause
| 68,206 | 0.000088 |
# Copyright (c) 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
import os
import xml.dom.minidom as xml
from oslo_config import cfg
from oslo_utils import uuidutils
import six
from sahara import conductor as c
from sahara import context
from sahara.service.edp import base_engine
from sahara.service.edp import hdfs_helper as h
from sahara.service.edp.job_binaries import manager as jb_manager
from sahara.service.edp import job_utils
from sahara.service.edp.oozie import oozie as o
from sahara.service.edp.oozie.workflow_creator import workflow_factory
from sahara.service.validations.edp import job_execution as j
from sahara.utils import edp
from sahara.utils import remote
from sahara.utils import xmlutils as x
CONF = cfg.CONF
conductor = c.API
@six.add_metaclass(abc.ABCMeta)
class OozieJobEngine(base_engine.JobEngine):
def __init__(self, cluster):
self.cluster = cluster
self.plugin = job_utils.get_plugin(self.cluster)
def get_remote_client(self):
return o.RemoteOozieClient(self.get_oozie_server_uri(self.cluster),
self.get_oozie_server(self.cluster),
self.get_hdfs_user())
def get_client(self):
# by default engine will return standard oozie client implementation
return o.OozieClient(self.get_oozie_server_uri(self.cluster),
self.get_oozie_server(self.cluster))
def _get_oozie_job_params(self, hdfs_user, path_to_workflow,
oozie_params, use_hbase_lib,
scheduled_params=None, job_dir=None,
job_execution_type=None):
oozie_libpath_key = "oozie.libpath"
oozie_libpath = ""
rm_path = self.get_resource_manager_uri(self.cluster)
nn_path = self.get_name_node_uri(self.cluster)
hbase_common_lib_path = "%s%s" % (nn_path, h.HBASE_COMMON_LIB_PATH)
if use_hbase_lib:
if oozie_libpath_key in oozie_params:
oozie_libpath = "%s,%s" % (oozie_params.get(oozie_libpath_key,
""), hbase_common_lib_path)
else:
oozie_libpath = hbase_common_lib_path
if job_execution_type == "scheduled":
app_path = "oozie.coord.application.path"
job_parameters = {
"start": scheduled_params.get('start'),
"end": scheduled_params.get('end'),
"frequency": scheduled_params.get('frequency'),
"workflowAppUri": "%s%s" % (nn_path, job_dir),
app_path: "%s%s" % (nn_path, job_dir)}
else:
app_path = "oozie.wf.application.path"
job_parameters = {
app_path: "%s%s" % (nn_path, path_to_workflow)}
job_parameters["nameNode"] = nn_path
job_parameters["user.name"] = hdfs_user
job_parameters["jobTracker"] = rm_path
job_parameters[oozie_libpath_key] = oozie_libpath
job_parameters["oozie.use.system.libpath"] = "true"
# Don't let the application path be overwritten, that can't
# possibly make any sense
if app_path in oozie_params:
del oozie_params[app_path]
if oozie_libpath_key in oozie_params:
del oozie_params[oozie_libpath_key]
job_parameters.update(oozie_params)
return job_parameters
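    # Illustrative sketch, not part of the original source: for a plain
    # (non-scheduled) run the dictionary built above looks roughly like
    # this; the host names and paths are hypothetical.
    #
    #     {'oozie.wf.application.path': 'hdfs://nn:8020/user/hadoop/wf',
    #      'nameNode': 'hdfs://nn:8020',
    #      'jobTracker': 'rm:8050',
    #      'user.name': 'hadoop',
    #      'oozie.libpath': '',
    #      'oozie.use.system.libpath': 'true'}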
def _upload_workflow_file(self, where, job_dir, wf_xml, hdfs_user):
with remote.get_remote(where) as r:
h.put_file_to_hdfs(r, wf_xml, "workflow.xml", job_dir, hdfs_user)
return "%s/workflow.xml" % job_dir
def _upload_coordinator_file(self, where, job_dir, wf_xml, hdfs_user):
with remote.get_remote(where) as r:
h.put_file_to_hdfs(r, wf_xml, "coordinator.xml", job_dir,
hdfs_user)
return "%s/coordinator.xml" % job_dir
def cancel_job(self, job_execution):
if job_execution.engine_job_id is not None:
client = self.get_client()
client.kill_job(job_execution)
return client.get_job_info(job_execution)
def get_job_status(self, job_execution):
if job_execution.engine_job_id is not None:
return self.get_client().get_job_info(job_execution)
def _prepare_run_job(self, job_execution):
ctx = context.ctx()
# This will be a dictionary of tuples, (native_url, runtime_url)
# keyed by data_source id
data_source_urls = {}
prepared_job_params = {}
job = conductor.job_get(ctx, job_execution.job_id)
input_source, output_source = job_utils.get_input_output_data_sources(
job_execution, job, data_source_urls, self.cluster)
# Updated_job_configs will be a copy of job_execution.job_configs with
# any name or uuid references to data_sources resolved to paths
# assuming substitution is enabled.
# If substitution is not enabled then updated_job_configs will
# just be a reference to job_execution.job_configs to avoid a copy.
# Additional_sources will be a list of any data_sources found.
additional_sources, updated_job_configs = (
job_utils.resolve_data_source_references(job_execution.job_configs,
job_execution.id,
data_source_urls,
self.cluster)
)
job_execution = conductor.job_execution_update(
ctx, job_execution,
{"data_source_urls": job_utils.to_url_dict(data_source_urls)})
# Now that we've recorded the native urls, we can switch to the
# runtime urls
data_source_urls = job_utils.to_url_dict(data_source_urls,
runtime=True)
data_sources = additional_sources + [input_source, output_source]
job_utils.prepare_cluster_for_ds(data_sources,
self.cluster, updated_job_configs,
data_source_urls)
proxy_configs = updated_job_configs.get('proxy_configs')
configs = updated_job_configs.get('configs', {})
use_hbase_lib = configs.get('edp.hbase_common_lib', {})
# Extract all the 'oozie.' configs so that they can be set in the
# job properties file. These are config values for Oozie itself,
# not the job code
oozie_params = {}
for k in list(configs):
if k.startswith('oozie.'):
oozie_params[k] = configs[k]
external_hdfs_urls = self._resolve_external_hdfs_urls(
job_execution.job_configs)
for url in external_hdfs_urls:
h.configure_cluster_for_hdfs(self.cluster, url)
hdfs_user = self.get_hdfs_user()
# TODO(tmckay): this should probably be "get_namenode"
# but that call does not exist in the oozie engine api now.
oozie_server = self.get_oozie_server(self.cluster)
wf_dir = self._create_hdfs_workflow_dir(oozie_server, job)
self._upload_job_files_to_hdfs(oozie_server, wf_dir, job, configs,
proxy_configs)
wf_xml = workflow_factory.get_workflow_xml(
job, self.cluster, updated_job_configs,
input_source, output_source,
hdfs_user, data_source_urls)
path_to_workflow = self._upload_workflow_file(oozie_server, wf_dir,
wf_xml, hdfs_user)
prepared_job_params['context'] = ctx
prepared_job_params['hdfs_user'] = hdfs_user
prepared_job_params['path_to_workflow'] = path_to_workflow
prepared_job_params['use_hbase_lib'] = use_hbase_lib
prepared_job_params['job_execution'] = job_execution
prepared_job_params['oozie_params'] = oozie_params
prepared_job_params['wf_dir'] = wf_dir
prepared_job_params['oozie_server'] = oozie_server
return prepared_job_params
def run_job(self, job_execution):
prepared_job_params = self._prepare_run_job(job_execution)
path_to_workflow = prepared_job_params['path_to_workflow']
hdfs_user = prepared_job_params['hdfs_user']
oozie_params = prepared_job_params['oozie_params']
use_hbase_lib = prepared_job_params['use_hbase_lib']
ctx = prepared_job_params['context']
job_execution = prepared_job_params['job_execution']
job_params = self._get_oozie_job_params(hdfs_user,
path_to_workflow,
oozie_params,
use_hbase_lib)
client = self.get_client()
oozie_job_id = client.add_job(x.create_hadoop_xml(job_params),
job_execution)
job_execution = conductor.job_execution_get(ctx, job_execution.id)
if job_execution.info['status'] == edp.JOB_STATUS_TOBEKILLED:
return (None, edp.JOB_STATUS_KILLED, None)
conductor.job_execution_update(
context.ctx(), job_execution.id,
{'info': {'status': edp.JOB_STATUS_READYTORUN},
'engine_job_id': oozie_job_id})
client.run_job(job_execution, oozie_job_id)
try:
status = client.get_job_info(job_execution, oozie_job_id)['status']
except Exception:
status = None
return (oozie_job_id, status, None)
def run_scheduled_job(self, job_execution):
prepared_job_params = self._prepare_run_job(job_execution)
oozie_server = prepared_job_params['oozie_server']
wf_dir = prepared_job_params['wf_dir']
hdfs_user = prepared_job_params['hdfs_user']
oozie_params = prepared_job_params['oozie_params']
use_hbase_lib = prepared_job_params['use_hbase_lib']
ctx = prepared_job_params['context']
job_execution = prepared_job_params['job_execution']
coord_configs = {"jobTracker": "${jobTracker}",
"nameNode": "${nameNode}"}
coord_xml = self._create_coordinator_xml(coord_configs)
self._upload_coordinator_file(oozie_server, wf_dir, coord_xml,
hdfs_user)
job_params = self._get_oozie_job_params(
hdfs_user, None, oozie_params, use_hbase_lib,
job_execution.job_configs.job_execution_info, wf_dir,
"scheduled")
client = self.get_client()
oozie_job_id = client.add_job(x.create_hadoop_xml(job_params),
job_execution)
job_execution = conductor.job_execution_get(ctx, job_execution.id)
if job_execution.info['status'] == edp.JOB_STATUS_TOBEKILLED:
return (None, edp.JOB_STATUS_KILLED, None)
try:
status = client.get_job_status(job_execution,
oozie_job_id)['status']
except Exception:
status = None
return (oozie_job_id, status, None)
@abc.abstractmethod
def get_hdfs_user(self):
pass
@abc.abstractmethod
def create_hdfs_dir(self, remote, dir_name):
pass
@abc.abstractmethod
def get_oozie_server_uri(self, cluster):
pass
@abc.abstractmethod
def get_oozie_server(self, cluster):
pass
@abc.abstractmethod
def get_name_node_uri(self, cluster):
pass
@abc.abstractmethod
def get_resource_manager_uri(self, cluster):
pass
def validate_job_execution(self, cluster, job, data):
# Shell job type requires no specific fields
if job.type == edp.JOB_TYPE_SHELL:
return
# All other types except Java require input and output
# objects and Java require main class
if job.type == edp.JOB_TYPE_JAVA:
j.check_main_class_present(data, job)
else:
j.check_data_sources(data, job)
job_type, subtype = edp.split_job_type(job.type)
if job_type == edp.JOB_TYPE_MAPREDUCE and (
subtype == edp.JOB_SUBTYPE_STREAMING):
j.check_streaming_present(data, job)
@staticmethod
def get_possible_job_config(job_type):
return workflow_factory.get_possible_job_config(job_type)
@staticmethod
def get_supported_job_types():
return [edp.JOB_TYPE_HIVE,
edp.JOB_TYPE_JAVA,
edp.JOB_TYPE_MAPREDUCE,
edp.JOB_TYPE_MAPREDUCE_STREAMING,
edp.JOB_TYPE_PIG,
edp.JOB_TYPE_SHELL]
def _prepare_job_binaries(self, job_binaries, r):
for jb in job_binaries:
jb_manager.JOB_BINARIES.get_job_binary_by_url(jb.url). \
prepare_cluster(jb, remote=r)
def _upload_job_files_to_hdfs(self, where, job_dir, job, configs,
proxy_configs=None):
mains = list(job.mains) if job.mains else []
libs = list(job.libs) if job.libs else []
builtin_libs = edp.get_builtin_binaries(job, configs)
uploaded_paths = []
hdfs_user = self.get_hdfs_user()
job_dir_suffix = 'lib' if job.type != edp.JOB_TYPE_SHELL else ''
lib_dir = os.path.join(job_dir, job_dir_suffix)
with remote.get_remote(where) as r:
job_binaries = mains + libs
self._prepare_job_binaries(job_binaries, r)
# upload mains
uploaded_paths.extend(self._upload_job_binaries(r, mains,
proxy_configs,
hdfs_user,
job_dir))
# upload libs
if len(libs) and job_dir_suffix:
# HDFS 2.2.0 fails to put file if the lib dir does not exist
self.create_hdfs_dir(r, lib_dir)
uploaded_paths.extend(self._upload_job_binaries(r, libs,
proxy_configs,
hdfs_user,
lib_dir))
            # upload builtin_libs
for lib in builtin_libs:
h.put_file_to_hdfs(r, lib['raw'], lib['name'], lib_dir,
hdfs_user)
                uploaded_paths.append(os.path.join(lib_dir, lib['name']))
return uploaded_paths
def _upload_job_binaries(self, r, job_binaries, proxy_configs,
hdfs_user, job_dir):
uploaded_paths = []
for jb in job_binaries:
path = jb_manager.JOB_BINARIES. \
get_job_binary_by_url(jb.url). \
copy_binary_to_cluster(jb, proxy_configs=proxy_configs,
remote=r, context=context.ctx())
h.copy_from_local(r, path, job_dir, hdfs_user)
uploaded_paths.append(path)
return uploaded_paths
def _create_hdfs_workflow_dir(self, where, job):
constructed_dir = '/user/%s/' % self.get_hdfs_user()
constructed_dir = self._add_postfix(constructed_dir)
constructed_dir += '%s/%s' % (job.name, uuidutils.generate_uuid())
with remote.get_remote(where) as r:
self.create_hdfs_dir(r, constructed_dir)
return constructed_dir
def _create_coordinator_xml(self, coord_configs, config_filter=None,
appname='coord'):
doc = xml.Document()
# Create the <coordinator-app> base element
coord = doc.createElement('coordinator-app')
coord.attributes['name'] = appname
coord.attributes['start'] = "${start}"
coord.attributes['end'] = "${end}"
coord.attributes['frequency'] = "${frequency}"
coord.attributes['timezone'] = 'UTC'
coord.attributes['xmlns'] = 'uri:oozie:coordinator:0.2'
doc.appendChild(coord)
action = doc.createElement('action')
workflow = doc.createElement('workflow')
coord.appendChild(action)
action.appendChild(workflow)
x.add_text_element_to_tag(doc, "workflow", 'app-path',
"${workflowAppUri}")
configuration = doc.createElement('configuration')
workflow.appendChild(configuration)
default_configs = []
if config_filter is not None:
default_configs = [cfg['name'] for cfg in config_filter]
for name in sorted(coord_configs):
if name in default_configs or config_filter is None:
x.add_property_to_configuration(doc, name, coord_configs[name])
# Return newly created XML
return doc.toprettyxml(indent=" ")
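    # A sketch of the document produced above (structure only, values are
    # illustrative):
    #
    #   <coordinator-app name="coord" start="${start}" end="${end}"
    #                    frequency="${frequency}" timezone="UTC"
    #                    xmlns="uri:oozie:coordinator:0.2">
    #     <action>
    #       <workflow>
    #         <app-path>${workflowAppUri}</app-path>
    #         <configuration>
    #           <property><name>...</name><value>...</value></property>
    #         </configuration>
    #       </workflow>
    #     </action>
    #   </coordinator-app>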
def _add_postfix(self, constructed_dir):
def _append_slash_if_needed(path):
if path[-1] != '/':
path += '/'
return path
constructed_dir = _append_slash_if_needed(constructed_dir)
if CONF.job_workflow_postfix:
constructed_dir = ''.join([str(constructed_dir),
str(CONF.job_workflow_postfix)])
return _append_slash_if_needed(constructed_dir)
def _resolve_external_hdfs_urls(self, job_configs):
external_hdfs_urls = []
for k, v in six.iteritems(job_configs.get('configs', {})):
if isinstance(v, six.string_types) and v.startswith("hdfs://"):
external_hdfs_urls.append(v)
for k, v in six.iteritems(job_configs.get('params', {})):
if isinstance(v, six.string_types) and v.startswith("hdfs://"):
external_hdfs_urls.append(v)
for v in job_configs.get('args', []):
if isinstance(v, six.string_types) and v.startswith("hdfs://"):
external_hdfs_urls.append(v)
return external_hdfs_urls
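    # For illustration: job_configs such as {'configs': {'input':
    # 'hdfs://namenode:8020/data'}} yields ['hdfs://namenode:8020/data'].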
def suspend_job(self, job_execution):
return self._manage_job(job_execution, edp.JOB_ACTION_SUSPEND)
def _manage_job(self, job_execution, action):
if job_execution.oozie_job_id is not None:
client = self.get_client()
if action == edp.JOB_ACTION_SUSPEND:
client.suspend_job(job_execution)
return client.get_job_status(job_execution)
|
openstack/sahara
|
sahara/service/edp/oozie/engine.py
|
Python
|
apache-2.0
| 19,118 | 0 |
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.md'), encoding='utf-8') as f:
README = f.read()
with open(os.path.join(here, 'CHANGES.txt'), encoding='utf-8') as f:
CHANGES = f.read()
setup(
name='sloth',
version='0.1',
description='',
long_description=README,
license='AGPLv3',
# TODO: add author info
#author='',
#author_email='',
url='https://bitbucket.org/pride/sloth/',
# TODO: add keywords
#keywords='',
install_requires = ['python-dateutil', 'arrow'],
classifiers = [
"License :: OSI Approved :: GNU Affero General Public License v3"
"Operating System :: MacOS :: MacOS X",
"Operating System :: Microsoft :: Windows",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
],
packages=find_packages(include=['sloth']),
include_package_data=True,
zip_safe=False,
entry_points="""\
[console_scripts]
sloth-game = sloth.start:run
""",
)
|
Lvl4Sword/Acedia
|
setup.py
|
Python
|
agpl-3.0
| 1,191 | 0.005877 |
#!/usr/bin/python
import unittest
from biosignalformat import *
class TestBaseObjects(unittest.TestCase):
def test_MinimalExperiment(self):
provider = XArchiveProvider("experiment001.7z")
#provider = ZipArchiveProvider("experiment001.zip")
experiment = Experiment({
"name": "Exp!",
"description": "blah!"
})
experiment.setArchiver(provider)
experiment.write()
with self.assertRaises(Exception):
experiment.remove(provider)
metadata = experiment.readMetadata(provider)
self.assertEqual(metadata["name"], "Exp!")
self.assertEqual(metadata["description"], "blah!")
def test_MinimalStructure7z(self):
provider = XArchiveProvider("experiment002B.7z")
experiment = Experiment({
"name": "Exp!",
"description": "blah!"
})
experiment.setArchiver(provider)
subject = Subject({
"name": "Subject001",
"description": "description-subject!"
})
experiment.addSubject(subject)
session = Session({
"name": "Subject001-Session001",
"description": "description-subject-session!"
})
subject.addSession(session)
channel = Channel({
"name": "AF8"
})
session.addChannel(channel)
#channelDataset.rawData = [1e-1222, 2.344, 3.14159265358979323846264338327950288419716939937510582097494459230781640629]
channel.setData([c/1e-12 for c in range(500000)])
experiment.write()
metadata = experiment.readMetadata()
self.assertEqual(metadata["name"], "Exp!")
self.assertEqual(metadata["description"], "blah!")
def test_MinimalStructureZip(self):
provider = ZipArchiveProvider("experiment002.zip")
experiment = Experiment({
"name": "Exp!",
"description": "blah!"
})
experiment.setArchiver(provider)
subject = Subject({
"name": "Subject001",
"description": "description-subject!"
})
experiment.addSubject(subject)
session = Session({
"name": "Subject001-Session001",
"description": "description-subject-session!"
})
subject.addSession(session)
channel = Channel({
"name": "AF8"
})
session.addChannel(channel)
#channelDataset.rawData = [1e-1222, 2.344, 3.14159265358979323846264338327950288419716939937510582097494459230781640629]
channel.setData([c/1e-12 for c in range(500000)])
experiment.write()
metadata = experiment.readMetadata()
self.assertEqual(metadata["name"], "Exp!")
self.assertEqual(metadata["description"], "blah!")
class TestPlugins(unittest.TestCase):
def test_plugins(self):
from biosignalformat.external import sample
self.assertEqual(sample.ConstantVariable, 12)
class TestConverters(unittest.TestCase):
def test_single_edf(self):
from biosignalformat.external import base_converter
#importer = base_converter.EDFImporter("ExampleEDF.edf", SevenZipArchiveProvider("ExampleEDFAscii.bif.7z"))
importer = base_converter.EDFImporter("ExampleEDF.edf", XArchiveProvider("ExampleEDFAscii.bif.zip"))
importer.convert()
def atest_multiple_edf(self):
from biosignalformat.external import base_converter
        importer = base_converter.EDFImporter("ExampleEDF.edf", XArchiveProvider("ExampleMultipleEDFAscii.bif.7z"))
#importer = base_converter.EDFImporter("ExampleEDF.edf", ZipArchiveProvider("ExampleMultipleEDFAscii.bif.zip"))
importer.convert()
importer2 = base_converter.EDFImporter("ExampleEDF2.edf", experiment=importer.experiment, subject=importer.subject)
importer2.convert()
importer3 = base_converter.EDFImporter("ExampleEDF2.edf", experiment=importer.experiment)
importer3.convert()
def test_single_bdf(self):
from biosignalformat.external import base_converter
#importer = base_converter.BDFImporter("ExampleBDF.bdf", SevenZipArchiveProvider("ExampleBDFAscii.bif.7z"))
importer = base_converter.BDFImporter("ExampleBDF.bdf", XArchiveProvider("ExampleBDFAscii.bif.zip"))
importer.convert()
def test_multiple_bdf(self):
from biosignalformat.external import base_converter
#importer = base_converter.EDFImporter("ExampleBDF.bdf", SevenZipArchiveProvider("ExampleMultipleBDFAscii.bif.7z"))
importer = base_converter.EDFImporter("ExampleBDF.bdf", XArchiveProvider("ExampleMultipleBDFAscii-3.bif.zip"))
importer.convert()
importer2 = base_converter.EDFImporter("ExampleBDF.bdf", experiment=importer.experiment, subject=importer.subject)
importer2.convert()
importer3 = base_converter.EDFImporter("ExampleBDF.bdf", experiment=importer.experiment)
importer3.convert()
def test_all():
test_loader = unittest.TestLoader()
#unittest.TextTestRunner(verbosity=2).run(test_loader.loadTestsFromTestCase(TestBaseObjects))
#unittest.TextTestRunner(verbosity=2).run(test_loader.loadTestsFromTestCase(TestPlugins))
unittest.TextTestRunner(verbosity=2).run(test_loader.loadTestsFromTestCase(TestConverters))
|
marcoapintoo/Biosignal-Intermediate-Format
|
biosignalformat/test.py
|
Python
|
apache-2.0
| 5,325 | 0.005446 |
from datetime import date, datetime
from mock import patch, MagicMock
from unittest import TestCase
from django_mock_queries import utils, constants
class TestUtils(TestCase):
def test_merge_concatenates_lists(self):
l1 = [1, 2, 3]
l2 = [4, 5, 6]
result = utils.merge(l1, l2)
for x in (l1 + l2):
assert x in result
def test_merge_eliminates_duplicate_entries(self):
l1 = [1, 2]
l2 = [2, 3]
result = utils.merge(l1, l2)
for x in (l1 + l2):
assert result.count(x) == 1
def test_intersect_creates_list_with_common_elements(self):
l1 = [1, 2]
l2 = [2, 3]
result = utils.intersect(l1, l2)
for x in (l1 + l2):
if x in l1 and x in l2:
assert x in result
else:
assert x not in result
def test_get_attribute_returns_value_with_default_comparison(self):
obj = MagicMock(foo='test')
value, comparison = utils.get_attribute(obj, 'foo')
assert value == 'test'
assert comparison is None
def test_get_attribute_returns_value_with_defined_comparison(self):
obj = MagicMock(foo='test')
value, comparison = utils.get_attribute(obj, 'foo__' + constants.COMPARISON_IEXACT)
assert value == 'test'
assert comparison == constants.COMPARISON_IEXACT
def test_get_attribute_returns_none_with_isnull_comparison(self):
obj = MagicMock(foo=None)
value, comparison = utils.get_attribute(obj, 'foo__' + constants.COMPARISON_ISNULL)
assert value is None
assert comparison == constants.COMPARISON_ISNULL, comparison
def test_get_attribute_returns_nested_object_value(self):
obj = MagicMock(child=MagicMock(foo='test'))
value, comparison = utils.get_attribute(obj, 'child__foo__' + constants.COMPARISON_IEXACT)
assert value == 'test'
assert comparison == constants.COMPARISON_IEXACT
def test_get_attribute_returns_default_value_when_object_is_none(self):
obj = None
default_value = ''
value, comparison = utils.get_attribute(obj, 'foo', default_value)
assert value == default_value
assert comparison is None
def test_get_attribute_with_date(self):
obj = MagicMock(foo=date(2017, 12, 31))
value, comparison = utils.get_attribute(
obj, 'foo__' + constants.COMPARISON_YEAR + '__' + constants.COMPARISON_GT
)
assert value == date(2017, 12, 31)
assert comparison == (constants.COMPARISON_YEAR, constants.COMPARISON_GT)
def test_get_attribute_returns_tuple_with_exact_as_default_comparison(self):
obj = MagicMock(foo=datetime(2017, 1, 1))
value, comparison = utils.get_attribute(obj, 'foo__' + constants.COMPARISON_YEAR)
assert value == datetime(2017, 1, 1)
assert comparison == (constants.COMPARISON_YEAR, constants.COMPARISON_EXACT)
def test_validate_date_or_datetime_raises_value_error(self):
with self.assertRaisesRegexp(ValueError, r'13 is incorrect value for month'):
utils.validate_date_or_datetime(13, constants.COMPARISON_MONTH)
def test_is_match_equality_check_when_comparison_none(self):
result = utils.is_match(1, 1)
assert result is True
result = utils.is_match('a', 'a')
assert result is True
result = utils.is_match(1, '1')
assert result is False
def test_is_match_case_sensitive_equality_check(self):
result = utils.is_match('a', 'A', constants.COMPARISON_EXACT)
assert result is False
result = utils.is_match('a', 'a', constants.COMPARISON_EXACT)
assert result is True
def test_is_match_case_insensitive_equality_check(self):
result = utils.is_match('a', 'A', constants.COMPARISON_IEXACT)
assert result is True
result = utils.is_match('a', 'a', constants.COMPARISON_IEXACT)
assert result is True
def test_is_match_case_sensitive_contains_check(self):
result = utils.is_match('abc', 'A', constants.COMPARISON_CONTAINS)
assert result is False
result = utils.is_match('abc', 'a', constants.COMPARISON_CONTAINS)
assert result is True
def test_is_match_case_insensitive_contains_check(self):
result = utils.is_match('abc', 'A', constants.COMPARISON_ICONTAINS)
assert result is True
result = utils.is_match('abc', 'a', constants.COMPARISON_ICONTAINS)
assert result is True
def test_is_match_startswith_check(self):
result = utils.is_match('abc', 'a', constants.COMPARISON_STARTSWITH)
assert result is True
result = utils.is_match('abc', 'A', constants.COMPARISON_STARTSWITH)
assert result is False
def test_is_match_istartswith_check(self):
result = utils.is_match('abc', 'a', constants.COMPARISON_ISTARTSWITH)
assert result is True
result = utils.is_match('abc', 'A', constants.COMPARISON_ISTARTSWITH)
assert result is True
def test_is_match_endswith_check(self):
result = utils.is_match('abc', 'c', constants.COMPARISON_ENDSWITH)
assert result is True
result = utils.is_match('abc', 'C', constants.COMPARISON_ENDSWITH)
assert result is False
def test_is_match_iendswith_check(self):
result = utils.is_match('abc', 'c', constants.COMPARISON_IENDSWITH)
assert result is True
result = utils.is_match('abc', 'C', constants.COMPARISON_IENDSWITH)
assert result is True
def test_is_match_greater_than_value_check(self):
result = utils.is_match(5, 3, constants.COMPARISON_GT)
assert result is True
result = utils.is_match(3, 5, constants.COMPARISON_GT)
assert result is False
def test_is_match_greater_than_equal_to_value_check(self):
result = utils.is_match(5, 3, constants.COMPARISON_GTE)
assert result is True
result = utils.is_match(5, 5, constants.COMPARISON_GTE)
assert result is True
result = utils.is_match(3, 5, constants.COMPARISON_GTE)
assert result is False
def test_is_match_less_than_value_check(self):
result = utils.is_match(1, 2, constants.COMPARISON_LT)
assert result is True
result = utils.is_match(2, 2, constants.COMPARISON_LT)
assert result is False
def test_is_match_less_than_equal_to_value_check(self):
result = utils.is_match(1, 2, constants.COMPARISON_LTE)
assert result is True
result = utils.is_match(1, 1, constants.COMPARISON_LTE)
assert result is True
result = utils.is_match(2, 1, constants.COMPARISON_LTE)
assert result is False
def test_is_match_isnull_check(self):
result = utils.is_match(1, True, constants.COMPARISON_ISNULL)
assert result is False
result = utils.is_match(1, False, constants.COMPARISON_ISNULL)
assert result is True
result = utils.is_match(None, True, constants.COMPARISON_ISNULL)
assert result is True
result = utils.is_match(None, False, constants.COMPARISON_ISNULL)
assert result is False
result = utils.is_match(None, 1, constants.COMPARISON_ISNULL)
assert result is True
def test_is_match_in_value_check(self):
result = utils.is_match(2, [1, 3], constants.COMPARISON_IN)
assert result is False
result = utils.is_match(1, [1, 3], constants.COMPARISON_IN)
assert result is True
@patch('django_mock_queries.utils.get_attribute')
@patch('django_mock_queries.utils.is_match', MagicMock(return_value=True))
def test_matches_includes_object_in_results_when_match(self, get_attr_mock):
source = [
MagicMock(foo=1),
MagicMock(foo=2),
]
get_attr_mock.return_value = None, None
results = utils.matches(*source, foo__gt=0)
for x in source:
assert x in results
@patch('django_mock_queries.utils.get_attribute')
@patch('django_mock_queries.utils.is_match', MagicMock(return_value=False))
def test_matches_excludes_object_from_results_when_not_match(self, get_attr_mock):
source = [
MagicMock(foo=1),
MagicMock(foo=2),
]
get_attr_mock.return_value = None, None
results = utils.matches(*source, foo__gt=5)
for x in source:
assert x not in results
def test_is_match_regex(self):
result = utils.is_match('Monty Python 1234', r'M\w+\sPython\s\d+', constants.COMPARISON_REGEX)
assert result is True
result = utils.is_match('Monty Python 1234', r'm\w+\spython\s\d+', constants.COMPARISON_REGEX)
assert result is False
result = utils.is_match('Monty Python 1234', r'm\w+Holy Grail\s\d+', constants.COMPARISON_REGEX)
assert result is False
def test_is_match_iregex(self):
result = utils.is_match('Monty Python 1234', r'M\w+\sPython\s\d+', constants.COMPARISON_IREGEX)
assert result is True
result = utils.is_match('Monty Python 1234', r'm\w+\spython\s\d+', constants.COMPARISON_IREGEX)
assert result is True
result = utils.is_match('Monty Python 1234', r'm\w+Holy Grail\s\d+', constants.COMPARISON_IREGEX)
assert result is False
def test_is_match_processes_datetime_field(self):
result = utils.is_match(datetime(2017, 1, 1, 2, 3, 4), 1, (constants.COMPARISON_HOUR, constants.COMPARISON_LT))
assert result is False
def test_is_match_processes_date_field(self):
result = utils.is_match(date(2017, 1, 1), 2016, (constants.COMPARISON_YEAR, constants.COMPARISON_GT))
assert result is True
def test_is_match_range_date_and_datetime(self):
result = utils.is_match(date(2017, 1, 1), (date(2017, 1, 1), date(2017, 1, 2)), constants.COMPARISON_RANGE)
assert result is True
result = utils.is_match(
datetime(2017, 1, 1, 0, 0, 0),
(datetime(2017, 1, 1, 0, 0, 0), datetime(2017, 1, 1, 0, 0, 1)),
constants.COMPARISON_RANGE
)
assert result is True
result = utils.is_match(date(2017, 1, 1), (date(2017, 1, 2), date(2017, 1, 3)), constants.COMPARISON_RANGE)
assert result is False
result = utils.is_match(
datetime(2015, 1, 1, 0, 0, 0),
(datetime(2015, 1, 1, 0, 0, 1), datetime(2015, 1, 1, 0, 0, 2)),
constants.COMPARISON_RANGE
)
assert result is False
def test_is_match_range_numeric(self):
result = utils.is_match(2, (2, 3), constants.COMPARISON_RANGE)
assert result is True
result = utils.is_match(1, (2, 3), constants.COMPARISON_RANGE)
assert result is False
def test_is_match_range_string(self):
result = utils.is_match('b', ('b', 'c'), constants.COMPARISON_RANGE)
assert result is True
result = utils.is_match('a', ('b', 'c'), constants.COMPARISON_RANGE)
assert result is False
|
szykin/django-mock-queries
|
tests/test_utils.py
|
Python
|
mit
| 11,117 | 0.001889 |
import os
import time

import talkey
import vlc
from gtts import gTTS
class Speaker:
def __init__(self):
        self.engine = talkey.Talkey()
def say(self, text_to_say):
self.engine.say(text_to_say)
def google_say(self, text_to_say, fname="1.mp3"):
tts = gTTS(text=text_to_say, lang="en")
tts.save(fname)
        self.player = vlc.MediaPlayer(fname)
        self.player.play()
        # Playback is asynchronous; give it a moment to start, then wait
        # until it finishes before stopping and removing the temp file.
        time.sleep(0.5)
        while self.player.is_playing():
            time.sleep(0.1)
        self.player.stop()
        os.remove(fname)
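# A minimal usage sketch (hypothetical text and file name; assumes the
# talkey, gTTS and python-vlc backends are installed and working):
#
#     speaker = Speaker()
#     speaker.say("Hello there")                   # offline TTS via talkey
#     speaker.google_say("Hello there", "hi.mp3")  # online TTS via gTTS + VLC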
|
codebhendi/alfred-bot
|
speaker.py
|
Python
|
mit
| 490 | 0.004082 |
# This file is part of cloud-init. See LICENSE file for license information.
"""cloud-init Integration Test Verify Script."""
from tests.cloud_tests.testcases import base
class TestSshKeysGenerate(base.CloudTestCase):
"""Test ssh keys module."""
# TODO: Check cloud-init-output for the correct keys being generated
def test_dsa_public(self):
"""Test dsa public key not generated."""
out = self.get_data_file('dsa_public')
self.assertEqual('', out)
def test_dsa_private(self):
"""Test dsa private key not generated."""
out = self.get_data_file('dsa_private')
self.assertEqual('', out)
def test_rsa_public(self):
"""Test rsa public key not generated."""
out = self.get_data_file('rsa_public')
self.assertEqual('', out)
def test_rsa_private(self):
"""Test rsa public key not generated."""
out = self.get_data_file('rsa_private')
self.assertEqual('', out)
def test_ecdsa_public(self):
"""Test ecdsa public key generated."""
out = self.get_data_file('ecdsa_public')
self.assertIsNotNone(out)
def test_ecdsa_private(self):
"""Test ecdsa public key generated."""
out = self.get_data_file('ecdsa_private')
self.assertIsNotNone(out)
def test_ed25519_public(self):
"""Test ed25519 public key generated."""
out = self.get_data_file('ed25519_public')
self.assertIsNotNone(out)
def test_ed25519_private(self):
"""Test ed25519 public key generated."""
out = self.get_data_file('ed25519_private')
self.assertIsNotNone(out)
# vi: ts=4 expandtab
|
larsks/cloud-init
|
tests/cloud_tests/testcases/modules/ssh_keys_generate.py
|
Python
|
gpl-3.0
| 1,674 | 0 |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Contributor.zipcode_short'
db.add_column(u'tx_tecreports_contributor', 'zipcode_short',
self.gf('django.db.models.fields.CharField')(max_length=5, null=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Contributor.zipcode_short'
db.delete_column(u'tx_tecreports_contributor', 'zipcode_short')
models = {
u'tx_tecreports.contributionsbyamount': {
'Meta': {'ordering': "['low']", 'object_name': 'ContributionsByAmount'},
'amount': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'high': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'low': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'name': ('tx_tecreports.fields.MaxCharField', [], {'max_length': '250'}),
'report': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stats_by_amount'", 'to': u"orm['tx_tecreports.Report']"}),
'total': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
u'tx_tecreports.contributionsbydate': {
'Meta': {'ordering': "['date']", 'object_name': 'ContributionsByDate'},
'amount': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'date': ('django.db.models.fields.DateField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'report': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stats_by_date'", 'to': u"orm['tx_tecreports.Report']"}),
'total': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
u'tx_tecreports.contributionsbystate': {
'Meta': {'ordering': "['-amount']", 'object_name': 'ContributionsByState'},
'amount': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'report': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stats_by_state'", 'to': u"orm['tx_tecreports.Report']"}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
'total': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
u'tx_tecreports.contributionsbyzipcode': {
'Meta': {'ordering': "['-amount']", 'object_name': 'ContributionsByZipcode'},
'amount': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'report': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stats_by_zipcode'", 'to': u"orm['tx_tecreports.Report']"}),
'total': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'zipcode': ('django.db.models.fields.CharField', [], {'max_length': '250'})
},
u'tx_tecreports.contributor': {
'Meta': {'object_name': 'Contributor'},
'address_1': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'address_2': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'city': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'first_name': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_entity': ('django.db.models.fields.BooleanField', [], {}),
'is_individual': ('django.db.models.fields.BooleanField', [], {}),
'last_name': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'state': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'suffix': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'title': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'type_of': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'contributors'", 'to': u"orm['tx_tecreports.ContributorType']"}),
'zipcode': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'zipcode_short': ('django.db.models.fields.CharField', [], {'max_length': '5', 'null': 'True'})
},
u'tx_tecreports.contributortype': {
'Meta': {'object_name': 'ContributorType'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('tx_tecreports.fields.MaxCharField', [], {'max_length': '250'})
},
u'tx_tecreports.employer': {
'Meta': {'object_name': 'Employer'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('tx_tecreports.fields.MaxCharField', [], {'max_length': '250'})
},
u'tx_tecreports.filer': {
'Meta': {'object_name': 'Filer'},
'filer_id': ('tx_tecreports.fields.MaxCharField', [], {'max_length': '250'}),
'filer_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'filers'", 'to': u"orm['tx_tecreports.FilerType']"}),
'first_name': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'name_prefix': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'name_suffix': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'nickname': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'})
},
u'tx_tecreports.filertype': {
'Meta': {'object_name': 'FilerType'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('tx_tecreports.fields.MaxCharField', [], {'max_length': '250'})
},
u'tx_tecreports.filing': {
'Meta': {'object_name': 'Filing'},
'filer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'filings'", 'to': u"orm['tx_tecreports.Filer']"}),
'filing_method': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'filings'", 'to': u"orm['tx_tecreports.FilingMethod']"}),
'is_correction': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'report_due': ('django.db.models.fields.DateField', [], {}),
'report_filed': ('django.db.models.fields.DateField', [], {}),
'report_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '250', 'primary_key': 'True'}),
'report_type': ('django.db.models.fields.CharField', [], {'max_length': '250'})
},
u'tx_tecreports.filingmethod': {
'Meta': {'object_name': 'FilingMethod'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'method': ('django.db.models.fields.CharField', [], {'max_length': '250'})
},
u'tx_tecreports.filingtype': {
'Meta': {'object_name': 'FilingType'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('tx_tecreports.fields.MaxCharField', [], {'max_length': '250'})
},
u'tx_tecreports.receipt': {
'Meta': {'ordering': "['date']", 'object_name': 'Receipt'},
'amount': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'contributor': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'receipts'", 'to': u"orm['tx_tecreports.Contributor']"}),
'date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'employer': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['tx_tecreports.Employer']", 'null': 'True', 'blank': 'True'}),
'fec_id': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_out_of_state_pac': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'job_title': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'name_of_schedule': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': u"orm['tx_tecreports.Receipt']"}),
'receipt_id': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
'report': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'receipts'", 'to': u"orm['tx_tecreports.Report']"}),
'travel': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'receipt'", 'unique': 'True', 'null': 'True', 'to': u"orm['tx_tecreports.Travel']"})
},
u'tx_tecreports.report': {
'Meta': {'object_name': 'Report'},
'cash_on_hand': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'filer_id': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'filer_type': ('tx_tecreports.fields.OptionalMaxCharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'from_date': ('django.db.models.fields.DateField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_being_processed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_original': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'outstanding_loans': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'report_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'report_number': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'through_date': ('django.db.models.fields.DateField', [], {}),
'total_contributions': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'total_expenditures': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'unitemized_contributions': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'unitemized_expenditures': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'unitemized_loans': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'unitemized_pledges': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'})
},
u'tx_tecreports.travel': {
'Meta': {'object_name': 'Travel'},
'arrival_date': ('django.db.models.fields.DateField', [], {}),
'departure_date': ('django.db.models.fields.DateField', [], {}),
'departure_location': ('tx_tecreports.fields.MaxCharField', [], {'max_length': '250'}),
'destination': ('tx_tecreports.fields.MaxCharField', [], {'max_length': '250'}),
'first_name': ('tx_tecreports.fields.MaxCharField', [], {'max_length': '250'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('tx_tecreports.fields.MaxCharField', [], {'max_length': '250'}),
'means_of': ('tx_tecreports.fields.MaxCharField', [], {'max_length': '250'}),
'purpose': ('tx_tecreports.fields.MaxCharField', [], {'max_length': '250'}),
'suffix': ('tx_tecreports.fields.MaxCharField', [], {'max_length': '250'}),
'title': ('tx_tecreports.fields.MaxCharField', [], {'max_length': '250'})
}
}
complete_apps = ['tx_tecreports']
|
texas/tx_tecreports
|
tx_tecreports/migrations/0007_auto__add_field_contributor_zipcode_short.py
|
Python
|
apache-2.0
| 13,870 | 0.007354 |
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2014 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Integrate Sphinx documentation pages."""
|
lnielsen/invenio
|
invenio/modules/documentation/__init__.py
|
Python
|
gpl-2.0
| 834 | 0.015588 |
class StadisticRouter(object):
"""A router to control all database operations on models in
the stadistic application"""
def db_for_read(self, model, **hints):
"Point all operations on myapp models to 'other'"
if model._meta.app_label == 'stadistic':
return 'nonrel'
return 'default'
def db_for_write(self, model, **hints):
"Point all operations on stadistic models to 'other'"
if model._meta.app_label == 'stadistic':
return 'nonrel'
return 'default'
    def allow_relation(self, obj1, obj2, **hints):
        "Allow any relation, whether or not a stadistic model is involved"
        return True
def allow_syncdb(self, db, model):
"Make sure the stadistic app only appears on the 'nonrel' db"
if db == 'nonrel':
return model._meta.app_label == 'stadistic'
elif model._meta.app_label == 'stadistic':
return False
return True
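# A sketch of how this router would be activated in settings.py (the dotted
# path is an assumption based on this file's location):
#
#     DATABASE_ROUTERS = ['stadistic.StadisticRouter']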
|
frhumanes/consulting
|
web/src/stadistic/__init__.py
|
Python
|
apache-2.0
| 1,076 | 0.001859 |
# -*- coding: utf-8 -*-
from __future__ import with_statement
import os
import sys
import traceback
# monkey patch bug in python 2.6 and lower
# http://bugs.python.org/issue6122 , http://bugs.python.org/issue1236 , http://bugs.python.org/issue1731717
if sys.version_info < (2, 7) and os.name != "nt":
import errno
import subprocess
def _eintr_retry_call(func, *args):
while True:
try:
return func(*args)
except OSError, e:
if e.errno == errno.EINTR:
continue
raise
    #: Unused timeout option for older python versions
def wait(self, timeout=0):
"""
Wait for child process to terminate. Returns returncode
attribute.
"""
if self.returncode is None:
try:
pid, sts = _eintr_retry_call(os.waitpid, self.pid, 0)
except OSError, e:
if e.errno != errno.ECHILD:
raise
#: This happens if SIGCLD is set to be ignored or waiting
#: For child processes has otherwise been disabled for our
#: process. This child is dead, we can't get the status.
sts = 0
self._handle_exitstatus(sts)
return self.returncode
subprocess.Popen.wait = wait
try:
import send2trash
except ImportError:
pass
from module.plugins.internal.Addon import Addon, Expose, threaded
from module.plugins.internal.Plugin import exists, replace_patterns
from module.plugins.internal.Extractor import ArchiveError, CRCError, PasswordError
from module.utils import fs_encode, save_join as fs_join, uniqify
class ArchiveQueue(object):
def __init__(self, plugin, storage):
self.plugin = plugin
self.storage = storage
def get(self):
try:
return [int(pid) for pid in self.plugin.retrieve("ExtractArchive:%s" % self.storage, "").decode('base64').split()]
except Exception:
return []
def set(self, value):
if isinstance(value, list):
item = str(value)[1:-1].replace(' ', '').replace(',', ' ')
else:
item = str(value).strip()
return self.plugin.store("ExtractArchive:%s" % self.storage, item.encode('base64')[:-1])
def delete(self):
return self.plugin.delete("ExtractArchive:%s" % self.storage)
def add(self, item):
queue = self.get()
if item not in queue:
return self.set(queue + [item])
else:
return True
def remove(self, item):
queue = self.get()
try:
queue.remove(item)
except ValueError:
pass
if queue is []:
return self.delete()
return self.set(queue)
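# Persistence note: the queue is stored base64-encoded as a space-separated
# list of package ids, e.g. [3, 7] is serialized via "3 7" before encoding.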
class ExtractArchive(Addon):
__name__ = "ExtractArchive"
__type__ = "hook"
__version__ = "1.50"
__status__ = "testing"
__config__ = [("activated" , "bool" , "Activated" , True ),
("fullpath" , "bool" , "Extract with full paths" , True ),
("overwrite" , "bool" , "Overwrite files" , False ),
("keepbroken" , "bool" , "Try to extract broken archives" , False ),
("repair" , "bool" , "Repair broken archives (RAR required)" , False ),
("test" , "bool" , "Test archive before extracting" , False ),
("usepasswordfile", "bool" , "Use password file" , True ),
("passwordfile" , "file" , "Password file" , "passwords.txt" ),
("delete" , "bool" , "Delete archive after extraction" , True ),
("deltotrash" , "bool" , "Move to trash (recycle bin) instead delete", True ),
("subfolder" , "bool" , "Create subfolder for each package" , False ),
("destination" , "folder" , "Extract files to folder" , "" ),
("extensions" , "str" , "Extract archives ending with extension" , "7z,bz2,bzip2,gz,gzip,lha,lzh,lzma,rar,tar,taz,tbz,tbz2,tgz,xar,xz,z,zip"),
("excludefiles" , "str" , "Don't extract the following files" , "*.nfo,*.DS_Store,index.dat,thumb.db" ),
("recursive" , "bool" , "Extract archives in archives" , True ),
("waitall" , "bool" , "Run after all downloads was processed" , False ),
("renice" , "int" , "CPU priority" , 0 )]
__description__ = """Extract different kind of archives"""
__license__ = "GPLv3"
__authors__ = [("Walter Purcaro", "vuolter@gmail.com"),
("Immenz" , "immenz@gmx.net" )]
NAME_REPLACEMENTS = [(r'\.part\d+\.rar$', ".part.rar")]
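    # e.g. 'archive.part1.rar' and 'archive.part2.rar' both normalize to
    # 'archive.part.rar', so all volumes of a multipart set share one name.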
def init(self):
self.event_map = {'allDownloadsProcessed': "all_downloads_processed",
'packageDeleted' : "package_deleted" }
self.queue = ArchiveQueue(self, "Queue")
self.failed = ArchiveQueue(self, "Failed")
self.interval = 60
self.extracting = False
self.last_package = False
self.extractors = []
self.passwords = []
self.repair = False
def activate(self):
for p in ("UnRar", "SevenZip", "UnZip"):
try:
module = self.pyload.pluginManager.loadModule("internal", p)
klass = getattr(module, p)
if klass.find():
self.extractors.append(klass)
if klass.REPAIR:
self.repair = self.get_config('repair')
except OSError, e:
if e.errno == 2:
self.log_warning(_("No %s installed") % p)
else:
self.log_warning(_("Could not activate: %s") % p, e)
if self.pyload.debug:
traceback.print_exc()
except Exception, e:
self.log_warning(_("Could not activate: %s") % p, e)
if self.pyload.debug:
traceback.print_exc()
if self.extractors:
self.log_debug(*["Found %s %s" % (Extractor.__name__, Extractor.VERSION) for Extractor in self.extractors])
self.extract_queued() #: Resume unfinished extractions
else:
self.log_info(_("No Extract plugins activated"))
@threaded
def extract_queued(self, thread):
if self.extracting: #@NOTE: doing the check here for safty (called by coreReady)
return
self.extracting = True
packages = self.queue.get()
while packages:
if self.last_package: #: Called from allDownloadsProcessed
self.last_package = False
if self.extract(packages, thread): #@NOTE: check only if all gone fine, no failed reporting for now
self.manager.dispatchEvent("all_archives_extracted")
self.manager.dispatchEvent("all_archives_processed")
            else:
                self.extract(packages, thread) #@NOTE: check only if all gone fine, no failed reporting for now
packages = self.queue.get() #: Check for packages added during extraction
self.extracting = False
#: Deprecated method, use `extract_package` instead
@Expose
def extractPackage(self, *args, **kwargs):
"""
See `extract_package`
"""
return self.extract_package(*args, **kwargs)
@Expose
def extract_package(self, *ids):
"""
Extract packages with given id
"""
for id in ids:
self.queue.add(id)
if not self.get_config('waitall') and not self.extracting:
self.extract_queued()
def package_deleted(self, pid):
self.queue.remove(pid)
def package_finished(self, pypack):
self.queue.add(pypack.id)
if not self.get_config('waitall') and not self.extracting:
self.extract_queued()
def all_downloads_processed(self):
self.last_package = True
if self.get_config('waitall') and not self.extracting:
self.extract_queued()
@Expose
def extract(self, ids, thread=None): #@TODO: Use pypack, not pid to improve method usability
if not ids:
return False
processed = []
extracted = []
failed = []
toList = lambda string: string.replace(' ', '').replace(',', '|').replace(';', '|').split('|')
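        # e.g. toList("zip, rar;7z") -> ['zip', 'rar', '7z']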
destination = self.get_config('destination')
subfolder = self.get_config('subfolder')
fullpath = self.get_config('fullpath')
overwrite = self.get_config('overwrite')
renice = self.get_config('renice')
recursive = self.get_config('recursive')
delete = self.get_config('delete')
keepbroken = self.get_config('keepbroken')
extensions = [x.lstrip('.').lower() for x in toList(self.get_config('extensions'))]
excludefiles = toList(self.get_config('excludefiles'))
if extensions:
self.log_debug("Use for extensions: %s" % "|.".join(extensions))
#: Reload from txt file
self.reload_passwords()
download_folder = self.pyload.config.get("general", "download_folder")
#: Iterate packages -> extractors -> targets
for pid in ids:
pypack = self.pyload.files.getPackage(pid)
if not pypack:
self.queue.remove(pid)
continue
self.log_info(_("Check package: %s") % pypack.name)
#: Determine output folder
out = fs_join(download_folder, pypack.folder, destination, "") #: Force trailing slash
if subfolder:
out = fs_join(out, pypack.folder)
if not exists(out):
os.makedirs(out)
matched = False
success = True
files_ids = dict((pylink['name'], ((fs_join(download_folder, pypack.folder, pylink['name'])), pylink['id'], out)) for pylink \
in sorted(pypack.getChildren().values(), key=lambda k: k['name'])).values() #: Remove duplicates
#: Check as long there are unseen files
while files_ids:
new_files_ids = []
if extensions:
files_ids = [(fname, fid, fout) for fname, fid, fout in files_ids \
if filter(lambda ext: fname.lower().endswith(ext), extensions)]
for Extractor in self.extractors:
targets = Extractor.get_targets(files_ids)
if targets:
self.log_debug("Targets for %s: %s" % (Extractor.__name__, targets))
matched = True
for fname, fid, fout in targets:
name = os.path.basename(fname)
if not exists(fname):
self.log_debug(name, "File not found")
continue
self.log_info(name, _("Extract to: %s") % fout)
try:
pyfile = self.pyload.files.getFile(fid)
archive = Extractor(self,
fname,
fout,
fullpath,
overwrite,
excludefiles,
renice,
delete,
keepbroken,
fid)
thread.addActive(pyfile)
archive.init()
try:
new_files = self._extract(pyfile, archive, pypack.password)
finally:
pyfile.setProgress(100)
thread.finishFile(pyfile)
except Exception, e:
self.log_error(name, e)
success = False
continue
#: Remove processed file and related multiparts from list
files_ids = [(fname, fid, fout) for fname, fid, fout in files_ids \
if fname not in archive.get_delete_files()]
self.log_debug("Extracted files: %s" % new_files)
for file in new_files:
self.set_permissions(file)
for filename in new_files:
file = fs_encode(fs_join(os.path.dirname(archive.filename), filename))
if not exists(file):
self.log_debug("New file %s does not exists" % filename)
continue
if recursive and os.path.isfile(file):
new_files_ids.append((filename, fid, os.path.dirname(filename))) #: Append as new target
self.manager.dispatchEvent("archive_extracted", pyfile, archive)
files_ids = new_files_ids #: Also check extracted files
if matched:
if success:
extracted.append(pid)
self.manager.dispatchEvent("package_extracted", pypack)
else:
failed.append(pid)
self.manager.dispatchEvent("package_extract_failed", pypack)
self.failed.add(pid)
else:
self.log_info(_("No files found to extract"))
if not matched or not success and subfolder:
try:
os.rmdir(out)
except OSError:
pass
self.queue.remove(pid)
return True if not failed else False
def _extract(self, pyfile, archive, password):
name = os.path.basename(archive.filename)
pyfile.setStatus("processing")
encrypted = False
try:
self.log_debug("Password: %s" % (password or "None provided"))
passwords = uniqify([password] + self.get_passwords(False)) if self.get_config('usepasswordfile') else [password]
for pw in passwords:
try:
if self.get_config('test') or self.repair:
pyfile.setCustomStatus(_("archive testing"))
if pw:
self.log_debug("Testing with password: %s" % pw)
pyfile.setProgress(0)
archive.verify(pw)
pyfile.setProgress(100)
else:
archive.check(pw)
self.add_password(pw)
break
except PasswordError:
if not encrypted:
self.log_info(name, _("Password protected"))
encrypted = True
except CRCError, e:
self.log_debug(name, e)
self.log_info(name, _("CRC Error"))
if self.repair:
self.log_warning(name, _("Repairing..."))
pyfile.setCustomStatus(_("archive repairing"))
pyfile.setProgress(0)
repaired = archive.repair()
pyfile.setProgress(100)
if not repaired and not self.get_config('keepbroken'):
raise CRCError("Archive damaged")
self.add_password(pw)
break
raise CRCError("Archive damaged")
except ArchiveError, e:
raise ArchiveError(e)
pyfile.setCustomStatus(_("extracting"))
pyfile.setProgress(0)
if not encrypted or not self.get_config('usepasswordfile'):
self.log_debug("Extracting using password: %s" % (password or "None"))
archive.extract(password)
else:
for pw in filter(None, uniqify([password] + self.get_passwords(False))):
try:
self.log_debug("Extracting using password: %s" % pw)
archive.extract(pw)
self.add_password(pw)
break
except PasswordError:
self.log_debug("Password was wrong")
else:
raise PasswordError
pyfile.setProgress(100)
pyfile.setStatus("processing")
delfiles = archive.get_delete_files()
self.log_debug("Would delete: " + ", ".join(delfiles))
if self.get_config('delete'):
self.log_info(_("Deleting %s files") % len(delfiles))
deltotrash = self.get_config('deltotrash')
for f in delfiles:
file = fs_encode(f)
if not exists(file):
continue
if not deltotrash:
os.remove(file)
else:
try:
send2trash.send2trash(file)
except NameError:
self.log_warning(_("Unable to move %s to trash") % os.path.basename(f),
_("Send2Trash lib not found"))
except Exception, e:
self.log_warning(_("Unable to move %s to trash") % os.path.basename(f),
e.message)
else:
self.log_info(_("Moved %s to trash") % os.path.basename(f))
self.log_info(name, _("Extracting finished"))
extracted_files = archive.files or archive.list()
return extracted_files
except PasswordError:
self.log_error(name, _("Wrong password" if password else "No password found"))
except CRCError, e:
self.log_error(name, _("CRC mismatch"), e)
except ArchiveError, e:
self.log_error(name, _("Archive error"), e)
except Exception, e:
self.log_error(name, _("Unknown error"), e)
if self.pyload.debug:
traceback.print_exc()
self.manager.dispatchEvent("archive_extract_failed", pyfile, archive)
raise Exception(_("Extract failed"))
#: Deprecated method, use `get_passwords` instead
@Expose
def getPasswords(self, *args, **kwargs):
"""
See `get_passwords`
"""
return self.get_passwords(*args, **kwargs)
@Expose
def get_passwords(self, reload=True):
"""
List of saved passwords
"""
if reload:
self.reload_passwords()
return self.passwords
def reload_passwords(self):
try:
passwords = []
file = fs_encode(self.get_config('passwordfile'))
with open(file) as f:
for pw in f.read().splitlines():
passwords.append(pw)
except IOError, e:
self.log_error(e)
else:
self.passwords = passwords
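    # Note: the password file is plain text with one candidate password per
    # line, as read by reload_passwords() above.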
#: Deprecated method, use `add_password` instead
@Expose
def addPassword(self, *args, **kwargs):
"""
See `add_password`
"""
return self.add_password(*args, **kwargs)
@Expose
def add_password(self, password):
"""
Adds a password to saved list
"""
try:
self.passwords = uniqify([password] + self.passwords)
file = fs_encode(self.get_config('passwordfile'))
with open(file, "wb") as f:
for pw in self.passwords:
f.write(pw + '\n')
except IOError, e:
self.log_error(e)
|
benbox69/pyload
|
module/plugins/hooks/ExtractArchive.py
|
Python
|
gpl-3.0
| 21,976 | 0.008282 |
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
def deslugify(text):
in_bits = text.split('-')
out_bits = list()
for bit in in_bits:
out_bit = bit[0].upper() + bit[1:]
out_bits.append(out_bit)
return ' '.join(out_bits)
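# For illustration: deslugify('nice-answer') returns 'Nice Answer', and
# deslugify('strunk-and-white') returns 'Strunk And White' (the backwards
# migration below special-cases the latter to restore 'Strunk & White').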
class Migration(DataMigration):
def forwards(self, orm):
        pass  # nothing to do
def backwards(self, orm):
"""need this reverse migration so that creation of unique
constraint (type, name) works in backwards migration 0030
"""
for badge in orm.BadgeData.objects.all():
badge.name = deslugify(badge.slug)
            if badge.name == 'Strunk And White':  # special case
badge.name = 'Strunk & White'
badge.save()
models = {
'askbot.activity': {
'Meta': {'object_name': 'Activity', 'db_table': "u'activity'"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'activity_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_auditted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Question']", 'null': 'True'}),
'receiving_users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'received_activity'", 'to': "orm['auth.User']"}),
'recipients': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'incoming_activity'", 'through': "'ActivityAuditStatus'", 'to': "orm['auth.User']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.activityauditstatus': {
'Meta': {'unique_together': "(('user', 'activity'),)", 'object_name': 'ActivityAuditStatus'},
'activity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Activity']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'status': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.anonymousanswer': {
'Meta': {'object_name': 'AnonymousAnswer'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'anonymous_answers'", 'to': "orm['askbot.Question']"}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'text': ('django.db.models.fields.TextField', [], {}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
},
'askbot.anonymousquestion': {
'Meta': {'object_name': 'AnonymousQuestion'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
},
'askbot.answer': {
'Meta': {'object_name': 'Answer', 'db_table': "u'answer'"},
'accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'accepted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': "orm['auth.User']"}),
'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
'html': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': "orm['askbot.Question']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'askbot.answerrevision': {
'Meta': {'object_name': 'AnswerRevision', 'db_table': "u'answer_revision'"},
'answer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': "orm['askbot.Answer']"}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answerrevisions'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'revised_at': ('django.db.models.fields.DateTimeField', [], {}),
'revision': ('django.db.models.fields.PositiveIntegerField', [], {}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'text': ('django.db.models.fields.TextField', [], {})
},
'askbot.award': {
'Meta': {'object_name': 'Award', 'db_table': "u'award'"},
'awarded_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'badge': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_badge'", 'to': "orm['askbot.BadgeData']"}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notified': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_user'", 'to': "orm['auth.User']"})
},
'askbot.badgedata': {
'Meta': {'unique_together': "(('name', 'type'),)", 'object_name': 'BadgeData'},
'awarded_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'awarded_to': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'badges'", 'through': "'Award'", 'to': "orm['auth.User']"}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'multiple': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'slug': ('django.db.models.fields.SlugField', [], {'db_index': 'True', 'max_length': '50', 'blank': 'True'}),
'type': ('django.db.models.fields.SmallIntegerField', [], {})
},
'askbot.comment': {
'Meta': {'object_name': 'Comment', 'db_table': "u'comment'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'comment': ('django.db.models.fields.CharField', [], {'max_length': '2048'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'html': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '2048'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'comments'", 'to': "orm['auth.User']"})
},
'askbot.emailfeedsetting': {
'Meta': {'object_name': 'EmailFeedSetting'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'feed_type': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'frequency': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '8'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reported_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'subscriber': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notification_subscriptions'", 'to': "orm['auth.User']"})
},
'askbot.favoritequestion': {
'Meta': {'object_name': 'FavoriteQuestion', 'db_table': "u'favorite_question'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Question']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_favorite_questions'", 'to': "orm['auth.User']"})
},
'askbot.markedtag': {
'Meta': {'object_name': 'MarkedTag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reason': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_selections'", 'to': "orm['askbot.Tag']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tag_selections'", 'to': "orm['auth.User']"})
},
'askbot.question': {
'Meta': {'object_name': 'Question', 'db_table': "u'question'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'answer_accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'answer_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'questions'", 'to': "orm['auth.User']"}),
'close_reason': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'closed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'closed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'closed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'closed_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'favorited_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'favorite_questions'", 'through': "'FavoriteQuestion'", 'to': "orm['auth.User']"}),
'favourite_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'followed_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'followed_questions'", 'to': "orm['auth.User']"}),
'html': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_activity_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_activity_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'last_active_in_questions'", 'to': "orm['auth.User']"}),
'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'questions'", 'to': "orm['askbot.Tag']"}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'askbot.questionrevision': {
'Meta': {'object_name': 'QuestionRevision', 'db_table': "u'question_revision'"},
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'questionrevisions'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': "orm['askbot.Question']"}),
'revised_at': ('django.db.models.fields.DateTimeField', [], {}),
'revision': ('django.db.models.fields.PositiveIntegerField', [], {}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'})
},
'askbot.questionview': {
'Meta': {'object_name': 'QuestionView'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'viewed'", 'to': "orm['askbot.Question']"}),
'when': ('django.db.models.fields.DateTimeField', [], {}),
'who': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'question_views'", 'to': "orm['auth.User']"})
},
'askbot.repute': {
'Meta': {'object_name': 'Repute', 'db_table': "u'repute'"},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'negative': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'positive': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Question']", 'null': 'True', 'blank': 'True'}),
'reputation': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'reputation_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'reputed_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.tag': {
'Meta': {'object_name': 'Tag', 'db_table': "u'tag'"},
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_tags'", 'to': "orm['auth.User']"}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_tags'", 'null': 'True', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'used_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'askbot.vote': {
'Meta': {'unique_together': "(('content_type', 'object_id', 'user'),)", 'object_name': 'Vote', 'db_table': "u'vote'"},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'votes'", 'to': "orm['auth.User']"}),
'vote': ('django.db.models.fields.SmallIntegerField', [], {}),
'voted_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'bronze': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'email_isvalid': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'email_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gold': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'gravatar': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'blank': 'True'}),
'hide_ignored_questions': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'new_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'questions_per_page': ('django.db.models.fields.SmallIntegerField', [], {'default': '10'}),
'real_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'reputation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'seen_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'silver': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'w'", 'max_length': '2'}),
'tag_filter_setting': ('django.db.models.fields.CharField', [], {'default': "'ignored'", 'max_length': '16'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['askbot']
|
samhoo/askbot-realworld
|
askbot/migrations/0031_synchronize_badge_slug_with_name.py
|
Python
|
gpl-3.0
| 26,451 | 0.008468 |
# cerbero - a multi-platform build system for Open Source software
# Copyright (C) 2012 Andoni Morales Alastruey <ylatuya@gmail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
from cerbero.utils import shell
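# These helpers are thin wrappers around the 'svn' command-line client,
# invoked through shell.call(); svn is assumed to be installed and on PATH.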
def checkout(url, dest):
'''
Checkout a url to a given destination
@param url: url to checkout
@type url: string
@param dest: path where to do the checkout
@type url: string
'''
shell.call('svn co %s %s' % (url, dest))
def update(repo, revision='HEAD'):
'''
    Update a repository to a given revision
    @param repo: repository path
    @type repo: str
@param revision: the revision to checkout
@type revision: str
'''
shell.call('svn up -r %s' % revision, repo)
def checkout_file(url, out_path):
'''
Checkout a single file to out_path
@param url: file URL
@type url: str
@param out_path: output path
    @type out_path: str
'''
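    # 'svn export --force' writes the file even when out_path already exists.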
shell.call('svn export --force %s %s' % (url, out_path))
|
nicolewu/cerbero
|
cerbero/utils/svn.py
|
Python
|
lgpl-2.1
| 1,732 | 0.000577 |
"""Manage, delete, order compute instances."""
# :license: MIT, see LICENSE for more details.
import SoftLayer
from SoftLayer.CLI import environment
from SoftLayer.CLI import exceptions
from SoftLayer.CLI import formatting
from SoftLayer.CLI import helpers
from SoftLayer.CLI import template
from SoftLayer.CLI import virt
from SoftLayer import utils
import click
@click.command(epilog="See 'sl vs create-options' for valid options")
@click.option('--domain', '-D', help="Domain portion of the FQDN")
@click.option('--hostname', '-H', help="Host portion of the FQDN")
@click.option('--image',
help="Image GUID. See: 'sl image list' for reference")
@click.option('--cpu', '-c', help="Number of CPU cores", type=click.INT)
@click.option('--memory', '-m', help="Memory in mebibytes", type=virt.MEM_TYPE)
@click.option('--os', '-o',
help="OS install code. Tip: you can specify <OS>_LATEST")
@click.option('--billing',
type=click.Choice(['hourly', 'monthly']),
default='hourly',
help="""Billing rate""")
@click.option('--datacenter', '-d', help="Datacenter shortname")
@click.option('--dedicated/--public',
is_flag=True,
help="Create a dedicated Virtual Server (Private Node)")
@click.option('--san',
is_flag=True,
help="Use SAN storage instead of local disk.")
@click.option('--test',
is_flag=True,
help="Do not actually create the virtual server")
@click.option('--export',
type=click.Path(writable=True, resolve_path=True),
help="Exports options to a template file")
@click.option('--userfile', '-F',
help="Read userdata from file",
type=click.Path(exists=True, readable=True, resolve_path=True))
@click.option('--postinstall', '-i', help="Post-install script to download")
@click.option('--key', '-k',
multiple=True,
help="SSH keys to add to the root user")
@click.option('--disk', multiple=True, help="Disk sizes")
@click.option('--private',
is_flag=True,
help="Forces the VS to only have access the private network")
@click.option('--like',
is_flag=True,
help="Use the configuration from an existing VS")
@click.option('--network', '-n', help="Network port speed in Mbps")
@click.option('--tag', '-g', multiple=True, help="Tags to add to the instance")
@click.option('--template', '-t',
help="A template file that defaults the command-line options",
type=click.Path(exists=True, readable=True, resolve_path=True))
@click.option('--userdata', '-u', help="User defined metadata string")
@click.option('--vlan-public',
help="The ID of the public VLAN on which you want the virtual "
"server placed",
type=click.INT)
@click.option('--vlan-private',
help="The ID of the private VLAN on which you want the virtual "
"server placed",
type=click.INT)
@click.option('--wait',
type=click.INT,
help="Wait until VS is finished provisioning for up to X "
"seconds before returning")
@environment.pass_env
def cli(env, **args):
"""Order/create virtual servers."""
template.update_with_template_args(args, list_args=['disk', 'key'])
vsi = SoftLayer.VSManager(env.client)
    _update_with_like_args(env, args)
_validate_args(args)
# Do not create a virtual server with test or export
do_create = not (args['export'] or args['test'])
table = formatting.Table(['Item', 'cost'])
table.align['Item'] = 'r'
table.align['cost'] = 'r'
data = _parse_create_args(env.client, args)
output = []
if args.get('test'):
result = vsi.verify_create_instance(**data)
total_monthly = 0.0
total_hourly = 0.0
table = formatting.Table(['Item', 'cost'])
table.align['Item'] = 'r'
table.align['cost'] = 'r'
for price in result['prices']:
total_monthly += float(price.get('recurringFee', 0.0))
total_hourly += float(price.get('hourlyRecurringFee', 0.0))
if args.get('billing') == 'hourly':
rate = "%.2f" % float(price['hourlyRecurringFee'])
elif args.get('billing') == 'monthly':
rate = "%.2f" % float(price['recurringFee'])
table.add_row([price['item']['description'], rate])
total = 0
if args.get('billing') == 'hourly':
total = total_hourly
elif args.get('billing') == 'monthly':
total = total_monthly
billing_rate = 'monthly'
        if args.get('billing') == 'hourly':
billing_rate = 'hourly'
table.add_row(['Total %s cost' % billing_rate, "%.2f" % total])
output.append(table)
output.append(formatting.FormattedItem(
None,
            ' -- ! Prices reflected here are retail, do not include '
            'account-level discounts and are not guaranteed.'))
if args['export']:
export_file = args.pop('export')
template.export_to_template(export_file, args,
exclude=['wait', 'test'])
return 'Successfully exported options to a template file.'
if do_create:
if env.skip_confirmations or formatting.confirm(
"This action will incur charges on your account. Continue?"):
result = vsi.create_instance(**data)
table = formatting.KeyValueTable(['name', 'value'])
table.align['name'] = 'r'
table.align['value'] = 'l'
table.add_row(['id', result['id']])
table.add_row(['created', result['createDate']])
table.add_row(['guid', result['globalIdentifier']])
output.append(table)
if args.get('wait'):
ready = vsi.wait_for_ready(
result['id'], int(args.get('wait') or 1))
table.add_row(['ready', ready])
else:
raise exceptions.CLIAbort('Aborting virtual server order.')
return output
def _validate_args(args):
"""Raises an ArgumentError if the given arguments are not valid."""
if all([args['userdata'], args['userfile']]):
raise exceptions.ArgumentError(
'[-u | --userdata] not allowed with [-F | --userfile]')
image_args = [args['os'], args['image']]
if all(image_args):
raise exceptions.ArgumentError(
'[-o | --os] not allowed with [--image]')
if not any(image_args):
raise exceptions.ArgumentError(
'One of [--os | --image] is required')
def _update_with_like_args(env, args):
"""Update arguments with options taken from a currently running VS.
    :param env: CLI environment that provides the SoftLayer client
:param dict args: CLI arguments
"""
if args['like']:
vsi = SoftLayer.VSManager(env.client)
vs_id = helpers.resolve_id(vsi.resolve_ids, args.pop('like'), 'VS')
like_details = vsi.get_instance(vs_id)
like_args = {
'hostname': like_details['hostname'],
'domain': like_details['domain'],
'cpu': like_details['maxCpu'],
'memory': like_details['maxMemory'],
'hourly': like_details['hourlyBillingFlag'],
'datacenter': like_details['datacenter']['name'],
'network': like_details['networkComponents'][0]['maxSpeed'],
            'userdata': like_details['userData'] or None,
'postinstall': like_details.get('postInstallScriptUri'),
'dedicated': like_details['dedicatedAccountHostOnlyFlag'],
'private': like_details['privateNetworkOnlyFlag'],
}
tag_refs = like_details.get('tagReferences', None)
if tag_refs is not None and len(tag_refs) > 0:
like_args['tag'] = [t['tag']['name'] for t in tag_refs]
# Handle mutually exclusive options
like_image = utils.lookup(like_details,
'blockDeviceTemplateGroup',
'globalIdentifier')
like_os = utils.lookup(like_details,
'operatingSystem',
'softwareLicense',
'softwareDescription',
'referenceCode')
if like_image and not args.get('os'):
like_args['image'] = like_image
elif like_os and not args.get('image'):
like_args['os'] = like_os
# Merge like VS options with the options passed in
for key, value in like_args.items():
if args.get(key) in [None, False]:
args[key] = value
def _parse_create_args(client, args):
"""Converts CLI arguments to args for VSManager.create_instance.
:param dict args: CLI arguments
"""
data = {
"hourly": args['billing'] == 'hourly',
"cpus": args['cpu'],
"domain": args['domain'],
"hostname": args['hostname'],
"private": args['private'],
"dedicated": args['dedicated'],
"disks": args['disk'],
"local_disk": not args['san'],
}
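    # The optional keys below are only set when the corresponding CLI flag was
    # supplied, so VSManager.create_instance can fall back to its own defaults.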
data["memory"] = args['memory']
if args.get('os'):
data['os_code'] = args['os']
if args.get('image'):
data['image_id'] = args['image']
if args.get('datacenter'):
data['datacenter'] = args['datacenter']
if args.get('network'):
data['nic_speed'] = args.get('network')
if args.get('userdata'):
data['userdata'] = args['userdata']
elif args.get('userfile'):
with open(args['userfile'], 'r') as userfile:
data['userdata'] = userfile.read()
if args.get('postinstall'):
data['post_uri'] = args.get('postinstall')
# Get the SSH keys
if args.get('key'):
keys = []
for key in args.get('key'):
resolver = SoftLayer.SshKeyManager(client).resolve_ids
key_id = helpers.resolve_id(resolver, key, 'SshKey')
keys.append(key_id)
data['ssh_keys'] = keys
if args.get('vlan_public'):
data['public_vlan'] = args['vlan_public']
if args.get('vlan_private'):
data['private_vlan'] = args['vlan_private']
if args.get('tag'):
data['tags'] = ','.join(args['tag'])
return data
|
cloudify-cosmo/softlayer-python
|
SoftLayer/CLI/virt/create.py
|
Python
|
mit
| 10,420 | 0 |
from icalendar import Calendar, vCalAddress, vText
import icalendar
from datetime import timedelta
from django.template import RequestContext
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.core.urlresolvers import reverse
from django.core.exceptions import ObjectDoesNotExist
#from django.contrib.syndication.views import feed
from django.utils import feedgenerator
from django.template.loader import render_to_string
from django.http import HttpResponse
from evesch.org.models import Organization
from evesch.event.models import Event
from evesch.core.feed.feeds import OrgFeed
from evesch.euser.models import eUser, get_current_user
def org_rss(request, org_short_name, org_feed_hash):
    """Serve an RSS feed of an organization's events."""
    try:
host = request.META['HTTP_HOST']
current_org, message = Organization.objects.get_current_org(org_short_name)
if message:
return HttpResponseRedirect(reverse('org_orgs_list'))
if not org_feed_hash == current_org.org_feed_hash:
return HttpResponseRedirect(reverse('org_org_view', kwargs={'org_short_name':current_org.org_short_name}))
events = current_org.event_set.all().order_by('-event_date')
orgfeed = feedgenerator.Rss201rev2Feed(title=current_org.org_name,
link="http://%s%s" % (host, reverse('event_events_list',kwargs={'org_short_name':current_org.org_short_name,})),
description=current_org.org_desc, language='en',
)
for event in events:
orgfeed.add_item(
title=event.event_name,
link="http://%s%s" % (host, reverse('event_event_view', kwargs={'org_short_name':current_org.org_short_name,'event_hash':event.event_hash})),
description="Event on: %s -- Description: %s" % (event.event_date.strftime('%d %b %Y'), event.event_desc),
categories=(event.event_type,),
author_name=event.event_creator_name,
pubdate=event.event_created_date)
response = HttpResponse()
response['Content-Type'] = 'application/rss+xml'
response.write(orgfeed.writeString('UTF-8'))
#template_name = "error.html"
return response
except ObjectDoesNotExist:
context = {'error':"Organization does not exist",}
template_name = "error.html"
return render_to_response(template_name,context,context_instance=RequestContext(request))
def org_ics(request,org_short_name,org_feed_hash):
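    """Serve an iCalendar (.ics) feed of an organization's events."""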
host = request.META['HTTP_HOST']
current_org, message = Organization.objects.get_current_org(org_short_name)
if message:
return HttpResponseRedirect(reverse('org_orgs_list'))
if not org_feed_hash == current_org.org_feed_hash:
return HttpResponseRedirect(reverse('org_org_view', kwargs={'org_short_name':current_org.org_short_name}))
events = current_org.event_set.all().order_by('-event_date')
orgical = Calendar()
orgical['summary'] = "Calendar for organization %s" % (current_org.org_name)
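    # PRODID and VERSION are required properties of a VCALENDAR (RFC 5545).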
orgical.add('prodid', '-//Evesch//NONSGML v1.0//EN')
orgical.add('version', '2.0')
for event in events:
cal_event = icalendar.Event()
cal_event.add('summary', event.event_name)
cal_event.add('dtstart', event.event_date)
cal_event.add('description', event.event_desc)
cal_event.add('categories',event.event_type)
cal_event.add('duration',timedelta(hours=1))
cal_event.add('url',"http://%s%s" % (host, reverse('event_event_view',kwargs={'org_short_name':current_org.org_short_name,'event_hash':event.event_hash,})))
if event.event_creator_name.email:
organizer_n = event.event_creator_name.email
else:
organizer_n = "%s %s" % (event.event_creator_name.first_name, event.event_creator_name.last_name)
organizer = vCalAddress('MAILTO:' + organizer_n)
organizer.params['cn'] = vText("%s %s" % (event.event_creator_name.first_name, event.event_creator_name.last_name))
organizer.params['role'] = vText('CREATOR')
cal_event.add('organizer', organizer, encode=0)
orgical.add_component(cal_event)
template_name = "core/message.html"
context = {}
response = HttpResponse()
response['Content-Type'] = 'text/calendar'
response.write(orgical.to_ical())
#template_name = "error.html"
return response
def user_rss(request, username, user_feed_hash):
    """Serve an RSS feed of the events a user is attending."""
    try:
host = request.META['HTTP_HOST']
current_user, message = get_current_user(username)
if message:
return HttpResponseRedirect(reverse('home'))
if not user_feed_hash == current_user.user_feed_hash:
return HttpResponseRedirect(reverse('euser_user_view', kwargs={'username':current_user.username}))
user_events = Event.objects.filter(attendee__in=current_user.attendee_set.all()).order_by('-event_date')
orgfeed = feedgenerator.Rss201rev2Feed(title=current_user.username,
link="http://%s%s" % (host, reverse('euser_user_view', kwargs={'username':current_user.username})) ,
description=current_user.about, language='en',
)
for event in user_events:
orgfeed.add_item(
title=event.event_name,
link="http://%s%s" % (host, reverse('event_event_view', kwargs={'org_short_name':event.event_org.org_short_name,'event_hash':event.event_hash})),
description="Event on: %s -- Description: %s" % (event.event_date.strftime('%d %b %Y'), event.event_desc),
categories=(event.event_type,),
author_name=event.event_creator_name,
pubdate=event.event_created_date)
response = HttpResponse()
response['Content-Type'] = 'application/rss+xml'
response.write(orgfeed.writeString('UTF-8'))
#template_name = "error.html"
return response
except ObjectDoesNotExist:
context = {'error':"Organization does not exist",}
template_name = "error.html"
return render_to_response(template_name,context,context_instance=RequestContext(request))
def user_ics(request,username,user_feed_hash):
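    """Serve an iCalendar (.ics) feed of the events a user attends."""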
host = request.META['HTTP_HOST']
current_user, message = get_current_user(username)
if message:
return HttpResponseRedirect(reverse('home'))
#user_events = Event.objects.all()
if not user_feed_hash == current_user.user_feed_hash:
return HttpResponseRedirect(reverse('euser_user_view', kwargs={'username':current_user.username}))
user_events = Event.objects.filter(attendee__in=current_user.attendee_set.all()).order_by('-event_date')
userical = Calendar()
userical['summary'] = "Calendar for user %s" % (current_user.username)
userical.add('prodid', '-//Evesch//NONSGML v1.0//EN')
userical.add('version', '2.0')
for event in user_events:
cal_event = icalendar.Event()
cal_event.add('summary', event.event_name)
cal_event.add('dtstart', event.event_date)
cal_event.add('description', event.event_desc)
cal_event.add('categories',event.event_type)
cal_event.add('duration',timedelta(hours=1))
cal_event.add('url',"http://" + host + reverse('event_event_view',kwargs={'org_short_name':event.event_org.org_short_name,'event_hash':event.event_hash,}))
if event.event_creator_name.email:
organizer_n = event.event_creator_name.email
else:
organizer_n = "%s %s" % (event.event_creator_name.first_name, event.event_creator_name.last_name)
organizer = vCalAddress('MAILTO:' + organizer_n)
organizer.params['cn'] = vText("%s %s" % (event.event_creator_name.first_name, event.event_creator_name.last_name))
organizer.params['role'] = vText('CREATOR')
cal_event.add('organizer', organizer, encode=0)
userical.add_component(cal_event)
template_name = "core/message.html"
context = {}
response = HttpResponse()
response['Content-Type'] = 'text/calendar'
    response.write(userical.to_ical())
#template_name = "error.html"
return response
|
JoeJasinski/evesch
|
evesch/core/feed/views.py
|
Python
|
gpl-2.0
| 8,264 | 0.0144 |
"""SCons.Tool.sgiar
Tool-specific initialization for SGI ar (library archive). If CC
exists, static libraries should be built with it, so the prelinker has
a chance to resolve C++ template instantiations.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/sgiar.py 4369 2009/09/19 15:58:29 scons"
import SCons.Defaults
import SCons.Tool
import SCons.Util
def generate(env):
"""Add Builders and construction variables for ar to an Environment."""
SCons.Tool.createStaticLibBuilder(env)
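    # Prefer the SGI 'CC' front end when present: it runs the prelinker, so
    # C++ template instantiations are resolved before the archive is built.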
if env.Detect('CC'):
env['AR'] = 'CC'
env['ARFLAGS'] = SCons.Util.CLVar('-ar')
env['ARCOM'] = '$AR $ARFLAGS -o $TARGET $SOURCES'
else:
env['AR'] = 'ar'
env['ARFLAGS'] = SCons.Util.CLVar('r')
env['ARCOM'] = '$AR $ARFLAGS $TARGET $SOURCES'
env['SHLINK'] = '$LINK'
env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -shared')
env['SHLINKCOM'] = '$SHLINK $SHLINKFLAGS -o $TARGET $SOURCES $_LIBDIRFLAGS $_LIBFLAGS'
env['LIBPREFIX'] = 'lib'
env['LIBSUFFIX'] = '.a'
def exists(env):
return env.Detect('CC') or env.Detect('ar')
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
looooo/pivy
|
scons/scons-local-1.2.0.d20090919/SCons/Tool/sgiar.py
|
Python
|
isc
| 2,570 | 0.006226 |
import unittest
from tow.dockerfile import Dockerfile
class DockerfileTest(unittest.TestCase):
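    # The tests assign the name-mangled private attribute _Dockerfile__dockerfile
    # directly, so no real Dockerfile needs to exist on disk.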
def test_parse_spaced_envs(self):
d = Dockerfile("Dockerfile")
d._Dockerfile__dockerfile = ["ENV test 1"]
envs = d.envs()
self.assertEqual(envs, {"test": "1"})
def test_parse_many_envs(self):
d = Dockerfile("Dockerfile")
d._Dockerfile__dockerfile = ["ENV test 1", "ENV test2=2", "ENV test3 3"]
envs = d.envs()
self.assertEqual(envs, {"test": "1", "test2": "2", "test3": "3"})
def test_parse_multiline(self):
d = Dockerfile("Dockerfile")
d._Dockerfile__dockerfile = ['ENV myName="John Doe" myDog=Rex\\ The\\ Dog \\',
'myCat=fluffy']
envs = d.envs()
self.assertEqual(envs, {"myName": "John Doe",
"myDog": "Rex\\ The\\ Dog", "myCat": "fluffy"})
def test_add_copy(self):
d = Dockerfile("Dockerfile")
d._Dockerfile__dockerfile = ["FROM ubuntu"]
mapping = ("/tets1", "/test2")
d.add_copy([mapping])
self.assertListEqual(d._Dockerfile__dockerfile, ["FROM ubuntu",
"# TOW COPY BLOCK FROM MAPPING FILE START",
"COPY %s %s" % mapping,
"# TOW COPY BLOCK FROM MAPPING FILE END"])
def test_add_copy_after_from(self):
d = Dockerfile("Dockerfile")
d._Dockerfile__dockerfile = ["FROM ubuntu", "ENTRYPOINT [/bin/sh]"]
mapping = ("/tets1", "/test2")
d.add_copy([mapping])
self.assertListEqual(d._Dockerfile__dockerfile, ["FROM ubuntu",
"# TOW COPY BLOCK FROM MAPPING FILE START",
"COPY %s %s" % mapping,
"# TOW COPY BLOCK FROM MAPPING FILE END",
"ENTRYPOINT [/bin/sh]"])
def test_add_copy_after_maintainer(self):
d = Dockerfile("Dockerfile")
d._Dockerfile__dockerfile = ["FROM ubuntu", "MAINTAINER test","ENTRYPOINT [/bin/sh]"]
mapping = ("/tets1", "/test2")
d.add_copy([mapping])
self.assertListEqual(d._Dockerfile__dockerfile, ["FROM ubuntu",
"MAINTAINER test",
"# TOW COPY BLOCK FROM MAPPING FILE START",
"COPY %s %s" % mapping,
"# TOW COPY BLOCK FROM MAPPING FILE END",
"ENTRYPOINT [/bin/sh]"])
def test_find_entrypoint_or_cmd(self):
d = Dockerfile("Dockerfile")
d._Dockerfile__dockerfile = ['FROM ubuntu', 'ENTRYPOINT ["/bin/sh"]', 'CMD ["-c"]']
self.assertEqual(d.find_entrypoint_or_cmd(), (["/bin/sh"], ["-c"]))
def test_find_entrypoint_or_cmd_shell_style(self):
d = Dockerfile("Dockerfile")
d._Dockerfile__dockerfile = ['FROM ubuntu', 'ENTRYPOINT /bin/sh', 'CMD ["-c"]']
self.assertEqual(d.find_entrypoint_or_cmd(), (["/bin/sh"], ["-c"]))
def test_find_entrypoint_or_cmd_cmd_only(self):
d = Dockerfile("Dockerfile")
d._Dockerfile__dockerfile = ['FROM ubuntu', 'CMD ["/bin/sh", "-c", "-x"]']
self.assertEqual(d.find_entrypoint_or_cmd(), (None, ["/bin/sh", "-c", "-x"]))
def test_find_entrypoint_or_cmd_entrypoint_only(self):
d = Dockerfile("Dockerfile")
d._Dockerfile__dockerfile = ['FROM ubuntu', 'ENTRYPOINT ["/bin/sh"]']
self.assertEqual(d.find_entrypoint_or_cmd(), (["/bin/sh"], None))
def test_find_entrypoint_or_cmd_none(self):
d = Dockerfile("Dockerfile")
d._Dockerfile__dockerfile = ['FROM ubuntu']
self.assertEqual(d.find_entrypoint_or_cmd(), (None, None))
|
docker-tow/tow
|
tests/dockerfile_tests.py
|
Python
|
apache-2.0
| 4,097 | 0.004882 |
#!/usr/bin/env python3
# Copyright (c) 2014-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the rawtransaction RPCs.
Test the following RPCs:
- createrawtransaction
- signrawtransactionwithwallet
- sendrawtransaction
- decoderawtransaction
- getrawtransaction
"""
from collections import OrderedDict
from decimal import Decimal
from io import BytesIO
from test_framework.messages import CTransaction, ToHex
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error, bytes_to_hex_str, connect_nodes_bi, hex_str_to_bytes
class multidict(dict):
"""Dictionary that allows duplicate keys.
Constructed with a list of (key, value) tuples. When dumped by the json module,
    will output invalid json with repeated keys, e.g.:
    >>> json.dumps(multidict([(1,2),(1,2)]))
'{"1": 2, "1": 2}'
Used to test calls to rpc methods with repeated keys in the json object."""
def __init__(self, x):
dict.__init__(self, x)
self.x = x
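    # json.dumps() serializes a dict by iterating items(), so returning the
    # original tuple list (duplicates included) yields the repeated keys.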
def items(self):
return self.x
# Create one-input, one-output, no-fee transaction:
class RawTransactionsTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 3
self.extra_args = [["-addresstype=legacy"], ["-addresstype=legacy"], ["-addresstype=legacy"]]
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def setup_network(self, split=False):
super().setup_network()
connect_nodes_bi(self.nodes, 0, 2)
def run_test(self):
self.log.info('prepare some coins for multiple *rawtransaction commands')
self.nodes[2].generate(1)
self.sync_all()
self.nodes[0].generate(101)
self.sync_all()
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.5)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.0)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),5.0)
self.sync_all()
self.nodes[0].generate(5)
self.sync_all()
self.log.info('Test getrawtransaction on genesis block coinbase returns an error')
block = self.nodes[0].getblock(self.nodes[0].getblockhash(0))
assert_raises_rpc_error(-5, "The genesis block coinbase is not considered an ordinary transaction", self.nodes[0].getrawtransaction, block['merkleroot'])
self.log.info('Check parameter types and required parameters of createrawtransaction')
# Test `createrawtransaction` required parameters
assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction)
assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction, [])
# Test `createrawtransaction` invalid extra parameters
assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction, [], {}, 0, False, 'foo')
# Test `createrawtransaction` invalid `inputs`
txid = '1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000'
assert_raises_rpc_error(-3, "Expected type array", self.nodes[0].createrawtransaction, 'foo', {})
assert_raises_rpc_error(-1, "JSON value is not an object as expected", self.nodes[0].createrawtransaction, ['foo'], {})
assert_raises_rpc_error(-8, "txid must be hexadecimal string", self.nodes[0].createrawtransaction, [{}], {})
assert_raises_rpc_error(-8, "txid must be hexadecimal string", self.nodes[0].createrawtransaction, [{'txid': 'foo'}], {})
assert_raises_rpc_error(-8, "Invalid parameter, missing vout key", self.nodes[0].createrawtransaction, [{'txid': txid}], {})
assert_raises_rpc_error(-8, "Invalid parameter, missing vout key", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': 'foo'}], {})
assert_raises_rpc_error(-8, "Invalid parameter, vout must be positive", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': -1}], {})
assert_raises_rpc_error(-8, "Invalid parameter, sequence number is out of range", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': 0, 'sequence': -1}], {})
# Test `createrawtransaction` invalid `outputs`
address = self.nodes[0].getnewaddress()
address2 = self.nodes[0].getnewaddress()
assert_raises_rpc_error(-1, "JSON value is not an array as expected", self.nodes[0].createrawtransaction, [], 'foo')
self.nodes[0].createrawtransaction(inputs=[], outputs={}) # Should not throw for backwards compatibility
self.nodes[0].createrawtransaction(inputs=[], outputs=[])
assert_raises_rpc_error(-8, "Data must be hexadecimal string", self.nodes[0].createrawtransaction, [], {'data': 'foo'})
assert_raises_rpc_error(-5, "Invalid Bitcoin address", self.nodes[0].createrawtransaction, [], {'foo': 0})
assert_raises_rpc_error(-3, "Invalid amount", self.nodes[0].createrawtransaction, [], {address: 'foo'})
assert_raises_rpc_error(-3, "Amount out of range", self.nodes[0].createrawtransaction, [], {address: -1})
assert_raises_rpc_error(-8, "Invalid parameter, duplicated address: %s" % address, self.nodes[0].createrawtransaction, [], multidict([(address, 1), (address, 1)]))
assert_raises_rpc_error(-8, "Invalid parameter, duplicated address: %s" % address, self.nodes[0].createrawtransaction, [], [{address: 1}, {address: 1}])
assert_raises_rpc_error(-8, "Invalid parameter, duplicate key: data", self.nodes[0].createrawtransaction, [], [{"data": 'aa'}, {"data": "bb"}])
assert_raises_rpc_error(-8, "Invalid parameter, duplicate key: data", self.nodes[0].createrawtransaction, [], multidict([("data", 'aa'), ("data", "bb")]))
assert_raises_rpc_error(-8, "Invalid parameter, key-value pair must contain exactly one key", self.nodes[0].createrawtransaction, [], [{'a': 1, 'b': 2}])
assert_raises_rpc_error(-8, "Invalid parameter, key-value pair not an object as expected", self.nodes[0].createrawtransaction, [], [['key-value pair1'], ['2']])
# Test `createrawtransaction` invalid `locktime`
assert_raises_rpc_error(-3, "Expected type number", self.nodes[0].createrawtransaction, [], {}, 'foo')
assert_raises_rpc_error(-8, "Invalid parameter, locktime out of range", self.nodes[0].createrawtransaction, [], {}, -1)
assert_raises_rpc_error(-8, "Invalid parameter, locktime out of range", self.nodes[0].createrawtransaction, [], {}, 4294967296)
# Test `createrawtransaction` invalid `replaceable`
assert_raises_rpc_error(-3, "Expected type bool", self.nodes[0].createrawtransaction, [], {}, 0, 'foo')
self.log.info('Check that createrawtransaction accepts an array and object as outputs')
tx = CTransaction()
# One output
tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs={address: 99}))))
assert_equal(len(tx.vout), 1)
assert_equal(
bytes_to_hex_str(tx.serialize()),
self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}]),
)
# Two outputs
tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=OrderedDict([(address, 99), (address2, 99)])))))
assert_equal(len(tx.vout), 2)
assert_equal(
bytes_to_hex_str(tx.serialize()),
self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}, {address2: 99}]),
)
# Multiple mixed outputs
tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=multidict([(address, 99), (address2, 99), ('data', '99')])))))
assert_equal(len(tx.vout), 3)
assert_equal(
bytes_to_hex_str(tx.serialize()),
self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}, {address2: 99}, {'data': '99'}]),
)
for type in ["bech32", "p2sh-segwit", "legacy"]:
addr = self.nodes[0].getnewaddress("", type)
addrinfo = self.nodes[0].getaddressinfo(addr)
pubkey = addrinfo["scriptPubKey"]
self.log.info('sendrawtransaction with missing prevtx info (%s)' %(type))
# Test `signrawtransactionwithwallet` invalid `prevtxs`
inputs = [ {'txid' : txid, 'vout' : 3, 'sequence' : 1000}]
outputs = { self.nodes[0].getnewaddress() : 1 }
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
prevtx = dict(txid=txid, scriptPubKey=pubkey, vout=3, amount=1)
succ = self.nodes[0].signrawtransactionwithwallet(rawtx, [prevtx])
assert succ["complete"]
if type == "legacy":
del prevtx["amount"]
succ = self.nodes[0].signrawtransactionwithwallet(rawtx, [prevtx])
assert succ["complete"]
if type != "legacy":
assert_raises_rpc_error(-3, "Missing amount", self.nodes[0].signrawtransactionwithwallet, rawtx, [
{
"txid": txid,
"scriptPubKey": pubkey,
"vout": 3,
}
])
assert_raises_rpc_error(-3, "Missing vout", self.nodes[0].signrawtransactionwithwallet, rawtx, [
{
"txid": txid,
"scriptPubKey": pubkey,
"amount": 1,
}
])
assert_raises_rpc_error(-3, "Missing txid", self.nodes[0].signrawtransactionwithwallet, rawtx, [
{
"scriptPubKey": pubkey,
"vout": 3,
"amount": 1,
}
])
assert_raises_rpc_error(-3, "Missing scriptPubKey", self.nodes[0].signrawtransactionwithwallet, rawtx, [
{
"txid": txid,
"vout": 3,
"amount": 1
}
])
#########################################
# sendrawtransaction with missing input #
#########################################
self.log.info('sendrawtransaction with missing input')
inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1}] #won't exists
outputs = { self.nodes[0].getnewaddress() : 4.998 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
rawtx = self.nodes[2].signrawtransactionwithwallet(rawtx)
# This will raise an exception since there are missing inputs
assert_raises_rpc_error(-25, "Missing inputs", self.nodes[2].sendrawtransaction, rawtx['hex'])
#####################################
# getrawtransaction with block hash #
#####################################
# make a tx by sending then generate 2 blocks; block1 has the tx in it
tx = self.nodes[2].sendtoaddress(self.nodes[1].getnewaddress(), 1)
block1, block2 = self.nodes[2].generate(2)
self.sync_all()
# We should be able to get the raw transaction by providing the correct block
gottx = self.nodes[0].getrawtransaction(tx, True, block1)
assert_equal(gottx['txid'], tx)
assert_equal(gottx['in_active_chain'], True)
# We should not have the 'in_active_chain' flag when we don't provide a block
gottx = self.nodes[0].getrawtransaction(tx, True)
assert_equal(gottx['txid'], tx)
assert 'in_active_chain' not in gottx
# We should not get the tx if we provide an unrelated block
assert_raises_rpc_error(-5, "No such transaction found", self.nodes[0].getrawtransaction, tx, True, block2)
# An invalid block hash should raise the correct errors
assert_raises_rpc_error(-8, "parameter 3 must be hexadecimal", self.nodes[0].getrawtransaction, tx, True, True)
assert_raises_rpc_error(-8, "parameter 3 must be hexadecimal", self.nodes[0].getrawtransaction, tx, True, "foobar")
assert_raises_rpc_error(-8, "parameter 3 must be of length 64", self.nodes[0].getrawtransaction, tx, True, "abcd1234")
assert_raises_rpc_error(-5, "Block hash not found", self.nodes[0].getrawtransaction, tx, True, "0000000000000000000000000000000000000000000000000000000000000000")
# Undo the blocks and check in_active_chain
self.nodes[0].invalidateblock(block1)
gottx = self.nodes[0].getrawtransaction(txid=tx, verbose=True, blockhash=block1)
assert_equal(gottx['in_active_chain'], False)
self.nodes[0].reconsiderblock(block1)
assert_equal(self.nodes[0].getbestblockhash(), block2)
#########################
# RAW TX MULTISIG TESTS #
#########################
# 2of2 test
addr1 = self.nodes[2].getnewaddress()
addr2 = self.nodes[2].getnewaddress()
addr1Obj = self.nodes[2].getaddressinfo(addr1)
addr2Obj = self.nodes[2].getaddressinfo(addr2)
# Tests for createmultisig and addmultisigaddress
assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 1, ["01020304"])
self.nodes[0].createmultisig(2, [addr1Obj['pubkey'], addr2Obj['pubkey']]) # createmultisig can only take public keys
assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 2, [addr1Obj['pubkey'], addr1]) # addmultisigaddress can take both pubkeys and addresses so long as they are in the wallet, which is tested here.
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr1])['address']
#use balance deltas instead of absolute values
bal = self.nodes[2].getbalance()
# send 1.2 BTC to msig adr
txId = self.nodes[0].sendtoaddress(mSigObj, 1.2)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
assert_equal(self.nodes[2].getbalance(), bal+Decimal('1.20000000')) #node2 has both keys of the 2of2 ms addr., tx should affect the balance
# 2of3 test from different nodes
bal = self.nodes[2].getbalance()
addr1 = self.nodes[1].getnewaddress()
addr2 = self.nodes[2].getnewaddress()
addr3 = self.nodes[2].getnewaddress()
addr1Obj = self.nodes[1].getaddressinfo(addr1)
addr2Obj = self.nodes[2].getaddressinfo(addr2)
addr3Obj = self.nodes[2].getaddressinfo(addr3)
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey']])['address']
txId = self.nodes[0].sendtoaddress(mSigObj, 2.2)
decTx = self.nodes[0].gettransaction(txId)
rawTx = self.nodes[0].decoderawtransaction(decTx['hex'])
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
#THIS IS AN INCOMPLETE FEATURE
        #NODE2 HAS TWO OF THREE KEYS AND THE FUNDS SHOULD BE SPENDABLE AND COUNT IN THE BALANCE CALCULATION
assert_equal(self.nodes[2].getbalance(), bal) #for now, assume the funds of a 2of3 multisig tx are not marked as spendable
txDetails = self.nodes[0].gettransaction(txId, True)
rawTx = self.nodes[0].decoderawtransaction(txDetails['hex'])
vout = False
for outpoint in rawTx['vout']:
if outpoint['value'] == Decimal('2.20000000'):
vout = outpoint
break
bal = self.nodes[0].getbalance()
inputs = [{ "txid" : txId, "vout" : vout['n'], "scriptPubKey" : vout['scriptPubKey']['hex'], "amount" : vout['value']}]
outputs = { self.nodes[0].getnewaddress() : 2.19 }
rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
rawTxPartialSigned = self.nodes[1].signrawtransactionwithwallet(rawTx, inputs)
assert_equal(rawTxPartialSigned['complete'], False) #node1 only has one key, can't comp. sign the tx
rawTxSigned = self.nodes[2].signrawtransactionwithwallet(rawTx, inputs)
assert_equal(rawTxSigned['complete'], True) #node2 can sign the tx compl., own two of three keys
self.nodes[2].sendrawtransaction(rawTxSigned['hex'])
rawTx = self.nodes[0].decoderawtransaction(rawTxSigned['hex'])
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
assert_equal(self.nodes[0].getbalance(), bal+Decimal('50.00000000')+Decimal('2.19000000')) #block reward + tx
# 2of2 test for combining transactions
bal = self.nodes[2].getbalance()
addr1 = self.nodes[1].getnewaddress()
addr2 = self.nodes[2].getnewaddress()
addr1Obj = self.nodes[1].getaddressinfo(addr1)
addr2Obj = self.nodes[2].getaddressinfo(addr2)
self.nodes[1].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address']
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address']
mSigObjValid = self.nodes[2].getaddressinfo(mSigObj)
txId = self.nodes[0].sendtoaddress(mSigObj, 2.2)
decTx = self.nodes[0].gettransaction(txId)
rawTx2 = self.nodes[0].decoderawtransaction(decTx['hex'])
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
assert_equal(self.nodes[2].getbalance(), bal) # the funds of a 2of2 multisig tx should not be marked as spendable
txDetails = self.nodes[0].gettransaction(txId, True)
rawTx2 = self.nodes[0].decoderawtransaction(txDetails['hex'])
vout = False
for outpoint in rawTx2['vout']:
if outpoint['value'] == Decimal('2.20000000'):
vout = outpoint
break
bal = self.nodes[0].getbalance()
inputs = [{ "txid" : txId, "vout" : vout['n'], "scriptPubKey" : vout['scriptPubKey']['hex'], "redeemScript" : mSigObjValid['hex'], "amount" : vout['value']}]
outputs = { self.nodes[0].getnewaddress() : 2.19 }
rawTx2 = self.nodes[2].createrawtransaction(inputs, outputs)
rawTxPartialSigned1 = self.nodes[1].signrawtransactionwithwallet(rawTx2, inputs)
self.log.debug(rawTxPartialSigned1)
assert_equal(rawTxPartialSigned1['complete'], False) #node1 only has one key, can't comp. sign the tx
rawTxPartialSigned2 = self.nodes[2].signrawtransactionwithwallet(rawTx2, inputs)
self.log.debug(rawTxPartialSigned2)
assert_equal(rawTxPartialSigned2['complete'], False) #node2 only has one key, can't comp. sign the tx
rawTxComb = self.nodes[2].combinerawtransaction([rawTxPartialSigned1['hex'], rawTxPartialSigned2['hex']])
self.log.debug(rawTxComb)
self.nodes[2].sendrawtransaction(rawTxComb)
rawTx2 = self.nodes[0].decoderawtransaction(rawTxComb)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
assert_equal(self.nodes[0].getbalance(), bal+Decimal('50.00000000')+Decimal('2.19000000')) #block reward + tx
# decoderawtransaction tests
# witness transaction
encrawtx = "010000000001010000000000000072c1a6a246ae63f74f931e8365e15a089c68d61900000000000000000000ffffffff0100e1f50500000000000102616100000000"
decrawtx = self.nodes[0].decoderawtransaction(encrawtx, True) # decode as witness transaction
assert_equal(decrawtx['vout'][0]['value'], Decimal('1.00000000'))
assert_raises_rpc_error(-22, 'TX decode failed', self.nodes[0].decoderawtransaction, encrawtx, False) # force decode as non-witness transaction
# non-witness transaction
encrawtx = "01000000010000000000000072c1a6a246ae63f74f931e8365e15a089c68d61900000000000000000000ffffffff0100e1f505000000000000000000"
decrawtx = self.nodes[0].decoderawtransaction(encrawtx, False) # decode as non-witness transaction
assert_equal(decrawtx['vout'][0]['value'], Decimal('1.00000000'))
# getrawtransaction tests
# 1. valid parameters - only supply txid
txHash = rawTx["hash"]
assert_equal(self.nodes[0].getrawtransaction(txHash), rawTxSigned['hex'])
# 2. valid parameters - supply txid and 0 for non-verbose
assert_equal(self.nodes[0].getrawtransaction(txHash, 0), rawTxSigned['hex'])
# 3. valid parameters - supply txid and False for non-verbose
assert_equal(self.nodes[0].getrawtransaction(txHash, False), rawTxSigned['hex'])
# 4. valid parameters - supply txid and 1 for verbose.
# We only check the "hex" field of the output so we don't need to update this test every time the output format changes.
assert_equal(self.nodes[0].getrawtransaction(txHash, 1)["hex"], rawTxSigned['hex'])
# 5. valid parameters - supply txid and True for non-verbose
assert_equal(self.nodes[0].getrawtransaction(txHash, True)["hex"], rawTxSigned['hex'])
# 6. invalid parameters - supply txid and string "Flase"
assert_raises_rpc_error(-1, "not a boolean", self.nodes[0].getrawtransaction, txHash, "Flase")
# 7. invalid parameters - supply txid and empty array
assert_raises_rpc_error(-1, "not a boolean", self.nodes[0].getrawtransaction, txHash, [])
# 8. invalid parameters - supply txid and empty dict
assert_raises_rpc_error(-1, "not a boolean", self.nodes[0].getrawtransaction, txHash, {})
inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : 1000}]
outputs = { self.nodes[0].getnewaddress() : 1 }
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
decrawtx= self.nodes[0].decoderawtransaction(rawtx)
assert_equal(decrawtx['vin'][0]['sequence'], 1000)
# 9. invalid parameters - sequence number out of range
inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : -1}]
outputs = { self.nodes[0].getnewaddress() : 1 }
assert_raises_rpc_error(-8, 'Invalid parameter, sequence number is out of range', self.nodes[0].createrawtransaction, inputs, outputs)
# 10. invalid parameters - sequence number out of range
inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : 4294967296}]
outputs = { self.nodes[0].getnewaddress() : 1 }
assert_raises_rpc_error(-8, 'Invalid parameter, sequence number is out of range', self.nodes[0].createrawtransaction, inputs, outputs)
inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : 4294967294}]
outputs = { self.nodes[0].getnewaddress() : 1 }
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
decrawtx= self.nodes[0].decoderawtransaction(rawtx)
assert_equal(decrawtx['vin'][0]['sequence'], 4294967294)
####################################
# TRANSACTION VERSION NUMBER TESTS #
####################################
# Test the minimum transaction version number that fits in a signed 32-bit integer.
tx = CTransaction()
tx.nVersion = -0x80000000
rawtx = ToHex(tx)
decrawtx = self.nodes[0].decoderawtransaction(rawtx)
assert_equal(decrawtx['version'], -0x80000000)
# Test the maximum transaction version number that fits in a signed 32-bit integer.
tx = CTransaction()
tx.nVersion = 0x7fffffff
rawtx = ToHex(tx)
decrawtx = self.nodes[0].decoderawtransaction(rawtx)
assert_equal(decrawtx['version'], 0x7fffffff)
if __name__ == '__main__':
RawTransactionsTest().main()
|
GlobalBoost/GlobalBoost-Y
|
test/functional/rpc_rawtransaction.py
|
Python
|
mit
| 24,022 | 0.008659 |
from inspect import getmembers
from django.shortcuts import render
from utilities import get_wps_service_engine, list_wps_service_engines, abstract_is_link
def home(request):
"""
Home page for Tethys WPS tool. Lists all the WPS services that are linked.
"""
wps_services = list_wps_service_engines()
context = {'wps_services': wps_services}
return render(request, 'tethys_wps/home.html', context)
def service(request, service):
"""
View that lists the processes for a given service.
"""
wps = get_wps_service_engine(service)
context = {'wps': wps,
'service': service}
return render(request, 'tethys_wps/service.html', context)
def process(request, service, identifier):
"""
View that displays a detailed description for a WPS process.
"""
wps = get_wps_service_engine(service)
wps_process = wps.describeprocess(identifier)
context = {'process': wps_process,
'service': service,
'is_link': abstract_is_link(wps_process)}
return render(request, 'tethys_wps/process.html', context)
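# A possible URL wiring for the three views above (a sketch only; the actual
# urls.py is not shown here, so the patterns and names are assumptions):
#
#   from django.conf.urls import url
#   from tethys_wps import views
#
#   urlpatterns = [
#       url(r'^$', views.home, name='home'),
#       url(r'^(?P<service>[\w-]+)/$', views.service, name='service'),
#       url(r'^(?P<service>[\w-]+)/process/(?P<identifier>[\w.:-]+)/$',
#           views.process, name='process'),
#   ]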
|
CI-WATER/django-tethys_wps
|
tethys_wps/views.py
|
Python
|
bsd-2-clause
| 1,115 | 0.000897 |
# -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsComposerEffects.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = '(C) 2012 by Dr. Horst Düster / Dr. Marco Hugentobler'
__date__ = '20/08/2012'
__copyright__ = 'Copyright 2012, The QGIS Project'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
import qgis
from PyQt4.QtCore import QFileInfo
from PyQt4.QtXml import QDomDocument
from PyQt4.QtGui import (QPainter, QColor)
from qgis.core import (QgsComposerShape,
QgsRectangle,
QgsComposition,
QgsMapRenderer
)
from utilities import (unitTestDataPath,
getQgisTestApp,
TestCase,
unittest,
expectedFailure
)
from qgscompositionchecker import QgsCompositionChecker
QGISAPP, CANVAS, IFACE, PARENT = getQgisTestApp()
TEST_DATA_DIR = unitTestDataPath()
class TestQgsComposerEffects(TestCase):
def __init__(self, methodName):
"""Run once on class initialisation."""
unittest.TestCase.__init__(self, methodName)
# create composition
self.mMapRenderer = QgsMapRenderer()
self.mComposition = QgsComposition(self.mMapRenderer)
self.mComposition.setPaperSize(297, 210)
self.mComposerRect1 = QgsComposerShape(20, 20, 150, 100, self.mComposition)
self.mComposerRect1.setShapeType(QgsComposerShape.Rectangle)
self.mComposerRect1.setBackgroundColor(QColor.fromRgb(255, 150, 0))
self.mComposition.addComposerShape(self.mComposerRect1)
self.mComposerRect2 = QgsComposerShape(50, 50, 150, 100, self.mComposition)
self.mComposerRect2.setShapeType(QgsComposerShape.Rectangle)
self.mComposerRect2.setBackgroundColor(QColor.fromRgb(0, 100, 150))
self.mComposition.addComposerShape(self.mComposerRect2)
def testBlendModes(self):
"""Test that blend modes work for composer items."""
self.mComposerRect2.setBlendMode(QPainter.CompositionMode_Multiply)
checker = QgsCompositionChecker('composereffects_blend', self.mComposition)
myTestResult, myMessage = checker.testComposition()
self.mComposerRect2.setBlendMode(QPainter.CompositionMode_SourceOver)
assert myTestResult == True, myMessage
def testTransparency(self):
"""Test that transparency works for composer items."""
self.mComposerRect2.setTransparency( 50 )
checker = QgsCompositionChecker('composereffects_transparency', self.mComposition)
myTestResult, myMessage = checker.testComposition()
self.mComposerRect2.setTransparency( 100 )
assert myTestResult == True, myMessage
if __name__ == '__main__':
unittest.main()
|
kiith-sa/QGIS
|
tests/src/python/test_qgscomposereffects.py
|
Python
|
gpl-2.0
| 3,073 | 0.004557 |
import traceback
class EnsureExceptionHandledGuard:
"""Helper for ensuring that Future's exceptions were handled.
This solves a nasty problem with Futures and Tasks that have an
exception set: if nobody asks for the exception, the exception is
never logged. This violates the Zen of Python: 'Errors should
never pass silently. Unless explicitly silenced.'
However, we don't want to log the exception as soon as
set_exception() is called: if the calling code is written
properly, it will get the exception and handle it properly. But
we *do* want to log it if result() or exception() was never called
-- otherwise developers waste a lot of time wondering why their
buggy code fails silently.
An earlier attempt added a __del__() method to the Future class
itself, but this backfired because the presence of __del__()
prevents garbage collection from breaking cycles. A way out of
this catch-22 is to avoid having a __del__() method on the Future
class itself, but instead to have a reference to a helper object
with a __del__() method that logs the traceback, where we ensure
that the helper object doesn't participate in cycles, and only the
Future has a reference to it.
The helper object is added when set_exception() is called. When
the Future is collected, and the helper is present, the helper
object is also collected, and its __del__() method will log the
traceback. When the Future's result() or exception() method is
called (and a helper object is present), it removes the helper
object, after calling its clear() method to prevent it from
logging.
One downside is that we do a fair amount of work to extract the
traceback from the exception, even when it is never logged. It
would seem cheaper to just store the exception object, but that
references the traceback, which references stack frames, which may
reference the Future, which references the _EnsureExceptionHandledGuard,
and then the _EnsureExceptionHandledGuard would be included in a cycle,
which is what we're trying to avoid! As an optimization, we don't
immediately format the exception; we only do the work when
activate() is called, which call is delayed until after all the
Future's callbacks have run. Since usually a Future has at least
one callback (typically set by 'yield from') and usually that
callback extracts the exception, thereby removing the need to
format the exception.
PS. I don't claim credit for this solution. I first heard of it
in a discussion about closing files when they are collected.
"""
__slots__ = ['exc', 'tb', 'hndl', 'cls']
def __init__(self, exc, handler):
self.exc = exc
self.hndl = handler
self.cls = type(exc)
self.tb = None
def activate(self):
exc = self.exc
if exc is not None:
self.exc = None
self.tb = traceback.format_exception(exc.__class__, exc,
exc.__traceback__)
def clear(self):
self.exc = None
self.tb = None
def __del__(self):
if self.tb:
self.hndl(self.cls, self.tb)
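# Minimal standalone demonstration of the guard described in the docstring
# (the stderr handler below is an assumption; any callable accepting
# (exception_class, formatted_traceback) works as the handler):
if __name__ == '__main__':
    import sys

    def _log(cls, tb):
        sys.stderr.write('Future exception was never retrieved: %s\n%s'
                         % (cls.__name__, ''.join(tb)))

    guard = EnsureExceptionHandledGuard(ValueError('boom'), _log)
    guard.activate()  # formats the traceback now, as a callback would trigger it
    del guard         # __del__ fires and logs, because clear() was never called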
|
mikhtonyuk/rxpython
|
concurrent/futures/cooperative/ensure_exception_handled.py
|
Python
|
mit
| 3,261 | 0 |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'exampleLoaderTemplate.ui'
#
# Created: Sat Dec 17 23:46:27 2011
# by: PyQt4 UI code generator 4.8.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName(_fromUtf8("Form"))
Form.resize(762, 302)
self.gridLayout = QtGui.QGridLayout(Form)
self.gridLayout.setMargin(0)
self.gridLayout.setSpacing(0)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.splitter = QtGui.QSplitter(Form)
self.splitter.setOrientation(QtCore.Qt.Horizontal)
self.splitter.setObjectName(_fromUtf8("splitter"))
self.layoutWidget = QtGui.QWidget(self.splitter)
self.layoutWidget.setObjectName(_fromUtf8("layoutWidget"))
self.verticalLayout = QtGui.QVBoxLayout(self.layoutWidget)
self.verticalLayout.setMargin(0)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.exampleTree = QtGui.QTreeWidget(self.layoutWidget)
self.exampleTree.setObjectName(_fromUtf8("exampleTree"))
self.exampleTree.headerItem().setText(0, _fromUtf8("1"))
self.exampleTree.header().setVisible(False)
self.verticalLayout.addWidget(self.exampleTree)
self.loadBtn = QtGui.QPushButton(self.layoutWidget)
self.loadBtn.setObjectName(_fromUtf8("loadBtn"))
self.verticalLayout.addWidget(self.loadBtn)
self.codeView = QtGui.QTextBrowser(self.splitter)
font = QtGui.QFont()
font.setFamily(_fromUtf8("Monospace"))
font.setPointSize(10)
self.codeView.setFont(font)
self.codeView.setObjectName(_fromUtf8("codeView"))
self.gridLayout.addWidget(self.splitter, 0, 0, 1, 1)
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
def retranslateUi(self, Form):
Form.setWindowTitle(QtGui.QApplication.translate("Form", "Form", None, QtGui.QApplication.UnicodeUTF8))
self.loadBtn.setText(QtGui.QApplication.translate("Form", "Load Example", None, QtGui.QApplication.UnicodeUTF8))
|
robertsj/poropy
|
pyqtgraph/examples/exampleLoaderTemplate.py
|
Python
|
mit
| 2,302 | 0.002172 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-22 11:18
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('cotacao', '0003_auto_20170312_2049'),
]
operations = [
migrations.RemoveField(
model_name='item',
name='pedido',
),
migrations.AddField(
model_name='pedido',
name='itens',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='itens', to='cotacao.Item'),
),
]
|
asleao/sistema-cotacao
|
project/cotacao/migrations/0004_auto_20170322_0818.py
|
Python
|
gpl-3.0
| 665 | 0.001504 |
'''Python sys.excepthook hook to generate apport crash dumps.'''
# Copyright (c) 2006 - 2009 Canonical Ltd.
# Authors: Robert Collins <robert@ubuntu.com>
# Martin Pitt <martin.pitt@ubuntu.com>
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version. See http://www.gnu.org/copyleft/gpl.html for
# the full text of the license.
import os
import sys
CONFIG = '/etc/default/apport'
def enabled():
'''Return whether Apport should generate crash reports.'''
# This doesn't use apport.packaging.enabled() because it is too heavyweight
# See LP: #528355
import re
try:
with open(CONFIG) as f:
conf = f.read()
            return re.search(r'^\s*enabled\s*=\s*0\s*$', conf, re.M) is None
except IOError:
# if the file does not exist, assume it's enabled
return True
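# For reference, /etc/default/apport carries a single relevant setting; only
# an explicit "enabled=0" line (what the regex above matches) disables
# reporting, e.g.:
#
#   enabled=1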
def apport_excepthook(exc_type, exc_obj, exc_tb):
'''Catch an uncaught exception and make a traceback.'''
# create and save a problem report. Note that exceptions in this code
# are bad, and we probably need a per-thread reentrancy guard to
# prevent that happening. However, on Ubuntu there should never be
# a reason for an exception here, other than [say] a read only var
# or some such. So what we do is use a try - finally to ensure that
# the original excepthook is invoked, and until we get bug reports
# ignore the other issues.
# import locally here so that there is no routine overhead on python
# startup time - only when a traceback occurs will this trigger.
try:
# ignore 'safe' exit types.
if exc_type in (KeyboardInterrupt, ):
return
# do not do anything if apport was disabled
if not enabled():
return
try:
from cStringIO import StringIO
StringIO # pyflakes
except ImportError:
from io import StringIO
import re, traceback
from apport.fileutils import likely_packaged, get_recent_crashes
# apport will look up the package from the executable path.
try:
binary = os.path.realpath(os.path.join(os.getcwd(), sys.argv[0]))
except (TypeError, AttributeError, IndexError):
# the module has mutated sys.argv, plan B
try:
binary = os.readlink('/proc/%i/exe' % os.getpid())
except OSError:
return
# for interactive python sessions, sys.argv[0] == ''; catch that and
# other irregularities
if not os.access(binary, os.X_OK) or not os.path.isfile(binary):
return
# filter out binaries in user accessible paths
if not likely_packaged(binary):
return
import apport.report
pr = apport.report.Report()
# special handling of dbus-python exceptions
if hasattr(exc_obj, 'get_dbus_name'):
if exc_obj.get_dbus_name() == 'org.freedesktop.DBus.Error.NoReply':
            # NoReply is a useless crash; we do not even get the method it
# was trying to call; needs actual crash from D-BUS backend (LP #914220)
return
if exc_obj.get_dbus_name() == 'org.freedesktop.DBus.Error.ServiceUnknown':
dbus_service_unknown_analysis(exc_obj, pr)
# append a basic traceback. In future we may want to include
# additional data such as the local variables, loaded modules etc.
tb_file = StringIO()
traceback.print_exception(exc_type, exc_obj, exc_tb, file=tb_file)
pr['Traceback'] = tb_file.getvalue().strip()
pr.add_proc_info(extraenv=['PYTHONPATH', 'PYTHONHOME'])
pr.add_user_info()
# override the ExecutablePath with the script that was actually running
pr['ExecutablePath'] = binary
if 'ExecutableTimestamp' in pr:
pr['ExecutableTimestamp'] = str(int(os.stat(binary).st_mtime))
try:
pr['PythonArgs'] = '%r' % sys.argv
except AttributeError:
pass
if pr.check_ignored():
return
mangled_program = re.sub('/', '_', binary)
# get the uid for now, user name later
user = os.getuid()
pr_filename = '%s/%s.%i.crash' % (os.environ.get(
'APPORT_REPORT_DIR', '/var/crash'), mangled_program, user)
crash_counter = 0
if os.path.exists(pr_filename):
if apport.fileutils.seen_report(pr_filename):
# flood protection
with open(pr_filename, 'rb') as f:
crash_counter = get_recent_crashes(f) + 1
if crash_counter > 1:
return
# remove the old file, so that we can create the new one with
# os.O_CREAT|os.O_EXCL
os.unlink(pr_filename)
else:
# don't clobber existing report
return
if crash_counter:
pr['CrashCounter'] = str(crash_counter)
with os.fdopen(os.open(pr_filename,
os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0o640), 'wb') as f:
pr.write(f)
finally:
# resume original processing to get the default behaviour,
# but do not trigger an AttributeError on interpreter shutdown.
if sys:
sys.__excepthook__(exc_type, exc_obj, exc_tb)
def dbus_service_unknown_analysis(exc_obj, report):
from glob import glob
import subprocess, re
try:
from configparser import ConfigParser, NoSectionError, NoOptionError
(ConfigParser, NoSectionError, NoOptionError) # pyflakes
except ImportError:
# Python 2
from ConfigParser import ConfigParser, NoSectionError, NoOptionError
# determine D-BUS name
    m = re.search(r'name\s+(\S+)\s+was not provided by any .service',
                  exc_obj.get_dbus_message())
if not m:
if sys.stderr:
sys.stderr.write('Error: cannot parse D-BUS name from exception: '
+ exc_obj.get_dbus_message())
return
dbus_name = m.group(1)
# determine .service file and Exec name for the D-BUS name
services = [] # tuples of (service file, exe name, running)
for f in glob('/usr/share/dbus-1/*services/*.service'):
cp = ConfigParser(interpolation=None)
cp.read(f, encoding='UTF-8')
try:
if cp.get('D-BUS Service', 'Name') == dbus_name:
exe = cp.get('D-BUS Service', 'Exec')
running = (subprocess.call(['pidof', '-sx', exe], stdout=subprocess.PIPE) == 0)
services.append((f, exe, running))
except (NoSectionError, NoOptionError):
if sys.stderr:
sys.stderr.write('Invalid D-BUS .service file %s: %s' % (
f, exc_obj.get_dbus_message()))
continue
if not services:
report['DbusErrorAnalysis'] = 'no service file providing ' + dbus_name
else:
report['DbusErrorAnalysis'] = 'provided by'
for (service, exe, running) in services:
report['DbusErrorAnalysis'] += ' %s (%s is %srunning)' % (
service, exe, ('' if running else 'not '))
def install():
'''Install the python apport hook.'''
sys.excepthook = apport_excepthook
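# Activation sketch (on real systems a distribution-installed startup file
# performs this import; the module name below is simply this file):
#
#   import apport_python_hook
#   apport_python_hook.install()
#   # any uncaught exception in a packaged program is now written to
#   # /var/crash/<mangled-executable-path>.<uid>.crash instead of only
#   # printing a traceback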
|
windflyer/apport
|
apport_python_hook.py
|
Python
|
gpl-2.0
| 7,544 | 0.001723 |
from fabric.api import run
from fabric.decorators import with_settings
from fabric.colors import green, yellow
from deployer.tasks.requirements import install_requirements
@with_settings(warn_only=True)
def setup_virtualenv(python_version='', app_name='', app_dir='', repo_url=''):
print(green("Setting up virtualenv on {}".format(app_dir)))
print(green('Creating virtualenv'))
if run("pyenv virtualenv {0} {1}-{0}".format(python_version, app_name)).failed:
print(yellow("Virtualenv already exists"))
install_requirements(app_name, python_version)
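# Invocation sketch (host and argument values are placeholders; fabric 1.x
# passes task arguments with the task:key=value syntax):
#
#   fab -H deploy@example.com setup_virtualenv:python_version=2.7.9,app_name=myapp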
|
streema/deployer
|
deployer/tasks/virtualenv.py
|
Python
|
mit
| 576 | 0.003472 |
#! usr/bin/python3
# -*- coding: utf-8 -*-
#
# Flicket - copyright Paul Bourne: evereux@gmail.com
import datetime
from flask import redirect, url_for, flash, g
from flask_babel import gettext
from flask_login import login_required
from . import flicket_bp
from application import app, db
from application.flicket.models.flicket_models import FlicketTicket, FlicketStatus
from application.flicket.scripts.email import FlicketMail
from application.flicket.scripts.flicket_functions import add_action
# view to release a ticket a user has been assigned to.
@flicket_bp.route(app.config['FLICKET'] + 'release/<int:ticket_id>/', methods=['GET', 'POST'])
@login_required
def release(ticket_id=False):
if ticket_id:
ticket = FlicketTicket.query.filter_by(id=ticket_id).first()
# is ticket assigned.
if not ticket.assigned:
flash(gettext('Ticket has not been assigned'), category='warning')
return redirect(url_for('flicket_bp.ticket_view', ticket_id=ticket_id))
# check ticket is owned by user or user is admin
if (ticket.assigned.id != g.user.id) and (not g.user.is_admin):
flash(gettext('You can not release a ticket you are not working on.'), category='warning')
return redirect(url_for('flicket_bp.ticket_view', ticket_id=ticket_id))
# set status to open
status = FlicketStatus.query.filter_by(status='Open').first()
ticket.current_status = status
ticket.last_updated = datetime.datetime.now()
user = ticket.assigned
ticket.assigned = None
user.total_assigned -= 1
db.session.commit()
# add action record
add_action(ticket, 'release')
# send email to state ticket has been released.
f_mail = FlicketMail()
f_mail.release_ticket(ticket)
flash(gettext('You released ticket: %(value)s', value=ticket.id), category='success')
return redirect(url_for('flicket_bp.ticket_view', ticket_id=ticket.id))
return redirect(url_for('flicket_bp.tickets'))
|
evereux/flicket
|
application/flicket/views/release.py
|
Python
|
mit
| 2,064 | 0.002907 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Optional, TYPE_CHECKING
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core import AsyncARMPipelineClient
from msrest import Deserializer, Serializer
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
from ._configuration import PeeringManagementClientConfiguration
from .operations import PeeringManagementClientOperationsMixin
from .operations import LegacyPeeringsOperations
from .operations import Operations
from .operations import PeerAsnsOperations
from .operations import PeeringLocationsOperations
from .operations import PeeringsOperations
from .operations import PeeringServiceLocationsOperations
from .operations import PeeringServicePrefixesOperations
from .operations import PrefixesOperations
from .operations import PeeringServiceProvidersOperations
from .operations import PeeringServicesOperations
from .. import models
class PeeringManagementClient(PeeringManagementClientOperationsMixin):
"""Peering Client.
:ivar legacy_peerings: LegacyPeeringsOperations operations
:vartype legacy_peerings: azure.mgmt.peering.aio.operations.LegacyPeeringsOperations
:ivar operations: Operations operations
:vartype operations: azure.mgmt.peering.aio.operations.Operations
:ivar peer_asns: PeerAsnsOperations operations
:vartype peer_asns: azure.mgmt.peering.aio.operations.PeerAsnsOperations
:ivar peering_locations: PeeringLocationsOperations operations
:vartype peering_locations: azure.mgmt.peering.aio.operations.PeeringLocationsOperations
:ivar peerings: PeeringsOperations operations
:vartype peerings: azure.mgmt.peering.aio.operations.PeeringsOperations
:ivar peering_service_locations: PeeringServiceLocationsOperations operations
:vartype peering_service_locations: azure.mgmt.peering.aio.operations.PeeringServiceLocationsOperations
:ivar peering_service_prefixes: PeeringServicePrefixesOperations operations
:vartype peering_service_prefixes: azure.mgmt.peering.aio.operations.PeeringServicePrefixesOperations
:ivar prefixes: PrefixesOperations operations
:vartype prefixes: azure.mgmt.peering.aio.operations.PrefixesOperations
:ivar peering_service_providers: PeeringServiceProvidersOperations operations
:vartype peering_service_providers: azure.mgmt.peering.aio.operations.PeeringServiceProvidersOperations
:ivar peering_services: PeeringServicesOperations operations
:vartype peering_services: azure.mgmt.peering.aio.operations.PeeringServicesOperations
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param subscription_id: The Azure subscription ID.
:type subscription_id: str
:param str base_url: Service URL
"""
def __init__(
self,
credential: "AsyncTokenCredential",
subscription_id: str,
base_url: Optional[str] = None,
**kwargs: Any
) -> None:
if not base_url:
base_url = 'https://management.azure.com'
self._config = PeeringManagementClientConfiguration(credential, subscription_id, **kwargs)
self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._serialize.client_side_validation = False
self._deserialize = Deserializer(client_models)
self.legacy_peerings = LegacyPeeringsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.operations = Operations(
self._client, self._config, self._serialize, self._deserialize)
self.peer_asns = PeerAsnsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.peering_locations = PeeringLocationsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.peerings = PeeringsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.peering_service_locations = PeeringServiceLocationsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.peering_service_prefixes = PeeringServicePrefixesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.prefixes = PrefixesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.peering_service_providers = PeeringServiceProvidersOperations(
self._client, self._config, self._serialize, self._deserialize)
self.peering_services = PeeringServicesOperations(
self._client, self._config, self._serialize, self._deserialize)
async def _send_request(self, http_request: HttpRequest, **kwargs: Any) -> AsyncHttpResponse:
"""Runs the network request through the client's chained policies.
:param http_request: The network request you want to make. Required.
:type http_request: ~azure.core.pipeline.transport.HttpRequest
:keyword bool stream: Whether the response payload will be streamed. Defaults to True.
:return: The response of your network call. Does not do error handling on your response.
:rtype: ~azure.core.pipeline.transport.AsyncHttpResponse
"""
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
http_request.url = self._client.format_url(http_request.url, **path_format_arguments)
stream = kwargs.pop("stream", True)
pipeline_response = await self._client._pipeline.run(http_request, stream=stream, **kwargs)
return pipeline_response.http_response
async def close(self) -> None:
await self._client.close()
async def __aenter__(self) -> "PeeringManagementClient":
await self._client.__aenter__()
return self
async def __aexit__(self, *exc_details) -> None:
await self._client.__aexit__(*exc_details)
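# Construction sketch (the credential type and subscription ID below are
# placeholders, not part of this module):
#
#   from azure.identity.aio import DefaultAzureCredential
#
#   async with PeeringManagementClient(
#       credential=DefaultAzureCredential(),
#       subscription_id="00000000-0000-0000-0000-000000000000",
#   ) as client:
#       ...  # e.g. iterate the operation groups exposed as attributes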
|
Azure/azure-sdk-for-python
|
sdk/peering/azure-mgmt-peering/azure/mgmt/peering/aio/_peering_management_client.py
|
Python
|
mit
| 6,754 | 0.002813 |
#!python3
"""
This script downloads the favicons
Usage:
python3 update_alexa path/to/data.csv
"""
import os
import requests
favicon_path = os.path.join(os.path.dirname(__file__), "..", "icons")
def download_favicons(links):
for link in links:
netloc = link['netloc']
url = 'http://' + netloc
new_favicon_path = os.path.join(favicon_path, netloc + ".ico")
if not os.path.exists(new_favicon_path):
try:
print(url)
response = requests.get(
"https://realfavicongenerator.p.rapidapi.com/favicon/icon",
params={'platform': 'desktop', "site": url},
headers={'X-Mashape-Key': os.environ.get("mashape_key")}
)
            except requests.RequestException:
                # network failures are ignored; the favicon is simply skipped
                pass
else:
if response:
with open(new_favicon_path, 'wb') as f:
f.write(response.content)
|
engineerapart/TheRemoteFreelancer
|
docs/scripts/download_favicons.py
|
Python
|
unlicense
| 964 | 0.001037 |
import os
import tempfile
import unittest
import logging
from pyidf import ValidationLevel
import pyidf
from pyidf.idf import IDF
from pyidf.daylighting import OutputControlIlluminanceMapStyle
log = logging.getLogger(__name__)
class TestOutputControlIlluminanceMapStyle(unittest.TestCase):
def setUp(self):
self.fd, self.path = tempfile.mkstemp()
def tearDown(self):
os.remove(self.path)
def test_create_outputcontrolilluminancemapstyle(self):
pyidf.validation_level = ValidationLevel.error
obj = OutputControlIlluminanceMapStyle()
# alpha
var_column_separator = "Comma"
obj.column_separator = var_column_separator
idf = IDF()
idf.add(obj)
idf.save(self.path, check=False)
with open(self.path, mode='r') as f:
for line in f:
log.debug(line.strip())
idf2 = IDF(self.path)
self.assertEqual(idf2.outputcontrolilluminancemapstyles[0].column_separator, var_column_separator)
|
rbuffat/pyidf
|
tests/test_outputcontrolilluminancemapstyle.py
|
Python
|
apache-2.0
| 1,023 | 0.002933 |
'''
Created on 24.03.2011
@author: michi
'''
from PyQt4.QtGui import QItemDelegate
from sqlalchemy import Table
from sqlalchemy.sql import Alias,Select
from ems import qt4
class ColumnSectionMapper(object):
def __init__(self,alchemySelect=None, parent=None):
self.__columnConfigs = []
self.__columnConfigIdByName = {}
self.__alchemySelect = alchemySelect
self.__delegate = MapperDelegate(self,parent)
def addColumn(self,columnName,translatedName=None, delegate=None):
if self.__columnConfigIdByName.has_key(columnName):
raise KeyError("Column %s already assigned" % columnName)
index = len(self.__columnConfigs)
self.__columnConfigs.append({'name':columnName,
'translatedName':translatedName,
'delegate':delegate})
self.__columnConfigIdByName[columnName] = index
@property
def translatedColumnNames(self):
names = {}
for config in self.__columnConfigs:
names[config['name']] = config['translatedName']
return names
def __extractTablesFormSelect(self,alchemySelect):
tableDict = {}
for fromCond in alchemySelect.locate_all_froms():
if isinstance(fromCond, Table):
tableDict[str(fromCond.name)] = fromCond
elif isinstance(fromCond,Alias):
if isinstance(fromCond.original,Table):
tableDict[str(fromCond.name)] = fromCond
return tableDict
def getDelegate(self):
return self.__delegate
def getColConfig(self, column):
if isinstance(column, int):
index = column
else:
index = self.__columnConfigIdByName[unicode(column)]
return self.__columnConfigs[index]
def getSelectColumns(self, alchemySelect=None):
if alchemySelect is None:
alchemySelect = self.__alchemySelect
if not isinstance(alchemySelect, Select):
            raise TypeError("alchemySelect has to be an instance of sqlalchemy.sql.Select")
tableDict = self.__extractTablesFormSelect(alchemySelect)
columnList = []
for config in self.__columnConfigs:
tableName,colName = config['name'].split('.')
if tableDict.has_key(tableName):
columnList.append(tableDict[tableName].c[colName])
return columnList
class MapperDelegate(QItemDelegate):
def __init__(self, mapper, parent=None):
super(MapperDelegate, self).__init__(parent)
self.__mapper = mapper
def getDelegate(self, index):
colName = index.data(qt4.ColumnNameRole).toString()
delegate = self.__mapper.getColConfig(colName)['delegate']
return delegate
def paint(self, painter, option, index):
delegate = self.getDelegate(index)
if delegate is not None:
delegate.paint(painter, option, index)
else:
QItemDelegate.paint(self, painter, option, index)
def createEditor(self, parent, option, index):
delegate = self.getDelegate(index)
if delegate is not None:
return delegate.createEditor(parent, option, index)
else:
return QItemDelegate.createEditor(self, parent, option,
index)
def setEditorData(self, editor, index):
delegate = self.getDelegate(index)
if delegate is not None:
delegate.setEditorData(editor, index)
else:
QItemDelegate.setEditorData(self, editor, index)
def setModelData(self, editor, model, index):
delegate = self.getDelegate(index)
if delegate is not None:
delegate.setModelData(editor, model, index)
else:
QItemDelegate.setModelData(self, editor, model, index)
|
mtils/ems
|
ems/qt4/itemmodel/columnsectionmapper.py
|
Python
|
mit
| 3,948 | 0.008359 |
# Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import mock
import testtools
from sahara import conductor as cond
from sahara import exceptions as ex
from sahara.plugins import base as pb
from sahara.service.edp import job_manager
from sahara.service.edp import job_utils
from sahara.service.edp.oozie.workflow_creator import workflow_factory
from sahara.swift import swift_helper as sw
from sahara.tests.unit import base
from sahara.tests.unit.service.edp import edp_test_utils as u
from sahara.utils import edp
from sahara.utils import patches as p
conductor = cond.API
_java_main_class = "org.apache.hadoop.examples.WordCount"
_java_opts = "-Dparam1=val1 -Dparam2=val2"
class TestJobManager(base.SaharaWithDbTestCase):
def setUp(self):
super(TestJobManager, self).setUp()
p.patch_minidom_writexml()
pb.setup_plugins()
@mock.patch('uuid.uuid4')
@mock.patch('sahara.utils.remote.get_remote')
def test_create_workflow_dir(self, get_remote, uuid4):
job = mock.Mock()
job.name = "job"
# This is to mock "with remote.get_remote(instance) as r"
remote_instance = mock.Mock()
get_remote.return_value.__enter__ = mock.Mock(
return_value=remote_instance)
remote_instance.execute_command = mock.Mock()
remote_instance.execute_command.return_value = 0, "standard out"
uuid4.return_value = "generated_uuid"
job_utils.create_workflow_dir("where", "/tmp/somewhere", job, "uuid")
remote_instance.execute_command.assert_called_with(
"mkdir -p /tmp/somewhere/job/uuid")
remote_instance.execute_command.reset_mock()
job_utils.create_workflow_dir("where", "/tmp/somewhere", job)
remote_instance.execute_command.assert_called_with(
"mkdir -p /tmp/somewhere/job/generated_uuid")
@mock.patch('sahara.service.edp.binary_retrievers.dispatch.get_raw_binary')
@mock.patch('sahara.utils.remote.get_remote')
def test_upload_job_files(self, get_remote, get_raw_binary):
main_names = ["main1", "main2", "main3"]
lib_names = ["lib1", "lib2", "lib3"]
def make_data_objects(*args):
objs = []
for name in args:
m = mock.Mock()
m.name = name
objs.append(m)
return objs
job = mock.Mock()
job.name = "job"
job.mains = make_data_objects(*main_names)
job.libs = make_data_objects(*lib_names)
# This is to mock "with remote.get_remote(instance) as r"
remote_instance = mock.Mock()
get_remote.return_value.__enter__ = mock.Mock(
return_value=remote_instance)
get_raw_binary.return_value = "data"
paths = job_utils.upload_job_files(
"where", "/somedir", job, libs_subdir=False)
self.assertEqual(paths,
["/somedir/" + n for n in main_names + lib_names])
for path in paths:
remote_instance.write_file_to.assert_any_call(path, "data")
remote_instance.write_file_to.reset_mock()
paths = job_utils.upload_job_files(
"where", "/somedir", job, libs_subdir=True)
remote_instance.execute_command.assert_called_with(
"mkdir -p /somedir/libs")
expected = ["/somedir/" + n for n in main_names]
expected += ["/somedir/libs/" + n for n in lib_names]
self.assertEqual(paths, expected)
for path in paths:
remote_instance.write_file_to.assert_any_call(path, "data")
@mock.patch('sahara.conductor.API.job_binary_get')
def test_build_workflow_for_job_pig(self, job_binary):
job, job_exec = u.create_job_exec(edp.JOB_TYPE_PIG, configs={})
job_binary.return_value = {"name": "script.pig"}
input_data = u.create_data_source('swift://ex/i')
output_data = u.create_data_source('swift://ex/o')
res = workflow_factory.get_workflow_xml(
job, u.create_cluster(), job_exec, input_data, output_data,
'hadoop')
self.assertIn("""
<param>INPUT=swift://ex.sahara/i</param>
<param>OUTPUT=swift://ex.sahara/o</param>""", res)
self.assertIn("""
<configuration>
<property>
<name>fs.swift.service.sahara.password</name>
<value>admin1</value>
</property>
<property>
<name>fs.swift.service.sahara.username</name>
<value>admin</value>
</property>
</configuration>""", res)
self.assertIn("<script>script.pig</script>", res)
# testing workflow creation with a proxy domain
self.override_config('use_domain_for_proxy_users', True)
self.override_config("proxy_user_domain_name", 'sahara_proxy_domain')
job, job_exec = u.create_job_exec(edp.JOB_TYPE_PIG, proxy=True)
res = workflow_factory.get_workflow_xml(
job, u.create_cluster(), job_exec, input_data, output_data,
'hadoop')
self.assertIn("""
<configuration>
<property>
<name>fs.swift.service.sahara.domain.name</name>
<value>sahara_proxy_domain</value>
</property>
<property>
<name>fs.swift.service.sahara.password</name>
<value>55555555-6666-7777-8888-999999999999</value>
</property>
<property>
<name>fs.swift.service.sahara.trust.id</name>
<value>0123456789abcdef0123456789abcdef</value>
</property>
<property>
<name>fs.swift.service.sahara.username</name>
<value>job_00000000-1111-2222-3333-4444444444444444</value>
</property>
</configuration>""", res)
@mock.patch('sahara.conductor.API.job_binary_get')
def test_build_workflow_swift_configs(self, job_binary):
# Test that swift configs come from either input or output data sources
job, job_exec = u.create_job_exec(edp.JOB_TYPE_PIG, configs={})
job_binary.return_value = {"name": "script.pig"}
input_data = u.create_data_source('swift://ex/i')
output_data = u.create_data_source('hdfs://user/hadoop/out')
res = workflow_factory.get_workflow_xml(
job, u.create_cluster(), job_exec, input_data, output_data,
'hadoop')
self.assertIn("""
<configuration>
<property>
<name>fs.swift.service.sahara.password</name>
<value>admin1</value>
</property>
<property>
<name>fs.swift.service.sahara.username</name>
<value>admin</value>
</property>
</configuration>""", res)
input_data = u.create_data_source('hdfs://user/hadoop/in')
output_data = u.create_data_source('swift://ex/o')
res = workflow_factory.get_workflow_xml(
job, u.create_cluster(), job_exec, input_data, output_data,
'hadoop')
self.assertIn("""
<configuration>
<property>
<name>fs.swift.service.sahara.password</name>
<value>admin1</value>
</property>
<property>
<name>fs.swift.service.sahara.username</name>
<value>admin</value>
</property>
</configuration>""", res)
job, job_exec = u.create_job_exec(
edp.JOB_TYPE_PIG, configs={'configs': {'dummy': 'value'}})
input_data = u.create_data_source('hdfs://user/hadoop/in')
output_data = u.create_data_source('hdfs://user/hadoop/out')
res = workflow_factory.get_workflow_xml(
job, u.create_cluster(), job_exec, input_data, output_data,
'hadoop')
self.assertIn("""
<configuration>
<property>
<name>dummy</name>
<value>value</value>
</property>
</configuration>""", res)
def _build_workflow_common(self, job_type, streaming=False, proxy=False):
if streaming:
configs = {'edp.streaming.mapper': '/usr/bin/cat',
'edp.streaming.reducer': '/usr/bin/wc'}
configs = {'configs': configs}
else:
configs = {}
job, job_exec = u.create_job_exec(job_type, configs)
input_data = u.create_data_source('swift://ex/i')
output_data = u.create_data_source('swift://ex/o')
res = workflow_factory.get_workflow_xml(
job, u.create_cluster(), job_exec, input_data, output_data,
'hadoop')
if streaming:
self.assertIn("""
<streaming>
<mapper>/usr/bin/cat</mapper>
<reducer>/usr/bin/wc</reducer>
</streaming>""", res)
self.assertIn("""
<property>
<name>mapred.output.dir</name>
<value>swift://ex.sahara/o</value>
</property>""", res)
self.assertIn("""
<property>
<name>mapred.input.dir</name>
<value>swift://ex.sahara/i</value>
</property>""", res)
if not proxy:
self.assertIn("""
<property>
<name>fs.swift.service.sahara.password</name>
<value>admin1</value>
</property>""", res)
self.assertIn("""
<property>
<name>fs.swift.service.sahara.username</name>
<value>admin</value>
</property>""", res)
else:
# testing workflow creation with a proxy domain
self.override_config('use_domain_for_proxy_users', True)
self.override_config("proxy_user_domain_name",
'sahara_proxy_domain')
job, job_exec = u.create_job_exec(job_type, proxy=True)
res = workflow_factory.get_workflow_xml(
job, u.create_cluster(), job_exec, input_data, output_data,
'hadoop')
self.assertIn("""
<property>
<name>fs.swift.service.sahara.domain.name</name>
<value>sahara_proxy_domain</value>
</property>
<property>
<name>fs.swift.service.sahara.password</name>
<value>55555555-6666-7777-8888-999999999999</value>
</property>
<property>
<name>fs.swift.service.sahara.trust.id</name>
<value>0123456789abcdef0123456789abcdef</value>
</property>
<property>
<name>fs.swift.service.sahara.username</name>
<value>job_00000000-1111-2222-3333-4444444444444444</value>
</property>""", res)
def test_build_workflow_for_job_mapreduce(self):
self._build_workflow_common(edp.JOB_TYPE_MAPREDUCE)
self._build_workflow_common(edp.JOB_TYPE_MAPREDUCE, streaming=True)
self._build_workflow_common(edp.JOB_TYPE_MAPREDUCE, proxy=True)
self._build_workflow_common(edp.JOB_TYPE_MAPREDUCE, streaming=True,
proxy=True)
def test_build_workflow_for_job_java(self):
# If args include swift paths, user and password values
# will have to be supplied via configs instead of being
# lifted from input or output data sources
configs = {sw.HADOOP_SWIFT_USERNAME: 'admin',
sw.HADOOP_SWIFT_PASSWORD: 'admin1'}
configs = {
'configs': configs,
'args': ['swift://ex/i',
'output_path']
}
job, job_exec = u.create_job_exec(edp.JOB_TYPE_JAVA, configs)
res = workflow_factory.get_workflow_xml(
job, u.create_cluster(), job_exec)
self.assertIn("""
<configuration>
<property>
<name>fs.swift.service.sahara.password</name>
<value>admin1</value>
</property>
<property>
<name>fs.swift.service.sahara.username</name>
<value>admin</value>
</property>
</configuration>
<main-class>%s</main-class>
<java-opts>%s</java-opts>
<arg>swift://ex.sahara/i</arg>
<arg>output_path</arg>""" % (_java_main_class, _java_opts), res)
# testing workflow creation with a proxy domain
self.override_config('use_domain_for_proxy_users', True)
self.override_config("proxy_user_domain_name", 'sahara_proxy_domain')
configs = {
'configs': {},
'args': ['swift://ex/i',
'output_path']
}
job, job_exec = u.create_job_exec(edp.JOB_TYPE_JAVA, configs,
proxy=True)
res = workflow_factory.get_workflow_xml(job, u.create_cluster(),
job_exec)
self.assertIn("""
<configuration>
<property>
<name>fs.swift.service.sahara.domain.name</name>
<value>sahara_proxy_domain</value>
</property>
<property>
<name>fs.swift.service.sahara.password</name>
<value>55555555-6666-7777-8888-999999999999</value>
</property>
<property>
<name>fs.swift.service.sahara.trust.id</name>
<value>0123456789abcdef0123456789abcdef</value>
</property>
<property>
<name>fs.swift.service.sahara.username</name>
<value>job_00000000-1111-2222-3333-4444444444444444</value>
</property>
</configuration>
<main-class>%s</main-class>
<java-opts>%s</java-opts>
<arg>swift://ex.sahara/i</arg>
<arg>output_path</arg>""" % (_java_main_class, _java_opts), res)
@mock.patch('sahara.conductor.API.job_binary_get')
def test_build_workflow_for_job_hive(self, job_binary):
job, job_exec = u.create_job_exec(edp.JOB_TYPE_HIVE, configs={})
job_binary.return_value = {"name": "script.q"}
input_data = u.create_data_source('swift://ex/i')
output_data = u.create_data_source('swift://ex/o')
res = workflow_factory.get_workflow_xml(
job, u.create_cluster(), job_exec, input_data, output_data,
'hadoop')
self.assertIn("""
<job-xml>/user/hadoop/conf/hive-site.xml</job-xml>
<configuration>
<property>
<name>fs.swift.service.sahara.password</name>
<value>admin1</value>
</property>
<property>
<name>fs.swift.service.sahara.username</name>
<value>admin</value>
</property>
</configuration>
<script>script.q</script>
<param>INPUT=swift://ex.sahara/i</param>
<param>OUTPUT=swift://ex.sahara/o</param>""", res)
# testing workflow creation with a proxy domain
self.override_config('use_domain_for_proxy_users', True)
self.override_config("proxy_user_domain_name", 'sahara_proxy_domain')
job, job_exec = u.create_job_exec(edp.JOB_TYPE_HIVE, proxy=True)
res = workflow_factory.get_workflow_xml(
job, u.create_cluster(), job_exec, input_data, output_data,
'hadoop')
self.assertIn("""
<job-xml>/user/hadoop/conf/hive-site.xml</job-xml>
<configuration>
<property>
<name>fs.swift.service.sahara.domain.name</name>
<value>sahara_proxy_domain</value>
</property>
<property>
<name>fs.swift.service.sahara.password</name>
<value>55555555-6666-7777-8888-999999999999</value>
</property>
<property>
<name>fs.swift.service.sahara.trust.id</name>
<value>0123456789abcdef0123456789abcdef</value>
</property>
<property>
<name>fs.swift.service.sahara.username</name>
<value>job_00000000-1111-2222-3333-4444444444444444</value>
</property>
</configuration>
<script>script.q</script>
<param>INPUT=swift://ex.sahara/i</param>
<param>OUTPUT=swift://ex.sahara/o</param>""", res)
def test_update_job_dict(self):
w = workflow_factory.BaseFactory()
job_dict = {'configs': {'default1': 'value1',
'default2': 'value2'},
'params': {'param1': 'value1',
'param2': 'value2'},
'args': ['replace this', 'and this']}
edp_configs = {'edp.streaming.mapper': '/usr/bin/cat',
'edp.streaming.reducer': '/usr/bin/wc'}
configs = {'default2': 'changed'}
configs.update(edp_configs)
params = {'param1': 'changed'}
exec_job_dict = {'configs': configs,
'params': params,
'args': ['replaced']}
orig_exec_job_dict = copy.deepcopy(exec_job_dict)
w.update_job_dict(job_dict, exec_job_dict)
self.assertEqual(job_dict,
{'edp_configs': edp_configs,
'configs': {'default1': 'value1',
'default2': 'changed'},
'params': {'param1': 'changed',
'param2': 'value2'},
'args': ['replaced']})
self.assertEqual(orig_exec_job_dict, exec_job_dict)
def test_inject_swift_url_suffix(self):
w = workflow_factory.BaseFactory()
self.assertEqual(w.inject_swift_url_suffix("swift://ex/o"),
"swift://ex.sahara/o")
self.assertEqual(w.inject_swift_url_suffix("swift://ex.sahara/o"),
"swift://ex.sahara/o")
self.assertEqual(w.inject_swift_url_suffix("hdfs://my/path"),
"hdfs://my/path")
@mock.patch('sahara.conductor.API.job_execution_update')
@mock.patch('sahara.service.edp.job_manager._run_job')
def test_run_job_handles_exceptions(self, runjob, job_ex_upd):
runjob.side_effect = ex.SwiftClientException("Unauthorised")
job, job_exec = u.create_job_exec(edp.JOB_TYPE_PIG)
job_manager.run_job(job_exec.id)
self.assertEqual(1, job_ex_upd.call_count)
new_status = job_ex_upd.call_args[0][2]["info"]["status"]
self.assertEqual(edp.JOB_STATUS_FAILED, new_status)
def test_get_plugin(self):
plugin = job_utils.get_plugin(u.create_cluster())
self.assertEqual("vanilla", plugin.name)
@mock.patch('sahara.conductor.API.job_get')
def test_job_type_supported(self, job_get):
job, job_exec = u.create_job_exec(edp.JOB_TYPE_PIG)
job_get.return_value = job
self.assertIsNotNone(job_manager._get_job_engine(u.create_cluster(),
job_exec))
job.type = "unsupported_type"
self.assertIsNone(job_manager._get_job_engine(u.create_cluster(),
job_exec))
@mock.patch('sahara.conductor.API.job_get')
@mock.patch('sahara.conductor.API.job_execution_get')
@mock.patch('sahara.conductor.API.cluster_get')
def test_run_job_unsupported_type(self,
cluster_get, job_exec_get, job_get):
job, job_exec = u.create_job_exec("unsupported_type")
job_exec_get.return_value = job_exec
job_get.return_value = job
cluster = u.create_cluster()
cluster.status = "Active"
cluster_get.return_value = cluster
with testtools.ExpectedException(ex.EDPError):
job_manager._run_job(job_exec.id)
@mock.patch('sahara.conductor.API.data_source_get')
def test_get_data_sources(self, ds):
def _conductor_data_source_get(ctx, id):
return "obj_" + id
job, job_exec = u.create_job_exec(edp.JOB_TYPE_PIG)
job_exec.input_id = 's1'
job_exec.output_id = 's2'
ds.side_effect = _conductor_data_source_get
input_source, output_source = (
job_utils.get_data_sources(job_exec, job))
self.assertEqual('obj_s1', input_source)
self.assertEqual('obj_s2', output_source)
def test_get_data_sources_java(self):
configs = {sw.HADOOP_SWIFT_USERNAME: 'admin',
sw.HADOOP_SWIFT_PASSWORD: 'admin1'}
configs = {
'configs': configs,
'args': ['swift://ex/i',
'output_path']
}
job, job_exec = u.create_job_exec(edp.JOB_TYPE_JAVA, configs)
input_source, output_source = (
job_utils.get_data_sources(job_exec, job))
self.assertEqual(None, input_source)
self.assertEqual(None, output_source)
|
citrix-openstack-build/sahara
|
sahara/tests/unit/service/edp/test_job_manager.py
|
Python
|
apache-2.0
| 20,947 | 0 |
def __load():
    # Stub generated for a frozen macOS app bundle: locate the compiled
    # pygame/font.so under a lib-dynload directory on sys.path and load it
    # dynamically in place of this stub module.
    import imp, os, sys
ext = 'pygame/font.so'
for path in sys.path:
if not path.endswith('lib-dynload'):
continue
ext_path = os.path.join(path, ext)
if os.path.exists(ext_path):
mod = imp.load_dynamic(__name__, ext_path)
break
else:
raise ImportError(repr(ext) + " not found")
__load()
del __load
|
mokuki082/EggDrop
|
code/build/bdist.macosx-10.6-intel/python3.4-standalone/app/temp/pygame/font.py
|
Python
|
gpl-3.0
| 393 | 0.005089 |
import re
source = [
('assert', 0x00, False, 'vreg'),
('raise', 0x05, False, 'vreg'),
('constant', 0x10, True, 'constant'),
('list', 0x20, True, 'vreg*'),
('move', 0x30, False, 'vreg vreg'),
('call', 0x40, True, 'vreg vreg*'),
('not', 0x41, True, 'vreg'),
('contains', 0x42, True, 'vreg vreg'),
('callv', 0x45, True, 'vreg vreg vreg*'),
('isnull', 0x48, True, 'vreg'),
('return', 0x50, False, 'vreg'),
('yield', 0x51, False, 'vreg'),
('jump', 0x60, False, 'block'),
('cond', 0x70, False, 'vreg block block'),
('func', 0x80, True, 'function'),
('iter', 0xC0, True, 'vreg'),
#('next', 0xC1, True, 'vreg'),
#('iterstop', 0xC2, False, 'block'),
('next', 0xC3, True, 'vreg block'),
('getattr', 0xD0, True, 'vreg string'),
('setattr', 0xD1, True, 'vreg string vreg'),
('getitem', 0xD2, True, 'vreg vreg'),
('setitem', 0xD3, True, 'vreg vreg vreg'),
('getloc', 0xE0, True, 'index'),
('setloc', 0xE1, True, 'index vreg'),
('getupv', 0xE2, True, 'index index'),
('setupv', 0xE3, True, 'index index vreg'),
('getglob', 0xF0, True, 'string'),
('setglob', 0xF1, True, 'string vreg'),
('loglob', 0xFF, False, 'vreg'),
]
enc = {}
dec = {}
names = {}
for opname, opcode, has_result, form in source:
assert opcode not in dec, opcode
pattern = re.split(r"\s+", form.rstrip('*'))
if form.endswith('*'):
variadic = pattern.pop()
else:
variadic = None
enc[opname] = opcode, has_result, pattern, variadic
dec[opcode] = opname, has_result, pattern, variadic
names[opcode] = opname
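# Example lookup against the tables built above (values follow directly from
# the 'call' entry in `source`):
if __name__ == '__main__':
    opcode, has_result, pattern, variadic = enc['call']
    assert opcode == 0x40 and has_result
    assert pattern == ['vreg'] and variadic == 'vreg'
    print(names[0x40])  # -> 'call'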
|
cheery/lever
|
runtime/evaluator/optable.py
|
Python
|
mit
| 1,719 | 0.001163 |
from __future__ import (absolute_import, division, print_function)
import unittest
from testhelpers import WorkspaceCreationHelper
class SpectrumInfoTest(unittest.TestCase):
_ws = None
def setUp(self):
if self.__class__._ws is None:
self.__class__._ws = WorkspaceCreationHelper.create2DWorkspaceWithFullInstrument(2, 1, False) # no monitors
self.__class__._ws.getSpectrum(0).clearDetectorIDs()
def test_hasDetectors(self):
info = self._ws.spectrumInfo()
self.assertEquals(info.hasDetectors(0), False)
self.assertEquals(info.hasDetectors(1), True)
def test_isMasked(self):
info = self._ws.spectrumInfo()
self.assertEquals(info.isMasked(1), False)
if __name__ == '__main__':
unittest.main()
|
ScreamingUdder/mantid
|
Framework/PythonInterface/test/python/mantid/api/SpectrumInfoTest.py
|
Python
|
gpl-3.0
| 788 | 0.005076 |
from django.utils.translation import gettext_lazy as _
from rest_framework import serializers
from ..models import MyUser, Profile
from ..utils import perform_reputation_check
class CreateUserSerializer(serializers.ModelSerializer):
password = serializers.CharField(
style={'input_type': 'password'}
)
class Meta:
model = MyUser
fields = ('email', 'password', 'first_name', 'last_name')
extra_kwargs = {'password': {'write_only': True}}
def create(self, validated_data):
user = MyUser.objects.create_user(
email=validated_data['email'],
first_name=validated_data['first_name'],
last_name=validated_data['last_name'],
password=validated_data['password']
)
return user
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = MyUser
fields = [
'id',
'email',
'first_name',
'last_name',
]
extra_kwargs = {'id': {'read_only': True}, 'email': {'read_only': True}}
def create(self, validated_data):
user = MyUser.objects.create_user(
email=validated_data['email'],
first_name=validated_data['first_name'],
last_name=validated_data['last_name'],
password=validated_data['password']
)
return user
def update(self, instance, validated_data):
instance.first_name = validated_data.get('first_name', instance.first_name)
instance.last_name = validated_data.get('last_name', instance.last_name)
instance.save()
return instance
class FollowSerializer(serializers.ModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='profiles:profile-detail')
full_name = serializers.SerializerMethodField()
class Meta:
model = Profile
fields = ['user_id', 'full_name', 'url']
def get_full_name(self, obj):
return obj.user.get_full_name()
class CreateProfileSerializer(serializers.ModelSerializer):
user = CreateUserSerializer()
class Meta:
model = Profile
fields = [
'user',
'follows'
]
def create(self, validated_data):
new_user = CreateUserSerializer().create(validated_data.pop('user'))
new_profile = Profile.objects.get(user_id=new_user.id)
new_profile.save()
return new_profile
class ProfileSerializer(serializers.ModelSerializer):
user = UserSerializer(read_only=True)
reputation = serializers.CharField(max_length=8, read_only=True)
follows = FollowSerializer(read_only=True, many=True)
url = serializers.HyperlinkedIdentityField(view_name='profiles:profile-detail')
questions_count = serializers.SerializerMethodField()
answers_count = serializers.SerializerMethodField()
followed_by = serializers.SerializerMethodField()
class Meta:
model = Profile
fields = [
'url',
'user',
'reputation',
'follows',
'questions_count',
'answers_count',
'followed_by'
]
def get_questions_count(self, obj):
return obj.user.questions.count()
def get_answers_count(self, obj):
return obj.user.answers.count()
def get_followed_by(self, obj):
return obj.profile_set.count()
class UpdateProfileSerializer(serializers.ModelSerializer):
user = UserSerializer()
class Meta:
model = Profile
fields = [
'user',
'reputation',
'follows',
]
def validate_follows(self, value):
if self.instance in value:
raise serializers.ValidationError(_('User cannot follow self'))
return value
def validate_reputation(self, value):
if value != perform_reputation_check(self.instance.user):
raise serializers.ValidationError(_('Selected reputation is not valid for this user'))
return value
def update(self, instance, validated_data):
UserSerializer().update(instance.user, validated_data.pop('user'))
instance.reputation = validated_data.get('reputation', instance.reputation)
if validated_data['follows']:
instance.follows.add(*validated_data['follows'])
instance.save()
return instance
class AuthorSerializer(serializers.ModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='profiles:profile-detail')
full_name = serializers.SerializerMethodField()
class Meta:
model = MyUser
fields = [
'id',
'email',
'url',
'full_name',
]
def get_full_name(self, obj):
return obj.get_full_name()
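# Usage sketch (payload shape inferred from the field definitions above;
# all values are placeholders):
#
#   serializer = CreateProfileSerializer(data={
#       'user': {'email': 'ada@example.com', 'password': 'secret',
#                'first_name': 'Ada', 'last_name': 'Lovelace'},
#       'follows': [],
#   })
#   if serializer.is_valid():
#       profile = serializer.save()  # creates the MyUser and returns its Profile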
|
TheRedLady/codebook
|
codebook/profiles/restapi/serializers.py
|
Python
|
gpl-3.0
| 4,811 | 0.001663 |
# -*- coding: utf-8 -*-
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth.views import login
import notifications.urls
import notifications.tests.views
urlpatterns = [
url(r'^login/$', login, name='login'), # needed for Django 1.6 tests
url(r'^admin/', include(admin.site.urls)),
url(r'^test_make/', notifications.tests.views.make_notification),
url(r'^test/', notifications.tests.views.live_tester),
url(r'^', include(notifications.urls, namespace='notifications')),
]
|
Evidlo/django-notifications
|
notifications/tests/urls.py
|
Python
|
bsd-3-clause
| 544 | 0 |
import random
filler_text = '''Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
Dolor laborum. et magna dolor nostrud Lorem qui deserunt do Excepteur laboris occaecat ut minim nisi dolore reprehenderit exercitation in eu quis aute aliquip laboris dolore deserunt cupidatat consequat. voluptate officia non irure commodo Duis amet, in mollit occaecat tempor ut sunt enim Excepteur sunt dolore minim do dolor fugiat voluptate Excepteur in adipisicing do undefined culpa cupidatat mollit proident, undefined commodo consectetur.
Labore deserunt aute incididunt amet, Excepteur labore qui velit ad Duis qui sint consequat. Ut dolore laboris mollit cillum reprehenderit aute non minim ad dolore Ut Excepteur reprehenderit sint aliqua. Duis reprehenderit culpa et Ut ipsum exercitation exercitation nulla ipsum ea ex in ullamco ea fugiat officia occaecat nulla nostrud cupidatat ea irure ad in ullamco culpa enim ullamco in enim in sed ad Ut velit dolor dolor eu.
Fugiat reprehenderit eiusmod adipisicing ad eiusmod sint aliquip id nostrud Duis aliquip labore ipsum mollit Ut Duis aute sit sed Ut dolor exercitation consequat. qui Duis velit aliquip nulla culpa non consequat. qui elit, amet, esse velit ea ad veniam, Excepteur aliqua. ut deserunt Ut aliquip deserunt elit, occaecat ullamco dolore aliquip voluptate laborum. elit, sit in dolore est.
Ullamco ut velit non culpa veniam, in consequat. nostrud sint amet, nulla in labore cillum non sed Ut veniam, dolor occaecat in do occaecat voluptate nostrud deserunt nisi labore in deserunt voluptate consectetur do quis exercitation nisi laboris eiusmod anim Ut reprehenderit occaecat magna nisi occaecat aliquip sed irure exercitation exercitation undefined adipisicing sint cupidatat eu labore sunt amet, officia Excepteur mollit sint magna sunt.
Aliqua. consectetur elit, Ut est officia veniam, nulla sint in ipsum dolore do aute fugiat exercitation aliquip commodo consequat. consectetur et do officia reprehenderit aute ut laboris quis culpa eu incididunt tempor reprehenderit ipsum aute veniam, aliqua. culpa Duis nostrud aute aliqua. id amet, sit aute id sunt laborum. velit nulla minim adipisicing tempor Duis est sint exercitation quis consequat. ut aliqua. eu reprehenderit.
Culpa Duis incididunt est elit, ea dolore tempor in occaecat non in pariatur. commodo sint commodo ut ut sit commodo reprehenderit ex eu laborum. magna non aliqua. eiusmod Excepteur enim deserunt velit veniam, et dolore eu cupidatat deserunt do irure eu sunt irure voluptate est officia undefined in occaecat labore dolor est mollit dolor incididunt in Excepteur.
Commodo nostrud pariatur. sit Excepteur est sunt culpa in dolore ex tempor cillum ut sint laboris Excepteur ut adipisicing laborum. enim pariatur. reprehenderit ut consectetur ad in dolore Excepteur velit ipsum adipisicing ex deserunt aliqua. cupidatat aliquip nisi sint consectetur laboris velit aliquip ex ullamco dolore in pariatur. non dolor ad velit nostrud veniam, laborum. esse laborum. dolor fugiat aute consequat. velit cillum qui in dolor.
Dolore ex nulla incididunt aute ut aute pariatur. est ipsum sunt occaecat quis ea dolor culpa aute esse id ex incididunt ad consectetur do ex mollit voluptate dolor enim dolor reprehenderit ut anim Duis dolor pariatur. aute velit consequat. in consequat. aliqua. aliquip est fugiat voluptate ad sit esse in adipisicing elit, dolor magna in dolor ullamco occaecat eu aliquip reprehenderit fugiat.
Ex qui Excepteur eiusmod Excepteur officia cillum id aliqua. ad in in minim quis ut culpa in irure nisi pariatur. magna nostrud aliquip eiusmod non occaecat dolor do quis non et ea quis dolor ut incididunt ea proident, Excepteur in tempor nisi sunt eu mollit consectetur mollit nostrud dolor in tempor ut nulla exercitation et dolore reprehenderit consectetur irure tempor sunt tempor elit, eiusmod in sit.
Sit commodo minim id eiusmod in Duis laboris Excepteur ut ea minim magna Duis deserunt velit veniam, proident, aliqua. dolore anim commodo ullamco do labore non ullamco non enim ipsum consectetur irure sint Lorem deserunt dolor commodo cillum velit dolore Excepteur laborum. in tempor anim mollit magna in quis consequat. non ex Duis undefined eiusmod pariatur. dolore dolor dolore pariatur. incididunt eiusmod Excepteur non id Duis et adipisicing in ea eu.
Sit aute nostrud ex laboris ad aliqua. est dolor commodo pariatur. anim Duis velit veniam, incididunt ullamco ad non dolore nisi esse pariatur. Excepteur ut mollit in aute sit anim tempor aliqua. cupidatat dolore ea cupidatat est consectetur Lorem nulla dolor velit ea commodo do officia incididunt nostrud in nostrud pariatur. occaecat anim.
Quis adipisicing fugiat sit sit tempor sit irure elit, consequat. non est est Ut non aute Duis magna eu labore ullamco et fugiat in veniam, dolor dolor sed tempor cupidatat proident, in ut eiusmod ad quis labore ad ipsum officia amet, non dolore nisi aute proident, deserunt Duis nulla Duis proident, sed est irure Ut minim dolor magna proident, magna ullamco commodo.
Dolor mollit ullamco aliqua. eu labore aliqua. sed officia enim qui nostrud eiusmod Excepteur aliquip quis officia in aliquip nostrud tempor proident, ea sed consequat. dolor aliqua. aliqua. in dolor in do ut eu mollit commodo nostrud amet, id Duis qui dolor velit sit cillum sit officia dolor cillum sunt in dolore consectetur tempor irure in sit dolore amet, fugiat nisi nulla sint exercitation cillum officia.
Sit velit ipsum commodo laboris cillum dolore aliquip sint laboris laborum. fugiat anim ipsum cupidatat est qui deserunt irure sit aliqua. veniam, id nisi sunt nisi occaecat mollit eiusmod et sint exercitation id Duis non sit irure cupidatat qui aliqua. do id tempor id in quis elit, fugiat dolore proident, irure do Excepteur qui non irure proident, nulla aliquip minim velit velit dolor voluptate adipisicing incididunt magna incididunt ad ad.
Laborum. in ullamco non consequat. Excepteur pariatur. fugiat eiusmod esse consectetur ea ex velit esse est voluptate tempor in dolor voluptate commodo magna consequat. nisi tempor et sit commodo ut aute cupidatat incididunt ut incididunt elit, ut ad veniam, mollit aute adipisicing consectetur ex id Excepteur ullamco esse laboris sit ad anim in amet, sunt.
Ut incididunt qui reprehenderit dolor Ut mollit tempor pariatur. tempor non commodo laboris Excepteur quis adipisicing aliqua. dolor incididunt Excepteur qui est esse sunt quis ex culpa ad consequat. voluptate sint cupidatat eiusmod minim enim sed aute Excepteur dolore incididunt cillum culpa cillum tempor pariatur. ipsum laborum. reprehenderit aliqua. Ut amet, ipsum amet, sunt veniam, sint Ut sint.
Ut in cillum consectetur adipisicing dolore Ut magna exercitation mollit pariatur. minim consequat. et in veniam, nulla enim ullamco sint Excepteur cupidatat consequat. ut sint fugiat tempor Duis eiusmod Excepteur officia qui anim eu proident, aute qui quis magna pariatur. tempor veniam, non exercitation irure dolor non proident, nisi qui pariatur. enim sint cupidatat fugiat elit, magna culpa in Duis exercitation deserunt et voluptate nostrud anim enim nisi proident, amet.
Est id ad elit, minim nulla velit incididunt ipsum deserunt sunt pariatur. sunt mollit voluptate laborum. mollit laboris voluptate dolore culpa ipsum labore in undefined voluptate cupidatat amet, sed in aliquip dolor tempor dolore in Ut dolor amet, eiusmod cupidatat in aliqua. ullamco incididunt aute Excepteur ad ullamco sit amet, mollit ex officia Duis.
Ex irure labore dolor aute reprehenderit ullamco elit, sit consectetur aliqua. non consectetur veniam, in dolor ipsum exercitation Lorem sed pariatur. laborum. consequat. culpa aliqua. Ut Duis laborum. Ut proident, aliquip adipisicing consectetur culpa magna do irure aute tempor quis incididunt cupidatat consequat. aliqua. dolor amet, dolor id nostrud est sit Excepteur mollit do mollit proident, veniam.
Ut non do deserunt irure incididunt veniam, veniam, occaecat nisi eiusmod ipsum nisi voluptate in officia undefined ex nisi velit nulla cillum est esse tempor consequat. do Ut qui cupidatat commodo fugiat culpa in ea cillum incididunt dolore pariatur. cupidatat Excepteur magna Ut proident, dolor Ut dolore id nostrud nulla mollit dolor veniam, consectetur tempor deserunt enim.
Sit in ut voluptate dolor deserunt occaecat enim aliqua. cupidatat anim enim sed labore qui culpa cillum cupidatat dolor esse dolor dolore commodo exercitation adipisicing qui consectetur adipisicing commodo fugiat nostrud proident, incididunt officia nisi eiusmod enim veniam, elit, occaecat in non in Ut laborum. ad ut reprehenderit ut commodo ex ex minim ad exercitation sint ex ullamco consequat. commodo culpa occaecat commodo nisi nulla labore Duis.
Amet, in ipsum occaecat exercitation qui laborum. undefined amet, officia reprehenderit laboris reprehenderit nulla ad velit dolor sint ex consequat. nulla enim occaecat in deserunt cupidatat cupidatat esse consectetur exercitation exercitation labore nisi consectetur exercitation nostrud nulla aliqua. esse laboris Lorem est mollit magna dolore sunt qui Lorem fugiat mollit consequat. ea reprehenderit reprehenderit et sed voluptate incididunt ullamco nisi consectetur non eiusmod qui nulla cupidatat.
Tempor qui voluptate mollit sed reprehenderit ut ipsum aliquip commodo proident, aliqua. exercitation laborum. occaecat ex deserunt do nostrud dolor Excepteur sit dolor in minim cupidatat nulla est culpa occaecat cillum pariatur. irure cupidatat sed est sit aute fugiat sint ipsum proident, aliquip ut aute sint aliquip ad nostrud nisi consequat. esse commodo laborum. Ut mollit ullamco eu undefined pariatur. velit dolore.
Proident, sit reprehenderit exercitation eu dolore Excepteur voluptate ea in dolor esse occaecat nulla laborum. sed aliquip nulla Duis adipisicing sunt in laborum. nostrud anim do nisi Duis undefined non eu dolore magna irure elit, eiusmod consequat. mollit aliquip dolor id minim id in aliquip ad deserunt dolore id est culpa deserunt culpa officia Excepteur laborum. esse.
In exercitation incididunt laborum. ea laborum. cupidatat ut in dolore nisi est laboris irure consequat. velit reprehenderit in dolor ipsum eu consequat. consequat. et laboris mollit dolore officia Excepteur irure eu cupidatat fugiat non ut Duis pariatur. enim eiusmod exercitation veniam, voluptate cillum elit, undefined sint Lorem Duis tempor deserunt quis consectetur in ut cupidatat consequat. ea ex.
Id adipisicing id tempor consectetur proident, quis laborum. deserunt ut et id veniam, proident, sunt ipsum non nostrud amet, qui ut laboris amet, exercitation cillum sed reprehenderit dolore labore cupidatat pariatur. magna dolore labore dolor id qui proident, enim veniam, aliquip laborum. ut fugiat dolore dolore sint cillum sit ut commodo in reprehenderit consectetur cillum Excepteur ex dolor nostrud ex minim.
Sed Ut ut esse dolor velit nostrud elit, cupidatat adipisicing cupidatat dolor labore dolore adipisicing eu consectetur incididunt Ut ut do aliqua. ex in laborum. eu cupidatat dolore adipisicing aliqua. qui dolore irure labore ex veniam, minim aliqua. cupidatat culpa est officia irure ea exercitation enim in ipsum culpa id adipisicing dolore ipsum aute exercitation ipsum irure anim veniam, dolor id veniam, ut ut sit minim.
Labore occaecat ut esse dolore eiusmod pariatur. tempor Lorem ullamco incididunt sint nisi eu cillum incididunt sunt qui exercitation Duis cupidatat qui proident, ad sit ad velit Lorem deserunt dolore ex ut enim aute nostrud dolore dolor do ea consectetur irure minim culpa ut dolor fugiat occaecat ut nostrud proident, sunt deserunt in ullamco consectetur minim sed commodo deserunt elit, Ut deserunt.
Duis ut dolor minim Ut adipisicing commodo pariatur. proident, dolor ut cupidatat in cupidatat sed et consequat. et laborum. minim enim fugiat occaecat tempor minim id veniam, sed ex ut aliquip ipsum dolore aliqua. exercitation ut laboris proident, ut ea ullamco Ut adipisicing tempor ullamco dolore sint in consequat. ex nisi in.
Anim Duis deserunt deserunt anim exercitation veniam, in id irure ut do ad consectetur tempor reprehenderit in eu adipisicing cupidatat labore laborum. culpa irure est anim exercitation dolor occaecat ut non adipisicing culpa ullamco tempor dolor esse exercitation magna et in dolore do voluptate reprehenderit tempor Excepteur magna dolor non cillum eiusmod dolore anim aute.
Ut sint reprehenderit culpa ut in tempor ea nulla veniam, in elit, irure consectetur dolor aliqua. ad tempor irure ipsum Ut nisi ullamco aliqua. veniam, officia qui occaecat dolore irure Ut reprehenderit mollit eu dolore adipisicing laboris deserunt dolore veniam, amet, velit dolore elit, anim sunt est ex laboris mollit culpa consequat. sint mollit consequat. in eiusmod incididunt ad exercitation esse.
Velit labore in aliquip id ut minim id ex consequat. labore officia Ut nulla commodo elit, sint Excepteur sed dolor eiusmod in reprehenderit aute occaecat nostrud proident, adipisicing enim irure velit dolor mollit officia undefined occaecat nisi nisi pariatur. nostrud labore amet, eiusmod aute id eu commodo dolor dolor pariatur. do reprehenderit aliquip exercitation cupidatat amet, proident, dolor aute esse voluptate.
Dolore irure ut officia et esse sed reprehenderit cupidatat consectetur cupidatat veniam, officia consequat. Excepteur ex ullamco qui sint non voluptate eiusmod irure in laborum. cupidatat dolore laboris adipisicing reprehenderit elit, fugiat in cupidatat quis consequat. ut quis nulla elit, in consectetur labore sed fugiat cillum enim Ut quis aute tempor cillum laboris ex sunt.
Consequat. do sunt eu enim elit, enim veniam, Duis amet, qui amet, officia sint labore dolore dolore eiusmod est qui reprehenderit consequat. aute Excepteur cillum id velit dolore mollit cupidatat minim cupidatat voluptate irure sunt eiusmod Ut ex adipisicing in consequat. enim magna aliqua. mollit laboris ad ex irure cillum esse consequat. non magna aliquip id amet, in esse adipisicing voluptate Duis Duis in.
Dolor consequat. nulla labore exercitation in commodo dolore fugiat anim cillum anim ea sint dolore deserunt ut occaecat aliqua. adipisicing ipsum culpa laborum. ullamco id quis deserunt ut pariatur. eu quis nulla enim commodo in non in quis cillum ullamco nulla ut officia esse ut id elit, dolore Duis enim ea sunt non reprehenderit ipsum dolore incididunt Duis id enim laboris amet.
Est ex amet, consectetur in nulla dolore nisi anim cupidatat sunt officia quis cupidatat amet, veniam, deserunt nisi magna ut aute amet, tempor enim est nulla adipisicing incididunt voluptate veniam, dolor mollit occaecat cupidatat eiusmod occaecat id nulla Lorem reprehenderit reprehenderit aliqua. fugiat labore Excepteur quis pariatur. quis adipisicing sint est dolore Lorem sed dolor est est enim aute irure laboris consectetur.
Sint velit sed incididunt amet, id culpa laboris cupidatat adipisicing culpa dolore deserunt in esse mollit anim sunt deserunt do nisi Ut qui anim ex quis tempor anim occaecat id anim tempor adipisicing Ut non tempor voluptate qui sed laboris id eiusmod aliquip occaecat minim mollit est nostrud aliquip ut nisi sunt sunt labore deserunt nulla incididunt Ut non voluptate cillum quis cupidatat commodo elit, magna.
Reprehenderit aliqua. et undefined cupidatat incididunt ad elit, labore commodo nostrud ullamco aute elit, Ut minim in eiusmod ullamco ullamco quis tempor reprehenderit consequat. magna aliquip ut proident, dolore eiusmod dolor magna commodo adipisicing ex tempor velit amet, reprehenderit nostrud non fugiat Excepteur ea id sunt enim Ut irure ut ipsum.
Consequat. pariatur. eu nulla laborum. aute enim veniam, sed qui ullamco id Ut labore officia non ipsum nostrud dolor dolor ex et consequat. enim id voluptate sit pariatur. aliqua. commodo eiusmod eu esse dolore reprehenderit elit, do ea ea cillum ut occaecat cupidatat enim culpa Excepteur in commodo Lorem et sed exercitation sit qui tempor irure tempor ut ea ullamco esse.
Cillum elit, est sunt Excepteur eu pariatur. dolor sunt minim qui ullamco consequat. ex do magna dolor occaecat enim id adipisicing magna consectetur ea sunt et dolore est non culpa id in sed elit, nulla fugiat ea cupidatat proident, esse minim in proident, nulla proident, amet, ex esse cillum Ut ex labore.
Sunt quis irure magna dolor ut exercitation pariatur. consequat. anim veniam, nulla nulla laborum. fugiat anim deserunt mollit deserunt consequat. pariatur. incididunt sed Lorem veniam, sit reprehenderit ullamco ex tempor cillum do Excepteur ad dolore sint ut do dolore quis undefined non dolore labore quis cupidatat labore et mollit undefined esse anim sed cillum ipsum sunt magna ut.
Commodo fugiat laborum. ea occaecat undefined dolor Ut tempor commodo eiusmod labore sed in in culpa Excepteur Duis velit labore dolor veniam, voluptate Excepteur anim amet, exercitation cillum amet, qui sed nostrud enim in dolore consequat. id proident, ut sit amet, sunt proident, commodo dolore dolor esse officia irure dolore aliquip in dolor aute magna deserunt quis aliqua.
Nulla id reprehenderit sint qui eu magna voluptate in commodo exercitation dolor adipisicing laborum. esse sed ullamco Duis sed aute velit culpa ea ea consequat. reprehenderit officia in sint in dolore veniam, proident, ex amet, sint cillum ut ad est est laboris est ullamco elit, enim nostrud esse culpa eu tempor dolore Duis.
Officia commodo reprehenderit incididunt non ipsum amet, deserunt in et officia ipsum tempor in sed Duis ullamco cupidatat Lorem ullamco dolor aliqua. reprehenderit elit, non do est ipsum ad eu quis ut laborum. commodo nulla et fugiat Excepteur consequat. sunt laboris adipisicing commodo et adipisicing officia reprehenderit nisi in do aute reprehenderit deserunt eu laboris occaecat eu et dolore nisi.
Ex sint ut enim velit adipisicing deserunt qui dolor aliqua. consectetur veniam, cupidatat consequat. aliquip officia irure culpa Excepteur ex qui laborum. et minim reprehenderit Ut ullamco sit aliquip qui in sit eiusmod proident, Ut ut ut adipisicing voluptate eiusmod reprehenderit do id amet, fugiat est labore ea ad Lorem eiusmod.
Aliquip dolore sunt occaecat non ut velit pariatur. minim consequat. anim laborum. elit, enim pariatur. culpa magna cupidatat aliqua. sint nulla ut laboris dolor ea velit ullamco labore eu dolore tempor ut id ut minim veniam, amet, nulla exercitation consectetur in et undefined occaecat ullamco aute laborum. aliqua. ea incididunt in velit elit, dolor incididunt irure ipsum in est ut deserunt sit ut tempor.
Voluptate consequat. consequat. est ut culpa velit incididunt sed quis eu qui occaecat dolor ad quis qui Duis non non ullamco dolor cillum dolor labore Ut qui minim occaecat irure officia esse cupidatat eu irure laboris quis ea tempor laboris consectetur officia officia incididunt dolore sunt sed qui in labore velit sed eiusmod esse.
Minim ea amet, in commodo fugiat laborum. cillum ad eiusmod esse qui do aliquip id esse id dolore laborum. anim aliqua. ea qui aute ea tempor est eu nisi nulla sunt incididunt Ut id minim ipsum incididunt labore sit exercitation laborum. irure velit Duis veniam, nisi officia ex in ex voluptate ea ex elit, mollit sunt cillum cillum.
Eiusmod quis dolore reprehenderit qui aliqua. fugiat dolor tempor elit, pariatur. elit, voluptate do consequat. consectetur aute laborum. sit incididunt fugiat sit veniam, tempor ullamco quis Ut commodo dolore cillum est anim occaecat nulla deserunt Ut ut ut enim dolor dolor reprehenderit Duis nostrud amet, Excepteur dolor dolore ut eiusmod sed ullamco quis laboris aute et aliquip ipsum ea non nostrud veniam.'''
filler_lines = filler_text.split('\n')
def random_text(num_paragraphs=3):
return '\n\n'.join(random.choice(filler_lines) for i in range(num_paragraphs))
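# Example: random_text(5) returns five randomly chosen filler paragraphs joined by blank lines.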
|
davidxmoody/diary
|
tests/filler_text.py
|
Python
|
mit
| 20,162 | 0.002579 |
from datetime import datetime, timedelta
from .interchange import WaypointType, ActivityStatisticUnit, ActivityType, LapIntensity, LapTriggerMethod
from .devices import DeviceIdentifier, DeviceIdentifierType
import struct
import sys
import pytz
class FITFileType:
Activity = 4 # The only one we care about now.
class FITManufacturer:
DEVELOPMENT = 255 # $1500/year for one of these numbers.
class FITEvent:
Timer = 0
Lap = 9
Activity = 26
class FITEventType:
Start = 0
Stop = 1
# It's not a coincidence that these enums match the ones in interchange perfectly
class FITLapIntensity:
Active = 0
Rest = 1
Warmup = 2
Cooldown = 3
class FITLapTriggerMethod:
Manual = 0
Time = 1
Distance = 2
PositionStart = 3
PositionLap = 4
PositionWaypoint = 5
PositionMarked = 6
SessionEnd = 7
FitnessEquipment = 8
class FITActivityType:
GENERIC = 0
RUNNING = 1
CYCLING = 2
TRANSITION = 3
FITNESS_EQUIPMENT = 4
SWIMMING = 5
WALKING = 6
ALL = 254
class FITMessageDataType:
def __init__(self, name, typeField, size, packFormat, invalid, formatter=None):
self.Name = name
self.TypeField = typeField
self.Size = size
self.PackFormat = packFormat
self.Formatter = formatter
self.InvalidValue = invalid
class FITMessageTemplate:
def __init__(self, name, number, *args, fields=None):
self.Name = name
self.Number = number
self.Fields = {}
self.FieldNameSet = set()
self.FieldNameList = []
if len(args) == 1 and type(args[0]) is dict:
fields = args[0]
self.Fields = fields
self.FieldNameSet = set(fields.keys()) # It strikes me that keys might already be a set?
else:
# Supply fields in order NUM, NAME, TYPE
for x in range(0, int(len(args)/3)):
n = x * 3
self.Fields[args[n+1]] = {"Name": args[n+1], "Number": args[n], "Type": args[n+2]}
self.FieldNameSet.add(args[n+1])
sortedFields = list(self.Fields.values())
sortedFields.sort(key = lambda x: x["Number"])
self.FieldNameList = [x["Name"] for x in sortedFields] # *ordered*
class FITMessageGenerator:
def __init__(self):
self._types = {}
self._messageTemplates = {}
self._definitions = {}
self._result = []
        # All our convenience functions for preparing the field types to be packed.
def stringFormatter(input):
            raise NotImplementedError("Not implemented")
def dateTimeFormatter(input):
# UINT32
# Seconds since UTC 00:00 Dec 31 1989. If <0x10000000 = system time
if input is None:
return struct.pack("<I", 0xFFFFFFFF)
delta = round((input - datetime(hour=0, minute=0, month=12, day=31, year=1989)).total_seconds())
return struct.pack("<I", delta)
def msecFormatter(input):
# UINT32
if input is None:
return struct.pack("<I", 0xFFFFFFFF)
return struct.pack("<I", round((input if type(input) is not timedelta else input.total_seconds()) * 1000))
def mmPerSecFormatter(input):
# UINT16
if input is None:
return struct.pack("<H", 0xFFFF)
return struct.pack("<H", round(input * 1000))
def cmFormatter(input):
# UINT32
if input is None:
return struct.pack("<I", 0xFFFFFFFF)
return struct.pack("<I", round(input * 100))
def altitudeFormatter(input):
# UINT16
if input is None:
return struct.pack("<H", 0xFFFF)
return struct.pack("<H", round((input + 500) * 5)) # Increments of 1/5, offset from -500m :S
def semicirclesFormatter(input):
# SINT32
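            # FIT stores coordinates as "semicircles": 180 degrees == 2**31 semicircles.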
if input is None:
return struct.pack("<i", 0x7FFFFFFF) # FIT-defined invalid value
return struct.pack("<i", round(input * (2 ** 31 / 180)))
def versionFormatter(input):
# UINT16
if input is None:
return struct.pack("<H", 0xFFFF)
return struct.pack("<H", round(input * 100))
def defType(name, *args, **kwargs):
aliases = [name] if type(name) is not list else name
# Cheap cheap cheap
for alias in aliases:
self._types[alias] = FITMessageDataType(alias, *args, **kwargs)
defType(["enum", "file"], 0x00, 1, "B", 0xFF)
defType("sint8", 0x01, 1, "b", 0x7F)
defType("uint8", 0x02, 1, "B", 0xFF)
defType("sint16", 0x83, 2, "h", 0x7FFF)
defType(["uint16", "manufacturer"], 0x84, 2, "H", 0xFFFF)
defType("sint32", 0x85, 4, "i", 0x7FFFFFFF)
defType("uint32", 0x86, 4, "I", 0xFFFFFFFF)
defType("string", 0x07, None, None, 0x0, formatter=stringFormatter)
defType("float32", 0x88, 4, "f", 0xFFFFFFFF)
defType("float64", 0x89, 8, "d", 0xFFFFFFFFFFFFFFFF)
defType("uint8z", 0x0A, 1, "B", 0x00)
defType("uint16z", 0x0B, 2, "H", 0x00)
defType("uint32z", 0x0C, 4, "I", 0x00)
defType("byte", 0x0D, 1, "B", 0xFF) # This isn't totally correct, docs say "an array of bytes"
# Not strictly FIT fields, but convenient.
defType("date_time", 0x86, 4, None, 0xFFFFFFFF, formatter=dateTimeFormatter)
defType("duration_msec", 0x86, 4, None, 0xFFFFFFFF, formatter=msecFormatter)
defType("distance_cm", 0x86, 4, None, 0xFFFFFFFF, formatter=cmFormatter)
defType("mmPerSec", 0x84, 2, None, 0xFFFF, formatter=mmPerSecFormatter)
defType("semicircles", 0x85, 4, None, 0x7FFFFFFF, formatter=semicirclesFormatter)
defType("altitude", 0x84, 2, None, 0xFFFF, formatter=altitudeFormatter)
defType("version", 0x84, 2, None, 0xFFFF, formatter=versionFormatter)
def defMsg(name, *args):
self._messageTemplates[name] = FITMessageTemplate(name, *args)
defMsg("file_id", 0,
0, "type", "file",
1, "manufacturer", "manufacturer",
2, "product", "uint16",
3, "serial_number", "uint32z",
4, "time_created", "date_time",
5, "number", "uint16")
defMsg("file_creator", 49,
0, "software_version", "uint16",
1, "hardware_version", "uint8")
defMsg("activity", 34,
253, "timestamp", "date_time",
1, "num_sessions", "uint16",
2, "type", "enum",
3, "event", "enum", # Required
4, "event_type", "enum",
5, "local_timestamp", "date_time")
defMsg("session", 18,
253, "timestamp", "date_time",
2, "start_time", "date_time", # Vs timestamp, which was whenever the record was "written"/end of the session
7, "total_elapsed_time", "duration_msec", # Including pauses
8, "total_timer_time", "duration_msec", # Excluding pauses
59, "total_moving_time", "duration_msec",
5, "sport", "enum",
6, "sub_sport", "enum",
0, "event", "enum",
1, "event_type", "enum",
9, "total_distance", "distance_cm",
11,"total_calories", "uint16",
14, "avg_speed", "mmPerSec",
15, "max_speed", "mmPerSec",
16, "avg_heart_rate", "uint8",
17, "max_heart_rate", "uint8",
18, "avg_cadence", "uint8",
19, "max_cadence", "uint8",
20, "avg_power", "uint16",
21, "max_power", "uint16",
22, "total_ascent", "uint16",
23, "total_descent", "uint16",
49, "avg_altitude", "altitude",
50, "max_altitude", "altitude",
71, "min_altitude", "altitude",
57, "avg_temperature", "sint8",
58, "max_temperature", "sint8")
defMsg("lap", 19,
253, "timestamp", "date_time",
0, "event", "enum",
1, "event_type", "enum",
25, "sport", "enum",
23, "intensity", "enum",
24, "lap_trigger", "enum",
2, "start_time", "date_time", # Vs timestamp, which was whenever the record was "written"/end of the session
7, "total_elapsed_time", "duration_msec", # Including pauses
8, "total_timer_time", "duration_msec", # Excluding pauses
52, "total_moving_time", "duration_msec",
9, "total_distance", "distance_cm",
11,"total_calories", "uint16",
13, "avg_speed", "mmPerSec",
14, "max_speed", "mmPerSec",
15, "avg_heart_rate", "uint8",
16, "max_heart_rate", "uint8",
17, "avg_cadence", "uint8", # FIT rolls run and bike cadence into one
18, "max_cadence", "uint8",
19, "avg_power", "uint16",
20, "max_power", "uint16",
21, "total_ascent", "uint16",
22, "total_descent", "uint16",
42, "avg_altitude", "altitude",
43, "max_altitude", "altitude",
62, "min_altitude", "altitude",
50, "avg_temperature", "sint8",
51, "max_temperature", "sint8"
)
defMsg("record", 20,
253, "timestamp", "date_time",
0, "position_lat", "semicircles",
1, "position_long", "semicircles",
2, "altitude", "altitude",
3, "heart_rate", "uint8",
4, "cadence", "uint8",
5, "distance", "distance_cm",
6, "speed", "mmPerSec",
7, "power", "uint16",
13, "temperature", "sint8",
33, "calories", "uint16",
)
defMsg("event", 21,
253, "timestamp", "date_time",
0, "event", "enum",
1, "event_type", "enum")
defMsg("device_info", 23,
253, "timestamp", "date_time",
0, "device_index", "uint8",
1, "device_type", "uint8",
2, "manufacturer", "manufacturer",
3, "serial_number", "uint32z",
4, "product", "uint16",
5, "software_version", "version"
)
def _write(self, contents):
self._result.append(contents)
def GetResult(self):
return b''.join(self._result)
def _defineMessage(self, local_no, global_message, field_names):
        assert 0 <= local_no < 16
if set(field_names) - set(global_message.FieldNameList):
raise ValueError("Attempting to use undefined fields %s" % (set(field_names) - set(global_message.FieldNameList)))
messageHeader = 0b01000000
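        # Bit 6 set marks a definition message; the low nibble carries the local message number.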
messageHeader = messageHeader | local_no
local_fields = {}
arch = 0 # Little-endian
global_no = global_message.Number
field_count = len(field_names)
pack_tuple = (messageHeader, 0, arch, global_no, field_count)
for field_name in global_message.FieldNameList:
if field_name in field_names:
field = global_message.Fields[field_name]
field_type = self._types[field["Type"]]
pack_tuple += (field["Number"], field_type.Size, field_type.TypeField)
local_fields[field_name] = field
self._definitions[local_no] = FITMessageTemplate(global_message.Name, local_no, local_fields)
self._write(struct.pack("<BBBHB" + ("BBB" * field_count), *pack_tuple))
return self._definitions[local_no]
def GenerateMessage(self, name, **kwargs):
globalDefn = self._messageTemplates[name]
# Create a subset of the global message's fields
localFieldNamesSet = set()
for fieldName in kwargs:
localFieldNamesSet.add(fieldName)
# I'll look at this later
compressTS = False
# Are these fields covered by an existing local message type?
active_definition = None
for defn_n in self._definitions:
defn = self._definitions[defn_n]
if defn.Name == name:
if defn.FieldNameSet == localFieldNamesSet:
active_definition = defn
# If not, create a new local message type with these fields
if not active_definition:
active_definition_no = len(self._definitions)
active_definition = self._defineMessage(active_definition_no, globalDefn, localFieldNamesSet)
if compressTS and active_definition.Number > 3:
raise Exception("Can't use compressed timestamp when local message number > 3")
messageHeader = 0
if compressTS:
messageHeader = messageHeader | (1 << 7)
tsOffsetVal = -1 # TODO
messageHeader = messageHeader | (active_definition.Number << 4)
else:
messageHeader = messageHeader | active_definition.Number
packResult = [struct.pack("<B", messageHeader)]
for field_name in active_definition.FieldNameList:
field = active_definition.Fields[field_name]
field_type = self._types[field["Type"]]
try:
if field_type.Formatter:
result = field_type.Formatter(kwargs[field_name])
else:
sanitized_value = kwargs[field_name]
if sanitized_value is None:
result = struct.pack("<" + field_type.PackFormat, field_type.InvalidValue)
else:
if field_type.PackFormat in ["B","b", "H", "h", "I", "i"]:
sanitized_value = round(sanitized_value)
try:
result = struct.pack("<" + field_type.PackFormat, sanitized_value)
except struct.error as e: # I guess more specific exception types were too much to ask for.
if "<=" in str(e) or "out of range" in str(e):
result = struct.pack("<" + field_type.PackFormat, field_type.InvalidValue)
else:
raise
except Exception as e:
raise Exception("Failed packing %s=%s - %s" % (field_name, kwargs[field_name], e))
packResult.append(result)
self._write(b''.join(packResult))
class FITIO:
_sportMap = {
ActivityType.Other: 0,
ActivityType.Running: 1,
ActivityType.Cycling: 2,
ActivityType.MountainBiking: 2,
ActivityType.Elliptical: 4,
ActivityType.Swimming: 5,
}
_subSportMap = {
# ActivityType.MountainBiking: 8 there's an issue with cadence upload and this type with GC, so...
}
def _calculateCRC(bytestring, crc=0):
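        # FIT's standard CRC-16, computed four bits at a time: each byte's low
        # and high nibbles are folded into the running CRC via the table below.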
crc_table = [0x0000, 0xCC01, 0xD801, 0x1400, 0xF001, 0x3C00, 0x2800, 0xE401, 0xA001, 0x6C00, 0x7800, 0xB401, 0x5000, 0x9C01, 0x8801, 0x4400]
for byte in bytestring:
tmp = crc_table[crc & 0xF]
crc = (crc >> 4) & 0x0FFF
crc = crc ^ tmp ^ crc_table[byte & 0xF]
tmp = crc_table[crc & 0xF]
crc = (crc >> 4) & 0x0FFF
crc = crc ^ tmp ^ crc_table[(byte >> 4) & 0xF]
return crc
def _generateHeader(dataLength):
# We need to call this once the final records are assembled and their length is known, to avoid having to seek back
header_len = 12
        protocolVer = 16 # The FIT SDK code provides these in a very roundabout fashion
profileVer = 810
tag = ".FIT"
return struct.pack("<BBHI4s", header_len, protocolVer, profileVer, dataLength, tag.encode("ASCII"))
def Parse(raw_file):
        raise NotImplementedError("Not implemented")
def Dump(act, drop_pauses=False):
def toUtc(ts):
if ts.tzinfo:
return ts.astimezone(pytz.utc).replace(tzinfo=None)
else:
raise ValueError("Need TZ data to produce FIT file")
fmg = FITMessageGenerator()
creatorInfo = {
"manufacturer": FITManufacturer.DEVELOPMENT,
"serial_number": 0,
"product": 15706
}
devInfo = {
"manufacturer": FITManufacturer.DEVELOPMENT,
"product": 15706,
"device_index": 0
}
if act.Device:
            # GC can get along without this; Strava needs it
devId = DeviceIdentifier.FindEquivalentIdentifierOfType(DeviceIdentifierType.FIT, act.Device.Identifier)
if devId:
creatorInfo = {
"manufacturer": devId.Manufacturer,
"product": devId.Product,
}
devInfo = {
"manufacturer": devId.Manufacturer,
"product": devId.Product,
"device_index": 0 # Required for GC
}
if act.Device.Serial:
creatorInfo["serial_number"] = int(act.Device.Serial) # I suppose some devices might eventually have alphanumeric serial #s
devInfo["serial_number"] = int(act.Device.Serial)
if act.Device.VersionMajor is not None:
assert act.Device.VersionMinor is not None
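                # versionFormatter multiplies by 100, so e.g. version 3.21 is encoded as 321.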
devInfo["software_version"] = act.Device.VersionMajor + act.Device.VersionMinor / 100
fmg.GenerateMessage("file_id", type=FITFileType.Activity, time_created=toUtc(act.StartTime), **creatorInfo)
fmg.GenerateMessage("device_info", **devInfo)
sport = FITIO._sportMap[act.Type] if act.Type in FITIO._sportMap else 0
subSport = FITIO._subSportMap[act.Type] if act.Type in FITIO._subSportMap else 0
session_stats = {
"total_elapsed_time": act.EndTime - act.StartTime,
}
# FIT doesn't have different fields for this, but it does have a different interpretation - we eventually need to divide by two in the running case.
# Further complicating the issue is that most sites don't differentiate the two, so they'll end up putting the run cadence back into the bike field.
use_run_cadence = act.Type in [ActivityType.Running, ActivityType.Walking, ActivityType.Hiking]
def _resolveRunCadence(bikeCad, runCad):
nonlocal use_run_cadence
if use_run_cadence:
return runCad if runCad is not None else (bikeCad if bikeCad is not None else None)
else:
return bikeCad
def _mapStat(dict, key, value):
if value is not None:
dict[key] = value
_mapStat(session_stats, "total_moving_time", act.Stats.MovingTime.asUnits(ActivityStatisticUnit.Seconds).Value)
_mapStat(session_stats, "total_timer_time", act.Stats.TimerTime.asUnits(ActivityStatisticUnit.Seconds).Value)
_mapStat(session_stats, "total_distance", act.Stats.Distance.asUnits(ActivityStatisticUnit.Meters).Value)
_mapStat(session_stats, "total_calories", act.Stats.Energy.asUnits(ActivityStatisticUnit.Kilocalories).Value)
_mapStat(session_stats, "avg_speed", act.Stats.Speed.asUnits(ActivityStatisticUnit.MetersPerSecond).Average)
_mapStat(session_stats, "max_speed", act.Stats.Speed.asUnits(ActivityStatisticUnit.MetersPerSecond).Max)
_mapStat(session_stats, "avg_heart_rate", act.Stats.HR.Average)
_mapStat(session_stats, "max_heart_rate", act.Stats.HR.Max)
_mapStat(session_stats, "avg_cadence", _resolveRunCadence(act.Stats.Cadence.Average, act.Stats.RunCadence.Average))
_mapStat(session_stats, "max_cadence", _resolveRunCadence(act.Stats.Cadence.Max, act.Stats.RunCadence.Max))
_mapStat(session_stats, "avg_power", act.Stats.Power.Average)
_mapStat(session_stats, "max_power", act.Stats.Power.Max)
_mapStat(session_stats, "total_ascent", act.Stats.Elevation.asUnits(ActivityStatisticUnit.Meters).Gain)
_mapStat(session_stats, "total_descent", act.Stats.Elevation.asUnits(ActivityStatisticUnit.Meters).Loss)
_mapStat(session_stats, "avg_altitude", act.Stats.Elevation.asUnits(ActivityStatisticUnit.Meters).Average)
_mapStat(session_stats, "max_altitude", act.Stats.Elevation.asUnits(ActivityStatisticUnit.Meters).Max)
_mapStat(session_stats, "min_altitude", act.Stats.Elevation.asUnits(ActivityStatisticUnit.Meters).Min)
_mapStat(session_stats, "avg_temperature", act.Stats.Temperature.asUnits(ActivityStatisticUnit.DegreesCelcius).Average)
_mapStat(session_stats, "max_temperature", act.Stats.Temperature.asUnits(ActivityStatisticUnit.DegreesCelcius).Max)
inPause = False
for lap in act.Laps:
for wp in lap.Waypoints:
if wp.Type == WaypointType.Resume and inPause:
fmg.GenerateMessage("event", timestamp=toUtc(wp.Timestamp), event=FITEvent.Timer, event_type=FITEventType.Start)
inPause = False
elif wp.Type == WaypointType.Pause and not inPause:
fmg.GenerateMessage("event", timestamp=toUtc(wp.Timestamp), event=FITEvent.Timer, event_type=FITEventType.Stop)
inPause = True
if inPause and drop_pauses:
continue
rec_contents = {"timestamp": toUtc(wp.Timestamp)}
if wp.Location:
rec_contents.update({"position_lat": wp.Location.Latitude, "position_long": wp.Location.Longitude})
if wp.Location.Altitude is not None:
rec_contents.update({"altitude": wp.Location.Altitude})
if wp.HR is not None:
rec_contents.update({"heart_rate": wp.HR})
if wp.RunCadence is not None:
rec_contents.update({"cadence": wp.RunCadence})
if wp.Cadence is not None:
rec_contents.update({"cadence": wp.Cadence})
if wp.Power is not None:
rec_contents.update({"power": wp.Power})
if wp.Temp is not None:
rec_contents.update({"temperature": wp.Temp})
if wp.Calories is not None:
rec_contents.update({"calories": wp.Calories})
if wp.Distance is not None:
rec_contents.update({"distance": wp.Distance})
if wp.Speed is not None:
rec_contents.update({"speed": wp.Speed})
fmg.GenerateMessage("record", **rec_contents)
# Man, I love copy + paste and multi-cursor editing
# But seriously, I'm betting that, some time down the road, a stat will pop up in X but not in Y, so I won't feel so bad about the C&P abuse
lap_stats = {}
_mapStat(lap_stats, "total_elapsed_time", lap.EndTime - lap.StartTime)
_mapStat(lap_stats, "total_moving_time", lap.Stats.MovingTime.asUnits(ActivityStatisticUnit.Seconds).Value)
_mapStat(lap_stats, "total_timer_time", lap.Stats.TimerTime.asUnits(ActivityStatisticUnit.Seconds).Value)
_mapStat(lap_stats, "total_distance", lap.Stats.Distance.asUnits(ActivityStatisticUnit.Meters).Value)
_mapStat(lap_stats, "total_calories", lap.Stats.Energy.asUnits(ActivityStatisticUnit.Kilocalories).Value)
_mapStat(lap_stats, "avg_speed", lap.Stats.Speed.asUnits(ActivityStatisticUnit.MetersPerSecond).Average)
_mapStat(lap_stats, "max_speed", lap.Stats.Speed.asUnits(ActivityStatisticUnit.MetersPerSecond).Max)
_mapStat(lap_stats, "avg_heart_rate", lap.Stats.HR.Average)
_mapStat(lap_stats, "max_heart_rate", lap.Stats.HR.Max)
_mapStat(lap_stats, "avg_cadence", _resolveRunCadence(lap.Stats.Cadence.Average, lap.Stats.RunCadence.Average))
_mapStat(lap_stats, "max_cadence", _resolveRunCadence(lap.Stats.Cadence.Max, lap.Stats.RunCadence.Max))
_mapStat(lap_stats, "avg_power", lap.Stats.Power.Average)
_mapStat(lap_stats, "max_power", lap.Stats.Power.Max)
_mapStat(lap_stats, "total_ascent", lap.Stats.Elevation.asUnits(ActivityStatisticUnit.Meters).Gain)
_mapStat(lap_stats, "total_descent", lap.Stats.Elevation.asUnits(ActivityStatisticUnit.Meters).Loss)
_mapStat(lap_stats, "avg_altitude", lap.Stats.Elevation.asUnits(ActivityStatisticUnit.Meters).Average)
_mapStat(lap_stats, "max_altitude", lap.Stats.Elevation.asUnits(ActivityStatisticUnit.Meters).Max)
_mapStat(lap_stats, "min_altitude", lap.Stats.Elevation.asUnits(ActivityStatisticUnit.Meters).Min)
_mapStat(lap_stats, "avg_temperature", lap.Stats.Temperature.asUnits(ActivityStatisticUnit.DegreesCelcius).Average)
_mapStat(lap_stats, "max_temperature", lap.Stats.Temperature.asUnits(ActivityStatisticUnit.DegreesCelcius).Max)
# These are some really... stupid lookups.
# Oh well, futureproofing.
lap_stats["intensity"] = ({
LapIntensity.Active: FITLapIntensity.Active,
LapIntensity.Rest: FITLapIntensity.Rest,
LapIntensity.Warmup: FITLapIntensity.Warmup,
LapIntensity.Cooldown: FITLapIntensity.Cooldown,
})[lap.Intensity]
lap_stats["lap_trigger"] = ({
LapTriggerMethod.Manual: FITLapTriggerMethod.Manual,
LapTriggerMethod.Time: FITLapTriggerMethod.Time,
LapTriggerMethod.Distance: FITLapTriggerMethod.Distance,
LapTriggerMethod.PositionStart: FITLapTriggerMethod.PositionStart,
LapTriggerMethod.PositionLap: FITLapTriggerMethod.PositionLap,
LapTriggerMethod.PositionWaypoint: FITLapTriggerMethod.PositionWaypoint,
LapTriggerMethod.PositionMarked: FITLapTriggerMethod.PositionMarked,
LapTriggerMethod.SessionEnd: FITLapTriggerMethod.SessionEnd,
LapTriggerMethod.FitnessEquipment: FITLapTriggerMethod.FitnessEquipment,
})[lap.Trigger]
fmg.GenerateMessage("lap", timestamp=toUtc(lap.EndTime), start_time=toUtc(lap.StartTime), event=FITEvent.Lap, event_type=FITEventType.Start, sport=sport, **lap_stats)
# These need to be at the end for Strava
fmg.GenerateMessage("session", timestamp=toUtc(act.EndTime), start_time=toUtc(act.StartTime), sport=sport, sub_sport=subSport, event=FITEvent.Timer, event_type=FITEventType.Start, **session_stats)
fmg.GenerateMessage("activity", timestamp=toUtc(act.EndTime), local_timestamp=act.EndTime.replace(tzinfo=None), num_sessions=1, type=FITActivityType.GENERIC, event=FITEvent.Activity, event_type=FITEventType.Stop)
records = fmg.GetResult()
header = FITIO._generateHeader(len(records))
crc = FITIO._calculateCRC(records, FITIO._calculateCRC(header))
return header + records + struct.pack("<H", crc)
|
cmgrote/tapiriik
|
tapiriik/services/fit.py
|
Python
|
apache-2.0
| 22,972 | 0.030167 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class VirtualNetworkPeeringsOperations(object):
"""VirtualNetworkPeeringsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2017_10_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _delete_initial(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
virtual_network_peering_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'virtualNetworkPeeringName': self._serialize.url("virtual_network_peering_name", virtual_network_peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
virtual_network_peering_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes the specified virtual network peering.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param virtual_network_peering_name: The name of the virtual network peering.
:type virtual_network_peering_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
virtual_network_peering_name=virtual_network_peering_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'virtualNetworkPeeringName': self._serialize.url("virtual_network_peering_name", virtual_network_peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'} # type: ignore
def get(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
virtual_network_peering_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.VirtualNetworkPeering"
"""Gets the specified virtual network peering.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param virtual_network_peering_name: The name of the virtual network peering.
:type virtual_network_peering_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualNetworkPeering, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2017_10_01.models.VirtualNetworkPeering
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkPeering"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
accept = "application/json, text/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'virtualNetworkPeeringName': self._serialize.url("virtual_network_peering_name", virtual_network_peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'} # type: ignore
def _create_or_update_initial(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
virtual_network_peering_name, # type: str
virtual_network_peering_parameters, # type: "_models.VirtualNetworkPeering"
**kwargs # type: Any
):
# type: (...) -> "_models.VirtualNetworkPeering"
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkPeering"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json, text/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'virtualNetworkPeeringName': self._serialize.url("virtual_network_peering_name", virtual_network_peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(virtual_network_peering_parameters, 'VirtualNetworkPeering')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
virtual_network_peering_name, # type: str
virtual_network_peering_parameters, # type: "_models.VirtualNetworkPeering"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.VirtualNetworkPeering"]
"""Creates or updates a peering in the specified virtual network.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param virtual_network_peering_name: The name of the peering.
:type virtual_network_peering_name: str
:param virtual_network_peering_parameters: Parameters supplied to the create or update virtual
network peering operation.
:type virtual_network_peering_parameters: ~azure.mgmt.network.v2017_10_01.models.VirtualNetworkPeering
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either VirtualNetworkPeering or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2017_10_01.models.VirtualNetworkPeering]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkPeering"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
virtual_network_peering_name=virtual_network_peering_name,
virtual_network_peering_parameters=virtual_network_peering_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'virtualNetworkPeeringName': self._serialize.url("virtual_network_peering_name", virtual_network_peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'} # type: ignore
def list(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.VirtualNetworkPeeringListResult"]
"""Gets all virtual network peerings in a virtual network.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either VirtualNetworkPeeringListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2017_10_01.models.VirtualNetworkPeeringListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkPeeringListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
accept = "application/json, text/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('VirtualNetworkPeeringListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings'} # type: ignore
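# --- Usage sketch (not part of the generated client) ---
# The ItemPaged returned above fetches pages lazily via next_link; a
# hypothetical iteration with the same client as in the sketch above:
#
# for peering in network_client.virtual_network_peerings.list("my-rg", "my-vnet"):
#     print(peering.name, peering.peering_state)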
|
Azure/azure-sdk-for-python
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2017_10_01/operations/_virtual_network_peerings_operations.py
|
Python
|
mit
| 22,805 | 0.004955 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
from PySide import QtCore
from PySide import QtGui
from PySide import QtWebKit
import sys
import re
import datetime
import os
import time
import subprocess
import threading
import atexit
import webbrowser
import functools
import operator
import Viewer
import Controller
class Principal(QtGui.QMainWindow):
def __init__(self, parent=None):
QtGui.QMainWindow.__init__(self, parent)
# create the menu
##################################################
self.menu = Viewer.MyMenu()
self.setMenuBar(self.menu)
get_remote_corpus = Controller.myxml()
if get_remote_corpus.get():
if get_remote_corpus.parse():
for corpus in get_remote_corpus.getDataCorpus():
t = QtGui.QAction(corpus[0], self)
t.triggered.connect(functools.partial(self.connect_server,
"prosperologie.org", corpus[1], corpus[0]))
self.menu.distant.addAction(t)
self.menu.local_connect.triggered.connect(self.connect_server_localhost)
"""To delete: direct access to corpus editing tab"""
self.menu.local_edit.triggered.connect(self.add_edit_corpus_tab)
""" end """
self.menu.menu_P1P2.triggered.connect(self.P1toP2)
self.menu.codex.triggered.connect(self.codex_window)
self.menu.server_vars.triggered.connect(self.display_server_vars)
self.menu.contexts.triggered.connect(self.display_contexts)
self.menu.pers.triggered.connect(self.display_pers)
self.menu.marlowe_gen.triggered.connect(self.add_gen_mrlw_tab)
self.menu.Marlowe_remote.triggered.connect(self.MarloweViewer)
self.menu.manual.triggered.connect(lambda: webbrowser.open('http://mypads.framapad.org/mypads/?/mypads/group/doxa-g71fm7ki/pad/view/interface-p2-manuel-de-l-utilisateur-hsa17wo'))
# create the status bar
##################################################
self.status = self.statusBar()
self.status.showMessage(self.tr("Ready"))
#create the progressebar
##################################################
self.PrgBar = Viewer.PrgBar(self)
self.status.addPermanentWidget(self.PrgBar.bar)
# create the toolbar
##################################################
self.toolbar = self.addToolBar("")
#self.toolbar.setIconSize(QtCore.QSize(16, 16))
self.toolbar.setMovable(0)
self.toolbar_descr_corpus = QtGui.QLabel()
self.toolbar.addWidget(self.toolbar_descr_corpus)
spacer2 = QtGui.QLabel()
spacer2.setSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
self.toolbar.addWidget(spacer2)
self.toolbar_name_corpus = QtGui.QLabel()
self.toolbar.addWidget(self.toolbar_name_corpus)
##################################################
# quadrants NO - NE - SO - SE (French compass notation for NW - NE - SW - SE)
#
# ###########
# # # #
# # NO # NE #
# # # #
# ###########
# # # #
# # SO # SE #
# # # #
# ###########
##################################################
# NO quadrant (top left)
##################################################
##### Tab for actants #############
##################################################
self.actantsTab = Viewer.actantsTab()
self.actantsTab.L.addAction(QtGui.QAction('texts', self,
triggered=lambda: self.show_texts_from_list(0)))
self.actantsTab.L.addAction(QtGui.QAction('copy list', self,
triggered=lambda: self.copy_lw(self.actantsTab.L)))
self.actantsTab.L1.addAction(QtGui.QAction('copy list', self,
triggered=lambda: self.copy_lw(self.actantsTab.L1)))
self.actantsTab.L2.addAction(QtGui.QAction('copy list', self,
triggered=lambda: self.copy_lw(self.actantsTab.L2)))
##### Tab for authors #############
##################################################
self.authorsTab = Viewer.authorsTab()
self.authorsTab.L.currentItemChanged.connect(self.authLchanged)
self.authorsTab.S.currentIndexChanged.connect(self.authLchanged)
##### Tab for concepts #############
##################################################
self.NOT2 = Viewer.ConceptTab()
self.NOT2.select.currentIndexChanged.connect(self.select_concept)
self.NOT2.sort_command.currentIndexChanged.connect(self.affiche_concepts_scores)
self.NOT2.dep0.listw.currentItemChanged.connect(self.cdep0_changed)
self.NOT2.depI.listw.currentItemChanged.connect(self.cdepI_changed)
self.NOT2.depII.listw.currentItemChanged.connect(self.cdepII_changed)
self.NOT2.depI.deselected.connect(lambda: self.NOT2.depII.listw.clear())
self.NOT2.dep0.deselected.connect(lambda: [self.NOT2.depI.listw.clear(),
self.NOT2.depII.listw.clear()])
#TODO add those below
for i in range(7,12):
self.NOT2.sort_command.model().item(i).setEnabled(False)
##### Tab for syntax items (Lexicon) #############
##################################################
self.NOT1 = Viewer.LexiconTab()
self.NOT1.select.currentIndexChanged.connect(self.select_liste)
self.NOT1.sort_command.currentIndexChanged.connect(self.affiche_liste_scores)
self.NOT1.dep0.listw.currentItemChanged.connect(self.ldep0_changed)
#TODO add those below
for i in range(6,11):
self.NOT1.sort_command.model().item(i).setEnabled(False)
#context menus activation
self.NOT1.dep0.listw.addAction(QtGui.QAction('texts', self,
triggered=lambda: self.show_texts_from_list(0)))
self.NOT1.dep0.listw.addAction(QtGui.QAction('network', self,
triggered=lambda: self.show_network(0)))
self.NOT1.dep0.listw.addAction(QtGui.QAction('copy list', self,
triggered=lambda: self.copy_lw(self.NOT1.dep0.listw)))
self.NOT2.dep0.listw.addAction(QtGui.QAction('texts', self,
triggered=lambda: self.show_texts_from_list(0)))
self.NOT2.dep0.listw.addAction(QtGui.QAction('network', self,
triggered=lambda: self.show_network(0)))
self.NOT2.dep0.listw.addAction(QtGui.QAction('copy list', self,
triggered=lambda: self.copy_lw(self.NOT2.dep0.listw)))
self.NOT2.depI.listw.addAction(QtGui.QAction('texts', self,
triggered=lambda: self.show_texts_from_list(1)))
self.NOT2.depI.listw.addAction(QtGui.QAction('network', self,
triggered=lambda: self.show_network(1)))
self.NOT2.depI.listw.addAction(QtGui.QAction('copy list', self,
triggered=lambda: self.copy_lw(self.NOT2.depI.listw)))
self.NOT2.depII.listw.addAction(QtGui.QAction('texts', self,
triggered=lambda: self.show_texts_from_list(2)))
self.NOT2.depII.listw.addAction(QtGui.QAction('network', self,
triggered=lambda: self.show_network(2)))
self.NOT2.depII.listw.addAction(QtGui.QAction('copy list', self,
triggered=lambda: self.copy_lw(self.NOT2.depII.listw)))
##### Tab for persons #############
##################################################
self.show_persons = Viewer.personsTab()
self.show_persons.L.listw.addAction(QtGui.QAction('texts', self,
triggered=lambda: self.show_texts_from_list("pers")))
self.show_persons.L.listw.addAction(QtGui.QAction('network', self,
triggered=lambda: self.show_network("pers")))
self.show_persons.L.listw.addAction(QtGui.QAction('copy list', self,
triggered=lambda: self.copy_lw(self.show_persons.L.listw)))
#Networks tab
##################################################
self.tabNetworks = QtGui.QTabWidget()
self.tabNetworks.setTabsClosable(True)
self.tabNetworks.tabCloseRequested.connect(self.tabNetworks.removeTab)
#NO QTabWidget
################################################
self.NOTs = QtGui.QTabWidget()
self.NOTs.addTab(self.actantsTab, self.tr("Actants"))
self.NOTs.addTab(self.authorsTab, self.tr("Authors"))
self.NOTs.addTab(self.NOT2, self.tr("Concepts"))
self.NOTs.addTab(self.NOT1, self.tr("Lexicon"))
self.NOTs.currentChanged.connect(self.change_NOTab)
self.NOTs.setTabsClosable(True)
self.NOTs.tabCloseRequested.connect(self.NOTs.removeTab)
Viewer.hide_close_buttons(self.NOTs,0)
Viewer.hide_close_buttons(self.NOTs,1)
Viewer.hide_close_buttons(self.NOTs,2)
Viewer.hide_close_buttons(self.NOTs,3)
##################################################
# NE quadrant (top right)
##################################################
# Journal
##################################################
self.journal = Viewer.Journal(self)
#Explorer Tab
################################################
#TODO: add a browser (concepts)
#TODO: context menu only when there is a selection
self.explorer_widget = Viewer.Explorer()
self.explorer_widget.saisie.returnPressed.connect(self.explorer)
self.explorer_widget.liste.listw.currentItemChanged.connect(self.explo_item_selected)
self.explorer_widget.liste.listw.addAction(QtGui.QAction(self.tr('texts'),
self, triggered=self.explo_show_text))
self.explorer_widget.explo_lexi.listw.addAction(QtGui.QAction(self.tr('change type TODO'),
self))
##### Tab for formulae #############
##################################################
formulaeTab = Viewer.Formulae()
formulaeTab.tempButton.clicked.connect(self.recupFormules)
#Access by context CTX
################################################
self.CTXs = Viewer.Contexts()
self.CTXs.l.currentItemChanged.connect(self.contexts_contents)
# evaluate the server variables directly
##################################################
self.server_vars = Viewer.ServerVars()
self.server_vars.champ.returnPressed.connect(self.server_vars_Evalue)
self.server_vars.button_eval.clicked.connect(self.server_vars_Evalue)
self.server_vars.button_getsem.clicked.connect(self.server_getsem_Evalue)
self.server_vars.button_eval_index.clicked.connect(self.server_index_Evalue)
#NE QTabWidget
##################################################
self.NETs = QtGui.QTabWidget()
self.NETs.setTabsClosable(True)
self.NETs.tabCloseRequested.connect(self.NETs.removeTab)
self.journal_index = self.NETs.addTab(self.journal.journal,
self.tr("Journal"))
Viewer.hide_close_buttons(self.NETs, 0)
self.NETs.addTab(self.explorer_widget, self.tr("Search and Type"))
Viewer.hide_close_buttons(self.NETs, 1)
self.NETs.addTab(formulaeTab, self.tr("Formulae"))
Viewer.hide_close_buttons(self.NETs, 2)
#Project editing tab
###
self.param_corpus = Viewer.Corpus_tab(self)
# display the languages listed in dictio.cfg in the selector
listLangCfg = []
with open('server/dictio.cfg') as diccfg:
listLangCfg = re.findall("\[LANGUES (\S*)\]", diccfg.read())
self.param_corpus.Lang.addItems(listLangCfg)
QtCore.QObject.connect(self.param_corpus.send_codex_ViewListeTextes,
QtCore.SIGNAL("triggered()"), self.send_codex_ViewListeTextes)
self.param_corpus.launchPRC_button.clicked.connect(self.launchPRC)
self.NETs.addTab(self.param_corpus, self.tr("Project"))
Viewer.hide_close_buttons(self.NETs, 3)
#give focus to editing tab
self.NETs.setCurrentIndex(3)
##################################################
# SO quadrant (bottom left)
##################################################
# texts tab
##################################################
self.SOT1 = QtGui.QTabWidget()
self.SOT1.setTabsClosable(True)
self.SOT1.tabCloseRequested.connect(self.SOT1.removeTab)
#SO QTabWidget
##################################################
#TODO: enclosing expressions
self.SOTs = QtGui.QTabWidget()
self.SOTs.setTabsClosable(True)
self.SOTs.tabCloseRequested.connect(self.SOTs.removeTab)
self.SOTs.addTab(self.SOT1, self.tr("Texts"))
Viewer.hide_close_buttons(self.SOTs,0)
##################################################
# SE quadrant (bottom right)
##################################################
# text properties tab
##################################################
self.textProperties = QtGui.QTabWidget()
# salient properties sub-tab
self.saillantes = Viewer.SaillantesProperties()
self.saillantes.Act.doubleClicked.connect(self.deploie_Actant)
self.saillantes.Cat.doubleClicked.connect(self.deploie_Cat)
self.saillantes.Col.doubleClicked.connect(self.deploie_Col)
self.textProperties.addTab(self.saillantes, self.tr("Salient structures"))
# elements sub-tab
self.text_elements = Viewer.TextElements()
self.textProperties.addTab(self.text_elements.widget,
self.tr("Text elements"))
self.text_elements.element_list.doubleClicked.connect(self.deploie_text_elements)
#TODO add those below
temp_apports = QtGui.QWidget()
self.textProperties.addTab(temp_apports, self.tr("Contributions"))
self.textProperties.setTabToolTip(2, self.tr("Apports et reprises"))
self.textProperties.setTabEnabled(2, False)
temp_proches = QtGui.QWidget()
self.textProperties.addTab(temp_proches, self.tr("Analogous"))
self.textProperties.setTabToolTip(3, self.tr("Textes proches"))
self.textProperties.setTabEnabled(3, False)
#CTX content
##################################################
self.SET2 = Viewer.textCTX()
self.SET2.T.cellChanged.connect(self.onChangeCTX)
self.SET2.valid.clicked.connect(self.saveCTX)
self.SET2.valid.setEnabled(False)
self.SET2.reset.clicked.connect(self.resetCTX)
self.SET2.reset.setEnabled(False)
# text content tab
##################################################
self.textContent = QtGui.QTextEdit()
#sentences tab
##################################################
self.tab_sentences = QtGui.QTabWidget()
self.tab_sentences.setTabsClosable(True)
self.tab_sentences.tabCloseRequested.connect(self.tab_sentences.removeTab)
#SE QTabWidget
##################################################
self.SETs = QtGui.QTabWidget()
self.SETs.addTab(self.textProperties, self.tr("Properties"))
self.SETs.addTab(self.SET2, self.tr("Context"))
self.SETs.addTab(self.textContent, self.tr("Text"))
self.SETs.addTab(self.tab_sentences, self.tr("Sentences"))
self.SETs.currentChanged.connect(self.change_SETab)
self.textProperties.currentChanged.connect(self.change_text_prop_tab)
self.text_elements.selector.currentIndexChanged.connect(self.show_text_elements)
################################################
### Main layout: grid
################################################
# FIXME: fix grid resizing on small screens
main = QtGui.QWidget()
grid = QtGui.QGridLayout()
grid.addWidget(self.NOTs,0, 0)
grid.addWidget(self.NETs, 0, 1)
grid.addWidget(self.SOTs, 1, 0)
grid.addWidget(self.SETs, 1, 1)
grid.setContentsMargins(5,10,5,5)
grid.setSpacing(10)
main.setLayout(grid)
self.setCentralWidget(main)
# TODO: allow a quadrant to resize to the whole window
#grid.setRowMinimumHeight(0,1000))
#testeur = QtGui.QPushButton('+')
#self.NOTs.setCornerWidget(testeur)
#grid.setContentsMargins(2,2,2,2)
with open('version.txt') as V:
v = V.read()
self.setWindowTitle(v)
self.setWindowIcon(QtGui.QIcon("images/Prospero-II.png"))
self.show()
################################################
# End of the main display method
# beginning of the functions
################################################
def activity(self, message):
"""Add message to the journal"""
self.status.showMessage(message)
time = "%s" % datetime.datetime.now()
self.journal.history.append("%s %s" % (time[:19], message))
# problem: write permissions in the Windows Program folder
#with open("P-II-gui.log",'a') as logfile:
# logfile.write("%s %s\n" % (time[:19], message.encode("utf-8")))
def destroy_texts_tabs(self):
for i in reversed(range(self.SOT1.tabBar().count())):
self.SOT1.tabBar().removeTab(i)
def display_authors(self):
#TODO sort by number of pages, get texts
ask = u"$aut[0:]"
result = self.client.eval_var(ask)
list_results = re.split(", ", result)
self.activity(self.tr("Displaying %d authors")%len(list_results))
self.authorsTab.L.clear()
self.PrgBar.perc(len(list_results))
for i, aut in enumerate(list_results):
ask = u"$aut%d.txt[0:]" % i
result = self.client.eval_var(ask)
txts = re.split(", ", result)
n = len(txts)
item = QtGui.QListWidgetItem()
item.setText("%d %s" % (n, aut))
ask = "$aut%d.nbpg" % i
nbpg = self.client.eval_var(ask)
firstTxt = txts[0]
firstTxt = self.listeObjetsTextes[self.dicTxtSem[firstTxt]]
firstDate = firstTxt.getCTX("date")
firstDate = firstDate[0:10]
lastTxt = txts[-1]
lastTxt = self.listeObjetsTextes[self.dicTxtSem[lastTxt]]
lastDate = lastTxt.getCTX("date")
lastDate = lastDate[0:10]
item.setToolTip("<table><tr><th colspan=\"2\">%s</th></tr><tr><td>number of texts</td><td align=\"right\">%d</td><tr><td>number of pages</td><td align=\"right\">%s</td></tr><tr><td>first text date</td><td align=\"right\">%s</td></tr><tr><td>last text date</td><td align=\"right\">%s</td></tr></table>"%(aut, n, nbpg, firstDate, lastDate))
self.authorsTab.L.addItem(item)
self.PrgBar.percAdd(1)
self.PrgBar.reset()
def actsLchanged(self):
if hasattr(self, "client"):
self.PrgBar.perc(24)
self.actantsTab.L1.clear()
self.actantsTab.L2.clear()
cur = self.actantsTab.L.currentItem().text()
value, item = re.split(" ",cur,1)
sem = self.client.eval_get_sem(item, "$act")
ask = "%s.resact[0:]" % (sem)
result = self.client.eval_var(ask)
network = re.split(", ", result)
if len(network):
vals = re.split(", ",
self.client.eval_var("%s.resact[0:].val" % (sem)))
# FIXME resact: actants of the network, problem with the values
if vals == network:
print "C29950 values instead of network"
else:
self.actantsTab.L1.addItems(["%d %s"%(int(vals[row]),
element) for row, element in enumerate(network)])
self.PrgBar.percAdd(12)
# incompatibles: never actants in the same text
ask2 = "%s.act_incomp[0:]" % (sem)
result2 = self.client.eval_var(ask2)
incomp = re.split(", ", result2)
if len(incomp):
el_val = { element: int(val)
for val, element in [ Controller.sp_el(el)
for el in self.actants_list_valued ] }
incomp_valued = sorted( [ [el_val[i], i]
for i in incomp ], key=operator.itemgetter(0), reverse=True)
self.actantsTab.L2.addItems(["%d %s"%(val, element)
for val, element in incomp_valued])
self.PrgBar.percAdd(12)
def authLchanged(self):
# TODO: score, deployment, access to texts and statements
if hasattr(self, "client"):
self.authorsTab.L2.clear()
row = self.authorsTab.L.currentRow()
if (row == -1): #if no author selected, take first
self.authorsTab.L.setCurrentRow(0)
row = 0
which = Controller.semantiques[self.authorsTab.S.currentText()]
ask = "$aut%s.%s[0:]" % (row, which)
result1 = self.client.eval_var(ask)
concepts = re.split(", ", result1)
# FIXME: problem - no answer for pers & undef, different sizes for act
if which in ['$pers', '$undef', '$act']:
for i, el in enumerate(concepts):
ask = "$aut%s.%s%d.val" % (row, which, i)
val = self.client.eval_var(ask)
self.authorsTab.L2.addItem("%s %s"%(val, el))
else:
ask2 = "$aut%s.val_freq_%s[0:]" % (row, which[1:])
result2 = self.client.eval_var(ask2)
result2 = re.split(", ", result2)
liste_valued = ["%s %s"%(int(val), concepts[row]) for row,
val in enumerate(result2)]
self.authorsTab.L2.addItems(liste_valued)
def create_corpus_texts_tab(self):
"""create a tab for corpus texts"""
#FIXME reset if open a new corpus
self.destroy_texts_tabs()
n = len(self.listeObjetsTextes)
self.activity(self.tr("Displaying text list (%d items)") % n)
self.CorpusTexts = Viewer.ListTexts(False,
self.dicTxtSem.values(), self.listeObjetsTextes, self)
self.CorpusTexts.corpus.itemSelectionChanged.connect(self.onSelectText)
self.SOT1.addTab(self.CorpusTexts, self.tr("corpus (%d)")%n)
Viewer.hide_close_buttons(self.SOT1,0) #corpus text tab permanent
def onSelectText(self):
"""when a text is selected, select it in other lists and display text properties"""
row = self.SOT1.focusWidget().currentRow()
txt = self.SOT1.focusWidget().widget_list[row]
self.semantique_txt_item = txt.sem
self.activity(self.tr("Displaying %s %s %s") %txt.getResume())
#find txt in other tabs
for t in range(self.SOT1.count()):
lw = self.SOT1.widget(t).findChildren(QtGui.QListWidget)
for i, l in enumerate(lw):
l.itemSelectionChanged.disconnect(self.onSelectText)
tab_txts = l.widget_list
if txt in tab_txts:
l.setCurrentRow(tab_txts.index(txt))
else:
l.deselect_all()
l.itemSelectionChanged.connect(self.onSelectText)
#display properties in selected tab
if (self.SETs.currentIndex() == 0):
self.show_textProperties(self.semantique_txt_item)
elif (self.SETs.currentIndex() == 1):
self.show_textCTX(self.semantique_txt_item)
elif (self.SETs.currentIndex() == 2):
self.show_textContent(self.semantique_txt_item)
def deselectText(self):
"""vide les listes pour eviter confusion et deselectionne les listwidget"""
self.saillantes.Act.clear()
self.saillantes.Cat.clear()
self.saillantes.Col.clear()
self.text_elements.element_list.clear()
self.efface_textCTX()
self.textContent.clear()
if hasattr(self, "semantique_txt_item"):
del self.semantique_txt_item
for listwidget in self.SOT1.findChildren(QtGui.QListWidget) :
listwidget.itemSelectionChanged.disconnect(self.onSelectText)
listwidget.deselect_all()
listwidget.itemSelectionChanged.connect(self.onSelectText)
def change_NOTab(self):
if (self.NOTs.currentIndex() == 1): #Authors
if hasattr(self, "client"): # si connecte
self.display_authors()
elif (self.NOTs.currentIndex() == 2): #Concepts
if hasattr(self, "client"): # si connecte
if not hasattr(self, "sem_concept"):
self.select_concept(self.NOT2.select.currentText())
elif (self.NOTs.currentIndex() == 3):#Lexicon
if hasattr(self, "client"):
if not hasattr(self, "sem_liste_concept"):
self.select_liste(self.NOT1.select.currentText())
def change_SETab(self):
if hasattr(self, "semantique_txt_item"):
sem_txt = self.semantique_txt_item
if (self.SETs.currentIndex () == 0):
self.saillantes.Act.clear()
self.saillantes.Cat.clear()
self.saillantes.Col.clear()
self.show_textProperties(sem_txt)
elif (self.SETs.currentIndex () == 1):
self.efface_textCTX()
self.show_textCTX(sem_txt)
elif (self.SETs.currentIndex () == 2):
self.textContent.clear()
self.show_textContent(sem_txt)
self.resetCTX()
def change_text_prop_tab(self):
if hasattr(self, "semantique_txt_item"):
sem_txt = self.semantique_txt_item
if (self.textProperties.currentIndex () == 0):
self.show_salient(sem_txt)
elif (self.textProperties.currentIndex() == 1):
self.show_text_elements(sem_txt)
def onChangeCTX(self):
r = self.SET2.T.currentRow()
if (r != -1):
self.SET2.T.currentItem().setBackground(QtGui.QColor(237,243,254)) # cyan
self.SET2.valid.setEnabled(True)
self.SET2.reset.setEnabled(True)
def saveCTX(self):
sem_txt = self.semantique_txt_item
modif = []
for r in range(self.SET2.T.rowCount()):
field = self.SET2.T.item(r, 0).text()
val = self.SET2.T.item(r, 1).text()
ask = u"%s.ctx.%s" % (sem_txt, field)
result = self.client.eval_var(ask)
result = re.sub(u"^\s*", "", result)
if (result != val):
self.activity("Edit context %s %s %s" % (sem_txt, field, val))
# JPC FIXME: IT DOES NOT EDIT THE FILE EVERY TIME!!
self.client.eval_set_ctx(sem_txt, field, val)
self.client.add_cache_var(sem_txt +".ctx."+field, val)
self.listeObjetsTextes[sem_txt].setCTX(field, val)
modif.append(field)
# FIXME: what happens when a new field is created?
#self.client.eval_set_ctx(sem_txt, "testfield", val)
# FIXME: cache problem when the ctx list is refreshed
self.maj_metadatas()
# update the text lists if author, date or title changed
#TODO: re-sort when the date changes
if len(set(modif) & set(["author", "date", "title"])):
txt = self.listeObjetsTextes[sem_txt]
for t in range(self.SOT1.count()):
lw = self.SOT1.widget(t).findChildren(QtGui.QListWidget)
for i, l in enumerate(lw):
for w in l.widget_list:
if w == txt:
oldResume = l.currentItem().resume
num = re.findall(" \[\d*\]", oldResume[0])
newResume = w.getResume()
if len(num):
newResume = (newResume[0] + num[0],
newResume[1], newResume[2])
l.currentItem().resume = newResume
l.currentItem().updateText()
self.SET2.valid.setEnabled(False)
self.SET2.reset.setEnabled(False)
self.resetCTX()
def resetCTX(self):
self.SET2.valid.setEnabled(False)
self.SET2.reset.setEnabled(False)
self.show_textCTX(self.semantique_txt_item)
def select_concept(self, typ):
""" quand un element de Concepts est selectionné """
self.sem_concept = Controller.semantiques[self.NOT2.select.currentText()]
#if (self.sem_concept in ["$col"]):
##deployment for collections
#self.NOT2.sort_command.setCurrentIndex(1)
self.affiche_concepts_scores()
def select_liste(self, typ):
""" quand un element de Lexicon est selectionné """
self.sem_liste_concept = Controller.semantiques[self.NOT1.select.currentText()]
self.affiche_liste_scores()
def change_liste(self, content):
self.NOT1.dep0.listw.clear()
#self.NOT1.depI.listw.clear()
#self.NOT1.depII.listw.clear()
for r in range(len(content)):
i = QtGui.QListWidgetItem(content[r])
self.NOT1.dep0.listw.addItem(i)
#i.setToolTip('rank:%d'%(r+1))
def change_liste_concepts(self, content):
self.NOT2.dep0.listw.clear()
self.NOT2.depI.listw.clear()
self.NOT2.depII.listw.clear()
self.NOT2.dep0.listw.addItems(content)
def affiche_concepts_scores(self):
which = self.NOT2.sort_command.currentText()
typ = self.NOT2.select.currentText()
if hasattr(self, "client"):
sem = Controller.semantiques[typ]
content = self.client.recup_liste_concept(sem)
# FIXME: work in progress
if sem in ["$ef", '$col', '$cat_ent', '$cat_epr',
'$cat_qua', '$cat_mar']:
if not hasattr(self, 'gcs'):
self.gcs = self.client.recup_liste_concepts_tot()
for i, gc in enumerate(self.gcs):
#print i, content[0], gc[0], len(content), len(gc)
if content[0] == gc[0]:
#if set(content) < set(gc):
content = gc
break
if (content == ['']):
parent.activity(u"Nothing to Display for %s" % (typ))
else:
self.activity(self.tr("Displaying %s list (%d items) ordered by %s") % (typ,
len(content), which))
liste_valued =[]
self.PrgBar.perc(len(content))
sort = Controller.hash_sort[which]
ask = "val_%s_%s[0:]" % (sort, sem[1:])
result = self.client.eval_var(ask)
if (which in ["first apparition", "last apparition"]):
liste_valued = [[val, content[row]] for row,
val in enumerate(re.split(", ", result))]
else:
# FIXME: work in progress
if len(result) < len(content):
liste_valued = []
result = re.split(", ", result)
for row, el in enumerate(content):
if row < len(result):
liste_valued.append([int(result[row]), el])
else:
liste_valued.append([0, el])
else:
liste_valued = [[int(val), content[row]] for row,
val in enumerate(re.split(", ", result))]
#TODO the same for I et II
liste_final = []
if (which == "alphabetically"):
for i in sorted(liste_valued, key=lambda x: x[1], reverse = 0):
item_resume = u"%s %s" % (i[0], i[1])
liste_final.append(item_resume)
elif (which in ["first apparition", "last apparition"]):
for i in sorted(liste_valued,
key=lambda x: ''.join(sorted(x[0].split('/'), reverse=1)),
reverse = 0):
item_resume = u"%s %s" % (i[0], i[1])
liste_final.append(item_resume)
else :
for i in sorted(liste_valued, key=lambda x: x[0], reverse=1):
item_resume = u"%s %s" % (i[0], i[1])
liste_final.append(item_resume)
self.change_liste_concepts(liste_final)
def affiche_liste_scores(self):
which = self.NOT1.sort_command.currentText()
typ = self.NOT1.select.currentText()
if hasattr(self, "client"):
sem = Controller.semantiques[typ]
content = self.client.recup_liste_concept(self.sem_liste_concept)
liste_final = []
if (content == ['']):
self.activity(u"Nothing to Display for %s" % (typ))
else:
self.activity(u"Displaying %s list (%d items) ordered by %s" % (typ,
len(content), which))
liste_valued =[]
self.PrgBar.perc(len(content))
sort = Controller.hash_sort[which]
ask = "val_%s_%s[0:]" % (sort, sem[1:])
result = re.split(', ', self.client.eval_var(ask))
#FIXME does not return anything for undef
if sem in ['$undef']:
print "C18389", len(result), len(content)
print ask
result = result[:len(content)]
if (which in ["first apparition", "last apparition"]):
liste_valued = [[val, content[row]] for row,
val in enumerate(result)]
else:
liste_valued = [[int(val), content[row]] for row,
val in enumerate(result)]
liste_final = []
if (which == "alphabetically"):
for i in sorted(liste_valued, key=lambda x: x[1], reverse = 0):
item_resume = u"%s %s" % (i[0], i[1])
liste_final.append(item_resume)
elif (which in ["first apparition", "last apparition"]):
for i in sorted(liste_valued,
key=lambda x: ''.join(sorted(x[0].split('/'), reverse=1)),
reverse = 0):
item_resume = u"%s %s" % (i[0], i[1])
liste_final.append(item_resume)
else :
for i in sorted(liste_valued, key=lambda x: x[0], reverse=1):
item_resume = u"%s %s" % (i[0], i[1])
liste_final.append(item_resume)
self.change_liste(liste_final)
def ldep0_changed(self):
itemT = self.NOT1.dep0.listw.currentItem()
if (not len(self.NOT1.dep0.listw.selectedItems())):
self.NOT1.dep0.listw.setCurrentItem(itemT)
if (itemT):
value, item = re.split(" ",itemT.text(),1)
self.activity("%s selected" % (item))
self.activity("%s selected, value %s" % (item, value))
sem = Controller.semantiques[self.NOT1.select.currentText()]
#FIXME $qual whereas elsewhere $qualite
if (sem == '$qualite'):
print "C32584 changed $qual for $qualite"
# escape the dollar sign, otherwise the pattern never matches
self.semantique_lexicon_item_0 = re.sub(r'\$qual', '$qualite',
self.client.eval_get_sem(item, "$qual"))
else :
self.semantique_lexicon_item_0 = self.client.eval_get_sem(item, sem)
def cdep0_changed(self,level):
""" suite au changement de sélection, mettre à jour les vues dépendantes """
#FIXME et quand les valeurs du niveau 0 sont nulles, il n'affiche pas du tout le dico ?
which_concepts = self.NOT2.sort_command.currentText()
itemT = self.NOT2.dep0.listw.currentItem()
if (not len(self.NOT2.dep0.listw.selectedItems())):
self.NOT2.dep0.listw.setCurrentItem(itemT)
if (itemT):
value, item = re.split(" ",itemT.text(),1)
self.activity(self.tr("%s selected, value %s") % (item, value))
self.NOT2.depI.listw.clear()
self.NOT2.depII.listw.clear()
sem = self.sem_concept
self.semantique_concept_item = self.client.eval_get_sem(item, sem)
if self.semantique_concept_item == "":
# FIXME: problem with some elements of the qual categories
print "C990", [item, sem]
ask = "%s.rep[0:]"% self.semantique_concept_item
result = self.client.eval_var(ask)
result = re.split(", ", result)
if (result != [u'']):
if (sem in ["$cat_ent", "$cat_epr", "$cat_mar", "$cat_qua"]):
#display directly on II list
liste_scoree = []
self.PrgBar.perc(len(result))
for r in range(len(result)):
if (which_concepts == "number of texts"):
ask = "%s.rep%d.nbtxt"% (self.semantique_concept_item, r)
elif(which_concepts == "number of authors"):
# FIXME: it returns nothing
ask = "%s.rep%d.nbaut"% (self.semantique_concept_item, r)
print "C1977: %s" % ask
elif(which_concepts == "first apparition"):
# FIXME: it returns nothing
ask = "%s.rep%d.fapp"% (self.semantique_concept_item, r)
print "C1978: %s" % ask
elif(which_concepts == "last apparition"):
# FIXME: it returns nothing
ask = "%s.rep%d.lapp"% (self.semantique_concept_item, r)
print "C1979: %s" % ask
else :
ask = "%s.rep%d.val"% (self.semantique_concept_item, r)
val = int(self.client.eval_var(ask))
if val == 0:
liste_scoree.extend(map(lambda x: [x, 0],
result[r:]))
break
liste_scoree.append([result[r], val ])
self.PrgBar.percAdd(1)
if (which_concepts == "alphabetically"):
liste_scoree.sort()
self.NOT2.depII.listw.addItems(map(lambda x : "%d %s"% (x[1], x[0]), liste_scoree))
self.PrgBar.reset()
else:
self.cdepI_unsorted = []
for r in range(len(result)):
if (which_concepts == "occurences" or which_concepts == "alphabetically"):
ask = "%s.rep%d.val"% (self.semantique_concept_item, r)
elif (which_concepts == "deployment"):
ask = "%s.rep%d.dep"% (self.semantique_concept_item, r)
elif (which_concepts == "number of texts"):
#FIXME does not return anything
ask = "%s.rep%d.nbtxt"% (self.semantique_concept_item, r)
elif (which_concepts == "first apparition"):
#FIXME does not return anything
ask = "%s.rep%d.fapp"% (self.semantique_concept_item, r)
elif (which_concepts == "last apparition"):
#FIXME does not return anything
ask = "%s.rep%d.lapp"% (self.semantique_concept_item, r)
elif (which_concepts == "number of authors"):
#FIXME does not return anything
ask = "%s.rep%d.nbaut"% (self.semantique_concept_item, r)
try:
val = int(self.client.eval_var(ask))
except:
print "C19584", ask, self.client.eval_var(ask)
to_add = "%d %s"%(val, result[r])
# when 0 is reached, stop the loop and assign 0 to all remaining values
if (val == 0):
self.cdepI_unsorted.extend(map(lambda x : "0 %s" %x, result[r:]))
break
self.cdepI_unsorted.append(to_add)
if (which_concepts == "alphabetically"):
ldepI_sorted = sorted(self.cdepI_unsorted,
key=lambda x: re.split(" ", x)[1], reverse=0)
else :
ldepI_sorted = sorted(self.cdepI_unsorted,
key=lambda x: int(re.split(" ", x)[0]), reverse=1)
self.NOT2.depI.listw.addItems(ldepI_sorted)
# directly display level II for the first item of level I
self.NOT2.depI.listw.setCurrentItem(self.NOT2.depI.listw.item(0))
#self.cdepI_changed()
def cdepI_changed(self):
"""quand un item de D est sélectionné, afficher représentants dans E"""
which_concepts = self.NOT2.sort_command.currentText()
itemT = self.NOT2.depI.listw.currentItem()
if (itemT):
row = self.cdepI_unsorted.index(itemT.text())
self.NOT2.depII.listw.clear() # clear the list
ask = "%s.rep%d.rep[0:]" % (self.semantique_concept_item, row)
self.semantique_concept_item_I = u"%s.rep%d" %\
(self.semantique_concept_item, row)
result = self.client.eval_var(ask)
ask2 = "%s.rep%d.rep_present[0:]" % (self.semantique_concept_item, row)
result2 = self.client.eval_var(ask2)
presents = re.split(", ", result2)
if (result != "") :
result = re.split(", ", result)
if (which_concepts == "alphabetically"):
liste_scoree = []
self.PrgBar.perc(len(result))
for r in range(len(result)):
ask = "%s.rep%d.rep%d.val"% (self.semantique_concept_item, row, r)
val = int(self.client.eval_var(ask))
liste_scoree.append([result[r], val])
self.PrgBar.percAdd(1)
self.NOT2.depII.listw.addItems(map(lambda x: "%d %s"% (x[1], x[0]),
sorted(liste_scoree)))
else :
ask2 = "%s.rep%d.rep_present[0:]" % (self.semantique_concept_item, row)
result2 = self.client.eval_var(ask2)
presents = re.split(", ", result2)
self.PrgBar.perc(len(result))
for r in range(len(result)):
ask = "%s.rep%d.rep%d.val"% (self.semantique_concept_item, row, r)
val = int(self.client.eval_var(ask))
if (val == 1 and result[r] in presents):
# when 1 is reached, stop the loop
self.NOT2.depII.listw.addItems(map(lambda x: "1 %s" %x,
presents[r:]))
absents = list(set(result[r:]) - set(presents[r:]))
self.NOT2.depII.listw.addItems(map(lambda x: "0 %s" %x,
absents))
break
# when 0 is reached, stop the loop and assign 0 to all remaining values
#if (val == 0):
#self.NOT2.depII.listw.addItems(map(lambda x: "0 %s" %x, result[r:]))
#break
self.NOT2.depII.listw.addItem("%d %s"%(val, result[r]))
self.PrgBar.percAdd(1)
self.PrgBar.reset()
def cdepII_changed(self):
itemT = self.NOT2.depII.listw.currentItem()
if (itemT):
val, item = Controller.sp_el(itemT.text())
row = self.NOT2.depII.listw.currentRow()
self.activity(self.tr("%s selected") % item)
sem = self.sem_concept
if (sem in ["$cat_ent", "$cat_epr", "$cat_mar", "$cat_qua"]):
self.semantique_concept_item_II = u"%s.rep%d" %\
(self.semantique_concept_item, row)
else :
self.semantique_concept_item_II = u"%s.rep%d" %\
(self.semantique_concept_item_I, row)
def server_vars_Evalue(self):
var = self.server_vars.champ.text()
self.server_vars.champ.clear()
result = self.client.eval_var(var)
self.server_vars.result.setTextColor("red")
self.server_vars.result.append("%s" % var)
self.server_vars.result.setTextColor("black")
self.server_vars.result.append(result)
def server_index_Evalue(self):
var = self.server_vars.champ.text()
self.server_vars.champ.clear()
result = self.client.eval_index(var)
result = " ".join(result[0][1])
self.server_vars.result.setTextColor("red")
self.server_vars.result.append("%s" % var)
self.server_vars.result.setTextColor("black")
self.server_vars.result.append(result)
def server_getsem_Evalue(self):
var = self.server_vars.champ.text()
self.server_vars.champ.clear()
items = re.split("\s*", var)
self.server_vars.result.setTextColor("red")
self.server_vars.result.append("%s" % var)
if (len(items) == 2):
self.server_vars.result.setTextColor("black")
el, sem = items
self.server_vars.result.append(self.client.eval_get_sem(el, sem))
def lance_server(self):
self.activity(self.tr("Starting local server"))
self.thread = threading.Thread(target = self.server_thread)
self.thread.start()
self.Param_Server_R_button.setText(self.tr('Stop server'))
self.Param_Server_R_button.clicked.disconnect(self.lance_server)
self.Param_Server_R_button.clicked.connect(self.stop_server)
# TODO: the local connection will start the local server
def server_thread(self):
server_path = self.Param_Server_path_P2.text()
port = self.Param_Server_val_port.text()
PRC = self.Param_Server_path_PRC.text()
# TODO: protect the PRC path (quoting/escaping)
commande = "%s -e -p %s -f %s" % (server_path, port, PRC)
self.activity(self.tr("Loading %s") % PRC)
self.local_server = subprocess.Popen(commande, shell=True)
def stop_server(self):
self.activity(self.tr("Stopping local server"))
self.local_server.terminate()
self.thread.join() # Thread objects have no stop(); join the finished launcher thread instead
self.Param_Server_R_button.setText(self.tr('Start server'))
self.Param_Server_R_button.clicked.disconnect(self.stop_server)
self.Param_Server_R_button.clicked.connect(self.lance_server)
def connect_server_localhost(self):
self.connect_server('localhost')
def connect_server(self, h = 'prosperologie.org', p = '60000', name=""):
self.activity(self.tr("Connecting to server"))
#disable connection menus
self.menu.local_edit.setEnabled(False)
self.menu.distant.setEnabled(False)
#self.menu.local_connect.setEnabled(False)
self.client=Controller.client(h, p)
if (self.client.etat):
# give focus to the journal tab
self.NETs.setCurrentIndex(self.journal_index)
#show actants
self.display_actants()
# retrieve CTX and TXT
recupTXT = Controller.recupTXT_CTX(self)
self.listeObjetsTextes = recupTXT.listeObjetsTextes
self.dicTxtSem = recupTXT.dicTxtSem
#Show corpus texts list on its own tab
self.create_corpus_texts_tab()
#provide contexts
self.CTXs.l.clear()
self.CTXs.l.addItems(recupTXT.liste_champs_ctx)
#display info in the toolbar
nbpg = self.client.eval_var("$nbpg")
nbtxt = self.client.eval_var("$nbtxt")
volcorpus = self.client.eval_var("$volume_corpus")
if name != "":
message = "Corpus: <b>%s</b> texts: %s pages: %s volume: ?" % (name,
nbtxt, nbpg)
else:
message = "%s texts %s pages %s octets" % (nbtxt, nbpg,
volcorpus)
self.toolbar_descr_corpus.setText(message)
def disconnect_server(self):
"""Disconnect"""
self.activity(self.tr("Disconnecting"))
self.client.disconnect()
self.Param_Server_B.setText(self.tr('Connect to server'))
self.Param_Server_B.clicked.connect(self.connect_server)
def add_edit_corpus_tab(self):
if self.tr("Project") not in [self.NETs.tabText(i)
for i in range(0, self.NETs.count())] :
self.param_corpus = Viewer.Corpus_tab(self)
QtCore.QObject.connect(self.param_corpus.send_codex_ViewListeTextes,
QtCore.SIGNAL("triggered()"), self.send_codex_ViewListeTextes)
self.param_corpus.launchPRC_button.clicked.connect(self.launchPRC)
self.param_corpus_tab_index = self.NETs.addTab(self.param_corpus,
self.tr("Project"))
self.NETs.setCurrentIndex(self.param_corpus_tab_index)
def P1toP2(self):
fname, filt = QtGui.QFileDialog.getOpenFileName(self,
self.tr('Open file'), ".", '*.fic *.cat *.col *dic')
T = Controller.conceptP1toP2(fname)
D = T.readP1()
T.savexml(D)
def display_contexts(self):
i = self.NETs.addTab(self.CTXs, self.tr("Contexts"))
self.NETs.setCurrentIndex(i)
def display_server_vars(self):
i = self.NETs.addTab(self.server_vars, self.tr("Server vars"))
self.NETs.setCurrentIndex(i)
def display_pers(self):
addTab = True
for t in range(3, self.NOTs.count()):
if (self.NOTs.tabText(t) == self.tr("Persons")):
addTab = False
if (addTab):
self.persons_tab_index = self.NOTs.addTab(self.show_persons, self.tr("Persons"))
self.NOTs.setCurrentIndex(self.persons_tab_index)
self.show_persons.L.listw.clear()
ask = "$pers[0:]"
result = self.client.eval_var(ask)
list_results = re.split(", ", result)
self.activity(self.tr("Displaying %d persons")%len(list_results))
if len(list_results) > 0:
self.PrgBar.perc(len(list_results))
ask2 = u"$pers[0:].val"
result2 = self.client.eval_var(ask2)
list_val = re.split(", ", result2)
liste_valued = ["%s %s" %(list_val[row], item) for row, item in
enumerate(list_results)]
self.show_persons.L.listw.addItems(liste_valued)
def display_actants(self):
ask = u"$act[0:]"
result = self.client.eval_var(ask)
list_results = re.split(", ", result)
self.activity(self.tr("Displaying %d actants")%len(list_results))
self.NOTs.setCurrentIndex(0)
self.actantsTab.L.clear()
if len(list_results) > 0:
ask2 = u"val_nbtxt_act[0:]"
result2 = self.client.eval_var(ask2)
list_val = re.split(", ", result2)
liste_valued = [[int(val), list_results[row]]
for row, val in enumerate(list_val)]
self.actants_list_valued = ["%d %s" %(val, item) for val, item in
sorted(liste_valued, reverse=True)]
self.actantsTab.L.addItems(self.actants_list_valued)
self.actantsTab.L.currentItemChanged.connect(self.actsLchanged)
def codex_window(self):
codex_w = codex_window(self)
codex_w.show()
def add_gen_mrlw_tab(self):
self.gen_mrlw = Viewer.MrlwVarGenerator()
self.gen_mrlw_tab_index = self.NETs.addTab(self.gen_mrlw.gen_mrlw,
self.tr("Variant generation"))
self.NETs.setCurrentIndex(self.gen_mrlw_tab_index)
def MarloweViewer(self):
MarloweView = QtWebKit.QWebView()
tabindex = self.NETs.addTab(MarloweView, self.tr("Marlowe"))
self.NETs.setCurrentIndex(tabindex)
url = "http://tiresias.xyz:8080/accueil"
MarloweView.load(QtCore.QUrl(url))
def show_textContent(self, sem_txt):
"""Insert text content in the dedicated window"""
#contentText_semantique = "%s.ph[0:]" % sem_txt
#txt_content = self.client.eval_var(contentText_semantique)
# FIXME: this is the worst way to do it; see the sketch after this method
i = 0
b = True
txt_content = ""
while (b):
sem = "%s.ph%d" % (sem_txt, i)
res = self.client.eval_var(sem)
if (res != ""):
txt_content += "%s\n" % res
else :
b = False
i += 1
self.textContent.clear()
self.textContent.append(txt_content)
#move cursor to the beginning of the text
self.textContent.moveCursor(QtGui.QTextCursor.Start)
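# --- Hedged sketch (assumption): the commented lines at the top of this
# method suggest the server can return all sentences in one call; if the
# separator were unambiguous, the per-sentence loop above could collapse
# into a single round-trip, e.g.:
#
# raw = self.client.eval_var("%s.ph[0:]" % sem_txt)
# self.textContent.append(raw.replace(", ", "\n"))  # unsafe if sentences contain ", "
#
# The loop is kept because the ", " separator is ambiguous for free text.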
def efface_textCTX(self):
self.SET2.T.clear()
self.SET2.T.setRowCount(0)
self.SET2.T.setHorizontalHeaderLabels([self.tr('field'),
self.tr('value')]) # restore the headers after the clear
def show_textCTX(self, sem):
"""Show text metadata"""
self.efface_textCTX()
ctx = self.listeObjetsTextes[sem].getCTXall()
self.SET2.T.setRowCount(len(ctx))
for r, (field, value) in enumerate(ctx.iteritems()):
itemCTXwidget_field = QtGui.QTableWidgetItem(field)
self.SET2.T.setItem(r, 0, itemCTXwidget_field)
itemCTXwidget_val = QtGui.QTableWidgetItem(value)
self.SET2.T.setItem(r, 1, itemCTXwidget_val)
self.SET2.T.resizeRowsToContents()
def show_textProperties(self, sem_txt):
"""Show text properties according to selected tab"""
if (self.textProperties.currentIndex() == 0):
self.show_salient(sem_txt)
elif (self.textProperties.currentIndex() == 1):
self.show_text_elements(sem_txt)
def show_text_elements(self, sem_txt):
if hasattr(self, "semantique_txt_item"):
self.text_elements.element_list.clear()
sem_concept = Controller.semantiques[self.text_elements.selector.currentText()]
list_element_sem = "%s.%s[0:]" % (self.semantique_txt_item, sem_concept)
list_element = self.client.eval_var(list_element_sem)
self.text_element_depl = []
if (list_element != u''):
self.list_element_items = re.split(", ", list_element)
#FIXME for pers and undef too
if sem_concept not in ['$pers', '$undef']:
ask = "%s.val_freq_%s[0:]" % (self.semantique_txt_item,
sem_concept[1:])
list_val = re.split(', ', self.client.eval_var(ask))
#FIXME should be same size
if len(list_val) > len(self.list_element_items):
print "C15323 different list size", len(list_val) , len(self.list_element_items)
list_val = list_val[:len(self.list_element_items)]
liste_valued = ["%s %s"% (int(val),
self.list_element_items[row]) for row, val in enumerate(list_val)]
self.text_elements.element_list.addItems(liste_valued)
self.list_elements_valued = { self.list_element_items[row]: int(val)
for row, val in enumerate(list_val) }
else:
self.list_elements_valued = {}
val = False
for i, item in enumerate(self.list_element_items):
ask = u"%s.%s%d.val"%(self.semantique_txt_item, sem_concept, i)
val = int(self.client.eval_var(ask))
if (val == 1):
list_resume = map(lambda x: "1 %s"%x, self.list_element_items[i:])
self.text_elements.element_list.addItems(list_resume)
break
else:
self.list_elements_valued[self.list_element_items[i]] = val
self.text_elements.element_list.addItem("%d %s"%(val, item))
def deploie_text_elements(self):
item = self.text_elements.element_list.currentItem().text()
val, item = Controller.sp_el(item)
self.text_elements.element_list.clear()
sem_concept = Controller.semantiques[self.text_elements.selector.currentText()]
for r in self.list_element_items:
self.text_elements.element_list.addItem(u"%d %s" %\
(self.list_elements_valued[r], r))
if ((r == item) and (item in self.text_element_depl)):
self.text_element_depl.remove(item)
elif (r == item) :
self.text_element_depl.append(item)
if (r in self.text_element_depl):
ask = "%s.%s%d.rep_present[0:]"%(self.semantique_txt_item,
sem_concept, self.list_element_items.index(r))
result = self.client.eval_var(ask)
if (result != u''):
result = re.split(", ", result)
for sub_n in range(len(result)) :
if (result[sub_n] not in self.list_elements_valued.keys()):
ask = "%s.%s%d.rep_present%d.val"%(self.semantique_txt_item,
sem_concept, self.list_element_items.index(r), sub_n)
res = self.client.eval_var(ask)
self.list_elements_valued[result[sub_n]] = res
i = QtGui.QListWidgetItem()
i.setText(u" %s %s"%(self.list_elements_valued[result[sub_n]],
result[sub_n]))
i.setBackground(QtGui.QColor(237,243,254)) # cyan
self.text_elements.element_list.addItem(i)
def show_salient(self, sem_txt):
# TODO: flag important undefined items
self.saillantes.Act.clear()
self.saillantes.Cat.clear()
self.saillantes.Col.clear()
self.PrgBar.perc(36)
# actants
# the leading actants are computed by the server
# e.g. between 0 and 4 pages: the minimum actant weight is 2, the ideal number of actants is 5
# e.g. between 5 and 9 pages: the minimum actant weight is 3, the ideal number of actants is 7
# ...
#
# e.g. between 50 and 99 pages: the minimum actant weight is 7, the ideal number of actants is 25
#
# one page == 2000 characters
#TexteParametrageDesActants TableDuParametrageDuNombreDActants[]=
#{
# { 0 , 5 , 2 , 5 },
# { 5 , 10, 3 , 7 },
# {10 , 20, 4 ,10},
# {20 , 50, 5 ,15 },
# {50, 100, 6 ,20},
# {100,-1, 7 ,25}, // PS: the -1 marks the end of the table ... used in the for loop
#};
#TexteParametrage_ConceptsEmergents TableDuParametrageDesConceptsEmergents[]=
#{
# { 0 , 5 , 2 , 2 },
# { 5 , 10, 3 , 3 },
# {10 , 20, 4 , 4},
# {20 , 50, 7 , 5 },
# {50, 100, 10, 7},
# {100,-1, 12 ,10},
#};
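# A hedged Python rendering (illustrative names, not part of the server) of
# the lookup the tables above describe:
#
# ACTANT_TABLE = [(0, 5, 2, 5), (5, 10, 3, 7), (10, 20, 4, 10),
#                 (20, 50, 5, 15), (50, 100, 6, 20), (100, -1, 7, 25)]
#
# def actant_thresholds(nb_pages):
#     """Return (min_weight, ideal_count); high == -1 marks an open range."""
#     for low, high, min_weight, ideal in ACTANT_TABLE:
#         if nb_pages >= low and (high == -1 or nb_pages < high):
#             return min_weight, ideal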
self.saillantesAct_deployes = []
list_act_sem = "%s.act[0:]" % sem_txt
result = self.client.eval_var(list_act_sem)
self.list_act = result.split(',')
if len(self.list_act) > 0:
ask = "%s.val_freq_act[0:]"%(sem_txt)
result = self.client.eval_var(ask)
list_val = re.split(', ',result)
liste_valued = ["%d %s"%(int(val), self.list_act[row])
for row, val in enumerate(list_val)]
self.saillantes.Act.addItems(liste_valued)
self.liste_act_valued = { self.list_act[i]: [int(val), 0]
for i, val in enumerate(list_val) }
self.PrgBar.percAdd(12)
# categories
self.saillantesCat_deployes = []
liste_cats = []
self.list_cat_txt = {}
for typ in [u"cat_qua", u"cat_mar", u"cat_epr", u"cat_ent"]:
list_cat_sem = "%s.%s[0:]" % (sem_txt, typ)
list_cat = re.split(', ', self.client.eval_var(list_cat_sem))
if (list_cat != [u'']):
for r, c in enumerate(list_cat):
self.list_cat_txt[c] = [typ, r]
ask = "%s.val_freq_%s[0:]"%(sem_txt, typ)
result = self.client.eval_var(ask)
list_val = re.split(', ',result)
#REMOVEME should have same size
if len(list_val) != len(list_cat):
print "C31278 different list size"
try:
liste_valued = [ [int(val), list_cat[row]] for row, val in enumerate(list_val) ]
except:
print "C9338", list_cat, list_val
liste_cats.extend(liste_valued)
self.PrgBar.percAdd(3)
"""
if less than 4 cat, show them all
show until reached .5 of cumulated frequencies (show exaequo)
"""
liste_cats.sort(reverse=True)
if len(liste_cats) <=4 :
self.list_cat_aff = ["%d %s"%(val, el) for val, el in liste_cats]
else:
self.list_cat_aff = []
somme = sum(map(lambda x: x[0], liste_cats))
cum = 0
old_val = False
for val, el in liste_cats:
cum += val
if (float(cum)/somme < 0.5) or (val == old_val):
self.list_cat_aff.append("%d %s"%(val, el))
old_val = val
else:
break
self.saillantes.Cat.addItems(self.list_cat_aff)
# collections
self.saillantesCol_deployees = []
list_col_sem = "%s.col[0:]" % sem_txt
result = self.client.eval_var(list_col_sem)
if (result != u""):
self.list_col = re.split(", ", result)
self.list_col_valued = {}
vals = re.split(', ', self.client.eval_var("%s.val_dep_col[0:]"%(sem_txt)))
#REMOVEME since it's fixed
if len(vals) != len(self.list_col):
print "C31277 different list size"
liste_valued = []
for row, val in enumerate(vals):
self.list_col_valued[self.list_col[row]] = int(val)
liste_valued.append([int(val), self.list_col[row]])
liste_valued = ["%d %s"%(val, item) for val, item
in sorted(liste_valued, reverse=True)]
self.saillantes.Col.addItems(liste_valued)
self.PrgBar.percAdd(12)
def deploie_Col(self):
item = self.saillantes.Col.currentItem().text()
item = re.sub("^\s*\d* ", "", item)
self.saillantes.Col.clear()
for r in self.list_col:
self.saillantes.Col.addItem(u"%d %s" % (self.list_col_valued[r], r))
if ((r == item) and (item in self.saillantesCol_deployees)):
self.saillantesCol_deployees.remove(item)
elif (r == item) :
self.saillantesCol_deployees.append(item)
if (r in self.saillantesCol_deployees):
ask = "%s.col%d.rep_present[0:]"%(self.semantique_txt_item, self.list_col.index(r))
result = self.client.eval_var(ask)
if (result != u''):
result = re.split(", ", result)
for sub_n in range(len(result)) :
if (result[sub_n] not in self.list_col_valued.keys()):
ask = "%s.col%d.rep_present%d.val"%(self.semantique_txt_item, self.list_col.index(r), sub_n)
res = self.client.eval_var(ask)
self.list_col_valued[result[sub_n]] = res
i = QtGui.QListWidgetItem()
i.setText(u"\u00B7 %s %s"%(self.list_col_valued[result[sub_n]],
result[sub_n]))
i.setBackground(QtGui.QColor(237,243,254))
self.saillantes.Col.addItem(i)
def deploie_Cat(self):
item = self.saillantes.Cat.currentItem().text()
item = re.sub("^\s*\d* ", "", item)
self.saillantes.Cat.clear()
for resume in self.list_cat_aff:
self.saillantes.Cat.addItem(resume)
cat = re.sub("^\s*\d* ", "", resume)
if ((cat == item) and (item in self.saillantesCat_deployes)):
self.saillantesCat_deployes.remove(item)
elif (cat == item) :
self.saillantesCat_deployes.append(item)
if (cat in self.saillantesCat_deployes):
sem = self.list_cat_txt[cat]
ask = "%s.%s%d.rep_present[0:]"%(self.semantique_txt_item, sem[0], sem[1])
result = self.client.eval_var(ask)
if (result != u''):
result = re.split(", ", result)
for sub_n in range(len(result)) :
ask = "%s.%s%d.rep_present%d.val"%(self.semantique_txt_item,
sem[0], sem[1], sub_n)
res = self.client.eval_var(ask)
i = QtGui.QListWidgetItem()
i.setText(u" %s %s"%(res, result[sub_n]))
#i.setBackground(QtGui.QColor(245,245,245))
i.setBackground(QtGui.QColor(237,243,254)) # cyan
self.saillantes.Cat.addItem(i)
def deploie_Actant(self):
item = self.saillantes.Act.currentItem().text()
item = re.sub("^\s*\d* ", "", item)
self.saillantes.Act.clear()
for r in self.list_act:
self.saillantes.Act.addItem(u"%d %s" % (self.liste_act_valued[r][0], r))
if ((r == item) and (item in self.saillantesAct_deployes)):
self.saillantesAct_deployes.remove(item)
elif (r == item) :
self.saillantesAct_deployes.append(item)
if (r in self.saillantesAct_deployes):
ask = "%s.act%d.rep_present[0:]"%(self.semantique_txt_item,
self.list_act.index(r))
result = self.client.eval_var(ask)
if (result != u''):
result = re.split(", ", result)
for sub_n in range(len(result)) :
if (result[sub_n] not in self.liste_act_valued.keys()):
ask = "%s.act%d.rep_present%d.val"%(self.semantique_txt_item,
self.list_act.index(r), sub_n)
res = self.client.eval_var(ask)
self.liste_act_valued[result[sub_n]] = [res, 2]
i = QtGui.QListWidgetItem()
i.setText(u" %s %s"%(self.liste_act_valued[result[sub_n]][0],
result[sub_n]))
#i.setBackground(QtGui.QColor(245,245,245))
i.setBackground(QtGui.QColor(237, 243, 254)) # cyan
self.saillantes.Act.addItem(i)
def recup_element_actants(self):
"""get sem and name of item pointed in actants list"""
element = self.actantsTab.L.currentItem().text()
val, element = Controller.sp_el(element)
sem = self.client.eval_get_sem(element, '$act')
return (sem, element)
def recup_element_lexicon(self):
"""get semantic and name of item pointed in lexicon list"""
element = self.NOT1.dep0.listw.currentItem().text()
val, element = Controller.sp_el(element)
return (self.semantique_lexicon_item_0, element)
def recup_element_concepts(self, lvl):
"""get semantic and name of concept pointed in concept list"""
if (lvl == 2):
element = self.NOT2.depII.listw.currentItem().text()
val, element = Controller.sp_el(element)
return (self.semantique_concept_item_II, element)
elif (lvl == 1):
element0 = self.NOT2.dep0.listw.currentItem().text()
val, element0 = Controller.sp_el(element0)
elementI = self.NOT2.depI.listw.currentItem().text()
val, elementI = Controller.sp_el(elementI)
element = u"%s:%s" % (element0, elementI)
return (self.semantique_concept_item_I, element)
else:
element = self.NOT2.dep0.listw.currentItem().text()
val, element = Controller.sp_el(element)
return (self.semantique_concept_item, element)
def recup_element_persons(self):
"""get semantic and name of item pointed in persons list"""
element = self.show_persons.L.listw.currentItem().text()
row = self.show_persons.L.listw.currentRow()
val, element = Controller.sp_el(element)
return ("$pers%d"%row, element)
def add_networks_tab(self):
"""display tab network in the NE cadran"""
self.networks_tab_index = self.NOTs.addTab(self.tabNetworks, self.tr("Networks"))
def show_network(self, lvl):
"""Show the network of a selected item"""
#create the networks tab if not exists
if (not hasattr(self, "networks_tab_index")):
self.add_networks_tab()
# TODO: remove the parent tab when the last sub-tab is removed
if (self.lexicon_or_concepts() == "lexicon"):
sem, element = self.recup_element_lexicon()
elif (self.lexicon_or_concepts() == "concepts"):
sem, element = self.recup_element_concepts(lvl)
elif (lvl == "pers"):
sem, element = self.recup_element_persons()
for i in range(0, self.tabNetworks.count()):
if (self.tabNetworks.tabText(i) == element):
self.tabNetworks.removeTab(i)
res_semantique = "%s.res[0:]" % (sem)
result_network = re.split(", ", self.client.eval_var(res_semantique))
# FIXME: sometimes gives the values instead of the elements
if (len(result_network)):
ask2 = "%s.res[0:].val"%sem
result = re.split(", ", self.client.eval_var(ask2))
if (result == result_network):
print "C5073 %s has given values instead of element list" % res_semantique
else :
valued = ["%d %s" % (int(val), result_network[row]) for row, val in
enumerate(result)]
network_view = Viewer.NetworksViewer(valued)
network_view.elements.setValue(len(result_network))
index = self.tabNetworks.addTab(network_view.show_network_widget, element)
self.tabNetworks.setTabToolTip(index, element)
self.tabNetworks.setCurrentIndex(index)
self.NOTs.setCurrentIndex(self.networks_tab_index)
self.activity(self.tr("Displaying network for %s (%d items)")% (element,
len(result_network)))
def explo_item_selected(self):
self.explorer_widget.explo_lexi.listw.clear()
if self.explorer_widget.liste.listw.currentItem():
motif = self.explorer_widget.liste.listw.currentItem().text()
val, motif = Controller.sp_el(motif)
## result = self.client.eval_index(motif)
## print "C17248", result
## if (len(result[0][1])):
## for r in result[0][1]:
## if Controller.explo_lexic.has_key(r):
## self.explorer_widget.explo_lexi.listw.addItem(Controller.explo_lexic[r])
## else:
## print "C17249 %s %s" %(motif, r)
## else :
## result = self.client.eval_get_sem(motif, '$undef')
## if result != ['']:
## self.explorer_widget.explo_lexi.listw.addItem('undefined')
result_typage = self.client.eval_var("$typage.get.%s"%motif)
if len(result_typage):
for typ in re.split(',', result_typage):
if Controller.explo_type_dic.has_key(typ):
self.explorer_widget.explo_lexi.listw.addItem(Controller.explo_type_dic[typ])
elif Controller.explo_type_auto.has_key(typ):
self.explorer_widget.explo_lexi.listw.addItem(typ)
else:
print "C17250 %s %s" %(motif, typ)
else :
result = self.client.eval_get_sem(motif, '$undef')
if result != ['']:
self.explorer_widget.explo_lexi.listw.addItem('undefined')
#TODO check concept
def explo_show_text(self):
"""
Show texts containing a pattern
"""
if self.explorer_widget.liste.listw.currentItem():
            motif = self.motif  # retrieved from self.explorer
row = self.explorer_widget.liste.listw.currentRow()
element = self.explorer_widget.liste.listw.currentItem().text()
val, element = Controller.sp_el(element)
select_search = self.explorer_widget.select_fix.currentIndex()
types = [u"$search.pre", u"$search.suf", u"$search.rac"]
type_search = types[select_search]
ask = self.client.creer_msg_search(type_search,
motif,
pelement="%d"%row,
txt=True )
result = self.client.eval(ask)
print "C17307", ask, result
liste_textes = re.split(", ", result)
lt_valued = {}
list_sems = map(lambda k: self.dicTxtSem[k], liste_textes)
for i in list_sems:
                #TODO score/sort
lt_valued[i] = 1
self.show_texts(element, lt_valued)
def show_texts_from_list(self, lvl):
if hasattr(self, "client"):
if (self.lexicon_or_concepts() == "lexicon"):
sem, element = self.recup_element_lexicon()
elif (self.lexicon_or_concepts() == "concepts"):
sem, element = self.recup_element_concepts(lvl)
#print "C11734", sem, element
elif (self.lexicon_or_concepts() == "actants"):
sem, element = self.recup_element_actants()
element = "%s[as actant]" % element
#print "C11735", sem, element
elif (lvl == "pers"):
sem, element = self.recup_element_persons()
print "C2891"
else:
return 0
ask = "%s.txt[0:]" % (sem)
#print "C11736", ask
result = self.client.eval_var(ask)
if (result == ""):
self.activity(self.tr("No text to display for %s") % (element))
else:
liste_textes = re.split(", ", result)
#print "C11737", liste_textes
self.activity(self.tr("Displaying %d texts for %s") % (len(liste_textes), element))
#transform txt filename to sem
list_sems = map(lambda k: self.dicTxtSem[k], liste_textes)
#get element occurences in texts
ask = "%s.txt[0:].val"%(sem)
r = self.client.eval_var(ask)
result = re.split(', ', r)
lt_valued = { list_sems[i]: int(val) for i, val in enumerate(result)}
self.show_texts(element, lt_valued)
def lexicon_or_concepts(self):
i = self.NOTs.currentIndex()
if (i == 3):
return "lexicon"
elif (i == 2):
return "concepts"
elif (i == 0):
return "actants"
else:
return False
def show_texts(self, element, lvalued):
"""Show texts containing a selected item"""
#TODO remove deselect and select the text in the new tab
self.deselectText()
#display
texts_widget = Viewer.ListTexts(element, lvalued,
self.listeObjetsTextes, self)
#TODO sorting by date/score, filter
for sem, tri in texts_widget.sort():
txt = self.listeObjetsTextes[sem]
texts_widget.add(sem, txt.getResume())
texts_widget.corpus.itemSelectionChanged.connect(self.onSelectText)
QtCore.QObject.connect(texts_widget.corpus.action_sentences,
QtCore.SIGNAL("triggered()"), self.teste_wording)
texts_widget.anticorpus.itemSelectionChanged.connect(self.onSelectText)
#insert tab and give focus
self.del_tab_text_doubl(element)
index = self.SOT1.addTab(texts_widget, texts_widget.title)
self.SOT1.setCurrentIndex(index)
self.SOTs.setCurrentIndex(0)
self.SOT1.tabBar().setTabToolTip(index, texts_widget.title)
def del_tab_text_doubl(self, element):
"""delete text tab if exists"""
for i in range(1, self.SOT1.count()):
tab_element = re.sub(" \(\d*\)$", "", self.SOT1.tabText(i))
if (tab_element == element):
self.SOT1.removeTab(i)
def teste_wording(self):
#FIXME info must come from text list
if (self.lexicon_or_concepts() == "lexicon"):
sem, item = self.recup_element_lexicon()
        elif (self.lexicon_or_concepts() == "concepts"):
            #FIXME `lvl` was undefined in this scope and raised a NameError;
            #default to level 0 until the level is passed in from the text
            #list (see the FIXME above)
            sem, item = self.recup_element_concepts(0)
print "C1690", sem, item
score, item = Controller.sp_el(item)
#score, item = re.search("^(\d*) (.*)", item).group(1, 2)
#self.activity("%s double click" % (item))
print "C1691", score, item
if (int(score)):
ask = "$ph.+%s"%(item)
result = self.client.eval_var(ask)
if (not hasattr(self, "tab_sentences_index")):
#FIXME make it closable, only the sentences of the text selected
self.tab_sentences_index = self.SETs.addTab(self.tab_sentences,
self.tr("Sentences"))
for i in range(0, self.tab_sentences.count()):
if (self.tab_sentences.tabText(i) == item):
self.tab_sentences.removeTab(i)
show_sentences_widget = QtGui.QWidget()
show_sentences_box = QtGui.QVBoxLayout()
            # take up all the available space
show_sentences_box.setContentsMargins(0,0,0,0)
show_sentences_box.setSpacing(0)
show_sentences_widget.setLayout(show_sentences_box)
index = self.tab_sentences.addTab(show_sentences_widget, item)
self.tab_sentences.setTabToolTip(index, item)
sentence_text = QtGui.QTextEdit()
show_sentences_box.addWidget(sentence_text)
sentence_text.append(result)
#give focus
self.tab_sentences.setCurrentIndex(index)
self.SOTs.setCurrentIndex(self.tab_sentences_index)
def explorer(self):
self.explorer_widget.liste.listw.clear()
self.motif = self.explorer_widget.saisie.text()
if (self.motif != ""):
types = [u"$search.pre", u"$search.suf", u"$search.rac"]
type_search = types[self.explorer_widget.select_fix.currentIndex()]
if (self.explorer_widget.sensitivity.isChecked()):
type_search = re.sub("search", "searchcs", type_search)
if (self.motif == "abracadabri"): self.explorer_widget.explo_result_count.setText("abracadabra!")
if (self.motif != "" and hasattr(self, "client")):
ask = self.client.creer_msg_search(type_search, self.motif, "[0:]")
result = self.client.eval(ask)
#print "C25712", ask, result
if (result != ''):
liste_result = re.split(", ", result)
self.activity(self.tr("Searching for {%s}: %d results")%(self.motif,
len(liste_result)))
self.explorer_widget.explo_result_count.setText("Found %d results"% len(liste_result))
self.PrgBar.perc(len(liste_result))
for i in range(len(liste_result)):
ask = self.client.creer_msg_search(type_search,
self.motif, "%d"%i, val=True)
r = self.client.eval(ask)
#print "C25713", ask, r
self.PrgBar.percAdd(1)
self.explorer_widget.liste.listw.addItem("%s %s"% (r,
liste_result[i]))
else :
#if nothing found with the pattern
self.activity(self.tr("Searching for {%s}: no result") % (self.motif))
self.explorer_widget.explo_result_count.setText("Nothing found")
def contexts_contents(self):
self.CTXs.cont.clear()
if (self.CTXs.l.currentItem()):
champ = self.CTXs.l.currentItem().text()
result = self.client.eval_var(u"$ctx.%s[0:]" % champ)
            result = re.split("(?<!\\\), ", result)  # negative lookbehind: do not split on escaped commas
dic_CTX = {}
for r in result:
if r in dic_CTX.keys():
dic_CTX[r] = dic_CTX[r] + 1
else:
dic_CTX[r] = 1
for el in sorted(dic_CTX.items(), key= lambda (k, v) : (-v, k)):
self.CTXs.cont.addItem(u"%d %s"%(el[1], re.sub("\\\,", ",", el[0])))
def maj_metadatas(self):
string_ctx = self.client.eval_var("$ctx")
#self.client.add_cache_var(sem_txt +".ctx."+field, val)
current = self.CTXs.l.currentItem()
self.CTXs.cont.clear()
if (current):
self.CTXs.l.setCurrentItem(current)
self.contexts_contents()
def to_clipboard(self, l):
clipboard = QtGui.QApplication.clipboard()
clipboard.setText("\n".join(l))
self.activity(u"%d elements copied to clipboard" % (len(l)))
def copy_lw(self, listw):
n = listw.count()
liste = []
if (n):
for row in range(n):
element = re.sub("^(\d{1,}) (.*)$", "\\2\t\\1",
                                 listw.item(row).text(), 1)  # swap to "element<TAB>count" order for pasting into Excel
liste.append(element)
self.to_clipboard(liste)
def send_codex_ViewListeTextes(self):
Items = self.param_corpus.ViewListeTextes.selectedItems()
if (Items):
codex_w = codex_window(self)
codex_w.show()
l = []
for item in Items:
l.append(item.text())
codex_w.appendItems(l)
def launchPRC(self):
self.param_corpus.launchPRC_button.setEnabled(False)
#self.NETs.setTabEnabled(3, False)
PRC = self.param_corpus.nameCorpus.text()
if (os.name == 'nt'):
server_path = "server/prospero-II-serveur-64.exe"
else:
server_path = os.path.join(os.getcwd(), "server/prospero-server")
port = 60000
commande = '"%s" -e -d 1 -p %s -f "%s"' % (server_path, port, PRC)
local_server = subprocess.Popen(commande, shell=True)
#FIXME
#only connect when server is ready
time.sleep(5)
self.connect_server("localhost", port)
#FIXME
#kill the server when the gui is closed
atexit.register(local_server.terminate)
    ###FORMULAS###
def recupFormules(self):
ask = "$gescdf.mesFormules0[0:]"
result = self.client.eval(ask)
print "C25713", ask, result
#TODO move to Viewer
class codex_window(QtGui.QWidget):
def __init__(self, parent=None):
super(codex_window, self).__init__(parent, QtCore.Qt.Window)
self.codex_dic = Controller.edit_codex()
if self.codex_dic.cherche_codex():
self.codex_dic.parse_codex_xml("codex.xml")
L = QtGui.QVBoxLayout()
self.setLayout(L)
H2 = QtGui.QHBoxLayout()
L.addLayout(H2)
h22 = QtGui.QVBoxLayout()
H2.addLayout(h22)
h22Buttons = QtGui.QHBoxLayout()
h22.addLayout(h22Buttons)
self.h22Label = QtGui.QLabel(self.tr("Text file list: drag and drop"))
h22Buttons.addWidget(self.h22Label)
h22_spacer = QtGui.QLabel()
h22_spacer.setSizePolicy(QtGui.QSizePolicy.Expanding,
QtGui.QSizePolicy.Minimum)
h22Buttons.addWidget(h22_spacer)
h22gen = QtGui.QPushButton()
h22gen.setIcon(QtGui.QIcon("images/gear.png"))
h22gen.setToolTip(self.tr("test file names"))
h22Buttons.addWidget(h22gen)
QtCore.QObject.connect(h22gen,
QtCore.SIGNAL("clicked()"), self.generate)
self.h22liste = Viewer.ListViewDrop(self)
self.h22liste.fileDropped.connect(self.FilesDropped)
h22.addWidget(self.h22liste)
self.h22liste.setSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.h22liste.setContextMenuPolicy(QtCore.Qt.ActionsContextMenu)
efface_h22listeItem = QtGui.QAction(self.tr('delete item'), self)
self.h22liste.addAction(efface_h22listeItem)
QtCore.QObject.connect(efface_h22listeItem, QtCore.SIGNAL("triggered()"), self.efface_h22listeItem)
efface_h22liste = QtGui.QAction(self.tr('clear list'), self)
self.h22liste.addAction(efface_h22liste)
QtCore.QObject.connect(efface_h22liste, QtCore.SIGNAL("triggered()"), self.efface_h22liste)
h23 = QtGui.QVBoxLayout()
h23Buttons = QtGui.QHBoxLayout()
h23.addLayout(h23Buttons)
self.h23Label = QtGui.QLabel()
h23Buttons.addWidget(self.h23Label)
h23spacer = QtGui.QLabel()
h23spacer.setSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
h23Buttons.addWidget(h23spacer)
self.h23BT = QtGui.QCheckBox(self.tr("get titles"))
h23Buttons.addWidget(self.h23BT)
#self.h23BT.setChecked(True)
self.h23BT.stateChanged.connect(self.generate)
self.h23BR = QtGui.QCheckBox(self.tr("replace"))
h23Buttons.addWidget(self.h23BR)
h23BS = QtGui.QPushButton(self.tr("save CTX"))
h23Buttons.addWidget(h23BS)
h23BS.clicked.connect(self.saveCTX)
self.h23liste = QtGui.QTableWidget()
self.h23liste.verticalHeader().setVisible(False)
        #TODO make the list non-editable
h23.addWidget(self.h23liste)
H2.addLayout(h23)
H1 = QtGui.QHBoxLayout()
h11 = QtGui.QVBoxLayout()
H1.addLayout(h11)
self.select_champ = QtGui.QComboBox()
h11.addWidget(self.select_champ)
self.search_line = QtGui.QLineEdit()
h11.addWidget(self.search_line)
self.search_line.returnPressed.connect(self.eval_search_line)
self.search_result = QtGui.QListWidget()
h11.addWidget(self.search_result)
self.search_result.currentItemChanged.connect(self.eval_search_C)
self.search_line.setSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Preferred)
self.search_result.setSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
h12 = QtGui.QVBoxLayout()
H1.addLayout(h12)
h12buttons = QtGui.QHBoxLayout()
h12.addLayout(h12buttons)
self.h12LabelNum = QtGui.QLabel()
h12buttons.addWidget(self.h12LabelNum)
h12buttonsSpacer = QtGui.QLabel()
h12buttons.addWidget(h12buttonsSpacer)
h12buttonsSpacer.setSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
h12BS = QtGui.QPushButton(self.tr("save codex"))
h12BS.clicked.connect(self.codex_dic.save_codex)
h12buttons.addWidget(h12BS)
self.listRad = QtGui.QListWidget()
h12.addWidget(self.listRad)
self.listRad.setSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.listRad.doubleClicked.connect(self.mod_listRadItem)
self.listRad.currentItemChanged.connect(self.changeRad)
self.listRad.setContextMenuPolicy(QtCore.Qt.ActionsContextMenu)
efface_listRadItem = QtGui.QAction(self.tr('delete item'), self)
self.listRad.addAction(efface_listRadItem)
QtCore.QObject.connect(efface_listRadItem, QtCore.SIGNAL("triggered()"), self.efface_listRadItem)
add_listRadItem = QtGui.QAction(self.tr('add item'), self)
self.listRad.addAction(add_listRadItem)
QtCore.QObject.connect(add_listRadItem, QtCore.SIGNAL("triggered()"), self.add_listRadItem)
self.listRad.setItemDelegate(Viewer.MyDelegate(self))
self.listRad.itemDelegate().closedSignal.connect(self.mod_listRadItem_done)
self.initiate()
h13 = QtGui.QVBoxLayout()
H1.addLayout(h13)
self.h13List = QtGui.QTableWidget()
self.h13List.setColumnCount(2)
self.h13List.setHorizontalHeaderLabels([self.tr('field'),
self.tr('value')])
self.h13List.horizontalHeader().setStretchLastSection(True)
self.h13List.verticalHeader().setVisible(False)
h13.addWidget(self.h13List)
self.h13List.setContextMenuPolicy(QtCore.Qt.ActionsContextMenu)
efface_listRadValueItem = QtGui.QAction('delete line', self)
self.h13List.addAction(efface_listRadValueItem)
QtCore.QObject.connect(efface_listRadValueItem, QtCore.SIGNAL("triggered()"), self.efface_listRadValueItem)
add_listRadValueItem = QtGui.QAction(self.tr('add line'), self)
self.h13List.addAction(add_listRadValueItem)
QtCore.QObject.connect(add_listRadValueItem, QtCore.SIGNAL("triggered()"), self.add_listRadValueItem)
copy_h13listLine = QtGui.QAction(self.tr('copy line'), self)
self.h13List.addAction(copy_h13listLine)
QtCore.QObject.connect(copy_h13listLine, QtCore.SIGNAL("triggered()"), self.copy_h13listLine)
paste_h13listLine = QtGui.QAction(self.tr('paste line'), self)
self.h13List.addAction(paste_h13listLine)
QtCore.QObject.connect(paste_h13listLine, QtCore.SIGNAL("triggered()"), self.paste_h13listLine)
self.h13List.cellChanged.connect(self.onChangeh13List)
h14 = QtGui.QVBoxLayout()
H1.addLayout(h14)
h14buttons = QtGui.QHBoxLayout()
h14.addLayout(h14buttons)
h14BM = QtGui.QPushButton(self.tr("merge"))
h14buttons.addWidget(h14BM)
h14BM.clicked.connect(self.merge_codex)
self.h14LabelNum = QtGui.QLabel()
h14buttons.addWidget(self.h14LabelNum)
h14buttonsSpacer = QtGui.QLabel()
h14buttons.addWidget(h14buttonsSpacer)
h14buttonsSpacer.setSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.h14MergeList = QtGui.QListWidget()
h14.addWidget(self.h14MergeList)
L.addLayout(H1)
def initiate(self):
self.listRad.currentItemChanged.disconnect(self.changeRad)
self.listRad.doubleClicked.disconnect(self.mod_listRadItem)
if len(self.codex_dic.dico):
self.listRad.clear()
self.listRad.addItems(self.codex_dic.dico.keys())
self.listRad.sortItems()
self.h12LabelNum.setText(self.tr("%d entries")%len(self.codex_dic.dico))
self.reset_select_champ()
self.listRad.doubleClicked.connect(self.mod_listRadItem)
self.listRad.currentItemChanged.connect(self.changeRad)
def reset_select_champ(self):
self.select_champ.clear()
self.select_champ.addItem(u"")
if len(self.codex_dic.dico):
self.select_champ.addItems(self.codex_dic.champs())
self.search_line.clear()
self.search_result.clear()
def efface_listRadItem(self):
item = self.listRad.currentItem().text()
del(self.codex_dic.dico[item])
row = self.listRad.currentRow()
self.listRad.takeItem(row)
def add_listRadItem(self):
item = QtGui.QListWidgetItem("")
self.listRad.insertItem(self.listRad.count(), item)
self.listRad.setCurrentItem(item)
self.mod_listRadItem()
def mod_listRadItem(self):
item = self.listRad.currentItem()
item.setFlags(self.listRad.currentItem().flags() | QtCore.Qt.ItemIsEditable)
#item.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEditable | QtCore.Qt.ItemIsEnabled)
self.listRadItemText = item.text()
if (self.listRad.state() != self.listRad.EditingState):
self.listRad.editItem(item)
def mod_listRadItem_done(self):
item = self.listRad.currentItem()
old = self.listRadItemText
new = item.text()
if (old != new) :
if (new in self.codex_dic.dico.keys()):
item.setText(old)
else :
if (old == ""):
                    self.codex_dic.dico[new] = {u"author": "", u"medium": "",
                                                u"media-type": "", u"authorship": "",
                                                u"localisation": "",
                                                u"observations": ""}
self.changeRad()
else :
self.codex_dic.dico[new] = self.codex_dic.dico[old]
del(self.codex_dic.dico[old])
self.listRad.sortItems()
self.listRad.scrollToItem(item)
def efface_listRadValueItem(self):
if self.h13List.selectedItems() :
row = self.h13List.currentRow()
k = self.listRad.currentItem().text()
f = self.h13List.item(row, 0).text()
del(self.codex_dic.dico[k][f])
self.h13List.removeRow(row)
def add_listRadValueItem(self):
self.h13List.insertRow(0)
def copy_h13listLine(self):
r = self.h13List.currentRow()
if self.h13List.currentItem():
self.copy_h13listLineContent = [self.h13List.item(r, 0).text(), self.h13List.item(r, 1).text()]
def paste_h13listLine(self):
if hasattr(self, "copy_h13listLineContent"):
self.h13List.cellChanged.disconnect(self.onChangeh13List)
field, value = self.copy_h13listLineContent
k = self.listRad.currentItem().text()
row = -1
for r in range(self.h13List.rowCount()):
if (self.h13List.item(r, 0)):
if field == self.h13List.item(r, 0).text() :
row = r
if (row > -1):
self.h13List.item(row, 1).setText(value)
            else :
                # append at the end; using rowCount() also avoids a NameError
                # when the table is empty (the loop variable r may be unbound)
                new_row = self.h13List.rowCount()
                self.h13List.insertRow(new_row)
                self.h13List.setItem(new_row, 0, QtGui.QTableWidgetItem(field))
                self.h13List.setItem(new_row, 1, QtGui.QTableWidgetItem(value))
self.codex_dic.dico[k][field] = value
self.h13List.cellChanged.connect(self.onChangeh13List)
def efface_h22liste(self):
self.h22liste.clear()
self.h22Label.setText(self.tr("Text file list: drag and drop"))
self.generate()
def efface_h22listeItem(self):
if self.h22liste.selectedItems():
self.h22liste.takeItem(self.h22liste.currentRow())
#self.generate()
self.h22Label.setText(self.tr("%s texts")% self.h22liste.count())
def changeRad(self):
self.h13List.clear()
self.h13List.setHorizontalHeaderLabels([self.tr('field'),
self.tr('value')])
RAD = self.listRad.currentItem().text()
        if RAD in self.codex_dic.dico.keys():
            fields = self.codex_dic.dico[RAD].keys()
        else :
            # covers the empty string and any unknown key; `fields` was left
            # unbound here before, raising a NameError below
            fields = []
self.h13List.setRowCount(len(fields))
#TODO enumerate
r = 0
for field in fields:
i_field = QtGui.QTableWidgetItem(field)
self.h13List.setItem(r, 0, i_field)
v_field = QtGui.QTableWidgetItem(self.codex_dic.dico[RAD][field])
self.h13List.setItem(r, 1, v_field)
r += 1
self.h13List.resizeColumnToContents (0)
def onChangeh13List(self):
r = self.h13List.currentRow()
c = self.h13List.currentColumn()
if ((r != -1) and (c != -1)):
k = self.listRad.currentItem().text()
f = self.h13List.item(r, 0).text()
if (not re.match("^\s*$", f)) :
if (c):
v = self.h13List.currentItem().text()
self.codex_dic.dico[k][f] = v
else:
oldfields = self.codex_dic.champs()
oldfield = self.oldfield(k)
if (oldfield in self.codex_dic.dico[k].keys()):
self.codex_dic.dico[k][f] = self.codex_dic.dico[k][oldfield]
del (self.codex_dic.dico[k][oldfield])
else :
self.codex_dic.dico[k][f] = ""
self.reset_select_champ()
else:
oldfield = self.oldfield(k)
if (oldfield):
self.h13List.item(r, 0).setText (oldfield)
self.h13List.resizeColumnToContents (0)
def oldfield(self, k):
listefield = []
for row in range(self.h13List.rowCount()):
listefield.append(self.h13List.item(row, 0).text())
L = list(set(self.codex_dic.dico[k].keys()) - set(listefield))
if len(L):
return L[0]
else :
return False
def FilesDropped(self, l):
existing = []
for r in range(self.h22liste.count()):
existing.append(self.h22liste.item(r).text())
for url in list(set(l) - set(existing)):
if os.path.splitext(url)[1] in ['.txt', '.TXT']:
item = QtGui.QListWidgetItem(url, self.h22liste)
item.setStatusTip(url)
self.h22Label.setText(self.tr("%s texts")% self.h22liste.count())
QtGui.QApplication.processEvents()
self.h22liste.sortItems()
#if os.path.exists(url):
# if os.path.splitext(url)[1] in ['.txt', '.TXT']:
# item = QtGui.QListWidgetItem(url, self.h22liste)
# item.setStatusTip(url)
# self.h22Label.setText(u"%s texts"% self.h22liste.count())
# QtGui.QApplication.processEvents()
#self.h22liste.sortItems()
#self.generate()
def appendItems(self, liste):
self.h22liste.clear()
self.h22liste.addItems(liste)
self.h22Label.setText(self.tr("%s texts")% self.h22liste.count())
self.h22liste.sortItems()
def eval_search_line(self):
self.search_result.clear()
pattern = self.search_line.text()
field = self.select_champ.currentText()
result = self.codex_dic.chercheValue(field, pattern)
for r in result:
self.search_result.addItem(" : ".join(r))
self.search_result.sortItems()
def eval_search_C(self):
item = self.search_result.currentItem()
if (item):
i = item.text()
i = re.split(" : ", i, 1)[0]
item = self.listRad.findItems(i, QtCore.Qt.MatchFlags(QtCore.Qt.MatchExactly))
self.listRad.setCurrentItem(item[0])
def generate(self):
self.CTX_to_be_saved = {}
self.h23liste.clear()
self.h23liste.setRowCount(0)
self.h23liste.setColumnCount(2)
if self.h23BT.checkState():
self.h23liste.setHorizontalHeaderLabels([self.tr('path'),
self.tr('key, date and title')])
else :
self.h23liste.setHorizontalHeaderLabels([self.tr('path'),
self.tr('key and date')])
self.h23liste.horizontalHeader().setStretchLastSection(True)
m = 0
f = 0
#TODO enumerate
for r in range(self.h22liste.count()):
path = self.h22liste.item(r).text()
test = self.codex_dic.eval_file(path)
if (test):
self.match_add(path, test)
m += 1
else :
self.failed_add(path)
f += 1
self.h23Label.setText(self.tr("%d matches, %d fails") % (m, f))
QtGui.QApplication.processEvents()
self.h23liste.resizeColumnToContents (0)
self.h23liste.sortItems(1)
def match_add(self, path, result):
r = self.h23liste.rowCount()
self.h23liste.insertRow(r)
item_path = QtGui.QTableWidgetItem(path)
self.h23liste.setItem(r, 0, item_path)
CTXpath = path[:-3] + "ctx"
self.CTX_to_be_saved[CTXpath] = self.codex_dic.dico[result[0]].copy()
self.CTX_to_be_saved[CTXpath]["date"] = result[1] + " 00:00:00"
if self.h23BT.checkState():
if (os.path.isfile(path)):
title = self.get_title(path)
else :
title = ""
item_value_txt = u" ".join(result) + u" %s"% title
self.CTX_to_be_saved[CTXpath][u"title"] = title
else :
item_value_txt = u" ".join(result)
item_value = QtGui.QTableWidgetItem(item_value_txt)
self.h23liste.setItem(r, 1, item_value)
data = ""
for k, v in self.codex_dic.dico[result[0]].iteritems():
data += "%s:%s\n"%(k, v)
item_path.setToolTip(data[:-1])
item_value.setToolTip(data[:-1])
def get_title(self, path):
"""the first line of the .txt is taken for ctx title"""
with open(path, "rU") as buf:
B = buf.readlines()
title = B[0][:-1]
try :
return title.decode('latin-1')
except :
return title.decode('utf-8')
def failed_add(self, path):
r = self.h23liste.rowCount()
self.h23liste.insertRow(r)
item_path = QtGui.QTableWidgetItem(path)
item_path.setForeground(QtGui.QColor("red"))
self.h23liste.setItem(r, 0, item_path)
        item_value = QtGui.QTableWidgetItem(self.tr(u"\u00A0 no match"))  # u-prefix so \u00A0 is a real non-breaking space under Python 2
item_value.setForeground(QtGui.QColor("red"))
self.h23liste.setItem(r, 1, item_value)
item_path.setToolTip(self.tr("no match"))
item_value.setToolTip(self.tr("no match"))
def merge_codex(self):
fname, filt = QtGui.QFileDialog.getOpenFileName(self,
self.tr('Open file'), '.', '*.cfg;*.publi;*.xml')
if (fname) :
m_codex = Controller.edit_codex()
if os.path.splitext(fname)[1] == ".publi":
m_codex.parse_supports_publi(fname)
elif os.path.splitext(fname)[1] == ".cfg":
m_codex.parse_codex_cfg(fname)
elif os.path.splitext(fname)[1] == ".xml":
m_codex.parse_codex_xml(fname)
self.codex_dic.dico, fails = m_codex.fusionne(self.codex_dic.dico, m_codex.dico)
self.initiate()
self.h14MergeList.clear()
for k, v in fails.iteritems():
self.h14MergeList.addItem("%s: %s"%(k, str(v)))
self.h14LabelNum.setText(self.tr("%d fails") % len(fails))
def saveCTX(self):
if hasattr(self, "CTX_to_be_saved"):
for path, v in self.CTX_to_be_saved.iteritems():
if not (os.path.isfile(path) and not self.h23BR.checkState()) :
CTX = Controller.parseCTX()
CTX.path = path
CTX.dico = v
CTX.savefile()
if __name__ == '__main__':
app = QtGui.QApplication(sys.argv)
translator = QtCore.QTranslator()
translator.load('i18n/'+ QtCore.QLocale.system().name())
app.installTranslator(translator)
#Translation: pyside-lupdate -verbose -noobsolete i18n/P2.pro ; lrelease i18n/P2.pro
window = Principal()
#window.show()
sys.exit(app.exec_())
|
josquindebaz/P2Qt
|
p2gui.py
|
Python
|
lgpl-3.0
| 106,057 | 0.009045 |
#
# Autogenerated by Thrift
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
from thrift.Thrift import *
from ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
try:
from thrift.protocol import fastbinary
except:
fastbinary = None
class Iface:
def ping(self, name):
"""
Parameters:
- name
"""
pass
class Client(Iface):
def __init__(self, iprot, oprot=None):
self._iprot = self._oprot = iprot
if oprot != None:
self._oprot = oprot
self._seqid = 0
def ping(self, name):
"""
Parameters:
- name
"""
self.send_ping(name)
return self.recv_ping()
def send_ping(self, name):
self._oprot.writeMessageBegin('ping', TMessageType.CALL, self._seqid)
args = ping_args()
args.name = name
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_ping(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = ping_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success != None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "ping failed: unknown result");
class Processor(Iface, TProcessor):
def __init__(self, handler):
self._handler = handler
self._processMap = {}
self._processMap["ping"] = Processor.process_ping
def process(self, iprot, oprot):
(name, type, seqid) = iprot.readMessageBegin()
if name not in self._processMap:
iprot.skip(TType.STRUCT)
iprot.readMessageEnd()
x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
x.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
return
else:
self._processMap[name](self, seqid, iprot, oprot)
return True
def process_ping(self, seqid, iprot, oprot):
args = ping_args()
args.read(iprot)
iprot.readMessageEnd()
result = ping_result()
result.success = self._handler.ping(args.name)
oprot.writeMessageBegin("ping", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
class ping_args:
"""
Attributes:
- name
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'name', None, None, ), # 1
)
def __init__(self, name=None,):
self.name = name
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ping_args')
if self.name != None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class ping_result:
"""
Attributes:
- success
"""
thrift_spec = (
(0, TType.STRING, 'success', None, None, ), # 0
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ping_result')
if self.success != None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
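# Usage sketch (illustrative only -- not emitted by the Thrift generator).
# It assumes a server is already listening and that the standard Thrift
# Python runtime is installed; the host name and port below are invented:
#
#   from thrift.transport import TSocket
#   transport = TTransport.TBufferedTransport(TSocket.TSocket('localhost', 9090))
#   protocol = TBinaryProtocol.TBinaryProtocol(transport)
#   client = Client(protocol)
#   transport.open()
#   print client.ping('world')   # one round-trip: send_ping() then recv_ping()
#   transport.close()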
|
lenn0x/Milo-Tracing-Framework
|
src/py/examples/helloworld/HelloWorld.py
|
Python
|
apache-2.0
| 5,966 | 0.015924 |
# -*- coding: utf-8 -*-
# Minio Python Library for Amazon S3 Compatible Cloud Storage,
# (C) 2015, 2016, 2017 Minio, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
minio.error
~~~~~~~~~~~~~~~~~~~
This module provides custom exception classes for Minio library
and API specific errors.
:copyright: (c) 2015, 2016, 2017 by Minio, Inc.
:license: Apache 2.0, see LICENSE for more details.
"""
from xml.etree import cElementTree
from xml.etree.cElementTree import ParseError
if hasattr(cElementTree, 'ParseError'):
## ParseError seems to not have .message like other
## exceptions. Add dynamically new attribute carrying
## value from message.
if not hasattr(ParseError, 'message'):
setattr(ParseError, 'message', ParseError.msg)
_ETREE_EXCEPTIONS = (ParseError, AttributeError, ValueError, TypeError)
else:
_ETREE_EXCEPTIONS = (SyntaxError, AttributeError, ValueError, TypeError)
class MinioError(Exception):
"""
Base class for all exceptions
:param message: User defined message.
"""
def __init__(self, message, **kwargs):
super(MinioError, self).__init__(**kwargs)
self.message = message
def __str__(self):
return "{name}: message: {message}".format(
name=self.__class__.__name__,
message=self.message
)
class InvalidEndpointError(MinioError):
"""
InvalidEndpointError is raised when input endpoint URL is invalid.
"""
pass
class InvalidBucketError(MinioError):
"""
InvalidBucketError is raised when input bucket name is invalid.
NOTE: Bucket names are validated based on Amazon S3 requirements.
"""
pass
class InvalidArgumentError(MinioError):
"""
InvalidArgumentError is raised when an unexpected
argument is received by the callee.
"""
pass
class InvalidSizeError(MinioError):
"""
InvalidSizeError is raised when an unexpected size mismatch occurs.
"""
pass
class InvalidXMLError(MinioError):
"""
InvalidXMLError is raised when an unexpected XML tag or
a missing tag is found during parsing.
"""
pass
class MultiDeleteError(object):
"""
Represents an error raised when trying to delete an object in a
Multi-Object Delete API call :class:`MultiDeleteError <MultiDeleteError>`
:object_name: Object name that had a delete error.
:error_code: Error code.
:error_message: Error message.
"""
def __init__(self, object_name, err_code, err_message):
self.object_name = object_name
self.error_code = err_code
self.error_message = err_message
def __str__(self):
string_format = '<MultiDeleteError: object_name: {} error_code: {}' \
' error_message: {}>'
return string_format.format(self.object_name,
self.error_code,
self.error_message)
class ResponseError(MinioError):
"""
ResponseError is raised when an API call doesn't succeed.
raises :exc:`ResponseError` accordingly.
:param response: Response from http client :class:`urllib3.HTTPResponse`.
"""
def __init__(self, response, method, bucket_name=None,
object_name=None):
super(ResponseError, self).__init__(message='')
# initialize parameter fields
self._response = response
self._xml = response.data
self.method = method
self.bucket_name = bucket_name
self.object_name = object_name
# initialize all ResponseError fields
self.code = ''
# Amz headers
self.request_id = ''
self.host_id = ''
self.region = ''
# handle the error
self._handle_error_response(bucket_name)
def get_exception(self):
"""
Gets the error exception derived from the initialization of
an ErrorResponse object
:return: The derived exception or ResponseError exception
"""
exception = known_errors.get(self.code)
if exception:
return exception(self)
else:
return self
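    # Typical call-site pattern (a sketch, not part of this module's API):
    # wrap a failed urllib3 response and re-raise the most specific error,
    #
    #   raise ResponseError(response, 'GET', bucket_name,
    #                       object_name).get_exception()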
def _handle_error_response(self, bucket_name=None):
"""
Sets error response uses xml body if available, otherwise
relies on HTTP headers.
"""
if not self._response.data:
self._set_error_response_without_body(bucket_name)
else:
self._set_error_response_with_body(bucket_name)
def _set_error_response_with_body(self, bucket_name=None):
"""
Sets all the error response fields with a valid response body.
Raises :exc:`ValueError` if invoked on a zero length body.
:param bucket_name: Optional bucket name resource at which error
occurred.
:param object_name: Option object name resource at which error
occurred.
"""
if len(self._response.data) == 0:
raise ValueError('response data has no body.')
try:
root = cElementTree.fromstring(self._response.data)
except _ETREE_EXCEPTIONS as error:
raise InvalidXMLError('"Error" XML is not parsable. '
'Message: {0}'.format(error.message))
for attribute in root:
if attribute.tag == 'Code':
self.code = attribute.text
elif attribute.tag == 'BucketName':
self.bucket_name = attribute.text
elif attribute.tag == 'Key':
self.object_name = attribute.text
elif attribute.tag == 'Message':
self.message = attribute.text
elif attribute.tag == 'RequestId':
self.request_id = attribute.text
elif attribute.tag == 'HostId':
self.host_id = attribute.text
# Set amz headers.
self._set_amz_headers()
def _set_error_response_without_body(self, bucket_name=None):
"""
Sets all the error response fields from response headers.
"""
if self._response.status == 404:
if bucket_name:
if self.object_name:
self.code = 'NoSuchKey'
self.message = self._response.reason
else:
self.code = 'NoSuchBucket'
self.message = self._response.reason
elif self._response.status == 409:
            self.code = 'Conflict'
self.message = 'The bucket you tried to delete is not empty.'
elif self._response.status == 403:
self.code = 'AccessDenied'
self.message = self._response.reason
elif self._response.status == 400:
self.code = 'BadRequest'
self.message = self._response.reason
elif self._response.status == 301:
self.code = 'PermanentRedirect'
self.message = self._response.reason
elif self._response.status == 307:
self.code = 'Redirect'
self.message = self._response.reason
elif self._response.status in [405, 501]:
self.code = 'MethodNotAllowed'
self.message = self._response.reason
elif self._response.status == 500:
self.code = 'InternalError'
self.message = 'Internal Server Error.'
else:
self.code = 'UnknownException'
self.message = self._response.reason
# Set amz headers.
self._set_amz_headers()
def _set_amz_headers(self):
"""
Sets x-amz-* error response fields from response headers.
"""
if self._response.headers:
# keeping x-amz-id-2 as part of amz_host_id.
if 'x-amz-id-2' in self._response.headers:
self.host_id = self._response.headers['x-amz-id-2']
if 'x-amz-request-id' in self._response.headers:
self.request_id = self._response.headers['x-amz-request-id']
# This is a new undocumented field, set only if available.
if 'x-amz-bucket-region' in self._response.headers:
self.region = self._response.headers['x-amz-bucket-region']
def __str__(self):
return ('ResponseError: code: {0}, message: {1},'
' bucket_name: {2}, object_name: {3}, request_id: {4},'
' host_id: {5}, region: {6}'.format(self.code,
self.message,
self.bucket_name,
self.object_name,
self.request_id,
self.host_id,
self.region))
# Common error responses listed here
# http://docs.aws.amazon.com/AmazonS3/latest/API/ErrorResponses.htmlRESTErrorResponses
class KnownResponseError(MinioError):
def __init__(self, response_error, **kwargs):
super(KnownResponseError, self).__init__(message=self.message, **kwargs)
self.response_error = response_error
class AccessDenied(KnownResponseError):
message = 'Access Denied'
class AccountProblem(KnownResponseError):
message = 'There is a problem with your account that prevents the ' \
'operation from completing successfully.'
class AmbiguousGrantByEmailAddress(KnownResponseError):
message = 'The email address you provided is associated with ' \
'more than one account.'
class BadDigest(KnownResponseError):
message = 'The Content-MD5 you specified did not match what we received.'
class BucketAlreadyExists(KnownResponseError):
message = 'The requested bucket name is not available. The ' \
'bucket namespace is shared by all users of the system. ' \
'Please select a different name and try again.'
class BucketAlreadyOwnedByYou(KnownResponseError):
message = 'Your previous request to create the named bucket ' \
'succeeded and you already own it.'
class BucketNotEmpty(KnownResponseError):
message = 'The bucket you tried to delete is not empty.'
class CredentialNotSupported(KnownResponseError):
message = 'This request does not support credentials.'
class CrossLocationLoggingProhibited(KnownResponseError):
message = 'Cross-location logging not allowed. Buckets in one ' \
'geographic location cannot log information to a bucket ' \
'in another location.'
class EntityTooSmall(KnownResponseError):
    message = 'Your proposed upload is smaller than the minimum ' \
              'allowed object size.'
class EntityTooLarge(KnownResponseError):
message = 'Your proposed upload exceeds the maximum allowed object size.'
class ExpiredToken(KnownResponseError):
message = 'The provided token has expired.'
class IllegalVersioningConfigurationException(KnownResponseError):
message = 'Indicates that the versioning configuration specified ' \
'in the request is invalid.'
class IncompleteBody(KnownResponseError):
message = 'You did not provide the number of bytes specified by the ' \
'Content-Length HTTP header'
class IncorrectNumberOfFilesInPostRequest(KnownResponseError):
message = 'POST requires exactly one file upload per request.'
class InlineDataTooLarge(KnownResponseError):
message = 'Inline data exceeds the maximum allowed size.'
class InternalError(KnownResponseError):
message = 'We encountered an internal error. Please try again.'
class InvalidAccessKeyId(KnownResponseError):
message = 'The access key Id you provided does not exist in our records.'
class InvalidAddressingHeader(KnownResponseError):
message = 'You must specify the Anonymous role.'
class InvalidArgument(KnownResponseError):
message = 'Invalid Argument'
class InvalidBucketName(KnownResponseError):
message = 'The specified bucket is not valid.'
class InvalidBucketState(KnownResponseError):
message = 'The request is not valid with the current state of the bucket.'
class InvalidDigest(KnownResponseError):
message = 'The Content-MD5 you specified is not valid.'
class InvalidEncryptionAlgorithmError(KnownResponseError):
message = 'The encryption request you specified is not valid. ' \
'The valid value is AES256.'
class InvalidLocationConstraint(KnownResponseError):
message = 'The specified location constraint is not valid.'
class InvalidObjectState(KnownResponseError):
message = 'The operation is not valid for the current state of the object.'
class InvalidPart(KnownResponseError):
message = 'One or more of the specified parts could not be found. ' \
'The part might not have been uploaded, or the specified ' \
'entity tag might not have matched the part\'s entity tag'
class InvalidPartOrder(KnownResponseError):
message = 'The list of parts was not in ascending order.Parts list ' \
'must specified in order by part number.'
class InvalidPayer(KnownResponseError):
message = 'All access to this object has been disabled.'
class InvalidPolicyDocument(KnownResponseError):
message = 'The content of the form does not meet the conditions ' \
'specified in the policy document.'
class InvalidRange(KnownResponseError):
message = 'The requested range cannot be satisfied.'
class InvalidRequest(KnownResponseError):
message = 'Invalid Request'
class InvalidSecurity(KnownResponseError):
message = 'The provided security credentials are not valid.'
class InvalidSOAPRequest(KnownResponseError):
message = 'The SOAP request body is invalid.'
class InvalidStorageClass(KnownResponseError):
message = 'The storage class you specified is not valid.'
class InvalidTargetBucketForLogging(KnownResponseError):
message = 'The target bucket for logging does not exist, ' \
'is not owned by you, or does not have the appropriate ' \
'grants for the log-delivery group.'
class InvalidToken(KnownResponseError):
message = 'The provided token is malformed or otherwise invalid.'
class InvalidURI(KnownResponseError):
message = 'Couldn\'t parse the specified URI.'
class KeyTooLong(KnownResponseError):
message = 'Your key is too long.'
class MalformedACLError(KnownResponseError):
message = 'The XML you provided was not well-formed ' \
'or did not validate against our published schema.'
class MalformedPOSTRequest(KnownResponseError):
message = 'The body of your POST request is not ' \
'well-formed multipart/form-data.'
class MalformedXML(KnownResponseError):
message = 'This happens when the user sends malformed xml (xml that ' \
'doesn\'t conform to the published xsd) for the configuration.'
class MaxMessageLengthExceeded(KnownResponseError):
message = 'Your request was too big.'
class MaxPostPreDataLengthExceededError(KnownResponseError):
message = 'Your POST request fields preceding the ' \
'upload file were too large.'
class MetadataTooLarge(KnownResponseError):
message = 'Your metadata headers exceed the maximum allowed metadata size.'
class MethodNotAllowed(KnownResponseError):
message = 'The specified method is not allowed against this resource'
class MissingAttachment(KnownResponseError):
message = 'A SOAP attachment was expected, but none were found.'
class MissingContentLength(KnownResponseError):
message = 'You must provide the Content-Length HTTP header.'
class MissingRequestBodyError(KnownResponseError):
message = 'This happens when the user sends an empty xml document ' \
'as a request. The error message is, "Request body is empty."'
class MissingSecurityElement(KnownResponseError):
message = 'The SOAP 1.1 request is missing a security element.'
class MissingSecurityHeader(KnownResponseError):
message = 'Your request is missing a required header.'
class NoLoggingStatusForKey(KnownResponseError):
message = 'There is no such thing as a logging ' \
'status subresource for a key.'
class NoSuchBucket(KnownResponseError):
message = 'The specified bucket does not exist.'
class NoSuchKey(KnownResponseError):
message = 'The specified key does not exist.'
class NoSuchLifecycleConfiguration(KnownResponseError):
message = 'The lifecycle configuration does not exist.'
class NoSuchUpload(KnownResponseError):
    message = 'The specified multipart upload does not exist. ' \
              'The upload ID might be invalid, or the multipart ' \
              'upload might have been aborted or completed.'
class NoSuchVersion(KnownResponseError):
message = 'Indicates that the version ID specified in the ' \
'request does not match an existing version.'
class APINotImplemented(KnownResponseError):
message = 'A header you provided implies functionality ' \
'that is not implemented.'
class NotSignedUp(KnownResponseError):
message = 'Your account is not signed up.'
class NoSuchBucketPolicy(KnownResponseError):
message = 'The specified bucket does not have a bucket policy.'
class OperationAborted(KnownResponseError):
message = 'A conflicting conditional operation is currently in ' \
'progress against this resource. Try again.'
class PermanentRedirect(KnownResponseError):
message = 'The bucket you are attempting to access must be addressed ' \
'using the specified endpoint. Send all future requests ' \
'to this endpoint.'
class PreconditionFailed(KnownResponseError):
message = 'At least one of the preconditions you specified did not hold.'
class Redirect(KnownResponseError):
message = 'Temporary redirect.'
class RestoreAlreadyInProgress(KnownResponseError):
message = 'Object restore is already in progress.'
class RequestIsNotMultiPartContent(KnownResponseError):
message = 'Bucket POST must be of the enclosure-type multipart/form-data.'
class RequestTimeout(KnownResponseError):
message = 'Your socket connection to the server was not read ' \
'from or written to within the timeout period.'
class RequestTimeTooSkewed(KnownResponseError):
message = 'The difference between the request time and the ' \
'server\'s time is too large.'
class RequestTorrentOfBucketError(KnownResponseError):
message = 'Requesting the torrent file of a bucket is not permitted.'
class SignatureDoesNotMatch(KnownResponseError):
message = 'The request signature we calculated does not match the ' \
'signature you provided.'
class ServiceUnavailable(KnownResponseError):
message = 'Reduce your request rate.'
class SlowDown(KnownResponseError):
message = 'Reduce your request rate.'
class TemporaryRedirect(KnownResponseError):
message = 'You are being redirected to the bucket while DNS updates.'
class TokenRefreshRequired(KnownResponseError):
message = 'The provided token must be refreshed.'
class TooManyBuckets(KnownResponseError):
message = 'You have attempted to create more buckets than allowed.'
class UnexpectedContent(KnownResponseError):
message = 'This request does not support content.'
class UnresolvableGrantByEmailAddress(KnownResponseError):
message = 'The email address you provided does not match any account ' \
'on record.'
class UserKeyMustBeSpecified(KnownResponseError):
message = 'The bucket POST must contain the specified field name. ' \
'If it is specified, check the order of the fields.'
known_errors = {
'AccessDenied': AccessDenied,
    'AccountProblem': AccountProblem,
'AmbiguousGrantByEmailAddress': AmbiguousGrantByEmailAddress,
'BadDigest': BadDigest,
'BucketAlreadyExists': BucketAlreadyExists,
'BucketAlreadyOwnedByYou': BucketAlreadyOwnedByYou,
'BucketNotEmpty': BucketNotEmpty,
'CredentialNotSupported': CredentialNotSupported,
'CrossLocationLoggingProhibited': CrossLocationLoggingProhibited,
'EntityTooSmall': EntityTooSmall,
'EntityTooLarge': EntityTooLarge,
'ExpiredToken': ExpiredToken,
'IllegalVersioningConfigurationException': IllegalVersioningConfigurationException,
'IncompleteBody': IncompleteBody,
'IncorrectNumberOfFilesInPostRequest': IncorrectNumberOfFilesInPostRequest,
'InlineDataTooLarge': InlineDataTooLarge,
'InternalError': InternalError,
'InvalidAccessKeyId': InvalidAccessKeyId,
'InvalidAddressingHeader': InvalidAddressingHeader,
'InvalidArgument': InvalidArgument,
'InvalidBucketName': InvalidBucketName,
'InvalidBucketState': InvalidBucketState,
'InvalidDigest': InvalidDigest,
'InvalidEncryptionAlgorithmError': InvalidEncryptionAlgorithmError,
'InvalidLocationConstraint': InvalidLocationConstraint,
'InvalidObjectState': InvalidObjectState,
'InvalidPart': InvalidPart,
'InvalidPartOrder': InvalidPartOrder,
'InvalidPayer': InvalidPayer,
'InvalidPolicyDocument': InvalidPolicyDocument,
'InvalidRange': InvalidRange,
'InvalidRequest': InvalidRequest,
'InvalidSecurity': InvalidSecurity,
'InvalidSOAPRequest': InvalidSOAPRequest,
'InvalidStorageClass': InvalidStorageClass,
'InvalidTargetBucketForLogging': InvalidTargetBucketForLogging,
'InvalidToken': InvalidToken,
'InvalidURI': InvalidURI,
'KeyTooLong': KeyTooLong,
'MalformedACLError': MalformedACLError,
'MalformedPOSTRequest': MalformedPOSTRequest,
'MalformedXML': MalformedXML,
'MaxMessageLengthExceeded': MaxMessageLengthExceeded,
'MaxPostPreDataLengthExceededError': MaxPostPreDataLengthExceededError,
'MetadataTooLarge': MetadataTooLarge,
'MethodNotAllowed': MethodNotAllowed,
'MissingAttachment': MissingAttachment,
'MissingContentLength': MissingContentLength,
'MissingRequestBodyError': MissingRequestBodyError,
'MissingSecurityElement': MissingSecurityElement,
'MissingSecurityHeader': MissingSecurityHeader,
'NoLoggingStatusForKey': NoLoggingStatusForKey,
'NoSuchBucket': NoSuchBucket,
'NoSuchKey': NoSuchKey,
'NoSuchLifecycleConfiguration': NoSuchLifecycleConfiguration,
'NoSuchUpload': NoSuchUpload,
'NoSuchVersion': NoSuchVersion,
'NotImplemented': APINotImplemented,
'NotSignedUp': NotSignedUp,
'NoSuchBucketPolicy': NoSuchBucketPolicy,
'OperationAborted': OperationAborted,
'PermanentRedirect': PermanentRedirect,
'PreconditionFailed': PreconditionFailed,
'Redirect': Redirect,
'RestoreAlreadyInProgress': RestoreAlreadyInProgress,
'RequestIsNotMultiPartContent': RequestIsNotMultiPartContent,
'RequestTimeout': RequestTimeout,
'RequestTimeTooSkewed': RequestTimeTooSkewed,
'RequestTorrentOfBucketError': RequestTorrentOfBucketError,
'SignatureDoesNotMatch': SignatureDoesNotMatch,
'ServiceUnavailable': ServiceUnavailable,
'SlowDown': SlowDown,
'TemporaryRedirect': TemporaryRedirect,
'TokenRefreshRequired': TokenRefreshRequired,
'TooManyBuckets': TooManyBuckets,
'UnexpectedContent': UnexpectedContent,
'UnresolvableGrantByEmailAddress': UnresolvableGrantByEmailAddress,
'UserKeyMustBeSpecified': UserKeyMustBeSpecified,
}
|
NitishT/minio-py
|
minio/error.py
|
Python
|
apache-2.0
| 23,884 | 0.003601 |
# coding: utf-8
from mongomock import MongoClient as MockMongoClient
from .base import *
# For tests, don't use KoBoCAT's DB
DATABASES = {
'default': dj_database_url.config(default='sqlite:///%s/db.sqlite3' % BASE_DIR),
}
DATABASE_ROUTERS = ['kpi.db_routers.TestingDatabaseRouter']
TESTING = True
# Decrease prod value to speed-up tests
SUBMISSION_LIST_LIMIT = 100
ENV = 'testing'
# Run all Celery tasks synchronously during testing
CELERY_TASK_ALWAYS_EAGER = True
MONGO_CONNECTION_URL = 'mongodb://fakehost/formhub_test'
MONGO_CONNECTION = MockMongoClient(
MONGO_CONNECTION_URL, j=True, tz_aware=True)
MONGO_DB = MONGO_CONNECTION['formhub_test']
|
kobotoolbox/kpi
|
kobo/settings/testing.py
|
Python
|
agpl-3.0
| 664 | 0.001506 |
# -*- coding: utf-8 -*-
"""
Copyright (c) 2015, Philipp Klaus. All rights reserved.
License: GPLv3
"""
from distutils.core import setup
setup(name='netio230a',
version = '1.1.9',
description = 'Python package to control the Koukaam NETIO-230A',
long_description = 'Python software to access the Koukaam NETIO-230A and NETIO-230B: power distribution units / controllable power outlets with Ethernet interface',
author = 'Philipp Klaus',
author_email = 'philipp.l.klaus@web.de',
url = 'https://github.com/pklaus/netio230a',
license = 'GPL3+',
packages = ['netio230a'],
scripts = ['scripts/netio230a_cli', 'scripts/netio230a_discovery', 'scripts/netio230a_fakeserver'],
zip_safe = True,
platforms = 'any',
keywords = 'Netio230A Koukaam PDU',
classifiers = [
'Development Status :: 4 - Beta',
'Operating System :: OS Independent',
          'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
]
)
|
pklaus/netio230a
|
setup.py
|
Python
|
gpl-3.0
| 1,135 | 0.026432 |
r"""A proxy enabling multiple wiring guide instances to interact with the same
SpiNNaker boards.
A very simple protocol is used between the client and server. Clients may send
the following new-line delimited commands to the server:
* ``VERSION,[versionstring]\n`` The server will disconnect any client with an
incompatible version number reported for ``[versionstring]``. Returns
``OK\n``.
* ``LED,[c],[f],[b],[lednum],[state]\n`` Turn on or off the specified LED. Note
that the LED remains switched on while *any* client wants it to be on.
Returns ``OK\n``.
* ``TARGET,[c],[f],[b],[link]\n`` Discover what link is at the other end of the
supplied link. Returns ``[c],[f],[b],[link]\n`` or ``None\n`` if no link is
connected. Note that links are represented by their number, not their name.
"""
import traceback
import socket
import select
from collections import defaultdict
import logging
from six import iteritems
from spinner.version import __version__
from spinner.topology import Direction
DEFAULT_PORT = 6512
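# Illustrative raw-socket session (a sketch: host, coordinates and replies
# are invented, only the command grammar comes from the module docstring):
#
#   import socket
#   s = socket.create_connection(("localhost", DEFAULT_PORT))
#   s.sendall(b"VERSION,0.0.0\n")   # server replies b"OK\n" or disconnects us
#   s.sendall(b"LED,0,1,2,7,1\n")   # LED 7 on board (0,1,2) on -> b"OK\n"
#   s.sendall(b"TARGET,0,1,2,5\n")  # far end of link 5 -> b"0,1,3,2\n" or b"None\n"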
class ProxyError(Exception):
"""Exception raised when the proxy cannot connect."""
pass
class ProxyServer(object):
"""A proxy server enabling multiple wiring guide instances to interact with
the same SpiNNaker boards.
"""
def __init__(self, bmp_controller, wiring_probe,
hostname="", port=DEFAULT_PORT):
self.bmp_controller = bmp_controller
self.wiring_probe = wiring_probe
# Open a TCP socket
self.server_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.server_sock.setsockopt(socket.SOL_SOCKET,
socket.SO_REUSEADDR, 1)
self.server_sock.bind((hostname, port))
self.server_sock.listen(5)
self.client_socks = []
# A buffer for unprocessed data received from each client
self.client_buffer = {}
# For each LED, maintains a set of clients which have turned it on
self.led_setters = defaultdict(set)
def add_client(self, sock, addr):
"""Register a new client."""
logging.info("New connection {} from {}".format(sock, addr))
self.client_socks.append(sock)
# Create buffer for received data (and schedule its deletion upon
# disconnection)
self.client_buffer[sock] = b""
def remove_client(self, sock):
"""Disconnect and cleanup after a particular child."""
logging.info("Closing socket {}".format(sock))
# Remove buffer
self.client_buffer.pop(sock)
# Turn off any LEDs left on by the client
for (c, f, b, led), socks in iteritems(self.led_setters):
if sock in socks:
self.set_led(sock, c, f, b, led, False)
# Close socket
self.client_socks.remove(sock)
sock.close()
def set_led(self, sock, c, f, b, led, state):
"""Set the state of a diagnostic LED.
An LED is turned on if at least one client has turned it on. An LED is only
turned off if all clients which have turned the LED on have also turned it
off again.
"""
setters = self.led_setters[(c, f, b, led)]
cur_led_state = bool(setters)
if state:
setters.add(sock)
else:
setters.discard(sock)
new_led_state = bool(setters)
if cur_led_state != new_led_state:
self.bmp_controller.set_led(led, new_led_state, c, f, b)
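	# Sketch of the reference counting described above (socket objects and
	# coordinates are invented):
	#
	#   server.set_led(sock_a, 0, 0, 1, 7, True)    # LED lights
	#   server.set_led(sock_b, 0, 0, 1, 7, True)    # still lit
	#   server.set_led(sock_a, 0, 0, 1, 7, False)   # still lit: B holds it on
	#   server.set_led(sock_b, 0, 0, 1, 7, False)   # now switched off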
def handle_version(self, sock, args):
"""Handle "VERSION" commands.
This command contains, as the argument, the SpiNNer version number of the
remote client. If the version of the client does not match the server, the
client is disconnected.
Arguments: vX.Y.Z
Returns: OK
"""
# Check for identical version
assert args.decode("ascii") == __version__
sock.send(b"OK\n")
def handle_led(self, sock, args):
"""Handle "LED" commands.
Set the state of a diagnostic LED on a board.
Arguments: c,f,b,led,state
Returns: OK
"""
c, f, b, led, state = map(int, args.split(b","))
self.set_led(sock, c, f, b, led, state)
sock.send(b"OK\n")
def handle_target(self, sock, args):
"""Handle "TARGET" commands.
Determine what is at the other end of a given link.
Arguments: c,f,b,d
Returns: c,f,b,d or None
"""
c, f, b, d = map(int, args.split(b","))
target = self.wiring_probe.get_link_target(c, f, b, d)
if target is None:
sock.send(b"None\n")
else:
sock.send("{},{},{},{}\n".format(*map(int, target)).encode("ascii"))
def process_data(self, sock, data):
"""Process data received from a socket."""
# Prepend any previously unprocessed data
data = self.client_buffer[sock] + data
# Handle any received commands. If a command fails (or is invalid) the
# connection is dropped.
try:
while b"\n" in data:
line, _, data = data.partition(b"\n")
logging.debug("Handling command {} from {}".format(line, sock))
cmd, _, args = line.partition(b",")
# If an unrecognised command arrives, this lookup will fail and get
# caught by the exception handler, printing an error and disconnecting
# the client.
{
b"VERSION": self.handle_version,
b"LED": self.handle_led,
b"TARGET": self.handle_target,
}[cmd](sock, args)
except Exception as e:
logging.exception(
"Disconnected client {} due to bad command (above)".format(sock))
self.remove_client(sock)
return
# Retain any remaining unprocessed data
self.client_buffer[sock] = data
def main(self):
logging.info("Starting proxy server...")
try:
while True:
ready, _1, _2 = select.select([self.server_sock] + self.client_socks, [], [])
for sock in ready:
if sock is self.server_sock:
# New client connected!
self.add_client(*self.server_sock.accept())
else:
# Data arrived from a client
try:
data = sock.recv(1024)
except (IOError, OSError) as exc:
logging.error(
"Socket {} failed to receive: {}".format(sock, exc))
# Cause socket to get closed
data = b"" # pragma: no branch
if len(data) == 0:
# Connection closed
self.remove_client(sock)
else:
self.process_data(sock, data)
except KeyboardInterrupt:
# Disconnect all clients (also cleans up LED states, etc.)
      for sock in list(self.client_socks):  # copy: remove_client mutates the list
        self.remove_client(sock)
logging.info("Proxy server terminated cleanly.")
class ProxyClient(object):
"""A client for the ProxyServer object defined above.
This object implements a BMPController-compatible ``set_led`` method and
WiringProbe compatible ``get_link_target`` method and thus may be substituted
for the above when these functions are all that are required, e.g. for the
InteractiveWiringGuide.
"""
def __init__(self, hostname, port=DEFAULT_PORT):
"""Connect to a running ProxyServer."""
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.connect((hostname, port))
# A receive buffer
self.buf = b""
# Check for protocol version compatibility.
self.check_version()
def recvline(self):
"""Wait for a full line to be received from the server."""
while b"\n" not in self.buf:
data = self.sock.recv(1024)
self.buf += data
if len(data) == 0:
raise ProxyError("Remote server closed the connection.")
line, _, self.buf = self.buf.partition(b"\n")
return line
def check_version(self):
"""Check that the remote server has a compatible protocol version."""
self.sock.send("VERSION,{}\n".format(__version__).encode("ascii"))
if self.recvline() != b"OK":
raise ProxyError("Remote server has incompatible protocol version")
def set_led(self, led, state, c, f, b):
"""Set the state of an LED on the remote machine."""
self.sock.send("LED,{},{},{},{},{}\n".format(
c, f, b, led, int(state)).encode("ascii"))
if self.recvline() != b"OK":
raise ProxyError("Got unexpected response to LED command.")
def get_link_target(self, c, f, b, d):
"""Discover the other end of a specified link on a remote machine."""
self.sock.send("TARGET,{},{},{},{}\n".format(
c, f, b, int(d)).encode("ascii"))
response = self.recvline()
if response == b"None":
return None
else:
try:
c, f, b, d = map(int, response.split(b","))
return (c, f, b, Direction(d))
except ValueError:
raise ProxyError("Got unexpected response to TARGET command.")
|
SpiNNakerManchester/SpiNNer
|
spinner/proxy.py
|
Python
|
gpl-2.0
| 8,272 | 0.040015 |
from . import Renderer
from PIL import Image, ImageFont, ImageQt, ImageDraw
from PyQt5 import QtGui
'''
Renders a single line of text at a given position.
'''
class TextRenderer(Renderer):
MSFACTOR = 8
    def __init__(self, gl, text, pos, size=64):
super().__init__(gl)
self.text = text
self.pos = pos
if size > 64:
self.MSFACTOR = 4
if size > 128:
self.sizeAdjust = size / 128
self.fSize = 128
else:
self.fSize = size
self.sizeAdjust = 1
self.callList = self.genSymbolCallList()
def genSymbolCallList(self):
genList = self.gl.glGenLists(1)
try:
font = ImageFont.truetype('resources/interface/Roboto.ttf', self.fSize * self.MSFACTOR)
except OSError:
print("Font not found, loading failsafe.")
font = ImageFont.truetype('arial.ttf', self.fSize * self.MSFACTOR)
# works on Windows; may still fail on Linux and OSX. Documentation unclear.
textSize = font.getsize(self.text)
border = 5
image = Image.new("RGBA", (textSize[0] + 2*border, textSize[1] + 2*border), None)
draw = ImageDraw.Draw(image)
draw.text((border, border), self.text, font=font, fill="white")
del draw
imgWidth = float(self.sizeAdjust * image.size[0] / self.MSFACTOR)
imgHeight = float(self.sizeAdjust * image.size[1] / self.MSFACTOR)
        self.vertices = [0.0, self.fSize - imgHeight, 2.0,
                         0.0, float(self.fSize), 2.0,
                         imgWidth, float(self.fSize), 2.0,
                         imgWidth, self.fSize - imgHeight, 2.0]
        self.texCoords = [0.0, 0.0, 2.0,
                          0.0, 1.0, 2.0,
                          1.0, 1.0, 2.0,
                          1.0, 0.0, 2.0]
self.texture = QtGui.QOpenGLTexture(ImageQt.ImageQt(image), True)
self.texture.setMinMagFilters(QtGui.QOpenGLTexture.LinearMipMapLinear, QtGui.QOpenGLTexture.Linear)
self.gl.glNewList(genList, self.gl.GL_COMPILE)
self.gl.glColor4f(1.0, 1.0, 1.0, 0.0)
self.gl.glMatrixMode(self.gl.GL_MODELVIEW)
self.gl.glPushMatrix()
self.gl.glTranslated(self.pos.x - self.sizeAdjust * (image.size[0] / (2 * self.MSFACTOR) - border), self.pos.y - image.size[1] / (2 * self.MSFACTOR), 0)
self.texture.bind()
self.gl.glEnableClientState(self.gl.GL_VERTEX_ARRAY)
self.gl.glEnableClientState(self.gl.GL_TEXTURE_COORD_ARRAY)
self.gl.glVertexPointer(3, self.gl.GL_FLOAT, 0, self.vertices)
self.gl.glTexCoordPointer(3, self.gl.GL_FLOAT, 0, self.texCoords)
self.gl.glEnable(self.gl.GL_TEXTURE_2D)
self.gl.glDrawArrays(self.gl.GL_QUADS, 0, 4)
self.gl.glDisable(self.gl.GL_TEXTURE_2D)
self.texture.release()
self.gl.glPopMatrix()
self.gl.glEndList()
return genList
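# Minimal usage sketch (assumes an active legacy-OpenGL context, the `gl`
# function table and a `pos` object with .x/.y attributes supplied by the
# surrounding eeDA application; the text and size are hypothetical):
#
#   renderer = TextRenderer(gl, "R1 10k", pos, size=32)
#   gl.glCallList(renderer.callList)   # replay the compiled display list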
|
eeucalyptus/eeDA
|
app/graphics/textrenderer.py
|
Python
|
apache-2.0
| 3,090 | 0.009385 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
This module is a special module to define functions or other resources
which need to be imported outside of openstack_dashboard.api.nova
(like cinder.py) to avoid cyclic imports.
"""
from django.conf import settings
from glanceclient import exc as glance_exceptions
from novaclient import api_versions
from novaclient import client as nova_client
from horizon import exceptions as horizon_exceptions
from horizon.utils import memoized
from openstack_dashboard.api import base
from openstack_dashboard.api import glance
from openstack_dashboard.api import microversions
from openstack_dashboard.contrib.developer.profiler import api as profiler
# Supported compute versions
VERSIONS = base.APIVersionManager("compute", preferred_version=2)
VERSIONS.load_supported_version(1.1, {"client": nova_client, "version": 1.1})
VERSIONS.load_supported_version(2, {"client": nova_client, "version": 2})
INSECURE = getattr(settings, 'OPENSTACK_SSL_NO_VERIFY', False)
CACERT = getattr(settings, 'OPENSTACK_SSL_CACERT', None)
class Server(base.APIResourceWrapper):
"""Simple wrapper around novaclient.server.Server.
Preserves the request info so image name can later be retrieved.
"""
_attrs = ['addresses', 'attrs', 'id', 'image', 'links', 'description',
'metadata', 'name', 'private_ip', 'public_ip', 'status', 'uuid',
'image_name', 'VirtualInterfaces', 'flavor', 'key_name', 'fault',
'tenant_id', 'user_id', 'created', 'locked',
'OS-EXT-STS:power_state', 'OS-EXT-STS:task_state',
'OS-EXT-SRV-ATTR:instance_name', 'OS-EXT-SRV-ATTR:host',
'OS-EXT-AZ:availability_zone', 'OS-DCF:diskConfig']
def __init__(self, apiresource, request):
super(Server, self).__init__(apiresource)
self.request = request
# TODO(gabriel): deprecate making a call to Glance as a fallback.
@property
def image_name(self):
if not self.image:
return None
elif hasattr(self.image, 'name'):
return self.image.name
elif 'name' in self.image:
return self.image['name']
else:
try:
image = glance.image_get(self.request, self.image['id'])
self.image['name'] = image.name
return image.name
except (glance_exceptions.ClientException,
horizon_exceptions.ServiceCatalogException):
self.image['name'] = None
return None
@property
def internal_name(self):
return getattr(self, 'OS-EXT-SRV-ATTR:instance_name', "")
@property
def availability_zone(self):
return getattr(self, 'OS-EXT-AZ:availability_zone', "")
@property
def host_server(self):
return getattr(self, 'OS-EXT-SRV-ATTR:host', '')
@memoized.memoized
def get_microversion(request, features):
client = novaclient(request)
min_ver, max_ver = api_versions._get_server_version_range(client)
return (microversions.get_microversion_for_features(
'nova', features, api_versions.APIVersion, min_ver, max_ver))
def get_auth_params_from_request(request):
"""Extracts properties needed by novaclient call from the request object.
These will be used to memoize the calls to novaclient.
"""
return (
request.user.username,
request.user.token.id,
request.user.tenant_id,
request.user.token.project.get('domain_id'),
base.url_for(request, 'compute'),
base.url_for(request, 'identity')
)
@memoized.memoized
def cached_novaclient(request, version=None):
(
username,
token_id,
project_id,
project_domain_id,
nova_url,
auth_url
) = get_auth_params_from_request(request)
if version is None:
version = VERSIONS.get_active_version()['version']
c = nova_client.Client(version,
username,
token_id,
project_id=project_id,
project_domain_id=project_domain_id,
auth_url=auth_url,
insecure=INSECURE,
cacert=CACERT,
http_log_debug=settings.DEBUG,
auth_token=token_id,
endpoint_override=nova_url)
return c
def novaclient(request, version=None):
if isinstance(version, api_versions.APIVersion):
version = version.get_string()
return cached_novaclient(request, version)
def get_novaclient_with_instance_desc(request):
microversion = get_microversion(request, "instance_description")
return novaclient(request, version=microversion)
@profiler.trace
def server_get(request, instance_id):
return Server(get_novaclient_with_instance_desc(request).servers.get(
instance_id), request)
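# Illustrative sketch (the request is a Horizon-authenticated Django request;
# the instance id is hypothetical): server_get goes through the memoized,
# microversion-aware client built above, so repeated calls within a request
# reuse the same novaclient instance.
#
#   server = server_get(request, "11111111-2222-3333-4444-555555555555")
#   print(server.image_name, server.availability_zone)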
|
NeCTAR-RC/horizon
|
openstack_dashboard/api/_nova.py
|
Python
|
apache-2.0
| 5,498 | 0 |
import doctest
from insights.parsers import ls_var_cache_pulp
from insights.parsers.ls_var_cache_pulp import LsVarCachePulp
from insights.tests import context_wrap
LS_VAR_CACHE_PULP = """
total 0
drwxrwxr-x. 5 48 1000 216 Jan 21 12:56 .
drwxr-xr-x. 10 0 0 121 Jan 20 13:57 ..
lrwxrwxrwx. 1 0 0 19 Jan 21 12:56 cache -> /var/lib/pulp/cache
drwxr-xr-x. 2 48 48 6 Jan 21 13:03 reserved_resource_worker-0@dhcp130-202.gsslab.pnq2.redhat.com
drwxr-xr-x. 2 48 48 6 Jan 21 02:03 reserved_resource_worker-1@dhcp130-202.gsslab.pnq2.redhat.com
drwxr-xr-x. 2 48 48 6 Jan 20 14:03 resource_manager@dhcp130-202.gsslab.pnq2.redhat.com
"""
def test_ls_var_cache_pulp():
ls_var_cache_pulp = LsVarCachePulp(context_wrap(LS_VAR_CACHE_PULP, path="insights_commands/ls_-lan_.var.cache.pulp"))
assert ls_var_cache_pulp.files_of('/var/cache/pulp') == ['cache']
cache_item = ls_var_cache_pulp.dir_entry('/var/cache/pulp', 'cache')
assert cache_item is not None
assert '/var/lib/pulp/' in cache_item['link']
def test_ls_var_cache_pulp_doc_examples():
env = {
'ls_var_cache_pulp': LsVarCachePulp(context_wrap(LS_VAR_CACHE_PULP, path="insights_commands/ls_-lan_.var.cache.pulp")),
}
failed, total = doctest.testmod(ls_var_cache_pulp, globs=env)
assert failed == 0
|
RedHatInsights/insights-core
|
insights/parsers/tests/test_ls_var_cache_pulp.py
|
Python
|
apache-2.0
| 1,314 | 0.003805 |
import os
import unittest
from collections import namedtuple
from unittest.mock import patch
from ray.tune.function_runner import wrap_function
from ray.tune.integration.mlflow import MLflowLoggerCallback, MLflowLogger, \
mlflow_mixin, MLflowTrainableMixin
class MockTrial(
namedtuple("MockTrial",
["config", "trial_name", "trial_id", "logdir"])):
def __hash__(self):
return hash(self.trial_id)
def __str__(self):
return self.trial_name
MockRunInfo = namedtuple("MockRunInfo", ["run_id"])
class MockRun:
def __init__(self, run_id, tags=None):
self.run_id = run_id
self.tags = tags
self.info = MockRunInfo(run_id)
self.params = []
self.metrics = []
self.artifacts = []
def log_param(self, key, value):
self.params.append({key: value})
def log_metric(self, key, value):
self.metrics.append({key: value})
def log_artifact(self, artifact):
self.artifacts.append(artifact)
def set_terminated(self, status):
self.terminated = True
self.status = status
MockExperiment = namedtuple("MockExperiment", ["name", "experiment_id"])
class MockMlflowClient:
def __init__(self, tracking_uri=None, registry_uri=None):
self.tracking_uri = tracking_uri
self.registry_uri = registry_uri
self.experiments = [MockExperiment("existing_experiment", 0)]
self.runs = {0: []}
self.active_run = None
def set_tracking_uri(self, tracking_uri):
self.tracking_uri = tracking_uri
def get_experiment_by_name(self, name):
try:
index = self.experiment_names.index(name)
return self.experiments[index]
except ValueError:
return None
def get_experiment(self, experiment_id):
experiment_id = int(experiment_id)
try:
return self.experiments[experiment_id]
except IndexError:
return None
def create_experiment(self, name):
experiment_id = len(self.experiments)
self.experiments.append(MockExperiment(name, experiment_id))
self.runs[experiment_id] = []
return experiment_id
def create_run(self, experiment_id, tags=None):
experiment_runs = self.runs[experiment_id]
run_id = (experiment_id, len(experiment_runs))
run = MockRun(run_id=run_id, tags=tags)
experiment_runs.append(run)
return run
def start_run(self, experiment_id, run_name):
# Creates new run and sets it as active.
run = self.create_run(experiment_id)
self.active_run = run
def get_mock_run(self, run_id):
return self.runs[run_id[0]][run_id[1]]
def log_param(self, run_id, key, value):
run = self.get_mock_run(run_id)
run.log_param(key, value)
def log_metric(self, run_id, key, value, step):
run = self.get_mock_run(run_id)
run.log_metric(key, value)
def log_artifacts(self, run_id, local_dir):
run = self.get_mock_run(run_id)
run.log_artifact(local_dir)
def set_terminated(self, run_id, status):
run = self.get_mock_run(run_id)
run.set_terminated(status)
@property
def experiment_names(self):
return [e.name for e in self.experiments]
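# Note on the mock's run-id convention (see create_run above): a run id is
# the tuple (experiment_id, index-within-experiment), so the first run of
# experiment 1 is (1, 0) -- the value the assertions below compare against.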
def clear_env_vars():
if "MLFLOW_EXPERIMENT_NAME" in os.environ:
del os.environ["MLFLOW_EXPERIMENT_NAME"]
if "MLFLOW_EXPERIMENT_ID" in os.environ:
del os.environ["MLFLOW_EXPERIMENT_ID"]
class MLflowTest(unittest.TestCase):
@patch("mlflow.tracking.MlflowClient", MockMlflowClient)
def testMlFlowLoggerCallbackConfig(self):
# Explicitly pass in all args.
logger = MLflowLoggerCallback(
tracking_uri="test1",
registry_uri="test2",
experiment_name="test_exp")
logger.setup()
self.assertEqual(logger.client.tracking_uri, "test1")
self.assertEqual(logger.client.registry_uri, "test2")
self.assertListEqual(logger.client.experiment_names,
["existing_experiment", "test_exp"])
self.assertEqual(logger.experiment_id, 1)
# Check if client recognizes already existing experiment.
logger = MLflowLoggerCallback(experiment_name="existing_experiment")
logger.setup()
self.assertListEqual(logger.client.experiment_names,
["existing_experiment"])
self.assertEqual(logger.experiment_id, 0)
# Pass in experiment name as env var.
clear_env_vars()
os.environ["MLFLOW_EXPERIMENT_NAME"] = "test_exp"
logger = MLflowLoggerCallback()
logger.setup()
self.assertListEqual(logger.client.experiment_names,
["existing_experiment", "test_exp"])
self.assertEqual(logger.experiment_id, 1)
# Pass in existing experiment name as env var.
clear_env_vars()
os.environ["MLFLOW_EXPERIMENT_NAME"] = "existing_experiment"
logger = MLflowLoggerCallback()
logger.setup()
self.assertListEqual(logger.client.experiment_names,
["existing_experiment"])
self.assertEqual(logger.experiment_id, 0)
# Pass in existing experiment id as env var.
clear_env_vars()
os.environ["MLFLOW_EXPERIMENT_ID"] = "0"
logger = MLflowLoggerCallback()
logger.setup()
self.assertListEqual(logger.client.experiment_names,
["existing_experiment"])
self.assertEqual(logger.experiment_id, "0")
# Pass in non existing experiment id as env var.
clear_env_vars()
os.environ["MLFLOW_EXPERIMENT_ID"] = "500"
with self.assertRaises(ValueError):
logger = MLflowLoggerCallback()
logger.setup()
# Experiment name env var should take precedence over id env var.
clear_env_vars()
os.environ["MLFLOW_EXPERIMENT_NAME"] = "test_exp"
os.environ["MLFLOW_EXPERIMENT_ID"] = "0"
logger = MLflowLoggerCallback()
logger.setup()
self.assertListEqual(logger.client.experiment_names,
["existing_experiment", "test_exp"])
self.assertEqual(logger.experiment_id, 1)
@patch("mlflow.tracking.MlflowClient", MockMlflowClient)
def testMlFlowLoggerLogging(self):
clear_env_vars()
trial_config = {"par1": 4, "par2": 9.}
trial = MockTrial(trial_config, "trial1", 0, "artifact")
logger = MLflowLoggerCallback(
experiment_name="test1", save_artifact=True)
logger.setup()
# Check if run is created.
logger.on_trial_start(iteration=0, trials=[], trial=trial)
# New run should be created for this trial with correct tag.
mock_run = logger.client.runs[1][0]
self.assertDictEqual(mock_run.tags, {"trial_name": "trial1"})
self.assertTupleEqual(mock_run.run_id, (1, 0))
self.assertTupleEqual(logger._trial_runs[trial], mock_run.run_id)
# Params should be logged.
self.assertListEqual(mock_run.params, [{"par1": 4}, {"par2": 9}])
# When same trial is started again, new run should not be created.
logger.on_trial_start(iteration=0, trials=[], trial=trial)
self.assertEqual(len(logger.client.runs[1]), 1)
# Check metrics are logged properly.
result = {
"metric1": 0.8,
"metric2": 1,
"metric3": None,
"training_iteration": 0
}
logger.on_trial_result(0, [], trial, result)
mock_run = logger.client.runs[1][0]
# metric3 is not logged since it cannot be converted to float.
self.assertListEqual(mock_run.metrics, [{
"metric1": 0.8
}, {
"metric2": 1.0
}, {
"training_iteration": 0
}])
# Check that artifact is logged on termination.
logger.on_trial_complete(0, [], trial)
mock_run = logger.client.runs[1][0]
self.assertListEqual(mock_run.artifacts, ["artifact"])
self.assertTrue(mock_run.terminated)
self.assertEqual(mock_run.status, "FINISHED")
@patch("mlflow.tracking.MlflowClient", MockMlflowClient)
def testMlFlowLegacyLoggerConfig(self):
mlflow = MockMlflowClient()
with patch.dict("sys.modules", mlflow=mlflow):
clear_env_vars()
trial_config = {"par1": 4, "par2": 9.}
trial = MockTrial(trial_config, "trial1", 0, "artifact")
# No experiment_id is passed in config, should raise an error.
with self.assertRaises(ValueError):
logger = MLflowLogger(trial_config, "/tmp", trial)
trial_config.update({
"logger_config": {
"mlflow_tracking_uri": "test_tracking_uri",
"mlflow_experiment_id": 0
}
})
trial = MockTrial(trial_config, "trial2", 1, "artifact")
logger = MLflowLogger(trial_config, "/tmp", trial)
experiment_logger = logger._trial_experiment_logger
client = experiment_logger.client
self.assertEqual(client.tracking_uri, "test_tracking_uri")
# Check to make sure that a run was created on experiment_id 0.
self.assertEqual(len(client.runs[0]), 1)
mock_run = client.runs[0][0]
self.assertDictEqual(mock_run.tags, {"trial_name": "trial2"})
self.assertListEqual(mock_run.params, [{"par1": 4}, {"par2": 9}])
@patch("ray.tune.integration.mlflow._import_mlflow",
lambda: MockMlflowClient())
def testMlFlowMixinConfig(self):
clear_env_vars()
trial_config = {"par1": 4, "par2": 9.}
@mlflow_mixin
def train_fn(config):
return 1
train_fn.__mixins__ = (MLflowTrainableMixin, )
# No MLflow config passed in.
with self.assertRaises(ValueError):
wrapped = wrap_function(train_fn)(trial_config)
trial_config.update({"mlflow": {}})
# No tracking uri or experiment_id/name passed in.
with self.assertRaises(ValueError):
wrapped = wrap_function(train_fn)(trial_config)
# Invalid experiment-id
trial_config["mlflow"].update({"experiment_id": "500"})
# No tracking uri or experiment_id/name passed in.
with self.assertRaises(ValueError):
wrapped = wrap_function(train_fn)(trial_config)
trial_config["mlflow"].update({
"tracking_uri": "test_tracking_uri",
"experiment_name": "existing_experiment"
})
wrapped = wrap_function(train_fn)(trial_config)
client = wrapped._mlflow
self.assertEqual(client.tracking_uri, "test_tracking_uri")
self.assertTupleEqual(client.active_run.run_id, (0, 0))
with patch("ray.tune.integration.mlflow._import_mlflow",
lambda: client):
train_fn.__mixins__ = (MLflowTrainableMixin, )
wrapped = wrap_function(train_fn)(trial_config)
client = wrapped._mlflow
self.assertTupleEqual(client.active_run.run_id, (0, 1))
        # Set to an experiment that does not already exist; the mixin does
        # not create experiments, so this raises an error.
trial_config["mlflow"]["experiment_name"] = "new_experiment"
with self.assertRaises(ValueError):
wrapped = wrap_function(train_fn)(trial_config)
if __name__ == "__main__":
import pytest
import sys
sys.exit(pytest.main(["-v", __file__]))
|
pcmoritz/ray-1
|
python/ray/tune/tests/test_integration_mlflow.py
|
Python
|
apache-2.0
| 11,695 | 0 |
# __main__.py is used when a package is executed as a module, i.e.: `python -m pptx_downsizer`
if __name__ == '__main__':
from .pptx_downsizer import cli
cli()
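# Example shell invocation (the filename is hypothetical; accepted options
# are defined by cli() in pptx_downsizer.py):
#
#   python -m pptx_downsizer my_presentation.pptx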
|
scholer/pptx-downsizer
|
pptx_downsizer/__main__.py
|
Python
|
gpl-3.0
| 170 | 0.005882 |