| text | repo_name | path | language | license | size | score |
|---|---|---|---|---|---|---|
| stringlengths 6–947k | stringlengths 5–100 | stringlengths 4–231 | stringclasses (1 value) | stringclasses (15 values) | int64 6–947k | float64 0–0.34 |
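Each record below is one row of this table: the `text` column holds a source file, followed by its repository, path, language, license, size, and score. As a rough sketch of working with the dump, assuming the rows have been loaded into a pandas DataFrame with exactly these column names (the parquet file name is a placeholder, not part of the dataset):

```python
import pandas as pd

# Placeholder file name; the actual storage format of this dump is not given here.
df = pd.read_parquet("code_files.parquet")

# Keep permissively licensed files with a low score.
subset = df[df["license"].isin(["mit", "bsd-3-clause", "apache-2.0"]) & (df["score"] < 0.01)]
print(subset[["repo_name", "path", "size"]].head())
```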
import os
from pvfactors.geometry.timeseries import TsPointCoords, TsLineCoords
from pvfactors.geometry.pvrow import TsPVRow
from pvfactors.geometry.pvground import TsGround, TsGroundElement
import pandas as pd
import numpy as np
from pvfactors.geometry.pvrow import PVRow
from pvfactors.geometry.base import \
BaseSide, PVSegment, PVSurface, ShadeCollection
from pvfactors.config import MIN_X_GROUND, MAX_X_GROUND
def test_ts_pvrow():
"""Test timeseries pv row creation and shading cases.
Note that shading must always be zero when pv rows are flat"""
xy_center = (0, 2)
width = 2.
df_inputs = pd.DataFrame({
'rotation_vec': [20., -30., 0.],
'shaded_length_front': [1.3, 0., 1.9],
'shaded_length_back': [0, 0.3, 0.6]})
cut = {'front': 3, 'back': 4}
ts_pvrow = TsPVRow.from_raw_inputs(
xy_center, width, df_inputs.rotation_vec,
cut, df_inputs.shaded_length_front,
df_inputs.shaded_length_back)
# check segment index
assert len(ts_pvrow.front.list_segments) == 3
assert [s.index for s in ts_pvrow.front.list_segments] == [0, 1, 2]
# Check timeseries length of front and back segments
for seg in ts_pvrow.front.list_segments:
np.testing.assert_allclose(width / cut['front'], seg.length)
for seg in ts_pvrow.back.list_segments:
np.testing.assert_allclose(width / cut['back'], seg.length)
# Check shaded length on either sides of pv rows
expected_front_shading = np.where(df_inputs.rotation_vec,
df_inputs.shaded_length_front, 0.)
expected_back_shading = np.where(df_inputs.rotation_vec,
df_inputs.shaded_length_back, 0.)
np.testing.assert_allclose(expected_front_shading,
ts_pvrow.front.shaded_length)
np.testing.assert_allclose(expected_back_shading,
ts_pvrow.back.shaded_length)
def test_plot_ts_pvrow():
is_ci = os.environ.get('CI', False)
if not is_ci:
import matplotlib.pyplot as plt
# Create a PV row
xy_center = (0, 2)
width = 2.
df_inputs = pd.DataFrame({
'rotation_vec': [20., -30., 0.],
'shaded_length_front': [1.3, 0., 1.9],
'shaded_length_back': [0, 0.3, 0.6]})
cut = {'front': 3, 'back': 4}
ts_pvrow = TsPVRow.from_raw_inputs(
xy_center, width, df_inputs.rotation_vec,
cut, df_inputs.shaded_length_front,
df_inputs.shaded_length_back)
# Plot it at ts 0
f, ax = plt.subplots()
ts_pvrow.plot_at_idx(0, ax)
plt.show()
# Plot it at ts 1
f, ax = plt.subplots()
ts_pvrow.plot_at_idx(1, ax)
plt.show()
# Plot it at ts 2: flat case
f, ax = plt.subplots()
ts_pvrow.plot_at_idx(2, ax)
plt.show()
def test_ts_pvrow_to_geometry():
"""Check that the geometries are created correctly"""
xy_center = (0, 2)
width = 2.
df_inputs = pd.DataFrame({
'rotation_vec': [20., -30., 0.],
'shaded_length_front': [1.3, 0., 1.9],
'shaded_length_back': [0, 0.3, 0.6]})
cut = {'front': 3, 'back': 4}
param_names = ['test1', 'test2']
ts_pvrow = TsPVRow.from_raw_inputs(
xy_center, width, df_inputs.rotation_vec,
cut, df_inputs.shaded_length_front,
df_inputs.shaded_length_back, param_names=param_names)
pvrow = ts_pvrow.at(0)
# Check classes of geometries
assert isinstance(pvrow, PVRow)
assert isinstance(pvrow.front, BaseSide)
assert isinstance(pvrow.back, BaseSide)
assert isinstance(pvrow.front.list_segments[0], PVSegment)
assert isinstance(pvrow.back.list_segments[0].illum_collection,
ShadeCollection)
assert isinstance(pvrow.front.list_segments[1].illum_collection
.list_surfaces[0], PVSurface)
# Check some values
np.testing.assert_allclose(pvrow.front.shaded_length, 1.3)
front_surface = (pvrow.front.list_segments[1].illum_collection
.list_surfaces[0])
back_surface = (pvrow.back.list_segments[1].illum_collection
.list_surfaces[0])
n_vector_front = front_surface.n_vector
n_vector_back = back_surface.n_vector
expected_n_vec_front = np.array([-0.68404029, 1.87938524])
np.testing.assert_allclose(n_vector_front, expected_n_vec_front)
np.testing.assert_allclose(n_vector_back, - expected_n_vec_front)
assert front_surface.param_names == param_names
assert back_surface.param_names == param_names
def test_ts_ground_from_ts_pvrow():
"""Check that ground geometries are created correctly from ts pvrow"""
# Create a ts pv row
xy_center = (0, 2)
width = 2.
df_inputs = pd.DataFrame({
'rotation_vec': [20., -90., 0.],
'shaded_length_front': [1.3, 0., 1.9],
'shaded_length_back': [0, 0.3, 0.6]})
cut = {'front': 3, 'back': 4}
param_names = ['test1', 'test2']
ts_pvrow = TsPVRow.from_raw_inputs(
xy_center, width, df_inputs.rotation_vec,
cut, df_inputs.shaded_length_front,
df_inputs.shaded_length_back, param_names=param_names)
# Create ground from it
alpha_vec = np.deg2rad([80., 90., 70.])
ts_ground = TsGround.from_ts_pvrows_and_angles(
[ts_pvrow], alpha_vec, df_inputs.rotation_vec, param_names=param_names)
assert len(ts_ground.shadow_elements) == 1
# Check at specific times
ground_0 = ts_ground.at(0)
assert ground_0.n_surfaces == 4
assert ground_0.list_segments[0].shaded_collection.n_surfaces == 1
ground_1 = ts_ground.at(1) # vertical, sun above
assert ground_1.n_surfaces == 2 # only 2 illuminated surfaces
assert ground_1.list_segments[0].shaded_collection.n_surfaces == 0
assert ground_1.shaded_length == 0 # no shadow (since shadow length 0ish)
np.testing.assert_allclose(ground_0.shaded_length, 1.7587704831436)
np.testing.assert_allclose(ts_ground.at(2).shaded_length, width) # flat
# Check that all have surface params
for surf in ground_0.all_surfaces:
assert surf.param_names == param_names
def test_ts_ground_overlap():
shadow_coords = np.array([
[[[0, 0], [0, 0]], [[2, 1], [0, 0]]],
[[[1, 2], [0, 0]], [[5, 5], [0, 0]]]
])
overlap = [True, False]
# Test without overlap
ts_ground = TsGround.from_ordered_shadows_coords(shadow_coords)
np.testing.assert_allclose(ts_ground.shadow_elements[0].b2.x, [2, 1])
# Test with overlap
ts_ground = TsGround.from_ordered_shadows_coords(shadow_coords,
flag_overlap=overlap)
np.testing.assert_allclose(ts_ground.shadow_elements[0].b2.x, [1, 1])
def test_ts_ground_to_geometry():
# There should be an overlap
shadow_coords = np.array([
[[[0, 0], [0, 0]], [[2, 1], [0, 0]]],
[[[1, 2], [0, 0]], [[5, 5], [0, 0]]]
])
overlap = [True, False]
cut_point_coords = [TsPointCoords.from_array(np.array([[2, 2], [0, 0]]))]
# Test with overlap
ts_ground = TsGround.from_ordered_shadows_coords(
shadow_coords, flag_overlap=overlap, cut_point_coords=cut_point_coords)
# Run some checks for index 0
pvground = ts_ground.at(0, merge_if_flag_overlap=False,
with_cut_points=False)
assert pvground.n_surfaces == 4
assert pvground.list_segments[0].illum_collection.n_surfaces == 2
assert pvground.list_segments[0].shaded_collection.n_surfaces == 2
assert pvground.list_segments[0].shaded_collection.length == 5
np.testing.assert_allclose(pvground.shaded_length, 5)
# Run some checks for index 1
pvground = ts_ground.at(1, with_cut_points=False)
assert pvground.n_surfaces == 5
assert pvground.list_segments[0].illum_collection.n_surfaces == 3
assert pvground.list_segments[0].shaded_collection.n_surfaces == 2
assert pvground.list_segments[0].shaded_collection.length == 4
np.testing.assert_allclose(pvground.shaded_length, 4)
# Run some checks for index 0, when merging
pvground = ts_ground.at(0, merge_if_flag_overlap=True,
with_cut_points=False)
assert pvground.n_surfaces == 3
assert pvground.list_segments[0].illum_collection.n_surfaces == 2
assert pvground.list_segments[0].shaded_collection.n_surfaces == 1
assert pvground.list_segments[0].shaded_collection.length == 5
np.testing.assert_allclose(pvground.shaded_length, 5)
# Run some checks for index 0, when merging and with cut points
pvground = ts_ground.at(0, merge_if_flag_overlap=True,
with_cut_points=True)
assert pvground.n_surfaces == 4
assert pvground.list_segments[0].illum_collection.n_surfaces == 2
assert pvground.list_segments[0].shaded_collection.n_surfaces == 2
assert pvground.list_segments[0].shaded_collection.length == 5
np.testing.assert_allclose(pvground.shaded_length, 5)
def test_shadows_coords_left_right_of_cut_point():
"""Test that coords left and right of cut point are created correctly"""
# Ground inputs
shadow_coords = np.array([
[[[0], [0]], [[2], [0]]],
[[[3], [0]], [[5], [0]]]
], dtype=float)
overlap = [False]
# --- Create timeseries ground
cut_point = TsPointCoords([2.5], [0])
ts_ground = TsGround.from_ordered_shadows_coords(
shadow_coords, flag_overlap=overlap,
cut_point_coords=[cut_point])
# Get left and right shadows
shadows_left = ts_ground.shadow_coords_left_of_cut_point(0)
shadows_right = ts_ground.shadow_coords_right_of_cut_point(0)
# Reformat for testing
shadows_left = [shadow.as_array for shadow in shadows_left]
shadows_right = [shadow.as_array for shadow in shadows_right]
expected_shadows_left = [shadow_coords[0],
[cut_point.as_array, cut_point.as_array]]
expected_shadows_right = [[cut_point.as_array, cut_point.as_array],
shadow_coords[1]]
# Test that correct
np.testing.assert_allclose(shadows_left, expected_shadows_left)
np.testing.assert_allclose(shadows_right, expected_shadows_right)
# --- Case where pv rows are flat, cut point are inf
cut_point = TsPointCoords([np.inf], [0])
ts_ground = TsGround.from_ordered_shadows_coords(
shadow_coords, flag_overlap=overlap,
cut_point_coords=[cut_point])
# Get right shadows
shadows_right = ts_ground.shadow_coords_right_of_cut_point(0)
# Test that correct
maxi = MAX_X_GROUND
expected_shadows_right = np.array([[[[maxi], [0.]], [[maxi], [0.]]],
[[[maxi], [0.]], [[maxi], [0.]]]])
shadows_right = [shadow.as_array for shadow in shadows_right]
np.testing.assert_allclose(shadows_right, expected_shadows_right)
# --- Case where pv rows are flat, cut point are - inf
cut_point = TsPointCoords([- np.inf], [0])
ts_ground = TsGround.from_ordered_shadows_coords(
shadow_coords, flag_overlap=overlap,
cut_point_coords=[cut_point])
# Get left shadows
shadows_left = ts_ground.shadow_coords_left_of_cut_point(0)
# Test that correct
mini = MIN_X_GROUND
expected_shadows_left = np.array([[[[mini], [0.]], [[mini], [0.]]],
[[[mini], [0.]], [[mini], [0.]]]])
shadows_left = [shadow.as_array for shadow in shadows_left]
np.testing.assert_allclose(shadows_left, expected_shadows_left)
def test_ts_ground_elements_surfaces():
"""Check timeseries ground elements are created correctly"""
# Create timeseries coords
gnd_element_coords = TsLineCoords.from_array(
np.array([[[-1, -1], [0, 0]], [[1, 1], [0, 0]]]))
pt_coords_1 = TsPointCoords.from_array(np.array([[-0.5, -1], [0, 0]]))
pt_coords_2 = TsPointCoords.from_array(np.array([[0.5, 0], [0, 0]]))
# Create gnd element
gnd_element = TsGroundElement(
gnd_element_coords,
list_ordered_cut_pts_coords=[pt_coords_1, pt_coords_2])
# Check that structures contain the correct number of ts surfaces
assert len(gnd_element.surface_list) == 3
assert len(gnd_element.surface_dict[0]['left']) == 1
assert len(gnd_element.surface_dict[1]['left']) == 2
assert len(gnd_element.surface_dict[0]['right']) == 2
assert len(gnd_element.surface_dict[1]['right']) == 1
# Check that the objects are the same
assert (gnd_element.surface_list[0]
== gnd_element.surface_dict[0]['left'][0])
assert (gnd_element.surface_list[0]
== gnd_element.surface_dict[1]['left'][0])
assert (gnd_element.surface_list[1]
== gnd_element.surface_dict[0]['right'][0])
assert (gnd_element.surface_list[1]
== gnd_element.surface_dict[1]['left'][1])
assert (gnd_element.surface_list[2]
== gnd_element.surface_dict[0]['right'][1])
assert (gnd_element.surface_list[2]
== gnd_element.surface_dict[1]['right'][0])
# Now check surfaces lengths
np.testing.assert_allclose(gnd_element.surface_list[0].length, [0.5, 0])
np.testing.assert_allclose(gnd_element.surface_list[1].length, [1, 1])
np.testing.assert_allclose(gnd_element.surface_list[2].length, [0.5, 1])
# Check coords of surfaces
np.testing.assert_allclose(gnd_element.surface_list[0].b1.x, [-1, -1])
np.testing.assert_allclose(gnd_element.surface_list[0].b2.x, [-0.5, -1])
| SunPower/pvfactors | pvfactors/tests/test_geometry/test_timeseries.py | Python | bsd-3-clause | 13,582 | 0 |
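The tests above exercise the timeseries geometry API of pvfactors. A minimal sketch of the same construction-and-plot pattern, using only the calls and input values that appear in those tests (matplotlib is assumed to be installed):

```python
import pandas as pd
import matplotlib.pyplot as plt
from pvfactors.geometry.pvrow import TsPVRow

# Same raw inputs as in test_ts_pvrow: three timesteps, the last one flat.
df_inputs = pd.DataFrame({
    'rotation_vec': [20., -30., 0.],
    'shaded_length_front': [1.3, 0., 1.9],
    'shaded_length_back': [0., 0.3, 0.6]})
ts_pvrow = TsPVRow.from_raw_inputs(
    (0, 2), 2., df_inputs.rotation_vec, {'front': 3, 'back': 4},
    df_inputs.shaded_length_front, df_inputs.shaded_length_back)

print(ts_pvrow.front.shaded_length)  # zero at the flat timestep

# Plot the flat case (timestep index 2), as test_plot_ts_pvrow does.
fig, ax = plt.subplots()
ts_pvrow.plot_at_idx(2, ax)
plt.show()
```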
#
# (c) 2020 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from io import StringIO
import pytest
from units.compat import unittest
from ansible.plugins.connection import local
from ansible.playbook.play_context import PlayContext
class TestLocalConnectionClass(unittest.TestCase):
def test_local_connection_module(self):
play_context = PlayContext()
play_context.prompt = (
'[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: '
)
in_stream = StringIO()
self.assertIsInstance(local.Connection(play_context, in_stream), local.Connection)
| azaghal/ansible | test/units/plugins/connection/test_local.py | Python | gpl-3.0 | 1,355 | 0.001476 |
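The unittest above only checks that the local connection plugin can be instantiated. The same check written as a plain pytest function, a sketch reusing only the imports and calls from that test:

```python
from io import StringIO

from ansible.playbook.play_context import PlayContext
from ansible.plugins.connection import local


def test_local_connection_instantiation():
    # Building the plugin with a PlayContext and an input stream should
    # return a local.Connection instance, as asserted in the unittest above.
    play_context = PlayContext()
    assert isinstance(local.Connection(play_context, StringIO()), local.Connection)
```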
#!/usr/bin/python
# _*_ coding: utf-8 _*_
import zlib
s = b'witch which has which witches wrist watch'
print len(s)
t = zlib.compress(s)
print len(t)
print t
print zlib.decompress(t)
print zlib.crc32(s)
| louistin/thinkstation | a_byte_of_python/unit_15_standard_library/compress_test.py | Python | mit | 207 | 0 |
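The file above is Python 2 (bare `print` statements). A Python 3 sketch of the same zlib round trip, using only the standard zlib calls already shown:

```python
import zlib

s = b'witch which has which witches wrist watch'
print(len(s))              # length of the original bytes
t = zlib.compress(s)
print(len(t))              # compressed length (smaller thanks to the repetition)
print(t)
print(zlib.decompress(t))  # round-trips back to the original bytes
print(zlib.crc32(s))       # checksum of the original data
```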
# UrbanFootprint v1.5
# Copyright (C) 2017 Calthorpe Analytics
#
# This file is part of UrbanFootprint version 1.5
#
# UrbanFootprint is distributed under the terms of the GNU General
# Public License version 3, as published by the Free Software Foundation. This
# code is distributed WITHOUT ANY WARRANTY, without implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License v3 for more details; see <http://www.gnu.org/licenses/>.
import pwd
import shlex
import subprocess
from optparse import make_option
import os
from distutils import spawn
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from footprint.utils.postgres_utils import build_postgres_conn_string, postgres_env_password_loaded
class Command(BaseCommand):
args = '<destination_folder> (optional - if not specified use settings.py option)'
help = 'Creates a data dump'
# I hate having to use optparse. We should be using argparse.
# When https://code.djangoproject.com/ticket/19973 gets fixed, we can
# use the new way of parsing (which will likely use argparse instead).
# In the meantime we'll stick with the documented way of doing this
option_list = BaseCommand.option_list + (
make_option('--destination-folder',
action='store',
type='string',
dest='destination_folder',
default=getattr(settings, 'CALTHORPE_DATA_DUMP_LOCATION', ''),
help='output folder for daily dump'),
)
def handle(self, *args, **options):
rsync = spawn.find_executable('rsync')
if rsync is None:
raise CommandError('rsync not found')
pg_dump = spawn.find_executable('pg_dump')
if pg_dump is None:
raise CommandError('pg_dump not found')
if options['destination_folder'] == '':
raise CommandError('--destination-folder not specified in command line nor settings.py')
# make sure destination folder exists
if not os.path.exists(options['destination_folder']):
try:
os.makedirs(options['destination_folder'])
except Exception, e:
raise Exception("Cannot create directory with user %s. Exception %s" % (
pwd.getpwuid(os.getuid())[0],
e.message))
pg_output_file_name = os.path.join(options['destination_folder'], 'pg_dump.dmp')
media_output_copy_folder = os.path.join(options['destination_folder'], 'media')
# make sure destination daily media folder also exists
if not os.path.exists(media_output_copy_folder):
os.makedirs(media_output_copy_folder)
#################
#rsync folder
rsync += ' -rapthzvO {extra} {src} {dest}'.format(extra=settings.CALTHORPE_DAILY_DUMP_RSYNC_EXTRA_PARAMS,
src=settings.MEDIA_ROOT,
dest=media_output_copy_folder)
self.stdout.write(rsync + '\n')
output = self.exec_cmd(rsync)
self.stdout.write(output)
#################
#do database dump
print settings.DATABASES['default']
with postgres_env_password_loaded():
pg_dump += ' {pg_conn_string} -Fc -f {output_file_name}'.format(
pg_conn_string=build_postgres_conn_string(settings.DATABASES['default']),
output_file_name=pg_output_file_name)
output = self.exec_cmd(pg_dump)
self.stdout.write(output)
self.stdout.write('Wrote ' + pg_output_file_name + '\n')
def exec_cmd(self, cmd):
p = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out, err = p.communicate()
if p.returncode != 0:
raise CommandError('Error Executing "{cmd}\n{output}\n"'.format(cmd=cmd, output=out))
return out
| CalthorpeAnalytics/urbanfootprint | footprint/main/management/commands/create_datadump.py | Python | gpl-3.0 | 4,014 | 0.005232 |
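A short sketch of how a management command like this is usually invoked from Python rather than the shell; the command name `create_datadump` is inferred from the file path above and the destination folder is a placeholder:

```python
from django.core.management import call_command

# Equivalent to: python manage.py create_datadump --destination-folder /backups/daily
call_command('create_datadump', destination_folder='/backups/daily')
```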
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api.openstack.api_version_request \
import MAX_IMAGE_META_PROXY_API_VERSION
from nova.api.openstack.api_version_request \
import MAX_PROXY_API_SUPPORT_VERSION
from nova.api.openstack.api_version_request \
import MIN_WITHOUT_IMAGE_META_PROXY_API_VERSION
from nova.api.openstack.api_version_request \
import MIN_WITHOUT_PROXY_API_SUPPORT_VERSION
from nova.api.openstack.compute.schemas import limits
from nova.api.openstack.compute.views import limits as limits_views
from nova.api.openstack import wsgi
from nova.api import validation
from nova.policies import limits as limits_policies
from nova import quota
QUOTAS = quota.QUOTAS
# This is a list of limits which needs to filter out from the API response.
# This is due to the deprecation of network related proxy APIs, the related
# limit should be removed from the API also.
FILTERED_LIMITS_2_36 = ['floating_ips', 'security_groups',
'security_group_rules']
FILTERED_LIMITS_2_57 = list(FILTERED_LIMITS_2_36)
FILTERED_LIMITS_2_57.extend(['injected_files', 'injected_file_content_bytes'])
class LimitsController(wsgi.Controller):
"""Controller for accessing limits in the OpenStack API."""
@wsgi.Controller.api_version("2.1", MAX_PROXY_API_SUPPORT_VERSION)
@wsgi.expected_errors(())
@validation.query_schema(limits.limits_query_schema)
def index(self, req):
return self._index(req)
@wsgi.Controller.api_version(MIN_WITHOUT_PROXY_API_SUPPORT_VERSION, # noqa
MAX_IMAGE_META_PROXY_API_VERSION) # noqa
@wsgi.expected_errors(())
@validation.query_schema(limits.limits_query_schema)
def index(self, req):
return self._index(req, FILTERED_LIMITS_2_36)
@wsgi.Controller.api_version( # noqa
MIN_WITHOUT_IMAGE_META_PROXY_API_VERSION, '2.56') # noqa
@wsgi.expected_errors(())
@validation.query_schema(limits.limits_query_schema)
def index(self, req):
return self._index(req, FILTERED_LIMITS_2_36, max_image_meta=False)
@wsgi.Controller.api_version('2.57') # noqa
@wsgi.expected_errors(())
@validation.query_schema(limits.limits_query_schema_275, '2.75')
@validation.query_schema(limits.limits_query_schema, '2.57', '2.74')
def index(self, req):
return self._index(req, FILTERED_LIMITS_2_57, max_image_meta=False)
def _index(self, req, filtered_limits=None, max_image_meta=True):
"""Return all global limit information."""
context = req.environ['nova.context']
context.can(limits_policies.BASE_POLICY_NAME)
project_id = req.params.get('tenant_id', context.project_id)
quotas = QUOTAS.get_project_quotas(context, project_id,
usages=True)
builder = limits_views.ViewBuilder()
return builder.build(req, quotas, filtered_limits=filtered_limits,
max_image_meta=max_image_meta)
| rahulunair/nova | nova/api/openstack/compute/limits.py | Python | apache-2.0 | 3,590 | 0 |
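The `FILTERED_LIMITS_*` lists above name quotas that must be dropped from the response for newer microversions. A self-contained sketch of that filtering effect (the quota values are made up, and in the real controller the filtering happens inside the view builder rather than in a helper like this):

```python
FILTERED_LIMITS_2_36 = ['floating_ips', 'security_groups', 'security_group_rules']


def filter_limits(quotas, filtered_limits=None):
    """Drop any quota whose name appears in the filtered list."""
    filtered_limits = filtered_limits or []
    return {name: value for name, value in quotas.items()
            if name not in filtered_limits}


quotas = {'instances': 10, 'floating_ips': 10, 'security_groups': 10}
print(filter_limits(quotas, FILTERED_LIMITS_2_36))  # {'instances': 10}
```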
"""Config flow for OpenWeatherMap."""
from pyowm import OWM
from pyowm.exceptions.api_call_error import APICallError
from pyowm.exceptions.api_response_error import UnauthorizedError
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import (
CONF_API_KEY,
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_MODE,
CONF_NAME,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from .const import (
CONF_LANGUAGE,
DEFAULT_FORECAST_MODE,
DEFAULT_LANGUAGE,
DEFAULT_NAME,
FORECAST_MODES,
LANGUAGES,
)
from .const import DOMAIN # pylint:disable=unused-import
SCHEMA = vol.Schema(
{
vol.Required(CONF_API_KEY): str,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): str,
vol.Optional(CONF_LATITUDE): cv.latitude,
vol.Optional(CONF_LONGITUDE): cv.longitude,
vol.Optional(CONF_MODE, default=DEFAULT_FORECAST_MODE): vol.In(FORECAST_MODES),
vol.Optional(CONF_LANGUAGE, default=DEFAULT_LANGUAGE): vol.In(LANGUAGES),
}
)
class OpenWeatherMapConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Config flow for OpenWeatherMap."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
@staticmethod
@callback
def async_get_options_flow(config_entry):
"""Get the options flow for this handler."""
return OpenWeatherMapOptionsFlow(config_entry)
async def async_step_user(self, user_input=None):
"""Handle a flow initialized by the user."""
errors = {}
if user_input is not None:
latitude = user_input[CONF_LATITUDE]
longitude = user_input[CONF_LONGITUDE]
await self.async_set_unique_id(f"{latitude}-{longitude}")
self._abort_if_unique_id_configured()
try:
api_online = await _is_owm_api_online(
self.hass, user_input[CONF_API_KEY]
)
if not api_online:
errors["base"] = "invalid_api_key"
except UnauthorizedError:
errors["base"] = "invalid_api_key"
except APICallError:
errors["base"] = "cannot_connect"
if not errors:
return self.async_create_entry(
title=user_input[CONF_NAME], data=user_input
)
return self.async_show_form(step_id="user", data_schema=SCHEMA, errors=errors)
async def async_step_import(self, import_input=None):
"""Set the config entry up from yaml."""
config = import_input.copy()
if CONF_NAME not in config:
config[CONF_NAME] = DEFAULT_NAME
if CONF_LATITUDE not in config:
config[CONF_LATITUDE] = self.hass.config.latitude
if CONF_LONGITUDE not in config:
config[CONF_LONGITUDE] = self.hass.config.longitude
if CONF_MODE not in config:
config[CONF_MODE] = DEFAULT_FORECAST_MODE
if CONF_LANGUAGE not in config:
config[CONF_LANGUAGE] = DEFAULT_LANGUAGE
return await self.async_step_user(config)
class OpenWeatherMapOptionsFlow(config_entries.OptionsFlow):
"""Handle options."""
def __init__(self, config_entry):
"""Initialize options flow."""
self.config_entry = config_entry
async def async_step_init(self, user_input=None):
"""Manage the options."""
if user_input is not None:
return self.async_create_entry(title="", data=user_input)
return self.async_show_form(
step_id="init",
data_schema=self._get_options_schema(),
)
def _get_options_schema(self):
return vol.Schema(
{
vol.Optional(
CONF_MODE,
default=self.config_entry.options.get(
CONF_MODE, DEFAULT_FORECAST_MODE
),
): vol.In(FORECAST_MODES),
vol.Optional(
CONF_LANGUAGE,
default=self.config_entry.options.get(
CONF_LANGUAGE, DEFAULT_LANGUAGE
),
): vol.In(LANGUAGES),
}
)
async def _is_owm_api_online(hass, api_key):
owm = OWM(api_key)
return await hass.async_add_executor_job(owm.is_API_online)
| GenericStudent/home-assistant | homeassistant/components/openweathermap/config_flow.py | Python | apache-2.0 | 4,415 | 0.00068 |
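The voluptuous `SCHEMA` above both validates the user form and fills in defaults. A reduced, self-contained analogue of that behaviour (keys, defaults, and allowed modes below are simplified stand-ins, not the component's real constants):

```python
import voluptuous as vol

DEMO_SCHEMA = vol.Schema({
    vol.Required("api_key"): str,
    vol.Optional("name", default="OpenWeatherMap"): str,
    vol.Optional("mode", default="hourly"): vol.In(["hourly", "daily"]),
})

# Valid input: missing optional keys are filled with their defaults.
print(DEMO_SCHEMA({"api_key": "abc123"}))
# -> {'api_key': 'abc123', 'name': 'OpenWeatherMap', 'mode': 'hourly'}

# Invalid input: an unknown mode raises vol.Invalid.
try:
    DEMO_SCHEMA({"api_key": "abc123", "mode": "weekly"})
except vol.Invalid as err:
    print("rejected:", err)
```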
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class NetworkInterfacesOperations:
"""NetworkInterfacesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_07_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial(
self,
resource_group_name: str,
network_interface_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
network_interface_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
network_interface_name=network_interface_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore
async def get(
self,
resource_group_name: str,
network_interface_name: str,
expand: Optional[str] = None,
**kwargs: Any
) -> "_models.NetworkInterface":
"""Gets information about the specified network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkInterface, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_07_01.models.NetworkInterface
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterface"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
network_interface_name: str,
parameters: "_models.NetworkInterface",
**kwargs: Any
) -> "_models.NetworkInterface":
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterface"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'NetworkInterface')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore
async def begin_create_or_update(
self,
resource_group_name: str,
network_interface_name: str,
parameters: "_models.NetworkInterface",
**kwargs: Any
) -> AsyncLROPoller["_models.NetworkInterface"]:
"""Creates or updates a network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param parameters: Parameters supplied to the create or update network interface operation.
:type parameters: ~azure.mgmt.network.v2019_07_01.models.NetworkInterface
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either NetworkInterface or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2019_07_01.models.NetworkInterface]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterface"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
network_interface_name=network_interface_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore
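    # Hedged usage sketch (comments only, not part of the generated client):
    # callers typically drive this long-running operation through the returned
    # poller, e.g. assuming an authenticated client object named `client`:
    #
    #     poller = await client.network_interfaces.begin_create_or_update(
    #         "my-rg", "my-nic", nic_parameters)
    #     nic = await poller.result()  # waits for the operation to complete
    #
    # `client`, the resource names and `nic_parameters` are placeholders; only
    # the AsyncLROPoller/result() pattern comes from the docstring above.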
async def _update_tags_initial(
self,
resource_group_name: str,
network_interface_name: str,
parameters: "_models.TagsObject",
**kwargs: Any
) -> "_models.NetworkInterface":
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterface"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._update_tags_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_tags_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore
async def begin_update_tags(
self,
resource_group_name: str,
network_interface_name: str,
parameters: "_models.TagsObject",
**kwargs: Any
) -> AsyncLROPoller["_models.NetworkInterface"]:
"""Updates a network interface tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param parameters: Parameters supplied to update network interface tags.
:type parameters: ~azure.mgmt.network.v2019_07_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either NetworkInterface or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2019_07_01.models.NetworkInterface]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterface"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._update_tags_initial(
resource_group_name=resource_group_name,
network_interface_name=network_interface_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore
def list_all(
self,
**kwargs: Any
) -> AsyncIterable["_models.NetworkInterfaceListResult"]:
"""Gets all network interfaces in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_07_01.models.NetworkInterfaceListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_all.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/networkInterfaces'} # type: ignore
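    # Hedged usage sketch (comments only, not part of the generated client):
    # the AsyncItemPaged returned by list_all is normally consumed with
    # `async for`, e.g. assuming an authenticated client object named `client`:
    #
    #     async for nic in client.network_interfaces.list_all():
    #         print(nic.name)
    #
    # `client` is a placeholder; only the AsyncItemPaged return type comes
    # from the method above.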
def list(
self,
resource_group_name: str,
**kwargs: Any
) -> AsyncIterable["_models.NetworkInterfaceListResult"]:
"""Gets all network interfaces in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_07_01.models.NetworkInterfaceListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces'} # type: ignore
async def _get_effective_route_table_initial(
self,
resource_group_name: str,
network_interface_name: str,
**kwargs: Any
) -> Optional["_models.EffectiveRouteListResult"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.EffectiveRouteListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
accept = "application/json"
# Construct URL
url = self._get_effective_route_table_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('EffectiveRouteListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_effective_route_table_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/effectiveRouteTable'} # type: ignore
async def begin_get_effective_route_table(
self,
resource_group_name: str,
network_interface_name: str,
**kwargs: Any
) -> AsyncLROPoller["_models.EffectiveRouteListResult"]:
"""Gets all route tables applied to a network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either EffectiveRouteListResult or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2019_07_01.models.EffectiveRouteListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.EffectiveRouteListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._get_effective_route_table_initial(
resource_group_name=resource_group_name,
network_interface_name=network_interface_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('EffectiveRouteListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_effective_route_table.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/effectiveRouteTable'} # type: ignore
async def _list_effective_network_security_groups_initial(
self,
resource_group_name: str,
network_interface_name: str,
**kwargs: Any
) -> Optional["_models.EffectiveNetworkSecurityGroupListResult"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.EffectiveNetworkSecurityGroupListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
accept = "application/json"
# Construct URL
url = self._list_effective_network_security_groups_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('EffectiveNetworkSecurityGroupListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_list_effective_network_security_groups_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/effectiveNetworkSecurityGroups'} # type: ignore
async def begin_list_effective_network_security_groups(
self,
resource_group_name: str,
network_interface_name: str,
**kwargs: Any
) -> AsyncLROPoller["_models.EffectiveNetworkSecurityGroupListResult"]:
"""Gets all network security groups applied to a network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either EffectiveNetworkSecurityGroupListResult or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2019_07_01.models.EffectiveNetworkSecurityGroupListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.EffectiveNetworkSecurityGroupListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._list_effective_network_security_groups_initial(
resource_group_name=resource_group_name,
network_interface_name=network_interface_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('EffectiveNetworkSecurityGroupListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_list_effective_network_security_groups.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/effectiveNetworkSecurityGroups'} # type: ignore
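    # A hedged usage sketch for the two begin_* long-running operations above;
    # the variable names (`client`, `rg`, `nic_name`) are assumptions, not part
    # of this module:
    #
    #     poller = await client.network_interfaces.begin_list_effective_network_security_groups(rg, nic_name)
    #     result = await poller.result()  # EffectiveNetworkSecurityGroupListResult
    #
    # A poller can also be resumed later from poller.continuation_token(), which
    # is what the `continuation_token` keyword handled above is for.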
def list_virtual_machine_scale_set_vm_network_interfaces(
self,
resource_group_name: str,
virtual_machine_scale_set_name: str,
virtualmachine_index: str,
**kwargs: Any
) -> AsyncIterable["_models.NetworkInterfaceListResult"]:
"""Gets information about all network interfaces in a virtual machine in a virtual machine scale
set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine scale set.
:type virtual_machine_scale_set_name: str
:param virtualmachine_index: The virtual machine index.
:type virtualmachine_index: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_07_01.models.NetworkInterfaceListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-10-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_virtual_machine_scale_set_vm_network_interfaces.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'virtualmachineIndex': self._serialize.url("virtualmachine_index", virtualmachine_index, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_virtual_machine_scale_set_vm_network_interfaces.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces'} # type: ignore
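    # A hedged sketch of consuming the paged operation above: AsyncItemPaged is
    # an async iterator, so callers usually drain it with `async for`. The names
    # `client`, `rg`, `vmss_name` and `vm_index` are assumptions:
    #
    #     nics = client.network_interfaces.list_virtual_machine_scale_set_vm_network_interfaces(rg, vmss_name, vm_index)
    #     async for nic in nics:
    #         print(nic.name)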
def list_virtual_machine_scale_set_network_interfaces(
self,
resource_group_name: str,
virtual_machine_scale_set_name: str,
**kwargs: Any
) -> AsyncIterable["_models.NetworkInterfaceListResult"]:
"""Gets all network interfaces in a virtual machine scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine scale set.
:type virtual_machine_scale_set_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_07_01.models.NetworkInterfaceListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-10-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_virtual_machine_scale_set_network_interfaces.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_virtual_machine_scale_set_network_interfaces.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/networkInterfaces'} # type: ignore
async def get_virtual_machine_scale_set_network_interface(
self,
resource_group_name: str,
virtual_machine_scale_set_name: str,
virtualmachine_index: str,
network_interface_name: str,
expand: Optional[str] = None,
**kwargs: Any
) -> "_models.NetworkInterface":
"""Get the specified network interface in a virtual machine scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine scale set.
:type virtual_machine_scale_set_name: str
:param virtualmachine_index: The virtual machine index.
:type virtualmachine_index: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkInterface, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_07_01.models.NetworkInterface
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterface"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-10-01"
accept = "application/json"
# Construct URL
url = self.get_virtual_machine_scale_set_network_interface.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'virtualmachineIndex': self._serialize.url("virtualmachine_index", virtualmachine_index, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_virtual_machine_scale_set_network_interface.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces/{networkInterfaceName}'} # type: ignore
def list_virtual_machine_scale_set_ip_configurations(
self,
resource_group_name: str,
virtual_machine_scale_set_name: str,
virtualmachine_index: str,
network_interface_name: str,
expand: Optional[str] = None,
**kwargs: Any
) -> AsyncIterable["_models.NetworkInterfaceIPConfigurationListResult"]:
"""Get the specified network interface ip configuration in a virtual machine scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine scale set.
:type virtual_machine_scale_set_name: str
:param virtualmachine_index: The virtual machine index.
:type virtualmachine_index: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceIPConfigurationListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_07_01.models.NetworkInterfaceIPConfigurationListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceIPConfigurationListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-10-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_virtual_machine_scale_set_ip_configurations.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'virtualmachineIndex': self._serialize.url("virtualmachine_index", virtualmachine_index, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceIPConfigurationListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_virtual_machine_scale_set_ip_configurations.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces/{networkInterfaceName}/ipConfigurations'} # type: ignore
async def get_virtual_machine_scale_set_ip_configuration(
self,
resource_group_name: str,
virtual_machine_scale_set_name: str,
virtualmachine_index: str,
network_interface_name: str,
ip_configuration_name: str,
expand: Optional[str] = None,
**kwargs: Any
) -> "_models.NetworkInterfaceIPConfiguration":
"""Get the specified network interface ip configuration in a virtual machine scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine scale set.
:type virtual_machine_scale_set_name: str
:param virtualmachine_index: The virtual machine index.
:type virtualmachine_index: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param ip_configuration_name: The name of the ip configuration.
:type ip_configuration_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkInterfaceIPConfiguration, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_07_01.models.NetworkInterfaceIPConfiguration
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceIPConfiguration"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-10-01"
accept = "application/json"
# Construct URL
url = self.get_virtual_machine_scale_set_ip_configuration.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'virtualmachineIndex': self._serialize.url("virtualmachine_index", virtualmachine_index, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'ipConfigurationName': self._serialize.url("ip_configuration_name", ip_configuration_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('NetworkInterfaceIPConfiguration', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_virtual_machine_scale_set_ip_configuration.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces/{networkInterfaceName}/ipConfigurations/{ipConfigurationName}'} # type: ignore
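    # A hedged example for the direct (non-paged) getters above; `client` and the
    # resource names are assumptions. The optional `expand` argument flows into
    # the `$expand` query parameter constructed in the method body:
    #
    #     nic = await client.network_interfaces.get_virtual_machine_scale_set_network_interface(
    #         rg, vmss_name, vm_index, nic_name)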
| Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_07_01/aio/operations/_network_interfaces_operations.py | Python | mit | 64,200 | 0.005109 |
# SPDX-License-Identifier: GPL-2.0+
# Copyright (c) 2016 Google, Inc
# Written by Simon Glass <sjg@chromium.org>
#
# Entry-type module for the 16-bit x86 reset code for U-Boot
#
from binman.entry import Entry
from binman.etype.blob import Entry_blob
class Entry_x86_reset16(Entry_blob):
"""x86 16-bit reset code for U-Boot
Properties / Entry arguments:
- filename: Filename of u-boot-x86-reset16.bin (default
'u-boot-x86-reset16.bin')
x86 CPUs start up in 16-bit mode, even if they are 32-bit CPUs. This code
must be placed at a particular address. This entry holds that code. It is
typically placed at offset CONFIG_RESET_VEC_LOC. The code is responsible
for jumping to the x86-start16 code, which continues execution.
For 64-bit U-Boot, the 'x86_reset16_spl' entry type is used instead.
"""
def __init__(self, section, etype, node):
super().__init__(section, etype, node)
def GetDefaultFilename(self):
return 'u-boot-x86-reset16.bin'
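# Illustrative note (not from the original file): in a binman image description
# this entry is referenced as an 'x86-reset16' node, and a 'filename' property
# can override the default returned by GetDefaultFilename() above, roughly:
#
#     x86-reset16 {
#         filename = "u-boot-x86-reset16.bin";
#     };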
| Digilent/u-boot-digilent | tools/binman/etype/x86_reset16.py | Python | gpl-2.0 | 1,018 | 0.000982 |
import logging
import os
import datetime
import six
import humanfriendly
from pathlib import Path
from django.db import models
from django.utils.html import format_html
from django.utils.encoding import uri_to_iri
from django.core.management import call_command
from django.utils.safestring import mark_safe
from django.conf import settings
from django.contrib.auth.models import User
from django.db.models.signals import pre_save, post_init, post_save
from django.dispatch import receiver
from django.core.urlresolvers import reverse
from django.core.files import File
from sortedm2m.fields import SortedManyToManyField
from uuslug import uuslug
# from moviepy.editor import VideoFileClip # get video duration
from .my_storage import VodStorage
from admin_resumable.fields import (
ModelAdminResumableFileField, ModelAdminResumableImageField,
ModelAdminResumableMultiFileField, ModelAdminResumableRestoreFileField
)
from xpinyin import Pinyin # for pinyin search
if six.PY3:
from django.utils.encoding import smart_str
else:
from django.utils.encoding import smart_unicode as smart_str
"""
Copy data in XXX model:
>>>
from vodmanagement.models import *
objs=Vod.objects.all()
for i in range(0,10):
newobj=objs[0]
newobj.pk=None
newobj.save()
>>>
This script will copy 10 objs[0] in database
"""
class UserPermission(models.Model):
user = models.OneToOneField(User)
permission = models.CharField(max_length=100, blank=True, null=True)
end_date = models.DateTimeField(blank=True, null=True)
def __str__(self):
return str(self.user)
def has_permision(self):
delta = self.end_date.date() - datetime.date.today()
print(delta.days)
if delta.days >= 0:
return True
return False
class VodManager(models.Manager):
def active(self, *args, **kwargs):
return super(VodManager, self) # .filter(draft=False).filter(publish__lte=timezone.now())
def upload_location(instance, filename):
# filebase, extension = filename.split(".")
# return "%s/%s.%s" %(instance.id, instance.id, extension)
VodModel = instance.__class__
print('save')
    if VodModel.objects.count() != 0:
        # Use the last id + 1 so the folder name matches the id of the object
        # being created (see the explanation below); this also matches
        # upload_image_location().
        new_id = VodModel.objects.order_by("id").last().id + 1
else:
new_id = 0
"""
instance.__class__ gets the model Post. We must use this method because the model is defined below.
Then create a queryset ordered by the "id"s of each object,
Then we get the last object in the queryset with `.last()`
Which will give us the most recently created Model instance
We add 1 to it, so we get what should be the same id as the the post we are creating.
"""
print('save image')
return "%s/%s" % (new_id, filename)
def upload_image_location(instance, filename):
VodModel = instance.__class__
    if VodModel.objects.count() != 0:
new_id = VodModel.objects.order_by("id").last().id + 1
else:
new_id = 0
folder = instance.save_path
if folder == "default":
category = instance.category.name
else:
category = instance.category.name + '_' + folder
return "%s/images/%s/%s" % (category, new_id, filename)
def upload_record_image_location(instance, filename):
return "%s/images/%s" % (settings.RECORD_MEDIA_FOLDER, filename)
def default_description(instance):
default = instance.title
print(default)
return 'The %s description' % default
# Create your models here.
def default_filedir():
return settings.MEDIA_ROOT
# ---------------------------------------------------------------------
# if leave path blank,it will save it as the default file dir:settings.MEDIA_ROOT
class FileDirectory(models.Model):
path = models.CharField(max_length=512, default=default_filedir, blank=True)
class Meta:
verbose_name = '视频上传路径'
verbose_name_plural = '视频上传路径管理'
def __str__(self):
return self.path
def save(self, *args, **kwargs):
if self.path is None or self.path == "":
self.path = default_filedir()
super(FileDirectory, self).save(*args, **kwargs)
# ---------------------------------------------------------------------
# Two selections only:Common,Special purpose
TYPES = (
('common', 'Common'),
('special', 'Special purpose'),
)
VIDEO_QUALITY = [
('SD', '标清'),
('HD', '高清'),
('FHD', '超清'),
]
SAVE_PATH = (
('', settings.LOCAL_MEDIA_ROOT),
)
class VideoRegion(models.Model):
name = models.CharField(max_length=200, verbose_name='地区', unique=True)
class Meta:
verbose_name = '视频地区管理'
verbose_name_plural = '视频地区'
def __str__(self):
return self.name
class VideoCategory(models.Model):
name = models.CharField(max_length=128, verbose_name='分类名称')
type = models.CharField(max_length=128, choices=TYPES, default='common', verbose_name='类型')
isSecret = models.BooleanField(default=False, verbose_name='是否加密')
level = models.IntegerField(null=False, blank=False, default=1, choices=((1, '一级分类'), (2, '二级分类')),
verbose_name='分类等级')
subset = models.ManyToManyField('self', blank=True, verbose_name='分类关系')
class Meta:
verbose_name = '视频分类'
verbose_name_plural = '视频分类管理'
def __str__(self):
base_name = self.name + str(' (level %d)' % (self.level))
if self.subset.first() and self.level == 2:
return '--'.join([self.subset.first().name, base_name])
else:
return base_name
def save(self, *args, **kwargs):
super(VideoCategory, self).save(*args, **kwargs)
def colored_level(self):
color_code = 'red' if self.level == 1 else 'green'
return format_html(
'<span style="color:{};">{}</span>',
color_code,
self.get_level_display()
)
colored_level.short_description = '分级'
# ---------------------------------------------------------------------
class MultipleUpload(models.Model):
files = ModelAdminResumableMultiFileField(null=True, blank=True, storage=VodStorage(), verbose_name='文件')
save_path = models.CharField(max_length=128, blank=False, null=True, verbose_name='保存路径')
category = models.ForeignKey(VideoCategory, null=True, verbose_name='分类')
class Meta:
verbose_name = '批量上传'
verbose_name_plural = '批量上传管理'
# ---------------------------------------------------------------------
# TODO(hhy): Please Leave This Model Here. It Will Be Use In The Future.
# class VideoTag(models.Model):
# name = models.CharField(max_length=200, null=False, blank=False)
#
# def __str__(self):
# return self.name
class Restore(models.Model):
txt_file = models.FileField(blank=True, null=True, verbose_name='备份配置文件')
zip_file = ModelAdminResumableRestoreFileField(null=True, blank=True, storage=VodStorage(), verbose_name='压缩包')
save_path = models.CharField(max_length=128, blank=False, null=True) # ,default=FileDirectory.objects.first())
class Meta:
verbose_name = '视频导入'
verbose_name_plural = '视频导入'
def save(self, force_insert=False, force_update=False, using=None,
update_fields=None):
result = super(Restore, self).save()
file_path = self.txt_file.path
call_command('loaddata', file_path)
return result
class Vod(models.Model):
title = models.CharField(max_length=120, verbose_name='标题')
# image = models.ImageField(upload_to=upload_image_location, null=True, blank=True)
# video = models.FileField(null=True,blank=True,storage=VodStorage())
image = ModelAdminResumableImageField(null=True, blank=True, storage=VodStorage(), max_length=1000,
verbose_name='缩略图')
video = ModelAdminResumableFileField(null=True, blank=True, storage=VodStorage(), max_length=1000,
verbose_name='视频')
duration = models.CharField(max_length=50, blank=True, null=True, verbose_name='时长')
local_video = models.FilePathField(path=settings.LOCAL_MEDIA_ROOT, blank=True, recursive=True)
definition = models.CharField(max_length=10, choices=VIDEO_QUALITY, blank=False, default='H', verbose_name='清晰度')
category = models.ForeignKey(VideoCategory, null=True, blank=True, verbose_name='分类')
save_path = models.CharField(max_length=128, blank=False, null=True, default='default', verbose_name='保存路径') # ,default=FileDirectory.objects.first())
year = models.CharField(max_length=10, blank=False, null=True, default=datetime.datetime.now().year, verbose_name='年份')
region = models.ForeignKey(VideoRegion, to_field='name', null=True, blank=True, on_delete=models.SET_NULL, verbose_name='地区')
file_size = models.CharField(max_length=128, default='0B', editable=False, verbose_name='文件大小')
view_count = models.IntegerField(default=0, verbose_name='观看次数')
view_count_temp = 0
creator = models.ForeignKey(User, null=True, blank=False, editable=False)
description = models.TextField(blank=True, verbose_name='简介')
select_name = models.CharField(max_length=100, blank=False, verbose_name='选集名称', default='1')
updated = models.DateTimeField(auto_now=True, auto_now_add=False)
timestamp = models.DateTimeField(auto_now=False, auto_now_add=True, verbose_name='创建时间') # The first time added
slug = models.SlugField(unique=True, blank=True)
search_word = models.CharField(max_length=10000, null=True, blank=True)
# tags = models.ManyToManyField(VideoTag, blank=True)
video_list = SortedManyToManyField('self', blank=True)
# video_list = models.ManyToManyField('self', blank=True, symmetrical=False)
active = models.IntegerField(null=True, blank=False, default=0, choices=((1, 'Yes'), (0, 'No')))
progress = models.IntegerField(null=True, blank=True, default=0)
objects = VodManager()
class Meta:
verbose_name = '视频'
verbose_name_plural = '视频列表'
ordering = ["-timestamp", "-updated"]
def save(self, without_valid=False, *args, **kwargs):
logging.debug('==== 保存点播节目 %s ====' % self.title)
p = Pinyin()
full_pinyin = p.get_pinyin(smart_str(self.title), '')
first_pinyin = p.get_initials(smart_str(self.title), '').lower()
self.search_word = " ".join([full_pinyin, first_pinyin])
logging.debug("video path:", self.video)
if self.description is None or self.description == "":
self.description = default_description(self)
if self.local_video != '' and self.local_video is not None:
basename = Path(self.local_video).relative_to(Path(settings.LOCAL_MEDIA_ROOT))
self.video.name = str(Path(settings.LOCAL_MEDIA_URL) / basename)
logging.debug("save local_video to filefield done")
if without_valid:
ret = super(Vod, self).save(*args, **kwargs)
return ret
super(Vod, self).save(*args, **kwargs)
try:
if self.video != None and self.video != '':
relative_path = Path(self.video.name).relative_to(settings.MEDIA_URL) # Djan%20go.mp4
rel_name = uri_to_iri(relative_path) # Djan go.mp4
# Make sure the self.video.name is not in the LOCAL_FOLDER
if not self.video.name.startswith(settings.LOCAL_FOLDER_NAME) and \
not self.video.name.startswith(settings.RECORD_MEDIA_FOLDER):
self.video.name = str(rel_name)
                logging.debug('save_path: %s', self.save_path)
                logging.debug('video.name: %s', self.video.name)
                logging.debug('size: %s', self.video.file.size)
self.file_size = humanfriendly.format_size(self.video.file.size)
# duration = VideoFileClip(self.video.path).duration
# self.duration = time_formate(duration)
else:
print("video file is None")
except:
pass
try:
if self.image:
self.image.name = str(uri_to_iri(Path(self.image.name).relative_to(settings.MEDIA_URL)))
except:
pass
return super(Vod, self).save(*args, **kwargs)
def __unicode__(self):
return self.title
def __str__(self):
return self.title
def image_tag(self):
if self.image is not None and str(self.image) != "":
if os.path.exists(self.image.path):
return mark_safe('<img src="%s" width="160" height="90" />' % (self.image.url))
else:
return mark_safe('<img src="#" width="160" height="90" />')
else:
return mark_safe('<img src="%s" width="160" height="90" />' % (settings.DEFAULT_IMAGE_SRC))
image_tag.short_description = '缩略图'
def get_absolute_url(self):
# print("get absolute url:",self.slug)
return reverse("vod:vod-detail", kwargs={"slug": self.slug})
def add_view_count(self):
self.view_count_temp += 1
def colored_active(self):
color_code = 'red' if self.active == 0 else 'green'
return format_html(
'<span style="color:{};">{}</span>',
color_code,
self.get_active_display()
)
colored_active.short_description = '是否激活'
def video_format(self):
suffix = Path(self.video.name).suffix
color_code = 'green' if suffix in ['.mp4', '.m3u8'] else 'red'
return format_html(
'<span style="color:{};">{}</span>',
color_code,
suffix
)
video_format.short_description = '视频文件格式'
def pre_save_post_receiver(sender, instance, *args, **kwargs):
if not instance.slug:
instance.slug = uuslug(instance.title, instance=instance)
def post_init_receiver(sender, instance, *args, **kwargs):
pass
pre_save.connect(pre_save_post_receiver, sender=Vod)
post_init.connect(post_init_receiver, sender=Vod)
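# A minimal, hypothetical usage sketch (not part of the original module): when a
# Vod is saved, pre_save_post_receiver fills the slug via uuslug and save()
# builds search_word from the title's pinyin, roughly:
#
#     vod = Vod(title='测试视频', category=some_category)
#     vod.save()
#     vod.slug         # slug generated by uuslug from the title
#     vod.search_word  # full pinyin plus pinyin initials, used for search
#
# `some_category` above is an assumed, pre-existing VideoCategory instance.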
| xahhy/Django-vod | vodmanagement/models.py | Python | lgpl-3.0 | 14,356 | 0.002854 |
# coding=utf-8
"""Request handler for authentication."""
from __future__ import unicode_literals
import logging
import random
import string
import time
from builtins import range
import jwt
from medusa import app, helpers, notifiers
from medusa.logger.adapters.style import BraceAdapter
from medusa.server.api.v2.base import BaseRequestHandler
from six import text_type
from tornado.escape import json_decode
log = BraceAdapter(logging.getLogger(__name__))
log.logger.addHandler(logging.NullHandler())
class AuthHandler(BaseRequestHandler):
"""Auth request handler."""
#: resource name
name = 'authenticate'
#: allowed HTTP methods
allowed_methods = ('POST', )
def _check_authentication(self):
"""Override authentication check for the authentication endpoint."""
return None
def post(self, *args, **kwargs):
"""Request JWT."""
username = app.WEB_USERNAME
password = app.WEB_PASSWORD
# If the user hasn't set a username and/or password just let them login
if not username.strip() or not password.strip():
return self._login()
if not self.request.body:
return self._failed_login(error='No Credentials Provided')
if self.request.headers['content-type'] != 'application/json':
return self._failed_login(error='Incorrect content-type')
request_body = json_decode(self.request.body)
submitted_username = request_body.get('username')
submitted_password = request_body.get('password')
submitted_exp = request_body.get('exp', 86400)
if username != submitted_username or password != submitted_password:
return self._failed_login(error='Invalid credentials')
return self._login(submitted_exp)
def _login(self, exp=86400):
self.set_header('Content-Type', 'application/json')
if app.NOTIFY_ON_LOGIN and not helpers.is_ip_private(self.request.remote_ip):
notifiers.notify_login(self.request.remote_ip)
log.info('{user} logged into the API v2', {'user': app.WEB_USERNAME})
time_now = int(time.time())
return self._ok(data={
'token': jwt.encode({
'iss': 'Medusa ' + text_type(app.APP_VERSION),
'iat': time_now,
# @TODO: The jti should be saved so we can revoke tokens
'jti': ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(20)),
'exp': time_now + int(exp),
'username': app.WEB_USERNAME,
'apiKey': app.API_KEY
}, app.ENCRYPTION_SECRET, algorithm='HS256').decode('utf-8')
})
def _failed_login(self, error=None):
log.warning('{user} attempted a failed login to the API v2 from IP: {ip}', {
'user': app.WEB_USERNAME,
'ip': self.request.remote_ip
})
return self._unauthorized(error=error)
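    # A hedged sketch of how a caller could verify the token issued by _login
    # (standard PyJWT usage; the `token` variable is an assumption):
    #
    #     payload = jwt.decode(token, app.ENCRYPTION_SECRET, algorithms=['HS256'])
    #     payload['username'], payload['apiKey'], payload['exp']
    #
    # A token signed with a different ENCRYPTION_SECRET, or one past its `exp`
    # claim, raises an exception instead of decoding.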
| pymedusa/SickRage | medusa/server/api/v2/auth.py | Python | gpl-3.0 | 2,962 | 0.001013 |
#!/usr/bin/python
# This file is part of tcollector.
# Copyright (C) 2010 The tcollector Authors.
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or (at your
# option) any later version. This program is distributed in the hope that it
# will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser
# General Public License for more details. You should have received a copy
# of the GNU Lesser General Public License along with this program. If not,
# see <http://www.gnu.org/licenses/>.
#
# tcollector.py
#
"""Simple manager for collection scripts that run and gather data.
The tcollector gathers the data and sends it to the TSD for storage."""
#
# by Mark Smith <msmith@stumbleupon.com>.
#
import atexit
import errno
import fcntl
import logging
import os
import random
import re
import signal
import socket
import subprocess
import sys
import threading
import time
from logging.handlers import RotatingFileHandler
from Queue import Queue
from Queue import Empty
from Queue import Full
from optparse import OptionParser
# SignalFX mirroring is optional: the client library is only needed when an API
# key is passed to SenderThread (see send_data_to_signalfx below), so guard the
# import instead of making it a hard dependency.
try:
    import signalfx
except ImportError:
    signalfx = None
# global variables.
COLLECTORS = {}
GENERATION = 0
DEFAULT_LOG = '/var/log/tcollector.log'
LOG = logging.getLogger('tcollector')
ALIVE = True
# If the SenderThread catches more than this many consecutive uncaught
# exceptions, something is not right and tcollector will shutdown.
# Hopefully some kind of supervising daemon will then restart it.
MAX_UNCAUGHT_EXCEPTIONS = 100
DEFAULT_PORT = 4242
MAX_REASONABLE_TIMESTAMP = 2209212000 # Good until Tue 3 Jan 14:00:00 GMT 2040
# How long to wait for datapoints before assuming
# a collector is dead and restarting it
ALLOWED_INACTIVITY_TIME = 600 # seconds
MAX_SENDQ_SIZE = 10000
MAX_READQ_SIZE = 100000
def register_collector(collector):
"""Register a collector with the COLLECTORS global"""
assert isinstance(collector, Collector), "collector=%r" % (collector,)
# store it in the global list and initiate a kill for anybody with the
# same name that happens to still be hanging around
if collector.name in COLLECTORS:
col = COLLECTORS[collector.name]
if col.proc is not None:
LOG.error('%s still has a process (pid=%d) and is being reset,'
' terminating', col.name, col.proc.pid)
col.shutdown()
COLLECTORS[collector.name] = collector
class ReaderQueue(Queue):
"""A Queue for the reader thread"""
def nput(self, value):
"""A nonblocking put, that simply logs and discards the value when the
queue is full, and returns false if we dropped."""
try:
self.put(value, False)
except Full:
LOG.error("DROPPED LINE: %s", value)
return False
return True
class Collector(object):
"""A Collector is a script that is run that gathers some data
and prints it out in standard TSD format on STDOUT. This
class maintains all of the state information for a given
collector and gives us utility methods for working with
it."""
def __init__(self, colname, interval, filename, mtime=0, lastspawn=0):
"""Construct a new Collector."""
self.name = colname
self.interval = interval
self.filename = filename
self.lastspawn = lastspawn
self.proc = None
self.nextkill = 0
self.killstate = 0
self.dead = False
self.mtime = mtime
self.generation = GENERATION
self.buffer = ""
self.datalines = []
# Maps (metric, tags) to (value, repeated, line, timestamp) where:
# value: Last value seen.
# repeated: boolean, whether the last value was seen more than once.
# line: The last line that was read from that collector.
# timestamp: Time at which we saw the value for the first time.
# This dict is used to keep track of and remove duplicate values.
# Since it might grow unbounded (in case we see many different
# combinations of metrics and tags) someone needs to regularly call
# evict_old_keys() to remove old entries.
self.values = {}
self.lines_sent = 0
self.lines_received = 0
self.lines_invalid = 0
self.last_datapoint = int(time.time())
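    # Illustrative (not executed) example of one entry in self.values, assuming
    # a collector emitted "proc.loadavg.1min 1288946927 0.36 host=foo":
    #
    #     self.values[('proc.loadavg.1min', ' host=foo')] = \
    #         ('0.36', False, 'proc.loadavg.1min 1288946927 0.36 host=foo', 1288946927)
    #
    # (The tags key keeps its leading space because it is taken verbatim from
    # the regex match in ReaderThread.process_line.)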
def read(self):
"""Read bytes from our subprocess and store them in our temporary
line storage buffer. This needs to be non-blocking."""
# we have to use a buffer because sometimes the collectors
# will write out a bunch of data points at one time and we
# get some weird sized chunk. This read call is non-blocking.
# now read stderr for log messages, we could buffer here but since
# we're just logging the messages, I don't care to
try:
out = self.proc.stderr.read()
if out:
LOG.debug('reading %s got %d bytes on stderr',
self.name, len(out))
for line in out.splitlines():
LOG.warning('%s: %s', self.name, line)
except IOError, (err, msg):
if err != errno.EAGAIN:
raise
except:
LOG.exception('uncaught exception in stderr read')
# we have to use a buffer because sometimes the collectors will write
# out a bunch of data points at one time and we get some weird sized
# chunk. This read call is non-blocking.
try:
self.buffer += self.proc.stdout.read()
if len(self.buffer):
LOG.debug('reading %s, buffer now %d bytes',
self.name, len(self.buffer))
except IOError, (err, msg):
if err != errno.EAGAIN:
raise
except:
# sometimes the process goes away in another thread and we don't
# have it anymore, so log an error and bail
LOG.exception('uncaught exception in stdout read')
return
# iterate for each line we have
while self.buffer:
idx = self.buffer.find('\n')
if idx == -1:
break
# one full line is now found and we can pull it out of the buffer
line = self.buffer[0:idx].strip()
if line:
self.datalines.append(line)
self.last_datapoint = int(time.time())
self.buffer = self.buffer[idx+1:]
def collect(self):
"""Reads input from the collector and returns the lines up to whomever
is calling us. This is a generator that returns a line as it
becomes available."""
while self.proc is not None:
self.read()
if not len(self.datalines):
return
while len(self.datalines):
yield self.datalines.pop(0)
def shutdown(self):
"""Cleanly shut down the collector"""
if not self.proc:
return
try:
if self.proc.poll() is None:
kill(self.proc)
for attempt in range(5):
if self.proc.poll() is not None:
return
LOG.info('Waiting %ds for PID %d (%s) to exit...'
% (5 - attempt, self.proc.pid, self.name))
time.sleep(1)
kill(self.proc, signal.SIGKILL)
self.proc.wait()
except:
# we really don't want to die as we're trying to exit gracefully
LOG.exception('ignoring uncaught exception while shutting down')
def evict_old_keys(self, cut_off):
"""Remove old entries from the cache used to detect duplicate values.
Args:
cut_off: A UNIX timestamp. Any value that's older than this will be
removed from the cache.
"""
        for key in self.values.keys():
            # use a local name that doesn't shadow the `time` module
            last_ts = self.values[key][3]
            if last_ts < cut_off:
                del self.values[key]
class StdinCollector(Collector):
"""A StdinCollector simply reads from STDIN and provides the
data. This collector presents a uniform interface for the
ReaderThread, although unlike a normal collector, read()/collect()
will be blocking."""
def __init__(self):
super(StdinCollector, self).__init__('stdin', 0, '<stdin>')
# hack to make this work. nobody else will rely on self.proc
# except as a test in the stdin mode.
self.proc = True
def read(self):
"""Read lines from STDIN and store them. We allow this to
be blocking because there should only ever be one
StdinCollector and no normal collectors, so the ReaderThread
is only serving us and we're allowed to block it."""
global ALIVE
line = sys.stdin.readline()
if line:
self.datalines.append(line.rstrip())
else:
ALIVE = False
def shutdown(self):
pass
class ReaderThread(threading.Thread):
"""The main ReaderThread is responsible for reading from the collectors
and assuring that we always read from the input no matter what.
All data read is put into the self.readerq Queue, which is
consumed by the SenderThread."""
def __init__(self, dedupinterval, evictinterval):
"""Constructor.
Args:
dedupinterval: If a metric sends the same value over successive
intervals, suppress sending the same value to the TSD until
this many seconds have elapsed. This helps graphs over narrow
time ranges still see timeseries with suppressed datapoints.
evictinterval: In order to implement the behavior above, the
code needs to keep track of the last value seen for each
combination of (metric, tags). Values older than
evictinterval will be removed from the cache to save RAM.
Invariant: evictinterval > dedupinterval
"""
assert evictinterval > dedupinterval, "%r <= %r" % (evictinterval,
dedupinterval)
super(ReaderThread, self).__init__()
self.readerq = ReaderQueue(MAX_READQ_SIZE)
self.lines_collected = 0
self.lines_dropped = 0
self.dedupinterval = dedupinterval
self.evictinterval = evictinterval
def run(self):
"""Main loop for this thread. Just reads from collectors,
does our input processing and de-duping, and puts the data
into the queue."""
LOG.debug("ReaderThread up and running")
lastevict_time = 0
# we loop every second for now. ideally we'll setup some
# select or other thing to wait for input on our children,
# while breaking out every once in a while to setup selects
# on new children.
while ALIVE:
for col in all_living_collectors():
for line in col.collect():
self.process_line(col, line)
if self.dedupinterval != 0: # if 0 we do not use dedup
now = int(time.time())
if now - lastevict_time > self.evictinterval:
lastevict_time = now
now -= self.evictinterval
for col in all_collectors():
col.evict_old_keys(now)
# and here is the loop that we really should get rid of, this
# just prevents us from spinning right now
time.sleep(1)
def process_line(self, col, line):
"""Parses the given line and appends the result to the reader queue."""
self.lines_collected += 1
col.lines_received += 1
if len(line) >= 1024: # Limit in net.opentsdb.tsd.PipelineFactory
LOG.warning('%s line too long: %s', col.name, line)
col.lines_invalid += 1
return
parsed = re.match('^([-_./a-zA-Z0-9]+)\s+' # Metric name.
'(\d+)\s+' # Timestamp.
'(\S+?)' # Value (int or float).
'((?:\s+[-_./a-zA-Z0-9]+=[-_./a-zA-Z0-9]+)*)$', # Tags
line)
if parsed is None:
LOG.warning('%s sent invalid data: %s', col.name, line)
col.lines_invalid += 1
return
metric, timestamp, value, tags = parsed.groups()
timestamp = int(timestamp)
# De-dupe detection... To reduce the number of points we send to the
# TSD, we suppress sending values of metrics that don't change to
# only once every 10 minutes (which is also when TSD changes rows
# and how much extra time the scanner adds to the beginning/end of a
# graph interval in order to correctly calculate aggregated values).
# When the values do change, we want to first send the previous value
# with what the timestamp was when it first became that value (to keep
# slopes of graphs correct).
#
if self.dedupinterval != 0: # if 0 we do not use dedup
key = (metric, tags)
if key in col.values:
# if the timestamp isn't > than the previous one, ignore this value
if timestamp <= col.values[key][3]:
LOG.error("Timestamp out of order: metric=%s%s,"
" old_ts=%d >= new_ts=%d - ignoring data point"
" (value=%r, collector=%s)", metric, tags,
col.values[key][3], timestamp, value, col.name)
col.lines_invalid += 1
return
elif timestamp >= MAX_REASONABLE_TIMESTAMP:
LOG.error("Timestamp is too far out in the future: metric=%s%s"
" old_ts=%d, new_ts=%d - ignoring data point"
" (value=%r, collector=%s)", metric, tags,
col.values[key][3], timestamp, value, col.name)
return
# if this data point is repeated, store it but don't send.
# store the previous timestamp, so when/if this value changes
# we send the timestamp when this metric first became the current
# value instead of the last. Fall through if we reach
# the dedup interval so we can print the value.
if (col.values[key][0] == value and
(timestamp - col.values[key][3] < self.dedupinterval)):
col.values[key] = (value, True, line, col.values[key][3])
return
# we might have to append two lines if the value has been the same
# for a while and we've skipped one or more values. we need to
# replay the last value we skipped (if changed) so the jumps in
# our graph are accurate,
if ((col.values[key][1] or
(timestamp - col.values[key][3] >= self.dedupinterval))
and col.values[key][0] != value):
col.lines_sent += 1
if not self.readerq.nput(col.values[key][2]):
self.lines_dropped += 1
# now we can reset for the next pass and send the line we actually
# want to send
# col.values is a dict of tuples, with the key being the metric and
# tags (essentially the same as wthat TSD uses for the row key).
# The array consists of:
# [ the metric's value, if this value was repeated, the line of data,
# the value's timestamp that it last changed ]
col.values[key] = (value, False, line, timestamp)
col.lines_sent += 1
if not self.readerq.nput(line):
self.lines_dropped += 1
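    # A worked example of the line format parsed above (values are hypothetical):
    #
    #     "proc.loadavg.1min 1288946927 0.36 host=foo cpu=0"
    #      metric            timestamp  value tags...
    #
    # If the same (metric, tags) pair keeps reporting 0.36, only the first point
    # is forwarded until the value changes or dedupinterval seconds elapse; on a
    # change, the last suppressed line is replayed first so graph slopes stay
    # correct.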
class SenderThread(threading.Thread):
"""The SenderThread is responsible for maintaining a connection
to the TSD and sending the data we're getting over to it. This
thread is also responsible for doing any sort of emergency
buffering we might need to do if we can't establish a connection
and we need to spool to disk. That isn't implemented yet."""
def __init__(self, reader, dryrun, hosts, self_report_stats, tags, signalfx_api_key=None):
"""Constructor.
Args:
reader: A reference to a ReaderThread instance.
dryrun: If true, data points will be printed on stdout instead of
being sent to the TSD.
hosts: List of (host, port) tuples defining list of TSDs
self_report_stats: If true, the reader thread will insert its own
stats into the metrics reported to TSD, as if those metrics had
been read from a collector.
tags: A dictionary of tags to append for every data point.
"""
super(SenderThread, self).__init__()
self.dryrun = dryrun
self.reader = reader
self.tags = sorted(tags.items())
self.hosts = hosts # A list of (host, port) pairs.
# Randomize hosts to help even out the load.
random.shuffle(self.hosts)
self.blacklisted_hosts = set() # The 'bad' (host, port) pairs.
self.current_tsd = -1 # Index in self.hosts where we're at.
self.host = None # The current TSD host we've selected.
self.port = None # The port of the current TSD.
self.tsd = None # The socket connected to the aforementioned TSD.
self.last_verify = 0
self.sendq = []
self.self_report_stats = self_report_stats
if signalfx_api_key:
LOG.info('Configuring to send to SignalFX in parallel')
self.sfx = signalfx.SignalFx(signalfx_api_key)
else:
self.sfx = None
def pick_connection(self):
"""Picks up a random host/port connection."""
# Try to get the next host from the list, until we find a host that
# isn't in the blacklist, or until we run out of hosts (i.e. they
# are all blacklisted, which typically happens when we lost our
# connectivity to the outside world).
for self.current_tsd in xrange(self.current_tsd + 1, len(self.hosts)):
hostport = self.hosts[self.current_tsd]
if hostport not in self.blacklisted_hosts:
break
else:
LOG.info('No more healthy hosts, retry with previously blacklisted')
random.shuffle(self.hosts)
self.blacklisted_hosts.clear()
self.current_tsd = 0
hostport = self.hosts[self.current_tsd]
self.host, self.port = hostport
LOG.info('Selected connection: %s:%d', self.host, self.port)
def blacklist_connection(self):
"""Marks the current TSD host we're trying to use as blacklisted.
Blacklisted hosts will get another chance to be elected once there
will be no more healthy hosts."""
# FIXME: Enhance this naive strategy.
LOG.info('Blacklisting %s:%s for a while', self.host, self.port)
self.blacklisted_hosts.add((self.host, self.port))
def run(self):
"""Main loop. A simple scheduler. Loop waiting for 5
seconds for data on the queue. If there's no data, just
loop and make sure our connection is still open. If there
is data, wait 5 more seconds and grab all of the pending data and
send it. A little better than sending every line as its
own packet."""
errors = 0 # How many uncaught exceptions in a row we got.
while ALIVE:
try:
self.maintain_conn()
try:
line = self.reader.readerq.get(True, 5)
except Empty:
continue
self.sendq.append(line)
time.sleep(5) # Wait for more data
while True:
# prevents self.sendq fast growing in case of sending fails
# in send_data()
if len(self.sendq) > MAX_SENDQ_SIZE:
break
try:
line = self.reader.readerq.get(False)
except Empty:
break
self.sendq.append(line)
self.send_data()
errors = 0 # We managed to do a successful iteration.
except (ArithmeticError, EOFError, EnvironmentError, LookupError,
ValueError), e:
errors += 1
if errors > MAX_UNCAUGHT_EXCEPTIONS:
shutdown()
raise
LOG.exception('Uncaught exception in SenderThread, ignoring')
time.sleep(1)
continue
except:
LOG.exception('Uncaught exception in SenderThread, going to exit')
shutdown()
raise
def verify_conn(self):
"""Periodically verify that our connection to the TSD is OK
and that the TSD is alive/working."""
if self.tsd is None:
return False
# if the last verification was less than a minute ago, don't re-verify
if self.last_verify > time.time() - 60:
return True
# we use the version command as it is very low effort for the TSD
# to respond
LOG.debug('verifying our TSD connection is alive')
try:
self.tsd.sendall('version\n')
except socket.error, msg:
self.tsd = None
self.blacklist_connection()
return False
bufsize = 4096
while ALIVE:
# try to read as much data as we can. at some point this is going
# to block, but we have set the timeout low when we made the
# connection
try:
buf = self.tsd.recv(bufsize)
except socket.error, msg:
self.tsd = None
self.blacklist_connection()
return False
# If we don't get a response to the `version' request, the TSD
# must be dead or overloaded.
if not buf:
self.tsd = None
self.blacklist_connection()
return False
# Woah, the TSD has a lot of things to tell us... Let's make
# sure we read everything it sent us by looping once more.
if len(buf) == bufsize:
continue
# If everything is good, send out our meta stats. This
# helps to see what is going on with the tcollector.
if self.self_report_stats:
strs = [
('reader.lines_collected',
'', self.reader.lines_collected),
('reader.lines_dropped',
'', self.reader.lines_dropped)
]
for col in all_living_collectors():
strs.append(('collector.lines_sent', 'collector='
+ col.name, col.lines_sent))
strs.append(('collector.lines_received', 'collector='
+ col.name, col.lines_received))
strs.append(('collector.lines_invalid', 'collector='
+ col.name, col.lines_invalid))
ts = int(time.time())
strout = ["tcollector.%s %d %d %s"
% (x[0], ts, x[2], x[1]) for x in strs]
for string in strout:
self.sendq.append(string)
break # TSD is alive.
# if we get here, we assume the connection is good
self.last_verify = time.time()
return True
def maintain_conn(self):
"""Safely connect to the TSD and ensure that it's up and
running and that we're not talking to a ghost connection
(no response)."""
# dry runs are always good
if self.dryrun:
return
# connection didn't verify, so create a new one. we might be in
# this method for a long time while we sort this out.
try_delay = 1
while ALIVE:
if self.verify_conn():
return
# increase the try delay by some amount and some random value,
# in case the TSD is down for a while. delay at most
# approximately 10 minutes.
try_delay *= 1 + random.random()
if try_delay > 600:
try_delay *= 0.5
LOG.debug('SenderThread blocking %0.2f seconds', try_delay)
time.sleep(try_delay)
# Now actually try the connection.
self.pick_connection()
try:
addresses = socket.getaddrinfo(self.host, self.port,
socket.AF_UNSPEC,
socket.SOCK_STREAM, 0)
except socket.gaierror, e:
# Don't croak on transient DNS resolution issues.
if e[0] in (socket.EAI_AGAIN, socket.EAI_NONAME,
socket.EAI_NODATA):
LOG.debug('DNS resolution failure: %s: %s', self.host, e)
continue
raise
for family, socktype, proto, canonname, sockaddr in addresses:
try:
self.tsd = socket.socket(family, socktype, proto)
self.tsd.settimeout(15)
self.tsd.connect(sockaddr)
# if we get here it connected
break
except socket.error, msg:
LOG.warning('Connection attempt failed to %s:%d: %s',
self.host, self.port, msg)
self.tsd.close()
self.tsd = None
if not self.tsd:
LOG.error('Failed to connect to %s:%d', self.host, self.port)
self.blacklist_connection()
def add_tags_to_line(self, line):
for tag, value in self.tags:
if ' %s=' % tag not in line:
line += ' %s=%s' % (tag, value)
return line
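# Illustrative example (hypothetical tags and metric): with self.tags set to
# [('host', 'web01'), ('dc', 'us-east')] and an incoming line of
# 'proc.loadavg.1min 1510000000 0.42 dc=eu-west', only the missing tag is
# appended, yielding
# 'proc.loadavg.1min 1510000000 0.42 dc=eu-west host=web01'.
# A tag name already present on the line is never overwritten.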
def send_data(self):
"""Sends outstanding data in self.sendq to the TSD in one operation."""
# construct the output string
out = ''
# when debug logging is enabled we use the less efficient variant
if LOG.level == logging.DEBUG:
for line in self.sendq:
line = "put %s" % self.add_tags_to_line(line)
out += line + "\n"
LOG.debug('SENDING: %s', line)
else:
out = "".join("put %s\n" % self.add_tags_to_line(line) for line in self.sendq)
if not out:
LOG.debug('send_data no data?')
return
# try sending our data. if an exception occurs, just error and
# try sending again next time.
try:
if self.dryrun:
print out
else:
self.tsd.sendall(out)
# If SignalFX Api Key is given, attempt to send to SignalFX
if self.sfx:
self.send_data_to_signalfx()
self.sendq = []
except socket.error, msg:
LOG.error('failed to send data: %s', msg)
try:
self.tsd.close()
except socket.error:
pass
self.tsd = None
self.blacklist_connection()
# FIXME: we should be reading the result at some point to drain
# the packets out of the kernel's queue
def send_data_to_signalfx(self):
try:
gauges = []
for line in self.sendq:
split_line = self.add_tags_to_line(line).split()
metric, timestamp, value = split_line[:3]
tags = dict(tag.split('=') for tag in split_line[3:])
gauges.append({
'metric': metric,
'value': self.num(value),
'timestamp': int(timestamp)*1000, # Convert from seconds to milliseconds
'dimensions': tags})
self.sfx.send(gauges=gauges)
except:
LOG.error('Failed to send data to signalfx: %s' % sys.exc_info()[0])
def num(self, s):
try:
return int(s)
except ValueError:
try:
return float(s)
except:
LOG.error('Failed to convert "%s" to a number' % s)
raise
def setup_logging(logfile=DEFAULT_LOG, max_bytes=None, backup_count=None, stdout=False):
"""Sets up logging and associated handlers."""
LOG.setLevel(logging.INFO)
if stdout:
ch = logging.StreamHandler(sys.stdout)
elif backup_count is not None and max_bytes is not None:
assert backup_count > 0
assert max_bytes > 0
ch = RotatingFileHandler(logfile, 'a', max_bytes, backup_count)
else: # Setup stream handler.
ch = logging.StreamHandler(sys.stdout)
ch.setFormatter(logging.Formatter('%(asctime)s %(name)s[%(process)d] '
'%(levelname)s: %(message)s'))
LOG.addHandler(ch)
def parse_cmdline(argv):
"""Parses the command-line."""
# get arguments
default_cdir = os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])),
'collectors')
parser = OptionParser(description='Manages collectors which gather '
'data and report back.')
parser.add_option('-c', '--collector-dir', dest='cdir', metavar='DIR',
default=default_cdir,
help='Directory where the collectors are located.')
parser.add_option('-d', '--dry-run', dest='dryrun', action='store_true',
default=False,
help='Don\'t actually send anything to the TSD, '
'just print the datapoints.')
parser.add_option('-D', '--daemonize', dest='daemonize', action='store_true',
default=False, help='Run as a background daemon.')
parser.add_option('-H', '--host', dest='host', default='localhost',
metavar='HOST',
help='Hostname to use to connect to the TSD.')
parser.add_option('-L', '--hosts-list', dest='hosts', default=False,
metavar='HOSTS',
help='List of host:port to connect to tsd\'s (comma separated).')
parser.add_option('--no-tcollector-stats', dest='no_tcollector_stats',
default=False, action='store_true',
help='Prevent tcollector from reporting its own stats to TSD')
parser.add_option('-s', '--stdin', dest='stdin', action='store_true',
default=False,
help='Run once, read and dedup data points from stdin.')
parser.add_option('-p', '--port', dest='port', type='int',
default=DEFAULT_PORT, metavar='PORT',
help='Port to connect to the TSD instance on. '
'default=%default')
parser.add_option('-v', dest='verbose', action='store_true', default=False,
help='Verbose mode (log debug messages).')
parser.add_option('-t', '--tag', dest='tags', action='append',
default=[], metavar='TAG',
help='Tags to append to all timeseries we send, '
'e.g.: -t TAG=VALUE -t TAG2=VALUE')
parser.add_option('--tags-file', help='file containing tag info like TAG1=VALUE to append to all timeseries we send')
parser.add_option('-P', '--pidfile', dest='pidfile',
default='/var/run/tcollector.pid',
metavar='FILE', help='Write our pidfile')
parser.add_option('--dedup-interval', dest='dedupinterval', type='int',
default=300, metavar='DEDUPINTERVAL',
help='Number of seconds in which successive duplicate '
'datapoints are suppressed before sending to the TSD. '
'Use zero to disable. '
'default=%default')
parser.add_option('--evict-interval', dest='evictinterval', type='int',
default=6000, metavar='EVICTINTERVAL',
help='Number of seconds after which to remove cached '
'values of old data points to save memory. '
'default=%default')
parser.add_option('--max-bytes', dest='max_bytes', type='int',
default=64 * 1024 * 1024,
help='Maximum bytes per a logfile.')
parser.add_option('--backup-count', dest='backup_count', type='int',
default=0, help='Maximum number of logfiles to backup.')
parser.add_option('--logfile', dest='logfile', type='str',
default=DEFAULT_LOG,
help='Filename where logs are written to.')
parser.add_option('--stdout', dest='stdout', action='store_true',
default=False,
help='Print logs to stdout.')
parser.add_option('--signalfx-api-key', dest='signalfx_api_key', default=None,
metavar='KEY',
help='SignalFX Api Key to send to SignalFX REST API in parallel')
(options, args) = parser.parse_args(args=argv[1:])
if options.dedupinterval < 0:
parser.error('--dedup-interval must be at least 0 seconds')
if options.evictinterval <= options.dedupinterval:
parser.error('--evict-interval must be strictly greater than '
'--dedup-interval')
# We cannot write to stdout when we're a daemon, so make sure the rotating
# file handler is usable by defaulting backup_count to 1.
if (options.daemonize or options.max_bytes) and not options.backup_count:
options.backup_count = 1
return (options, args)
def daemonize():
"""Performs the necessary dance to become a background daemon."""
if os.fork():
os._exit(0)
os.chdir("/")
os.umask(022)
os.setsid()
os.umask(0)
if os.fork():
os._exit(0)
stdin = open(os.devnull)
stdout = open(os.devnull, 'w')
os.dup2(stdin.fileno(), 0)
os.dup2(stdout.fileno(), 1)
os.dup2(stdout.fileno(), 2)
stdin.close()
stdout.close()
for fd in xrange(3, 1024):
try:
os.close(fd)
except OSError: # This FD wasn't opened...
pass # ... ignore the exception.
def setup_python_path(collector_dir):
"""Sets up PYTHONPATH so that collectors can easily import common code."""
mydir = os.path.dirname(collector_dir)
libdir = os.path.join(mydir, 'collectors', 'lib')
if not os.path.isdir(libdir):
return
pythonpath = os.environ.get('PYTHONPATH', '')
if pythonpath:
pythonpath += ':'
pythonpath += mydir
os.environ['PYTHONPATH'] = pythonpath
LOG.debug('Set PYTHONPATH to %r', pythonpath)
def main(argv):
"""The main tcollector entry point and loop."""
options, args = parse_cmdline(argv)
if options.daemonize:
daemonize()
setup_logging(options.logfile, options.max_bytes or None,
options.backup_count or None, stdout=options.stdout)
if options.verbose:
LOG.setLevel(logging.DEBUG) # up our level
if options.pidfile:
write_pid(options.pidfile)
# validate everything
tags = {}
def validate_and_add_tag(tag):
if re.match('^[-_.a-z0-9]+=\S+$', tag, re.IGNORECASE) is None:
assert False, 'Tag string "%s" is invalid.' % tag
k, v = tag.split('=', 1)
if k in tags:
assert False, 'Tag "%s" already declared.' % k
tags[k] = v
for tag in options.tags:
validate_and_add_tag(tag)
if options.tags_file is not None:
with open(options.tags_file) as tags_file:
for line in tags_file:
validate_and_add_tag(line)
if not 'host' in tags and not options.stdin:
tags['host'] = socket.gethostname()
LOG.warning('Tag "host" not specified, defaulting to %s.', tags['host'])
options.cdir = os.path.realpath(options.cdir)
if not os.path.isdir(options.cdir):
LOG.fatal('No such directory: %s', options.cdir)
return 1
modules = load_etc_dir(options, tags)
setup_python_path(options.cdir)
# gracefully handle death for normal termination paths and abnormal
atexit.register(shutdown)
for sig in (signal.SIGTERM, signal.SIGINT):
signal.signal(sig, shutdown_signal)
# at this point we're ready to start processing, so start the ReaderThread
# so we can have it running and pulling in data for us
reader = ReaderThread(options.dedupinterval, options.evictinterval)
reader.start()
# prepare list of (host, port) of TSDs given on CLI
if not options.hosts:
options.hosts = [(options.host, options.port)]
else:
def splitHost(hostport):
if ":" in hostport:
# Check if we have an IPv6 address.
if hostport[0] == "[" and "]:" in hostport:
host, port = hostport.split("]:")
host = host[1:]
else:
host, port = hostport.split(":")
return (host, int(port))
return (hostport, DEFAULT_PORT)
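# Illustrative parses (hypothetical hosts): splitHost('tsd1:4242') returns
# ('tsd1', 4242), splitHost('[::1]:4242') strips the brackets and returns
# ('::1', 4242), and splitHost('tsd2') falls back to ('tsd2', DEFAULT_PORT).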
options.hosts = [splitHost(host) for host in options.hosts.split(",")]
if options.host != "localhost" or options.port != DEFAULT_PORT:
options.hosts.append((options.host, options.port))
# If a SignalFX key is passed in, attempt to load signalfx
signalfx_api_key = None
if options.signalfx_api_key:
try:
global signalfx
import signalfx
signalfx_api_key = options.signalfx_api_key
except ImportError:
LOG.warning('A SignalFX API Key was given, but no signalfx python library installation was detected')
# and setup the sender to start writing out to the tsd
sender = SenderThread(reader, options.dryrun, options.hosts,
not options.no_tcollector_stats, tags, signalfx_api_key=signalfx_api_key)
sender.start()
LOG.info('SenderThread startup complete')
# if we're in stdin mode, build a stdin collector and just join on the
# reader thread since there's nothing else for us to do here
if options.stdin:
register_collector(StdinCollector())
stdin_loop(options, modules, sender, tags)
else:
sys.stdin.close()
main_loop(options, modules, sender, tags)
# We're exiting, make sure we don't leave any collector behind.
for col in all_living_collectors():
col.shutdown()
LOG.debug('Shutting down -- joining the reader thread.')
reader.join()
LOG.debug('Shutting down -- joining the sender thread.')
sender.join()
def stdin_loop(options, modules, sender, tags):
"""The main loop of the program that runs when we are in stdin mode."""
global ALIVE
next_heartbeat = int(time.time() + 600)
while ALIVE:
time.sleep(15)
reload_changed_config_modules(modules, options, sender, tags)
now = int(time.time())
if now >= next_heartbeat:
LOG.info('Heartbeat (%d collectors running)'
% sum(1 for col in all_living_collectors()))
next_heartbeat = now + 600
def main_loop(options, modules, sender, tags):
"""The main loop of the program that runs when we're not in stdin mode."""
next_heartbeat = int(time.time() + 600)
while ALIVE:
populate_collectors(options.cdir)
reload_changed_config_modules(modules, options, sender, tags)
reap_children()
check_children()
spawn_children()
time.sleep(15)
now = int(time.time())
if now >= next_heartbeat:
LOG.info('Heartbeat (%d collectors running)'
% sum(1 for col in all_living_collectors()))
next_heartbeat = now + 600
def list_config_modules(etcdir):
"""Returns an iterator that yields the name of all the config modules."""
if not os.path.isdir(etcdir):
return iter(()) # Empty iterator.
return (name for name in os.listdir(etcdir)
if (name.endswith('.py')
and os.path.isfile(os.path.join(etcdir, name))))
def load_etc_dir(options, tags):
"""Loads any Python module from tcollector's own 'etc' directory.
Returns: A dict of path -> (module, timestamp).
"""
etcdir = os.path.join(options.cdir, 'etc')
sys.path.append(etcdir) # So we can import modules from the etc dir.
modules = {} # path -> (module, timestamp)
for name in list_config_modules(etcdir):
path = os.path.join(etcdir, name)
module = load_config_module(name, options, tags)
modules[path] = (module, os.path.getmtime(path))
return modules
def load_config_module(name, options, tags):
"""Imports the config module of the given name
The 'name' argument can be a string, in which case the module will be
loaded by name, or it can be a module object, in which case the module
will get reloaded.
If the module has an 'onload' function, calls it.
Returns: the reference to the module loaded.
"""
if isinstance(name, str):
LOG.info('Loading %s', name)
d = {}
# Strip the trailing .py
module = __import__(name[:-3], d, d)
else:
module = reload(name)
onload = module.__dict__.get('onload')
if callable(onload):
try:
onload(options, tags)
except:
LOG.fatal('Exception while loading %s', name)
raise
return module
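# Illustrative config module (hypothetical file etc/mytags.py):
#
#     def onload(options, tags):
#         tags.setdefault('env', 'staging')
#
# Such a module is imported (or reloaded) here and its onload() is called
# with the parsed command-line options and the global tag dict.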
def reload_changed_config_modules(modules, options, sender, tags):
"""Reloads any changed modules from the 'etc' directory.
Args:
modules: A dict of path -> (module, timestamp).
options: The parsed command-line options.
sender: The SenderThread instance.
tags: The dict of tags passed to each config module's onload().
Returns: whether or not anything has changed.
"""
etcdir = os.path.join(options.cdir, 'etc')
current_modules = set(list_config_modules(etcdir))
current_paths = set(os.path.join(etcdir, name)
for name in current_modules)
changed = False
# Reload any module that has changed.
for path, (module, timestamp) in modules.iteritems():
if path not in current_paths: # Module was removed.
continue
mtime = os.path.getmtime(path)
if mtime > timestamp:
LOG.info('Reloading %s, file has changed', path)
module = load_config_module(module, options, tags)
modules[path] = (module, mtime)
changed = True
# Remove any module that has been removed.
for path in set(modules).difference(current_paths):
LOG.info('%s has been removed, tcollector should be restarted', path)
del modules[path]
changed = True
# Check for any modules that may have been added.
for name in current_modules:
path = os.path.join(etcdir, name)
if path not in modules:
module = load_config_module(name, options, tags)
modules[path] = (module, os.path.getmtime(path))
changed = True
return changed
def write_pid(pidfile):
"""Write our pid to a pidfile."""
f = open(pidfile, "w")
try:
f.write(str(os.getpid()))
finally:
f.close()
def all_collectors():
"""Generator to return all collectors."""
return COLLECTORS.itervalues()
# collectors that are not marked dead
def all_valid_collectors():
"""Generator to return all defined collectors that haven't been marked
dead in the past hour, allowing temporarily broken collectors a
chance at redemption."""
now = int(time.time())
for col in all_collectors():
if not col.dead or (now - col.lastspawn > 3600):
yield col
# collectors that have a process attached (currently alive)
def all_living_collectors():
"""Generator to return all defined collectors that have
an active process."""
for col in all_collectors():
if col.proc is not None:
yield col
def shutdown_signal(signum, frame):
"""Called when we get a signal and need to terminate."""
LOG.warning("shutting down, got signal %d", signum)
shutdown()
def kill(proc, signum=signal.SIGTERM):
os.killpg(proc.pid, signum)
def shutdown():
"""Called by atexit and when we receive a signal, this ensures we properly
terminate any outstanding children."""
global ALIVE
# prevent repeated calls
if not ALIVE:
return
# notify threads of program termination
ALIVE = False
LOG.info('shutting down children')
# tell everyone to die
for col in all_living_collectors():
col.shutdown()
LOG.info('exiting')
sys.exit(1)
def reap_children():
"""When a child process dies, we have to determine why it died and whether
or not we need to restart it. This method manages that logic."""
for col in all_living_collectors():
now = int(time.time())
# FIXME: this is not robust. the asyncproc module joins on the
# reader threads when you wait if that process has died. this can cause
# slow dying processes to hold up the main loop. good for now though.
status = col.proc.poll()
if status is None:
continue
col.proc = None
# behavior based on status. a code 0 is normal termination, code 13
# is used to indicate that we don't want to restart this collector.
# any other status code is an error and is logged.
if status == 13:
LOG.info('removing %s from the list of collectors (by request)',
col.name)
col.dead = True
elif status != 0:
LOG.warning('collector %s terminated after %d seconds with '
'status code %d, marking dead',
col.name, now - col.lastspawn, status)
col.dead = True
else:
register_collector(Collector(col.name, col.interval, col.filename,
col.mtime, col.lastspawn))
def check_children():
"""When a child process hasn't received a datapoint in a while,
assume it's died in some fashion and restart it."""
for col in all_living_collectors():
now = int(time.time())
if col.last_datapoint < (now - ALLOWED_INACTIVITY_TIME):
# It's too old, kill it
LOG.warning('Terminating collector %s after %d seconds of inactivity',
col.name, now - col.last_datapoint)
col.shutdown()
register_collector(Collector(col.name, col.interval, col.filename,
col.mtime, col.lastspawn))
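# Illustrative scenario (hypothetical threshold): if ALLOWED_INACTIVITY_TIME
# were 600 seconds, a collector that has not emitted a data point for more
# than ten minutes is shut down here and immediately re-registered so that
# spawn_children() can restart it on its next pass.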
def set_nonblocking(fd):
"""Sets the given file descriptor to non-blocking mode."""
fl = fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK
fcntl.fcntl(fd, fcntl.F_SETFL, fl)
def spawn_collector(col):
"""Takes a Collector object and creates a process for it."""
LOG.info('%s (interval=%d) needs to be spawned', col.name, col.interval)
# FIXME: do custom integration of Python scripts into memory/threads
# if re.search('\.py$', col.name) is not None:
# ... load the py module directly instead of using a subprocess ...
try:
col.proc = subprocess.Popen(col.filename, stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
close_fds=True,
preexec_fn=os.setsid)
except OSError, e:
LOG.error('Failed to spawn collector %s: %s' % (col.filename, e))
return
# Update the last spawn time only now that the collector has actually been
# spawned; other logic relies on this timestamp, and it makes no sense to
# update it if the collector didn't actually start.
col.lastspawn = int(time.time())
set_nonblocking(col.proc.stdout.fileno())
set_nonblocking(col.proc.stderr.fileno())
if col.proc.pid > 0:
col.dead = False
LOG.info('spawned %s (pid=%d)', col.name, col.proc.pid)
return
# FIXME: handle errors better
LOG.error('failed to spawn collector: %s', col.filename)
def spawn_children():
"""Iterates over our defined collectors and performs the logic to
determine if we need to spawn, kill, or otherwise take some
action on them."""
if not ALIVE:
return
for col in all_valid_collectors():
now = int(time.time())
if col.interval == 0:
if col.proc is None:
spawn_collector(col)
elif col.interval <= now - col.lastspawn:
if col.proc is None:
spawn_collector(col)
continue
# I'm not very satisfied with this path. It seems fragile and
# overly complex, maybe we should just rely on the asyncproc
# terminate method, but that would make the main tcollector
# block until it dies... :|
if col.nextkill > now:
continue
if col.killstate == 0:
LOG.warning('warning: %s (interval=%d, pid=%d) overstayed '
'its welcome, SIGTERM sent',
col.name, col.interval, col.proc.pid)
kill(col.proc)
col.nextkill = now + 5
col.killstate = 1
elif col.killstate == 1:
LOG.error('error: %s (interval=%d, pid=%d) still not dead, '
'SIGKILL sent',
col.name, col.interval, col.proc.pid)
kill(col.proc, signal.SIGKILL)
col.nextkill = now + 5
col.killstate = 2
else:
LOG.error('error: %s (interval=%d, pid=%d) needs manual '
'intervention to kill it',
col.name, col.interval, col.proc.pid)
col.nextkill = now + 300
def populate_collectors(coldir):
"""Maintains our internal list of valid collectors. This walks the
collector directory and looks for files. In subsequent calls, this
also looks for changes to the files -- new, removed, or updated files,
and takes the right action to bring the state of our running processes
in line with the filesystem."""
global GENERATION
GENERATION += 1
# get numerics from scriptdir, we're only setup to handle numeric paths
# which define intervals for our monitoring scripts
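# Illustrative layout (hypothetical collector names):
#   collectors/0/procstats.py  -> long-running collector (interval 0)
#   collectors/60/du.sh        -> respawned roughly every 60 seconds
# Anything that is not a numeric sub-directory is skipped by the loop below.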
for interval in os.listdir(coldir):
if not interval.isdigit():
continue
interval = int(interval)
for colname in os.listdir('%s/%d' % (coldir, interval)):
if colname.startswith('.'):
continue
filename = '%s/%d/%s' % (coldir, interval, colname)
if os.path.isfile(filename) and os.access(filename, os.X_OK):
mtime = os.path.getmtime(filename)
# if this collector is already 'known', then check if it's
# been updated (new mtime) so we can kill off the old one
# (but only if it's interval 0, else we'll just get
# it next time it runs)
if colname in COLLECTORS:
col = COLLECTORS[colname]
# if we get a dupe, then ignore the one we're trying to
# add now. there is probably a more robust way of doing
# this...
if col.interval != interval:
LOG.error('two collectors with the same name %s and '
'different intervals %d and %d',
colname, interval, col.interval)
continue
# we have to increase the generation or we will kill
# this script again
col.generation = GENERATION
if col.mtime < mtime:
LOG.info('%s has been updated on disk', col.name)
col.mtime = mtime
if not col.interval:
col.shutdown()
LOG.info('Respawning %s', col.name)
register_collector(Collector(colname, interval,
filename, mtime))
else:
register_collector(Collector(colname, interval, filename,
mtime))
# now iterate over everybody and look for old generations
to_delete = []
for col in all_collectors():
if col.generation < GENERATION:
LOG.info('collector %s removed from the filesystem, forgetting',
col.name)
col.shutdown()
to_delete.append(col.name)
for name in to_delete:
del COLLECTORS[name]
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
optimizely/tcollector
|
tcollector.py
|
Python
|
lgpl-3.0
| 53,880 | 0.001188 |
#!/usr/bin/env python
import numpy as np
import os
import shutil
import mss
import matplotlib
matplotlib.use('TkAgg')
from datetime import datetime
from matplotlib.figure import Figure
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg as FigCanvas
from PIL import ImageTk, Image
import sys
PY3_OR_LATER = sys.version_info[0] >= 3
if PY3_OR_LATER:
# Python 3 specific definitions
import tkinter as tk
import tkinter.ttk as ttk
import tkinter.messagebox as tkMessageBox
else:
# Python 2 specific definitions
import Tkinter as tk
import ttk
import tkMessageBox
from utils import Screenshot, XboxController
IMAGE_SIZE = (320, 240)
IDLE_SAMPLE_RATE = 1500
SAMPLE_RATE = 200
IMAGE_TYPE = ".png"
class MainWindow():
""" Main frame of the application
"""
def __init__(self):
self.root = tk.Tk()
self.sct = mss.mss()
self.root.title('Data Acquisition')
self.root.geometry("660x325")
self.root.resizable(False, False)
# Init controller
self.controller = XboxController()
# Create GUI
self.create_main_panel()
# Timer
self.rate = IDLE_SAMPLE_RATE
self.sample_rate = SAMPLE_RATE
self.idle_rate = IDLE_SAMPLE_RATE
self.recording = False
self.t = 0
self.pause_timer = False
self.on_timer()
self.root.mainloop()
def create_main_panel(self):
# Panels
top_half = tk.Frame(self.root)
top_half.pack(side=tk.TOP, expand=True, padx=5, pady=5)
message = tk.Label(self.root, text="(Note: UI updates are disabled while recording)")
message.pack(side=tk.TOP, padx=5)
bottom_half = tk.Frame(self.root)
bottom_half.pack(side=tk.LEFT, padx=5, pady=10)
# Images
self.img_panel = tk.Label(top_half, image=ImageTk.PhotoImage("RGB", size=IMAGE_SIZE)) # Placeholder
self.img_panel.pack(side = tk.LEFT, expand=False, padx=5)
# Joystick
self.init_plot()
self.PlotCanvas = FigCanvas(figure=self.fig, master=top_half)
self.PlotCanvas.get_tk_widget().pack(side=tk.RIGHT, expand=False, padx=5)
# Recording
textframe = tk.Frame(bottom_half, width=332, height=15, padx=5)
textframe.pack(side=tk.LEFT)
textframe.pack_propagate(0)
self.outputDirStrVar = tk.StringVar()
self.txt_outputDir = tk.Entry(textframe, textvariable=self.outputDirStrVar, width=100)
self.txt_outputDir.pack(side=tk.LEFT)
self.outputDirStrVar.set("samples/" + datetime.now().strftime('%Y-%m-%d_%H:%M:%S'))
self.record_button = ttk.Button(bottom_half, text="Record", command=self.on_btn_record)
self.record_button.pack(side = tk.LEFT, padx=5)
def init_plot(self):
self.plotMem = 50 # how much data to keep on the plot
self.plotData = [[0] * (5)] * self.plotMem # mem storage for plot
self.fig = Figure(figsize=(4,3), dpi=80) # 320,240
self.axes = self.fig.add_subplot(111)
def on_timer(self):
self.poll()
# stop drawing if recording to avoid slow downs
if self.recording == False:
self.draw()
if not self.pause_timer:
self.root.after(self.rate, self.on_timer)
def poll(self):
self.img = self.take_screenshot()
self.controller_data = self.controller.read()
self.update_plot()
if self.recording == True:
self.save_data()
self.t += 1
def take_screenshot(self):
# Get raw pixels from the screen
sct_img = self.sct.grab({ "top": Screenshot.OFFSET_Y,
"left": Screenshot.OFFSET_X,
"width": Screenshot.SRC_W,
"height": Screenshot.SRC_H})
# Create the Image
return Image.frombytes('RGB', sct_img.size, sct_img.bgra, 'raw', 'BGRX')
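# Illustrative note: the capture region is defined by the Screenshot
# constants (OFFSET_X/OFFSET_Y for the window origin, SRC_W/SRC_H for its
# size), and the raw BGRA pixel buffer returned by mss is reinterpreted as
# an RGB image via Pillow's 'BGRX' raw decoder.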
def update_plot(self):
self.plotData.append(self.controller_data) # adds to the end of the list
self.plotData.pop(0) # remove the first item in the list, ie the oldest
def save_data(self):
image_file = self.outputDir+'/'+'img_'+str(self.t)+IMAGE_TYPE
self.img.save(image_file)
# write csv line
self.outfile.write( image_file + ',' + ','.join(map(str, self.controller_data)) + '\n' )
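# Illustrative row (hypothetical values): with outputDir
# 'samples/2017-01-01_12:00:00' and t == 3, this appends something like
# 'samples/2017-01-01_12:00:00/img_3.png,0.1,-0.2,0.0,1.0,0.0'
# i.e. the saved image path followed by the controller readings for this tick.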
def draw(self):
# Image
self.img.thumbnail(IMAGE_SIZE, Image.ANTIALIAS) # Resize
self.img_panel.img = ImageTk.PhotoImage(self.img)
self.img_panel['image'] = self.img_panel.img
# Joystick
x = np.asarray(self.plotData)
self.axes.clear()
self.axes.plot(range(0,self.plotMem), x[:,0], 'r')
self.axes.plot(range(0,self.plotMem), x[:,1], 'b')
self.axes.plot(range(0,self.plotMem), x[:,2], 'g')
self.axes.plot(range(0,self.plotMem), x[:,3], 'k')
self.axes.plot(range(0,self.plotMem), x[:,4], 'y')
self.PlotCanvas.draw()
def on_btn_record(self):
# pause timer
self.pause_timer = True
if self.recording:
self.recording = False
else:
self.start_recording()
if self.recording:
self.t = 0 # Reset our counter for the new recording
self.record_button["text"] = "Stop"
self.rate = self.sample_rate
# make / open outfile
self.outfile = open(self.outputDir+'/'+'data.csv', 'a')
else:
self.record_button["text"] = "Record"
self.rate = self.idle_rate
self.outfile.close()
# un pause timer
self.pause_timer = False
self.on_timer()
def start_recording(self):
should_record = True
# check that a dir has been specified
if not self.outputDirStrVar.get():
tkMessageBox.showerror(title='Error', message='Specify the Output Directory', parent=self.root)
should_record = False
else: # a directory was specified
self.outputDir = self.outputDirStrVar.get()
# check if path exists - i.e. may be saving over data
if os.path.exists(self.outputDir):
# overwrite the data, yes/no?
if tkMessageBox.askyesno(title='Warning!', message='Output Directory Exists - Overwrite Data?', parent=self.root):
# delete & re-make the dir:
shutil.rmtree(self.outputDir)
os.mkdir(self.outputDir)
# answer was 'no', so do not overwrite the data
else:
should_record = False
self.txt_outputDir.focus_set()
# directory doesn't exist, so make one
else:
os.mkdir(self.outputDir)
self.recording = should_record
if __name__ == '__main__':
app = MainWindow()
|
kevinhughes27/TensorKart
|
record.py
|
Python
|
mit
| 6,952 | 0.008631 |
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import absolute_import
from __future__ import print_function
def trycmd(config):
from buildbot.clients import tryclient
t = tryclient.Try(config)
t.run()
return 0
|
seankelly/buildbot
|
master/buildbot/scripts/trycmd.py
|
Python
|
gpl-2.0
| 903 | 0 |
from hypothesis import given
from ppb_vector import Vector
from utils import floats, vectors
@given(x=floats(), y=floats())
def test_class_member_access(x: float, y: float):
v = Vector(x, y)
assert v.x == x
assert v.y == y
@given(v=vectors())
def test_index_access(v: Vector):
assert v[0] == v.x
assert v[1] == v.y
@given(v=vectors())
def test_key_access(v: Vector):
assert v["x"] == v.x
assert v["y"] == v.y
|
ppb/ppb-vector
|
tests/test_member_access.py
|
Python
|
artistic-2.0
| 444 | 0 |
"""
Copyright (C) 2010 David Fong and Michael Saunders
LSMR uses an iterative method.
07 Jun 2010: Documentation updated
03 Jun 2010: First release version in Python
David Chin-lung Fong clfong@stanford.edu
Institute for Computational and Mathematical Engineering
Stanford University
Michael Saunders saunders@stanford.edu
Systems Optimization Laboratory
Dept of MS&E, Stanford University.
"""
from __future__ import division, print_function, absolute_import
__all__ = ['lsmr']
from numpy import zeros, infty, atleast_1d
from numpy.linalg import norm
from math import sqrt
from scipy.sparse.linalg.interface import aslinearoperator
from .lsqr import _sym_ortho
def lsmr(A, b, damp=0.0, atol=1e-6, btol=1e-6, conlim=1e8,
maxiter=None, show=False, x0=None):
"""Iterative solver for least-squares problems.
lsmr solves the system of linear equations ``Ax = b``. If the system
is inconsistent, it solves the least-squares problem ``min ||b - Ax||_2``.
A is a rectangular matrix of dimension m-by-n, where all cases are
allowed: m = n, m > n, or m < n. B is a vector of length m.
The matrix A may be dense or sparse (usually sparse).
Parameters
----------
A : {matrix, sparse matrix, ndarray, LinearOperator}
Matrix A in the linear system.
b : array_like, shape (m,)
Vector b in the linear system.
damp : float
Damping factor for regularized least-squares. `lsmr` solves
the regularized least-squares problem::
min ||(b) - (  A   )x||
    ||(0)   (damp*I) ||_2
where damp is a scalar. If damp is None or 0, the system
is solved without regularization.
atol, btol : float, optional
Stopping tolerances. `lsmr` continues iterations until a
certain backward error estimate is smaller than some quantity
depending on atol and btol. Let ``r = b - Ax`` be the
residual vector for the current approximate solution ``x``.
If ``Ax = b`` seems to be consistent, ``lsmr`` terminates
when ``norm(r) <= atol * norm(A) * norm(x) + btol * norm(b)``.
Otherwise, lsmr terminates when ``norm(A^{T} r) <=
atol * norm(A) * norm(r)``. If both tolerances are 1.0e-6 (say),
the final ``norm(r)`` should be accurate to about 6
digits. (The final x will usually have fewer correct digits,
depending on ``cond(A)`` and the size of LAMBDA.) If `atol`
or `btol` is None, a default value of 1.0e-6 will be used.
Ideally, they should be estimates of the relative error in the
entries of A and B respectively. For example, if the entries
of `A` have 7 correct digits, set atol = 1e-7. This prevents
the algorithm from doing unnecessary work beyond the
uncertainty of the input data.
conlim : float, optional
`lsmr` terminates if an estimate of ``cond(A)`` exceeds
`conlim`. For compatible systems ``Ax = b``, conlim could be
as large as 1.0e+12 (say). For least-squares problems,
`conlim` should be less than 1.0e+8. If `conlim` is None, the
default value is 1e+8. Maximum precision can be obtained by
setting ``atol = btol = conlim = 0``, but the number of
iterations may then be excessive.
maxiter : int, optional
`lsmr` terminates if the number of iterations reaches
`maxiter`. The default is ``maxiter = min(m, n)``. For
ill-conditioned systems, a larger value of `maxiter` may be
needed.
show : bool, optional
Print iterations logs if ``show=True``.
x0 : array_like, shape (n,), optional
Initial guess of x, if None zeros are used.
.. versionadded:: 1.0.0
Returns
-------
x : ndarray of float
Least-square solution returned.
istop : int
istop gives the reason for stopping::
istop = 0 means x=0 is a solution. If x0 was given, then x=x0 is a
solution.
= 1 means x is an approximate solution to A*x = B,
according to atol and btol.
= 2 means x approximately solves the least-squares problem
according to atol.
= 3 means COND(A) seems to be greater than CONLIM.
= 4 is the same as 1 with atol = btol = eps (machine
precision)
= 5 is the same as 2 with atol = eps.
= 6 is the same as 3 with CONLIM = 1/eps.
= 7 means ITN reached maxiter before the other stopping
conditions were satisfied.
itn : int
Number of iterations used.
normr : float
``norm(b-Ax)``
normar : float
``norm(A^T (b - Ax))``
norma : float
``norm(A)``
conda : float
Condition number of A.
normx : float
``norm(x)``
Notes
-----
.. versionadded:: 0.11.0
References
----------
.. [1] D. C.-L. Fong and M. A. Saunders,
"LSMR: An iterative algorithm for sparse least-squares problems",
SIAM J. Sci. Comput., vol. 33, pp. 2950-2971, 2011.
http://arxiv.org/abs/1006.0758
.. [2] LSMR Software, http://web.stanford.edu/group/SOL/software/lsmr/
Examples
--------
>>> from scipy.sparse import csc_matrix
>>> from scipy.sparse.linalg import lsmr
>>> A = csc_matrix([[1., 0.], [1., 1.], [0., 1.]], dtype=float)
The first example has the trivial solution `[0, 0]`
>>> b = np.array([0., 0., 0.], dtype=float)
>>> x, istop, itn, normr = lsmr(A, b)[:4]
>>> istop
0
>>> x
array([ 0., 0.])
The stopping code `istop=0` returned indicates that a vector of zeros was
found as a solution. The returned solution `x` indeed contains `[0., 0.]`.
The next example has a non-trivial solution:
>>> b = np.array([1., 0., -1.], dtype=float)
>>> x, istop, itn, normr = lsmr(A, b)[:4]
>>> istop
1
>>> x
array([ 1., -1.])
>>> itn
1
>>> normr
4.440892098500627e-16
As indicated by `istop=1`, `lsmr` found a solution obeying the tolerance
limits. The given solution `[1., -1.]` obviously solves the equation. The
remaining return values include information about the number of iterations
(`itn=1`) and the remaining difference of left and right side of the solved
equation.
The final example demonstrates the behavior in the case where there is no
solution for the equation:
>>> b = np.array([1., 0.01, -1.], dtype=float)
>>> x, istop, itn, normr = lsmr(A, b)[:4]
>>> istop
2
>>> x
array([ 1.00333333, -0.99666667])
>>> A.dot(x)-b
array([ 0.00333333, -0.00333333, 0.00333333])
>>> normr
0.005773502691896255
`istop` indicates that the system is inconsistent and thus `x` is rather an
approximate solution to the corresponding least-squares problem. `normr`
contains the minimal distance that was found.
"""
A = aslinearoperator(A)
b = atleast_1d(b)
if b.ndim > 1:
b = b.squeeze()
msg = ('The exact solution is x = 0, or x = x0, if x0 was given ',
'Ax - b is small enough, given atol, btol ',
'The least-squares solution is good enough, given atol ',
'The estimate of cond(Abar) has exceeded conlim ',
'Ax - b is small enough for this machine ',
'The least-squares solution is good enough for this machine',
'Cond(Abar) seems to be too large for this machine ',
'The iteration limit has been reached ')
hdg1 = ' itn x(1) norm r norm A''r'
hdg2 = ' compatible LS norm A cond A'
pfreq = 20 # print frequency (for repeating the heading)
pcount = 0 # print counter
m, n = A.shape
# stores the num of singular values
minDim = min([m, n])
if maxiter is None:
maxiter = minDim
if show:
print(' ')
print('LSMR Least-squares solution of Ax = b\n')
print('The matrix A has %8g rows and %8g cols' % (m, n))
print('damp = %20.14e\n' % (damp))
print('atol = %8.2e conlim = %8.2e\n' % (atol, conlim))
print('btol = %8.2e maxiter = %8g\n' % (btol, maxiter))
u = b
normb = norm(b)
if x0 is None:
x = zeros(n)
beta = normb.copy()
else:
x = atleast_1d(x0)
u = u - A.matvec(x)
beta = norm(u)
if beta > 0:
u = (1 / beta) * u
v = A.rmatvec(u)
alpha = norm(v)
else:
v = zeros(n)
alpha = 0
if alpha > 0:
v = (1 / alpha) * v
# Initialize variables for 1st iteration.
itn = 0
zetabar = alpha * beta
alphabar = alpha
rho = 1
rhobar = 1
cbar = 1
sbar = 0
h = v.copy()
hbar = zeros(n)
# Initialize variables for estimation of ||r||.
betadd = beta
betad = 0
rhodold = 1
tautildeold = 0
thetatilde = 0
zeta = 0
d = 0
# Initialize variables for estimation of ||A|| and cond(A)
normA2 = alpha * alpha
maxrbar = 0
minrbar = 1e+100
normA = sqrt(normA2)
condA = 1
normx = 0
# Items for use in stopping rules, normb set earlier
istop = 0
ctol = 0
if conlim > 0:
ctol = 1 / conlim
normr = beta
# Reverse the order here from the original matlab code because
# there was an error on return when arnorm==0
normar = alpha * beta
if normar == 0:
if show:
print(msg[0])
return x, istop, itn, normr, normar, normA, condA, normx
if show:
print(' ')
print(hdg1, hdg2)
test1 = 1
test2 = alpha / beta
str1 = '%6g %12.5e' % (itn, x[0])
str2 = ' %10.3e %10.3e' % (normr, normar)
str3 = ' %8.1e %8.1e' % (test1, test2)
print(''.join([str1, str2, str3]))
# Main iteration loop.
while itn < maxiter:
itn = itn + 1
# Perform the next step of the bidiagonalization to obtain the
# next beta, u, alpha, v. These satisfy the relations
# beta*u = A*v - alpha*u,
# alpha*v = A'*u - beta*v.
u = A.matvec(v) - alpha * u
beta = norm(u)
if beta > 0:
u = (1 / beta) * u
v = A.rmatvec(u) - beta * v
alpha = norm(v)
if alpha > 0:
v = (1 / alpha) * v
# At this point, beta = beta_{k+1}, alpha = alpha_{k+1}.
# Construct rotation Qhat_{k,2k+1}.
chat, shat, alphahat = _sym_ortho(alphabar, damp)
# Use a plane rotation (Q_i) to turn B_i to R_i
rhoold = rho
c, s, rho = _sym_ortho(alphahat, beta)
thetanew = s*alpha
alphabar = c*alpha
# Use a plane rotation (Qbar_i) to turn R_i^T to R_i^bar
rhobarold = rhobar
zetaold = zeta
thetabar = sbar * rho
rhotemp = cbar * rho
cbar, sbar, rhobar = _sym_ortho(cbar * rho, thetanew)
zeta = cbar * zetabar
zetabar = - sbar * zetabar
# Update h, h_hat, x.
hbar = h - (thetabar * rho / (rhoold * rhobarold)) * hbar
x = x + (zeta / (rho * rhobar)) * hbar
h = v - (thetanew / rho) * h
# Estimate of ||r||.
# Apply rotation Qhat_{k,2k+1}.
betaacute = chat * betadd
betacheck = -shat * betadd
# Apply rotation Q_{k,k+1}.
betahat = c * betaacute
betadd = -s * betaacute
# Apply rotation Qtilde_{k-1}.
# betad = betad_{k-1} here.
thetatildeold = thetatilde
ctildeold, stildeold, rhotildeold = _sym_ortho(rhodold, thetabar)
thetatilde = stildeold * rhobar
rhodold = ctildeold * rhobar
betad = - stildeold * betad + ctildeold * betahat
# betad = betad_k here.
# rhodold = rhod_k here.
tautildeold = (zetaold - thetatildeold * tautildeold) / rhotildeold
taud = (zeta - thetatilde * tautildeold) / rhodold
d = d + betacheck * betacheck
normr = sqrt(d + (betad - taud)**2 + betadd * betadd)
# Estimate ||A||.
normA2 = normA2 + beta * beta
normA = sqrt(normA2)
normA2 = normA2 + alpha * alpha
# Estimate cond(A).
maxrbar = max(maxrbar, rhobarold)
if itn > 1:
minrbar = min(minrbar, rhobarold)
condA = max(maxrbar, rhotemp) / min(minrbar, rhotemp)
# Test for convergence.
# Compute norms for convergence testing.
normar = abs(zetabar)
normx = norm(x)
# Now use these norms to estimate certain other quantities,
# some of which will be small near a solution.
test1 = normr / normb
if (normA * normr) != 0:
test2 = normar / (normA * normr)
else:
test2 = infty
test3 = 1 / condA
t1 = test1 / (1 + normA * normx / normb)
rtol = btol + atol * normA * normx / normb
# The following tests guard against extremely small values of
# atol, btol or ctol. (The user may have set any or all of
# the parameters atol, btol, conlim to 0.)
# The effect is equivalent to the normal tests using
# atol = eps, btol = eps, conlim = 1/eps.
if itn >= maxiter:
istop = 7
if 1 + test3 <= 1:
istop = 6
if 1 + test2 <= 1:
istop = 5
if 1 + t1 <= 1:
istop = 4
# Allow for tolerances set by the user.
if test3 <= ctol:
istop = 3
if test2 <= atol:
istop = 2
if test1 <= rtol:
istop = 1
# See if it is time to print something.
if show:
if (n <= 40) or (itn <= 10) or (itn >= maxiter - 10) or \
(itn % 10 == 0) or (test3 <= 1.1 * ctol) or \
(test2 <= 1.1 * atol) or (test1 <= 1.1 * rtol) or \
(istop != 0):
if pcount >= pfreq:
pcount = 0
print(' ')
print(hdg1, hdg2)
pcount = pcount + 1
str1 = '%6g %12.5e' % (itn, x[0])
str2 = ' %10.3e %10.3e' % (normr, normar)
str3 = ' %8.1e %8.1e' % (test1, test2)
str4 = ' %8.1e %8.1e' % (normA, condA)
print(''.join([str1, str2, str3, str4]))
if istop > 0:
break
# Print the stopping condition.
if show:
print(' ')
print('LSMR finished')
print(msg[istop])
print('istop =%8g normr =%8.1e' % (istop, normr))
print(' normA =%8.1e normAr =%8.1e' % (normA, normar))
print('itn =%8g condA =%8.1e' % (itn, condA))
print(' normx =%8.1e' % (normx))
print(str1, str2)
print(str3, str4)
return x, istop, itn, normr, normar, normA, condA, normx
|
mbayon/TFG-MachineLearning
|
venv/lib/python3.6/site-packages/scipy/sparse/linalg/isolve/lsmr.py
|
Python
|
mit
| 15,126 | 0.000463 |
from datetime import datetime
from django.core.files import storage
from django.contrib.staticfiles.storage import CachedStaticFilesStorage
class DummyStorage(storage.Storage):
"""
A storage class that does implement modified_time() but raises
NotImplementedError for the operations it does not override.
"""
def _save(self, name, content):
return 'dummy'
def delete(self, name):
pass
def exists(self, name):
pass
def modified_time(self, name):
return datetime(1970, 1, 1)
class SimpleCachedStaticFilesStorage(CachedStaticFilesStorage):
def file_hash(self, name, content=None):
return 'deploy12345'
|
lecaoquochung/ddnb.django
|
tests/staticfiles_tests/storage.py
|
Python
|
bsd-3-clause
| 660 | 0 |
import json
import urllib2
from isodate.isodatetime import parse_datetime
from isodate.isoerror import ISO8601Error
from django.conf import settings
from util import validate_uuid, convert_to_dict, get_agent_ifp
from Authorization import auth
from StatementValidator import StatementValidator
from ..models import Statement, Agent, Activity
from ..exceptions import ParamConflict, ParamError, Forbidden, NotFound, BadRequest, IDNotFoundError
def check_for_existing_statementId(stmtID):
return Statement.objects.filter(statement_id=stmtID).exists()
def check_for_no_other_params_supplied(query_dict):
supplied = True
if len(query_dict) <= 1:
supplied = False
return supplied
# Extra agent validation for state and profile
def validate_oauth_state_or_profile_agent(req_dict, endpoint):
ag = req_dict['params']['agent']
token = req_dict['auth']['oauth_token']
scopes = token.scope_to_list()
if not 'all' in scopes:
if not isinstance(ag, dict):
ag = json.loads(ag)
try:
agent = Agent.objects.get(**ag)
except Agent.DoesNotExist:
err_msg = "Agent in %s cannot be found to match user in authorization" % endpoint
raise NotFound(err_msg)
if not agent in req_dict['auth']['authority'].member.all():
err_msg = "Authorization doesn't match agent in %s" % endpoint
raise Forbidden(err_msg)
def validate_void_statement(void_id):
# Retrieve the statement being voided; if it has already been voided, return an error
# since you cannot unvoid a statement and should just reissue the statement under a new ID.
try:
stmt = Statement.objects.get(statement_id=void_id)
except Statement.DoesNotExist:
err_msg = "Statement with ID %s does not exist" % void_id
raise IDNotFoundError(err_msg)
if stmt.voided:
err_msg = "Statement with ID: %s is already voided, cannot unvoid. Please re-issue the statement under a new ID." % void_id
raise Forbidden(err_msg)
def server_validate_statement_object(stmt_object, auth):
if stmt_object['objectType'] == 'StatementRef' and not check_for_existing_statementId(stmt_object['id']):
err_msg = "No statement with ID %s was found" % stmt_object['id']
raise IDNotFoundError(err_msg)
def validate_stmt_authority(stmt, auth, auth_validated):
# If not validated yet - validate auth first since it supersedes any auth in stmt
if not auth_validated:
if auth['authority']:
if auth['authority'].objectType == 'Group' and not auth['authority'].oauth_identifier:
err_msg = "Statements cannot have a non-Oauth group as the authority"
raise ParamError(err_msg)
else:
return True
# If no auth then validate authority in stmt if there is one
else:
if 'authority' in stmt:
# If they try using a non-oauth group that already exists - throw an error
if stmt['authority']['objectType'] == 'Group':
contains_account = len([x for m in stmt['authority']['member'] for x in m.keys() if 'account' in x]) > 0
if contains_account:
for agent in stmt['authority']['member']:
if 'account' in agent:
if not 'oauth' in agent['account']['homePage'].lower():
err_msg = "Statements cannot have a non-Oauth group as the authority"
raise ParamError(err_msg)
# No members contain an account so that means it's not an Oauth group
else:
err_msg = "Statements cannot have a non-Oauth group as the authority"
raise ParamError(err_msg)
else:
return True
else:
return True
# Retrieve JSON data from ID
def get_act_def_data(act_data):
act_url_data = {}
# See if id resolves
try:
req = urllib2.Request(act_data['id'])
req.add_header('Accept', 'application/json, */*')
act_resp = urllib2.urlopen(req, timeout=settings.ACTIVITY_ID_RESOLVE_TIMEOUT)
except Exception:
# Doesn't resolve-hopefully data is in payload
pass
else:
# If it resolves then try parsing JSON from it
try:
act_url_data = json.loads(act_resp.read())
except Exception:
# Resolves but no data to retrieve - this is OK
pass
# If there was data from the URL and a definition in the received JSON already
if act_url_data and 'definition' in act_data:
act_data['definition'] = dict(act_url_data.items() + act_data['definition'].items())
# If there was data from the URL and no definition in the JSON
elif act_url_data and not 'definition' in act_data:
act_data['definition'] = act_url_data
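# Illustrative merge (hypothetical data): if the activity id resolved to
# {'name': {'en-US': 'Quiz'}, 'type': 'http://example.com/types/assessment'}
# and the statement already carried {'name': {'en-US': 'My Quiz'}}, the
# dict() merge above keeps the resolved 'type' but lets the statement's own
# definition win on the conflicting 'name' key, since its items come second.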
def server_validation(stmt_set, auth, payload_sha2s):
auth_validated = False
if type(stmt_set) is list:
for stmt in stmt_set:
server_validation(stmt, auth, payload_sha2s)
else:
if 'id' in stmt_set:
statement_id = stmt_set['id']
if check_for_existing_statementId(statement_id):
err_msg = "A statement with ID %s already exists" % statement_id
raise ParamConflict(err_msg)
server_validate_statement_object(stmt_set['object'], auth)
if stmt_set['verb']['id'] == 'http://adlnet.gov/expapi/verbs/voided':
validate_void_statement(stmt_set['object']['id'])
if not 'objectType' in stmt_set['object'] or stmt_set['object']['objectType'] == 'Activity':
get_act_def_data(stmt_set['object'])
try:
validator = StatementValidator()
validator.validate_activity(stmt_set['object'])
except ParamError, e:
raise ParamError(e.message)
except Exception, e:
raise BadRequest(e.message)
auth_validated = validate_stmt_authority(stmt_set, auth, auth_validated)
if 'attachments' in stmt_set:
attachment_data = stmt_set['attachments']
validate_attachments(attachment_data, payload_sha2s)
@auth
def statements_post(req_dict):
if req_dict['params'].keys():
raise ParamError("The post statements request contained unexpected parameters: %s" % ", ".join(req_dict['params'].keys()))
if isinstance(req_dict['body'], basestring):
req_dict['body'] = convert_to_dict(req_dict['body'])
try:
validator = StatementValidator(req_dict['body'])
validator.validate()
except ParamError, e:
raise ParamError(e.message)
except Exception, e:
raise BadRequest(e.message)
server_validation(req_dict['body'], req_dict['auth'], req_dict.get('payload_sha2s', None))
return req_dict
@auth
def statements_more_get(req_dict):
if not 'more_id' in req_dict:
err_msg = "Missing more_id while trying to hit /more endpoint"
raise ParamError(err_msg)
return req_dict
def validate_statementId(req_dict):
if 'statementId' in req_dict['params'] and 'voidedStatementId' in req_dict['params']:
err_msg = "Cannot have both statementId and voidedStatementId in a GET request"
raise ParamError(err_msg)
elif 'statementId' in req_dict['params']:
statementId = req_dict['params']['statementId']
voided = False
else:
statementId = req_dict['params']['voidedStatementId']
voided = True
not_allowed = ["agent", "verb", "activity", "registration",
"related_activities", "related_agents", "since",
"until", "limit", "ascending"]
bad_keys = set(not_allowed) & set(req_dict['params'].keys())
if bad_keys:
err_msg = "Cannot have %s in a GET request only 'format' and/or 'attachments' are allowed with 'statementId' and 'voidedStatementId'" % ', '.join(bad_keys)
raise ParamError(err_msg)
# Try to retrieve stmt, if DNE then return empty else return stmt info
try:
st = Statement.objects.get(statement_id=statementId)
except Statement.DoesNotExist:
err_msg = 'There is no statement associated with the id: %s' % statementId
raise IDNotFoundError(err_msg)
auth = req_dict.get('auth', None)
mine_only = auth and 'statements_mine_only' in auth
if auth['authority']:
if mine_only and st.authority.id != auth['authority'].id:
err_msg = "Incorrect permissions to view statements"
raise Forbidden(err_msg)
if st.voided != voided:
if st.voided:
err_msg = 'The requested statement (%s) is voided. Use the "voidedStatementId" parameter to retrieve your statement.' % statementId
else:
err_msg = 'The requested statement (%s) is not voided. Use the "statementId" parameter to retrieve your statement.' % statementId
raise IDNotFoundError(err_msg)
return statementId
@auth
def statements_get(req_dict):
rogueparams = set(req_dict['params']) - set(["statementId","voidedStatementId","agent", "verb", "activity", "registration",
"related_activities", "related_agents", "since",
"until", "limit", "format", "attachments", "ascending"])
if rogueparams:
raise ParamError("The get statements request contained unexpected parameters: %s" % ", ".join(rogueparams))
formats = ['exact', 'canonical', 'ids']
if 'params' in req_dict and 'format' in req_dict['params']:
if req_dict['params']['format'] not in formats:
raise ParamError("The format filter value (%s) was not one of the known values: %s" % (req_dict['params']['format'], ','.join(formats)))
else:
req_dict['params']['format'] = 'exact'
# StatementId could be for voided statement as well
if 'params' in req_dict and ('statementId' in req_dict['params'] or 'voidedStatementId' in req_dict['params']):
req_dict['statementId'] = validate_statementId(req_dict)
if 'since' in req_dict['params']:
try:
parse_datetime(req_dict['params']['since'])
except (Exception, ISO8601Error):
raise ParamError("Since parameter was not a valid ISO8601 timestamp")
if 'until' in req_dict['params']:
try:
parse_datetime(req_dict['params']['until'])
except (Exception, ISO8601Error):
raise ParamError("Until parameter was not a valid ISO8601 timestamp")
# Django converts all query values to string - make boolean depending on if client wants attachments or not
# Only need to do this in GET b/c GET/more will have it saved in pickle information
if 'params' in req_dict and 'attachments' in req_dict['params']:
if req_dict['params']['attachments'].lower() == 'true':
req_dict['params']['attachments'] = True
else:
req_dict['params']['attachments'] = False
else:
req_dict['params']['attachments'] = False
return req_dict
@auth
def statements_put(req_dict):
# Find any unexpected parameters
rogueparams = set(req_dict['params']) - set(["statementId"])
if rogueparams:
raise ParamError("The put statements request contained unexpected parameters: %s" % ", ".join(rogueparams))
# Statement id must be supplied as a query param. If it is in the body too, it must be the same
if not 'statementId' in req_dict['params']:
raise ParamError("Error -- statements - method = %s, but no statementId parameter or ID given in statement" % req_dict['method'])
else:
statement_id = req_dict['params']['statementId']
# Convert data so it can be parsed
if isinstance(req_dict['body'], basestring):
req_dict['body'] = convert_to_dict(req_dict['body'])
# Try to get id if in body
try:
statement_body_id = req_dict['body']['id']
except Exception, e:
statement_body_id = None
# If ids exist in both places, check if they are equal
if statement_body_id and statement_id != statement_body_id:
raise ParamError("Error -- statements - method = %s, param and body ID both given, but do not match" % req_dict['method'])
# If statement with that ID already exists-raise conflict error
if check_for_existing_statementId(statement_id):
raise ParamConflict("A statement with ID %s already exists" % statement_id)
# Set id inside of statement with param id
if not statement_body_id:
req_dict['body']['id'] = statement_id
# If there are no other params-raise param error since nothing else is supplied
if not check_for_no_other_params_supplied(req_dict['body']):
raise ParamError("No other params are supplied with statementId.")
# Validate statement in body
try:
validator = StatementValidator(req_dict['body'])
validator.validate()
except ParamError, e:
raise ParamError(e.message)
except Exception, e:
raise BadRequest(e.message)
server_validation(req_dict['body'], req_dict['auth'], req_dict.get('payload_sha2s', None))
return req_dict
def validate_attachments(attachment_data, payload_sha2s):
# For each attachment that is in the actual statement
for attachment in attachment_data:
# If the attachment data has a sha2 field, must validate it against the payload data
if 'sha2' in attachment:
sha2 = attachment['sha2']
# Check if the sha2 field is a key in the payload dict
if payload_sha2s:
if not sha2 in payload_sha2s:
err_msg = "Could not find attachment payload with sha: %s" % sha2
raise ParamError(err_msg)
else:
raise BadRequest("Missing X-Experience-API-Hash field in header")
@auth
def activity_state_post(req_dict):
rogueparams = set(req_dict['params']) - set(["activityId", "agent", "stateId", "registration"])
if rogueparams:
raise ParamError("The post activity state request contained unexpected parameters: %s" % ", ".join(rogueparams))
validator = StatementValidator()
if 'activityId' in req_dict['params']:
validator.validate_uri(req_dict['params']['activityId'], "activityId param for activity state")
else:
err_msg = "Error -- activity_state - method = %s, but activityId parameter is missing.." % req_dict['method']
raise ParamError(err_msg)
if not 'stateId' in req_dict['params']:
err_msg = "Error -- activity_state - method = %s, but stateId parameter is missing.." % req_dict['method']
raise ParamError(err_msg)
if 'registration' in req_dict['params']:
if not validate_uuid(req_dict['params']['registration']):
raise ParamError("%s is not a valid uuid for the registration parameter")
if 'agent' in req_dict['params']:
try:
agent = json.loads(req_dict['params']['agent'])
except Exception, e:
raise ParamError("agent param for activity state is not valid")
validator.validate_agent(agent, "Activity state agent param")
else:
err_msg = "Error -- activity_state - method = %s, but agent parameter is missing.." % req_dict['method']
raise ParamError(err_msg)
if 'headers' not in req_dict or ('CONTENT_TYPE' not in req_dict['headers'] or req_dict['headers']['CONTENT_TYPE'] != "application/json"):
err_msg = "The content type for activity state POSTs must be application/json"
raise ParamError(err_msg)
# Must have body included for state
if 'body' not in req_dict:
err_msg = "Could not find the state"
raise ParamError(err_msg)
# Extra validation if oauth
if req_dict['auth']['type'] == 'oauth':
validate_oauth_state_or_profile_agent(req_dict, "state")
# Set state
req_dict['state'] = req_dict.pop('raw_body', req_dict.pop('body', None))
return req_dict
@auth
def activity_state_put(req_dict):
rogueparams = set(req_dict['params']) - set(["activityId", "agent", "stateId", "registration"])
if rogueparams:
raise ParamError("The put activity state request contained unexpected parameters: %s" % ", ".join(rogueparams))
validator = StatementValidator()
if 'activityId' in req_dict['params']:
validator.validate_uri(req_dict['params']['activityId'], "activityId param for activity state")
else:
err_msg = "Error -- activity_state - method = %s, but activityId parameter is missing.." % req_dict['method']
raise ParamError(err_msg)
if not 'stateId' in req_dict['params']:
err_msg = "Error -- activity_state - method = %s, but stateId parameter is missing.." % req_dict['method']
raise ParamError(err_msg)
if 'registration' in req_dict['params']:
if not validate_uuid(req_dict['params']['registration']):
raise ParamError("%s is not a valid uuid for the registration parameter")
if 'agent' in req_dict['params']:
try:
agent = json.loads(req_dict['params']['agent'])
except Exception, e:
raise ParamError("agent param for activity state is not valid")
validator.validate_agent(agent, "Activity state agent param")
else:
err_msg = "Error -- activity_state - method = %s, but agent parameter is missing.." % req_dict['method']
raise ParamError(err_msg)
# Must have body included for state
if 'body' not in req_dict:
err_msg = "Could not find the state"
raise ParamError(err_msg)
# Extra validation if oauth
if req_dict['auth']['type'] == 'oauth':
validate_oauth_state_or_profile_agent(req_dict, "state")
# Set state
req_dict['state'] = req_dict.pop('raw_body', req_dict.pop('body', None))
return req_dict
@auth
def activity_state_get(req_dict):
rogueparams = set(req_dict['params']) - set(["activityId", "agent", "stateId", "registration", "since"])
if rogueparams:
raise ParamError("The get activity state request contained unexpected parameters: %s" % ", ".join(rogueparams))
validator = StatementValidator()
if 'activityId' in req_dict['params']:
validator.validate_uri(req_dict['params']['activityId'], "activityId param for activity state")
else:
err_msg = "Error -- activity_state - method = %s, but activityId parameter is missing.." % req_dict['method']
raise ParamError(err_msg)
if 'registration' in req_dict['params']:
if not validate_uuid(req_dict['params']['registration']):
raise ParamError("%s is not a valid uuid for the registration parameter")
if 'agent' in req_dict['params']:
try:
agent = json.loads(req_dict['params']['agent'])
except Exception, e:
raise ParamError("agent param for activity state is not valid")
validator.validate_agent(agent, "Activity state agent param")
else:
err_msg = "Error -- activity_state - method = %s, but agent parameter is missing.." % req_dict['method']
raise ParamError(err_msg)
if 'since' in req_dict['params']:
try:
parse_datetime(req_dict['params']['since'])
except (Exception, ISO8601Error):
raise ParamError("Since parameter was not a valid ISO8601 timestamp")
# Extra validation if oauth
if req_dict['auth']['type'] == 'oauth':
validate_oauth_state_or_profile_agent(req_dict, "state")
return req_dict
@auth
def activity_state_delete(req_dict):
rogueparams = set(req_dict['params']) - set(["activityId", "agent", "stateId", "registration"])
if rogueparams:
raise ParamError("The delete activity state request contained unexpected parameters: %s" % ", ".join(rogueparams))
validator = StatementValidator()
if 'activityId' in req_dict['params']:
validator.validate_uri(req_dict['params']['activityId'], "activityId param for activity state")
else:
err_msg = "Error -- activity_state - method = %s, but activityId parameter is missing.." % req_dict['method']
raise ParamError(err_msg)
if 'registration' in req_dict['params']:
if not validate_uuid(req_dict['params']['registration']):
raise ParamError("%s is not a valid uuid for the registration parameter")
if 'agent' in req_dict['params']:
try:
agent = json.loads(req_dict['params']['agent'])
except Exception, e:
raise ParamError("agent param for activity state is not valid")
validator.validate_agent(agent, "Activity state agent param")
else:
err_msg = "Error -- activity_state - method = %s, but agent parameter is missing.." % req_dict['method']
raise ParamError(err_msg)
# Extra validation if oauth
if req_dict['auth']['type'] == 'oauth':
validate_oauth_state_or_profile_agent(req_dict, "state")
return req_dict
@auth
def activity_profile_post(req_dict):
rogueparams = set(req_dict['params']) - set(["activityId", "profileId"])
if rogueparams:
raise ParamError("The post activity profile request contained unexpected parameters: %s" % ", ".join(rogueparams))
validator = StatementValidator()
if 'activityId' in req_dict['params']:
validator.validate_uri(req_dict['params']['activityId'], "activityId param for activity profile")
else:
err_msg = "Error -- activity_profile - method = %s, but activityId parameter missing.." % req_dict['method']
raise ParamError(err_msg)
if not 'profileId' in req_dict['params']:
err_msg = "Error -- activity_profile - method = %s, but profileId parameter missing.." % req_dict['method']
raise ParamError(err_msg)
if 'headers' not in req_dict or ('CONTENT_TYPE' not in req_dict['headers'] or req_dict['headers']['CONTENT_TYPE'] != "application/json"):
err_msg = "The content type for activity profile POSTs must be application/json"
raise ParamError(err_msg)
if 'body' not in req_dict:
err_msg = "Could not find the profile document"
raise ParamError(err_msg)
req_dict['profile'] = req_dict.pop('raw_body', req_dict.pop('body', None))
return req_dict
@auth
def activity_profile_put(req_dict):
rogueparams = set(req_dict['params']) - set(["activityId", "profileId"])
if rogueparams:
raise ParamError("The put activity profile request contained unexpected parameters: %s" % ", ".join(rogueparams))
validator = StatementValidator()
if 'activityId' in req_dict['params']:
validator.validate_uri(req_dict['params']['activityId'], "activityId param for activity profile")
else:
err_msg = "Error -- activity_profile - method = %s, but activityId parameter missing.." % req_dict['method']
raise ParamError(err_msg)
if not 'profileId' in req_dict['params']:
err_msg = "Error -- activity_profile - method = %s, but profileId parameter missing.." % req_dict['method']
raise ParamError(err_msg)
if 'body' not in req_dict:
err_msg = "Could not find the profile document"
raise ParamError(err_msg)
    # Set profile - req_parse converts all request bodies to a dict, but the activity
    # profile has to be kept as a string, with single quotes swapped for double quotes,
    # because of a quoting issue when the profile is posted from JavaScript
req_dict['profile'] = req_dict.pop('raw_body', req_dict.pop('body', None))
return req_dict
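# Illustrative aside (added here; not part of the original module): the quoting note in
# activity_profile_put above exists because json.loads only accepts double-quoted JSON,
# so a profile document written with single quotes by a JavaScript client cannot simply
# be parsed and re-serialized. A minimal, unused sketch of the difference:
def _example_profile_quoting():
    json.loads('{"lang": "en-US"}')      # double quotes: valid JSON, parses fine
    try:
        json.loads("{'lang': 'en-US'}")  # single quotes: json.loads raises ValueError
    except ValueError:
        pass  # hence the raw body string is kept as-is instead of a parsed dict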
@auth
def activity_profile_get(req_dict):
rogueparams = set(req_dict['params']) - set(["activityId", "profileId", "since"])
if rogueparams:
raise ParamError("The get activity profile request contained unexpected parameters: %s" % ", ".join(rogueparams))
validator = StatementValidator()
if 'activityId' in req_dict['params']:
validator.validate_uri(req_dict['params']['activityId'], "activityId param for activity profile")
else:
err_msg = "Error -- activity_profile - method = %s, but activityId parameter missing.." % req_dict['method']
raise ParamError(err_msg)
if 'since' in req_dict['params']:
try:
parse_datetime(req_dict['params']['since'])
except (Exception, ISO8601Error):
raise ParamError("Since parameter was not a valid ISO8601 timestamp")
return req_dict
@auth
def activity_profile_delete(req_dict):
rogueparams = set(req_dict['params']) - set(["activityId", "profileId"])
if rogueparams:
raise ParamError("The delete activity profile request contained unexpected parameters: %s" % ", ".join(rogueparams))
validator = StatementValidator()
if 'activityId' in req_dict['params']:
validator.validate_uri(req_dict['params']['activityId'], "activityId param for activity profile")
else:
err_msg = "Error -- activity_profile - method = %s, but activityId parameter missing.." % req_dict['method']
raise ParamError(err_msg)
if not 'profileId' in req_dict['params']:
err_msg = "Error -- activity_profile - method = %s, but profileId parameter missing.." % req_dict['method']
raise ParamError(err_msg)
return req_dict
@auth
def activities_get(req_dict):
rogueparams = set(req_dict['params']) - set(["activityId"])
if rogueparams:
raise ParamError("The get activities request contained unexpected parameters: %s" % ", ".join(rogueparams))
try:
activityId = req_dict['params']['activityId']
except KeyError:
err_msg = "Error -- activities - method = %s, but activityId parameter is missing" % req_dict['method']
raise ParamError(err_msg)
# Try to retrieve activity, if DNE then return empty else return activity info
try:
Activity.objects.get(activity_id=activityId, canonical_version=True)
except Activity.DoesNotExist:
err_msg = "No activity found with ID %s" % activityId
raise IDNotFoundError(err_msg)
return req_dict
@auth
def agent_profile_post(req_dict):
rogueparams = set(req_dict['params']) - set(["agent", "profileId"])
if rogueparams:
raise ParamError("The post agent profile request contained unexpected parameters: %s" % ", ".join(rogueparams))
validator = StatementValidator()
if 'agent' in req_dict['params']:
try:
agent = json.loads(req_dict['params']['agent'])
except Exception, e:
raise ParamError("agent param for agent profile is not valid")
validator.validate_agent(agent, "agent param for agent profile")
else:
err_msg = "Error -- agent_profile - method = %s, but agent parameter missing.." % req_dict['method']
raise ParamError(err_msg)
if not 'profileId' in req_dict['params']:
err_msg = "Error -- agent_profile - method = %s, but profileId parameter missing.." % req_dict['method']
raise ParamError(err_msg)
if 'headers' not in req_dict or ('CONTENT_TYPE' not in req_dict['headers'] or req_dict['headers']['CONTENT_TYPE'] != "application/json"):
err_msg = "The content type for agent profile POSTs must be application/json"
raise ParamError(err_msg)
if 'body' not in req_dict:
err_msg = "Could not find the profile document"
raise ParamError(err_msg)
# Extra validation if oauth
if req_dict['auth']['type'] == 'oauth':
validate_oauth_state_or_profile_agent(req_dict, "profile")
# Set profile
req_dict['profile'] = req_dict.pop('raw_body', req_dict.pop('body', None))
return req_dict
@auth
def agent_profile_put(req_dict):
rogueparams = set(req_dict['params']) - set(["agent", "profileId"])
if rogueparams:
raise ParamError("The put agent profile request contained unexpected parameters: %s" % ", ".join(rogueparams))
validator = StatementValidator()
if 'agent' in req_dict['params']:
try:
agent = json.loads(req_dict['params']['agent'])
except Exception, e:
raise ParamError("agent param for agent profile is not valid")
validator.validate_agent(agent, "agent param for agent profile")
else:
err_msg = "Error -- agent_profile - method = %s, but agent parameter missing.." % req_dict['method']
raise ParamError(err_msg)
if not 'profileId' in req_dict['params']:
err_msg = "Error -- agent_profile - method = %s, but profileId parameter missing.." % req_dict['method']
raise ParamError(err_msg)
if 'body' not in req_dict:
err_msg = "Could not find the profile document"
raise ParamError(err_msg)
# Extra validation if oauth
if req_dict['auth']['type'] == 'oauth':
validate_oauth_state_or_profile_agent(req_dict, "profile")
req_dict['profile'] = req_dict.pop('raw_body', req_dict.pop('body', None))
return req_dict
@auth
def agent_profile_get(req_dict):
rogueparams = set(req_dict['params']) - set(["agent", "profileId", "since"])
if rogueparams:
raise ParamError("The get agent profile request contained unexpected parameters: %s" % ", ".join(rogueparams))
validator = StatementValidator()
if 'agent' in req_dict['params']:
try:
agent = json.loads(req_dict['params']['agent'])
except Exception, e:
raise ParamError("agent param for agent profile is not valid")
validator.validate_agent(agent, "agent param for agent profile")
else:
err_msg = "Error -- agent_profile - method = %s, but agent parameter missing.." % req_dict['method']
raise ParamError(err_msg)
if 'since' in req_dict['params']:
try:
parse_datetime(req_dict['params']['since'])
except (Exception, ISO8601Error):
raise ParamError("Since parameter was not a valid ISO8601 timestamp")
# Extra validation if oauth
if req_dict['auth']['type'] == 'oauth':
validate_oauth_state_or_profile_agent(req_dict, "profile")
return req_dict
@auth
def agent_profile_delete(req_dict):
rogueparams = set(req_dict['params']) - set(["agent", "profileId"])
if rogueparams:
raise ParamError("The delete agent profile request contained unexpected parameters: %s" % ", ".join(rogueparams))
validator = StatementValidator()
if 'agent' in req_dict['params']:
try:
agent = json.loads(req_dict['params']['agent'])
except Exception, e:
raise ParamError("agent param for agent profile is not valid")
validator.validate_agent(agent, "agent param for agent profile")
else:
err_msg = "Error -- agent_profile - method = %s, but agent parameter missing.." % req_dict['method']
raise ParamError(err_msg)
if not 'profileId' in req_dict['params']:
err_msg = "Error -- agent_profile - method = %s, but profileId parameter missing.." % req_dict['method']
raise ParamError(err_msg)
# Extra validation if oauth
if req_dict['auth']['type'] == 'oauth':
validate_oauth_state_or_profile_agent(req_dict, "profile")
return req_dict
@auth
def agents_get(req_dict):
rogueparams = set(req_dict['params']) - set(["agent"])
if rogueparams:
raise ParamError("The get agent request contained unexpected parameters: %s" % ", ".join(rogueparams))
try:
req_dict['params']['agent']
except KeyError:
err_msg = "Error -- agents url, but no agent parameter.. the agent parameter is required"
raise ParamError(err_msg)
agent = json.loads(req_dict['params']['agent'])
params = get_agent_ifp(agent)
if not Agent.objects.filter(**params).exists():
raise IDNotFoundError("Error with Agent. The agent partial did not match any agents on record")
req_dict['agent_ifp'] = params
return req_dict
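# Reference note (added; a hypothetical shape, since req_parse is not shown here): all of
# the validators above read the same pre-parsed request dictionary, roughly of the form
#
#     {
#         'method': 'GET',
#         'params': {'activityId': 'http://example.com/act', 'stateId': 'score',
#                    'agent': '{"mbox": "mailto:learner@example.com"}'},
#         'headers': {'CONTENT_TYPE': 'application/json'},
#         'auth': {'type': 'basic'},
#         'body': ...,
#     }
#
# with 'params', 'headers', 'auth' and 'body'/'raw_body' being the keys they inspect.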
|
diagonalwalnut/Experience
|
lrs/util/req_validate.py
|
Python
|
apache-2.0
| 32,206 | 0.006334 |
#============================================================================
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#============================================================================
# Copyright (C) 2006 XenSource Ltd.
#============================================================================
from xen.xend.XendAPIConstants import *
from xen.util import auxbin
#
# Shutdown codes and reasons.
#
DOMAIN_POWEROFF = 0
DOMAIN_REBOOT = 1
DOMAIN_SUSPEND = 2
DOMAIN_CRASH = 3
DOMAIN_HALT = 4
DOMAIN_SHUTDOWN_REASONS = {
DOMAIN_POWEROFF: "poweroff",
DOMAIN_REBOOT : "reboot",
DOMAIN_SUSPEND : "suspend",
DOMAIN_CRASH : "crash",
DOMAIN_HALT : "halt"
}
REVERSE_DOMAIN_SHUTDOWN_REASONS = \
dict([(y, x) for x, y in DOMAIN_SHUTDOWN_REASONS.items()])
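# For reference, the reverse map goes from reason string back to its code, e.g.
# REVERSE_DOMAIN_SHUTDOWN_REASONS["poweroff"] == DOMAIN_POWEROFF (i.e. 0).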
HVM_PARAM_CALLBACK_IRQ = 0
HVM_PARAM_STORE_PFN = 1
HVM_PARAM_STORE_EVTCHN = 2
HVM_PARAM_PAE_ENABLED = 4
HVM_PARAM_IOREQ_PFN = 5
HVM_PARAM_BUFIOREQ_PFN = 6
HVM_PARAM_NVRAM_FD = 7 # ia64
HVM_PARAM_VHPT_SIZE = 8 # ia64
HVM_PARAM_BUFPIOREQ_PFN = 9 # ia64
HVM_PARAM_VIRIDIAN = 9 # x86
HVM_PARAM_TIMER_MODE = 10
HVM_PARAM_HPET_ENABLED = 11
HVM_PARAM_ACPI_S_STATE = 14
HVM_PARAM_VPT_ALIGN = 16
restart_modes = [
"restart",
"destroy",
"preserve",
"rename-restart",
"coredump-destroy",
"coredump-restart"
]
DOM_STATES = [
'halted',
'paused',
'running',
'suspended',
'shutdown',
'crashed',
'unknown',
]
DOM_STATE_HALTED = XEN_API_VM_POWER_STATE_HALTED
DOM_STATE_PAUSED = XEN_API_VM_POWER_STATE_PAUSED
DOM_STATE_RUNNING = XEN_API_VM_POWER_STATE_RUNNING
DOM_STATE_SUSPENDED = XEN_API_VM_POWER_STATE_SUSPENDED
DOM_STATE_SHUTDOWN = XEN_API_VM_POWER_STATE_SHUTTINGDOWN
DOM_STATE_CRASHED = XEN_API_VM_POWER_STATE_CRASHED
DOM_STATE_UNKNOWN = XEN_API_VM_POWER_STATE_UNKNOWN
DOM_STATES_OLD = [
'running',
'blocked',
'paused',
'shutdown',
'crashed',
'dying'
]
SHUTDOWN_TIMEOUT = (60.0 * 5)
"""Minimum time between domain restarts in seconds."""
MINIMUM_RESTART_TIME = 60
RESTART_IN_PROGRESS = 'xend/restart_in_progress'
DUMPCORE_IN_PROGRESS = 'xend/dumpcore_in_progress'
LAST_SHUTDOWN_REASON = 'xend/last_shutdown_reason'
TRIGGER_NMI = 0
TRIGGER_RESET = 1
TRIGGER_INIT = 2
TRIGGER_POWER = 3
TRIGGER_S3RESUME = 4
TRIGGER_TYPE = {
"nmi" : TRIGGER_NMI,
"reset" : TRIGGER_RESET,
"init" : TRIGGER_INIT,
"s3resume": TRIGGER_S3RESUME,
"power": TRIGGER_POWER
}
#
# Device migration stages (eg. XendDomainInfo, XendCheckpoint, server.tpmif)
#
DEV_MIGRATE_TEST = 0
DEV_MIGRATE_STEP1 = 1
DEV_MIGRATE_STEP2 = 2
DEV_MIGRATE_STEP3 = 3
#
# VTPM-related constants
#
VTPM_DELETE_SCRIPT = auxbin.scripts_dir() + '/vtpm-delete'
#
# Xenstore Constants
#
XS_VMROOT = "/vm/"
NR_PCI_FUNC = 8
NR_PCI_DEV = 32
NR_PCI_DEVFN = NR_PCI_FUNC * NR_PCI_DEV
AUTO_PHP_SLOT = 0x100
#
# tmem
#
TMEM_CONTROL = 0
TMEM_NEW_POOL = 1
TMEM_DESTROY_POOL = 2
TMEM_NEW_PAGE = 3
TMEM_PUT_PAGE = 4
TMEM_GET_PAGE = 5
TMEM_FLUSH_PAGE = 6
TMEM_FLUSH_OBJECT = 7
TMEM_READ = 8
TMEM_WRITE = 9
TMEM_XCHG = 10
TMEMC_THAW = 0
TMEMC_FREEZE = 1
TMEMC_FLUSH = 2
TMEMC_DESTROY = 3
TMEMC_LIST = 4
TMEMC_SET_WEIGHT = 5
TMEMC_SET_CAP = 6
TMEMC_SET_COMPRESS = 7
|
avsm/xen-unstable
|
tools/python/xen/xend/XendConstants.py
|
Python
|
gpl-2.0
| 3,953 | 0.012396 |
# Copyright 2013 Ken Pepple <ken@pepple.info>
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import alfred_utils as utils
import requests
PROWL_URL = "https://api.prowlapp.com/publicapi/"
DEFAULT_PRIORITY = 0
VALID_PRIORITIES = [-2, -1, 0, 1, 2]
def get_api_key():
return utils.get_config('apikey')
def get_priority_key():
try:
p = utils.get_config('priority')
if p not in VALID_PRIORITIES:
p = DEFAULT_PRIORITY
except:
p = DEFAULT_PRIORITY
return p
def verify_apikey(apikey):
parameters = {'apikey': apikey}
r = requests.post(PROWL_URL + "verify", params=parameters)
return r.ok
def save_api_key(apikey):
utils.save_config('apikey',apikey)
def send_prowl(description, application="Alfred", event="event", priority=0):
try:
apikey = get_api_key()
except:
print "No APIKEY. Please configure by holding down the cmd key and pasting in prowl APIKEY."
raise Exception("No APIKEY. Please configure by holding down the cmd key and pasting in prowl APIKEY.")
parameters = {'apikey': apikey, 'event': event, 'application': application,
'priority': priority, 'description': description}
r = requests.post(PROWL_URL + "add", params=parameters)
return r.ok
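# Illustrative usage (added; not part of the original workflow script): assuming an API
# key was stored earlier with save_api_key(), a notification could be pushed with e.g.
#
#     send_prowl("Backup finished", application="cron", event="backup",
#                priority=get_priority_key())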
|
slashk/prowl.alfredworkflow
|
prowl_alfred.py
|
Python
|
apache-2.0
| 1,835 | 0.00545 |
# Copyright(c)2015 NTT corp. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.object_storage import test_container_sync
from tempest import config
from tempest import test
CONF = config.CONF
# This test can be quite long to run due to its
# dependency on container-sync process running interval.
# You can obviously reduce the container-sync interval in the
# container-server configuration.
class ContainerSyncMiddlewareTest(test_container_sync.ContainerSyncTest):
@classmethod
def resource_setup(cls):
super(ContainerSyncMiddlewareTest, cls).resource_setup()
# Set container-sync-realms.conf info
cls.realm_name = CONF.object_storage.realm_name
cls.key = 'sync_key'
cls.cluster_name = CONF.object_storage.cluster_name
@test.attr(type='slow')
@test.requires_ext(extension='container_sync', service='object')
def test_container_synchronization(self):
def make_headers(cont, cont_client):
# tell first container to synchronize to a second
account_name = cont_client.base_url.split('/')[-1]
headers = {'X-Container-Sync-Key': "%s" % (self.key),
'X-Container-Sync-To': "//%s/%s/%s/%s" %
(self.realm_name, self.cluster_name,
str(account_name), str(cont))}
return headers
self._test_container_synchronization(make_headers)
|
Vaidyanath/tempest
|
tempest/api/object_storage/test_container_sync_middleware.py
|
Python
|
apache-2.0
| 1,985 | 0 |
from __future__ import absolute_import
# Copyright (c) 2010-2015 openpyxl
from .formatting import ConditionalFormatting
from .rule import Rule
|
saukrIppl/seahub
|
thirdpart/openpyxl-2.3.0-py2.7.egg/openpyxl/formatting/__init__.py
|
Python
|
apache-2.0
| 144 | 0 |
import json
import time
import pytest
from anchore_engine.auth.common import (
get_creds_by_registry,
get_docker_registry_userpw,
registry_record_matches,
)
_test_username = "tonystark"
_test_password = "potts"
_test_registry_meta = {
"authorizationToken": "{}:{}".format(_test_username, _test_password)
}
_record_ecr = {
"registry_type": "awsecr",
"registry_meta": json.dumps(_test_registry_meta),
}
_record_not_ecr = {
"registry_type": "other-registry",
"registry_user": _test_username,
"registry_pass": _test_password,
}
_record_ecr_inactive = {
"registry": "docker.io",
"record_state_key": "inactive",
"registry_type": "awsecr",
"registry_meta": json.dumps(_test_registry_meta),
"registry_verify": True,
}
_record_ecr_unavailable = {
"registry": "docker.io",
"record_state_key": "inactive",
"record_state_val": time.time(), # note: technically this could yield nondeterministic results
"registry_type": "awsecr",
"registry_meta": json.dumps(_test_registry_meta),
"registry_verify": True,
}
@pytest.mark.parametrize("registry_record", [_record_ecr, _record_not_ecr])
def test_get_docker_registry_userpw(registry_record):
result = get_docker_registry_userpw(registry_record)
assert result == (_test_username, _test_password)
def test_get_docker_registry_userpw_bad_json():
record_ecr_bad_json = {
"registry_type": "awsecr",
"registry_meta": "this-is-not-valid-json!}",
}
with pytest.raises(Exception):
get_docker_registry_userpw(record_ecr_bad_json)
@pytest.mark.parametrize(
"registry,repository,registry_creds,expected",
[
("docker.io", "library/node", None, (None, None, None)),
(
"docker.io",
"library/node",
[_record_ecr_inactive],
(_test_username, _test_password, True),
),
],
)
def test_get_creds_by_registry(registry, repository, registry_creds, expected):
result = get_creds_by_registry(registry, repository, registry_creds)
assert result == expected
def test_get_creds_by_registry_unavailable():
with pytest.raises(Exception):
get_creds_by_registry("docker.io", "library/node", [_record_ecr_unavailable])
@pytest.mark.parametrize(
"registry_record_str,registry,repository",
[
("docker.io/library/centos", "docker.io", "library/centos"),
("docker.io", "docker.io", "centos"),
("docker.io", "docker.io", "myuser/myrepo"),
],
)
def test_registry_record_matches_exact(registry_record_str, registry, repository):
assert registry_record_matches(registry_record_str, registry, repository)
@pytest.mark.parametrize(
"registry_record_str,registry,repository",
[
("docker.io/library/*", "docker.io", "library/centos"),
("docker.io/*", "docker.io", "library/centos"),
("gcr.io/myproject/*", "gcr.io", "myproject/myuser/myrepo"),
],
)
def test_registry_record_matches_wildcard(registry_record_str, registry, repository):
assert registry_record_matches(registry_record_str, registry, repository)
@pytest.mark.parametrize(
"registry_record_str,registry,repository",
[
("docker.io", "gcr.io", "myproject/myuser"),
("docker.io/*", "gcr.io", "myproject/myuser"),
("docker.io/library/*", "docker.io", "myuser/myrepo"),
("docker.io/myuser/myrepo", "docker.io", "myuser/myrepo2"),
],
)
def test_registry_record_matches_non(registry_record_str, registry, repository):
assert not registry_record_matches(registry_record_str, registry, repository)
|
anchore/anchore-engine
|
tests/unit/anchore_engine/auth/test_common.py
|
Python
|
apache-2.0
| 3,612 | 0.001661 |
#!/usr/bin/env python3
"""
sumton.py : compute the sum of 0 through N
Copyright (C) Simon D. Levy 2016
This file is part of ISCPP.
ISCPP is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
This code is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this code. If not, see <http://www.gnu.org/licenses/>.
"""
def sumToN(n):
res = 0
for k in range(0,n+1):
res = res + k
return res
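# Added cross-check (not part of the original lesson): Gauss's closed form gives the
# same value, sum(0..n) == n * (n + 1) // 2, so for example sumToN(5) == 5 * 6 // 2 == 15.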
if __name__ == "__main__":
"""
Example
"""
print(sumToN(5))
|
simondlevy/ISCPP
|
Chapter08/sumton.py
|
Python
|
gpl-3.0
| 926 | 0.006479 |
#!/usr/bin/env python2
# This file is part of Archivematica.
#
# Copyright 2010-2013 Artefactual Systems Inc. <http://artefactual.com>
#
# Archivematica is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Archivematica is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Archivematica. If not, see <http://www.gnu.org/licenses/>.
# @package Archivematica
# @subpackage archivematicaClientScript
# @author Joseph Perry <joseph@artefactual.com>
from __future__ import print_function
import os
import sys
import shutil
import django
django.setup()
# dashboard
from main.models import Job, SIP
# archivematicaCommon
from custom_handlers import get_script_logger
from databaseFunctions import createSIP
if __name__ == '__main__':
logger = get_script_logger("archivematica.mcp.client.generateDIPFromAIPGenerateDIP")
# COPY THE METS FILE
# Move the DIP Directory
fauxUUID = sys.argv[1]
unitPath = sys.argv[2]
date = sys.argv[3]
basename = os.path.basename(unitPath[:-1])
uuidLen = 36
originalSIPName = basename[:-(uuidLen+1)*2]
originalSIPUUID = basename[:-(uuidLen+1)][-uuidLen:]
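    # Added reading of the slicing above (an inference, not from the original docs): the
    # unit directory name appears to end in two 36-character UUIDs, i.e. something like
    # "<originalSIPName>-<originalSIPUUID>-<currentUUID>", so stripping "-<uuid>" twice
    # leaves the SIP name, and the middle chunk is the original AIP's UUID.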
METSPath = os.path.join(unitPath, "metadata/submissionDocumentation/data/", "METS.%s.xml" % (originalSIPUUID))
if not os.path.isfile(METSPath):
print("Mets file not found: ", METSPath, file=sys.stderr)
exit(-1)
# move mets to DIP
src = METSPath
dst = os.path.join(unitPath, "DIP", os.path.basename(METSPath))
shutil.move(src, dst)
# Move DIP
src = os.path.join(unitPath, "DIP")
dst = os.path.join("/var/archivematica/sharedDirectory/watchedDirectories/uploadDIP/", originalSIPName + "-" + originalSIPUUID)
shutil.move(src, dst)
try:
SIP.objects.get(uuid=originalSIPUUID)
except SIP.DoesNotExist:
# otherwise doesn't appear in dashboard
createSIP(unitPath, UUID=originalSIPUUID)
Job.objects.create(jobtype="Hack to make DIP Jobs appear",
directory=unitPath,
sip_id=originalSIPUUID,
currentstep="Completed successfully",
unittype="unitSIP",
microservicegroup="Upload DIP")
|
sevein/archivematica
|
src/MCPClient/lib/clientScripts/generateDIPFromAIPGenerateDIP.py
|
Python
|
agpl-3.0
| 2,681 | 0.002611 |
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import json
import frappe
from frappe import _, throw
from frappe.desk.form.assign_to import clear, close_all_assignments
from frappe.model.mapper import get_mapped_doc
from frappe.utils import add_days, cstr, date_diff, get_link_to_form, getdate, today, flt
from frappe.utils.nestedset import NestedSet
class CircularReferenceError(frappe.ValidationError): pass
class EndDateCannotBeGreaterThanProjectEndDateError(frappe.ValidationError): pass
class Task(NestedSet):
nsm_parent_field = 'parent_task'
def get_feed(self):
return '{0}: {1}'.format(_(self.status), self.subject)
def get_customer_details(self):
cust = frappe.db.sql("select customer_name from `tabCustomer` where name=%s", self.customer)
if cust:
ret = {'customer_name': cust and cust[0][0] or ''}
return ret
def validate(self):
self.validate_dates()
self.validate_parent_expected_end_date()
self.validate_parent_project_dates()
self.validate_progress()
self.validate_status()
self.update_depends_on()
self.validate_dependencies_for_template_task()
def validate_dates(self):
if self.exp_start_date and self.exp_end_date and getdate(self.exp_start_date) > getdate(self.exp_end_date):
frappe.throw(_("{0} can not be greater than {1}").format(frappe.bold("Expected Start Date"), \
frappe.bold("Expected End Date")))
if self.act_start_date and self.act_end_date and getdate(self.act_start_date) > getdate(self.act_end_date):
frappe.throw(_("{0} can not be greater than {1}").format(frappe.bold("Actual Start Date"), \
frappe.bold("Actual End Date")))
def validate_parent_expected_end_date(self):
if self.parent_task:
parent_exp_end_date = frappe.db.get_value("Task", self.parent_task, "exp_end_date")
if parent_exp_end_date and getdate(self.get("exp_end_date")) > getdate(parent_exp_end_date):
frappe.throw(_("Expected End Date should be less than or equal to parent task's Expected End Date {0}.").format(getdate(parent_exp_end_date)))
def validate_parent_project_dates(self):
if not self.project or frappe.flags.in_test:
return
expected_end_date = frappe.db.get_value("Project", self.project, "expected_end_date")
if expected_end_date:
validate_project_dates(getdate(expected_end_date), self, "exp_start_date", "exp_end_date", "Expected")
validate_project_dates(getdate(expected_end_date), self, "act_start_date", "act_end_date", "Actual")
def validate_status(self):
if self.is_template and self.status != "Template":
self.status = "Template"
if self.status!=self.get_db_value("status") and self.status == "Completed":
for d in self.depends_on:
if frappe.db.get_value("Task", d.task, "status") not in ("Completed", "Cancelled"):
frappe.throw(_("Cannot complete task {0} as its dependant task {1} are not ccompleted / cancelled.").format(frappe.bold(self.name), frappe.bold(d.task)))
close_all_assignments(self.doctype, self.name)
def validate_progress(self):
if flt(self.progress or 0) > 100:
frappe.throw(_("Progress % for a task cannot be more than 100."))
if flt(self.progress) == 100:
self.status = 'Completed'
if self.status == 'Completed':
self.progress = 100
def validate_dependencies_for_template_task(self):
if self.is_template:
self.validate_parent_template_task()
self.validate_depends_on_tasks()
def validate_parent_template_task(self):
if self.parent_task:
if not frappe.db.get_value("Task", self.parent_task, "is_template"):
parent_task_format = """<a href="#Form/Task/{0}">{0}</a>""".format(self.parent_task)
frappe.throw(_("Parent Task {0} is not a Template Task").format(parent_task_format))
def validate_depends_on_tasks(self):
if self.depends_on:
for task in self.depends_on:
if not frappe.db.get_value("Task", task.task, "is_template"):
dependent_task_format = """<a href="#Form/Task/{0}">{0}</a>""".format(task.task)
frappe.throw(_("Dependent Task {0} is not a Template Task").format(dependent_task_format))
def update_depends_on(self):
depends_on_tasks = self.depends_on_tasks or ""
for d in self.depends_on:
if d.task and d.task not in depends_on_tasks:
depends_on_tasks += d.task + ","
self.depends_on_tasks = depends_on_tasks
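		# Added note: this accumulates a comma-separated string, e.g. two dependency rows
		# "TASK-0001" and "TASK-0002" yield depends_on_tasks == "TASK-0001,TASK-0002,"
		# (the trailing comma is kept by the loop above).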
def update_nsm_model(self):
frappe.utils.nestedset.update_nsm(self)
def on_update(self):
self.update_nsm_model()
self.check_recursion()
self.reschedule_dependent_tasks()
self.update_project()
self.unassign_todo()
self.populate_depends_on()
def unassign_todo(self):
if self.status == "Completed":
close_all_assignments(self.doctype, self.name)
if self.status == "Cancelled":
clear(self.doctype, self.name)
def update_total_expense_claim(self):
self.total_expense_claim = frappe.db.sql("""select sum(total_sanctioned_amount) from `tabExpense Claim`
where project = %s and task = %s and docstatus=1""",(self.project, self.name))[0][0]
def update_time_and_costing(self):
tl = frappe.db.sql("""select min(from_time) as start_date, max(to_time) as end_date,
sum(billing_amount) as total_billing_amount, sum(costing_amount) as total_costing_amount,
sum(hours) as time from `tabTimesheet Detail` where task = %s and docstatus=1"""
,self.name, as_dict=1)[0]
if self.status == "Open":
self.status = "Working"
self.total_costing_amount= tl.total_costing_amount
self.total_billing_amount= tl.total_billing_amount
self.actual_time= tl.time
self.act_start_date= tl.start_date
self.act_end_date= tl.end_date
def update_project(self):
if self.project and not self.flags.from_project:
frappe.get_cached_doc("Project", self.project).update_project()
def check_recursion(self):
if self.flags.ignore_recursion_check: return
check_list = [['task', 'parent'], ['parent', 'task']]
for d in check_list:
task_list, count = [self.name], 0
while (len(task_list) > count ):
tasks = frappe.db.sql(" select %s from `tabTask Depends On` where %s = %s " %
(d[0], d[1], '%s'), cstr(task_list[count]))
count = count + 1
for b in tasks:
if b[0] == self.name:
frappe.throw(_("Circular Reference Error"), CircularReferenceError)
if b[0]:
task_list.append(b[0])
if count == 15:
break
def reschedule_dependent_tasks(self):
end_date = self.exp_end_date or self.act_end_date
if end_date:
for task_name in frappe.db.sql("""
select name from `tabTask` as parent
where parent.project = %(project)s
and parent.name in (
select parent from `tabTask Depends On` as child
where child.task = %(task)s and child.project = %(project)s)
""", {'project': self.project, 'task':self.name }, as_dict=1):
task = frappe.get_doc("Task", task_name.name)
if task.exp_start_date and task.exp_end_date and task.exp_start_date < getdate(end_date) and task.status == "Open":
task_duration = date_diff(task.exp_end_date, task.exp_start_date)
task.exp_start_date = add_days(end_date, 1)
task.exp_end_date = add_days(task.exp_start_date, task_duration)
task.flags.ignore_recursion_check = True
task.save()
def has_webform_permission(self):
project_user = frappe.db.get_value("Project User", {"parent": self.project, "user":frappe.session.user} , "user")
if project_user:
return True
def populate_depends_on(self):
if self.parent_task:
parent = frappe.get_doc('Task', self.parent_task)
if self.name not in [row.task for row in parent.depends_on]:
parent.append("depends_on", {
"doctype": "Task Depends On",
"task": self.name,
"subject": self.subject
})
parent.save()
def on_trash(self):
if check_if_child_exists(self.name):
throw(_("Child Task exists for this Task. You can not delete this Task."))
self.update_nsm_model()
def after_delete(self):
self.update_project()
def update_status(self):
if self.status not in ('Cancelled', 'Completed') and self.exp_end_date:
from datetime import datetime
if self.exp_end_date < datetime.now().date():
self.db_set('status', 'Overdue', update_modified=False)
self.update_project()
@frappe.whitelist()
def check_if_child_exists(name):
child_tasks = frappe.get_all("Task", filters={"parent_task": name})
child_tasks = [get_link_to_form("Task", task.name) for task in child_tasks]
return child_tasks
@frappe.whitelist()
@frappe.validate_and_sanitize_search_inputs
def get_project(doctype, txt, searchfield, start, page_len, filters):
from erpnext.controllers.queries import get_match_cond
meta = frappe.get_meta(doctype)
searchfields = meta.get_search_fields()
search_columns = ", " + ", ".join(searchfields) if searchfields else ''
search_cond = " or " + " or ".join([field + " like %(txt)s" for field in searchfields])
return frappe.db.sql(""" select name {search_columns} from `tabProject`
where %(key)s like %(txt)s
%(mcond)s
{search_condition}
order by name
limit %(start)s, %(page_len)s""".format(search_columns = search_columns,
search_condition=search_cond), {
'key': searchfield,
'txt': '%' + txt + '%',
'mcond':get_match_cond(doctype),
'start': start,
'page_len': page_len
})
@frappe.whitelist()
def set_multiple_status(names, status):
names = json.loads(names)
for name in names:
task = frappe.get_doc("Task", name)
task.status = status
task.save()
def set_tasks_as_overdue():
tasks = frappe.get_all("Task", filters={"status": ["not in", ["Cancelled", "Completed"]]}, fields=["name", "status", "review_date"])
for task in tasks:
if task.status == "Pending Review":
if getdate(task.review_date) > getdate(today()):
continue
frappe.get_doc("Task", task.name).update_status()
@frappe.whitelist()
def make_timesheet(source_name, target_doc=None, ignore_permissions=False):
def set_missing_values(source, target):
target.append("time_logs", {
"hours": source.actual_time,
"completed": source.status == "Completed",
"project": source.project,
"task": source.name
})
doclist = get_mapped_doc("Task", source_name, {
"Task": {
"doctype": "Timesheet"
}
}, target_doc, postprocess=set_missing_values, ignore_permissions=ignore_permissions)
return doclist
@frappe.whitelist()
def get_children(doctype, parent, task=None, project=None, is_root=False):
filters = [['docstatus', '<', '2']]
if task:
filters.append(['parent_task', '=', task])
elif parent and not is_root:
# via expand child
filters.append(['parent_task', '=', parent])
else:
filters.append(['ifnull(`parent_task`, "")', '=', ''])
if project:
filters.append(['project', '=', project])
tasks = frappe.get_list(doctype, fields=[
'name as value',
'subject as title',
'is_group as expandable'
], filters=filters, order_by='name')
# return tasks
return tasks
@frappe.whitelist()
def add_node():
from frappe.desk.treeview import make_tree_args
args = frappe.form_dict
args.update({
"name_field": "subject"
})
args = make_tree_args(**args)
if args.parent_task == 'All Tasks' or args.parent_task == args.project:
args.parent_task = None
frappe.get_doc(args).insert()
@frappe.whitelist()
def add_multiple_tasks(data, parent):
data = json.loads(data)
new_doc = {'doctype': 'Task', 'parent_task': parent if parent!="All Tasks" else ""}
new_doc['project'] = frappe.db.get_value('Task', {"name": parent}, 'project') or ""
for d in data:
if not d.get("subject"): continue
new_doc['subject'] = d.get("subject")
new_task = frappe.get_doc(new_doc)
new_task.insert()
def on_doctype_update():
frappe.db.add_index("Task", ["lft", "rgt"])
def validate_project_dates(project_end_date, task, task_start, task_end, actual_or_expected_date):
if task.get(task_start) and date_diff(project_end_date, getdate(task.get(task_start))) < 0:
frappe.throw(_("Task's {0} Start Date cannot be after Project's End Date.").format(actual_or_expected_date))
if task.get(task_end) and date_diff(project_end_date, getdate(task.get(task_end))) < 0:
frappe.throw(_("Task's {0} End Date cannot be after Project's End Date.").format(actual_or_expected_date))
|
ESS-LLP/erpnext
|
erpnext/projects/doctype/task/task.py
|
Python
|
gpl-3.0
| 12,175 | 0.026201 |
#!/usr/bin/env python
# encoding: utf-8
"""
Show how to use `dur` and `delay` parameters of play() and out()
methods to sequence events over time.
"""
from pyo import *
import random
s = Server(duplex=0).boot()
num = 70
freqs = [random.uniform(100, 1000) for i in range(num)]
start1 = [i * 0.5 for i in range(num)]
fade1 = Fader([1] * num, 1, 5, mul=0.03).play(dur=5, delay=start1)
a = SineLoop(freqs, feedback=0.05, mul=fade1).out(dur=5, delay=start1)
start2 = 30
dur2 = 40
snds = [
"../snds/alum1.wav",
"../snds/alum2.wav",
"../snds/alum3.wav",
"../snds/alum4.wav",
]
tabs = SndTable(snds)
fade2 = Fader(0.05, 10, dur2, mul=0.7).play(dur=dur2, delay=start2)
b = Beat(time=0.125, w1=[90, 30, 30, 20], w2=[30, 90, 50, 40], w3=[0, 30, 30, 40], poly=1).play(dur=dur2, delay=start2)
out = TrigEnv(b, tabs, b["dur"], mul=b["amp"] * fade2).out(dur=dur2, delay=start2)
start3 = 45
dur3 = 30
fade3 = Fader(15, 15, dur3, mul=0.02).play(dur=dur3, delay=start3)
fm = FM(carrier=[149, 100, 151, 50] * 3, ratio=[0.2499, 0.501, 0.75003], index=10, mul=fade3).out(
dur=dur3, delay=start3
)
s.gui(locals())
|
belangeo/pyo
|
pyo/examples/sequencing/01_starttime_duration.py
|
Python
|
lgpl-3.0
| 1,118 | 0.002683 |
# -*- coding: utf-8 -*-
from Headset import Headset
import logging
import time
puerto = 'COM3'
headset = Headset(logging.INFO)
try:
headset.connect(puerto, 115200)
except Exception, e:
raise e
print "Is conected? " + str(headset.isConnected())
print "-----------------------------------------"
headset.startReading(persist_data=True)
time.sleep(5)
headset.stopReading()
headset.closePort()
print "-----------------------------------------"
print "Is conected? " + str(headset.isConnected())
print headset.getStatus()
|
emotrix/Emotrix
|
emotrix/HeadsetTester.py
|
Python
|
bsd-2-clause
| 529 | 0 |
import sublime
import sublime_plugin
import re
import os
import datetime
TMLP_DIR = 'templates'
KEY_SYNTAX = 'syntax'
KEY_FILE_EXT = 'extension'
IS_GTE_ST3 = int(sublime.version()) >= 3000
PACKAGE_NAME = 'new-file-pro'
PACKAGES_PATH = sublime.packages_path()
BASE_PATH = os.path.abspath(os.path.dirname(__file__))
class NewFileBase(sublime_plugin.WindowCommand):
def __init__(self, window):
super(NewFileBase, self).__init__(window)
def appendFileExtension(self, name, t):
tmp = name.split('.')
length = len(tmp)
s_ext = tmp[length - 1]
exts = {'css': 'css', 'html': 'html', 'js': 'js', 'json': 'json', 'php': 'php', 'php-class': 'php', 'php-interface': 'php', 'xml':'xml', 'python': 'python', 'ruby': 'ruby'}
try:
t_ext = exts[t]
if (s_ext == t_ext and length == 1) or s_ext != t_ext:
return name + '.' + t_ext
except KeyError:
pass
        return name
def appendPHPExtension(self, name):
t = name.split('.')
length = len(t)
ext = t[length - 1]
if ext != "php":
return name + '.php'
        return name
def get_code(self, type='text' ):
code = ''
file_name = "%s.tmpl" % type
isIOError = False
if IS_GTE_ST3:
tmpl_dir = 'Packages/' + PACKAGE_NAME + '/' + TMLP_DIR + '/'
user_tmpl_dir = 'Packages/User/' + PACKAGE_NAME + '/' + TMLP_DIR + '/'
else:
tmpl_dir = os.path.join(PACKAGES_PATH, PACKAGE_NAME, TMLP_DIR)
user_tmpl_dir = os.path.join(PACKAGES_PATH, 'User', PACKAGE_NAME, TMLP_DIR)
self.user_tmpl_path = os.path.join(user_tmpl_dir, file_name)
self.tmpl_path = os.path.join(tmpl_dir, file_name)
if IS_GTE_ST3:
try:
code = sublime.load_resource(self.user_tmpl_path)
except IOError:
try:
code = sublime.load_resource(self.tmpl_path)
except IOError:
isIOError = True
else:
if os.path.isfile(self.user_tmpl_path):
code = self.open_file(self.user_tmpl_path)
elif os.path.isfile(self.tmpl_path):
code = self.open_file(self.tmpl_path)
else:
isIOError = True
if isIOError:
sublime.message_dialog('[Warning] No such file: ' + self.tmpl_path + ' or ' + self.user_tmpl_path)
return self.format_tag(code)
def format_tag(self, code):
win = sublime.active_window()
code = code.replace('\r', '') # replace \r\n -> \n
# format
settings = self.get_settings()
format = settings.get('date_format', '%Y-%m-%d')
date = datetime.datetime.now().strftime(format)
if not IS_GTE_ST3:
code = code.decode('utf8') # for st2 && Chinese characters
code = code.replace('${date}', date)
attr = settings.get('attr', {})
for key in attr:
code = code.replace('${%s}' % key, attr.get(key, ''))
if settings.get('enable_project_variables', False) and hasattr(win, 'extract_variables'):
variables = win.extract_variables()
for key in ['project_base_name', 'project_path', 'platform']:
code = code.replace('${%s}' % key, variables.get(key, ''))
code = re.sub(r"(?<!\\)\${(?!\d)", '\${', code)
return code
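    # Added note (not from the original plugin docs): format_tag() above does plain
    # ${...} substitution, so a template line such as
    #     Created ${date} by ${author}
    # gets ${date} replaced using date_format and ${author} filled from the "attr"
    # map in the settings ("author" here is a hypothetical key a user could define).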
def open_file(self, path, mode='r'):
fp = open(path, mode)
code = fp.read()
fp.close()
return code
def get_settings(self, type=None):
settings = sublime.load_settings(PACKAGE_NAME + '.sublime-settings')
if not type:
return settings
opts = settings.get(type, [])
return opts
|
KevinHoo/new-file-pro
|
commands/NewFileBase.py
|
Python
|
gpl-3.0
| 3,250 | 0.029231 |
#!/usr/bin/env python
"""A QR Move SCU application.
For sending Query/Retrieve (QR) C-MOVE requests to a QR Move SCP.
"""
import argparse
import sys
from pynetdicom import (
AE,
evt,
QueryRetrievePresentationContexts,
AllStoragePresentationContexts,
)
from pynetdicom.apps.common import setup_logging, create_dataset, handle_store
from pynetdicom._globals import ALL_TRANSFER_SYNTAXES, DEFAULT_MAX_LENGTH
from pynetdicom.pdu_primitives import SOPClassExtendedNegotiation
from pynetdicom.sop_class import (
PatientRootQueryRetrieveInformationModelMove,
StudyRootQueryRetrieveInformationModelMove,
PatientStudyOnlyQueryRetrieveInformationModelMove,
)
__version__ = "0.4.0"
def _setup_argparser():
"""Setup the command line arguments"""
# Description
parser = argparse.ArgumentParser(
description=(
"The movescu application implements a Service Class User (SCU) "
"for the Query/Retrieve (QR) Service Class and (optionally) a "
"Storage SCP for the Storage Service Class. movescu supports "
"retrieve functionality using the C-MOVE message. It sends query "
"keys to an SCP and waits for a response. It will accept "
"associations for the purpose of receiving images sent as a "
"result of the C-MOVE request. movescu can initiate the transfer "
"of images to a third party or can retrieve images to itself "
"(note: the use of the term 'move' is a misnomer, the C-MOVE "
"operation performs a SOP Instance copy only)"
),
usage="movescu [options] addr port",
)
# Parameters
req_opts = parser.add_argument_group("Parameters")
req_opts.add_argument(
"addr", help="TCP/IP address or hostname of DICOM peer", type=str
)
req_opts.add_argument("port", help="TCP/IP port number of peer", type=int)
# General Options
gen_opts = parser.add_argument_group("General Options")
gen_opts.add_argument(
"--version", help="print version information and exit", action="store_true"
)
output = gen_opts.add_mutually_exclusive_group()
output.add_argument(
"-q",
"--quiet",
help="quiet mode, print no warnings and errors",
action="store_const",
dest="log_type",
const="q",
)
output.add_argument(
"-v",
"--verbose",
help="verbose mode, print processing details",
action="store_const",
dest="log_type",
const="v",
)
output.add_argument(
"-d",
"--debug",
help="debug mode, print debug information",
action="store_const",
dest="log_type",
const="d",
)
gen_opts.add_argument(
"-ll",
"--log-level",
metavar="[l]",
help=("use level l for the logger (critical, error, warn, info, debug)"),
type=str,
choices=["critical", "error", "warn", "info", "debug"],
)
parser.set_defaults(log_type="v")
# Network Options
net_opts = parser.add_argument_group("Network Options")
net_opts.add_argument(
"-aet",
"--calling-aet",
metavar="[a]etitle",
help="set my calling AE title (default: MOVESCU)",
type=str,
default="MOVESCU",
)
net_opts.add_argument(
"-aec",
"--called-aet",
metavar="[a]etitle",
help="set called AE title of peer (default: ANY-SCP)",
type=str,
default="ANY-SCP",
)
net_opts.add_argument(
"-aem",
"--move-aet",
metavar="[a]etitle",
help="set move destination AE title (default: STORESCP)",
type=str,
default="STORESCP",
)
net_opts.add_argument(
"-ta",
"--acse-timeout",
metavar="[s]econds",
help="timeout for ACSE messages (default: 30 s)",
type=float,
default=30,
)
net_opts.add_argument(
"-td",
"--dimse-timeout",
metavar="[s]econds",
help="timeout for DIMSE messages (default: 30 s)",
type=float,
default=30,
)
net_opts.add_argument(
"-tn",
"--network-timeout",
metavar="[s]econds",
help="timeout for the network (default: 30 s)",
type=float,
default=30,
)
net_opts.add_argument(
"-pdu",
"--max-pdu",
metavar="[n]umber of bytes",
help=(
f"set max receive pdu to n bytes (0 for unlimited, "
f"default: {DEFAULT_MAX_LENGTH})"
),
type=int,
default=DEFAULT_MAX_LENGTH,
)
# Query information model choices
qr_group = parser.add_argument_group("Query Information Model Options")
qr_model = qr_group.add_mutually_exclusive_group()
qr_model.add_argument(
"-P",
"--patient",
help="use patient root information model (default)",
action="store_true",
)
qr_model.add_argument(
"-S", "--study", help="use study root information model", action="store_true"
)
qr_model.add_argument(
"-O",
"--psonly",
help="use patient/study only information model",
action="store_true",
)
# Query Options
qr_query = parser.add_argument_group("Query Options")
qr_query.add_argument(
"-k",
"--keyword",
metavar="[k]eyword: (gggg,eeee)=str, keyword=str",
help=(
"add or override a query element using either an element tag as "
"(group,element) or the element's keyword (such as PatientName)"
),
type=str,
action="append",
)
qr_query.add_argument(
"-f",
"--file",
metavar="path to [f]ile",
help=(
"use a DICOM file as the query dataset, if "
"used with -k then the elements will be added to or overwrite "
"those present in the file"
),
type=str,
)
# Store SCP options
store_group = parser.add_argument_group("Storage SCP Options")
store_group.add_argument(
"--store",
help="start a Storage SCP that can be used as the move destination",
action="store_true",
default=False,
)
store_group.add_argument(
"--store-port",
metavar="[p]ort",
help="the port number to use for the Storage SCP",
type=int,
default=11113,
)
store_group.add_argument(
"--store-aet",
metavar="[a]etitle",
help="the AE title to use for the Storage SCP",
type=str,
default="STORESCP",
)
# Extended Negotiation Options
ext_neg = parser.add_argument_group("Extended Negotiation Options")
ext_neg.add_argument(
"--relational-retrieval",
help="request the use of relational retrieval",
action="store_true",
)
ext_neg.add_argument(
"--enhanced-conversion",
help="request the use of enhanced multi-frame image conversion",
action="store_true",
)
# Output Options
out_opts = parser.add_argument_group("Output Options")
out_opts.add_argument(
"-od",
"--output-directory",
metavar="[d]irectory",
help="write received objects to directory d",
type=str,
)
out_opts.add_argument(
"--ignore", help="receive data but don't store it", action="store_true"
)
ns = parser.parse_args()
if ns.version:
pass
elif not bool(ns.file) and not bool(ns.keyword):
parser.error("-f and/or -k must be specified")
return ns
def main(args=None):
"""Run the application."""
if args is not None:
sys.argv = args
args = _setup_argparser()
if args.version:
print(f"movescu.py v{__version__}")
sys.exit()
APP_LOGGER = setup_logging(args, "movescu")
APP_LOGGER.debug(f"movescu.py v{__version__}")
APP_LOGGER.debug("")
# Create query (identifier) dataset
try:
        # If you're looking at this to see how QR Move works then `identifier`
# is a pydicom Dataset instance with your query keys, e.g.:
# identifier = Dataset()
# identifier.QueryRetrieveLevel = 'PATIENT'
# identifier.PatientName = '*'
identifier = create_dataset(args, APP_LOGGER)
except Exception as exc:
APP_LOGGER.exception(exc)
sys.exit(1)
# Create application entity
ae = AE()
# Start the Store SCP (optional)
scp = None
if args.store:
transfer_syntax = ALL_TRANSFER_SYNTAXES[:]
store_handlers = [(evt.EVT_C_STORE, handle_store, [args, APP_LOGGER])]
ae.ae_title = args.store_aet
for cx in AllStoragePresentationContexts:
ae.add_supported_context(cx.abstract_syntax, transfer_syntax)
scp = ae.start_server(
("localhost", args.store_port), block=False, evt_handlers=store_handlers
)
ae.ae_title = args.calling_aet
ae.acse_timeout = args.acse_timeout
ae.dimse_timeout = args.dimse_timeout
ae.network_timeout = args.network_timeout
ae.requested_contexts = QueryRetrievePresentationContexts
ae.supported_contexts = []
# Query/Retrieve Information Models
if args.study:
query_model = StudyRootQueryRetrieveInformationModelMove
elif args.psonly:
query_model = PatientStudyOnlyQueryRetrieveInformationModelMove
else:
query_model = PatientRootQueryRetrieveInformationModelMove
# Extended Negotiation
ext_neg = []
ext_opts = [args.relational_retrieval, args.enhanced_conversion]
if any(ext_opts):
app_info = b""
for option in ext_opts:
app_info += b"\x01" if option else b"\x00"
item = SOPClassExtendedNegotiation()
item.sop_class_uid = query_model
item.service_class_application_information = app_info
ext_neg = [item]
# Request association with remote AE
assoc = ae.associate(
args.addr,
args.port,
ae_title=args.called_aet,
max_pdu=args.max_pdu,
ext_neg=ext_neg,
)
if assoc.is_established:
# Send query
move_aet = args.move_aet or args.calling_aet
responses = assoc.send_c_move(identifier, move_aet, query_model)
for (status, rsp_identifier) in responses:
# If `status.Status` is one of the 'Pending' statuses then
# `rsp_identifier` is the C-MOVE response's Identifier dataset
if status and status.Status in [0xFF00, 0xFF01]:
# `rsp_identifier` is a pydicom Dataset containing a query
# response. You may want to do something interesting here...
pass
assoc.release()
_EXIT_VALUE = 0
else:
_EXIT_VALUE = 1
# Shutdown the Storage SCP (if used)
if scp:
scp.shutdown()
sys.exit(_EXIT_VALUE)
if __name__ == "__main__":
main()
|
scaramallion/pynetdicom
|
pynetdicom/apps/movescu/movescu.py
|
Python
|
mit
| 11,001 | 0.000364 |
from sys import exit
from random import randint
def death():
quips = ["You died. You kinda suck at this.",
"Your mom would be proud. If she were smarter.",
"Such a luser.",
"I have a small puppy that's better at this."]
print quips[randint(0,len(quips)-1)]
exit(1)
def princess_lives_here():
print "You see a beatiful Princess with a shiny crown."
print "She offers you some cake."
eat_it = raw_input("> ")
if eat_it == "eat it":
print "You explode like a pinata full of frogs."
print "The Princess cackles and eats the frogs. Yum!"
return 'death'
elif eat_it == "do not eat it":
print "She throws the cake at you and it curs off your head."
print "The last thing you see is her munching on your torso. Yum!"
return 'death'
elif eat_it == "make her eat it":
print "The Princess screams as you cram the cake in her mouth."
print "Then she smiles and cries and thanks you for saving her."
print "She points to a tiny door and says, 'The Koi needs cake too.'"
print "She gives you the very last bit of cake and ahoves you in."
return 'gold_koi_pond'
else:
print "The princess looks at you confused and just points at the cake."
return 'princess_lives_here'
def gold_koi_pond():
print "There is a garden with a koi pond in the center."
print "You walk close and see a massive fin pole out."
print "You peek in and a creepy looking huge Koi stares at you."
print "It opens its mouth waiting for food."
feed_it = raw_input("> ")
if feed_it == "feed it":
print "The Koi jumps up and rather than eating the cake, eats your arm."
print "You fall in and the Koi shruge than eats you."
print "You are then pooped out sometime later."
return 'death'
elif feed_it == "do not feed it":
print "The Koi grimaces, then thrashes around for a second."
print "It rushes to the other end of the pond, braces against the wall..."
print "then it *lunges* out of the water, up in the air and over your"
print "entire body, cake and all."
print "You are then poped out a week later."
return 'death'
elif feed_it == "throw it in":
print "The Koi wiggles, then leaps into the air to eat the cake."
print "You can see it's happy, it then grunts, thrashes..."
print "and finally rolls over and poops a magic diamond into the air"
print "at your feet."
return 'bear_with_sword'
else:
print "The Koi gets annoyed and wiggles a bit."
return 'gold_koi_pond'
def bear_with_sword():
print "Puzzled, you are about to pick up the fish poop diamond when"
print "a bear bearing a load bearing sword walks in."
print '"Hey! That\' my diamond! Where\'d you get that!?"'
print "It holds its paw out and looks at you."
give_it = raw_input("> ")
if give_it == "give it":
print "The bear swipes at your hand to grab the diamond and"
print "rips your hand off in the process. It then looks at"
print 'your bloody stump and says, "Oh crap, sorry about that."'
print "It tries to put your hand back on, but you collapse."
print "The last thing you see is the bear shrug and eat you."
return 'death'
elif give_it == "say_no":
print "The bear looks shocked. Nobody ever told a bear"
print "with a broadsword 'no'. It asks, "
print '"Is it because it\'s not a katana? I could go get one!"'
print "It then runs off and now you notice a big iron gate."
print '"Where the hell did that come from?" You say.'
return 'big_iron_gate'
else:
print "The bear look puzzled as to why you'd do that."
return "bear_with_sword"
def big_iron_gate():
print "You walk up to the big iron gate and see there's a handle."
open_it = raw_input("> ")
if open_it == 'open it':
print "You open it and you are free!"
print "There are mountains. And berries! And..."
print "Oh, but then the bear comes with his katana and stabs you."
print '"Who\'s laughing now!? Love this katana."'
return 'death'
else:
print "That doesn't seem sensible. I mean, the door's right there."
return 'big_iron_gate'
ROOMS = {
'death':death,
'princess_lives_here':princess_lives_here,
'gold_koi_pond':gold_koi_pond,
'big_iron_gate':big_iron_gate,
'bear_with_sword':bear_with_sword
}
def runner(map,start):
next = start
while True:
room = map[next]
print "\n------------"
next = room()
runner(ROOMS,'princess_lives_here')
|
veratulips/OMOOC2py
|
_src/exCodesHardWay/ex41.py
|
Python
|
mit
| 4,337 | 0.027669 |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from distutils import util
import os
import re
from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core import client_options as client_options_lib # type: ignore
from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport import mtls # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
from google.api_core import operation as gac_operation # type: ignore
from google.api_core import operation_async # type: ignore
from google.cloud.aiplatform_v1beta1.services.specialist_pool_service import pagers
from google.cloud.aiplatform_v1beta1.types import operation as gca_operation
from google.cloud.aiplatform_v1beta1.types import specialist_pool
from google.cloud.aiplatform_v1beta1.types import specialist_pool as gca_specialist_pool
from google.cloud.aiplatform_v1beta1.types import specialist_pool_service
from google.protobuf import empty_pb2 # type: ignore
from google.protobuf import field_mask_pb2 # type: ignore
from .transports.base import SpecialistPoolServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc import SpecialistPoolServiceGrpcTransport
from .transports.grpc_asyncio import SpecialistPoolServiceGrpcAsyncIOTransport
class SpecialistPoolServiceClientMeta(type):
"""Metaclass for the SpecialistPoolService client.
This provides class-level methods for building and retrieving
support objects (e.g. transport) without polluting the client instance
objects.
"""
_transport_registry = (
OrderedDict()
) # type: Dict[str, Type[SpecialistPoolServiceTransport]]
_transport_registry["grpc"] = SpecialistPoolServiceGrpcTransport
_transport_registry["grpc_asyncio"] = SpecialistPoolServiceGrpcAsyncIOTransport
def get_transport_class(
cls, label: str = None,
) -> Type[SpecialistPoolServiceTransport]:
"""Returns an appropriate transport class.
Args:
label: The name of the desired transport. If none is
provided, then the first transport in the registry is used.
Returns:
The transport class to use.
"""
# If a specific transport is requested, return that one.
if label:
return cls._transport_registry[label]
# No transport is requested; return the default (that is, the first one
# in the dictionary).
return next(iter(cls._transport_registry.values()))
class SpecialistPoolServiceClient(metaclass=SpecialistPoolServiceClientMeta):
"""A service for creating and managing Customer SpecialistPools.
When customers start Data Labeling jobs, they can reuse/create
Specialist Pools to bring their own Specialists to label the
data. Customers can add/remove Managers for the Specialist Pool
on Cloud console, then Managers will get email notifications to
manage Specialists and tasks on CrowdCompute console.
"""
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
"""Converts api endpoint to mTLS endpoint.
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
"*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
Args:
api_endpoint (Optional[str]): the api endpoint to convert.
Returns:
str: converted mTLS api endpoint.
"""
if not api_endpoint:
return api_endpoint
mtls_endpoint_re = re.compile(
r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
)
m = mtls_endpoint_re.match(api_endpoint)
name, mtls, sandbox, googledomain = m.groups()
if mtls or not googledomain:
return api_endpoint
if sandbox:
return api_endpoint.replace(
"sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
)
return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
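    # Illustrative sketch (the endpoint values below are assumptions, shown
    # only to make the regex behaviour above concrete): a plain endpoint maps
    # to its mTLS variant, while an already-mTLS or non-googleapis.com host is
    # returned unchanged.
    #   _get_default_mtls_endpoint("aiplatform.googleapis.com")
    #   # -> "aiplatform.mtls.googleapis.com"
    #   _get_default_mtls_endpoint("example.com")
    #   # -> "example.com"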
DEFAULT_ENDPOINT = "aiplatform.googleapis.com"
DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
DEFAULT_ENDPOINT
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
SpecialistPoolServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_info(info)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
SpecialistPoolServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(filename)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
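    # Hypothetical usage sketch (the key path is an assumption, not part of
    # this module):
    #   client = SpecialistPoolServiceClient.from_service_account_file(
    #       "/path/to/service-account-key.json")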
from_service_account_json = from_service_account_file
@property
def transport(self) -> SpecialistPoolServiceTransport:
"""Returns the transport used by the client instance.
Returns:
SpecialistPoolServiceTransport: The transport used by the client
instance.
"""
return self._transport
@staticmethod
def specialist_pool_path(project: str, location: str, specialist_pool: str,) -> str:
"""Returns a fully-qualified specialist_pool string."""
return "projects/{project}/locations/{location}/specialistPools/{specialist_pool}".format(
project=project, location=location, specialist_pool=specialist_pool,
)
@staticmethod
def parse_specialist_pool_path(path: str) -> Dict[str, str]:
"""Parses a specialist_pool path into its component segments."""
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/specialistPools/(?P<specialist_pool>.+?)$",
path,
)
return m.groupdict() if m else {}
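    # Illustrative round trip with made-up identifiers (the project, location
    # and pool values are assumptions):
    #   p = SpecialistPoolServiceClient.specialist_pool_path(
    #       "my-project", "us-central1", "123")
    #   # -> "projects/my-project/locations/us-central1/specialistPools/123"
    #   SpecialistPoolServiceClient.parse_specialist_pool_path(p)
    #   # -> {"project": "my-project", "location": "us-central1",
    #   #     "specialist_pool": "123"}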
@staticmethod
def common_billing_account_path(billing_account: str,) -> str:
"""Returns a fully-qualified billing_account string."""
return "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
@staticmethod
def parse_common_billing_account_path(path: str) -> Dict[str, str]:
"""Parse a billing_account path into its component segments."""
m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_folder_path(folder: str,) -> str:
"""Returns a fully-qualified folder string."""
return "folders/{folder}".format(folder=folder,)
@staticmethod
def parse_common_folder_path(path: str) -> Dict[str, str]:
"""Parse a folder path into its component segments."""
m = re.match(r"^folders/(?P<folder>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_organization_path(organization: str,) -> str:
"""Returns a fully-qualified organization string."""
return "organizations/{organization}".format(organization=organization,)
@staticmethod
def parse_common_organization_path(path: str) -> Dict[str, str]:
"""Parse a organization path into its component segments."""
m = re.match(r"^organizations/(?P<organization>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_project_path(project: str,) -> str:
"""Returns a fully-qualified project string."""
return "projects/{project}".format(project=project,)
@staticmethod
def parse_common_project_path(path: str) -> Dict[str, str]:
"""Parse a project path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_location_path(project: str, location: str,) -> str:
"""Returns a fully-qualified location string."""
return "projects/{project}/locations/{location}".format(
project=project, location=location,
)
@staticmethod
def parse_common_location_path(path: str) -> Dict[str, str]:
"""Parse a location path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
return m.groupdict() if m else {}
def __init__(
self,
*,
credentials: Optional[ga_credentials.Credentials] = None,
transport: Union[str, SpecialistPoolServiceTransport, None] = None,
client_options: Optional[client_options_lib.ClientOptions] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiates the specialist pool service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, SpecialistPoolServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
if isinstance(client_options, dict):
client_options = client_options_lib.from_dict(client_options)
if client_options is None:
client_options = client_options_lib.ClientOptions()
# Create SSL credentials for mutual TLS if needed.
use_client_cert = bool(
util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
)
client_cert_source_func = None
is_mtls = False
if use_client_cert:
if client_options.client_cert_source:
is_mtls = True
client_cert_source_func = client_options.client_cert_source
else:
is_mtls = mtls.has_default_client_cert_source()
if is_mtls:
client_cert_source_func = mtls.default_client_cert_source()
else:
client_cert_source_func = None
# Figure out which api endpoint to use.
if client_options.api_endpoint is not None:
api_endpoint = client_options.api_endpoint
else:
use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
if use_mtls_env == "never":
api_endpoint = self.DEFAULT_ENDPOINT
elif use_mtls_env == "always":
api_endpoint = self.DEFAULT_MTLS_ENDPOINT
elif use_mtls_env == "auto":
if is_mtls:
api_endpoint = self.DEFAULT_MTLS_ENDPOINT
else:
api_endpoint = self.DEFAULT_ENDPOINT
else:
raise MutualTLSChannelError(
"Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted "
"values: never, auto, always"
)
# Save or instantiate the transport.
# Ordinarily, we provide the transport, but allowing a custom transport
# instance provides an extensibility point for unusual situations.
if isinstance(transport, SpecialistPoolServiceTransport):
# transport is a SpecialistPoolServiceTransport instance.
if credentials or client_options.credentials_file:
raise ValueError(
"When providing a transport instance, "
"provide its credentials directly."
)
if client_options.scopes:
raise ValueError(
"When providing a transport instance, provide its scopes "
"directly."
)
self._transport = transport
else:
Transport = type(self).get_transport_class(transport)
self._transport = Transport(
credentials=credentials,
credentials_file=client_options.credentials_file,
host=api_endpoint,
scopes=client_options.scopes,
client_cert_source_for_mtls=client_cert_source_func,
quota_project_id=client_options.quota_project_id,
client_info=client_info,
always_use_jwt_access=(
Transport == type(self).get_transport_class("grpc")
or Transport == type(self).get_transport_class("grpc_asyncio")
),
)
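    # Hypothetical construction sketch (the endpoint value is an assumption):
    # per the docstring above, an explicit api_endpoint in client_options wins
    # over the GOOGLE_API_USE_MTLS_ENDPOINT environment variable.
    #   client = SpecialistPoolServiceClient(
    #       client_options={"api_endpoint":
    #                       "us-central1-aiplatform.googleapis.com"})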
def create_specialist_pool(
self,
request: specialist_pool_service.CreateSpecialistPoolRequest = None,
*,
parent: str = None,
specialist_pool: gca_specialist_pool.SpecialistPool = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> gac_operation.Operation:
r"""Creates a SpecialistPool.
Args:
request (google.cloud.aiplatform_v1beta1.types.CreateSpecialistPoolRequest):
The request object. Request message for
[SpecialistPoolService.CreateSpecialistPool][google.cloud.aiplatform.v1beta1.SpecialistPoolService.CreateSpecialistPool].
parent (str):
Required. The parent Project name for the new
SpecialistPool. The form is
``projects/{project}/locations/{location}``.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
specialist_pool (google.cloud.aiplatform_v1beta1.types.SpecialistPool):
Required. The SpecialistPool to
create.
This corresponds to the ``specialist_pool`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.aiplatform_v1beta1.types.SpecialistPool` SpecialistPool represents customers' own workforce to work on their data
labeling jobs. It includes a group of specialist
managers who are responsible for managing the
labelers in this pool as well as customers' data
labeling jobs associated with this pool. Customers
create specialist pool as well as start data labeling
jobs on Cloud, managers and labelers work with the
jobs using CrowdCompute console.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, specialist_pool])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a specialist_pool_service.CreateSpecialistPoolRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, specialist_pool_service.CreateSpecialistPoolRequest):
request = specialist_pool_service.CreateSpecialistPoolRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if specialist_pool is not None:
request.specialist_pool = specialist_pool
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.create_specialist_pool]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = gac_operation.from_gapic(
response,
self._transport.operations_client,
gca_specialist_pool.SpecialistPool,
metadata_type=specialist_pool_service.CreateSpecialistPoolOperationMetadata,
)
# Done; return the response.
return response
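    # Hypothetical call sketch (parent and display_name are assumptions): the
    # method returns a long-running operation, so result() blocks until the
    # SpecialistPool has actually been created.
    #   op = client.create_specialist_pool(
    #       parent="projects/my-project/locations/us-central1",
    #       specialist_pool=gca_specialist_pool.SpecialistPool(
    #           display_name="my-pool"))
    #   pool = op.result()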
def get_specialist_pool(
self,
request: specialist_pool_service.GetSpecialistPoolRequest = None,
*,
name: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> specialist_pool.SpecialistPool:
r"""Gets a SpecialistPool.
Args:
request (google.cloud.aiplatform_v1beta1.types.GetSpecialistPoolRequest):
The request object. Request message for
[SpecialistPoolService.GetSpecialistPool][google.cloud.aiplatform.v1beta1.SpecialistPoolService.GetSpecialistPool].
name (str):
Required. The name of the SpecialistPool resource. The
form is
``projects/{project}/locations/{location}/specialistPools/{specialist_pool}``.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.aiplatform_v1beta1.types.SpecialistPool:
SpecialistPool represents customers'
own workforce to work on their data
labeling jobs. It includes a group of
specialist managers who are responsible
for managing the labelers in this pool
as well as customers' data labeling jobs
associated with this pool.
Customers create specialist pool as well
as start data labeling jobs on Cloud,
managers and labelers work with the jobs
using CrowdCompute console.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a specialist_pool_service.GetSpecialistPoolRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, specialist_pool_service.GetSpecialistPoolRequest):
request = specialist_pool_service.GetSpecialistPoolRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.get_specialist_pool]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
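    # Hypothetical read sketch (the resource name is an assumption):
    #   pool = client.get_specialist_pool(
    #       name="projects/my-project/locations/us-central1/"
    #            "specialistPools/123")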
def list_specialist_pools(
self,
request: specialist_pool_service.ListSpecialistPoolsRequest = None,
*,
parent: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListSpecialistPoolsPager:
r"""Lists SpecialistPools in a Location.
Args:
request (google.cloud.aiplatform_v1beta1.types.ListSpecialistPoolsRequest):
The request object. Request message for
[SpecialistPoolService.ListSpecialistPools][google.cloud.aiplatform.v1beta1.SpecialistPoolService.ListSpecialistPools].
parent (str):
Required. The name of the SpecialistPool's parent
resource. Format:
``projects/{project}/locations/{location}``
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.aiplatform_v1beta1.services.specialist_pool_service.pagers.ListSpecialistPoolsPager:
Response message for
[SpecialistPoolService.ListSpecialistPools][google.cloud.aiplatform.v1beta1.SpecialistPoolService.ListSpecialistPools].
Iterating over this object will yield results and
resolve additional pages automatically.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a specialist_pool_service.ListSpecialistPoolsRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, specialist_pool_service.ListSpecialistPoolsRequest):
request = specialist_pool_service.ListSpecialistPoolsRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.list_specialist_pools]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# This method is paged; wrap the response in a pager, which provides
# an `__iter__` convenience method.
response = pagers.ListSpecialistPoolsPager(
method=rpc, request=request, response=response, metadata=metadata,
)
# Done; return the response.
return response
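    # Hypothetical iteration sketch (the parent value is an assumption): the
    # pager fetches additional pages transparently while being iterated.
    #   for pool in client.list_specialist_pools(
    #           parent="projects/my-project/locations/us-central1"):
    #       print(pool.display_name)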
def delete_specialist_pool(
self,
request: specialist_pool_service.DeleteSpecialistPoolRequest = None,
*,
name: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> gac_operation.Operation:
r"""Deletes a SpecialistPool as well as all Specialists
in the pool.
Args:
request (google.cloud.aiplatform_v1beta1.types.DeleteSpecialistPoolRequest):
The request object. Request message for
[SpecialistPoolService.DeleteSpecialistPool][google.cloud.aiplatform.v1beta1.SpecialistPoolService.DeleteSpecialistPool].
name (str):
Required. The resource name of the SpecialistPool to
delete. Format:
``projects/{project}/locations/{location}/specialistPools/{specialist_pool}``
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
empty messages in your APIs. A typical example is to
use it as the request or the response type of an API
method. For instance:
service Foo {
rpc Bar(google.protobuf.Empty) returns
(google.protobuf.Empty);
}
The JSON representation for Empty is empty JSON
object {}.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a specialist_pool_service.DeleteSpecialistPoolRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, specialist_pool_service.DeleteSpecialistPoolRequest):
request = specialist_pool_service.DeleteSpecialistPoolRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.delete_specialist_pool]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = gac_operation.from_gapic(
response,
self._transport.operations_client,
empty_pb2.Empty,
metadata_type=gca_operation.DeleteOperationMetadata,
)
# Done; return the response.
return response
def update_specialist_pool(
self,
request: specialist_pool_service.UpdateSpecialistPoolRequest = None,
*,
specialist_pool: gca_specialist_pool.SpecialistPool = None,
update_mask: field_mask_pb2.FieldMask = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> gac_operation.Operation:
r"""Updates a SpecialistPool.
Args:
request (google.cloud.aiplatform_v1beta1.types.UpdateSpecialistPoolRequest):
The request object. Request message for
[SpecialistPoolService.UpdateSpecialistPool][google.cloud.aiplatform.v1beta1.SpecialistPoolService.UpdateSpecialistPool].
specialist_pool (google.cloud.aiplatform_v1beta1.types.SpecialistPool):
Required. The SpecialistPool which
replaces the resource on the server.
This corresponds to the ``specialist_pool`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
update_mask (google.protobuf.field_mask_pb2.FieldMask):
Required. The update mask applies to
the resource.
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.aiplatform_v1beta1.types.SpecialistPool` SpecialistPool represents customers' own workforce to work on their data
labeling jobs. It includes a group of specialist
managers who are responsible for managing the
labelers in this pool as well as customers' data
labeling jobs associated with this pool. Customers
create specialist pool as well as start data labeling
jobs on Cloud, managers and labelers work with the
jobs using CrowdCompute console.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([specialist_pool, update_mask])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a specialist_pool_service.UpdateSpecialistPoolRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, specialist_pool_service.UpdateSpecialistPoolRequest):
request = specialist_pool_service.UpdateSpecialistPoolRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if specialist_pool is not None:
request.specialist_pool = specialist_pool
if update_mask is not None:
request.update_mask = update_mask
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.update_specialist_pool]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("specialist_pool.name", request.specialist_pool.name),)
),
)
# Send the request.
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = gac_operation.from_gapic(
response,
self._transport.operations_client,
gca_specialist_pool.SpecialistPool,
metadata_type=specialist_pool_service.UpdateSpecialistPoolOperationMetadata,
)
# Done; return the response.
return response
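    # Hypothetical update sketch (names and mask paths are assumptions): only
    # the fields listed in the FieldMask are overwritten on the server.
    #   op = client.update_specialist_pool(
    #       specialist_pool=gca_specialist_pool.SpecialistPool(
    #           name=pool.name, display_name="renamed-pool"),
    #       update_mask=field_mask_pb2.FieldMask(paths=["display_name"]))
    #   updated = op.result()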
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-aiplatform",
).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("SpecialistPoolServiceClient",)
|
sasha-gitg/python-aiplatform
|
google/cloud/aiplatform_v1beta1/services/specialist_pool_service/client.py
|
Python
|
apache-2.0
| 37,342 | 0.001767 |
# -*- coding: utf-8 -*-
{
'name': 'Import OFX Bank Statement',
'category': 'Banking addons',
'version': '8.0.1.0.1',
'license': 'AGPL-3',
'author': 'OpenERP SA,'
'Odoo Community Association (OCA)',
'website': 'https://github.com/OCA/bank-statement-import',
'depends': [
'account_bank_statement_import'
],
'demo': [
'demo/demo_data.xml',
],
'external_dependencies': {
'python': ['ofxparse'],
},
'auto_install': False,
'installable': True,
}
|
acsone/bank-statement-import
|
account_bank_statement_import_ofx/__openerp__.py
|
Python
|
agpl-3.0
| 534 | 0 |
from __future__ import absolute_import, division, print_function
import pygtk
pygtk.require('2.0')
import gtk, gobject, cairo
import time
import sys
import imp
import os
from bcam.loader_dxf import DXFLoader
from bcam.loader_excellon import ExcellonLoader
from bcam.tool_operation import TOResult
from bcam.tool_op_drill import TODrill
from bcam.tool_op_exact_follow import TOExactFollow
from bcam.tool_op_offset_follow import TOOffsetFollow
from bcam.tool_op_pocketing import TOPocketing
from bcam.calc_utils import AABB, OverlapEnum
from bcam.path import Path
from bcam.project import project
from bcam.generalized_setting import TOSTypes
from logging import debug, info, warning, error, critical
from bcam.util import dbgfname
from bcam.singleton import Singleton
from bcam.state import State
class EVEnum(object):
load_click = "load_click"
save_click = "save_click"
load_file = "load_file"
save_file = "save_file"
load_project_click = "load_project_click"
save_project_click = "save_project_click"
save_project_as_click = "save_project_as_click"
load_project = "load_project"
save_project = "save_project"
new_project_click = "new_project_click"
quit_click = "quit_click"
screen_left_press = "screen_left_press"
screen_left_release = "screen_left_release"
pointer_motion = "pointer_motion"
drill_tool_click = "drill_tool_click"
deselect_all = "deselect_all"
shift_press = "shift_press"
shift_release = "shift_release"
ctrl_press = "ctrl_press"
ctrl_release = "ctrl_release"
update_paths_list = "update_paths_list"
update_tool_operations_list = "update_tool_operations_list"
path_list_selection_changed = "path_list_selection_changed"
tool_operations_list_selection_changed = "tool_operations_list_selection_changed"
exact_follow_tool_click = "exact_follow_tool_click"
offset_follow_tool_click = "offset_follow_tool_click"
pocket_tool_click = "pocket_tool_click"
update_settings = "update_settings"
tool_operation_up_click = "tool_operation_up_click"
tool_operation_down_click = "tool_operation_down_click"
scroll_up = "scroll_up"
scroll_down = "scroll_down"
hscroll = "hscroll"
vscroll = "vscroll"
tool_paths_check_button_click = "tool_paths_check_button_click"
paths_check_button_click = "paths_check_button_click"
path_delete_button_click = "path_delete_button_click"
tool_operation_delete_button_click = "tool_operation_delete_button_click"
update_progress = "update_progress"
undo_click = "undo_click"
redo_click = "redo_click"
main_start = "main_start"
pause = "pause"
class EventProcessor(object):
ee = EVEnum()
event_list = []
selected_elements = []
selected_path = None
selected_tool_operation = None
left_press_start = None
pointer_position = None
shift_pressed = False
ctrl_pressed = False
def __init__(self):
Singleton.ee = self.ee
Singleton.ep = self
self.started = False
self.events = {
self.ee.load_click: [self.load_click],
self.ee.save_click: [self.save_click],
self.ee.load_file: [self.load_file],
self.ee.save_file: [self.save_file],
self.ee.load_project_click: [self.load_project_click],
self.ee.save_project_click: [self.save_project_click],
self.ee.save_project_as_click: [self.save_project_as_click],
self.ee.load_project: [self.load_project],
self.ee.save_project: [self.save_project],
self.ee.new_project_click: [self.new_project_click],
self.ee.quit_click: [self.quit_click],
self.ee.screen_left_press: [self.screen_left_press],
self.ee.screen_left_release: [self.screen_left_release],
self.ee.pointer_motion: [self.pointer_motion],
self.ee.drill_tool_click: [self.drill_tool_click],
self.ee.deselect_all: [self.deselect_all],
self.ee.shift_press: [self.shift_press],
self.ee.shift_release: [self.shift_release],
self.ee.ctrl_press: [self.ctrl_press],
self.ee.ctrl_release: [self.ctrl_release],
self.ee.update_paths_list: [self.update_paths_list],
self.ee.path_list_selection_changed: [self.path_list_selection_changed],
self.ee.exact_follow_tool_click: [self.exact_follow_tool_click],
self.ee.offset_follow_tool_click: [self.offset_follow_tool_click],
self.ee.pocket_tool_click: [self.pocket_tool_click],
self.ee.update_tool_operations_list: [self.update_tool_operations_list],
self.ee.tool_operations_list_selection_changed: [self.tool_operations_list_selection_changed],
self.ee.update_settings: [self.update_settings],
self.ee.tool_operation_up_click: [self.tool_operation_up_click],
self.ee.tool_operation_down_click: [self.tool_operation_down_click],
self.ee.scroll_up: [self.scroll_up],
self.ee.scroll_down: [self.scroll_down],
self.ee.hscroll: [self.hscroll],
self.ee.vscroll: [self.vscroll],
self.ee.tool_paths_check_button_click: [self.tool_paths_check_button_click],
self.ee.paths_check_button_click: [self.paths_check_button_click],
self.ee.path_delete_button_click: [self.path_delete_button_click],
self.ee.tool_operation_delete_button_click: [self.tool_operation_delete_button_click],
self.ee.update_progress: [self.update_progress],
self.ee.undo_click: [self.undo_click],
self.ee.redo_click: [self.redo_click],
self.ee.main_start: [self.main_start],
self.ee.pause: [self.pause],
}
def reset(self):
self.selected_elements = []
self.selected_path = None
self.selected_tool_operation = None
self.left_press_start = None
def append_event_processor(self, event, proc):
self.events[event].append(proc)
def prepend_event_processor(self, event, proc):
self.events[event].insert(0, proc)
def set_event(self, event, proc_list):
self.events[event] = proc_list
def push_event(self, event, *args):
self.event_list.append((event, args))
def process(self):
if self.started == False:
self.push_event(self.ee.main_start, None)
self.started = True
event_list = self.event_list[:]
self.event_list = []
for e, args in event_list:
if e in self.events:
for p in self.events[e]:
r = p(args)
if (r == False):
break
else:
dbgfname()
warning(" Unknown event:"+str(e)+" args: "+str(args))
warning(" Please report")
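    # Hypothetical extension sketch (the handler name and file path are
    # assumptions): plugins can hook extra processors onto an event and queue
    # events of their own through the singletons wired up in __init__.
    #   def on_load_file(args):
    #       debug("about to load: " + str(args))
    #   Singleton.ep.prepend_event_processor(Singleton.ee.load_file, on_load_file)
    #   Singleton.ep.push_event(Singleton.ee.load_file, "/tmp/part.dxf")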
def load_click(self, args):
mimes = [("Drawings (*.dxf)", "Application/dxf", "*.dxf"),
("Drill files (*.drl)", "Application/drl", "*.drl")]
result = self.mw.mk_file_dialog("Open ...", mimes)
if result!=None:
self.push_event(self.ee.load_file, result)
def save_click(self, args):
mimes = [("GCode (*.ngc)", "Application/ngc", "*.ngc")]
result = self.mw.mk_file_save_dialog("Save ...", mimes)
if result!=None:
self.push_event(self.ee.save_file, result)
def load_project_click(self, args):
mimes = [("BCam projects (*.bcam)", "Application/bcam", "*.bcam")]
result = self.mw.mk_file_dialog("Open project ...", mimes)
if result!=None:
self.push_event(self.ee.load_project, result)
def save_project_click(self, args):
dbgfname()
debug(" save project clicked")
if (project.get_path() != None):
self.save_project((project.get_path(), ))
else:
mimes = [("BCam project (*.bcam)", "Application/bcam", "*.bcam")]
result = self.mw.mk_file_save_dialog("Save project ...", mimes)
if result!=None:
self.save_project((result, ))
def save_project_as_click(self, args):
dbgfname()
debug(" save project as clicked")
mimes = [("BCam project (*.bcam)", "Application/bcam", "*.bcam")]
result = self.mw.mk_file_save_dialog("Save project as ...", mimes)
if result!=None:
self.save_project((result, ))
def new_project_click(self, args):
dbgfname()
debug(" new project clicked")
if not Singleton.state.is_clean():
debug(" not clean, ask to save")
if self.mw.mk_question_dialog("Current project has some unsaved data.\nWould you like to save it?"):
self.save_project_click(None)
self.reset()
Singleton.state = State()
self.push_event(self.ee.update_tool_operations_list, (None))
self.push_event(self.ee.update_paths_list, (None))
project.push_state(Singleton.state, "new_project_click")
self.mw.widget.update()
def quit_click(self, args):
dbgfname()
debug(" quit clicked")
if not Singleton.state.is_clean():
debug(" not clean, ask to save")
if self.mw.mk_question_dialog("Current project has some unsaved data.\nWould you like to save it?"):
self.save_project_click(None)
exit(0)
else:
exit(0)
def update_paths_list(self, args):
if Singleton.state.paths != None:
self.mw.clear_list(self.mw.gtklist)
for p in Singleton.state.paths:
if p.name[0] == '*':
continue
self.mw.add_item_to_list(self.mw.gtklist, p.name, self.ee.paths_check_button_click)
def update_tool_operations_list(self, args):
dbgfname()
debug(" args: "+str(args))
if Singleton.state.tool_operations != None:
self.mw.clear_list(self.mw.tp_gtklist)
for p in Singleton.state.tool_operations:
self.mw.add_item_to_list(self.mw.tp_gtklist, p.display_name, self.ee.tool_paths_check_button_click)
if (args!=(None,)):
if "selection" in args[0]:
idx = args[0]["selection"]
self.mw.set_item_selected(self.mw.tp_gtklist, idx)
def load_file(self, args):
dbgfname()
debug(" load file: "+str(args))
ext = os.path.splitext(args[0])[1][1:].strip()
if (ext == "dxf"):
dxfloader = DXFLoader()
Singleton.state.add_paths(dxfloader.load(args[0]))
else:
excloader = ExcellonLoader()
Singleton.state.add_paths(excloader.load(args[0]))
self.push_event(self.ee.update_paths_list, (None))
project.push_state(Singleton.state, "load_file")
self.mw.widget.update()
feedrate = Singleton.state.settings.tool.get_feedrate()
debug(" feedrate: "+str(feedrate))
def save_file(self, args):
dbgfname()
debug(" save file: "+str(args))
file_path = args[0]
if os.path.splitext(file_path)[1][1:].strip() != "ngc":
file_path+=".ngc"
out = ""
out+=Singleton.state.settings.default_pp.set_metric()
out+=Singleton.state.settings.default_pp.set_absolute()
feedrate = Singleton.state.settings.tool.get_feedrate()
debug(" feedrate: "+str(feedrate))
out+=Singleton.state.settings.default_pp.set_feedrate(feedrate)
out+= Singleton.state.settings.default_pp.move_to_rapid([0, 0, Singleton.state.settings.tool.default_height])
for p in Singleton.state.tool_operations:
out+=p.get_gcode()
out+= Singleton.state.settings.default_pp.move_to_rapid([0, 0, Singleton.state.settings.tool.default_height])
f = open(file_path, "w")
f.write(out)
f.close()
def load_project(self, args):
dbgfname()
debug(" load project: "+str(args))
project_path = args[0]
project.load(project_path)
self.mw.update_right_vbox()
def save_project(self, args):
dbgfname()
debug(" save project: "+str(args))
project_path = args[0]
project.save(project_path)
def screen_left_press(self, args):
dbgfname()
debug(" press at:"+str(args))
offset = Singleton.state.get_offset()
scale = Singleton.state.get_scale()
cx = (args[0][0]-offset[0])/scale[0]
cy = (args[0][1]-offset[1])/scale[1]
self.left_press_start = (cx, cy)
self.pointer_position = (cx, cy)
self.mw.widget.update()
def screen_left_release(self, args):
dbgfname()
debug(" release at: "+str(args))
offset = Singleton.state.get_offset()
scale = Singleton.state.get_scale()
cx = (args[0][0]-offset[0])/scale[0]
cy = (args[0][1]-offset[1])/scale[1]
self.pointer_position = (cx, cy)
if (self.left_press_start!=None):
if Singleton.state.paths == None:
self.left_press_start=None
return
# just a click
dx = abs(cx-self.left_press_start[0])
dy = abs(cy-self.left_press_start[1])
debug(" dx, dy: "+str(dx)+" "+str(dy))
if dx<1 and dy<1:
for p in Singleton.state.paths:
for e in p.elements:
if (e.distance_to_pt((cx, cy))<1):
if self.shift_pressed:
if not e in self.selected_elements:
e.set_selected()
self.selected_elements.append(e)
else:
if e in self.selected_elements:
self.selected_elements.remove(e)
e.unset_selected()
else:
self.deselect_all(None)
e.set_selected()
self.selected_elements.append(e)
# selection with a box
else:
ex = cx
ey = cy
sx = self.left_press_start[0]
sy = self.left_press_start[1]
select_aabb = AABB(sx, sy, ex, ey)
if not self.shift_pressed:
self.deselect_all(None)
for p in Singleton.state.paths:
for e in p.elements:
if not e in self.selected_elements:
e_aabb = e.get_aabb()
if (e_aabb != None):
debug(" e: "+str(e_aabb))
debug(" select:"+str(select_aabb))
overlap = select_aabb.aabb_in_aabb(e_aabb)
debug(" overlap:"+str(overlap))
if (overlap != OverlapEnum.no_overlap) and (overlap != OverlapEnum.fully_lays_inside):
e.set_selected()
self.selected_elements.append(e)
self.mw.widget.update()
self.left_press_start=None
def pointer_motion(self, args):
offset = Singleton.state.get_offset()
scale = Singleton.state.get_scale()
cx = (args[0][0]-offset[0])/scale[0]
cy = (args[0][1]-offset[1])/scale[1]
self.pointer_position = (cx, cy)
self.mw.cursor_pos_label.set_text("cur: %.3f:%.3f"%(cx, cy))
self.mw.widget.update()
def drill_tool_click(self, args):
dbgfname()
debug(" drill tool click:"+str(args))
debug(" "+str(self.selected_elements))
for e in self.selected_elements:
debug(" thickness:"+str(Singleton.state.get_settings().get_material().get_thickness()))
drl_op = TODrill(Singleton.state, index=len(Singleton.state.tool_operations))
if drl_op.apply(e, Singleton.state.get_settings().get_material().get_thickness()):
Singleton.state.tool_operations.append(drl_op)
self.push_event(self.ee.update_tool_operations_list, (None))
project.push_state(Singleton.state, "drill_tool_click")
debug(" "+str(Singleton.state.tool_operations))
self.mw.widget.update()
def join_elements(self, args):
dbgfname()
sp = Singleton.state.paths
if self.selected_elements!=None:
debug(" selected: "+str(self.selected_elements))
p = Path(Singleton.state, self.selected_elements, "path", Singleton.state.settings.get_def_lt().name)
connected = p.mk_connected_path()
debug(" connected elements: "+str(connected))
if connected != None:
connected.name = connected.name+" "+str(len(sp))
self.deselect_all(None)
for e in connected.elements:
for i, p in enumerate(sp):
if e in sp[i].elements:
sp[i].elements.remove(e)
sp.append(connected)
self.push_event(self.ee.update_paths_list, (None))
#project.push_state(Singleton.state, "join_elements")
return connected
return None
def deselect_all(self, args):
for e in self.selected_elements:
e.toggle_selected()
self.selected_elements = []
if (self.selected_tool_operation != None):
self.selected_tool_operation.unset_selected()
self.selected_tool_operation = None
self.mw.widget.update()
def shift_press(self, args):
self.shift_pressed = True
def shift_release(self, args):
self.shift_pressed = False
def ctrl_press(self, args):
self.ctrl_pressed = True
def ctrl_release(self, args):
self.ctrl_pressed = False
def path_list_selection_changed(self, args):
selection = args[0][0].get_selection()
self.deselect_all(None)
self.selected_path = None
for li in selection:
name = li.children()[0].children()[1].get_text()
for p in Singleton.state.paths:
if p.name == name:
self.selected_path = p
for e in p.elements:
if not e in self.selected_elements:
e.set_selected()
self.selected_elements.append(e)
self.mw.widget.update()
def tool_operations_list_selection_changed(self, args):
selection = args[0][0].get_selection()
self.deselect_all(None)
self.selected_tool_operation = None
for li in selection:
name = li.children()[0].children()[1].get_text()
for p in Singleton.state.tool_operations:
if p.display_name == name:
self.selected_tool_operation = p
p.set_selected()
self.mw.new_settings_vbox(p.get_settings_list(), p.display_name+" settings")
self.mw.widget.update()
def exact_follow_tool_click(self, args):
dbgfname()
debug(" exact follow tool click: "+str(args))
connected = self.join_elements(None)
debug(" selected path: "+str(self.selected_path))
if connected != None:
path_follow_op = TOExactFollow(Singleton.state, index=len(Singleton.state.tool_operations), depth=Singleton.state.get_settings().get_material().get_thickness())
if path_follow_op.apply(connected):
Singleton.state.add_tool_operations([path_follow_op])
self.push_event(self.ee.update_tool_operations_list, (None))
project.push_state(Singleton.state, "exact_follow_tool_click")
self.mw.widget.update()
def offset_follow_tool_click(self, args):
dbgfname()
debug(" offset follow tool click: "+str(args))
connected = self.join_elements(None)
debug(" selected path: "+str(self.selected_path))
debug(" connected: "+str(connected))
if connected != None:
path_follow_op = TOOffsetFollow(Singleton.state, index=len(Singleton.state.tool_operations), depth=Singleton.state.get_settings().get_material().get_thickness())
if path_follow_op.apply(connected):
Singleton.state.tool_operations.append(path_follow_op)
self.push_event(self.ee.update_tool_operations_list, (None))
project.push_state(Singleton.state, "offset_follow_tool_click")
self.mw.widget.update()
def pocket_tool_click(self, args):
#dbgfname()
#debug(" args: "+str(args))
if args[0] != None:
#debug(" pocket tool click: "+str(args))
connected = self.join_elements(None)
#debug(" selected path: "+str(self.selected_path))
if connected != None:
pocket_op = TOPocketing(Singleton.state, index=len(Singleton.state.tool_operations), depth=Singleton.state.get_settings().get_material().get_thickness())
result = pocket_op.apply(connected)
if result == TOResult.ok:
if Singleton.state.get_tool_operation_by_name(pocket_op.display_name) == None:
Singleton.state.tool_operations.append(pocket_op)
project.push_state(Singleton.state, "pocket_tool_click")
self.push_event(self.ee.update_tool_operations_list, (None))
elif result == TOResult.repeat:
Singleton.state.set_operation_in_progress(pocket_op)
self.push_event(self.ee.update_progress, True)
self.push_event(self.ee.pocket_tool_click, None)
else:
op = Singleton.state.get_operation_in_progress()
#debug(" Operation in progress: "+str(op))
if op != None:
if op.apply(None) == TOResult.repeat:
self.push_event(self.ee.update_progress, True)
self.push_event(self.ee.pocket_tool_click, None)
else:
if Singleton.state.get_tool_operation_by_name(op.display_name) == None:
Singleton.state.tool_operations.append(op)
project.push_state(Singleton.state, "pocket_tool_click")
self.push_event(self.ee.update_tool_operations_list, (None))
self.push_event(self.ee.update_progress, False)
Singleton.state.unset_operation_in_progress()
self.mw.widget.update()
def update_progress(self, args):
if args[0] == True:
Singleton.state.spinner_frame+=1
Singleton.state.spinner_frame %= (len(Singleton.state.spinner)*20)
self.mw.progress_label.set_text("progress: "+Singleton.state.spinner[int(Singleton.state.spinner_frame/20)])
self.mw.widget.update()
else:
self.mw.progress_label.set_text("No task running")
self.mw.widget.update()
def update_settings(self, args):
dbgfname()
debug(" settings update: "+str(args))
setting = args[0][0]
if setting.type == TOSTypes.float:
new_value = args[0][1][0].get_value()
setting.set_value(new_value)
project.push_state(Singleton.state, "update_settings")
elif setting.type == TOSTypes.button:
setting.set_value(None)
else:
warning(" Unknown setting type: %s"%(setting.type,))
self.mw.widget.update()
def tool_operation_up_click(self, args):
dbgfname()
debug(" tool operation up")
if self.selected_tool_operation==None:
return
if len(Singleton.state.tool_operations)==0:
return
cur_idx = Singleton.state.tool_operations.index(self.selected_tool_operation)
debug(" cur idx: "+str(cur_idx))
if cur_idx == 0:
return
temp = self.selected_tool_operation
Singleton.state.tool_operations.remove(self.selected_tool_operation)
Singleton.state.tool_operations.insert(cur_idx-1, temp)
self.push_event(self.ee.update_tool_operations_list, {"selection": cur_idx-1})
project.push_state(Singleton.state, "tool_operation_up_click")
def tool_operation_down_click(self, args):
dbgfname()
debug(" tool operation down")
if self.selected_tool_operation==None:
return
if len(Singleton.state.tool_operations)==0:
return
cur_idx = Singleton.state.tool_operations.index(self.selected_tool_operation)
debug(" cur idx: "+str(cur_idx))
if cur_idx == len(Singleton.state.tool_operations)-1:
return
temp = self.selected_tool_operation
Singleton.state.tool_operations.remove(self.selected_tool_operation)
Singleton.state.tool_operations.insert(cur_idx+1, temp)
self.push_event(self.ee.update_tool_operations_list, {"selection": cur_idx+1})
project.push_state(Singleton.state, "tool_operation_down_click")
def scroll_up(self, args):
dbgfname()
debug(" scroll up")
if self.shift_pressed:
offset = Singleton.state.get_base_offset()
Singleton.mw.widget_vscroll.set_value(-(offset[1]+10*Singleton.state.scale[0]))
elif self.ctrl_pressed:
offset = Singleton.state.get_base_offset()
Singleton.mw.widget_hscroll.set_value(-(offset[0]+10*Singleton.state.scale[0]))
else:
osx, osy = Singleton.state.scale
if Singleton.state.scale[0]<=0.01:
Singleton.state.scale = (Singleton.state.scale[0]+0.1, Singleton.state.scale[1]+0.1)
else:
Singleton.state.scale = (Singleton.state.scale[0]*1.5, Singleton.state.scale[1]*1.5)
tx, ty = Singleton.state.get_offset()
sx, sy = Singleton.state.get_screen_size()
px, py = self.pointer_position
nsx, nsy = Singleton.state.scale
debug(" Old px, py: %f, %f"%(px, py))
debug(" Screen size: %s"%((sx, sy),))
Singleton.state.set_base_offset((px-px*nsx, -(py-py*nsy)))
debug(" New px, py: %f, %f"%((-(px-px/nsx), (py-py/nsy))))
debug(" New scale: %s"%((nsx, nsy),))
self.mw.cursor_pos_label.set_text("cur: %.3f:%.3f"%(px, py))
self.mw.widget.update()
def scroll_down(self, args):
dbgfname()
debug(" scroll down")
if self.shift_pressed:
offset = Singleton.state.get_base_offset()
Singleton.mw.widget_vscroll.set_value(-(offset[1]-10*Singleton.state.scale[0]))
elif self.ctrl_pressed:
offset = Singleton.state.get_base_offset()
Singleton.mw.widget_hscroll.set_value(-(offset[0]-10*Singleton.state.scale[0]))
else:
if Singleton.state.scale[0]>0.1:
if Singleton.state.scale[0]<=1:
Singleton.state.scale = (Singleton.state.scale[0]-0.1, Singleton.state.scale[1]-0.1)
else:
Singleton.state.scale = (Singleton.state.scale[0]/1.5, Singleton.state.scale[1]/1.5)
px, py = self.pointer_position
nsx, nsy = Singleton.state.scale
Singleton.state.set_base_offset((-px*nsx, py*nsy))
self.mw.widget.update()
def hscroll(self, args):
dbgfname()
debug(" hscroll: "+str(args))
debug(" "+str(args[0][0].get_value()))
offset = Singleton.state.get_base_offset()
Singleton.state.set_base_offset((-args[0][0].get_value(), offset[1]))
self.mw.widget.update()
def vscroll(self, args):
dbgfname()
debug(" vscroll: "+str(args))
debug(" "+str(args[0][0].get_value()))
offset = Singleton.state.get_base_offset()
Singleton.state.set_base_offset((offset[0], -args[0][0].get_value()))
self.mw.widget.update()
def tool_paths_check_button_click(self, args):
name = args[0][0]
for o in Singleton.state.tool_operations:
if o.display_name == name:
o.display = not o.display
break
self.mw.widget.update()
def paths_check_button_click(self, args):
name = args[0][0]
for p in Singleton.state.paths:
if p.name == name:
p.display = not p.display
break
self.mw.widget.update()
def path_delete_button_click(self, args):
if self.selected_path in Singleton.state.paths:
Singleton.state.paths.remove(self.selected_path)
self.selected_path = None
self.push_event(self.ee.update_paths_list, (None))
project.push_state(Singleton.state, "path_delete_button_click")
self.mw.widget.update()
def tool_operation_delete_button_click(self, args):
if self.selected_tool_operation in Singleton.state.tool_operations:
Singleton.state.tool_operations.remove(self.selected_tool_operation)
self.selected_tool_operation = None
self.push_event(self.ee.update_tool_operations_list, (None))
project.push_state(Singleton.state, "tool_operation_delete_button_click")
self.mw.widget.update()
def undo_click(self, args):
dbgfname()
debug(" steps("+str(len(project.steps))+") before: "+str(project.steps))
project.step_back()
debug(" steps("+str(len(project.steps))+") after: "+str(project.steps))
self.push_event(self.ee.update_tool_operations_list, (None))
self.push_event(self.ee.update_paths_list, (None))
self.mw.widget.update()
def redo_click(self, args):
dbgfname()
debug(" steps("+str(len(project.steps))+") before: "+str(project.steps))
project.step_forward()
debug(" steps("+str(len(project.steps))+") after: "+str(project.steps))
self.push_event(self.ee.update_tool_operations_list, (None))
self.push_event(self.ee.update_paths_list, (None))
self.mw.widget.update()
def main_start(self, args):
Singleton.plugins = []
if Singleton.plugins_dir != None:
for dirname, subdirs, files in os.walk(Singleton.plugins_dir):
debug('Found directory: %s' % dirname)
for fname in files:
if os.path.splitext(fname)[1] == ".py":
debug('F\t%s' % fname)
plugin_path = os.path.abspath(os.path.join(Singleton.plugins_dir, fname))
plugin_mod_name = os.path.splitext(fname)[0]
debug("Loading module with spec %s:%s"%(plugin_mod_name, plugin_path))
imp.load_source(plugin_mod_name, plugin_path)
for dname in subdirs:
debug('D\t%s' % dname)
debug("Registering plugins")
for p in Singleton.plugins:
p.register()
def pause(self, args):
p = args[0]
debug("Pause for %f"%(p,))
time.sleep(p)
ee = EVEnum()
ep = EventProcessor()
|
snegovick/bcam
|
bcam/events.py
|
Python
|
gpl-3.0
| 31,621 | 0.004744 |
from zope.interface import Interface, Attribute
from zope import schema
from uwosh.emergency.client.config import mf as _
class IUWOshEmergencyClientLayer(Interface):
"""Marker interface that defines a browser layer
"""
|
uwosh/uwosh.emergency.client
|
uwosh/emergency/client/interfaces.py
|
Python
|
gpl-2.0
| 228 | 0.008772 |
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ducktape.cluster.remoteaccount import RemoteCommandError
from ducktape.utils.util import wait_until
class JmxMixin(object):
"""This mixin helps existing service subclasses start JmxTool on their worker nodes and collect jmx stats.
A couple things worth noting:
- this is not a service in its own right.
- we assume the service using JmxMixin also uses KafkaPathResolverMixin
"""
def __init__(self, num_nodes, jmx_object_names=None, jmx_attributes=None):
self.jmx_object_names = jmx_object_names
self.jmx_attributes = jmx_attributes or []
self.jmx_port = 9192
self.started = [False] * num_nodes
self.jmx_stats = [{} for x in range(num_nodes)]
self.maximum_jmx_value = {} # map from object_attribute_name to maximum value observed over time
self.average_jmx_value = {} # map from object_attribute_name to average value observed over time
self.jmx_tool_log = "/mnt/jmx_tool.log"
self.jmx_tool_err_log = "/mnt/jmx_tool.err.log"
def clean_node(self, node):
node.account.kill_process("jmx", clean_shutdown=False, allow_fail=True)
node.account.ssh("rm -rf %s" % self.jmx_tool_log, allow_fail=False)
def start_jmx_tool(self, idx, node):
if self.jmx_object_names is None:
self.logger.debug("%s: Not starting jmx tool because no jmx objects are defined" % node.account)
return
if self.started[idx-1]:
self.logger.debug("%s: jmx tool has been started already on this node" % node.account)
return
cmd = "%s kafka.tools.JmxTool " % self.path.script("kafka-run-class.sh", node)
cmd += "--reporting-interval 1000 --jmx-url service:jmx:rmi:///jndi/rmi://127.0.0.1:%d/jmxrmi" % self.jmx_port
for jmx_object_name in self.jmx_object_names:
cmd += " --object-name %s" % jmx_object_name
for jmx_attribute in self.jmx_attributes:
cmd += " --attributes %s" % jmx_attribute
cmd += " 1>> %s" % self.jmx_tool_log
cmd += " 2>> %s &" % self.jmx_tool_err_log
self.logger.debug("%s: Start JmxTool %d command: %s" % (node.account, idx, cmd))
node.account.ssh(cmd, allow_fail=False)
wait_until(lambda: self._jmx_has_output(node), timeout_sec=10, backoff_sec=.5, err_msg="%s: Jmx tool took too long to start" % node.account)
self.started[idx-1] = True
def _jmx_has_output(self, node):
"""Helper used as a proxy to determine whether jmx is running by that jmx_tool_log contains output."""
try:
node.account.ssh("test -z \"$(cat %s)\"" % self.jmx_tool_log, allow_fail=False)
return False
except RemoteCommandError:
return True
def read_jmx_output(self, idx, node):
if not self.started[idx-1]:
return
object_attribute_names = []
cmd = "cat %s" % self.jmx_tool_log
self.logger.debug("Read jmx output %d command: %s", idx, cmd)
lines = [line for line in node.account.ssh_capture(cmd, allow_fail=False)]
assert len(lines) > 1, "There don't appear to be any samples in the jmx tool log: %s" % lines
for line in lines:
if "time" in line:
object_attribute_names = line.strip()[1:-1].split("\",\"")[1:]
continue
stats = [float(field) for field in line.split(',')]
time_sec = int(stats[0]/1000)
self.jmx_stats[idx-1][time_sec] = {name: stats[i+1] for i, name in enumerate(object_attribute_names)}
# do not calculate average and maximum of jmx stats until we have read output from all nodes
# If the service is multithreaded, this means that the results will be aggregated only when the last
# service finishes
if any(len(time_to_stats) == 0 for time_to_stats in self.jmx_stats):
return
start_time_sec = min([min(time_to_stats.keys()) for time_to_stats in self.jmx_stats])
end_time_sec = max([max(time_to_stats.keys()) for time_to_stats in self.jmx_stats])
for name in object_attribute_names:
aggregates_per_time = []
for time_sec in xrange(start_time_sec, end_time_sec + 1):
# assume that value is 0 if it is not read by jmx tool at the given time. This is appropriate for metrics such as bandwidth
values_per_node = [time_to_stats.get(time_sec, {}).get(name, 0) for time_to_stats in self.jmx_stats]
# assume that value is aggregated across nodes by sum. This is appropriate for metrics such as bandwidth
aggregates_per_time.append(sum(values_per_node))
self.average_jmx_value[name] = sum(aggregates_per_time) / len(aggregates_per_time)
self.maximum_jmx_value[name] = max(aggregates_per_time)
def read_jmx_output_all_nodes(self):
for node in self.nodes:
self.read_jmx_output(self.idx(node), node)
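# Illustrative sketch (not part of the original module): a ducktape service that wants
# JMX stats mixes JmxMixin in next to its usual base classes and forwards the
# jmx_object_names / jmx_attributes arguments. The service and MBean names below are
# made up; only the mixin calls come from the code above.
#
# class ExampleKafkaService(KafkaPathResolverMixin, JmxMixin, Service):
#     def __init__(self, context, num_nodes):
#         Service.__init__(self, context, num_nodes)
#         JmxMixin.__init__(self, num_nodes,
#                           jmx_object_names=["kafka.server:type=BrokerTopicMetrics,name=MessagesInPerSec"],
#                           jmx_attributes=["OneMinuteRate"])
#
#     def start_node(self, node):
#         # ...start the real service on the node first, then begin collecting JMX...
#         self.start_jmx_tool(self.idx(node), node)
#
#     def stop_node(self, node):
#         self.read_jmx_output(self.idx(node), node)   # fills average_jmx_value / maximum_jmx_value
#         self.clean_node(node)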
|
airbnb/kafka
|
tests/kafkatest/services/monitor/jmx.py
|
Python
|
apache-2.0
| 5,768 | 0.003814 |
# -*- coding: utf-8 -*-
#
# textract documentation build configuration file, created by
# sphinx-quickstart on Fri Jul 4 11:09:09 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.join(os.path.abspath('.'), '..'))
import textract
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.todo',
'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['.templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'textract'
copyright = u'2014, Dean Malmgren'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = textract.VERSION
# The full version, including alpha/beta/rc tags.
release = textract.VERSION
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['.static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'textractdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'textract.tex', u'textract Documentation',
u'Dean Malmgren', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'textract', u'textract Documentation',
[u'Dean Malmgren'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'textract', u'textract Documentation',
u'Dean Malmgren', 'textract', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# on_rtd is whether we are on readthedocs.org
# http://read-the-docs.readthedocs.org/en/latest/theme.html#how-do-i-use-this-locally-and-on-read-the-docs
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
|
shobhitmittal/textract
|
docs/conf.py
|
Python
|
mit
| 8,694 | 0.005866 |
import random
class ImageQueryParser:
def __init__(self):
pass
def parse(self, query_string):
tab = query_string.split(" ")
last = tab[-1].lower()
is_random = False
index = 0
if last.startswith("-"):
if last == "-r":
is_random = True
tab.pop()
else:
try:
index = int(last[1:])
tab.pop()
except ValueError:
pass
query_string = " ".join(tab)
return ImageQuery(query_string, is_random, index)
class ImageQuery:
def __init__(self, query, is_random, index):
self.__query = query
self.__is_random = is_random
self.__index = index
def query(self):
return self.__query
def is_random(self):
return self.__is_random
def next_index(self):
if self.is_random():
return random.randrange(0, 100)
else:
i = self.__index
self.__index += 1
return i
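# Illustrative usage (not from the original file): a trailing "-r" asks for a random
# start index, a trailing "-<n>" sets an explicit one, and next_index() then counts up
# from it.
#
#     >>> q = ImageQueryParser().parse("cute cats -2")
#     >>> q.query(), q.is_random()
#     ('cute cats', False)
#     >>> q.next_index(), q.next_index()
#     (2, 3)
#     >>> ImageQueryParser().parse("cute cats -r").is_random()
#     True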
|
mamaddeveloper/teleadmin
|
tools/imageQueryParser.py
|
Python
|
mit
| 1,079 | 0 |
class Parameter(object):
def __init__(self, name):
self.name = name
class Vehicle(object):
def __init__(self, name, path):
self.name = name
self.path = path
self.params = []
class Library(object):
def __init__(self, name):
self.name = name
self.params = []
known_param_fields = [
'Description',
'DisplayName',
'Values',
'Range',
'Units',
'Increment',
'User',
'RebootRequired',
'Bitmask',
'Volatile',
'ReadOnly',
]
# Follow SI units conventions from:
# http://physics.nist.gov/cuu/Units/units.html
# http://physics.nist.gov/cuu/Units/outside.html
# and
# http://physics.nist.gov/cuu/Units/checklist.html
# http://www.bipm.org/en/publications/si-brochure/
# http://www1.bipm.org/en/CGPM/db/3/2/ g_n unit for G-force
# one further constrain is that only printable (7bit) ASCII characters are allowed
known_units = {
# abreviation : full-text (used in .html .rst and .wiki files)
# time
's' : 'seconds' ,
'ds' : 'deciseconds' ,
'cs' : 'centiseconds' ,
'ms' : 'milliseconds' ,
'PWM' : 'PWM in microseconds' , # should be microseconds, this is NOT a SI unit, but follows https://github.com/ArduPilot/ardupilot/pull/5538#issuecomment-271943061
'Hz' : 'hertz' ,
# distance
'km' : 'kilometers' , # metre is the SI unit name, meter is the american spelling of it
'm' : 'meters' , # metre is the SI unit name, meter is the american spelling of it
'm/s' : 'meters per second' , # metre is the SI unit name, meter is the american spelling of it
'm/s/s' : 'meters per square second' , # metre is the SI unit name, meter is the american spelling of it
'm/s/s/s' : 'meters per cubic second' , # metre is the SI unit name, meter is the american spelling of it
'cm' : 'centimeters' , # metre is the SI unit name, meter is the american spelling of it
'cm/s' : 'centimeters per second' , # metre is the SI unit name, meter is the american spelling of it
'cm/s/s' : 'centimeters per square second', # metre is the SI unit name, meter is the american spelling of it
'cm/s/s/s': 'centimeters per cubic second' , # metre is the SI unit name, meter is the american spelling of it
'mm' : 'millimeters' , # metre is the SI unit name, meter is the american spelling of it
# temperature
'degC' : 'degrees Celsius' , # Not SI, but Kelvin is too cumbersome for most users
# angle
    'deg'     : 'degrees' ,                  # Not SI, but in some situations more user-friendly than radians
    'deg/s'   : 'degrees per second' ,       # Not SI, but in some situations more user-friendly than radians
    'cdeg'    : 'centidegrees' ,             # Not SI, but in some situations more user-friendly than radians
    'cdeg/s'  : 'centidegrees per second',   # Not SI, but in some situations more user-friendly than radians
    'cdeg/s/s': 'centidegrees per square second' , # Not SI, but in some situations more user-friendly than radians
'rad' : 'radians' ,
'rad/s' : 'radians per second' ,
'rad/s/s' : 'radians per square second' ,
# electricity
'A' : 'ampere' ,
'V' : 'volt' ,
'W' : 'watt' ,
# magnetism
'Gauss' : 'gauss' , # Gauss is not an SI unit, but 1 tesla = 10000 gauss so a simple replacement is not possible here
'Gauss/s' : 'gauss per second' , # Gauss is not an SI unit, but 1 tesla = 10000 gauss so a simple replacement is not possible here
'mGauss' : 'milligauss' , # Gauss is not an SI unit, but 1 tesla = 10000 gauss so a simple replacement is not possible here
# pressure
'Pa' : 'pascal' ,
'mbar' : 'millibar' ,
# ratio
'%' : 'percent' ,
'%/s' : 'percent per second' ,
'd%' : 'decipercent' , # decipercent is strange, but "per-mille" is even more exotic
# compound
'm.m/s/s' : 'square meter per square second',
'deg/m/s' : 'degrees per meter per second' ,
'm/s/m' : 'meters per second per meter' , # Why not use Hz here ????
'mGauss/A': 'milligauss per ampere' ,
'mA.h' : 'milliampere hour' ,
'A/V' : 'ampere per volt' ,
'm/V' : 'meters per volt' ,
'gravities': 'standard acceleration due to gravity' , # g_n would be a more correct unit, but IMHO no one understands what g_n means
}
required_param_fields = [
'Description',
'DisplayName',
'User',
]
known_group_fields = [
'Path',
]
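# Illustrative sketch (not part of the original file): the tables above are the kind of
# data a metadata checker validates @Param blocks against. A minimal check could look
# like this; the real validation lives in the surrounding param_metadata tools and is
# not reproduced here.
def example_check_param_fields(fields):
    """Return a list of problems for a dict of {field_name: value} parsed from a @Param block."""
    errors = []
    for name in fields:
        if name not in known_param_fields:
            errors.append("unknown field: %s" % name)
    for name in required_param_fields:
        if name not in fields:
            errors.append("missing required field: %s" % name)
    units = fields.get('Units')
    if units is not None and units not in known_units:
        errors.append("unknown units: %s" % units)
    return errors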
|
gcrisis/ardupilot
|
Tools/autotest/param_metadata/param.py
|
Python
|
gpl-3.0
| 5,433 | 0.024664 |
# coding=utf-8
HOSTNAME = 'localhost'
DATABASE = 'r'
USERNAME = 'web'
PASSWORD = 'web'
DB_URI = 'mysql://{}:{}@{}/{}'.format(
USERNAME, PASSWORD, HOSTNAME, DATABASE)
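# With the constants above, DB_URI evaluates to 'mysql://web:web@localhost/r'.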
|
dongweiming/web_develop
|
chapter3/section4/consts.py
|
Python
|
gpl-3.0
| 170 | 0 |
# Copyright (c) 2008 Joost Cassee
# Licensed under the terms of the MIT License (see LICENSE.txt)
import logging
from django.core import urlresolvers
from django.http import HttpResponse
from django.shortcuts import render_to_response
from django.template import RequestContext, loader
from django.utils import simplejson
from django.utils.translation import ugettext as _
from tinymce.compressor import gzip_compressor
from tinymce.widgets import get_language_config
from django.views.decorators.csrf import csrf_exempt
def textareas_js(request, name, lang=None):
"""
    Returns a HttpResponse whose content is a Javascript file. The template
is loaded from 'tinymce/<name>_textareas.js' or
'<name>/tinymce_textareas.js'. Optionally, the lang argument sets the
content language.
"""
template_files = (
'tinymce/%s_textareas.js' % name,
'%s/tinymce_textareas.js' % name,
)
template = loader.select_template(template_files)
vars = get_language_config(lang)
vars['content_language'] = lang
context = RequestContext(request, vars)
return HttpResponse(template.render(context),
content_type="application/x-javascript")
@csrf_exempt
def spell_check(request):
"""
Returns a HttpResponse that implements the TinyMCE spellchecker protocol.
"""
try:
import enchant
raw = request.raw_post_data
input = simplejson.loads(raw)
id = input['id']
method = input['method']
params = input['params']
lang = params[0]
arg = params[1]
if not enchant.dict_exists(str(lang)):
raise RuntimeError("dictionary not found for language '%s'" % lang)
checker = enchant.Dict(str(lang))
if method == 'checkWords':
result = [word for word in arg if not checker.check(word)]
elif method == 'getSuggestions':
result = checker.suggest(arg)
else:
raise RuntimeError("Unkown spellcheck method: '%s'" % method)
output = {
'id': id,
'result': result,
'error': None,
}
except Exception:
logging.exception("Error running spellchecker")
return HttpResponse(_("Error running spellchecker"))
return HttpResponse(simplejson.dumps(output),
content_type='application/json')
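# Illustrative request/response shapes for spell_check() above, read off the fields the
# handler consumes and produces (not copied from TinyMCE documentation); assumes
# enchant's en_US dictionary flags "Helo" as misspelled:
#
#   request body:  {"id": "c0", "method": "checkWords",
#                   "params": ["en_US", ["Helo", "world"]]}
#   response body: {"id": "c0", "result": ["Helo"], "error": null}
#
# "getSuggestions" instead takes a single word as params[1] and returns enchant's
# suggestion list in "result".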
def preview(request, name):
"""
Returns a HttpResponse whose content is an HTML file that is used
by the TinyMCE preview plugin. The template is loaded from
'tinymce/<name>_preview.html' or '<name>/tinymce_preview.html'.
"""
template_files = (
'tinymce/%s_preview.html' % name,
'%s/tinymce_preview.html' % name,
)
template = loader.select_template(template_files)
return HttpResponse(template.render(RequestContext(request)),
content_type="text/html")
def flatpages_link_list(request):
"""
    Returns a HttpResponse whose content is a Javascript file representing a
list of links to flatpages.
"""
from django.contrib.flatpages.models import FlatPage
link_list = [(page.title, page.url) for page in FlatPage.objects.all()]
return render_to_link_list(link_list)
def compressor(request):
"""
Returns a GZip-compressed response.
"""
return gzip_compressor(request)
def render_to_link_list(link_list):
"""
    Returns a HttpResponse whose content is a Javascript file representing a
    list of links suitable for use with the TinyMCE external_link_list_url
configuration option. The link_list parameter must be a list of 2-tuples.
"""
return render_to_js_vardef('tinyMCELinkList', link_list)
def render_to_image_list(image_list):
"""
    Returns a HttpResponse whose content is a Javascript file representing a
    list of images suitable for use with the TinyMCE external_image_list_url
configuration option. The image_list parameter must be a list of 2-tuples.
"""
return render_to_js_vardef('tinyMCEImageList', image_list)
def render_to_js_vardef(var_name, var_value):
output = "var %s = %s" % (var_name, simplejson.dumps(var_value))
return HttpResponse(output, content_type='application/x-javascript')
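# Illustrative example (not part of the original module): a project can point TinyMCE's
# external_image_list_url option at a view like the one below, which feeds
# (title, url) 2-tuples through render_to_image_list(). The "myapp.GalleryImage" model
# is hypothetical.
def example_gallery_image_list(request):
    from myapp.models import GalleryImage  # hypothetical model, imported lazily for illustration
    image_list = [(image.title, image.image.url) for image in GalleryImage.objects.all()]
    return render_to_image_list(image_list)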
def filebrowser(request):
fb_url = urlresolvers.reverse('filebrowser.views.browse')
return render_to_response('tinymce/filebrowser.js', {'fb_url': fb_url},
context_instance=RequestContext(request))
|
django-wodnas/django-tinymce
|
tinymce/views.py
|
Python
|
mit
| 4,440 | 0.001802 |
'''
mode | desc
r or rt | read in text mode
w or wt | write in text mode
a or at | append to the end of the file in text mode
rb | read in binary mode
wb | write in binary mode
ab | append to the end of the file in binary mode
'''
f = open("./py200_sample.txt", "w")
f.write("abcd")
f.close()
r = open("./py200_sample.txt", "r")
print("-" * 60)
print(r.readline())
r.close()
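# Companion example (not in the original file) using the binary modes listed in the
# table above; the .bin filename is made up for illustration.
bf = open("./py200_sample.bin", "wb")
bf.write(b"\x00\x01\x02")
bf.close()

rb = open("./py200_sample.bin", "rb")
print(rb.read())
rb.close()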
|
JaeGyu/PythonEx_1
|
p200_048.py
|
Python
|
mit
| 497 | 0 |
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# RUN: %p/structured_output | FileCheck %s
# pylint: disable=missing-docstring,line-too-long
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.compat.v2 as tf
from tensorflow.compiler.mlir.tensorflow.tests.tf_saved_model import common
class TestModule(tf.Module):
# The fNNNN name prefixes in this file are such that the sorted order of the
# functions in the resulting MLIR output match the order in the source file,
# allowing us to conveniently co-locate the CHECK's with the code they are
# checking.
#
# Note: CHECK-DAG doesn't work with CHECK-SAME/CHECK-NEXT.
# Check index paths for results.
#
# CHECK: func {{@[a-zA-Z_0-9]+}}() -> (
# CHECK-SAME: tensor<1xf32> {tf_saved_model.index_path = []})
# CHECK-SAME: attributes {{.*}} tf_saved_model.exported_names = ["f0000_single_return"]
@tf.function(input_signature=[])
def f0000_single_return(self):
return tf.constant(1.0, shape=[1])
# Check index paths for results with multiple return values.
# Note that semantically in Python, multiple return values are equivalent
# to returning a tuple/list.
#
# CHECK: func {{@[a-zA-Z_0-9]+}}() -> (
# CHECK-SAME: tensor<1xf32> {tf_saved_model.index_path = [0]},
# CHECK-SAME: tensor<2xf32> {tf_saved_model.index_path = [1]})
# CHECK-SAME: attributes {{.*}} tf_saved_model.exported_names = ["f0001_multiple_results_no_punctuation"]
@tf.function(input_signature=[])
def f0001_multiple_results_no_punctuation(self):
return tf.constant(1.0, shape=[1]), tf.constant(1.0, shape=[2])
# Check index paths for results written explicitly with parentheses.
# This is semantically equivalent to the earlier test without parentheses,
# but this test serves as documentation of this behavior for the purposes
# of tf_saved_model users.
#
# CHECK: func {{@[a-zA-Z_0-9]+}}() -> (
# CHECK-SAME: tensor<1xf32> {tf_saved_model.index_path = [0]},
# CHECK-SAME: tensor<2xf32> {tf_saved_model.index_path = [1]})
# CHECK-SAME: attributes {{.*}} tf_saved_model.exported_names = ["f0002_multiple_results_parentheses"]
@tf.function(input_signature=[])
def f0002_multiple_results_parentheses(self):
return (tf.constant(1.0, shape=[1]), tf.constant(1.0, shape=[2]))
# Check index paths for results written explicitly with brackets.
# This is semantically equivalent to the earlier test without parentheses,
# but this test serves as documentation of this behavior for the purposes
# of tf_saved_model users.
#
# CHECK: func {{@[a-zA-Z_0-9]+}}() -> (
# CHECK-SAME: tensor<1xf32> {tf_saved_model.index_path = [0]},
# CHECK-SAME: tensor<2xf32> {tf_saved_model.index_path = [1]})
# CHECK-SAME: attributes {{.*}} tf_saved_model.exported_names = ["f0003_multiple_results_brackets"]
@tf.function(input_signature=[])
def f0003_multiple_results_brackets(self):
return [tf.constant(1.0, shape=[1]), tf.constant(1.0, shape=[2])]
# Check index paths for lists.
#
# CHECK: func {{@[a-zA-Z_0-9]+}}() -> (
# CHECK-SAME: tensor<1xf32> {tf_saved_model.index_path = [0, 0]},
# CHECK-SAME: tensor<2xf32> {tf_saved_model.index_path = [0, 1]})
# CHECK-SAME: attributes {{.*}} tf_saved_model.exported_names = ["f0004_list_2_elements"]
@tf.function(input_signature=[])
def f0004_list_2_elements(self):
return [[tf.constant(1.0, shape=[1]), tf.constant(1.0, shape=[2])]]
# Check index paths for dicts.
# Keys are linearized in sorted order, matching `tf.nest.flatten`.
# More thorough testing of this is in structured_input.py. The underlying code
# path for linearization is shared, so no need to replicate that testing here.
#
# CHECK: func {{@[a-zA-Z_0-9]+}}() -> (
# CHECK-SAME: tensor<1xf32> {tf_saved_model.index_path = ["x"]},
# CHECK-SAME: tensor<2xf32> {tf_saved_model.index_path = ["y"]})
# CHECK-SAME: attributes {{.*}} tf_saved_model.exported_names = ["f0005_dict_2_keys"]
@tf.function(input_signature=[])
def f0005_dict_2_keys(self):
return {
'x': tf.constant(1.0, shape=[1]),
'y': tf.constant(1.0, shape=[2]),
}
# Check index paths for outputs are correctly handled in the presence of
# multiple return statements.
#
# CHECK: func {{@[a-zA-Z_0-9]+}}(
# CHECK-SAME: %arg0: tensor<f32> {tf._user_specified_name = "x", tf_saved_model.index_path = [0]}
# CHECK-SAME: ) -> (
# CHECK-SAME: tensor<1xf32> {tf_saved_model.index_path = ["x"]})
# CHECK-SAME: attributes {{.*}} tf_saved_model.exported_names = ["f0006_multiple_return_statements"]
@tf.function(input_signature=[tf.TensorSpec([], tf.float32)])
def f0006_multiple_return_statements(self, x):
if x > 3.:
return {'x': tf.constant(1.0, shape=[1])}
else:
return {'x': tf.constant(1.0, shape=[1])}
if __name__ == '__main__':
common.do_test(TestModule)
|
karllessard/tensorflow
|
tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/structured_output.py
|
Python
|
apache-2.0
| 5,614 | 0.015497 |
import unittest
from series import slices
# Tests adapted from `problem-specifications//canonical-data.json` @ v1.0.0
class SeriesTest(unittest.TestCase):
def test_slices_of_one_from_one(self):
self.assertEqual(slices("1", 1), ["1"])
def test_slices_of_one_from_two(self):
self.assertEqual(slices("12", 1), ["1", "2"])
def test_slices_of_two(self):
self.assertEqual(slices("35", 2), ["35"])
def test_slices_of_two_overlap(self):
self.assertEqual(slices("9142", 2), ["91", "14", "42"])
def test_slices_can_include_duplicates(self):
self.assertEqual(slices("777777", 3), ["777", "777", "777", "777"])
def test_slices_of_a_long_series(self):
self.assertEqual(
slices("918493904243", 5),
["91849", "18493", "84939", "49390", "93904", "39042", "90424", "04243"],
)
def test_slice_length_is_too_large(self):
with self.assertRaisesWithMessage(ValueError):
slices("12345", 6)
def test_slice_length_cannot_be_zero(self):
with self.assertRaisesWithMessage(ValueError):
slices("12345", 0)
def test_slice_length_cannot_be_negative(self):
with self.assertRaisesWithMessage(ValueError):
slices("123", -1)
def test_empty_series_is_invalid(self):
with self.assertRaisesWithMessage(ValueError):
slices("", 1)
# Utility functions
def setUp(self):
try:
self.assertRaisesRegex
except AttributeError:
self.assertRaisesRegex = self.assertRaisesRegexp
def assertRaisesWithMessage(self, exception):
return self.assertRaisesRegex(exception, r".+")
if __name__ == "__main__":
unittest.main()
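# For reference, a minimal implementation that would satisfy the tests above might look
# like the sketch below (the exercise normally ships its own series.py; this is only
# illustrative, not the canonical solution):
#
# def slices(series, length):
#     if not series or length <= 0 or length > len(series):
#         raise ValueError("invalid slice length for this series")
#     return [series[i:i + length] for i in range(len(series) - length + 1)]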
|
TGITS/programming-workouts
|
exercism/python/series/series_test.py
|
Python
|
mit
| 1,747 | 0.000572 |
# (c) 2016, Matt Davis <mdavis@ansible.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import time
from datetime import datetime, timedelta
from ansible.errors import AnsibleError
from ansible.plugins.action import ActionBase
from ansible.module_utils._text import to_native
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class TimedOutException(Exception):
pass
class ActionModule(ActionBase):
TRANSFERS_FILES = False
DEFAULT_REBOOT_TIMEOUT = 600
DEFAULT_CONNECT_TIMEOUT = 5
DEFAULT_PRE_REBOOT_DELAY = 2
DEFAULT_POST_REBOOT_DELAY = 0
DEFAULT_TEST_COMMAND = 'whoami'
DEFAULT_REBOOT_MESSAGE = 'Reboot initiated by Ansible.'
def get_system_uptime(self):
uptime_command = "(Get-WmiObject -ClassName Win32_OperatingSystem).LastBootUpTime"
(rc, stdout, stderr) = self._connection.exec_command(uptime_command)
if rc != 0:
raise Exception("win_reboot: failed to get host uptime info, rc: %d, stdout: %s, stderr: %s"
% (rc, stdout, stderr))
return stdout
def do_until_success_or_timeout(self, what, timeout, what_desc, fail_sleep=1):
max_end_time = datetime.utcnow() + timedelta(seconds=timeout)
exc = ""
while datetime.utcnow() < max_end_time:
try:
what()
if what_desc:
display.debug("win_reboot: %s success" % what_desc)
return
except Exception as e:
exc = e
if what_desc:
display.debug("win_reboot: %s fail (expected), retrying in %d seconds..." % (what_desc, fail_sleep))
time.sleep(fail_sleep)
raise TimedOutException("timed out waiting for %s: %s" % (what_desc, exc))
def run(self, tmp=None, task_vars=None):
self._supports_check_mode = True
self._supports_async = True
if self._play_context.check_mode:
return dict(changed=True, elapsed=0, rebooted=True)
if task_vars is None:
task_vars = dict()
result = super(ActionModule, self).run(tmp, task_vars)
if result.get('skipped', False) or result.get('failed', False):
return result
# Handle timeout parameters and its alias
deprecated_args = {
'shutdown_timeout': '2.5',
'shutdown_timeout_sec': '2.5',
}
for arg, version in deprecated_args.items():
if self._task.args.get(arg) is not None:
display.warning("Since Ansible %s, %s is no longer used with win_reboot" % (arg, version))
if self._task.args.get('connect_timeout') is not None:
connect_timeout = int(self._task.args.get('connect_timeout', self.DEFAULT_CONNECT_TIMEOUT))
else:
connect_timeout = int(self._task.args.get('connect_timeout_sec', self.DEFAULT_CONNECT_TIMEOUT))
if self._task.args.get('reboot_timeout') is not None:
reboot_timeout = int(self._task.args.get('reboot_timeout', self.DEFAULT_REBOOT_TIMEOUT))
else:
reboot_timeout = int(self._task.args.get('reboot_timeout_sec', self.DEFAULT_REBOOT_TIMEOUT))
if self._task.args.get('pre_reboot_delay') is not None:
pre_reboot_delay = int(self._task.args.get('pre_reboot_delay', self.DEFAULT_PRE_REBOOT_DELAY))
else:
pre_reboot_delay = int(self._task.args.get('pre_reboot_delay_sec', self.DEFAULT_PRE_REBOOT_DELAY))
if self._task.args.get('post_reboot_delay') is not None:
post_reboot_delay = int(self._task.args.get('post_reboot_delay', self.DEFAULT_POST_REBOOT_DELAY))
else:
post_reboot_delay = int(self._task.args.get('post_reboot_delay_sec', self.DEFAULT_POST_REBOOT_DELAY))
test_command = str(self._task.args.get('test_command', self.DEFAULT_TEST_COMMAND))
msg = str(self._task.args.get('msg', self.DEFAULT_REBOOT_MESSAGE))
# Get current uptime
try:
before_uptime = self.get_system_uptime()
except Exception as e:
result['failed'] = True
result['reboot'] = False
result['msg'] = to_native(e)
return result
# Initiate reboot
display.vvv("rebooting server")
(rc, stdout, stderr) = self._connection.exec_command('shutdown /r /t %d /c "%s"' % (pre_reboot_delay, msg))
# Test for "A system shutdown has already been scheduled. (1190)" and handle it gracefully
if rc == 1190:
display.warning('A scheduled reboot was pre-empted by Ansible.')
# Try to abort (this may fail if it was already aborted)
(rc, stdout1, stderr1) = self._connection.exec_command('shutdown /a')
# Initiate reboot again
(rc, stdout2, stderr2) = self._connection.exec_command('shutdown /r /t %d' % pre_reboot_delay)
stdout += stdout1 + stdout2
stderr += stderr1 + stderr2
if rc != 0:
result['failed'] = True
result['rebooted'] = False
result['msg'] = "Shutdown command failed, error text was %s" % stderr
return result
start = datetime.now()
# Get the original connection_timeout option var so it can be reset after
connection_timeout_orig = None
try:
connection_timeout_orig = self._connection.get_option('connection_timeout')
except AnsibleError:
display.debug("win_reboot: connection_timeout connection option has not been set")
try:
# keep on checking system uptime with short connection responses
def check_uptime():
display.vvv("attempting to get system uptime")
# override connection timeout from defaults to custom value
try:
self._connection.set_options(direct={"connection_timeout": connect_timeout})
self._connection._reset()
except AttributeError:
display.warning("Connection plugin does not allow the connection timeout to be overridden")
# try and get uptime
try:
current_uptime = self.get_system_uptime()
except Exception as e:
raise e
if current_uptime == before_uptime:
raise Exception("uptime has not changed")
self.do_until_success_or_timeout(check_uptime, reboot_timeout, what_desc="reboot uptime check success")
# reset the connection to clear the custom connection timeout
try:
self._connection.set_options(direct={"connection_timeout": connection_timeout_orig})
self._connection._reset()
except (AnsibleError, AttributeError):
display.debug("Failed to reset connection_timeout back to default")
# finally run test command to ensure everything is working
def run_test_command():
display.vvv("attempting post-reboot test command '%s'" % test_command)
(rc, stdout, stderr) = self._connection.exec_command(test_command)
if rc != 0:
raise Exception('test command failed')
# FUTURE: add a stability check (system must remain up for N seconds) to deal with self-multi-reboot updates
self.do_until_success_or_timeout(run_test_command, reboot_timeout, what_desc="post-reboot test command success")
result['rebooted'] = True
result['changed'] = True
except TimedOutException as toex:
result['failed'] = True
result['rebooted'] = True
result['msg'] = to_native(toex)
if post_reboot_delay != 0:
display.vvv("win_reboot: waiting an additional %d seconds" % post_reboot_delay)
time.sleep(post_reboot_delay)
elapsed = datetime.now() - start
result['elapsed'] = elapsed.seconds
return result
|
photoninger/ansible
|
lib/ansible/plugins/action/win_reboot.py
|
Python
|
gpl-3.0
| 8,292 | 0.0041 |
# Created By: Virgil Dupras
# Created On: 2006/01/29
# Copyright 2015 Hardcoded Software (http://www.hardcoded.net)
#
# This software is licensed under the "GPLv3" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.gnu.org/licenses/gpl-3.0.html
import difflib
import itertools
import logging
import string
from collections import defaultdict, namedtuple
from unicodedata import normalize
from hscommon.util import flatten, multi_replace
from hscommon.trans import tr
from hscommon.jobprogress import job
(WEIGHT_WORDS, MATCH_SIMILAR_WORDS, NO_FIELD_ORDER,) = range(3)
JOB_REFRESH_RATE = 100
def getwords(s):
# We decompose the string so that ascii letters with accents can be part of the word.
s = normalize("NFD", s)
s = multi_replace(s, "-_&+():;\\[]{}.,<>/?~!@#$*", " ").lower()
s = "".join(
c for c in s if c in string.ascii_letters + string.digits + string.whitespace
)
return [_f for _f in s.split(" ") if _f] # remove empty elements
def getfields(s):
fields = [getwords(field) for field in s.split(" - ")]
return [_f for _f in fields if _f]
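# Illustrative examples (not from the original file) of what the two helpers above
# produce; the values follow directly from the code:
#
#     >>> getwords("Foo_Bar-Baz (Live)")
#     ['foo', 'bar', 'baz', 'live']
#     >>> getfields("Foo Bar - Baz")
#     [['foo', 'bar'], ['baz']]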
def unpack_fields(fields):
result = []
for field in fields:
if isinstance(field, list):
result += field
else:
result.append(field)
return result
def compare(first, second, flags=()):
"""Returns the % of words that match between ``first`` and ``second``
The result is a ``int`` in the range 0..100.
``first`` and ``second`` can be either a string or a list (of words).
"""
if not (first and second):
return 0
if any(isinstance(element, list) for element in first):
return compare_fields(first, second, flags)
second = second[:] # We must use a copy of second because we remove items from it
match_similar = MATCH_SIMILAR_WORDS in flags
weight_words = WEIGHT_WORDS in flags
joined = first + second
total_count = sum(len(word) for word in joined) if weight_words else len(joined)
match_count = 0
in_order = True
for word in first:
if match_similar and (word not in second):
similar = difflib.get_close_matches(word, second, 1, 0.8)
if similar:
word = similar[0]
if word in second:
if second[0] != word:
in_order = False
second.remove(word)
match_count += len(word) if weight_words else 1
result = round(((match_count * 2) / total_count) * 100)
if (result == 100) and (not in_order):
result = 99 # We cannot consider a match exact unless the ordering is the same
return result
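# Illustrative examples (not from the original file); the percentages follow from the
# word-count arithmetic in compare() above:
#
#     >>> compare(['a', 'b', 'c', 'd'], ['a', 'b', 'c', 'd'])
#     100
#     >>> compare(['a', 'b', 'c', 'd'], ['a', 'b', 'c', 'e'])   # 3 of 4 words shared
#     75
#     >>> compare(['a', 'b'], ['b', 'a'])   # same words, different order caps at 99
#     99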
def compare_fields(first, second, flags=()):
"""Returns the score for the lowest matching :ref:`fields`.
``first`` and ``second`` must be lists of lists of string. Each sub-list is then compared with
:func:`compare`.
"""
if len(first) != len(second):
return 0
if NO_FIELD_ORDER in flags:
results = []
# We don't want to remove field directly in the list. We must work on a copy.
second = second[:]
for field1 in first:
max = 0
matched_field = None
for field2 in second:
r = compare(field1, field2, flags)
if r > max:
max = r
matched_field = field2
results.append(max)
if matched_field:
second.remove(matched_field)
else:
results = [
compare(field1, field2, flags) for field1, field2 in zip(first, second)
]
return min(results) if results else 0
def build_word_dict(objects, j=job.nulljob):
"""Returns a dict of objects mapped by their words.
objects must have a ``words`` attribute being a list of strings or a list of lists of strings
(:ref:`fields`).
The result will be a dict with words as keys, lists of objects as values.
"""
result = defaultdict(set)
for object in j.iter_with_progress(
objects, "Prepared %d/%d files", JOB_REFRESH_RATE
):
for word in unpack_fields(object.words):
result[word].add(object)
return result
def merge_similar_words(word_dict):
"""Take all keys in ``word_dict`` that are similar, and merge them together.
``word_dict`` has been built with :func:`build_word_dict`. Similarity is computed with Python's
``difflib.get_close_matches()``, which computes the number of edits that are necessary to make
a word equal to the other.
"""
keys = list(word_dict.keys())
keys.sort(key=len) # we want the shortest word to stay
while keys:
key = keys.pop(0)
similars = difflib.get_close_matches(key, keys, 100, 0.8)
if not similars:
continue
objects = word_dict[key]
for similar in similars:
objects |= word_dict[similar]
del word_dict[similar]
keys.remove(similar)
def reduce_common_words(word_dict, threshold):
"""Remove all objects from ``word_dict`` values where the object count >= ``threshold``
``word_dict`` has been built with :func:`build_word_dict`.
The exception to this removal are the objects where all the words of the object are common.
Because if we remove them, we will miss some duplicates!
"""
uncommon_words = set(
word for word, objects in word_dict.items() if len(objects) < threshold
)
for word, objects in list(word_dict.items()):
if len(objects) < threshold:
continue
reduced = set()
for o in objects:
if not any(w in uncommon_words for w in unpack_fields(o.words)):
reduced.add(o)
if reduced:
word_dict[word] = reduced
else:
del word_dict[word]
# Writing docstrings in a namedtuple is tricky. From Python 3.3, it's possible to set __doc__, but
# some research allowed me to find a more elegant solution, which is what is done here. See
# http://stackoverflow.com/questions/1606436/adding-docstrings-to-namedtuples-in-python
class Match(namedtuple("Match", "first second percentage")):
"""Represents a match between two :class:`~core.fs.File`.
    Regardless of the matching method, when two files are determined to match, a Match pair is created,
which holds, of course, the two matched files, but also their match "level".
.. attribute:: first
first file of the pair.
.. attribute:: second
second file of the pair.
.. attribute:: percentage
their match level according to the scan method which found the match. int from 1 to 100. For
exact scan methods, such as Contents scans, this will always be 100.
"""
__slots__ = ()
def get_match(first, second, flags=()):
# it is assumed here that first and second both have a "words" attribute
percentage = compare(first.words, second.words, flags)
return Match(first, second, percentage)
def getmatches(
objects,
min_match_percentage=0,
match_similar_words=False,
weight_words=False,
no_field_order=False,
j=job.nulljob,
):
"""Returns a list of :class:`Match` within ``objects`` after fuzzily matching their words.
:param objects: List of :class:`~core.fs.File` to match.
:param int min_match_percentage: minimum % of words that have to match.
:param bool match_similar_words: make similar words (see :func:`merge_similar_words`) match.
:param bool weight_words: longer words are worth more in match % computations.
:param bool no_field_order: match :ref:`fields` regardless of their order.
:param j: A :ref:`job progress instance <jobs>`.
"""
COMMON_WORD_THRESHOLD = 50
LIMIT = 5000000
j = j.start_subjob(2)
sj = j.start_subjob(2)
for o in objects:
if not hasattr(o, "words"):
o.words = getwords(o.name)
word_dict = build_word_dict(objects, sj)
reduce_common_words(word_dict, COMMON_WORD_THRESHOLD)
if match_similar_words:
merge_similar_words(word_dict)
match_flags = []
if weight_words:
match_flags.append(WEIGHT_WORDS)
if match_similar_words:
match_flags.append(MATCH_SIMILAR_WORDS)
if no_field_order:
match_flags.append(NO_FIELD_ORDER)
j.start_job(len(word_dict), tr("0 matches found"))
compared = defaultdict(set)
result = []
try:
# This whole 'popping' thing is there to avoid taking too much memory at the same time.
while word_dict:
items = word_dict.popitem()[1]
while items:
ref = items.pop()
compared_already = compared[ref]
to_compare = items - compared_already
compared_already |= to_compare
for other in to_compare:
m = get_match(ref, other, match_flags)
if m.percentage >= min_match_percentage:
result.append(m)
if len(result) >= LIMIT:
return result
j.add_progress(desc=tr("%d matches found") % len(result))
except MemoryError:
# This is the place where the memory usage is at its peak during the scan.
# Just continue the process with an incomplete list of matches.
del compared # This should give us enough room to call logging.
logging.warning(
"Memory Overflow. Matches: %d. Word dict: %d"
% (len(result), len(word_dict))
)
return result
return result
def getmatches_by_contents(files, j=job.nulljob):
"""Returns a list of :class:`Match` within ``files`` if their contents is the same.
:param j: A :ref:`job progress instance <jobs>`.
"""
size2files = defaultdict(set)
for f in files:
if f.size:
size2files[f.size].add(f)
del files
possible_matches = [files for files in size2files.values() if len(files) > 1]
del size2files
result = []
j.start_job(len(possible_matches), tr("0 matches found"))
for group in possible_matches:
for first, second in itertools.combinations(group, 2):
if first.is_ref and second.is_ref:
continue # Don't spend time comparing two ref pics together.
if first.md5partial == second.md5partial:
if first.md5 == second.md5:
result.append(Match(first, second, 100))
j.add_progress(desc=tr("%d matches found") % len(result))
return result
class Group:
"""A group of :class:`~core.fs.File` that match together.
This manages match pairs into groups and ensures that all files in the group match to each
other.
.. attribute:: ref
The "reference" file, which is the file among the group that isn't going to be deleted.
.. attribute:: ordered
Ordered list of duplicates in the group (including the :attr:`ref`).
.. attribute:: unordered
Set duplicates in the group (including the :attr:`ref`).
.. attribute:: dupes
        An ordered list of the group's duplicates, without :attr:`ref`. Equivalent to
``ordered[1:]``
.. attribute:: percentage
Average match percentage of match pairs containing :attr:`ref`.
"""
# ---Override
def __init__(self):
self._clear()
def __contains__(self, item):
return item in self.unordered
def __getitem__(self, key):
return self.ordered.__getitem__(key)
def __iter__(self):
return iter(self.ordered)
def __len__(self):
return len(self.ordered)
# ---Private
def _clear(self):
self._percentage = None
self._matches_for_ref = None
self.matches = set()
self.candidates = defaultdict(set)
self.ordered = []
self.unordered = set()
def _get_matches_for_ref(self):
if self._matches_for_ref is None:
ref = self.ref
self._matches_for_ref = [match for match in self.matches if ref in match]
return self._matches_for_ref
# ---Public
def add_match(self, match):
"""Adds ``match`` to internal match list and possibly add duplicates to the group.
A duplicate can only be considered as such if it matches all other duplicates in the group.
This method registers that pair (A, B) represented in ``match`` as possible candidates and,
if A and/or B end up matching every other duplicates in the group, add these duplicates to
the group.
:param tuple match: pair of :class:`~core.fs.File` to add
"""
def add_candidate(item, match):
matches = self.candidates[item]
matches.add(match)
if self.unordered <= matches:
self.ordered.append(item)
self.unordered.add(item)
if match in self.matches:
return
self.matches.add(match)
first, second, _ = match
if first not in self.unordered:
add_candidate(first, second)
if second not in self.unordered:
add_candidate(second, first)
self._percentage = None
self._matches_for_ref = None
def discard_matches(self):
"""Remove all recorded matches that didn't result in a duplicate being added to the group.
You can call this after the duplicate scanning process to free a bit of memory.
"""
discarded = set(
m
for m in self.matches
if not all(obj in self.unordered for obj in [m.first, m.second])
)
self.matches -= discarded
self.candidates = defaultdict(set)
return discarded
def get_match_of(self, item):
"""Returns the match pair between ``item`` and :attr:`ref`.
"""
if item is self.ref:
return
for m in self._get_matches_for_ref():
if item in m:
return m
def prioritize(self, key_func, tie_breaker=None):
"""Reorders :attr:`ordered` according to ``key_func``.
:param key_func: Key (f(x)) to be used for sorting
:param tie_breaker: function to be used to select the reference position in case the top
duplicates have the same key_func() result.
"""
# tie_breaker(ref, dupe) --> True if dupe should be ref
# Returns True if anything changed during prioritization.
master_key_func = lambda x: (-x.is_ref, key_func(x))
new_order = sorted(self.ordered, key=master_key_func)
changed = new_order != self.ordered
self.ordered = new_order
if tie_breaker is None:
return changed
ref = self.ref
key_value = key_func(ref)
for dupe in self.dupes:
if key_func(dupe) != key_value:
break
if tie_breaker(ref, dupe):
ref = dupe
if ref is not self.ref:
self.switch_ref(ref)
return True
return changed
def remove_dupe(self, item, discard_matches=True):
try:
self.ordered.remove(item)
self.unordered.remove(item)
self._percentage = None
self._matches_for_ref = None
if (len(self) > 1) and any(
not getattr(item, "is_ref", False) for item in self
):
if discard_matches:
self.matches = set(m for m in self.matches if item not in m)
else:
self._clear()
except ValueError:
pass
def switch_ref(self, with_dupe):
"""Make the :attr:`ref` dupe of the group switch position with ``with_dupe``.
"""
if self.ref.is_ref:
return False
try:
self.ordered.remove(with_dupe)
self.ordered.insert(0, with_dupe)
self._percentage = None
self._matches_for_ref = None
return True
except ValueError:
return False
dupes = property(lambda self: self[1:])
@property
def percentage(self):
if self._percentage is None:
if self.dupes:
matches = self._get_matches_for_ref()
self._percentage = sum(match.percentage for match in matches) // len(
matches
)
else:
self._percentage = 0
return self._percentage
@property
def ref(self):
if self:
return self[0]
def get_groups(matches):
"""Returns a list of :class:`Group` from ``matches``.
Create groups out of match pairs in the smartest way possible.
"""
matches.sort(key=lambda match: -match.percentage)
dupe2group = {}
groups = []
try:
for match in matches:
first, second, _ = match
first_group = dupe2group.get(first)
second_group = dupe2group.get(second)
if first_group:
if second_group:
if first_group is second_group:
target_group = first_group
else:
continue
else:
target_group = first_group
dupe2group[second] = target_group
else:
if second_group:
target_group = second_group
dupe2group[first] = target_group
else:
target_group = Group()
groups.append(target_group)
dupe2group[first] = target_group
dupe2group[second] = target_group
target_group.add_match(match)
except MemoryError:
del dupe2group
del matches
# should free enough memory to continue
logging.warning("Memory Overflow. Groups: {0}".format(len(groups)))
    # Now that we have groups, we have to discard the groups' matches and see if there are any "orphan"
    # matches, that is, matches that were candidates in a group but where neither of their two files was
    # accepted in the group. With these orphan matches, it's safe to build additional groups
matched_files = set(flatten(groups))
orphan_matches = []
for group in groups:
orphan_matches += {
m
for m in group.discard_matches()
if not any(obj in matched_files for obj in [m.first, m.second])
}
if groups and orphan_matches:
groups += get_groups(
orphan_matches
) # no job, as it isn't supposed to take a long time
return groups
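# Illustrative end-to-end usage (not part of the original module): any hashable object
# with a "name" attribute can be matched and grouped. The tiny stand-in class below is
# only for demonstration; in dupeGuru the objects are core.fs.File instances.
#
#     >>> class NamedObject(object):
#     ...     def __init__(self, name):
#     ...         self.name = name
#     >>> objects = [NamedObject("foo bar"), NamedObject("bar bleh"), NamedObject("foo bleh")]
#     >>> matches = getmatches(objects, min_match_percentage=50)   # every pair shares one word
#     >>> groups = get_groups(matches)
#     >>> len(groups), len(groups[0])
#     (1, 3)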
|
mahmutf/dupeguru
|
core/engine.py
|
Python
|
gpl-3.0
| 18,745 | 0.002401 |
#!/usr/bin/env python
# encoding: utf-8
"""
Copyright (c) 2010 The Echo Nest. All rights reserved.
Created by Tyler Williams on 2010-09-01
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
"""
# ========================
# = try_new_things.py =
# ========================
#
# enter a few of your favorite artists and create a playlist of new music that
# you might like.
#
import sys, os, logging
import xml.sax.saxutils as saxutils
from optparse import OptionParser
from pyechonest import artist, playlist
# set your api key here if it's not set in the environment
# config.ECHO_NEST_API_KEY = "XXXXXXXXXXXXXXXXX"
logger = logging.getLogger(__name__)
class XmlWriter(object):
""" code from: http://users.musicbrainz.org/~matt/xspf/m3u2xspf
Copyright (c) 2006, Matthias Friedrich <matt@mafr.de>
"""
def __init__(self, outStream, indentAmount=' '):
self._out = outStream
self._indentAmount = indentAmount
self._stack = [ ]
def prolog(self, encoding='UTF-8', version='1.0'):
pi = '<?xml version="%s" encoding="%s"?>' % (version, encoding)
self._out.write(pi + '\n')
def start(self, name, attrs={ }):
indent = self._getIndention()
self._stack.append(name)
self._out.write(indent + self._makeTag(name, attrs) + '\n')
def end(self):
name = self._stack.pop()
indent = self._getIndention()
self._out.write('%s</%s>\n' % (indent, name))
def elem(self, name, value, attrs={ }):
# delete attributes with an unset value
for (k, v) in attrs.items():
if v is None or v == '':
del attrs[k]
if value is None or value == '':
if len(attrs) == 0:
return
self._out.write(self._getIndention())
self._out.write(self._makeTag(name, attrs, True) + '\n')
else:
escValue = saxutils.escape(value or '')
self._out.write(self._getIndention())
self._out.write(self._makeTag(name, attrs))
self._out.write(escValue)
self._out.write('</%s>\n' % name)
def _getIndention(self):
return self._indentAmount * len(self._stack)
def _makeTag(self, name, attrs={ }, close=False):
ret = '<' + name
for (k, v) in attrs.iteritems():
if v is not None:
v = saxutils.quoteattr(str(v))
ret += ' %s=%s' % (k, v)
if close:
return ret + '/>'
else:
return ret + '>'
def write_xspf(f, tuples):
"""send me a list of (artist,title,mp3_url)"""
xml = XmlWriter(f, indentAmount=' ')
xml.prolog()
xml.start('playlist', { 'xmlns': 'http://xspf.org/ns/0/', 'version': '1' })
xml.start('trackList')
for tupe in tuples:
xml.start('track')
xml.elem('creator',tupe[0])
xml.elem('title',tupe[1])
xml.elem('location', tupe[2])
xml.end()
xml.end()
xml.end()
f.close()
def lookup_seeds(seed_artist_names):
seed_ids = []
for artist_name in seed_artist_names:
try:
seed_ids.append("-%s" % (artist.Artist(artist_name).id,))
except Exception:
logger.info('artist "%s" not found.' % (artist_name,))
# we could try to do full artist search here
# and let them choose the right artist
logger.info('seed_ids: %s' % (seed_ids,))
return seed_ids
def find_playlist(seed_artist_ids, playable=False):
if playable:
logger.info("finding playlist with audio...")
p = playlist.static(type='artist-radio', artist_id=seed_artist_ids, variety=1, buckets=['id:7digital', 'tracks'], limit=True)
else:
logger.info("finding playlist without audio...")
p = playlist.static(type='artist-radio', artist_id=seed_artist_ids, variety=1)
return p
if __name__ == "__main__":
usage = 'usage: %prog [options] "artist 1" "artist 2" ... "artist N"\n\n' \
'example:\n' \
'\t ./%prog "arcade fire" "feist" "broken social scene" -x -f arcade_feist_scene.xspf\n' \
'\t ./%prog "justice" "four tet" "bitshifter" -v\n'
parser = OptionParser(usage=usage)
parser.add_option("-v", "--verbose",
action="store_true", dest="verbose", default=False,
help="say what you're doing")
parser.add_option("-a", "--audio",
action="store_true", dest="audio", default=False,
help="fetch sample audio for songs")
parser.add_option("-x", "--xspf",
action="store_true", dest="xspf", default=False,
help="output an xspf format playlist")
parser.add_option("-f", "--filename",
metavar="FILE", help="write output to FILE")
(options, args) = parser.parse_args()
if len(args) < 1:
parser.error("you must provide at least 1 seed artist!")
# handle verbose logging
log_level = logging.ERROR
if options.verbose:
log_level = logging.INFO
logging.basicConfig(level=log_level)
logger.setLevel(log_level)
# make sure output file doesn't already exist
if options.filename and os.path.exists(options.filename):
logger.error("The file path: %s already exists." % (options.filename,))
sys.exit(1)
# resolve seed artists
seed_ids = lookup_seeds(args)
# find playlist
raw_plist = find_playlist(seed_ids, playable=(options.audio or options.xspf))
tuple_plist = []
for s in raw_plist:
name = s.artist_name
title = s.title
url = ""
if options.audio:
url = s.get_tracks('7digital', [{}])[0].get('preview_url')
tuple_plist.append((name,title,url))
# write to stdout or file specified
fout = open(options.filename, 'w') if options.filename else sys.stdout
if options.xspf:
write_xspf(fout, tuple_plist)
else:
for tupe in tuple_plist:
fout.write("%s - %s \t %s\n" % tupe)
logger.info("all done!")
sys.exit(0)
|
alex/pyechonest
|
examples/try_new_things.py
|
Python
|
bsd-3-clause
| 6,593 | 0.005764 |
# -*- coding: utf-8 -*-
def main():
startApplication("sasview")
clickTab(waitForObject(":FittingWidgetUI.tabFitting_QTabWidget_2"), "Resolution")
test.compare(waitForObjectExists(":groupBox_4.cbSmearing_QComboBox").currentIndex, 0)
test.compare(str(waitForObjectExists(":groupBox_4.cbSmearing_QComboBox").currentText), "None")
test.compare(waitForObjectExists(":groupBox_4.cbSmearing_QComboBox").count, 1)
clickTab(waitForObject(":FittingWidgetUI.tabFitting_QTabWidget_2"), "Model")
clickButton(waitForObject(":groupBox.cmdLoad_QPushButton"))
waitForObjectItem(":stackedWidget.listView_QListView", "test")
doubleClickItem(":stackedWidget.listView_QListView", "test", 36, 4, 0, Qt.LeftButton)
waitForObjectItem(":stackedWidget.listView_QListView", "1d\\_data")
doubleClickItem(":stackedWidget.listView_QListView", "1d\\_data", 30, 10, 0, Qt.LeftButton)
waitForObjectItem(":stackedWidget.listView_QListView", "cyl\\_400\\_20\\.txt")
doubleClickItem(":stackedWidget.listView_QListView", "cyl\\_400\\_20\\.txt", 72, 3, 0, Qt.LeftButton)
clickButton(waitForObject(":groupBox.cmdSendTo_QPushButton"))
mouseClick(waitForObject(":groupBox_6.cbCategory_QComboBox_2"), 136, 8, 0, Qt.LeftButton)
mouseClick(waitForObjectItem(":groupBox_6.cbCategory_QComboBox_2", "Cylinder"), 129, 9, 0, Qt.LeftButton)
clickTab(waitForObject(":FittingWidgetUI.tabFitting_QTabWidget_2"), "Resolution")
test.compare(waitForObjectExists(":groupBox_4.cbSmearing_QComboBox").currentIndex, 0)
test.compare(str(waitForObjectExists(":groupBox_4.cbSmearing_QComboBox").currentText), "None")
test.compare(waitForObjectExists(":groupBox_4.cbSmearing_QComboBox").count, 3)
mouseClick(waitForObject(":groupBox_4.cbSmearing_QComboBox"), 117, 7, 0, Qt.LeftButton)
mouseClick(waitForObjectItem(":groupBox_4.cbSmearing_QComboBox", "Custom Pinhole Smear"), 113, 6, 0, Qt.LeftButton)
test.compare(str(waitForObjectExists(":groupBox_4.cbSmearing_QComboBox").currentText), "Custom Pinhole Smear")
test.compare(str(waitForObjectExists(":groupBox_4.lblSmearUp_QLabel").text), "<html><head/><body><p>dQ<span style=\" vertical-align:sub;\">low</span></p></body></html>")
test.compare(str(waitForObjectExists(":groupBox_4.lblSmearDown_QLabel").text), "<html><head/><body><p>dQ<span style=\" vertical-align:sub;\">high</span></p></body></html>")
test.compare(str(waitForObjectExists(":groupBox_4.txtSmearUp_QLineEdit").text), "")
test.compare(waitForObjectExists(":groupBox_4.txtSmearUp_QLineEdit").enabled, True)
test.compare(str(waitForObjectExists(":groupBox_4.txtSmearDown_QLineEdit").text), "")
test.compare(waitForObjectExists(":groupBox_4.txtSmearDown_QLineEdit").enabled, True)
mouseClick(waitForObject(":groupBox_4.cbSmearing_QComboBox"), 117, 15, 0, Qt.LeftButton)
mouseClick(waitForObjectItem(":groupBox_4.cbSmearing_QComboBox", "Custom Slit Smear"), 89, 5, 0, Qt.LeftButton)
test.compare(str(waitForObjectExists(":groupBox_4.cbSmearing_QComboBox").currentText), "Custom Slit Smear")
test.compare(waitForObjectExists(":groupBox_4.lblSmearUp_QLabel").visible, True)
test.compare(str(waitForObjectExists(":groupBox_4.lblSmearUp_QLabel").text), "Slit height")
test.compare(str(waitForObjectExists(":groupBox_4.lblSmearDown_QLabel").text), "Slit width")
test.compare(waitForObjectExists(":groupBox_4.lblSmearDown_QLabel").visible, True)
|
SasView/sasview
|
src/sas/qtgui/UnitTesting/SquishTestSuites/suite_sasview_qt/tst_Resolution/test.py
|
Python
|
bsd-3-clause
| 3,434 | 0.009027 |
from __future__ import absolute_import, division
import time
import os
try:
unicode
except NameError:
unicode = str
from . import LockBase, NotLocked, NotMyLock, LockTimeout, AlreadyLocked
class SQLiteLockFile(LockBase):
"Demonstrate SQL-based locking."
testdb = None
def __init__(self, path, threaded=True, timeout=None):
"""
>>> lock = SQLiteLockFile('somefile')
>>> lock = SQLiteLockFile('somefile', threaded=False)
"""
LockBase.__init__(self, path, threaded, timeout)
self.lock_file = unicode(self.lock_file)
self.unique_name = unicode(self.unique_name)
if SQLiteLockFile.testdb is None:
import tempfile
_fd, testdb = tempfile.mkstemp()
os.close(_fd)
os.unlink(testdb)
del _fd, tempfile
SQLiteLockFile.testdb = testdb
import sqlite3
self.connection = sqlite3.connect(SQLiteLockFile.testdb)
c = self.connection.cursor()
try:
c.execute("create table locks"
"("
" lock_file varchar(32),"
" unique_name varchar(32)"
")")
except sqlite3.OperationalError:
pass
else:
self.connection.commit()
import atexit
atexit.register(os.unlink, SQLiteLockFile.testdb)
def acquire(self, timeout=None):
timeout = timeout is not None and timeout or self.timeout
end_time = time.time()
if timeout is not None and timeout > 0:
end_time += timeout
if timeout is None:
wait = 0.1
elif timeout <= 0:
wait = 0
else:
wait = timeout / 10
cursor = self.connection.cursor()
while True:
if not self.is_locked():
# Not locked. Try to lock it.
cursor.execute("insert into locks"
" (lock_file, unique_name)"
" values"
" (?, ?)",
(self.lock_file, self.unique_name))
self.connection.commit()
# Check to see if we are the only lock holder.
cursor.execute("select * from locks"
" where unique_name = ?",
(self.unique_name,))
rows = cursor.fetchall()
if len(rows) > 1:
# Nope. Someone else got there. Remove our lock.
cursor.execute("delete from locks"
" where unique_name = ?",
(self.unique_name,))
self.connection.commit()
else:
# Yup. We're done, so go home.
return
else:
# Check to see if we are the only lock holder.
cursor.execute("select * from locks"
" where unique_name = ?",
(self.unique_name,))
rows = cursor.fetchall()
if len(rows) == 1:
# We're the locker, so go home.
return
# Maybe we should wait a bit longer.
if timeout is not None and time.time() > end_time:
if timeout > 0:
# No more waiting.
raise LockTimeout("Timeout waiting to acquire"
" lock for %s" %
self.path)
else:
# Someone else has the lock and we are impatient..
raise AlreadyLocked("%s is already locked" % self.path)
# Well, okay. We'll give it a bit longer.
time.sleep(wait)
def release(self):
if not self.is_locked():
raise NotLocked("%s is not locked" % self.path)
if not self.i_am_locking():
raise NotMyLock("%s is locked, but not by me (by %s)" %
(self.unique_name, self._who_is_locking()))
cursor = self.connection.cursor()
cursor.execute("delete from locks"
" where unique_name = ?",
(self.unique_name,))
self.connection.commit()
def _who_is_locking(self):
cursor = self.connection.cursor()
cursor.execute("select unique_name from locks"
" where lock_file = ?",
(self.lock_file,))
return cursor.fetchone()[0]
def is_locked(self):
cursor = self.connection.cursor()
cursor.execute("select * from locks"
" where lock_file = ?",
(self.lock_file,))
rows = cursor.fetchall()
return not not rows
def i_am_locking(self):
cursor = self.connection.cursor()
cursor.execute("select * from locks"
" where lock_file = ?"
" and unique_name = ?",
(self.lock_file, self.unique_name))
return not not cursor.fetchall()
def break_lock(self):
cursor = self.connection.cursor()
cursor.execute("delete from locks"
" where lock_file = ?",
(self.lock_file,))
self.connection.commit()
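# Usage sketch (the lock path and timeout below are illustrative values only):
#
#   lock = SQLiteLockFile('somefile', threaded=True)
#   try:
#       lock.acquire(timeout=5)   # wait up to ~5 seconds for the lock
#   except LockTimeout:
#       pass                      # could not acquire in time
#   else:
#       try:
#           pass                  # ... do work while holding the lock ...
#       finally:
#           lock.release()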
|
ARMmbed/yotta_osx_installer
|
workspace/lib/python2.7/site-packages/pip/_vendor/lockfile/sqlitelockfile.py
|
Python
|
apache-2.0
| 5,540 | 0.000722 |
#!/usr/bin/python
import json
import sys
import data_processing as dp
from mython import NumpyToListEncoder
from subprocess import check_output
from imp import reload
reload(dp)
# Neat way of calling:
# find . -name '*_metadata.json' > rootlist
# python gen_json.py $(< rootlist) &> gen_json.out
files = sys.argv[1:]
roots = [f.replace('_metadata.json','') for f in files]
for root in roots:
data = dp.read_dir_autogen(root,gosling='/home/busemey2/bin/gosling')
loc = '/'.join(root.split('/')[:-1])
outfn = loc+"/record.json"
print("Outputting to %s..."%outfn)
with open(outfn,'w') as outf:
json.dump(data,outf,cls=NumpyToListEncoder)
|
bbusemeyer/busempyer
|
drivers/gen_json.py
|
Python
|
gpl-2.0
| 653 | 0.018377 |
import numpy as np
class lemketableau:
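    # Tableau layout: T = [ I | -M | -e | q ], where the first n columns hold
    # the basic w variables, the next n hold z, the second-to-last column is
    # the artificial driving variable (labelled Y here, stored as a column of
    # -1's), and the last column holds q.  Tind records which variable kind
    # (W, Z, Y or Q) currently occupies each tableau column.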
def __init__(self,M,q,maxIter = 100):
n = len(q)
self.T = np.hstack((np.eye(n),-M,-np.ones((n,1)),q.reshape((n,1))))
self.n = n
self.wPos = np.arange(n)
self.zPos = np.arange(n,2*n)
self.W = 0
self.Z = 1
self.Y = 2
self.Q = 3
TbInd = np.vstack((self.W*np.ones(n,dtype=int),
np.arange(n,dtype=int)))
TnbInd = np.vstack((self.Z*np.ones(n,dtype=int),
np.arange(n,dtype=int)))
DriveInd = np.array([[self.Y],[0]])
QInd = np.array([[self.Q],[0]])
self.Tind = np.hstack((TbInd,TnbInd,DriveInd,QInd))
self.maxIter = maxIter
def lemkeAlgorithm(self):
initVal = self.initialize()
if not initVal:
return np.zeros(self.n),0,'Solution Found'
for k in range(self.maxIter):
stepVal = self.step()
if self.Tind[0,-2] == self.Y:
# Solution Found
z = self.extractSolution()
return z,0,'Solution Found'
elif not stepVal:
return None,1,'Secondary ray found'
return None,2,'Max Iterations Exceeded'
def initialize(self):
q = self.T[:,-1]
minQ = np.min(q)
if minQ < 0:
ind = np.argmin(q)
self.clearDriverColumn(ind)
self.pivot(ind)
return True
else:
return False
def step(self):
q = self.T[:,-1]
a = self.T[:,-2]
ind = np.nan
minRatio = np.inf
for i in range(self.n):
if a[i] > 0:
newRatio = q[i] / a[i]
if newRatio < minRatio:
ind = i
minRatio = newRatio
if minRatio < np.inf:
self.clearDriverColumn(ind)
self.pivot(ind)
return True
else:
return False
def extractSolution(self):
z = np.zeros(self.n)
q = self.T[:,-1]
for i in range(self.n):
if self.Tind[0,i] == self.Z:
z[self.Tind[1,i]] = q[i]
return z
def partnerPos(self,pos):
v,ind = self.Tind[:,pos]
if v == self.W:
ppos = self.zPos[ind]
elif v == self.Z:
ppos = self.wPos[ind]
else:
ppos = None
return ppos
def pivot(self,pos):
ppos = self.partnerPos(pos)
if ppos is not None:
self.swapColumns(pos,ppos)
self.swapColumns(pos,-2)
return True
else:
self.swapColumns(pos,-2)
return False
def swapMatColumns(self,M,i,j):
Mi = np.array(M[:,i],copy=True)
Mj = np.array(M[:,j],copy=True)
M[:,i] = Mj
M[:,j] = Mi
return M
def swapPos(self,v,ind,newPos):
if v == self.W:
self.wPos[ind] = newPos % (2*self.n+2)
elif v == self.Z:
self.zPos[ind] = newPos % (2*self.n+2)
def swapColumns(self,i,j):
iInd = self.Tind[:,i]
jInd = self.Tind[:,j]
v,ind = iInd
self.swapPos(v,ind,j)
v,ind = jInd
self.swapPos(v,ind,i)
self.Tind = self.swapMatColumns(self.Tind,i,j)
self.T = self.swapMatColumns(self.T,i,j)
def clearDriverColumn(self,ind):
a = self.T[ind,-2]
self.T[ind] /= a
for i in range(self.n):
if i != ind:
b = self.T[i,-2]
self.T[i] -= b * self.T[ind]
def ind2str(self,indvec):
v,pos = indvec
if v == self.W:
s = 'w%d' % pos
elif v == self.Z:
s = 'z%d' % pos
elif v == self.Y:
s = 'y'
else:
s = 'q'
return s
def indexStringArray(self):
indstr = np.array([self.ind2str(indvec) for indvec in self.Tind.T],dtype=object)
return indstr
def indexedTableau(self):
indstr = self.indexStringArray()
return np.vstack((indstr,self.T))
def __repr__(self):
IT = self.indexedTableau()
return IT.__repr__()
def __str__(self):
IT = self.indexedTableau()
return IT.__str__()
def lemkelcp(M,q,maxIter=100):
"""
sol = lemkelcp(M,q,maxIter)
    Uses Lemke's algorithm to compute a solution to the
linear complementarity problem:
Mz + q >= 0
z >= 0
z'(Mz+q) = 0
The inputs are given by:
M - an nxn numpy array
q - a length n numpy array
maxIter - an optional number of pivot iterations. Set to 100 by default
The solution is a tuple of the form:
z,exit_code,exit_string = sol
    The entries are summarized in the table below:
    | z               | exit_code | exit_string               |
    ------------------------------------------------------------
    | solution to LCP |     0     | 'Solution Found'          |
    | None            |     1     | 'Secondary ray found'     |
    | None            |     2     | 'Max Iterations Exceeded' |
"""
tableau = lemketableau(M,q,maxIter)
return tableau.lemkeAlgorithm()
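# Minimal usage sketch; M_demo and q_demo are illustrative values (for a
# positive definite M such as this one the LCP has a unique solution, which
# Lemke's method should find without hitting a secondary ray).
if __name__ == "__main__":
    M_demo = np.array([[2., 1.],
                       [1., 2.]])
    q_demo = np.array([-1., -1.])
    z, code, msg = lemkelcp(M_demo, q_demo)
    # Expect code == 0 with z satisfying M z + q >= 0, z >= 0, z'(M z + q) = 0
    print("z = %s, exit_code = %d, %s" % (z, code, msg))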
|
AndyLamperski/lemkelcp
|
lemkelcp/lemkelcp.py
|
Python
|
mit
| 5,431 | 0.02228 |
import logging
#Config
MYSQL_HOST = '127.0.0.1'
MYSQL_PORT = 3306
MYSQL_USER = 'root'
MYSQL_PASS = 'oppzk'
MYSQL_DB = 'SSMM'
MANAGE_PASS = 'passwd'
#if you want to manage from another server, set this value to a public/global IP
MANAGE_BIND_IP = '127.0.0.1'
#make sure this port is not already in use
MANAGE_PORT = 10001
PANEL_VERSION = 'V2' # V2 or V3. V2 does not support the API
API_URL = 'http://domain/mu'
API_PASS = 'mupass'
NODE_ID = '1'
CHECKTIME = 30 # check service time
SYNCTIME = 300 # sync traffic time
RESETTIME = 300 # reset traffic time
#BIND IP
#to bind both IPv4 and IPv6, use '[::]'
#to bind all IPv4 interfaces, use '0.0.0.0'
#to bind one specific interface only, use its address, e.g. '4.4.4.4'
SS_BIND_IP = '0.0.0.0'
SS_METHOD = 'rc4-md5'
#LOG CONFIG
LOG_ENABLE = False
LOG_LEVEL = logging.DEBUG
LOG_FILE = '/var/log/shadowsocks.log'
|
niubileme/shadowsocks-manyuser
|
shadowsocks/config.py
|
Python
|
apache-2.0
| 805 | 0.016149 |
#!/usr/bin/env python
import sys
# import particle restart
sys.path.append('../../src/utils')
import particle_restart as pr
# read in particle restart file
if len(sys.argv) > 1:
p = pr.Particle(sys.argv[1])
else:
p = pr.Particle('particle_12_842.binary')
# set up output string
outstr = ''
# write out properties
outstr += 'current batch:\n'
outstr += "{0:12.6E}\n".format(p.current_batch)
outstr += 'current gen:\n'
outstr += "{0:12.6E}\n".format(p.current_gen)
outstr += 'particle id:\n'
outstr += "{0:12.6E}\n".format(p.id)
outstr += 'run mode:\n'
outstr += "{0:12.6E}\n".format(p.run_mode)
outstr += 'particle weight:\n'
outstr += "{0:12.6E}\n".format(p.weight)
outstr += 'particle energy:\n'
outstr += "{0:12.6E}\n".format(p.energy)
outstr += 'particle xyz:\n'
outstr += "{0:12.6E} {1:12.6E} {2:12.6E}\n".format(p.xyz[0],p.xyz[1],p.xyz[2])
outstr += 'particle uvw:\n'
outstr += "{0:12.6E} {1:12.6E} {2:12.6E}\n".format(p.uvw[0],p.uvw[1],p.uvw[2])
# write results to file
with open('results_test.dat','w') as fh:
fh.write(outstr)
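# Usage sketch (the explicit file name below is illustrative):
#   ./results.py # summarizes the default particle_12_842.binary
#   ./results.py particle_7_321.binary # summarizes the given restart file
# Either way the formatted summary is written to results_test.dat.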
|
shenqicang/openmc
|
tests/test_particle_restart_eigval/results.py
|
Python
|
mit
| 1,055 | 0.008531 |
__version__ = '$Id$'
from Acquisition import aq_inner
from Products.Five.browser import BrowserView
from Products.CMFCore.utils import getToolByName
from DateTime import DateTime
class LastZorionagurrak(BrowserView):
def getLastZorionagurrak(self, num=5):
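        # Return up to `num` published Zorionagurra brains dated today; if
        # fewer than `num` are found, top the list up with items dated after
        # the end of today (i.e. tomorrow onwards), keeping date ordering.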
context = aq_inner(self.context)
today = DateTime().earliestTime()
todayend = DateTime().latestTime()
tomorrow = today + 1
pcal = getToolByName(context, 'portal_catalog')
todaybrains = pcal(portal_type='Zorionagurra',
review_state='published',
getDate={'query':(today, todayend),
'range':'min:max'},
sort_on='getDate',
sort_limit=num)
todaybrainnumber = len(todaybrains)
if todaybrainnumber >= num:
return todaybrains
else:
tomorrowbrainnumber = num - todaybrainnumber
tomorrowbrains = pcal(portal_type='Zorionagurra',
review_state='published',
getDate={'query':(todayend,),
'range':'min'},
sort_on='getDate',
sort_limit=tomorrowbrainnumber)
return todaybrains + tomorrowbrains
|
codesyntax/Products.zorionagurra
|
Products/zorionagurra/browser/portlet.py
|
Python
|
gpl-2.0
| 1,398 | 0.004292 |
import unittest
import os
import hiframe
import hipubiface_test_basic_plugin._hiframe
MY_ABSOLUTE_PATH = os.path.abspath(__file__)
MY_ABSOLUTE_PARENT = os.path.dirname(MY_ABSOLUTE_PATH)
HIPUBIFACE_PATH = os.path.dirname(os.path.dirname(os.path.dirname(MY_ABSOLUTE_PARENT)))
HIPUBIFACE_SRC_PATH = HIPUBIFACE_PATH+"/src"
class HipubifaceTestBasic(unittest.TestCase):
# def test_guest_ping_pass(self):
# cv = [["00000000", "ffffffff"],
# ["00000001", "fffffffe"],
# ["a7845763", "587ba89c"],
# ["8da581bf", "725a7e40"],
# ["0da581bf", "f25a7e40"]
# ]
# for c in cv :
# r = hipubiface.call("base", "guest_ping", {"txt_value":c[0].upper()})
# self.check_ok(r)
# self.assertEqual(r["type"],"value")
# self.assertEqual(r["value"],c[1].lower())
#
# r = hipubiface.call("base", "guest_ping", {"txt_value":c[0].lower()})
# self.check_ok(r)
# self.assertEqual(r["type"],"value")
# self.assertEqual(r["value"],c[1].lower())
#
# r = hipubiface.call("base", "guest_ping", {"txt_value":c[1].upper()})
# self.check_ok(r)
# self.assertEqual(r["type"],"value")
# self.assertEqual(r["value"],c[0].lower())
#
# r = hipubiface.call("base", "guest_ping", {"txt_value":c[1].lower()})
# self.check_ok(r)
# self.assertEqual(r["type"],"value")
# self.assertEqual(r["value"],c[0].lower())
#
# def test_guest_ping_fail(self):
# cv = ["asdf",
# "0000",
# "1234",
# "dddd",
# "1234567890",
# "-9999999",
# "-99999999",
# "9999999",
# "999999999"
# ]
# for c in cv :
# r = hipubiface.call("base", "guest_ping", {"txt_value":c})
# self.assertTrue(r != None)
# self.assertTrue(isinstance(r,dict))
# self.assertEqual(r[hipubiface.RESULT_KEY], hipubiface.RESULT_VALUE_FAIL_TXT)
# self.assertEqual(r["fail_reason"],"bad value")
# def test_list_cmd(self):
# ret = hipubiface._hiframe.command_guest_list_cmd()
# self.check_ok(ret)
# self.assertEqual(ret["type"],"value")
# self.assertTrue("value" in ret)
# self.assertTrue("hs_plugin" in ret["value"])
# self.assertTrue("guest_list_cmd" in ret["value"]["hs_plugin"])
# self.assertEqual(ret["value"]["hs_plugin"]["guest_list_cmd"],[])
def test_call_noarg(self):
hf=hiframe.HiFrame(plugin_path_list=[MY_ABSOLUTE_PARENT,HIPUBIFACE_SRC_PATH])
hf.start()
me=hf.plugin_D["hipubiface"]
ret = me.call("hipubiface_test_basic_plugin","helloworld")
self.assertEqual(ret, "helloworld")
def test_call_arg(self):
hf=hiframe.HiFrame(plugin_path_list=[MY_ABSOLUTE_PARENT,HIPUBIFACE_SRC_PATH])
hf.start()
me=hf.plugin_D["hipubiface"]
ret = me.call("hipubiface_test_basic_plugin","uppercase",{"txt_a":"asdf"})
self.assertEqual(ret, "ASDF")
def test_call_exception(self):
hf=hiframe.HiFrame(plugin_path_list=[MY_ABSOLUTE_PARENT,HIPUBIFACE_SRC_PATH])
hf.start()
me=hf.plugin_D["hipubiface"]
try:
me.call("hipubiface_test_basic_plugin","hello_exception")
self.fail()
except hipubiface_test_basic_plugin._hiframe.TestException:
pass
except:
self.fail()
# def test_hellofile(self):
# ret = hipubiface.call("hipubiface_test_basic_plugin","hellofile")
# self.check_ok(ret)
# self.assertEqual(ret["type"], "file")
# self.assertEqual(ret["file_type"], "local")
# self.assertEqual(ret["mime"], "text/plain; charset=us-ascii")
# self.assertTrue(ret["file_name"].endswith("/test/res/test0.torrent.txt"))
#
# def test_hellofile2(self):
# ret = hipubiface.call("hipubiface_test_basic_plugin","hellofile2")
# self.check_ok(ret)
# self.assertEqual(ret["type"], "file")
# self.assertEqual(ret["file_type"], "local")
# self.assertTrue(not ("mime" in ret))
# self.assertTrue(ret["file_name"].endswith("/test/res/test0.torrent.txt"))
|
luzi82/HiSocial
|
HiPubIface/test/000_basic/src/hipubiface_test_basic.py
|
Python
|
gpl-3.0
| 4,370 | 0.019908 |
#!/usr/bin/env python
import os
import sys
import argparse
import traceback
sys.path.insert(1, os.path.join(os.path.dirname(__file__), os.pardir))
from toollib.group import Group,UnsortedInputGrouper
import scipy.stats as ss
class KSGroup(Group):
def __init__(self, tup):
super(KSGroup, self).__init__(tup)
self.samples = []
def add(self, chunks):
self.samples.append(float(chunks[args.column]))
def done(self):
jdelim = args.delimiter if args.delimiter != None else ' '
if len(self.tup) > 0:
args.outfile.write(jdelim.join(self.tup) + jdelim)
args.outfile.write(jdelim.join(map(str, ss.kstest(self.samples, args.distf, args=args.params))) + '\n')
if __name__ == "__main__":
# set up command line args
parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter,\
description='Compare the request distributions of all clients')
parser.add_argument('infile', nargs='?', type=argparse.FileType('r'), default=sys.stdin)
parser.add_argument('outfile', nargs='?', type=argparse.FileType('w'), default=sys.stdout)
parser.add_argument('-s', '--source', default='scipy.stats', choices=['scipy.stats', 'lambda'], help='source of the distribution to fit')
parser.add_argument('-i', '--dist', default='paretoLomax')
parser.add_argument('-p', '--params', default='', help='initial parameters')
parser.add_argument('-c', '--column', type=int, default=0)
parser.add_argument('-g', '--group', nargs='+', type=int, default=[])
parser.add_argument('-d', '--delimiter', default=None)
args = parser.parse_args()
args.params = map(float, args.params.split(args.delimiter))
if args.source == 'scipy.stats':
args.source = ss
else:
args.source = None
if args.source:
mod = args.source
for c in args.dist.split('.'):
mod = getattr(mod, c)
args.distf = mod
else:
args.distf = eval(args.dist)
grouper = UnsortedInputGrouper(args.infile, KSGroup, args.group, args.delimiter)
grouper.group()
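# Usage sketch (file name, columns and parameters below are illustrative):
#
#   ./ks_test.py samples.txt -g 0 -c 1 -i expon.cdf -p '0 1'
#
# reads whitespace-delimited rows from samples.txt, groups them by column 0,
# and for each group runs scipy.stats.kstest on the column-1 values against
# scipy.stats.expon.cdf with parameters (0, 1); the -p values are split with
# the same delimiter as the data.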
|
scoky/pytools
|
curve/ks_test.py
|
Python
|
mit
| 2,158 | 0.008341 |
from django.db import models
from django.template.defaultfilters import slugify
from datetime import datetime
from redactor.fields import RedactorField
from management.post_tweet import post_tweet
### News
####################################################################################################
class News(models.Model):
post_tweet = models.BooleanField(
default=False,
)
tweet_cc = models.CharField(
max_length=70,
blank=True,
null=True,
)
title = models.CharField(
max_length=250,
)
slug = models.SlugField(
max_length=250,
blank=True,
unique=True,
)
content = RedactorField()
created = models.DateTimeField(
default=datetime.now,
blank=True,
null=True,
)
city = models.ForeignKey(
'utils.City',
blank=True,
null=True,
)
country = models.ForeignKey(
'utils.Country',
blank=True,
null=True,
)
tags = models.ManyToManyField(
'utils.Tag',
through='NewsTag',
related_name='news',
)
projects = models.ManyToManyField(
'projects.Project',
through='ProjectRelatedToNews',
related_name='news',
)
publications = models.ManyToManyField(
'publications.Publication',
through='PublicationRelatedToNews',
related_name='news',
)
persons = models.ManyToManyField(
'persons.Person',
through='PersonRelatedToNews',
related_name='news',
)
class Meta:
ordering = ('-created',)
verbose_name = u'News piece'
verbose_name_plural = u'News pieces'
def __unicode__(self):
return u'%s' % self.title
def save(self, *args, **kwargs):
self.slug = slugify(self.title)
if self.post_tweet:
post_tweet(self)
self.content = self.content.replace("<img src=", "<img class='img-responsive' src=")
super(News, self).save(*args, **kwargs)
### NewsTag
####################################################################################################
class NewsTag(models.Model):
tag = models.ForeignKey('utils.Tag')
news = models.ForeignKey('News')
class Meta:
verbose_name = u'News Tag'
verbose_name_plural = u'News Tags'
### ProjectRelatedToNews
####################################################################################################
class ProjectRelatedToNews(models.Model):
project = models.ForeignKey('projects.Project')
news = models.ForeignKey('News')
class Meta:
verbose_name = u'Project related to News piece'
verbose_name_plural = u'Projects related to News pieces'
### PublicationRelatedToNews
####################################################################################################
class PublicationRelatedToNews(models.Model):
publication = models.ForeignKey('publications.Publication')
news = models.ForeignKey('News')
class Meta:
verbose_name = u'Publication related to News piece'
verbose_name_plural = u'Publications related to News pieces'
### PersonRelatedToNews
####################################################################################################
class PersonRelatedToNews(models.Model):
person = models.ForeignKey('persons.Person')
news = models.ForeignKey('News')
class Meta:
verbose_name = u'Person related to News piece'
verbose_name_plural = u'People related to News pieces'
### EventRelatedToNews
####################################################################################################
class EventRelatedToNews(models.Model):
event = models.ForeignKey('events.Event')
news = models.ForeignKey('News')
class Meta:
verbose_name = u'Event related to News piece'
verbose_name_plural = u'Events related to News pieces'
|
morelab/labman_ud
|
labman_ud/entities/news/models.py
|
Python
|
gpl-3.0
| 3,999 | 0.003251 |
from PIL import Image
import stripe
import datetime
from django.shortcuts import render, redirect
from django.views.generic import TemplateView, View, FormView
from django.core.urlresolvers import reverse_lazy, reverse
from django.views.decorators.csrf import csrf_exempt
from django.utils.decorators import method_decorator
from django.conf import settings
from paypal.standard.forms import PayPalPaymentsForm
from picture.models import Picture, Settings, Pixel, PaymentNote
from picture.forms import PaymentNoteForm
from paypal.standard.models import ST_PP_COMPLETED
from paypal.standard.ipn.signals import valid_ipn_received, invalid_ipn_received, payment_was_flagged
# Create your views here.
class PictureIndexView(FormView):
template_name = 'picture/index.html'
form_class = PaymentNoteForm
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['picture'] = Settings.objects.first().picture
context['random'] = datetime.datetime.now()
context['payment_notes'] = [{
'name': note.name,
'url': note.url,
'number': note.number,
'pixels': [{
'r': pixel.r,
'g': pixel.g,
'b': pixel.b,
} for pixel in note.pixels.all()[:50]]
} for note in PaymentNote.objects.filter(picture=self.picture).order_by('-number')]
return context
def form_valid(self, form):
note = form.save(commit=False)
self.request.session['payment_note'] = {
'name': note.name,
'url': note.url,
'number': note.number,
}
return super().form_valid(form)
def dispatch(self, request, *args, **kwargs):
self.picture = Settings.objects.first().picture
return super().dispatch(request, *args, **kwargs)
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
kwargs['picture'] = self.picture
return kwargs
def get_success_url(self):
if getattr(settings,'NO_PAYMENTS', False) == True:
create_payment_note(self.request.session['payment_note'])
return reverse('picture-payment-success')
else:
return reverse('picture-payment')
class PaymentView(TemplateView):
template_name = 'picture/payment.html'
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['picture'] = Settings.objects.first().picture
context['paypal_form'] = self.paypal_form
context['stripe'] = self.stripe_options
context['amount'] = self.request.session.get('payment_note').get('number')
return context
@method_decorator(csrf_exempt)
def dispatch(self, request, *args, **kwargs):
self.picture = Settings.objects.first().picture
business = settings.PAYPAL_EMAIL
paypal_options = {
"business": business,
"amount": request.session.get('payment_note').get('number'),
"invoice": request.session.get('payment_note').get('url'),
"custom": request.session.get('payment_note').get('name'),
"item_name": "Pixel Reveal",
# "invoice": "unique-invoice-id",
"notify_url": request.build_absolute_uri(reverse('paypal-ipn')),
"return_url": request.build_absolute_uri(reverse('picture-paypal-payment-success')),
"cancel_return": request.build_absolute_uri(reverse('picture-index')),
}
self.paypal_form = PayPalPaymentsForm(initial=paypal_options)
#STRIPE stuff
self.stripe_options = {
'p_key': settings.STRIPE_PUBLISH,
'amount': request.session.get('payment_note').get('number') * 100,
'name': 'Calvin Collins',
'description': 'Pixel Reveal',
}
return super().dispatch(request, *args, **kwargs)
class PaymentSuccessView(TemplateView):
template_name = 'picture/payment_success.html'
class PaypalPaymentSuccessView(TemplateView):
template_name = 'picture/paypal_payment_success.html'
class PaymentErrorView(TemplateView):
template_name = 'picture/payment_error.html'
class StripeView(View):
def post(self, request, *args, **kwargs):
self.picture = Settings.objects.first().picture
stripe.api_key = settings.STRIPE_SECRET
token = request.POST['stripeToken']
try:
charge = stripe.Charge.create(
amount = request.session.get('payment_note').get('number') * 100,
currency="usd",
source=token,
description="Pixel Reveal"
)
except stripe.error.CardError as e:
# The card has been declined
return redirect(reverse('picture-payment-error'))
else:
create_payment_note(self.request.session['payment_note'])
return redirect(reverse('picture-payment-success'))
def create_payment_note(note_info):
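    # Re-validate the note against the current picture, save it, ask the
    # picture to uncover a line of pixels for the paid amount (note.number),
    # and record the RGB values of roughly the first 50 uncovered pixels.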
form = PaymentNoteForm(note_info, picture=Settings.objects.first().picture)
if form.is_valid():
note = form.save(commit=False)
note.picture = Settings.objects.first().picture
note.save()
coords = note.picture.uncover_line(note.number)
img = note.picture.pillow_image.convert('RGB')
for i, coord in enumerate(coords):
if i > 50:
break
r, g, b = img.getpixel((coord['x'], coord['y']))
note.pixels.add(Pixel.objects.create(
x = coord['x'],
y = coord['y'],
r = r,
g = g,
b = b
))
note.save()
def handle_payment(sender, **kwargs):
ipn_obj = sender
if ipn_obj.payment_status == ST_PP_COMPLETED:
# WARNING !
# Check that the receiver email is the same we previously
# set on the business field request. (The user could tamper
# with those fields on payment form before send it to PayPal)
if ipn_obj.receiver_email != settings.PAYPAL_EMAIL:
# Not a valid payment
return
note_info = {
'name': ipn_obj.custom,
'url': ipn_obj.invoice,
'number': ipn_obj.mc_gross,
}
create_payment_note(note_info)
valid_ipn_received.connect(handle_payment)
|
ZzCalvinzZ/picturepay
|
picture/views.py
|
Python
|
mit
| 5,543 | 0.027242 |
import startbot, stats, os, re, random, sys
import utils
MARKOV_LENGTH = 2
#majority of the code taken from https://github.com/hrs/markov-sentence-generator
#changes made: allowed it to train directly on text pulled from group messages,
#wrapped it in a class structure, and made minor changes for Python 3.x compatibility
class markov():
# These mappings can get fairly large -- they're stored globally to
# save copying time.
# (tuple of words) -> {dict: word -> number of times the word appears following the tuple}
# Example entry:
# ('eyes', 'turned') => {'to': 2.0, 'from': 1.0}
# Used briefly while first constructing the normalized mapping
tempMapping = {}
# (tuple of words) -> {dict: word -> *normalized* number of times the word appears following the tuple}
# Example entry:
# ('eyes', 'turned') => {'to': 0.66666666, 'from': 0.33333333}
mapping = {}
# Contains the set of words that can start sentences
starts = []
m_botName = None
def __init__(self, groupObj, groupName, bot):
self.m_botName = bot.name
self.train(groupObj, groupName)
def train(self, groupObj, groupName):
stats.getAllText(groupObj, groupName, self.m_botName)
self.buildMapping(self.wordlist('..{1}cache{1}messages-{0}.txt'.format(groupName, os.path.sep)), MARKOV_LENGTH)
utils.showOutput("bot successfully trained.")
def talk(self, message, bot, groupName):
try:
bot.post(self.genSentence2(message, MARKOV_LENGTH))
except:
bot.post(self.genSentence(MARKOV_LENGTH))
# We want to be able to compare words independent of their capitalization.
def fixCaps(self, word):
# Ex: "FOO" -> "foo"
if word.isupper() and word != "I":
word = word.lower()
# Ex: "LaTeX" => "Latex"
elif word [0].isupper():
word = word.lower().capitalize()
# Ex: "wOOt" -> "woot"
else:
word = word.lower()
return word
# Tuples can be hashed; lists can't. We need hashable values for dict keys.
# This looks like a hack (and it is, a little) but in practice it doesn't
# affect processing time too negatively.
def toHashKey(self, lst):
return tuple(lst)
# Returns the contents of the file, split into a list of words and
# (some) punctuation.
def wordlist(self, filename):
f = open(filename, 'r', encoding='utf-8')
wordlist = [self.fixCaps(w) for w in re.findall(r"[\w']+|[.,!?;]", f.read())]
f.close()
return wordlist
# Self-explanatory -- adds "word" to the "tempMapping" dict under "history".
# tempMapping (and mapping) both match each word to a list of possible next
# words.
# Given history = ["the", "rain", "in"] and word = "Spain", we add "Spain" to
# the entries for ["the", "rain", "in"], ["rain", "in"], and ["in"].
def addItemToTempMapping(self, history, word):
while len(history) > 0:
first = self.toHashKey(history)
if first in self.tempMapping:
if word in self.tempMapping[first]:
self.tempMapping[first][word] += 1.0
else:
self.tempMapping[first][word] = 1.0
else:
self.tempMapping[first] = {}
self.tempMapping[first][word] = 1.0
history = history[1:]
# Building and normalizing the mapping.
def buildMapping(self, wordlist, markovLength):
self.starts.append(wordlist [0])
for i in range(1, len(wordlist) - 1):
if i <= markovLength:
history = wordlist[: i + 1]
else:
history = wordlist[i - markovLength + 1 : i + 1]
follow = wordlist[i + 1]
# if the last elt was a period, add the next word to the start list
if history[-1] == "." and follow not in ".,!?;":
self.starts.append(follow)
self.addItemToTempMapping(history, follow)
# Normalize the values in tempMapping, put them into mapping
for first, followset in self.tempMapping.items():
total = sum(followset.values())
# Normalizing here:
self.mapping[first] = dict([(k, v / total) for k, v in followset.items()])
# Returns the next word in the sentence (chosen randomly),
# given the previous ones.
def next(self, prevList):
sum = 0.0
retval = ""
index = random.random()
# Shorten prevList until it's in mapping
while self.toHashKey(prevList) not in self.mapping:
prevList.pop(0)
# Get a random word from the mapping, given prevList
for k, v in self.mapping[self.toHashKey(prevList)].items():
sum += v
if sum >= index and retval == "":
retval = k
return retval
def genSentence2(self, message, markovLength): #attempts to use input sentence material to construct a sentence
# Start with a random "starting word" from the input message
splitmessage = message.lower().split()
splitmessage.remove('{0},'.format(self.m_botName.lower()))
if len(splitmessage) == 0:
curr = random.choice(self.starts)
else:
curr = random.choice(splitmessage)
sent = curr.capitalize()
prevList = [curr]
# Keep adding words until we hit a period
while (curr not in "."):
curr = self.next(prevList)
prevList.append(curr)
# if the prevList has gotten too long, trim it
if len(prevList) > markovLength:
prevList.pop(0)
if (curr not in ".,!?;"):
sent += " " # Add spaces between words (but not punctuation)
sent += curr
return sent
def genSentence(self, markovLength):
# Start with a random "starting word"
curr = random.choice(self.starts)
sent = curr.capitalize()
prevList = [curr]
# Keep adding words until we hit a period
while (curr not in "."):
curr = self.next(prevList)
prevList.append(curr)
# if the prevList has gotten too long, trim it
if len(prevList) > markovLength:
prevList.pop(0)
if (curr not in ".,!?;"):
sent += " " # Add spaces between words (but not punctuation)
sent += curr
return sent
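# Illustration of the mapping built above (the word list is a made-up example):
# for wordlist = ['the', 'rain', 'in', 'spain', '.'] and markovLength = 2,
# buildMapping() produces entries such as
#     ('the', 'rain') -> {'in': 1.0}
#     ('rain', 'in')  -> {'spain': 1.0}
#     ('in', 'spain') -> {'.': 1.0}
# plus shorter-history fallbacks like ('rain',) -> {'in': 1.0};
# genSentence() then walks these entries via next() until it draws a '.'.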
|
crc5464/groupme-bot
|
src/markov.py
|
Python
|
gpl-3.0
| 6,551 | 0.003817 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.12 on 2018-12-04 15:13
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('anagrafica', '0049_auto_20181028_1639'),
]
operations = [
migrations.CreateModel(
name='Question',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('text', models.CharField(max_length=255)),
('is_active', models.BooleanField(default=True)),
('required', models.BooleanField(default=True, verbose_name='Obbligatorio')),
],
options={
'verbose_name': 'Domanda',
'verbose_name_plural': 'Domande',
},
),
migrations.CreateModel(
name='Survey',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('is_active', models.BooleanField(default=True)),
('text', models.CharField(max_length=255)),
],
options={
'verbose_name': 'Questionario di gradimento',
'verbose_name_plural': 'Questionari di gradimento',
},
),
migrations.CreateModel(
name='SurveyResult',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('response', models.TextField(blank=True, max_length=1000, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('question', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='survey.Question')),
('survey', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='survey.Survey')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='anagrafica.Persona')),
],
options={
'verbose_name': "Risposta dell'utente",
'verbose_name_plural': 'Risposte degli utenti',
},
),
migrations.AddField(
model_name='question',
name='survey',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='survey.Survey'),
),
]
|
CroceRossaItaliana/jorvik
|
survey/migrations/0001_initial.py
|
Python
|
gpl-3.0
| 2,602 | 0.003459 |
#-*- coding: utf-8 -*-
import unittest
from top.WordTableModel import WordTableModel
class WordTableModelTestsTestCase(unittest.TestCase):
def setUp(self):
self.model = WordTableModel()
self.model.load("dotestow.pkl")
def testLoading(self):
assert len(self.model.words) == 5, "incorrect number of loaded words " + \
"got: " + len(self.model.words) + ", but: 5 was expected"
list = []
for word in self.model.words:
list.append(word.word)
msg = "failed while loading the words with number: "
assert list[0] == "sibilant sound", msg + '0'
assert list[1] == "aberration", msg + '1'
assert list[2] == "acrid", msg + '2'
assert list[3] == "adjourn", msg + '3'
assert list[4] == "ambience", msg + '4'
def testSorting(self):
self.model.sortByWord()
assert self.model.words[0].word == "aberration", "incorrect sorting by word " + \
"got: " + self.model.words[0].word + ", but: 'aberration' was expected"
self.model.sortByDifficulty()
assert self.model.words[0].word == "adjourn", "incorrect sorting by word " + \
"got: " + self.model.words[0].word + ", but: 'adjourn' was expected"
self.model.reversedDiffSort = True
self.model.sortByDifficulty()
assert self.model.words[0].word == "ambience", "incorrect sorting by word " + \
"got: " + self.model.words[0].word + ", but: 'ambience' was expected"
def testExport(self):
self.model.exportWords("exportTest.txt")
modelFh = open("dotestow.txt")
testFh = open("exportTest.txt")
modelText = modelFh.read()
testText = testFh.read()
assert modelText == testText, "incorrect export"
modelFh.close()
testFh.close()
import os
os.remove("exportTest.txt")
def testImport(self):
self.model.words.clearWords()
self.model.importWords("dotestow.txt")
self.testLoading()
if __name__ == '__main__':
unittest.main()
|
michaupl/wordtester
|
src/tests/WordTableModelTests.py
|
Python
|
apache-2.0
| 2,079 | 0.005291 |
#!/usr/bin/env python2
# Print out the 2^n possibilities of a word with the length n
import unittest
from itertools import product, permutations
def word_variations(s):
try:
if not len(s): return
lower, upper = s.lower(), s.upper()
except:
return
    # Digit-only strings have identical lower/upper forms, so the cartesian
    # product would collapse to a single value; use itertools.permutations instead.
if lower == upper:
pairs = permutations(lower)
else:
pairs = product(*zip(lower, upper))
result = {''.join(pair) for pair in pairs} # Using set literal notation.
print result, "\n", len(result)
return result
word_variations("abc")
class WordTest(unittest.TestCase):
def _test(self, s, expected):
result = word_variations(s)
self.assertEqual(len(result), expected)
def test_basecase(self):
self._test("hello", 32)
def test_int(self):
self._test("123", 6)
def test_empty(self):
self.assertEqual(word_variations(""), None)
|
mikar/projects
|
various/word_possibilities.py
|
Python
|
mit
| 1,021 | 0.007835 |
import unittest
from collections import deque
import datetime
import sys
import os
import StringIO
from south import exceptions
from south.migration import migrate_app
from south.migration.base import all_migrations, Migration, Migrations
from south.migration.utils import depends, dfs, flatten, get_app_label
from south.models import MigrationHistory
from south.tests import Monkeypatcher
from south.db import db
class TestBrokenMigration(Monkeypatcher):
installed_apps = ["fakeapp", "otherfakeapp", "brokenapp"]
def test_broken_dependencies(self):
self.assertRaises(
exceptions.DependsOnUnmigratedApplication,
Migrations.calculate_dependencies,
force=True,
)
#depends_on_unknown = self.brokenapp['0002_depends_on_unknown']
#self.assertRaises(exceptions.DependsOnUnknownMigration,
# depends_on_unknown.dependencies)
#depends_on_higher = self.brokenapp['0003_depends_on_higher']
#self.assertRaises(exceptions.DependsOnHigherMigration,
# depends_on_higher.dependencies)
class TestMigration(Monkeypatcher):
installed_apps = ["fakeapp", "otherfakeapp"]
def setUp(self):
super(TestMigration, self).setUp()
self.fakeapp = Migrations('fakeapp')
self.otherfakeapp = Migrations('otherfakeapp')
Migrations.calculate_dependencies(force=True)
def test_str(self):
migrations = [str(m) for m in self.fakeapp]
self.assertEqual(['fakeapp:0001_spam',
'fakeapp:0002_eggs',
'fakeapp:0003_alter_spam'],
migrations)
def test_repr(self):
migrations = [repr(m) for m in self.fakeapp]
self.assertEqual(['<Migration: fakeapp:0001_spam>',
'<Migration: fakeapp:0002_eggs>',
'<Migration: fakeapp:0003_alter_spam>'],
migrations)
def test_app_label(self):
self.assertEqual(['fakeapp', 'fakeapp', 'fakeapp'],
[m.app_label() for m in self.fakeapp])
def test_name(self):
self.assertEqual(['0001_spam', '0002_eggs', '0003_alter_spam'],
[m.name() for m in self.fakeapp])
def test_full_name(self):
self.assertEqual(['fakeapp.migrations.0001_spam',
'fakeapp.migrations.0002_eggs',
'fakeapp.migrations.0003_alter_spam'],
[m.full_name() for m in self.fakeapp])
def test_migration(self):
# Can't use vanilla import, modules beginning with numbers aren't in grammar
M1 = __import__("fakeapp.migrations.0001_spam", {}, {}, ['Migration']).Migration
M2 = __import__("fakeapp.migrations.0002_eggs", {}, {}, ['Migration']).Migration
M3 = __import__("fakeapp.migrations.0003_alter_spam", {}, {}, ['Migration']).Migration
self.assertEqual([M1, M2, M3],
[m.migration().Migration for m in self.fakeapp])
self.assertRaises(exceptions.UnknownMigration,
self.fakeapp['9999_unknown'].migration)
def test_previous(self):
self.assertEqual([None,
self.fakeapp['0001_spam'],
self.fakeapp['0002_eggs']],
[m.previous() for m in self.fakeapp])
def test_dependencies(self):
"Test that the dependency detection works."
self.assertEqual([
set([]),
set([self.fakeapp['0001_spam']]),
set([self.fakeapp['0002_eggs']])
],
[m.dependencies for m in self.fakeapp],
)
self.assertEqual([
set([self.fakeapp['0001_spam']]),
set([self.otherfakeapp['0001_first']]),
set([
self.otherfakeapp['0002_second'],
self.fakeapp['0003_alter_spam'],
])
],
[m.dependencies for m in self.otherfakeapp],
)
def test_forwards_plan(self):
self.assertEqual([
[self.fakeapp['0001_spam']],
[
self.fakeapp['0001_spam'],
self.fakeapp['0002_eggs']
],
[
self.fakeapp['0001_spam'],
self.fakeapp['0002_eggs'],
self.fakeapp['0003_alter_spam'],
]
],
[m.forwards_plan() for m in self.fakeapp],
)
self.assertEqual([
[
self.fakeapp['0001_spam'],
self.otherfakeapp['0001_first']
],
[
self.fakeapp['0001_spam'],
self.otherfakeapp['0001_first'],
self.otherfakeapp['0002_second']
],
[
self.fakeapp['0001_spam'],
self.otherfakeapp['0001_first'],
self.otherfakeapp['0002_second'],
self.fakeapp['0002_eggs'],
self.fakeapp['0003_alter_spam'],
self.otherfakeapp['0003_third'],
]
],
[m.forwards_plan() for m in self.otherfakeapp],
)
def test_is_before(self):
F1 = self.fakeapp['0001_spam']
F2 = self.fakeapp['0002_eggs']
F3 = self.fakeapp['0003_alter_spam']
O1 = self.otherfakeapp['0001_first']
O2 = self.otherfakeapp['0002_second']
O3 = self.otherfakeapp['0003_third']
self.assertTrue(F1.is_before(F2))
self.assertTrue(F1.is_before(F3))
self.assertTrue(F2.is_before(F3))
self.assertEqual(O3.is_before(O1), False)
self.assertEqual(O3.is_before(O2), False)
self.assertEqual(O2.is_before(O2), False)
self.assertEqual(O2.is_before(O1), False)
self.assertEqual(F2.is_before(O1), None)
self.assertEqual(F2.is_before(O2), None)
self.assertEqual(F2.is_before(O3), None)
class TestMigrationDependencies(Monkeypatcher):
installed_apps = ['deps_a', 'deps_b', 'deps_c']
def setUp(self):
super(TestMigrationDependencies, self).setUp()
self.deps_a = Migrations('deps_a')
self.deps_b = Migrations('deps_b')
self.deps_c = Migrations('deps_c')
Migrations.calculate_dependencies(force=True)
def test_dependencies(self):
self.assertEqual(
[
set([]),
set([self.deps_a['0001_a']]),
set([self.deps_a['0002_a']]),
set([
self.deps_a['0003_a'],
self.deps_b['0003_b'],
]),
set([self.deps_a['0004_a']]),
],
[m.dependencies for m in self.deps_a],
)
self.assertEqual(
[
set([]),
set([
self.deps_b['0001_b'],
self.deps_a['0002_a']
]),
set([
self.deps_b['0002_b'],
self.deps_a['0003_a']
]),
set([self.deps_b['0003_b']]),
set([self.deps_b['0004_b']]),
],
[m.dependencies for m in self.deps_b],
)
self.assertEqual(
[
set([]),
set([self.deps_c['0001_c']]),
set([self.deps_c['0002_c']]),
set([self.deps_c['0003_c']]),
set([
self.deps_c['0004_c'],
self.deps_a['0002_a']
]),
],
[m.dependencies for m in self.deps_c],
)
def test_dependents(self):
self.assertEqual([set([self.deps_a['0002_a']]),
set([self.deps_c['0005_c'],
self.deps_b['0002_b'],
self.deps_a['0003_a']]),
set([self.deps_b['0003_b'],
self.deps_a['0004_a']]),
set([self.deps_a['0005_a']]),
set([])],
[m.dependents for m in self.deps_a])
self.assertEqual([set([self.deps_b['0002_b']]),
set([self.deps_b['0003_b']]),
set([self.deps_b['0004_b'],
self.deps_a['0004_a']]),
set([self.deps_b['0005_b']]),
set([])],
[m.dependents for m in self.deps_b])
self.assertEqual([set([self.deps_c['0002_c']]),
set([self.deps_c['0003_c']]),
set([self.deps_c['0004_c']]),
set([self.deps_c['0005_c']]),
set([])],
[m.dependents for m in self.deps_c])
def test_forwards_plan(self):
self.assertEqual([[self.deps_a['0001_a']],
[self.deps_a['0001_a'],
self.deps_a['0002_a']],
[self.deps_a['0001_a'],
self.deps_a['0002_a'],
self.deps_a['0003_a']],
[self.deps_b['0001_b'],
self.deps_a['0001_a'],
self.deps_a['0002_a'],
self.deps_b['0002_b'],
self.deps_a['0003_a'],
self.deps_b['0003_b'],
self.deps_a['0004_a']],
[self.deps_b['0001_b'],
self.deps_a['0001_a'],
self.deps_a['0002_a'],
self.deps_b['0002_b'],
self.deps_a['0003_a'],
self.deps_b['0003_b'],
self.deps_a['0004_a'],
self.deps_a['0005_a']]],
[m.forwards_plan() for m in self.deps_a])
self.assertEqual([[self.deps_b['0001_b']],
[self.deps_b['0001_b'],
self.deps_a['0001_a'],
self.deps_a['0002_a'],
self.deps_b['0002_b']],
[self.deps_b['0001_b'],
self.deps_a['0001_a'],
self.deps_a['0002_a'],
self.deps_b['0002_b'],
self.deps_a['0003_a'],
self.deps_b['0003_b']],
[self.deps_b['0001_b'],
self.deps_a['0001_a'],
self.deps_a['0002_a'],
self.deps_b['0002_b'],
self.deps_a['0003_a'],
self.deps_b['0003_b'],
self.deps_b['0004_b']],
[self.deps_b['0001_b'],
self.deps_a['0001_a'],
self.deps_a['0002_a'],
self.deps_b['0002_b'],
self.deps_a['0003_a'],
self.deps_b['0003_b'],
self.deps_b['0004_b'],
self.deps_b['0005_b']]],
[m.forwards_plan() for m in self.deps_b])
self.assertEqual([[self.deps_c['0001_c']],
[self.deps_c['0001_c'],
self.deps_c['0002_c']],
[self.deps_c['0001_c'],
self.deps_c['0002_c'],
self.deps_c['0003_c']],
[self.deps_c['0001_c'],
self.deps_c['0002_c'],
self.deps_c['0003_c'],
self.deps_c['0004_c']],
[self.deps_c['0001_c'],
self.deps_c['0002_c'],
self.deps_c['0003_c'],
self.deps_c['0004_c'],
self.deps_a['0001_a'],
self.deps_a['0002_a'],
self.deps_c['0005_c']]],
[m.forwards_plan() for m in self.deps_c])
def test_backwards_plan(self):
self.assertEqual([
[
self.deps_c['0005_c'],
self.deps_b['0005_b'],
self.deps_b['0004_b'],
self.deps_a['0005_a'],
self.deps_a['0004_a'],
self.deps_b['0003_b'],
self.deps_b['0002_b'],
self.deps_a['0003_a'],
self.deps_a['0002_a'],
self.deps_a['0001_a'],
],
[
self.deps_c['0005_c'],
self.deps_b['0005_b'],
self.deps_b['0004_b'],
self.deps_a['0005_a'],
self.deps_a['0004_a'],
self.deps_b['0003_b'],
self.deps_b['0002_b'],
self.deps_a['0003_a'],
self.deps_a['0002_a'],
],
[
self.deps_b['0005_b'],
self.deps_b['0004_b'],
self.deps_a['0005_a'],
self.deps_a['0004_a'],
self.deps_b['0003_b'],
self.deps_a['0003_a'],
],
[
self.deps_a['0005_a'],
self.deps_a['0004_a'],
],
[
self.deps_a['0005_a'],
]
], [m.backwards_plan() for m in self.deps_a])
self.assertEqual([
[
self.deps_b['0005_b'],
self.deps_b['0004_b'],
self.deps_a['0005_a'],
self.deps_a['0004_a'],
self.deps_b['0003_b'],
self.deps_b['0002_b'],
self.deps_b['0001_b'],
],
[
self.deps_b['0005_b'],
self.deps_b['0004_b'],
self.deps_a['0005_a'],
self.deps_a['0004_a'],
self.deps_b['0003_b'],
self.deps_b['0002_b'],
],
[
self.deps_b['0005_b'],
self.deps_b['0004_b'],
self.deps_a['0005_a'],
self.deps_a['0004_a'],
self.deps_b['0003_b'],
],
[
self.deps_b['0005_b'],
self.deps_b['0004_b'],
],
[
self.deps_b['0005_b'],
],
], [m.backwards_plan() for m in self.deps_b])
self.assertEqual([
[
self.deps_c['0005_c'],
self.deps_c['0004_c'],
self.deps_c['0003_c'],
self.deps_c['0002_c'],
self.deps_c['0001_c'],
],
[
self.deps_c['0005_c'],
self.deps_c['0004_c'],
self.deps_c['0003_c'],
self.deps_c['0002_c'],
],
[
self.deps_c['0005_c'],
self.deps_c['0004_c'],
self.deps_c['0003_c'],
],
[
self.deps_c['0005_c'],
self.deps_c['0004_c'],
],
[self.deps_c['0005_c']]
], [m.backwards_plan() for m in self.deps_c])
class TestCircularDependencies(Monkeypatcher):
installed_apps = ["circular_a", "circular_b"]
def test_plans(self):
Migrations.calculate_dependencies(force=True)
circular_a = Migrations('circular_a')
circular_b = Migrations('circular_b')
self.assertRaises(
exceptions.CircularDependency,
circular_a[-1].forwards_plan,
)
self.assertRaises(
exceptions.CircularDependency,
circular_b[-1].forwards_plan,
)
self.assertRaises(
exceptions.CircularDependency,
circular_a[-1].backwards_plan,
)
self.assertRaises(
exceptions.CircularDependency,
circular_b[-1].backwards_plan,
)
class TestMigrations(Monkeypatcher):
installed_apps = ["fakeapp", "otherfakeapp"]
def test_all(self):
M1 = Migrations(__import__("fakeapp", {}, {}, ['']))
M2 = Migrations(__import__("otherfakeapp", {}, {}, ['']))
self.assertEqual(
[M1, M2],
list(all_migrations()),
)
def test(self):
M1 = Migrations(__import__("fakeapp", {}, {}, ['']))
self.assertEqual(M1, Migrations("fakeapp"))
self.assertEqual(M1, Migrations(self.create_fake_app("fakeapp")))
def test_application(self):
fakeapp = Migrations("fakeapp")
application = __import__("fakeapp", {}, {}, [''])
self.assertEqual(application, fakeapp.application)
def test_migration(self):
# Can't use vanilla import, modules beginning with numbers aren't in grammar
M1 = __import__("fakeapp.migrations.0001_spam", {}, {}, ['Migration']).Migration
M2 = __import__("fakeapp.migrations.0002_eggs", {}, {}, ['Migration']).Migration
migration = Migrations('fakeapp')
self.assertEqual(M1, migration['0001_spam'].migration().Migration)
self.assertEqual(M2, migration['0002_eggs'].migration().Migration)
self.assertRaises(exceptions.UnknownMigration,
migration['0001_jam'].migration)
def test_guess_migration(self):
# Can't use vanilla import, modules beginning with numbers aren't in grammar
M1 = __import__("fakeapp.migrations.0001_spam", {}, {}, ['Migration']).Migration
migration = Migrations('fakeapp')
self.assertEqual(M1, migration.guess_migration("0001_spam").migration().Migration)
self.assertEqual(M1, migration.guess_migration("0001_spa").migration().Migration)
self.assertEqual(M1, migration.guess_migration("0001_sp").migration().Migration)
self.assertEqual(M1, migration.guess_migration("0001_s").migration().Migration)
self.assertEqual(M1, migration.guess_migration("0001_").migration().Migration)
self.assertEqual(M1, migration.guess_migration("0001").migration().Migration)
self.assertRaises(exceptions.UnknownMigration,
migration.guess_migration, "0001-spam")
self.assertRaises(exceptions.MultiplePrefixMatches,
migration.guess_migration, "000")
self.assertRaises(exceptions.MultiplePrefixMatches,
migration.guess_migration, "")
self.assertRaises(exceptions.UnknownMigration,
migration.guess_migration, "0001_spams")
self.assertRaises(exceptions.UnknownMigration,
migration.guess_migration, "0001_jam")
def test_app_label(self):
names = ['fakeapp', 'otherfakeapp']
self.assertEqual(names,
[Migrations(n).app_label() for n in names])
def test_full_name(self):
names = ['fakeapp', 'otherfakeapp']
self.assertEqual([n + '.migrations' for n in names],
[Migrations(n).full_name() for n in names])
class TestMigrationLogic(Monkeypatcher):
"""
Tests if the various logic functions in migration actually work.
"""
installed_apps = ["fakeapp", "otherfakeapp"]
def assertListEqual(self, list1, list2):
list1 = list(list1)
list2 = list(list2)
list1.sort()
list2.sort()
return self.assertEqual(list1, list2)
def test_find_ghost_migrations(self):
pass
def test_apply_migrations(self):
MigrationHistory.objects.all().delete()
migrations = Migrations("fakeapp")
# We should start with no migrations
self.assertEqual(list(MigrationHistory.objects.all()), [])
# Apply them normally
migrate_app(migrations, target_name=None, fake=False,
load_initial_data=True)
# We should finish with all migrations
self.assertListEqual(
((u"fakeapp", u"0001_spam"),
(u"fakeapp", u"0002_eggs"),
(u"fakeapp", u"0003_alter_spam"),),
MigrationHistory.objects.values_list("app_name", "migration"),
)
# Now roll them backwards
migrate_app(migrations, target_name="zero", fake=False)
# Finish with none
self.assertEqual(list(MigrationHistory.objects.all()), [])
def test_migration_merge_forwards(self):
MigrationHistory.objects.all().delete()
migrations = Migrations("fakeapp")
# We should start with no migrations
self.assertEqual(list(MigrationHistory.objects.all()), [])
# Insert one in the wrong order
MigrationHistory.objects.create(app_name = "fakeapp",
migration = "0002_eggs",
applied = datetime.datetime.now())
# Did it go in?
self.assertListEqual(
((u"fakeapp", u"0002_eggs"),),
MigrationHistory.objects.values_list("app_name", "migration"),
)
# Apply them normally
self.assertRaises(exceptions.InconsistentMigrationHistory,
migrate_app,
migrations, target_name=None, fake=False)
self.assertRaises(exceptions.InconsistentMigrationHistory,
migrate_app,
migrations, target_name='zero', fake=False)
try:
migrate_app(migrations, target_name=None, fake=False)
except exceptions.InconsistentMigrationHistory, e:
self.assertEqual(
[
(
migrations['0002_eggs'],
migrations['0001_spam'],
)
],
e.problems,
)
try:
migrate_app(migrations, target_name="zero", fake=False)
except exceptions.InconsistentMigrationHistory, e:
self.assertEqual(
[
(
migrations['0002_eggs'],
migrations['0001_spam'],
)
],
e.problems,
)
# Nothing should have changed (no merge mode!)
self.assertListEqual(
((u"fakeapp", u"0002_eggs"),),
MigrationHistory.objects.values_list("app_name", "migration"),
)
# Apply with merge
migrate_app(migrations, target_name=None, merge=True, fake=False)
# We should finish with all migrations
self.assertListEqual(
((u"fakeapp", u"0001_spam"),
(u"fakeapp", u"0002_eggs"),
(u"fakeapp", u"0003_alter_spam"),),
MigrationHistory.objects.values_list("app_name", "migration"),
)
# Now roll them backwards
migrate_app(migrations, target_name="0002", fake=False)
migrate_app(migrations, target_name="0001", fake=True)
migrate_app(migrations, target_name="zero", fake=False)
# Finish with none
self.assertEqual(list(MigrationHistory.objects.all()), [])
def test_alter_column_null(self):
def null_ok():
from django.db import connection, transaction
# the DBAPI introspection module fails on postgres NULLs.
cursor = connection.cursor()
# SQLite has weird now()
if db.backend_name == "sqlite3":
now_func = "DATETIME('NOW')"
else:
now_func = "NOW()"
try:
cursor.execute("INSERT INTO southtest_spam (id, weight, expires, name) VALUES (100, 10.1, %s, NULL);" % now_func)
except:
transaction.rollback()
return False
else:
cursor.execute("DELETE FROM southtest_spam")
transaction.commit()
return True
MigrationHistory.objects.all().delete()
migrations = Migrations("fakeapp")
# by default name is NOT NULL
migrate_app(migrations, target_name="0002", fake=False)
self.failIf(null_ok())
self.assertListEqual(
((u"fakeapp", u"0001_spam"),
(u"fakeapp", u"0002_eggs"),),
MigrationHistory.objects.values_list("app_name", "migration"),
)
# after 0003, it should be NULL
migrate_app(migrations, target_name="0003", fake=False)
self.assert_(null_ok())
self.assertListEqual(
((u"fakeapp", u"0001_spam"),
(u"fakeapp", u"0002_eggs"),
(u"fakeapp", u"0003_alter_spam"),),
MigrationHistory.objects.values_list("app_name", "migration"),
)
# make sure it is NOT NULL again
migrate_app(migrations, target_name="0002", fake=False)
self.failIf(null_ok(), 'name not null after migration')
self.assertListEqual(
((u"fakeapp", u"0001_spam"),
(u"fakeapp", u"0002_eggs"),),
MigrationHistory.objects.values_list("app_name", "migration"),
)
# finish with no migrations, otherwise other tests fail...
migrate_app(migrations, target_name="zero", fake=False)
self.assertEqual(list(MigrationHistory.objects.all()), [])
def test_dependencies(self):
fakeapp = Migrations("fakeapp")
otherfakeapp = Migrations("otherfakeapp")
# Test a simple path
self.assertEqual([fakeapp['0001_spam'],
fakeapp['0002_eggs'],
fakeapp['0003_alter_spam']],
fakeapp['0003_alter_spam'].forwards_plan())
# And a complex one.
self.assertEqual(
[
fakeapp['0001_spam'],
otherfakeapp['0001_first'],
otherfakeapp['0002_second'],
fakeapp['0002_eggs'],
fakeapp['0003_alter_spam'],
otherfakeapp['0003_third']
],
otherfakeapp['0003_third'].forwards_plan(),
)
class TestMigrationUtils(Monkeypatcher):
installed_apps = ["fakeapp", "otherfakeapp"]
def test_get_app_label(self):
self.assertEqual(
"southtest",
get_app_label(self.create_fake_app("southtest.models")),
)
self.assertEqual(
"baz",
get_app_label(self.create_fake_app("foo.bar.baz.models")),
)
class TestUtils(unittest.TestCase):
def test_flatten(self):
self.assertEqual([], list(flatten(iter([]))))
self.assertEqual([], list(flatten(iter([iter([]), ]))))
self.assertEqual([1], list(flatten(iter([1]))))
self.assertEqual([1, 2], list(flatten(iter([1, 2]))))
self.assertEqual([1, 2], list(flatten(iter([iter([1]), 2]))))
self.assertEqual([1, 2], list(flatten(iter([iter([1, 2])]))))
self.assertEqual([1, 2, 3], list(flatten(iter([iter([1, 2]), 3]))))
self.assertEqual([1, 2, 3],
list(flatten(iter([iter([1]), iter([2]), 3]))))
self.assertEqual([1, 2, 3],
list(flatten([[1], [2], 3])))
def test_depends(self):
graph = {'A1': []}
self.assertEqual(['A1'],
depends('A1', lambda n: graph[n]))
graph = {'A1': [],
'A2': ['A1'],
'A3': ['A2']}
self.assertEqual(['A1', 'A2', 'A3'],
depends('A3', lambda n: graph[n]))
graph = {'A1': [],
'A2': ['A1'],
'A3': ['A2', 'A1']}
self.assertEqual(['A1', 'A2', 'A3'],
depends('A3', lambda n: graph[n]))
graph = {'A1': [],
'A2': ['A1'],
'A3': ['A2', 'A1', 'B1'],
'B1': []}
self.assertEqual(
['B1', 'A1', 'A2', 'A3'],
depends('A3', lambda n: graph[n]),
)
graph = {'A1': [],
'A2': ['A1'],
'A3': ['A2', 'A1', 'B2'],
'B1': [],
'B2': ['B1']}
self.assertEqual(
['B1', 'B2', 'A1', 'A2', 'A3'],
depends('A3', lambda n: graph[n]),
)
graph = {'A1': [],
'A2': ['A1', 'B1'],
'A3': ['A2'],
'B1': ['A1']}
self.assertEqual(['A1', 'B1', 'A2', 'A3'],
depends('A3', lambda n: graph[n]))
graph = {'A1': [],
'A2': ['A1'],
'A3': ['A2', 'A1', 'B2'],
'B1': [],
'B2': ['B1', 'C1'],
'C1': ['B1']}
self.assertEqual(
['B1', 'C1', 'B2', 'A1', 'A2', 'A3'],
depends('A3', lambda n: graph[n]),
)
graph = {'A1': [],
'A2': ['A1'],
'A3': ['A2', 'B2', 'A1', 'C1'],
'B1': ['A1'],
'B2': ['B1', 'C2', 'A1'],
'C1': ['B1'],
'C2': ['C1', 'A1'],
'C3': ['C2']}
self.assertEqual(
['A1', 'B1', 'C1', 'C2', 'B2', 'A2', 'A3'],
depends('A3', lambda n: graph[n]),
)
def assertCircularDependency(self, trace, target, graph):
"Custom assertion that checks a circular dependency is detected correctly."
self.assertRaises(
exceptions.CircularDependency,
depends,
target,
lambda n: graph[n],
)
try:
depends(target, lambda n: graph[n])
        except exceptions.CircularDependency as e:
self.assertEqual(trace, e.trace)
def test_depends_cycle(self):
graph = {'A1': ['A1']}
self.assertCircularDependency(
['A1', 'A1'],
'A1',
graph,
)
graph = {'A1': [],
'A2': ['A1', 'A2'],
'A3': ['A2']}
self.assertCircularDependency(
['A1', 'A2', 'A1'],
'A3',
graph,
)
graph = {'A1': [],
'A2': ['A1'],
'A3': ['A2', 'A3'],
'A4': ['A3']}
self.assertCircularDependency(
['A3', 'A2', 'A1', 'A3'],
'A4',
graph,
)
graph = {'A1': ['B1'],
'B1': ['A1']}
self.assertCircularDependency(
['A1', 'B1', 'A1'],
'A1',
graph,
)
graph = {'A1': [],
'A2': ['A1', 'B2'],
'A3': ['A2'],
'B1': [],
'B2': ['B1', 'A2'],
'B3': ['B2']}
self.assertCircularDependency(
['A2', 'A1', 'B2', 'A2'],
'A3',
graph,
)
graph = {'A1': [],
'A2': ['A1', 'B3'],
'A3': ['A2'],
'B1': [],
'B2': ['B1', 'A2'],
'B3': ['B2']}
self.assertCircularDependency(
['B2', 'A2', 'A1', 'B3', 'B2'],
'A3',
graph,
)
graph = {'A1': [],
'A2': ['A1'],
'A3': ['A2', 'B2'],
'A4': ['A3'],
'B1': ['A3'],
'B2': ['B1']}
self.assertCircularDependency(
['A1', 'B2', 'B1', 'A3', 'A2', 'A1'],
'A4',
graph,
)
|
defcube/django-south
|
south/tests/logic.py
|
Python
|
apache-2.0
| 32,253 | 0.002263 |
from __future__ import unicode_literals
from django.contrib.syndication import views
from django.utils import feedgenerator
from django.utils.timezone import get_fixed_timezone
from .models import Article, Entry
class TestRss2Feed(views.Feed):
title = 'My blog'
description = 'A more thorough description of my blog.'
link = '/blog/'
feed_guid = '/foo/bar/1234'
author_name = 'Sally Smith'
author_email = 'test@example.com'
author_link = 'http://www.example.com/'
categories = ('python', 'django')
feed_copyright = 'Copyright (c) 2007, Sally Smith'
ttl = 600
def items(self):
return Entry.objects.all()
def item_description(self, item):
return "Overridden description: %s" % item
def item_pubdate(self, item):
return item.published
def item_updateddate(self, item):
return item.updated
item_author_name = 'Sally Smith'
item_author_email = 'test@example.com'
item_author_link = 'http://www.example.com/'
item_categories = ('python', 'testing')
item_copyright = 'Copyright (c) 2007, Sally Smith'
class TestRss2FeedWithGuidIsPermaLinkTrue(TestRss2Feed):
def item_guid_is_permalink(self, item):
return True
class TestRss2FeedWithGuidIsPermaLinkFalse(TestRss2Feed):
def item_guid(self, item):
return str(item.pk)
def item_guid_is_permalink(self, item):
return False
class TestRss091Feed(TestRss2Feed):
feed_type = feedgenerator.RssUserland091Feed
class TestNoPubdateFeed(views.Feed):
title = 'Test feed'
link = '/feed/'
def items(self):
return Entry.objects.all()
class TestAtomFeed(TestRss2Feed):
feed_type = feedgenerator.Atom1Feed
subtitle = TestRss2Feed.description
class TestLatestFeed(TestRss2Feed):
"""
A feed where the latest entry date is an `updated` element.
"""
feed_type = feedgenerator.Atom1Feed
subtitle = TestRss2Feed.description
def items(self):
return Entry.objects.exclude(pk=5)
class ArticlesFeed(TestRss2Feed):
"""
A feed to test no link being defined. Articles have no get_absolute_url()
method, and item_link() is not defined.
"""
def items(self):
return Article.objects.all()
class TestSingleEnclosureRSSFeed(TestRss2Feed):
"""
A feed to test that RSS feeds work with a single enclosure.
"""
def item_enclosure_url(self, item):
return 'http://example.com'
def item_enclosure_size(self, item):
return 0
def item_mime_type(self, item):
return 'image/png'
class TestMultipleEnclosureRSSFeed(TestRss2Feed):
"""
A feed to test that RSS feeds raise an exception with multiple enclosures.
"""
def item_enclosures(self, item):
return [
feedgenerator.Enclosure('http://example.com/hello.png', 0, 'image/png'),
feedgenerator.Enclosure('http://example.com/goodbye.png', 0, 'image/png'),
]
class TemplateFeed(TestRss2Feed):
"""
A feed to test defining item titles and descriptions with templates.
"""
title_template = 'syndication/title.html'
description_template = 'syndication/description.html'
# Defining a template overrides any item_title definition
def item_title(self):
return "Not in a template"
class TemplateContextFeed(TestRss2Feed):
"""
A feed to test custom context data in templates for title or description.
"""
title_template = 'syndication/title_context.html'
description_template = 'syndication/description_context.html'
def get_context_data(self, **kwargs):
context = super(TemplateContextFeed, self).get_context_data(**kwargs)
context['foo'] = 'bar'
return context
class NaiveDatesFeed(TestAtomFeed):
"""
A feed with naive (non-timezone-aware) dates.
"""
def item_pubdate(self, item):
return item.published
class TZAwareDatesFeed(TestAtomFeed):
"""
A feed with timezone-aware dates.
"""
def item_pubdate(self, item):
# Provide a weird offset so that the test can know it's getting this
# specific offset and not accidentally getting on from
# settings.TIME_ZONE.
return item.published.replace(tzinfo=get_fixed_timezone(42))
class TestFeedUrlFeed(TestAtomFeed):
feed_url = 'http://example.com/customfeedurl/'
class MyCustomAtom1Feed(feedgenerator.Atom1Feed):
"""
Test of a custom feed generator class.
"""
def root_attributes(self):
attrs = super(MyCustomAtom1Feed, self).root_attributes()
attrs['django'] = 'rocks'
return attrs
def add_root_elements(self, handler):
super(MyCustomAtom1Feed, self).add_root_elements(handler)
handler.addQuickElement('spam', 'eggs')
def item_attributes(self, item):
attrs = super(MyCustomAtom1Feed, self).item_attributes(item)
attrs['bacon'] = 'yum'
return attrs
def add_item_elements(self, handler, item):
super(MyCustomAtom1Feed, self).add_item_elements(handler, item)
handler.addQuickElement('ministry', 'silly walks')
class TestCustomFeed(TestAtomFeed):
feed_type = MyCustomAtom1Feed
class TestSingleEnclosureAtomFeed(TestAtomFeed):
"""
A feed to test that Atom feeds work with a single enclosure.
"""
def item_enclosure_url(self, item):
return 'http://example.com'
def item_enclosure_size(self, item):
return 0
def item_mime_type(self, item):
return 'image/png'
class TestMultipleEnclosureAtomFeed(TestAtomFeed):
"""
A feed to test that Atom feeds work with multiple enclosures.
"""
def item_enclosures(self, item):
return [
feedgenerator.Enclosure('http://example.com/hello.png', '0', 'image/png'),
feedgenerator.Enclosure('http://example.com/goodbye.png', '0', 'image/png'),
]
|
yephper/django
|
tests/syndication_tests/feeds.py
|
Python
|
bsd-3-clause
| 6,128 | 0.000653 |
"""A simple interface for executing bytecodes over a Bluetooth serial port.
From the lms2012 source code documentation:
Beside running user programs the VM is able to execute direct commands from
the Communication Module. In fact direct commands are small programs that
consist of regular byte codes and they are executed in parallel with a running
user program. Special care MUST be taken when writing direct commands because
the decision until now is NOT to restrict the use of "dangerous" codes and
constructions (loops in a direct command are allowed).
If a new direct command from the same source is going to be executed, any
currently running direct command is terminated.
Because of a small header objects are limited to one VMTHREAD only - SUBCALLs
and BLOCKs are, of course, not possible. This header contains information about
the number of global variables (for response), number of local variables, and
command size.
Direct commands that have data responses can place the data in the global
variable space. The global variable space is equal to the communication
response buffer. The composition of the direct command defines at which
offset the result is placed (global variable 0 is placed at offset 0 in
the buffer).
Offsets in the response buffer (global variables) must be aligned (i.e. 32bit
variable offsets are divisible by 4, 16bit variable offsets are divisible by 2).
All multi-byte words are little endian.
Direct Command bytes:
------------------------------
Byte 0 - 1: Command size
Byte 2 - 3: Message counter
Byte 4: CommandType
Byte 5 - 6: Number of global and local variables (compressed).
  Byte 6   Byte 5
 76543210 76543210
 -------- --------
 llllllgg gggggggg
       gg gggggggg  Global variables [0..MAX_COMMAND_GLOBALS]
 llllll             Local variables  [0..MAX_COMMAND_LOCALS]
Byte 7 - n: Byte codes
Direct Command response Bytes:
------------------------------
Byte 0 - 1: Reply size
Byte 2 - 3: Message counter
Byte 4: ReplyType
Byte 5 - n: Response buffer (global variable values)
"""
import ev3
import message
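# A minimal illustrative sketch (not part of the original module) of how the
# global and local variable counts described in the header layout above are
# compressed into bytes 5 - 6 of a direct command. DirectCommand.send() below
# applies the same bit packing to its message buffer.
def _example_pack_variable_counts(num_globals, num_locals):
    """Return (byte5, byte6) for the compressed variable-count field."""
    # Byte 5 carries the low 8 bits of the global count; byte 6 carries the
    # local count in its upper 6 bits and the two high bits of the global
    # count in its lower 2 bits.
    byte5 = num_globals & 0xFF
    byte6 = ((num_locals & 0x3F) << 2) | ((num_globals >> 8) & 0x03)
    return (byte5, byte6)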
MAX_CMD_LEN = 1019 # The size of the brick's txBuf is 1024 bytes but
# the header requires 5 bytes.
MAX_STR_LEN = 255
MAX_VERSION_STR_LEN = 64
MAX_LOCAL_VARIABLE_BYTES = 0xFFFFFFFF
MAX_NAME_STR_LEN = 64
MOTOR_MIN_POWER = -100
MOTOR_MAX_POWER = 100
MOTOR_MIN_SPEED = -100
MOTOR_MAX_SPEED = 100
USB_CHAIN_LAYER_MASTER = 0
USB_CHAIN_LAYER_SLAVE = 1
MOTOR_MIN_RATIO = -200
MOTOR_MAX_RATIO = 200
MIN_VOLUME = 0
MAX_VOLUME = 100
LCD_HEIGHT_PIXELS = 128
LCD_WIDTH_PIXELS = 178
class DirectCommandError(Exception):
"""Subclass for reporting errors."""
pass
class CommandType(object):
"""Every System Command must be one of these two types."""
DIRECT_COMMAND_REPLY = 0x00
DIRECT_COMMAND_NO_REPLY = 0x80
class ReplyType(object):
"""Every reply to a System Command must be one of these two types."""
DIRECT_REPLY = 0x02
DIRECT_REPLY_ERROR = 0x04
class OutputPort(object):
"""These can be OR'd together to operate on multiple ports at once."""
PORT_A = 0x01
PORT_B = 0x02
PORT_C = 0x04
PORT_D = 0x08
ALL = (PORT_A | PORT_B | PORT_C | PORT_D)
class InputPort(object):
"""These can be OR'd together to operate on multiple ports at once."""
PORT_1 = 0x00
PORT_2 = 0x01
PORT_3 = 0x02
PORT_4 = 0x03
PORT_A = 0x10
PORT_B = 0x11
PORT_C = 0x12
PORT_D = 0x13
class StopType(object):
"""When an OutputPort is stopped it can be told to brake or coast."""
COAST = 0
BRAKE = 1
class PolarityType(object):
""""""
BACKWARD = -1
TOGGLE = 0
FORWARD = 1
class TouchMode(object):
""""""
TOUCH = 0
BUMPS = 1
class NXTLightMode(object):
""""""
REFLECT = 0
AMBIENT = 1
class NXTSoundMode(object):
""""""
DECIBELS = 0
ADJUSTED_DECIBLES = 1
class NXTColorMode(object):
""""""
REFLECTIVE = 0
AMBIENT = 1
COLOR = 2
GREEN = 3
BLUE = 4
RAW = 5
class NXTUltrasonicMode(object):
""""""
CM = 0
INCHES = 1
class NXTTemperatureMode(object):
""""""
CELSIUS = 0
FAHRENHEIT = 1
class MotorMode(object):
""""""
DEGREES = 0
ROTATIONS = 1
PERCENT = 2
class UltrasonicMode(object):
""""""
CM = 0
INCH = 1
LISTEN = 2
class GyroMode(object):
""""""
ANGLE = 0
RATE = 1
FAS = 2
G_AND_A = 3
class IRMode(object):
""""""
PROXIMITY = 0
SEEK = 1
REMOTE = 2
REMOTE_A = 3
SALT = 4
CALIBRATION = 5
class ColorMode(object):
""""""
RELECTIVE = 0
AMBIENT = 1
COLOR = 2
class ColorSensorColor(object):
"""These are the results that the EV3 color sensor can return when operating
in ColorMode.COLOR.
"""
NONE = 0
BLACK = 1
BLUE = 2
GREEN = 3
YELLOW = 4
RED = 5
WHITE = 6
BROWN = 7
class LEDPattern(object):
"""The brick user interface has several status LEDs."""
OFF = 0
GREEN = 1
RED = 2
ORANGE = 3
FLASHING_GREEN = 4
FLASHING_RED = 5
FLASHING_ORANGE = 6
GREEN_HEARTBEAT = 7
RED_HEARTBEAT = 8
ORANGE_HEARTBEAT = 9
class DeviceType(object):
"""These are the known device types.
NOTE: These have only been partially confirmed.
"""
NXT_TOUCH = 0x01
NXT_LIGHT = 0x02
NXT_SOUND = 0x03
NXT_COLOR = 0x04
NXT_ULTRASONIC = 0x05
NXT_TEMPERATURE = 0x06
TACHO = 0x07 # TYPE_TACHO in lms2012.h
MINI_TACHO = 0x08 # TYPE_MINITACHO in lms2012.h
NEW_TACHO = 0x09 # TYPE_NEWTACHO in lms2012.h
EV3_TOUCH = 0x10
EV3_COLOR = 0x1D
EV3_ULTRASONIC = 0x1E
EV3_GYROSCOPE = 0x20
EV3_INFRARED = 0x21
SENSOR_INITIALIZING = 0x7D
PORT_EMPTY = 0x7E
ERROR_PORT = 0x7F
UNKNOWN = 0xFF
class LCDColor(object):
"""The brick's LCD only displays two colors."""
BACKGROUND = 0
FOREGROUND = 1
class ButtonType(object):
"""The brick's user interface contains 6 buttons."""
NO_BUTTON = 0
UP_BUTTON = 1
ENTER_BUTTON = 2
DOWN_BUTTON = 3
RIGHT_BUTTON = 4
LEFT_BUTTON = 5
BACK_BUTTON = 6
ANY_BUTTON = 7
class MathType(object):
""""""
EXP = 1 # e^x r = expf(x)
MOD = 2 # Modulo r = fmod(x,y)
FLOOR = 3 # Floor r = floor(x)
CEIL = 4 # Ceiling r = ceil(x)
ROUND = 5 # Round r = round(x)
ABS = 6 # Absolute r = fabs(x)
NEGATE = 7 # Negate r = 0.0 - x
SQRT = 8 # Squareroot r = sqrt(x)
LOG = 9 # Log r = log10(x)
LN = 10 # Ln r = log(x)
SIN = 11
COS = 12
TAN = 13
ASIN = 14
ACOS = 15
ATAN = 16
MOD8 = 17 # Modulo DATA8 r = x % y
MOD16 = 18 # Modulo DATA16 r = x % y
MOD32 = 19 # Modulo DATA32 r = x % y
POW = 20 # Exponent r = powf(x,y)
TRUNC = 21 # Truncate r = (float)((int)(x * pow(y))) / pow(y)
class BrowserType(object):
""""""
BROWSE_FOLDERS = 0 # Browser for folders
BROWSE_FOLDS_FILES = 1 # Browser for folders and files
BROWSE_CACHE = 2 # Browser for cached / recent files
BROWSE_FILES = 3 # Browser for files
class Icon(object):
"""The icons on the brick are enumerated by value."""
ICON_NONE = -1
ICON_RUN = 0
ICON_FOLDER = 1
ICON_FOLDER2 = 2
ICON_USB = 3
ICON_SD = 4
ICON_SOUND = 5
ICON_IMAGE = 6
ICON_SETTINGS = 7
ICON_ONOFF = 8
ICON_SEARCH = 9
ICON_WIFI = 10
ICON_CONNECTIONS = 11
ICON_ADD_HIDDEN = 12
ICON_TRASHBIN = 13
ICON_VISIBILITY = 14
ICON_KEY = 15
ICON_CONNECT = 16
ICON_DISCONNECT = 17
ICON_UP = 18
ICON_DOWN = 19
ICON_WAIT1 = 20
ICON_WAIT2 = 21
ICON_BLUETOOTH = 22
ICON_INFO = 23
ICON_TEXT = 24
ICON_QUESTIONMARK = 27
ICON_INFO_FILE = 28
ICON_DISC = 29
ICON_CONNECTED = 30
ICON_OBP = 31
ICON_OBD = 32
ICON_OPENFOLDER = 33
ICON_BRICK1 = 34
class FontType(object):
""""""
NORMAL_FONT = 0
SMALL_FONT = 1
LARGE_FONT = 2
TINY_FONT = 3
class DataFormat(object):
"""Data formats that are used by the VM."""
DATA8 = 0x00
DATA16 = 0x01
DATA32 = 0x02
DATA_F = 0x03 # 32bit floating point value (single precision)
DATA_S = 0x04 # Zero terminated string
DATA_A = 0x05 # Array handle
DATA_V = 0x07 # Variable type
DATA_PCT = 0x10 # Percent (used in INPUT_READEXT)
DATA_RAW = 0x12 # Raw (used in INPUT_READEXT)
DATA_SI = 0x13 # SI unit (used in INPUT_READEXT)
# Values used by this Python module only:
HND = 0xFF # For compatibility with ParamTypes.
BOOL = 0xFE # For converting to Python values
class ParamType(object):
"""Parameter types that are used by the VM."""
PRIMPAR_LABEL = 0x20
HND = 0x10 # 8bit handle index (i.e. pointer to a string)
ADR = 0x08 # 3bit address
LCS = 0x84 # Null terminated string
LAB1 = 0xA0
LC0 = 0x00 # 6bit immediate
LC1 = 0x81 # 8bit immediate
LC2 = 0x82 # 16bit immediate
LC4 = 0x83 # 32bit immediate
LCA = 0x81 # 8bit pointer to local array
LV1 = 0xC1 # 8bit pointer to local value
LV2 = 0xC2 # 16bit pointer to local value
LV4 = 0xC3 # 32bit pointer to local value
LVA = 0xC1 # 8bit pointer to local array
GV0 = 0x60 # 5bit pointer to global value
GV1 = 0xE1 # 8bit pointer to global value
GV2 = 0xE2 # 16bit pointer to global value
GV4 = 0xE3 # 32bit pointer to global value
GVA = 0xE1 # 8bit pointer to global array
# Values used by this Python module only:
FLOAT = 0xFF # 32bit floating point value (single precision)
# Defines the number of bytes required to represent each DataFormat.
PARAM_TYPE_LENS = { ParamType.PRIMPAR_LABEL: None,
ParamType.HND: 1,
ParamType.ADR: 1,
ParamType.LCS: None,
ParamType.LAB1: 1,
ParamType.LC0: 0,
ParamType.LC1: 1,
ParamType.LC2: 2,
ParamType.LC4: 4,
ParamType.LCA: 1,
ParamType.LV1: 1,
ParamType.LV2: 2,
ParamType.LV4: 4,
ParamType.LVA: 1,
ParamType.GV0: 0,
ParamType.GV1: 1,
ParamType.GV2: 2,
ParamType.GV4: 4,
ParamType.GVA: 1,
ParamType.FLOAT: 4 }
DATA_FORMAT_LENS = { DataFormat.DATA8: 1,
DataFormat.DATA16: 2,
DataFormat.DATA32: 4,
DataFormat.DATA_F: 4,
DataFormat.DATA_S: None,
DataFormat.DATA_A: None,
DataFormat.DATA_V: None,
DataFormat.DATA_PCT: 1,
DataFormat.DATA_RAW: 4,
DataFormat.DATA_SI: 4,
DataFormat.HND: 1,
DataFormat.BOOL: 1 }
# There are two ways to specify an output in the c_output module. The first is
# as a bit mask and the second is by index.
OUTPUT_CHANNEL_TO_INDEX = { OutputPort.PORT_A: 0,
OutputPort.PORT_B: 1,
OutputPort.PORT_C: 2,
OutputPort.PORT_D: 3 }
class UIReadSubcode(object):
""""""
GET_VBATT = 1
GET_IBATT = 2
GET_OS_VERS = 3
GET_EVENT = 4
GET_TBATT = 5
GET_IINT = 6
GET_IMOTOR = 7
GET_STRING = 8
GET_HW_VERS = 9
GET_FW_VERS = 10
GET_FW_BUILD = 11
GET_OS_BUILD = 12
GET_ADDRESS = 13
GET_CODE = 14
KEY = 15
GET_SHUTDOWN = 16
GET_WARNING = 17
GET_LBATT = 18
TEXTBOX_READ = 21
GET_VERSION = 26
GET_IP = 27
GET_POWER = 29
GET_SDCARD = 30
GET_USBSTICK = 31
class UIWriteSubcode(object):
""""""
WRITE_FLUSH = 1
FLOATVALUE = 2
STAMP = 3
PUT_STRING = 8
VALUE8 = 9
VALUE16 = 10
VALUE32 = 11
VALUEF = 12
ADDRESS = 13
CODE = 14
DOWNLOAD_END = 15
SCREEN_BLOCK = 16
TEXTBOX_APPEND = 21
SET_BUSY = 22
SET_TESTPIN = 24
INIT_RUN = 25
UPDATE_RUN = 26
LED = 27
POWER = 29
GRAPH_SAMPLE = 30
TERMINAL = 31
class UIButtonSubcode(object):
""""""
SHORTPRESS = 1
LONGPRESS = 2
WAIT_FOR_PRESS = 3
FLUSH = 4
PRESS = 5
RELEASE = 6
GET_HORZ = 7
GET_VERT = 8
PRESSED = 9
SET_BACK_BLOCK = 10
GET_BACK_BLOCK = 11
TESTSHORTPRESS = 12
TESTLONGPRESS = 13
GET_BUMBED = 14
GET_CLICK = 15
class COMGetSubcodes(object):
""""""
GET_ON_OFF = 1 # Set, Get
GET_VISIBLE = 2 # Set, Get
GET_RESULT = 4 # Get
GET_PIN = 5 # Set, Get
SEARCH_ITEMS = 8 # Get
SEARCH_ITEM = 9 # Get
FAVOUR_ITEMS = 10 # Get
FAVOUR_ITEM = 11 # Get
GET_ID = 12
GET_BRICKNAME = 13
GET_NETWORK = 14
GET_PRESENT = 15
GET_ENCRYPT = 16
CONNEC_ITEMS = 17
CONNEC_ITEM = 18
GET_INCOMING = 19
GET_MODE2 = 20
class COMSetSubcode(object):
""""""
SET_ON_OFF = 1 # Set, Get
SET_VISIBLE = 2 # Set, Get
SET_SEARCH = 3 # Set
SET_PIN = 5 # Set, Get
SET_PASSKEY = 6 # Set
SET_CONNECTION = 7 # Set
SET_BRICKNAME = 8
SET_MOVEUP = 9
SET_MOVEDOWN = 10
SET_ENCRYPT = 11
SET_SSID = 12
SET_MODE2 = 13
class InputDeviceSubcode(object):
""""""
GET_FORMAT = 2
CAL_MINMAX = 3
CAL_DEFAULT = 4
GET_TYPEMODE = 5
GET_SYMBOL = 6
CAL_MIN = 7
CAL_MAX = 8
SETUP = 9 # Probably only for internal use.
CLR_ALL = 10 # Resets counters, angle, etc.
GET_RAW = 11
GET_CONNECTION = 12
STOP_ALL = 13 # Stops any attached motors?
GET_NAME = 21
GET_MODENAME = 22
SET_RAW = 23
GET_FIGURES = 24
GET_CHANGES = 25
CLR_CHANGES = 26
READY_PCT = 27
READY_RAW = 28
READY_SI = 29
GET_MINMAX = 30
GET_BUMPS = 31
class ProgramInfoSubcode(object):
""""""
OBJ_STOP = 0
OBJ_START = 4
GET_STATUS = 22
GET_SPEED = 23
GET_PRGRESULT = 24
SET_INSTR = 25
class UIDrawSubcode(object):
""""""
UPDATE = 0
CLEAN = 1
PIXEL = 2
LINE = 3
CIRCLE = 4
TEXT = 5
ICON = 6
PICTURE = 7
VALUE = 8
FILLRECT = 9
RECT = 10
NOTIFICATION = 11
QUESTION = 12
KEYBOARD = 13
BROWSE = 14
VERTBAR = 15
INVERSERECT = 16
SELECT_FONT = 17
TOPLINE = 18
FILLWINDOW = 19
SCROLL = 20
DOTLINE = 21
VIEW_VALUE = 22
VIEW_UNIT = 23
FILLCIRCLE = 24
STORE = 25
RESTORE = 26
ICON_QUESTION = 27
BMPFILE = 28
POPUP = 29
GRAPH_SETUP = 30
GRAPH_DRAW = 31
TEXTBOX = 32
class FileSubcode(object):
""""""
OPEN_APPEND = 0
OPEN_READ = 1
OPEN_WRITE = 2
READ_VALUE = 3
WRITE_VALUE = 4
READ_TEXT = 5
WRITE_TEXT = 6
CLOSE = 7
LOAD_IMAGE = 8
GET_HANDLE = 9
MAKE_FOLDER = 10
GET_POOL = 11
SET_LOG_SYNC_TIME = 12
GET_FOLDERS = 13
GET_LOG_SYNC_TIME = 14
GET_SUBFOLDER_NAME = 15
WRITE_LOG = 16
CLOSE_LOG = 17
GET_IMAGE = 18
GET_ITEM = 19
GET_CACHE_FILES = 20
PUT_CACHE_FILE = 21
GET_CACHE_FILE = 22
DEL_CACHE_FILE = 23
DEL_SUBFOLDER = 24
GET_LOG_NAME = 25
OPEN_LOG = 27
READ_BYTES = 28
WRITE_BYTES = 29
REMOVE = 30
MOVE = 31
class ArraySubcode(object):
""""""
DELETE = 0
CREATE8 = 1
CREATE16 = 2
CREATE32 = 3
CREATEF = 4
RESIZE = 5
FILL = 6
COPY = 7
INIT8 = 8
INIT16 = 9
INIT32 = 10
INITF = 11
SIZE = 12
READ_CONTENT = 13
WRITE_CONTENT = 14
READ_SIZE = 15
class FilenameSubcode(object):
""""""
EXIST = 16 # MUST BE GREATER OR EQUAL TO "ARRAY_SUBCODES"
TOTALSIZE = 17
SPLIT = 18
MERGE = 19
CHECK = 20
PACK = 21
UNPACK = 22
GET_FOLDERNAME = 23
class InfoSubcode(object):
""""""
SET_ERROR = 1
GET_ERROR = 2
ERRORTEXT = 3
GET_VOLUME = 4
SET_VOLUME = 5
GET_MINUTES = 6
SET_MINUTES = 7
class SoundSubcode(object):
""""""
BREAK = 0
TONE = 1
PLAY = 2
REPEAT = 3
SERVICE = 4
class StringSubcode(object):
""""""
GET_SIZE = 1 # Get string size
ADD = 2 # Add two strings
COMPARE = 3 # Compare two strings
DUPLICATE = 5 # Duplicate one string to another
VALUE_TO_STRING = 6
STRING_TO_VALUE = 7
STRIP = 8
NUMBER_TO_STRING = 9
SUB = 10
VALUE_FORMATTED = 11
NUMBER_FORMATTED = 12
class TstSubcode(object):
""""""
TST_OPEN = 10 # Must >= "INFO_SUBCODES"
TST_CLOSE = 11
TST_READ_PINS = 12
TST_WRITE_PINS = 13
TST_READ_ADC = 14
TST_WRITE_UART = 15
TST_READ_UART = 16
TST_ENABLE_UART = 17
TST_DISABLE_UART = 18
TST_ACCU_SWITCH = 19
TST_BOOT_MODE2 = 20
TST_POLL_MODE2 = 21
TST_CLOSE_MODE2 = 22
TST_RAM_CHECK = 23
class Opcode(object):
"""All of the opcodes that are used by the VM."""
ERROR = 0x00
NOP = 0x01
PROGRAM_STOP = 0x02
PROGRAM_START = 0x03
OBJECT_STOP = 0x04
OBJECT_START = 0x05
OBJECT_TRIG = 0x06
OBJECT_WAIT = 0x07
RETURN = 0x08
CALL = 0x09
OBJECT_END = 0x0A
SLEEP = 0x0B
PROGRAM_INFO = 0x0C
LABEL = 0x0D
PROBE = 0x0E
DO = 0x0F
# MATH
ADD8 = 0x10
ADD16 = 0x11
ADD32 = 0x12
ADDF = 0x13
SUB8 = 0x14
SUB16 = 0x15
SUB32 = 0x16
SUBF = 0x17
MUL8 = 0x18
MUL16 = 0x19
MUL32 = 0x1A
MULF = 0x1B
DIV8 = 0x1C
DIV16 = 0x1D
DIV32 = 0x1E
DIVF = 0x1F
# LOGIC
OR8 = 0x20
OR16 = 0x21
OR32 = 0x22
AND8 = 0x24
AND16 = 0x25
AND32 = 0x26
XOR8 = 0x28
XOR16 = 0x29
XOR32 = 0x2A
RL8 = 0x2C
RL16 = 0x2D
RL32 = 0x2E
# MOVE
INIT_BYTES = 0x2F
MOVE8_8 = 0x30
MOVE8_16 = 0x31
MOVE8_32 = 0x32
MOVE8_F = 0x33
MOVE16_8 = 0x34
MOVE16_16 = 0x35
MOVE16_32 = 0x36
MOVE16_F = 0x37
MOVE32_8 = 0x38
MOVE32_16 = 0x39
MOVE32_32 = 0x3A
MOVE32_F = 0x3B
MOVEF_8 = 0x3C
MOVEF_16 = 0x3D
MOVEF_32 = 0x3E
MOVEF_F = 0x3F
# BRANCH
JR = 0x40
JR_FALSE = 0x41
JR_TRUE = 0x42
JR_NAN = 0x43
# COMPARE
CP_LT8 = 0x44
CP_LT16 = 0x45
CP_LT32 = 0x46
CP_LTF = 0x47
CP_GT8 = 0x48
CP_GT16 = 0x49
CP_GT32 = 0x4A
CP_GTF = 0x4B
CP_EQ8 = 0x4C
CP_EQ16 = 0x4D
CP_EQ32 = 0x4E
CP_EQF = 0x4F
CP_NEQ8 = 0x50
CP_NEQ16 = 0x51
CP_NEQ32 = 0x52
CP_NEQF = 0x53
CP_LTEQ8 = 0x54
CP_LTEQ16 = 0x55
CP_LTEQ32 = 0x56
CP_LTEQF = 0x57
CP_GTEQ8 = 0x58
CP_GTEQ16 = 0x59
CP_GTEQ32 = 0x5A
CP_GTEQF = 0x5B
# SELECT
SELECT8 = 0x5C
SELECT16 = 0x5D
SELECT32 = 0x5E
SELECTF = 0x5F
SYSTEM = 0x60
PORT_CNV_OUTPUT = 0x61
PORT_CNV_INPUT = 0x62
NOTE_TO_FREQ = 0x63
# BRANCH
JR_LT8 = 0x64
JR_LT16 = 0x65
JR_LT32 = 0x66
JR_LTF = 0x67
JR_GT8 = 0x68
JR_GT16 = 0x69
JR_GT32 = 0x6A
JR_GTF = 0x6B
JR_EQ8 = 0x6C
JR_EQ16 = 0x6D
JR_EQ32 = 0x6E
JR_EQF = 0x6F
JR_NEQ8 = 0x70
JR_NEQ16 = 0x71
JR_NEQ32 = 0x72
JR_NEQF = 0x73
JR_LTEQ8 = 0x74
JR_LTEQ16 = 0x75
JR_LTEQ32 = 0x76
JR_LTEQF = 0x77
JR_GTEQ8 = 0x78
JR_GTEQ16 = 0x79
JR_GTEQ32 = 0x7A
JR_GTEQF = 0x7B
# VM
INFO = 0x7C
STRINGS = 0x7D
MEMORY_WRITE = 0x7E
MEMORY_READ = 0x7F
# UI
UI_FLUSH = 0x80
UI_READ = 0x81
UI_WRITE = 0x82
UI_BUTTON = 0x83
UI_DRAW = 0x84
# TIMER
TIMER_WAIT = 0x85
TIMER_READY = 0x86
TIMER_READ = 0x87
# BREAKPOINT
BP0 = 0x88
BP1 = 0x89
BP2 = 0x8A
BP3 = 0x8B
BP_SET = 0x8C
MATH = 0x8D
RANDOM = 0x8E
# TIMER
TIMER_READ_US = 0x8F
# UI
KEEP_ALIVE = 0x90
# COM
COM_READ = 0x91
COM_WRITE = 0x92
# SOUND
SOUND = 0x94
SOUND_TEST = 0x95
SOUND_READY = 0x96
# INPUT
INPUT_SAMPLE = 0x97
INPUT_DEVICE_LIST = 0x98
INPUT_DEVICE = 0x99
INPUT_READ = 0x9A
INPUT_TEST = 0x9B
INPUT_READY = 0x9C
INPUT_READSI = 0x9D
INPUT_READEXT = 0x9E
INPUT_WRITE = 0x9F
# OUTPUT
OUTPUT_GET_TYPE = 0xA0
OUTPUT_SET_TYPE = 0xA1
OUTPUT_RESET = 0xA2
OUTPUT_STOP = 0xA3
OUTPUT_POWER = 0xA4
OUTPUT_SPEED = 0xA5
OUTPUT_START = 0xA6
OUTPUT_POLARITY = 0xA7
OUTPUT_READ = 0xA8
OUTPUT_TEST = 0xA9
OUTPUT_READY = 0xAA
OUTPUT_POSITION = 0xAB
OUTPUT_STEP_POWER = 0xAC
OUTPUT_TIME_POWER = 0xAD
OUTPUT_STEP_SPEED = 0xAE
OUTPUT_TIME_SPEED = 0xAF
OUTPUT_STEP_SYNC = 0xB0
OUTPUT_TIME_SYNC = 0xB1
OUTPUT_CLR_COUNT = 0xB2
OUTPUT_GET_COUNT = 0xB3
OUTPUT_PRG_STOP = 0xB4
# MEMORY
FILE = 0xC0
ARRAY = 0xC1
ARRAY_WRITE = 0xC2
ARRAY_READ = 0xC3
ARRAY_APPEND = 0xC4
MEMORY_USAGE = 0xC5
FILENAME = 0xC6
# READ
READ8 = 0xC8
READ16 = 0xC9
READ32 = 0xCA
READF = 0xCB
# WRITE
WRITE8 = 0xCC
WRITE16 = 0xCD
WRITE32 = 0xCE
WRITEF = 0xCF
# COM
COM_READY = 0xD0
COM_READDATA = 0xD1
COM_WRITEDATA = 0xD2
COM_GET = 0xD3
COM_SET = 0xD4
COM_TEST = 0xD5
COM_REMOVE = 0xD6
COM_WRITEFILE = 0xD7
MAILBOX_OPEN = 0xD8
MAILBOX_WRITE = 0xD9
MAILBOX_READ = 0xDA
MAILBOX_TEST = 0xDB
MAILBOX_READY = 0xDC
MAILBOX_CLOSE = 0xDD
# SPARE
TST = 0xFF
class DirectCommand(object):
"""Handles variable allocation and parameters for commands that can consist
of arbitrary bytecodes.
TODO: Better param verification?
"""
# These are inserted into the _global_params_types list so that commands
# that return mulitple values can have their values bundled together into
# tuples before they are returned.
_REPLY_TUPLE_OPEN_TOKEN = '_('
_REPLY_TUPLE_CLOSE_TOKEN = ')_'
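    # Illustrative example (not from the original code): a command built from
    # add_input_device_get_typemode() followed by add_ui_read_get_lbatt()
    # parses its reply as ((device_type, mode), battery_level_pct), because
    # the two DATA8 values of GET_TYPEMODE are wrapped in the tuple tokens.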
def __init__(self):
"""Constructs a new, empty object."""
self._global_params_types = []
self._local_params_byte_count = 0
self._global_params_byte_count = 0
# Allocate space for the CommandType.
self._msg = [0x00]
# Allocate space for global and local param lengths.
self._msg.append(0x00)
self._msg.append(0x00)
def send(self, ev3_object):
"""Sends the message and parses the reply."""
        # Only the three header bytes allocated in __init__ are present.
        if (3 == len(self._msg)):
raise DirectCommandError('Attempt to send an empty DirectCommand.')
self._msg[1] = (self._global_params_byte_count & 0xFF)
self._msg[2] = ((self._local_params_byte_count << 2) |
((self._global_params_byte_count >> 8) & 0x03))
if (self._global_params_byte_count):
self._msg[0] = CommandType.DIRECT_COMMAND_REPLY
reply = ev3_object.send_message_for_reply(self._msg)
return self._parse_reply(reply)
else:
self._msg[0] = CommandType.DIRECT_COMMAND_NO_REPLY
ev3_object.send_message(self._msg)
def safe_add(fn):
"""A wrapper for adding commands in a safe manner."""
        def checked_add(*args, **kwargs):
# Wrappers aren't bound methods so they can't reference 'self'
# directly. However, 'self' will be provided as the first parameter
# when the wrapped method is called.
_self = args[0]
msg_len = len(_self._msg)
global_params_types_len = len(_self._global_params_types)
local_params_byte_count = _self._local_params_byte_count
global_params_byte_count = _self._global_params_byte_count
            fn(*args, **kwargs)
if ((MAX_CMD_LEN < len(_self._msg)) or
(MAX_CMD_LEN < _self._global_params_byte_count) or
(MAX_LOCAL_VARIABLE_BYTES < _self._local_params_byte_count)):
del (_self._msg[msg_len:])
del (_self._global_params_types[global_params_types_len:])
_self._local_params_byte_count = local_params_byte_count
_self._global_params_byte_count = global_params_byte_count
raise DirectCommandError('Not enough space to add the ' +
'given func.')
return checked_add
@safe_add
def add_timer_wait(self, milliseconds):
"""Causes the thread to sleep for the specified number of milliseconds.
"""
local_var_tuple = self._allocate_local_param(DataFormat.DATA32)
self._msg.append(Opcode.TIMER_WAIT)
self._append_local_constant(milliseconds)
self._append_param(*local_var_tuple)
self._msg.append(Opcode.TIMER_READY)
self._append_param(*local_var_tuple)
@safe_add
def add_ui_draw_update(self):
"""Updates the screen (applies whatever drawing commands have been
issued since the last update).
"""
self._msg.append(Opcode.UI_DRAW)
self._msg.append(UIDrawSubcode.UPDATE)
@safe_add
def add_ui_draw_clean(self):
"""Fills the screen with LCDColor.BACKGROUND."""
self._msg.append(Opcode.UI_DRAW)
self._msg.append(UIDrawSubcode.CLEAN)
@safe_add
def add_ui_draw_fillwindow(self, lcd_color, start_y, count):
"""Fills the window with count rows of the given LCDColor starting at
row start_y.
NOTE: Starting at 0 with a size of 0 will clear the window. This seems
to be the way the CLEAN command is implemented.
"""
self._msg.append(Opcode.UI_DRAW)
self._msg.append(UIDrawSubcode.FILLWINDOW)
self._append_param(lcd_color)
self._append_param(start_y, ParamType.LC2)
self._append_param(count, ParamType.LC2)
@safe_add
def add_ui_draw_pixel(self, lcd_color, xy):
"""Draws a pixel at the given (x, y)."""
self._msg.append(Opcode.UI_DRAW)
self._msg.append(UIDrawSubcode.PIXEL)
self._append_param(lcd_color)
self._append_param(xy[0], ParamType.LC2)
self._append_param(xy[1], ParamType.LC2)
@safe_add
def add_ui_draw_line(self, lcd_color, start_xy, end_xy):
"""Draws a line from the start (x, y) to the end (x, y)."""
self._msg.append(Opcode.UI_DRAW)
self._msg.append(UIDrawSubcode.LINE)
self._append_param(lcd_color)
self._append_param(start_xy[0], ParamType.LC2)
self._append_param(start_xy[1], ParamType.LC2)
self._append_param(end_xy[0], ParamType.LC2)
self._append_param(end_xy[1], ParamType.LC2)
@safe_add
def add_ui_draw_dotline(self, lcd_color,
start_xy,
end_xy,
on_pixels,
off_pixels):
"""Draws a line from the start (x, y) to the end (x, y). The line will
be composed of a repeating pattern consisting of on_pixels followed by
off_pixels.
"""
self._msg.append(Opcode.UI_DRAW)
self._msg.append(UIDrawSubcode.DOTLINE)
self._append_param(lcd_color)
self._append_param(start_xy[0], ParamType.LC2)
self._append_param(start_xy[1], ParamType.LC2)
self._append_param(end_xy[0], ParamType.LC2)
self._append_param(end_xy[1], ParamType.LC2)
self._append_param(on_pixels, ParamType.LC2)
self._append_param(off_pixels, ParamType.LC2)
@safe_add
def add_ui_draw_rect(self, lcd_color, xy, width, height):
"""Draws a rectangle with (x, y) as the top-left corner and with width
and height dimensions.
"""
self._msg.append(Opcode.UI_DRAW)
self._msg.append(UIDrawSubcode.RECT)
self._append_param(lcd_color)
self._append_param(xy[0], ParamType.LC2)
self._append_param(xy[1], ParamType.LC2)
self._append_param(width, ParamType.LC2)
self._append_param(height, ParamType.LC2)
@safe_add
def add_ui_draw_fillrect(self, lcd_color, xy, width, height):
"""Draws a filled rectangle with (x, y) as the top-left corner and
with width and height dimensions.
"""
self._msg.append(Opcode.UI_DRAW)
self._msg.append(UIDrawSubcode.FILLRECT)
self._append_param(lcd_color)
self._append_param(xy[0], ParamType.LC2)
self._append_param(xy[1], ParamType.LC2)
self._append_param(width, ParamType.LC2)
self._append_param(height, ParamType.LC2)
@safe_add
def add_ui_draw_inverserect(self, xy, width, height):
"""Draws a rectangle with (x, y) as the top-left corner and with width
and height dimensions. Any pixel that this rectangle overlaps will have
its color flipped.
"""
self._msg.append(Opcode.UI_DRAW)
self._msg.append(UIDrawSubcode.INVERSERECT)
self._append_param(xy[0], ParamType.LC2)
self._append_param(xy[1], ParamType.LC2)
self._append_param(width, ParamType.LC2)
self._append_param(height, ParamType.LC2)
@safe_add
def add_ui_draw_circle(self, lcd_color, xy, radius):
"""Draws a circle centered at (x, y) with the specified radius."""
self._msg.append(Opcode.UI_DRAW)
self._msg.append(UIDrawSubcode.CIRCLE)
self._append_param(lcd_color)
self._append_param(xy[0], ParamType.LC2)
self._append_param(xy[1], ParamType.LC2)
self._append_param(radius, ParamType.LC2)
@safe_add
def add_ui_draw_fillcircle(self, lcd_color, xy, radius):
"""Draws a filled circle centered at (x, y) with the specified radius.
"""
self._msg.append(Opcode.UI_DRAW)
self._msg.append(UIDrawSubcode.FILLCIRCLE)
self._append_param(lcd_color)
self._append_param(xy[0], ParamType.LC2)
self._append_param(xy[1], ParamType.LC2)
self._append_param(radius, ParamType.LC2)
@safe_add
def add_ui_draw_selectfont(self, font_type):
"""Selects the FontType that will be used by following calls to
add_ui_draw_text.
"""
self._msg.append(Opcode.UI_DRAW)
self._msg.append(UIDrawSubcode.SELECT_FONT)
self._append_param(font_type)
@safe_add
def add_ui_draw_text(self, lcd_color, xy, text_str):
"""Draws the given text with (x, y) as the top-left corner of the
bounding box. Use add_ui_draw_selectfont to select the font.
"""
self._msg.append(Opcode.UI_DRAW)
self._msg.append(UIDrawSubcode.TEXT)
self._append_param(lcd_color)
self._append_param(xy[0], ParamType.LC2)
self._append_param(xy[1], ParamType.LC2)
self._append_param(text_str, ParamType.LCS)
@safe_add
def add_ui_draw_topline(self, topline_enabled):
"""Enables or disables the display of the menu bar at the top of the
screen that normally displays status icons such as the battery
indicator.
"""
self._msg.append(Opcode.UI_DRAW)
self._msg.append(UIDrawSubcode.TOPLINE)
self._append_param(int(topline_enabled))
@safe_add
def add_ui_draw_store(self, ui_level_no=0):
"""If ui_level_no is zero then this function saves the current screen
        content so that it can be restored later using add_ui_draw_restore.
"""
self._msg.append(Opcode.UI_DRAW)
self._msg.append(UIDrawSubcode.STORE)
self._append_param(ui_level_no)
@safe_add
def add_ui_draw_restore(self, ui_level_no=0):
"""Restores the screen content that was previously saved using
add_ui_draw_store.
"""
self._msg.append(Opcode.UI_DRAW)
self._msg.append(UIDrawSubcode.RESTORE)
self._append_param(ui_level_no)
@safe_add
def add_ui_button_pressed(self, button_type):
"""Returns True if the specified ButtonType button is being pressed."""
self._msg.append(Opcode.UI_BUTTON)
self._msg.append(UIButtonSubcode.PRESSED)
self._append_param(button_type)
self._append_reply_param(DataFormat.BOOL)
@safe_add
def add_keep_alive(self):
"""Resets the sleep timer and returns the sleep timer's new value in
minutes.
"""
self._msg.append(Opcode.KEEP_ALIVE)
self._append_reply_param(DataFormat.DATA8)
@safe_add
def add_input_device_get_typemode(self, input_port,
layer=USB_CHAIN_LAYER_MASTER):
"""Returns the DeviceType and mode for the given input_port. The mode
value depends on the type of the device.
"""
self._msg.append(Opcode.INPUT_DEVICE)
self._msg.append(InputDeviceSubcode.GET_TYPEMODE)
self._append_param(layer)
self._append_param(input_port)
self._global_params_types.append(self._REPLY_TUPLE_OPEN_TOKEN)
self._append_reply_param(DataFormat.DATA8)
self._append_reply_param(DataFormat.DATA8)
self._global_params_types.append(self._REPLY_TUPLE_CLOSE_TOKEN)
@safe_add
def add_input_device_get_name(self, input_port,
layer=USB_CHAIN_LAYER_MASTER):
"""Returns a string describing the device that is located at the
specified port i.e. 'NONE' or 'US-DIST-CM'.
"""
self._msg.append(Opcode.INPUT_DEVICE)
self._msg.append(InputDeviceSubcode.GET_NAME)
self._append_param(layer)
self._append_param(input_port)
self._append_param(MAX_NAME_STR_LEN, ParamType.LC2)
self._append_reply_param((DataFormat.DATA_S, MAX_NAME_STR_LEN))
@safe_add
def add_input_device_get_modename(self, input_port,
mode,
layer=USB_CHAIN_LAYER_MASTER):
"""Returns a string describing the specified mode of the device that is
located at the given port. For example, for an ultrasonic sensor mode
0 will return 'US-DIST-CM' and mode 1 will return 'US-DIST-IN'.
NOTE: Reading invalid modes can corrupt the reply buffer.
"""
self._msg.append(Opcode.INPUT_DEVICE)
self._msg.append(InputDeviceSubcode.GET_MODENAME)
self._append_param(layer)
self._append_param(input_port)
self._append_param(mode)
self._append_param(MAX_NAME_STR_LEN, ParamType.LC2)
self._append_reply_param((DataFormat.DATA_S, MAX_NAME_STR_LEN))
@safe_add
def add_input_device_get_minmax(self, input_port,
layer=USB_CHAIN_LAYER_MASTER):
""""""
self._msg.append(Opcode.INPUT_DEVICE)
self._msg.append(InputDeviceSubcode.GET_MINMAX)
self._append_param(layer)
self._append_param(input_port)
self._global_params_types.append(self._REPLY_TUPLE_OPEN_TOKEN)
self._append_reply_param(DataFormat.DATA_F)
self._append_reply_param(DataFormat.DATA_F)
self._global_params_types.append(self._REPLY_TUPLE_CLOSE_TOKEN)
@safe_add
def add_input_device_get_changes(self, input_port,
layer=USB_CHAIN_LAYER_MASTER):
"""Returns the number of positive changes since the last time
CLR_CHANGES was called (i.e. the number of times that a touch sensor
has been pressed).
"""
self._msg.append(Opcode.INPUT_DEVICE)
self._msg.append(InputDeviceSubcode.GET_CHANGES)
self._append_param(layer)
self._append_param(input_port)
self._append_reply_param(DataFormat.DATA_F)
@safe_add
def add_input_device_get_bumps(self, input_port,
layer=USB_CHAIN_LAYER_MASTER):
"""Returns the number of negative changes since the last time
CLR_CHANGES was called (i.e. the number of times that a touch sensor
has been released).
"""
self._msg.append(Opcode.INPUT_DEVICE)
self._msg.append(InputDeviceSubcode.GET_BUMPS)
self._append_param(layer)
self._append_param(input_port)
self._append_reply_param(DataFormat.DATA_F)
@safe_add
def add_input_device_clr_changes(self, input_port,
layer=USB_CHAIN_LAYER_MASTER):
"""Returns the number of negative changes since the last time
CLR_CHANGES was called (i.e. the number of times that a touch sensor
has been released).
NOTE: Does not clear the accumulated angle measurement for the EV3
gyro sensor.
"""
self._msg.append(Opcode.INPUT_DEVICE)
self._msg.append(InputDeviceSubcode.CLR_CHANGES)
self._append_param(layer)
self._append_param(input_port)
@safe_add
def add_input_device_clr_all(self, layer=USB_CHAIN_LAYER_MASTER):
"""Clears all of the input device values."""
self._msg.append(Opcode.INPUT_DEVICE)
self._msg.append(InputDeviceSubcode.CLR_ALL)
self._append_param(layer)
@safe_add
def add_input_device_ready_si(self, input_port,
mode=-1,
device_type=0,
layer=USB_CHAIN_LAYER_MASTER):
"""Waits until the device on the specified InputPort is ready and then
returns its value as a standard unit.
"""
self._msg.append(Opcode.INPUT_DEVICE)
self._msg.append(InputDeviceSubcode.READY_SI)
self._append_param(layer)
self._append_param(input_port)
self._append_param(device_type)
self._append_param(mode)
self._append_param(1) # Number of values
self._append_reply_param(DataFormat.DATA_F)
@safe_add
def add_input_device_ready_raw(self, input_port,
mode=-1,
device_type=0,
layer=USB_CHAIN_LAYER_MASTER):
"""Waits until the device on the specified InputPort is ready and then
returns its value as a raw value.
"""
self._msg.append(Opcode.INPUT_DEVICE)
self._msg.append(InputDeviceSubcode.READY_RAW)
self._append_param(layer)
self._append_param(input_port)
self._append_param(device_type)
self._append_param(mode)
self._append_param(1) # Number of values
self._append_reply_param(DataFormat.DATA32)
@safe_add
def add_input_device_ready_percent(self, input_port,
mode=-1,
device_type=0,
layer=USB_CHAIN_LAYER_MASTER):
"""Waits until the device on the specified InputPort is ready and then
returns its value as a percentage.
"""
self._msg.append(Opcode.INPUT_DEVICE)
self._msg.append(InputDeviceSubcode.READY_PCT)
self._append_param(layer)
self._append_param(input_port)
self._append_param(device_type)
self._append_param(mode)
self._append_param(1) # Number of values
self._append_reply_param(DataFormat.DATA8)
@safe_add
def add_sound_tone(self, volume,
frequency_hz,
duration_ms,
layer=USB_CHAIN_LAYER_MASTER):
"""Plays the tone at the given volume and frequency for the given
duration_ms milliseconds.
"""
self._msg.append(Opcode.SOUND)
self._msg.append(SoundSubcode.TONE)
self._append_param(volume)
self._append_param(frequency_hz, ParamType.LC2)
self._append_param(duration_ms, ParamType.LC2)
@safe_add
def add_sound_play(self, volume, filename):
"""Plays the sound file with the given name at the specified volume.
The default sound files are located in the '/home/root/lms2012/sys/ui/'
directory and include Startup.rsf, PowerDown.rsf, OverpowerAlert.rsf,
GeneralAlarm.rsf, DownloadSucces.rsf, and Click.rsf.
NOTE: Do not include the '.rsf' extension in the filename.
"""
self._msg.append(Opcode.SOUND)
self._msg.append(SoundSubcode.PLAY)
self._append_param(volume)
self._append_param(filename, ParamType.LCS)
@safe_add
def add_ui_read_get_fw_vers(self):
"""Returns the FW version as a string in the form 'VX.XXX'."""
self._msg.append(Opcode.UI_READ)
self._msg.append(UIReadSubcode.GET_FW_VERS)
self._append_param(MAX_VERSION_STR_LEN, ParamType.LC2)
self._append_reply_param((DataFormat.DATA_S, MAX_NAME_STR_LEN))
@safe_add
def add_ui_read_get_hw_vers(self):
"""Returns the HW version as a string in the form 'VX.XXX'."""
self._msg.append(Opcode.UI_READ)
self._msg.append(UIReadSubcode.GET_HW_VERS)
self._append_param(MAX_VERSION_STR_LEN, ParamType.LC2)
self._append_reply_param((DataFormat.DATA_S, MAX_NAME_STR_LEN))
@safe_add
def add_ui_read_get_fw_build(self):
"""Returns the firmware build as a string in the form 'XXXXXXXXXX'."""
self._msg.append(Opcode.UI_READ)
self._msg.append(UIReadSubcode.GET_FW_BUILD)
self._append_param(MAX_VERSION_STR_LEN, ParamType.LC2)
self._append_reply_param((DataFormat.DATA_S, MAX_NAME_STR_LEN))
@safe_add
def add_ui_read_get_os_vers(self):
"""Returns the OS version as a string in the form 'Linux X.X.XX'."""
self._msg.append(Opcode.UI_READ)
self._msg.append(UIReadSubcode.GET_OS_VERS)
self._append_param(MAX_VERSION_STR_LEN, ParamType.LC2)
self._append_reply_param((DataFormat.DATA_S, MAX_NAME_STR_LEN))
@safe_add
def add_ui_read_get_os_build(self):
"""Returns the OS build as a string in the form 'XXXXXXXXXX'."""
self._msg.append(Opcode.UI_READ)
self._msg.append(UIReadSubcode.GET_OS_BUILD)
self._append_param(MAX_VERSION_STR_LEN, ParamType.LC2)
self._append_reply_param((DataFormat.DATA_S, MAX_NAME_STR_LEN))
@safe_add
def add_ui_read_get_version(self):
"""Returns the Lego Mindstorms version as a string in the form
'LMS2012 VX.XXX(<TIMESTAMP>)'.
"""
self._msg.append(Opcode.UI_READ)
self._msg.append(UIReadSubcode.GET_VERSION)
self._append_param(MAX_VERSION_STR_LEN, ParamType.LC2)
self._append_reply_param((DataFormat.DATA_S, MAX_NAME_STR_LEN))
@safe_add
def add_ui_read_get_ip(self):
"""Returns the IP address as a string."""
self._msg.append(Opcode.UI_READ)
self._msg.append(UIReadSubcode.GET_IP)
self._append_param(MAX_VERSION_STR_LEN, ParamType.LC2)
self._append_reply_param((DataFormat.DATA_S, MAX_NAME_STR_LEN))
@safe_add
def add_ui_read_get_vbatt(self):
"""Gets the current battery voltage. According to the constants that are
defined in 'lms2012.h', the rechargeable battery should be in the range
of [6.0, 7.1] and normal batteries should be in the range of [4.5, 6.2].
"""
self._msg.append(Opcode.UI_READ)
self._msg.append(UIReadSubcode.GET_VBATT)
self._append_reply_param(DataFormat.DATA_F)
@safe_add
def add_ui_read_get_lbatt(self):
"""Gets the current battery level as a percentage."""
self._msg.append(Opcode.UI_READ)
self._msg.append(UIReadSubcode.GET_LBATT)
self._append_reply_param(DataFormat.DATA_PCT)
@safe_add
def add_ui_read_get_ibatt(self):
"""Gets the current battery discharge amperage."""
self._msg.append(Opcode.UI_READ)
self._msg.append(UIReadSubcode.GET_IBATT)
self._append_reply_param(DataFormat.DATA_F)
@safe_add
def add_ui_read_get_tbatt(self):
"""Gets the current battery temperature rise."""
self._msg.append(Opcode.UI_READ)
self._msg.append(UIReadSubcode.GET_TBATT)
self._append_reply_param(DataFormat.DATA_F)
@safe_add
def add_ui_read_get_imotor(self):
"""Gets the amount of current that the motors are using."""
self._msg.append(Opcode.UI_READ)
self._msg.append(UIReadSubcode.GET_IMOTOR)
self._append_reply_param(DataFormat.DATA_F)
@safe_add
def add_ui_read_get_sdcard(self):
"""Returns the following information about the SD card:
(<SD_CARD_OK>, <TOTAL_KBYTES>, <FREE_KBYTES>). The SD_CARD_OK value is
a boolean.
"""
self._msg.append(Opcode.UI_READ)
self._msg.append(UIReadSubcode.GET_SDCARD)
self._global_params_types.append(self._REPLY_TUPLE_OPEN_TOKEN)
self._append_reply_param(DataFormat.BOOL)
self._append_reply_param(DataFormat.DATA32)
self._append_reply_param(DataFormat.DATA32)
self._global_params_types.append(self._REPLY_TUPLE_CLOSE_TOKEN)
@safe_add
def add_ui_read_get_usbstick(self):
"""Returns the following information about the USB stick:
(<USB_STICK_OK>, <TOTAL_KBYTES>, <FREE_KBYTES>). The USB_STICK_OK value
is a boolean.
"""
self._msg.append(Opcode.UI_READ)
        self._msg.append(UIReadSubcode.GET_USBSTICK)
self._global_params_types.append(self._REPLY_TUPLE_OPEN_TOKEN)
self._append_reply_param(DataFormat.BOOL)
self._append_reply_param(DataFormat.DATA32)
self._append_reply_param(DataFormat.DATA32)
self._global_params_types.append(self._REPLY_TUPLE_CLOSE_TOKEN)
@safe_add
def add_output_get_type(self, output_port, layer=USB_CHAIN_LAYER_MASTER):
"""Returns the DeviceType of the device that is connected to the
specified OutputPort.
"""
self._msg.append(Opcode.OUTPUT_GET_TYPE)
self._append_param(layer)
self._append_param(OUTPUT_CHANNEL_TO_INDEX[output_port])
self._append_reply_param(DataFormat.DATA8)
@safe_add
def add_output_set_type(self, output_port,
output_type,
layer=USB_CHAIN_LAYER_MASTER):
"""Sets the DeviceType of the device that is connected to the
specified OutputPort.
TODO: d_pwm.c says this only works with type TACHO or MINI_TACHO.
TODO: Debug this. Not sure how to verify that this works.
It seems to be implemented in d_pwm.c
"""
self._msg.append(Opcode.OUTPUT_SET_TYPE)
        self._append_param(layer)
        self._append_param(OUTPUT_CHANNEL_TO_INDEX[output_port])
        self._append_param(output_type)
@safe_add
def add_output_reset(self, output_port_mask,
layer=USB_CHAIN_LAYER_MASTER):
"""Resets the tacho count and timer of the motor(s) described by the
output_port_mask parameter. Should be called when initializing a
motor?
"""
self._msg.append(Opcode.OUTPUT_RESET)
self._append_param(layer)
self._append_param(output_port_mask)
@safe_add
def add_output_stop(self, output_port_mask,
stop_type,
layer=USB_CHAIN_LAYER_MASTER):
"""Stops the motor(s) described by the output_port_mask parameter.
The stop_byte parameter defines whether the motor will BRAKE or COAST.
"""
self._msg.append(Opcode.OUTPUT_STOP)
self._append_param(layer)
self._append_param(output_port_mask)
self._append_param(stop_type)
@safe_add
def add_output_power(self, output_port_mask,
power,
layer=USB_CHAIN_LAYER_MASTER):
"""Sets the power for the motor(s) described by the output_port_mask
parameter. Power values should be in the range [-100, 100]. Note that
add_output_start needs to be called before the motor will start moving.
"""
self._msg.append(Opcode.OUTPUT_POWER)
self._append_param(layer)
self._append_param(output_port_mask)
self._append_param(power)
@safe_add
def add_output_speed(self, output_port_mask,
speed,
layer=USB_CHAIN_LAYER_MASTER):
"""Sets the speed for the motor(s) described by the output_port_mask
parameter. Speed values should be in the range [-100, 100]. Note that
add_output_start needs to be called before the motor will start moving.
"""
        self._msg.append(Opcode.OUTPUT_SPEED)
self._append_param(layer)
self._append_param(output_port_mask)
self._append_param(speed)
@safe_add
def add_output_start(self, output_port_mask,
layer=USB_CHAIN_LAYER_MASTER):
"""Starts the motor(s) described by the output_port_mask
parameter.
"""
self._msg.append(Opcode.OUTPUT_START)
self._append_param(layer)
self._append_param(output_port_mask)
@safe_add
def add_output_polarity(self, output_port_mask,
polarity_type,
layer=USB_CHAIN_LAYER_MASTER):
"""Sets the polarity for the motor(s) described by the
output_port_mask parameter.
"""
self._msg.append(Opcode.OUTPUT_POLARITY)
self._append_param(layer)
self._append_param(output_port_mask)
self._append_param(polarity_type)
@safe_add
def add_output_read(self, output_port, layer=USB_CHAIN_LAYER_MASTER):
"""Reads the speed and tacho pulses for the given output_port and
returns them as a tuple in the form (SPEED, TACHO_PULSES).
"""
self._msg.append(Opcode.OUTPUT_READ)
self._append_param(layer)
self._append_param(OUTPUT_CHANNEL_TO_INDEX[output_port])
self._global_params_types.append(self._REPLY_TUPLE_OPEN_TOKEN)
self._append_reply_param(DataFormat.DATA8)
self._append_reply_param(DataFormat.DATA32)
self._global_params_types.append(self._REPLY_TUPLE_CLOSE_TOKEN)
@safe_add
def add_output_ready(self, output_port_mask,
layer=USB_CHAIN_LAYER_MASTER):
"""Waits for the outputs in the output_port_mask to report that
they are ready before executing the next opcode. For example, if two
consecutive motor commands are used with the same OutputPort putting
this opcode between them ensures that the first command finishes
before the second one is started.
"""
self._msg.append(Opcode.OUTPUT_READY)
self._append_param(layer)
self._append_param(output_port_mask)
@safe_add
def add_output_position(self, output_port_mask,
position,
layer=USB_CHAIN_LAYER_MASTER):
"""Sets the position of the specied OutputPort(s)."""
self._msg.append(Opcode.OUTPUT_POSITION)
self._append_param(layer)
self._append_param(output_port_mask)
self._append_param(position, ParamType.LC4)
@safe_add
def add_output_step_power(self, output_port_mask,
power,
ramp_up_steps,
steps,
ramp_down_steps,
stop_type,
layer=USB_CHAIN_LAYER_MASTER):
"""Ramps up the power for the motor(s) described by the
output_port_mask, holds for steps, and then ramps down. It is not
necessary to call add_output_start in addition to this opcode.
NOTE: The EV3 will NOT wait for this operation to complete before
executing the next opcode unless add_output_ready is used.
"""
        self._msg.append(Opcode.OUTPUT_STEP_POWER)
self._append_param(layer)
self._append_param(output_port_mask)
self._append_param(power)
self._append_param(ramp_up_steps, ParamType.LC4)
self._append_param(steps, ParamType.LC4)
self._append_param(ramp_down_steps, ParamType.LC4)
self._append_param(stop_type)
@safe_add
def add_output_time_power(self, output_port_mask,
power,
ramp_up_ms,
time_ms,
ramp_down_ms,
stop_type,
layer=USB_CHAIN_LAYER_MASTER):
"""Ramps up the power for the motor(s) described by the
output_port_mask, holds for time_ms, and then ramps down. It is not
necessary to call add_output_start in addition to this opcode.
NOTE: The EV3 will NOT wait for this operation to complete before
executing the next opcode unless add_output_ready is used.
"""
        self._msg.append(Opcode.OUTPUT_TIME_POWER)
self._append_param(layer)
self._append_param(output_port_mask)
self._append_param(power)
self._append_param(ramp_up_ms, ParamType.LC4)
self._append_param(time_ms, ParamType.LC4)
self._append_param(ramp_down_ms, ParamType.LC4)
self._append_param(stop_type)
@safe_add
def add_output_step_speed(self, output_port_mask,
speed,
ramp_up_steps,
steps,
ramp_down_steps,
stop_type,
layer=USB_CHAIN_LAYER_MASTER):
"""Ramps up the power for the motor(s) described by the
output_port_mask, holds for steps, and then ramps down. It is not
necessary to call add_output_start in addition to this opcode.
NOTE: The EV3 will NOT wait for this operation to complete before
executing the next opcode unless add_output_ready is used.
"""
        self._msg.append(Opcode.OUTPUT_STEP_SPEED)
self._append_param(layer)
self._append_param(output_port_mask)
self._append_param(speed)
self._append_param(ramp_up_steps, ParamType.LC4)
self._append_param(steps, ParamType.LC4)
self._append_param(ramp_down_steps, ParamType.LC4)
self._append_param(stop_type)
@safe_add
def add_output_time_speed(self, output_port_mask,
speed,
ramp_up_ms,
time_ms,
ramp_down_ms,
stop_type,
layer=USB_CHAIN_LAYER_MASTER):
"""Ramps up the power for the motor(s) described by the
output_port_mask, holds for time_ms, and then ramps down. It is not
necessary to call add_output_start in addition to this opcode.
NOTE: The EV3 will NOT wait for this operation to complete before
executing the next opcode unless add_output_ready is used.
"""
        self._msg.append(Opcode.OUTPUT_TIME_SPEED)
self._append_param(layer)
self._append_param(output_port_mask)
self._append_param(speed)
self._append_param(ramp_up_ms, ParamType.LC4)
self._append_param(time_ms, ParamType.LC4)
self._append_param(ramp_down_ms, ParamType.LC4)
self._append_param(stop_type)
@safe_add
def add_output_step_sync(self, output_port_mask,
speed,
turn_ratio,
step,
stop_type,
layer=USB_CHAIN_LAYER_MASTER):
"""Sets the speed for the two given motors in the following fashion:
[-200, -101]: Turn right with right motor running in reverse
[-100, -1]: Turn right with right motor slowed
0: Both motors in sync in the same direction
[1, 100]: Turn left with left motor slowed
[101, 200]: Turn left with left motor running in reverse
It is not necessary to call add_output_start in addition to this opcode.
NOTE: The EV3 will NOT wait for this operation to complete before
executing the next opcode unless add_output_ready is used.
"""
        self._msg.append(Opcode.OUTPUT_STEP_SYNC)
self._append_param(layer)
self._append_param(output_port_mask)
self._append_param(speed)
self._append_param(turn_ratio, ParamType.LC2)
self._append_param(step, ParamType.LC4)
self._append_param(stop_type)
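    # Usage sketch (illustrative only; assumes `brick` is an already-opened
    # ev3 connection object accepted by send()):
    #
    #   cmd = DirectCommand()
    #   cmd.add_output_step_sync(OutputPort.PORT_B | OutputPort.PORT_C,
    #                            50,              # speed
    #                            0,               # turn_ratio: run in sync
    #                            720,             # step (tacho counts)
    #                            StopType.BRAKE)
    #   cmd.add_output_ready(OutputPort.PORT_B | OutputPort.PORT_C)
    #   cmd.send(brick)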
@safe_add
def add_output_time_sync(self, output_port_mask,
speed,
turn_ratio,
time,
stop_type,
layer=USB_CHAIN_LAYER_MASTER):
"""Sets the speed for the two given motors in the following fashion:
[-200, -101]: Turn right with right motor running in reverse
[-100, -1]: Turn right with right motor slowed
0: Both motors in sync in the same direction
[1, 100]: Turn left with left motor slowed
[101, 200]: Turn left with left motor running in reverse
It is not necessary to call add_output_start in addition to this opcode.
NOTE: The EV3 will NOT wait for this operation to complete before
executing the next opcode unless add_output_ready is used.
"""
        self._msg.append(Opcode.OUTPUT_TIME_SYNC)
self._append_param(layer)
self._append_param(output_port_mask)
self._append_param(speed)
self._append_param(turn_ratio, ParamType.LC2)
self._append_param(time, ParamType.LC4)
self._append_param(stop_type)
@safe_add
def add_output_clr_count(self, output_port_mask,
layer=USB_CHAIN_LAYER_MASTER):
"""Clears the tacho count for the given OutputPort(s) when in sensor
mode.
"""
        self._msg.append(Opcode.OUTPUT_CLR_COUNT)
self._append_param(layer)
self._append_param(output_port_mask)
@safe_add
def add_output_get_count(self, output_port,
layer=USB_CHAIN_LAYER_MASTER):
"""Returns the tacho count for the given OutputPort when in sensor
mode.
"""
        self._msg.append(Opcode.OUTPUT_GET_COUNT)
self._append_param(layer)
self._append_param(OUTPUT_CHANNEL_TO_INDEX[output_port])
self._append_reply_param(DataFormat.DATA32)
@safe_add
def add_set_leds(self, led_pattern):
"""Sets the LEDs on the front of the brick to the specified item in
the LEDPattern enumeration.
"""
self._msg.append(Opcode.UI_WRITE)
self._msg.append(UIWriteSubcode.LED)
self._append_param(led_pattern)
def _parse_reply(self, buf):
result = []
index = 0
if (ReplyType.DIRECT_REPLY_ERROR == buf[0]):
raise DirectCommandError('The DirectCommand failed.')
if (self._global_params_byte_count != (len(buf) - 1)):
            raise DirectCommandError('The data returned by the command ' +
                                     'did not match the expected size.')
# The items in the reply are grouped into tuples. Each tuple represents
# the reply to a command that returns multiple values.
sub_tuple = None
for item in self._global_params_types:
value = None
length = 0
if (self._REPLY_TUPLE_OPEN_TOKEN == item):
sub_tuple = []
continue
elif (self._REPLY_TUPLE_CLOSE_TOKEN == item):
result.append(tuple(sub_tuple))
sub_tuple = None
continue
if (isinstance(item, tuple)):
value, length = self._parse_param(buf, (index + 1), *item)
else:
# Ensure that the alignment is correct.
data_len = DATA_FORMAT_LENS[item]
pad = (index % data_len)
if (0 != pad):
pad = (data_len - pad)
index += pad
value, length = self._parse_param(buf, (index + 1), item)
if (sub_tuple is not None):
sub_tuple.append(value)
else:
result.append(value)
index += length
return tuple(result)
def _parse_param(self, buf, index, data_format, data_len=None):
value = None
length = 1
if (DataFormat.DATA_S == data_format):
value = message.parse_null_terminated_str(buf, index, data_len)
length = data_len
elif (DataFormat.HND == data_format):
value = (buf[index] & ~ParamType.HND)
elif (DataFormat.DATA_F == data_format):
value = message.parse_float(buf, index)
length = DATA_FORMAT_LENS[DataFormat.DATA_F]
elif (DataFormat.BOOL == data_format):
value = bool(buf[index])
length = 1
else:
length = DATA_FORMAT_LENS[data_format]
if (1 == length):
value = buf[index]
elif (2 == length):
value = message.parse_u16(buf, index)
elif (4 == length):
value = message.parse_u32(buf, index)
else:
                raise DirectCommandError('Unexpected DataFormat: %d' %
                                         data_format)
return (value, length)
def _append_reply_param(self, reply_format):
"""Global parameters are stored in the tx buffer on the brick so
their values are returned in the message reply.
"""
data_len = None
if (not isinstance(reply_format, tuple)):
# Ensure that the alignment is correct.
data_len = DATA_FORMAT_LENS[reply_format]
pad = (self._global_params_byte_count % data_len)
if (pad):
pad = (data_len - pad)
self._global_params_byte_count += pad
else:
data_len = reply_format[1]
# Use as few bits as possible to save space in message buffer.
param_type = ParamType.GV1
if (0xFFFF < self._global_params_byte_count):
param_type = ParamType.GV4
elif (0xFF < self._global_params_byte_count):
param_type = ParamType.GV2
self._append_param(self._global_params_byte_count, param_type)
self._global_params_types.append(reply_format)
self._global_params_byte_count += data_len
def _allocate_local_param(self, data_format):
"""Local parameters are essentially stack variables so they are NOT
included in the reply from the brick. This function returns an index
that can be used to access a new local variable of the given DataFormat.
"""
# Ensure that the alignment is correct.
data_len = DATA_FORMAT_LENS[data_format]
pad = (self._local_params_byte_count % data_len)
if (pad):
pad = (data_len - pad)
self._local_params_byte_count += pad
# Use as few bits as possible to save space in message buffer.
param_type = ParamType.LV1
if (0xFFFF < self._local_params_byte_count):
param_type = ParamType.LV4
elif (0xFF < self._local_params_byte_count):
param_type = ParamType.LV2
index = self._local_params_byte_count
self._local_params_byte_count += data_len
return (index, param_type)
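    # Hedged illustration (added, not in the original file): a local variable
    # can be allocated and then referenced as an opcode parameter, e.g.
    #
    #     index, param_type = self._allocate_local_param(DataFormat.DATA32)
    #     self._append_param(index, param_type)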
def _append_local_constant(self, val):
""""Appends an immediate value as a local constant."""
param_type = None
if (isinstance(val, int)):
num_bits = int.bit_length(val)
if (num_bits > 16):
param_type = ParamType.LC4
elif (num_bits > 8):
param_type = ParamType.LC2
elif (num_bits > 6):
param_type = ParamType.LC1
else:
param_type = ParamType.LC0
elif (isinstance(val, float)):
param_type = ParamType.FLOAT
elif (isinstance(val, str)):
param_type = ParamType.LCS
else:
raise NotImplementedError('Unknown local constant type.')
self._append_param(val, param_type)
def _append_param(self, val, param_type=ParamType.LC1):
"""All parameters need to be prefixed with their type so the VM knows
        how to interpret the following data.
"""
if (ParamType.PRIMPAR_LABEL == param_type):
raise NotImplementedError('ParamType.PRIMPAR_LABEL')
elif (ParamType.LCS == param_type):
self._msg.append(param_type)
message.append_str(self._msg, val)
elif (ParamType.LC0 == param_type):
self._msg.append(ParamType.LC0 | (0x3F & val))
elif (ParamType.HND == param_type):
self._msg.append(ParamType.HND | val)
elif (ParamType.ADR == param_type):
self._msg.append(ParamType.ADR | val)
elif (ParamType.GV0 == param_type):
self._msg.append(ParamType.GV0 | (0x1F & val))
elif (ParamType.FLOAT == param_type):
self._msg.append(ParamType.LC4)
message.append_float(self._msg, val)
else:
length = PARAM_TYPE_LENS[param_type]
self._msg.append(param_type)
if (1 == length):
message.append_u8(self._msg, val)
elif (2 == length):
message.append_u16(self._msg, val)
elif (4 == length):
message.append_u32(self._msg, val)
else:
raise DirectCommandError('Unexpected ParamType:' +
' %d' % param_type)
|
inductivekickback/ev3
|
ev3/direct_command.py
|
Python
|
mit
| 72,705 | 0.011361 |
from vectores_oo import Vector
x = input('vector U componente X= ')
y = input('vector U componente Y= ')
U = Vector(x,y)
m = input('vector V magnitud= ')
a = input('vector V angulo= ')
V = Vector(m=m, a=a)
E = input('Escalar= ')
print "U=%s" % U
print "V=%s" % V
print 'UxE=%s' % U.x_escalar(E)
print 'VxE=%s' % V.x_escalar(E)
print 'U+V=%s' % U.Suma(V)
print 'U.V=%s' % U.ProductoPunto(V)
print '|UxV|=%s' % U.Modulo_ProductoCruz(V)
|
rgarcia-herrera/vectores
|
vectores.py
|
Python
|
gpl-3.0
| 446 | 0.006726 |
# -*- coding: utf-8 -*-
import os
import pygame
from pygame.locals import *
class Sprite(pygame.sprite.Sprite):
def __init__(self,SpriteName):
pygame.sprite.Sprite.__init__(self)
self.Name = SpriteName
self.rect = 0
self.image = 0
def getRect(self):
return self.rect
def getImg(self):
return self.image
def load_image(self, name, colorkey=None):
#fullname = os.path.join('data', 'images')
fullname = name + '.png'
try:
image = pygame.image.load(fullname)
except pygame.error, message:
print 'Cannot load image:', fullname
raise SystemExit, message
image = image.convert()
if colorkey is not None:
            if colorkey == -1:
colorkey = image.get_at((0,0))
image.set_colorkey(colorkey, RLEACCEL)
return image, image.get_rect()
class spritesheet(object):
def __init__(self, filename):
try:
self.sheet = pygame.image.load(filename).convert()
except pygame.error, message:
print 'Unable to load spritesheet image:', filename
raise SystemExit, message
# Load a specific image from a specific rectangle
def image_at(self, rectangle, colorkey = None):
"Loads image from x,y,x+offset,y+offset"
rect = pygame.Rect(rectangle)
image = pygame.Surface(rect.size).convert()
image.blit(self.sheet, (0, 0), rect)
if colorkey is not None:
            if colorkey == -1:
colorkey = image.get_at((0,0))
image.set_colorkey(colorkey, pygame.RLEACCEL)
return image, rect
# Load a whole bunch of images and return them as a list
    def images_at(self, rects, colorkey = None):
        "Loads multiple images, supply a list of coordinates"
        return [self.image_at(rect, colorkey) for rect in rects]
# Load a whole strip of images
def load_strip(self, rect, image_count, colorkey = None):
"Loads a strip of images and returns them as a list"
tups = [(rect[0]+rect[2]*x, rect[1], rect[2], rect[3])
for x in range(image_count)]
return self.images_at(tups, colorkey)
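# Hedged usage sketch (added for illustration; 'sheet.png' and the rectangles
# are made-up placeholders, and a pygame display must already be initialized):
#
#     ss = spritesheet('sheet.png')
#     image, rect = ss.image_at((0, 0, 32, 32), colorkey=-1)
#     frames = ss.images_at([(0, 0, 32, 32), (32, 0, 32, 32)], colorkey=-1)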
|
vtungn/HackaPanzer
|
Sprite.py
|
Python
|
mit
| 2,212 | 0.007233 |
from validx import Dict, List
from .protos import DataObject
from .palette import Palette
from .utils.parser import BinaryParser
from .utils.validator import UInt8
from .utils.types import Remappings, Remapping
class PaletteMapping(DataObject):
__slots__ = (
"colors",
"remaps",
)
schema = Dict({"colors": Palette.schema, "remaps": List(List(UInt8))})
def __init__(self):
self.colors: Palette = Palette()
self.remaps: Remappings = []
def remap(self, remap_id: int) -> Palette:
return self.colors.remap(self.remaps[remap_id])
def read(self, parser: BinaryParser):
self.colors = Palette().read(parser)
for k in range(0, 19):
remap: Remapping = []
for m in range(0, 256):
remap.append(parser.get_uint8())
self.remaps.append(remap)
return self
def write(self, parser):
self.colors.write(parser)
for k in range(0, 19):
for m in range(0, 256):
parser.put_uint8(self.remaps[k][m])
def serialize(self) -> dict:
return {"colors": self.colors.serialize(), "remaps": self.remaps}
def unserialize(self, data: dict):
self.colors = Palette().unserialize(data["colors"])
self.remaps = data["remaps"]
return self
|
omf2097/pyomftools
|
omftools/pyshadowdive/palette_mapping.py
|
Python
|
mit
| 1,335 | 0 |
from pyroute2.netlink import nlmsg
class errmsg(nlmsg):
'''
Custom message type
Error ersatz-message
'''
fields = (('code', 'i'), )
|
craneworks/python-pyroute2
|
pyroute2/netlink/rtnl/errmsg.py
|
Python
|
apache-2.0
| 155 | 0 |
import random
import src
class Chemical(src.items.Item):
type = "Chemical"
def __init__(self):
super().__init__(display=src.canvas.displayChars.fireCrystals)
self.name = "chemical"
self.composition = b"cccccggggg"
def apply(self, character):
import hashlib
results = []
counter = 0
while 1:
tmp = random.choice(["mix", "shift"])
if tmp == "mix":
self.mix(character)
elif tmp == "switch":
self.mix(character)
elif tmp == "shift":
self.shift()
test = hashlib.sha256(self.composition[0:9])
character.addMessage(counter)
result = int(test.digest()[-1])
result2 = int(test.digest()[-2])
if result < 15:
character.addMessage(test.digest())
character.addMessage(result)
character.addMessage(result2)
break
counter += 1
# character.addMessage(results)
def shift(self):
self.composition = self.composition[1:] + self.composition[0:1]
def mix(self, character):
part1 = self.composition[0:5]
part2 = self.composition[5:10]
self.composition = (
part1[0:1]
+ part2[0:1]
+ part1[1:2]
+ part2[1:2]
+ part1[2:3]
+ part2[2:3]
+ part1[3:4]
+ part2[3:4]
+ part1[4:5]
+ part2[4:5]
)
src.items.addType(Chemical)
|
MarxMustermann/OfMiceAndMechs
|
src/itemFolder/obsolete/chemical.py
|
Python
|
gpl-3.0
| 1,571 | 0 |
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""
ZMQ example using python3's asyncio
Bitcoin should be started with the command line arguments:
GuldenD -testnet -daemon \
        -zmqpubrawblock=tcp://127.0.0.1:28332 \
-zmqpubrawtx=tcp://127.0.0.1:28332 \
-zmqpubhashtx=tcp://127.0.0.1:28332 \
-zmqpubhashblock=tcp://127.0.0.1:28332
We use the asyncio library here. `self.handle()` installs itself as a
future at the end of the function. Since it never returns with the event
loop having an empty stack of futures, this creates an infinite loop. An
alternative is to wrap the contents of `handle` inside `while True`.
The `@asyncio.coroutine` decorator and the `yield from` syntax found here
were introduced in python 3.4 and have been deprecated in favor of the `async`
and `await` keywords respectively.
A blocking example using python 2.7 can be obtained from the git history:
https://github.com/bitcoin/bitcoin/blob/37a7fe9e440b83e2364d5498931253937abe9294/contrib/zmq/zmq_sub.py
"""
import binascii
import asyncio
import zmq
import zmq.asyncio
import signal
import struct
import sys
if not (sys.version_info.major >= 3 and sys.version_info.minor >= 4):
print("This example only works with Python 3.4 and greater")
exit(1)
port = 28332
class ZMQHandler():
def __init__(self):
self.loop = zmq.asyncio.install()
self.zmqContext = zmq.asyncio.Context()
self.zmqSubSocket = self.zmqContext.socket(zmq.SUB)
self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "hashblock")
self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "hashtx")
self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "rawblock")
self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "rawtx")
self.zmqSubSocket.connect("tcp://127.0.0.1:%i" % port)
@asyncio.coroutine
def handle(self) :
msg = yield from self.zmqSubSocket.recv_multipart()
topic = msg[0]
body = msg[1]
sequence = "Unknown"
if len(msg[-1]) == 4:
msgSequence = struct.unpack('<I', msg[-1])[-1]
sequence = str(msgSequence)
if topic == b"hashblock":
print('- HASH BLOCK ('+sequence+') -')
print(binascii.hexlify(body))
elif topic == b"hashtx":
print('- HASH TX ('+sequence+') -')
print(binascii.hexlify(body))
elif topic == b"rawblock":
print('- RAW BLOCK HEADER ('+sequence+') -')
print(binascii.hexlify(body[:80]))
elif topic == b"rawtx":
print('- RAW TX ('+sequence+') -')
print(binascii.hexlify(body))
# schedule ourselves to receive the next message
asyncio.ensure_future(self.handle())
def start(self):
self.loop.add_signal_handler(signal.SIGINT, self.stop)
self.loop.create_task(self.handle())
self.loop.run_forever()
def stop(self):
self.loop.stop()
self.zmqContext.destroy()
daemon = ZMQHandler()
daemon.start()
|
nlgcoin/guldencoin-official
|
contrib/zmq/zmq_sub3.4.py
|
Python
|
mit
| 3,273 | 0.001833 |
# Copyright 2014, Doug Wiegley, A10 Networks.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
from __future__ import unicode_literals
from acos_client import errors as acos_errors
from acos_client.v21 import base
class BasePersistence(base.BaseV21):
def __init__(self, client):
super(BasePersistence, self).__init__(client)
self.prefix = "slb.template.%s_persistence" % self.pers_type
def get(self, name, **kwargs):
return self._post(("%s.search" % self.prefix), {'name': name},
**kwargs)
def exists(self, name, **kwargs):
try:
self.get(name, **kwargs)
return True
except acos_errors.NotFound:
return False
def create(self, name, **kwargs):
self._post(("%s.create" % self.prefix), self.get_params(name),
**kwargs)
def delete(self, name, **kwargs):
self._post(("%s.delete" % self.prefix), {'name': name}, **kwargs)
class CookiePersistence(BasePersistence):
def __init__(self, client):
self.pers_type = 'cookie'
super(CookiePersistence, self).__init__(client)
def get_params(self, name):
return {
"cookie_persistence_template": {
"name": name
}
}
class SourceIpPersistence(BasePersistence):
def __init__(self, client):
self.pers_type = 'src_ip'
super(SourceIpPersistence, self).__init__(client)
def get_params(self, name):
return {
"src_ip_persistence_template": {
"name": name
}
}
|
mdurrant-b3/acos-client
|
acos_client/v21/slb/template/persistence.py
|
Python
|
apache-2.0
| 2,180 | 0 |
# tsuserver3, an Attorney Online server
#
# Copyright (C) 2016 argoneus <argoneuscze@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import ipaddress
import json
import yaml
from server.exceptions import ServerError
class BanManager:
def __init__(self):
self.bans = {}
self.load_banlist()
self.hdid_exempt = {}
self.load_hdidexceptions()
def load_banlist(self):
try:
with open('storage/banlist.json', 'r') as banlist_file:
self.bans = json.load(banlist_file)
except FileNotFoundError:
with open('storage/banlist.json', 'w') as poll_list_file:
json.dump({}, poll_list_file)
def write_banlist(self):
with open('storage/banlist.json', 'w') as banlist_file:
json.dump(self.bans, banlist_file)
def add_ban(self, ip):
try:
x = len(ip)
except AttributeError:
            raise ServerError('Argument must be a 12-digit number.')
if x == 12:
self.bans[ip] = True
self.write_banlist()
def remove_ban(self, client, ip):
try:
try:
int(ip)
except ValueError:
ipaddress.ip_address(ip)
ip = client.server.get_ipid(ip)
except ValueError:
if not len(ip) == 12:
                raise ServerError('Argument must be an IP address or a 12-digit number.')
del self.bans[ip]
self.write_banlist()
def is_banned(self, ipid):
try:
return self.bans[ipid]
except KeyError:
return False
def load_hdidexceptions(self):
with open('config/hdid_exceptions.yaml', 'r', encoding='utf-8') as hdid:
self.hdid_exempt = yaml.load(hdid)
|
AttorneyOnlineVidya/tsuserver3
|
server/ban_manager.py
|
Python
|
agpl-3.0
| 2,392 | 0.001254 |
"""
mbed SDK
Copyright (c) 2011-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os, tempfile
from os.path import join, exists, basename
from shutil import copytree, rmtree
from workspace_tools.utils import mkdir
from workspace_tools.export import uvision4, codesourcery, codered, gccarm, ds5_5, iar, emblocks, coide, kds
from workspace_tools.export.exporters import zip_working_directory_and_clean_up, OldLibrariesException
from workspace_tools.targets import EXPORT_MAP
EXPORTERS = {
'uvision': uvision4.Uvision4,
'lpcxpresso': codered.CodeRed,
'codesourcery': codesourcery.CodeSourcery,
'gcc_arm': gccarm.GccArm,
'ds5_5': ds5_5.DS5_5,
'iar': iar.IAREmbeddedWorkbench,
'emblocks' : emblocks.IntermediateFile,
'coide' : coide.CoIDE,
'kds' : kds.KDS,
}
ERROR_MESSAGE_UNSUPPORTED_TOOLCHAIN = """
Sorry, the target %s is not currently supported on the %s toolchain.
Please refer to <a href='/handbook/Exporting-to-offline-toolchains' target='_blank'>Exporting to offline toolchains</a> for more information.
"""
ERROR_MESSAGE_NOT_EXPORT_LIBS = """
To export this project please <a href='http://mbed.org/compiler/?import=http://mbed.org/users/mbed_official/code/mbed-export/k&mode=lib' target='_blank'>import the export version of the mbed library</a>.
"""
def online_build_url_resolver(url):
# TODO: Retrieve the path and name of an online library build URL
return {'path':'', 'name':''}
def export(project_path, project_name, ide, target, destination='/tmp/',
tempdir=None, clean=True, extra_symbols=None, build_url_resolver=online_build_url_resolver):
# Convention: we are using capitals for toolchain and target names
if target is not None:
target = target.upper()
if tempdir is None:
tempdir = tempfile.mkdtemp()
if ide is None:
# Simply copy everything, no project files to be generated
for d in ['src', 'lib']:
os.system("cp -r %s/* %s" % (join(project_path, d), tempdir))
report = {'success': True}
else:
report = {'success': False}
if ide not in EXPORTERS:
report['errormsg'] = "Unsupported toolchain"
else:
Exporter = EXPORTERS[ide]
target = EXPORT_MAP.get(target, target)
if target not in Exporter.TARGETS:
report['errormsg'] = ERROR_MESSAGE_UNSUPPORTED_TOOLCHAIN % (target, ide)
else:
try:
exporter = Exporter(target, tempdir, project_name, build_url_resolver, extra_symbols=extra_symbols)
exporter.scan_and_copy_resources(project_path, tempdir)
exporter.generate()
report['success'] = True
except OldLibrariesException, e:
report['errormsg'] = ERROR_MESSAGE_NOT_EXPORT_LIBS
zip_path = None
if report['success']:
# add readme file to every offline export.
        open(os.path.join(tempdir, 'README.html'),'w').write('<meta http-equiv="refresh" content="0; url=http://developer.mbed.org/handbook/ExportToOfflineToolchain#%s#%s"/>'% (target,ide))
zip_path = zip_working_directory_and_clean_up(tempdir, destination, project_name, clean)
return zip_path, report
###############################################################################
# Generate project folders following the online conventions
###############################################################################
def copy_tree(src, dst, clean=True):
if exists(dst):
if clean:
rmtree(dst)
else:
return
copytree(src, dst)
def setup_user_prj(user_dir, prj_path, lib_paths=None):
"""
Setup a project with the same directory structure of the mbed online IDE
"""
mkdir(user_dir)
# Project Path
copy_tree(prj_path, join(user_dir, "src"))
# Project Libraries
user_lib = join(user_dir, "lib")
mkdir(user_lib)
if lib_paths is not None:
for lib_path in lib_paths:
copy_tree(lib_path, join(user_lib, basename(lib_path)))
|
Willem23/mbed
|
workspace_tools/export/__init__.py
|
Python
|
apache-2.0
| 4,602 | 0.004129 |
# -*- coding: UTF-8 -*-
# Copyright 2015-2021 Rumma & Ko Ltd
# License: GNU Affero General Public License v3 (see file COPYING for details)
"""
Defines a set of user roles and fills
:class:`lino.modlib.users.choicelists.UserTypes`.
This is used as the :attr:`user_types_module
<lino.core.site.Site.user_types_module>` for :ref:`noi`.
"""
from django.utils.translation import gettext_lazy as _
from lino.modlib.office.roles import OfficeStaff, OfficeUser
from lino.modlib.users.roles import Helper
# from lino.modlib.comments.roles import CommentsReader
from lino.modlib.comments.roles import CommentsUser, CommentsStaff, PrivateCommentsReader, CommentsReader
from lino.core.roles import SiteUser, SiteAdmin
from lino_xl.lib.excerpts.roles import ExcerptsUser, ExcerptsStaff
from lino_xl.lib.contacts.roles import ContactsUser, ContactsStaff
from lino_xl.lib.courses.roles import CoursesUser
from lino_xl.lib.tickets.roles import Reporter, Searcher, Triager, TicketsStaff
from lino_xl.lib.working.roles import Worker
from lino_xl.lib.cal.roles import CalendarReader
from lino_xl.lib.votes.roles import VotesStaff, VotesUser
from lino_xl.lib.products.roles import ProductsStaff
from lino_xl.lib.ledger.roles import LedgerStaff
from lino.modlib.users.choicelists import UserTypes
class Customer(SiteUser, OfficeUser, VotesUser, Searcher, Reporter, CommentsUser):
"""
A **Customer** is somebody who uses our software and may report
tickets, but won't work on them. Able to comment and view tickets on sites
where they are contact people. Unable to see any client data other than orgs
where they are a contact person and themselves.
"""
pass
class Contributor(Customer, Searcher, Helper, Worker, ExcerptsUser, CoursesUser):
"""
A **Contributor** is somebody who works on and see tickets of sites they are team members of.
"""
pass
class Developer(Contributor, ContactsUser, Triager, ExcerptsStaff, CommentsStaff, TicketsStaff, PrivateCommentsReader):
"""
A **Developer** is a trusted user who has signed an NDA, has access to client contacts.
Is able to make service reports as well as manage tickets.
"""
pass
class SiteAdmin(SiteAdmin, Developer, OfficeStaff, VotesStaff, ContactsStaff, CommentsStaff, ProductsStaff, LedgerStaff):
"""
Can do everything.
"""
# class Anonymous(CommentsReader, CalendarReader):
class Anonymous(CalendarReader, CommentsReader, Searcher):
pass
UserTypes.clear()
add = UserTypes.add_item
add('000', _("Anonymous"), Anonymous, 'anonymous',
readonly=True, authenticated=False)
add('100', _("Customer"), Customer, 'customer user')
add('200', _("Contributor"), Contributor, 'contributor')
add('400', _("Developer"), Developer, 'developer')
add('900', _("Administrator"), SiteAdmin, 'admin')
# UserTypes.user = UserTypes.customer
# from lino.core.merge import MergeAction
# from lino.api import rt
# lib = rt.models
# for m in (lib.contacts.Company, ):
# m.define_action(merge_row=MergeAction(
# m, required_roles=set([ContactsStaff])))
|
lino-framework/noi
|
lino_noi/lib/noi/user_types.py
|
Python
|
bsd-2-clause
| 3,072 | 0.002604 |
##
# Copyright 2009-2015 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for SuiteSparse, implemented as an easyblock
@author: Stijn De Weirdt (Ghent University)
@author: Dries Verdegem (Ghent University)
@author: Kenneth Hoste (Ghent University)
@author: Pieter De Baets (Ghent University)
@author: Jens Timmerman (Ghent University)
"""
import fileinput
import re
import os
import shutil
import sys
from distutils.version import LooseVersion
from easybuild.easyblocks.generic.configuremake import ConfigureMake
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.filetools import mkdir
from easybuild.tools.modules import get_software_root
class EB_SuiteSparse(ConfigureMake):
"""Support for building SuiteSparse."""
def __init__(self, *args, **kwargs):
"""Custom constructor for SuiteSparse easyblock, initialize custom class parameters."""
super(EB_SuiteSparse, self).__init__(*args, **kwargs)
self.config_name = 'UNKNOWN'
def configure_step(self):
"""Configure build by patching UFconfig.mk or SuiteSparse_config.mk."""
if LooseVersion(self.version) < LooseVersion('4.0'):
self.config_name = 'UFconfig'
else:
self.config_name = 'SuiteSparse_config'
cfgvars = {
'CC': os.getenv('MPICC'),
'CFLAGS': os.getenv('CFLAGS'),
'CXX': os.getenv('MPICXX'),
'F77': os.getenv('MPIF77'),
'F77FLAGS': os.getenv('F77FLAGS'),
'BLAS': os.getenv('LIBBLAS_MT'),
'LAPACK': os.getenv('LIBLAPACK_MT'),
}
metis = get_software_root('METIS')
parmetis = get_software_root('ParMETIS')
if parmetis:
metis_path = parmetis
metis_libs = ' '.join([
os.path.join(parmetis, 'lib', 'libparmetis.a'),
os.path.join(parmetis, 'lib', 'metis.a'),
])
elif metis:
metis_path = metis
metis_libs = os.path.join(metis, 'lib', 'metis.a')
else:
raise EasyBuildError("Neither METIS or ParMETIS module loaded.")
cfgvars.update({
'METIS_PATH': metis_path,
'METIS': metis_libs,
})
# patch file
fp = os.path.join(self.cfg['start_dir'], self.config_name, '%s.mk' % self.config_name)
try:
for line in fileinput.input(fp, inplace=1, backup='.orig'):
for (var, val) in cfgvars.items():
orig_line = line
                    # for variables in cfgvars, substitute the assignment lines
                    # in the file, whatever they are, with assignments to the
                    # values in cfgvars
line = re.sub(r"^\s*(%s\s*=\s*).*$" % var,
r"\1 %s # patched by EasyBuild" % val,
line)
if line != orig_line:
cfgvars.pop(var)
sys.stdout.write(line)
except IOError, err:
raise EasyBuildError("Failed to patch %s in: %s", fp, err)
# add remaining entries at the end
if cfgvars:
try:
f = open(fp, "a")
f.write("# lines below added automatically by EasyBuild")
for (var, val) in cfgvars.items():
f.write("%s = %s\n" % (var, val))
f.close()
except IOError, err:
raise EasyBuildError("Failed to complete %s: %s", fp, err)
def install_step(self):
"""Install by copying the contents of the builddir to the installdir (preserving permissions)"""
for x in os.listdir(self.cfg['start_dir']):
src = os.path.join(self.cfg['start_dir'], x)
dst = os.path.join(self.installdir, x)
try:
if os.path.isdir(src):
shutil.copytree(src, dst)
# symlink
# - dst/Lib to dst/lib
# - dst/Include to dst/include
for c in ['Lib', 'Include']:
nsrc = os.path.join(dst, c)
ndst = os.path.join(dst, c.lower())
if os.path.exists(nsrc):
os.symlink(nsrc, ndst)
else:
shutil.copy2(src, dst)
except OSError, err:
raise EasyBuildError("Copying src %s to dst %s failed: %s", src, dst, err)
# some extra symlinks are necessary for UMFPACK to work.
paths = [
os.path.join('AMD', 'include', 'amd.h'),
os.path.join('AMD' ,'include' ,'amd_internal.h'),
os.path.join(self.config_name, '%s.h' % self.config_name),
os.path.join('AMD', 'lib', 'libamd.a')
]
for path in paths:
src = os.path.join(self.installdir, path)
dn = path.split(os.path.sep)[-2]
fn = path.split(os.path.sep)[-1]
dstdir = os.path.join(self.installdir, 'UMFPACK', dn)
mkdir(dstdir)
if os.path.exists(src):
try:
os.symlink(src, os.path.join(dstdir, fn))
except OSError, err:
raise EasyBuildError("Failed to make symbolic link from %s to %s: %s", src, dst, err)
def make_module_req_guess(self):
"""Add config dir to CPATH so include file is found."""
guesses = super(EB_SuiteSparse, self).make_module_req_guess()
guesses.update({'CPATH': [self.config_name]})
return guesses
def sanity_check_step(self):
"""Custom sanity check for SuiteSparse."""
if LooseVersion(self.version) < LooseVersion('4.0'):
csparse_dir = 'CSparse3'
else:
csparse_dir = 'CSparse'
custom_paths = {
'files': [os.path.join(x, 'lib', 'lib%s.a' % x.lower()) for x in ["AMD", "BTF", "CAMD", "CCOLAMD", "CHOLMOD",
"COLAMD", "CXSparse", "KLU", "LDL", "RBio",
"SPQR", "UMFPACK"]] +
[os.path.join(csparse_dir, 'lib', 'libcsparse.a')],
'dirs': ["MATLAB_Tools"],
}
super(EB_SuiteSparse, self).sanity_check_step(custom_paths=custom_paths)
|
ULHPC/modules
|
easybuild/easybuild-easyblocks/easybuild/easyblocks/s/suitesparse.py
|
Python
|
mit
| 7,433 | 0.002422 |
from django.urls import path
from . import views
urlpatterns = [
path('start-ga', views.StartGA.as_view()),
path('stop-ga', views.StopGA.as_view()),
path('check-ga', views.CheckGA.as_view()),
]
|
seakers/daphne_brain
|
example_problem/explorer/urls.py
|
Python
|
mit
| 208 | 0 |
#!/usr/bin/env python
# Copyright 2014 The Swarming Authors. All rights reserved.
# Use of this source code is governed by the Apache v2.0 license that can be
# found in the LICENSE file.
import datetime
import sys
import unittest
import test_env
test_env.setup_test_env()
# From components/third_party/
import webtest
import webapp2
import stats
from components import stats_framework
from support import stats_framework_mock
from support import test_case
# pylint: disable=R0201
class Store(webapp2.RequestHandler):
def get(self):
"""Generates fake stats."""
stats.add_entry(stats.STORE, 2048, 'GS; inline')
self.response.write('Yay')
class Return(webapp2.RequestHandler):
def get(self):
"""Generates fake stats."""
stats.add_entry(stats.RETURN, 4096, 'memcache')
self.response.write('Yay')
class Lookup(webapp2.RequestHandler):
def get(self):
"""Generates fake stats."""
stats.add_entry(stats.LOOKUP, 200, 103)
self.response.write('Yay')
class Dupe(webapp2.RequestHandler):
def get(self):
"""Generates fake stats."""
stats.add_entry(stats.DUPE, 1024, 'inline')
self.response.write('Yay')
def to_str(now, delta):
"""Converts a datetime to unicode."""
now = now + datetime.timedelta(seconds=delta)
return unicode(now.strftime(stats.utils.DATETIME_FORMAT))
class StatsTest(test_case.TestCase, stats_framework_mock.MockMixIn):
def setUp(self):
super(StatsTest, self).setUp()
fake_routes = [
('/store', Store),
('/return', Return),
('/lookup', Lookup),
('/dupe', Dupe),
]
self.app = webtest.TestApp(
webapp2.WSGIApplication(fake_routes, debug=True),
extra_environ={'REMOTE_ADDR': 'fake-ip'})
stats_framework_mock.configure(self)
self.now = datetime.datetime(2010, 1, 2, 3, 4, 5, 6)
self.mock_now(self.now, 0)
def _test_handler(self, url, added_data):
stats_framework_mock.reset_timestamp(stats.STATS_HANDLER, self.now)
self.assertEqual('Yay', self.app.get(url).body)
self.assertEqual(1, len(list(stats_framework.yield_entries(None, None))))
self.mock_now(self.now, 60)
self.assertEqual(10, stats.generate_stats())
actual = stats_framework.get_stats(
stats.STATS_HANDLER, 'minutes', self.now, 1, True)
expected = [
{
'contains_lookups': 0,
'contains_requests': 0,
'downloads': 0,
'downloads_bytes': 0,
'failures': 0,
'key': datetime.datetime(2010, 1, 2, 3, 4),
'other_requests': 0,
'requests': 1,
'uploads': 0,
'uploads_bytes': 0,
},
]
expected[0].update(added_data)
self.assertEqual(expected, actual)
def test_store(self):
expected = {
'uploads': 1,
'uploads_bytes': 2048,
}
self._test_handler('/store', expected)
def test_return(self):
expected = {
'downloads': 1,
'downloads_bytes': 4096,
}
self._test_handler('/return', expected)
def test_lookup(self):
expected = {
'contains_lookups': 200,
'contains_requests': 1,
}
self._test_handler('/lookup', expected)
def test_dupe(self):
expected = {
'other_requests': 1,
}
self._test_handler('/dupe', expected)
if __name__ == '__main__':
if '-v' in sys.argv:
unittest.TestCase.maxDiff = None
unittest.main()
|
madecoste/swarming
|
appengine/isolate/tests/stats_test.py
|
Python
|
apache-2.0
| 3,366 | 0.006239 |
"""
Updated on 19.12.2009
@author: alen, pinda
"""
from django.conf import settings
from django.conf.urls.defaults import *
from socialregistration.utils import OpenID, OAuthClient, OAuthTwitter, OAuthLinkedin
urlpatterns = patterns('',
url('^setup/$', 'socialregistration.views.setup',
name='socialregistration_setup'),
url('^logout/$', 'socialregistration.views.logout',
name='social_logout'),
)
# Setup Facebook URLs if there's an API key specified
if getattr(settings, 'FACEBOOK_API_KEY', None) is not None:
urlpatterns = urlpatterns + patterns('',
url('^facebook/login/$', 'socialregistration.views.facebook_login',
name='facebook_login'),
url('^facebook/connect/$', 'socialregistration.views.facebook_connect',
name='facebook_connect'),
url('^xd_receiver.htm', 'django.views.generic.simple.direct_to_template',
{'template':'socialregistration/xd_receiver.html'},
name='facebook_xd_receiver'),
)
#Setup Twitter URLs if there's an API key specified
if getattr(settings, 'TWITTER_CONSUMER_KEY', None) is not None:
urlpatterns = urlpatterns + patterns('',
url('^twitter/redirect/$', 'socialregistration.views.oauth_redirect',
dict(
consumer_key=settings.TWITTER_CONSUMER_KEY,
secret_key=settings.TWITTER_CONSUMER_SECRET_KEY,
request_token_url=settings.TWITTER_REQUEST_TOKEN_URL,
access_token_url=settings.TWITTER_ACCESS_TOKEN_URL,
authorization_url=settings.TWITTER_AUTHORIZATION_URL,
callback_url='twitter_callback',
client_class = OAuthClient
),
name='twitter_redirect'),
url('^twitter/callback/$', 'socialregistration.views.oauth_callback',
dict(
consumer_key=settings.TWITTER_CONSUMER_KEY,
secret_key=settings.TWITTER_CONSUMER_SECRET_KEY,
request_token_url=settings.TWITTER_REQUEST_TOKEN_URL,
access_token_url=settings.TWITTER_ACCESS_TOKEN_URL,
authorization_url=settings.TWITTER_AUTHORIZATION_URL,
callback_url='twitter',
client_class = OAuthClient
),
name='twitter_callback'
),
url('^twitter/$', 'socialregistration.views.twitter', {'client_class': OAuthTwitter}, name='twitter'),
)
#Setup Linkedin URLs if there's an API key specified
if getattr(settings, 'LINKEDIN_CONSUMER_KEY', None) is not None:
urlpatterns = urlpatterns + patterns('',
url('^linkedin/redirect/$', 'socialregistration.views.oauth_redirect',
dict(
consumer_key=settings.LINKEDIN_CONSUMER_KEY,
secret_key=settings.LINKEDIN_CONSUMER_SECRET_KEY,
request_token_url=settings.LINKEDIN_REQUEST_TOKEN_URL,
access_token_url=settings.LINKEDIN_ACCESS_TOKEN_URL,
authorization_url=settings.LINKEDIN_AUTHORIZATION_URL,
callback_url='linkedin_callback',
client_class = OAuthClient
),
name='linkedin_redirect'),
url('^linkedin/callback/$', 'socialregistration.views.oauth_callback',
dict(
consumer_key=settings.LINKEDIN_CONSUMER_KEY,
secret_key=settings.LINKEDIN_CONSUMER_SECRET_KEY,
request_token_url=settings.LINKEDIN_REQUEST_TOKEN_URL,
access_token_url=settings.LINKEDIN_ACCESS_TOKEN_URL,
authorization_url=settings.LINKEDIN_AUTHORIZATION_URL,
callback_url='linkedin',
client_class = OAuthClient,
parameters={'oauth_verifier':''}
),
name='linkedin_callback'
),
url('^linkedin/$', 'socialregistration.views.linkedin', {'client_class': OAuthLinkedin}, name='linkedin'),
)
urlpatterns = urlpatterns + patterns('',
url('^openid/redirect/$', 'socialregistration.views.openid_redirect', { 'client_class': OpenID}, name='openid_redirect'),
url('^openid/callback/$', 'socialregistration.views.openid_callback', { 'client_class': OpenID}, name='openid_callback')
)
|
nvbn/django-socialregistration
|
socialregistration/urls.py
|
Python
|
mit
| 4,237 | 0.006372 |
# -*- coding: utf-8 -*-
"""
Compatibility across Python versions
"""
import sys
is_py2 = (sys.version_info[0] == 2)
is_py3 = (sys.version_info[0] == 3)
is_py33 = (sys.version_info[0] == 3 and sys.version_info[1] == 3)
try:
import simplejson as json
except (ImportError, SyntaxError):
import json
if is_py2:
from urllib import quote as urlquote, unquote as urlunquote
from urlparse import urlparse, parse_qs, urlsplit
def to_bytes(data):
"""若输入为unicode, 则转为utf-8编码的bytes;其他则原样返回。"""
if isinstance(data, unicode):
return data.encode('utf-8')
else:
return data
def to_string(data):
"""把输入转换为str对象"""
return to_bytes(data)
def to_unicode(data):
"""把输入转换为unicode,要求输入是unicode或者utf-8编码的bytes。"""
if isinstance(data, bytes):
return data.decode('utf-8')
else:
return data
def stringify(input):
if isinstance(input, dict):
return dict([(stringify(key), stringify(value)) for key,value in input.iteritems()])
elif isinstance(input, list):
return [stringify(element) for element in input]
elif isinstance(input, unicode):
return input.encode('utf-8')
else:
return input
builtin_str = str
bytes = str
str = unicode
elif is_py3:
from urllib.parse import quote as urlquote, unquote as urlunquote
from urllib.parse import urlparse, parse_qs, urlsplit
def to_bytes(data):
"""若输入为str(即unicode),则转为utf-8编码的bytes;其他则原样返回"""
if isinstance(data, str):
return data.encode(encoding='utf-8')
else:
return data
def to_string(data):
"""若输入为bytes,则认为是utf-8编码,并返回str"""
if isinstance(data, bytes):
return data.decode('utf-8')
else:
return data
def to_unicode(data):
"""把输入转换为unicode,要求输入是unicode或者utf-8编码的bytes。"""
return to_string(data)
def stringify(input):
return input
builtin_str = str
bytes = bytes
str = str
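# Illustrative self-checks (added as examples, not part of the original
# module); they hold on both Python 2 and Python 3.
if __name__ == '__main__':
    assert to_bytes(u'abc') == b'abc'
    assert to_string(b'abc') == 'abc'
    assert to_unicode(b'abc') == u'abc'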
|
aliyun/aliyun-oss-python-sdk
|
oss2/compat.py
|
Python
|
mit
| 2,283 | 0.001447 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import re
# Todo this mapping REALLY needs a non-hardcoded home
_slave_type = {
"bld-linux64-ec2": [
re.compile("^bld-centos6-hp-"),
re.compile("^bld-linux64-ec2-"),
re.compile("^bld-linux64-ix-"),
re.compile("^b-linux64-ix-"),
re.compile("^bld-linux64-spot-"),
re.compile("^b-linux64-hp-"),
re.compile("^try-linux64-spot-"),
],
"bld-lion-r5": [
re.compile("^bld-lion-r5-"),
],
"b-2008-ix": [
re.compile("^b-2008-ix-"),
re.compile("^b-2008-sm-"),
re.compile("^w64-ix-"),
],
"tst-linux64-ec2": [
re.compile("^talos-linux64-ix-"),
re.compile("^tst-linux64-spot-"),
re.compile("^tst-linux64-ec2-"),
],
"tst-linux32-ec2": [
re.compile("^talos-linux32-ix-"),
re.compile("^tst-linux32-spot-"),
re.compile("^tst-linux32-ec2-"),
],
"t-yosemite-r5": [
re.compile("^t-yosemite-r5-"),
],
"talos-mtnlion-r5": [
re.compile("^talos-mtnlion-r5-"),
],
"t-snow-r4": [
re.compile("^t-snow-r4-"),
re.compile("^talos-r4-snow-"),
],
"t-w732-ix": [
re.compile("^t-w732-ix-"),
],
"t-w864-ix": [
re.compile("^t-w864-ix-"),
],
"t-xp32-ix": [
re.compile("^t-xp32-ix-"),
],
}
_gpo_needed = [
"b-2008-ix", "t-w732-ix", "t-w864-ix", "t-xp32-ix"
]
def slave_patterns():
vals = []
ret = {}
for key, values in _slave_type.items():
for regex in values:
vals += [regex.pattern[1:] + "*"]
vals.sort()
ret[key] = vals
vals = []
return ret
def slave_to_slavetype(slave):
if slave in _slave_type.keys():
return slave
for key, values in _slave_type.items():
for regex in values:
if regex.match(slave):
return key
return None
def is_aws_serviceable(slave):
slaveclass = slave_to_slavetype(slave)
if 'ec2' in slaveclass:
return True
return False
def needs_gpo(slave):
slaveclass = slave_to_slavetype(slave)
if slaveclass in _gpo_needed:
return True
return False
def slave_filter(slave_class):
def _inner_slave_filter(item):
for i in _slave_type[slave_class]:
if i.match(item["name"]):
return True
return False # If we got here, no match
return _inner_slave_filter
def slavetype_to_awsprefix(slave_class):
if not is_aws_serviceable(slave_class):
raise ValueError("Unsupported Slave")
basic_slave_prefix = slave_to_slavetype(slave_class)
if basic_slave_prefix.startswith("bld"):
loan_prefix = basic_slave_prefix.replace("bld-", "dev-")
elif basic_slave_prefix.startswith("tst"):
loan_prefix = basic_slave_prefix
else:
raise ValueError("Unsure how to name this aws slave")
return loan_prefix
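if __name__ == "__main__":
    # Illustrative checks (added as examples; the slave names are made up but
    # match the regex patterns defined above).
    assert slave_to_slavetype("bld-linux64-spot-001") == "bld-linux64-ec2"
    assert slavetype_to_awsprefix("bld-linux64-ec2") == "dev-linux64-ec2"
    assert needs_gpo("t-w732-ix-002") is True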
|
Callek/build-relengapi-slaveloan
|
relengapi/blueprints/slaveloan/slave_mappings.py
|
Python
|
mpl-2.0
| 3,163 | 0 |
# Time: O(n)
# Space: O(1)
# Suppose you have a long flowerbed in which some of the plots are planted and some are not.
# However, flowers cannot be planted in adjacent plots - they would compete for water
# and both would die.
#
# Given a flowerbed (represented as an array containing 0 and 1,
# where 0 means empty and 1 means not empty), and a number n,
# return if n new flowers can be planted in it without violating the no-adjacent-flowers rule.
#
# Example 1:
# Input: flowerbed = [1,0,0,0,1], n = 1
# Output: True
# Example 2:
# Input: flowerbed = [1,0,0,0,1], n = 2
# Output: False
# Note:
# The input array won't violate no-adjacent-flowers rule.
# The input array size is in the range of [1, 20000].
# n is a non-negative integer which won't exceed the input array size.
class Solution(object):
def canPlaceFlowers(self, flowerbed, n):
"""
:type flowerbed: List[int]
:type n: int
:rtype: bool
"""
for i in xrange(len(flowerbed)):
if flowerbed[i] == 0 and (i == 0 or flowerbed[i-1] == 0) and \
(i == len(flowerbed)-1 or flowerbed[i+1] == 0):
flowerbed[i] = 1
n -= 1
if n <= 0:
return True
        return n <= 0
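if __name__ == '__main__':
    # Quick checks against the examples in the problem statement above
    # (added for illustration).
    assert Solution().canPlaceFlowers([1, 0, 0, 0, 1], 1) is True
    assert Solution().canPlaceFlowers([1, 0, 0, 0, 1], 2) is False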
|
yiwen-luo/LeetCode
|
Python/can-place-flowers.py
|
Python
|
mit
| 1,263 | 0.003167 |
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd.
# MIT License. See license.txt
from __future__ import unicode_literals
"""
Customize Form is a Single DocType used to mask the Property Setter
Thus providing a better UI from user perspective
"""
import webnotes
from webnotes.utils import cstr
class DocType:
def __init__(self, doc, doclist=[]):
self.doc, self.doclist = doc, doclist
self.doctype_properties = [
'search_fields',
'default_print_format',
'read_only_onload',
'allow_print',
'allow_email',
'allow_copy',
'allow_attach',
'max_attachments'
]
self.docfield_properties = [
'idx',
'label',
'fieldtype',
'fieldname',
'options',
'permlevel',
'width',
'print_width',
'reqd',
'in_filter',
'in_list_view',
'hidden',
'print_hide',
'report_hide',
'allow_on_submit',
'depends_on',
'description',
'default',
'name'
]
self.property_restrictions = {
'fieldtype': [['Currency', 'Float'], ['Small Text', 'Data'], ['Text', 'Text Editor', 'Code']],
}
self.forbidden_properties = ['idx']
def get(self):
"""
Gets DocFields applied with Property Setter customizations via Customize Form Field
"""
self.clear()
if self.doc.doc_type:
from webnotes.model.doc import addchild
for d in self.get_ref_doclist():
if d.doctype=='DocField':
new = addchild(self.doc, 'fields', 'Customize Form Field',
self.doclist)
self.set(
{
'list': self.docfield_properties,
'doc' : d,
'doc_to_set': new
}
)
elif d.doctype=='DocType':
self.set({ 'list': self.doctype_properties, 'doc': d })
def get_ref_doclist(self):
"""
* Gets doclist of type self.doc.doc_type
* Applies property setter properties on the doclist
* returns the modified doclist
"""
from webnotes.model.doctype import get
ref_doclist = get(self.doc.doc_type)
ref_doclist = webnotes.doclist([ref_doclist[0]]
+ ref_doclist.get({"parent": self.doc.doc_type}))
return ref_doclist
def clear(self):
"""
Clear fields in the doc
"""
# Clear table before adding new doctype's fields
self.doclist = self.doc.clear_table(self.doclist, 'fields')
self.set({ 'list': self.doctype_properties, 'value': None })
def set(self, args):
"""
Set a list of attributes of a doc to a value
or to attribute values of a doc passed
args can contain:
* list --> list of attributes to set
* doc_to_set --> defaults to self.doc
* value --> to set all attributes to one value eg. None
* doc --> copy attributes from doc to doc_to_set
"""
if not 'doc_to_set' in args:
args['doc_to_set'] = self.doc
if 'list' in args:
if 'value' in args:
for f in args['list']:
args['doc_to_set'].fields[f] = None
elif 'doc' in args:
for f in args['list']:
args['doc_to_set'].fields[f] = args['doc'].fields.get(f)
else:
webnotes.msgprint("Please specify args['list'] to set", raise_exception=1)
def post(self):
"""
Save diff between Customize Form Bean and DocType Bean as property setter entries
"""
if self.doc.doc_type:
from webnotes.model import doc
from core.doctype.doctype.doctype import validate_fields_for_doctype
this_doclist = webnotes.doclist([self.doc] + self.doclist)
ref_doclist = self.get_ref_doclist()
dt_doclist = doc.get('DocType', self.doc.doc_type)
# get a list of property setter docs
diff_list = self.diff(this_doclist, ref_doclist, dt_doclist)
self.set_properties(diff_list)
validate_fields_for_doctype(self.doc.doc_type)
webnotes.clear_cache(doctype=self.doc.doc_type)
webnotes.msgprint("Updated")
def diff(self, new_dl, ref_dl, dt_dl):
"""
Get difference between new_dl doclist and ref_dl doclist
then check how it differs from dt_dl i.e. default doclist
"""
import re
self.defaults = self.get_defaults()
diff_list = []
for new_d in new_dl:
for ref_d in ref_dl:
if ref_d.doctype == 'DocField' and new_d.name == ref_d.name:
for prop in self.docfield_properties:
# do not set forbidden properties like idx
if prop in self.forbidden_properties: continue
d = self.prepare_to_set(prop, new_d, ref_d, dt_dl)
if d: diff_list.append(d)
break
elif ref_d.doctype == 'DocType' and new_d.doctype == 'Customize Form':
for prop in self.doctype_properties:
d = self.prepare_to_set(prop, new_d, ref_d, dt_dl)
if d: diff_list.append(d)
break
return diff_list
def get_defaults(self):
"""
Get fieldtype and default value for properties of a field
"""
df_defaults = webnotes.conn.sql("""
SELECT fieldname, fieldtype, `default`, label
FROM `tabDocField`
WHERE parent='DocField' or parent='DocType'""", as_dict=1)
defaults = {}
for d in df_defaults:
defaults[d['fieldname']] = d
defaults['idx'] = {'fieldname' : 'idx', 'fieldtype' : 'Int', 'default' : 1, 'label' : 'idx'}
defaults['previous_field'] = {'fieldname' : 'previous_field', 'fieldtype' : 'Data', 'default' : None, 'label' : 'Previous Field'}
return defaults
def prepare_to_set(self, prop, new_d, ref_d, dt_doclist, delete=0):
"""
Prepares docs of property setter
sets delete property if it is required to be deleted
"""
# Check if property has changed compared to when it was loaded
if new_d.fields.get(prop) != ref_d.fields.get(prop) \
and not \
( \
new_d.fields.get(prop) in [None, 0] \
and ref_d.fields.get(prop) in [None, 0] \
) and not \
( \
new_d.fields.get(prop) in [None, ''] \
and ref_d.fields.get(prop) in [None, ''] \
):
#webnotes.msgprint("new: " + str(new_d.fields[prop]) + " | old: " + str(ref_d.fields[prop]))
# Check if the new property is same as that in original doctype
# If yes, we need to delete the property setter entry
for dt_d in dt_doclist:
if dt_d.name == ref_d.name \
and (new_d.fields.get(prop) == dt_d.fields.get(prop) \
or \
( \
new_d.fields.get(prop) in [None, 0] \
and dt_d.fields.get(prop) in [None, 0] \
) or \
( \
new_d.fields.get(prop) in [None, ''] \
and dt_d.fields.get(prop) in [None, ''] \
)):
delete = 1
break
value = new_d.fields.get(prop)
if prop in self.property_restrictions:
allow_change = False
for restrict_list in self.property_restrictions.get(prop):
if value in restrict_list and \
ref_d.fields.get(prop) in restrict_list:
allow_change = True
break
if not allow_change:
webnotes.msgprint("""\
You cannot change '%s' of '%s' from '%s' to '%s'.
%s can only be changed among %s.
<i>Ignoring this change and saving.</i>""" % \
(self.defaults.get(prop, {}).get("label") or prop,
new_d.fields.get("label") or new_d.fields.get("idx"),
ref_d.fields.get(prop), value,
self.defaults.get(prop, {}).get("label") or prop,
" -or- ".join([", ".join(r) for r in \
self.property_restrictions.get(prop)])))
return None
# If the above conditions are fulfilled,
# create a property setter doc, but dont save it yet.
from webnotes.model.doc import Document
d = Document('Property Setter')
d.doctype_or_field = ref_d.doctype=='DocField' and 'DocField' or 'DocType'
d.doc_type = self.doc.doc_type
d.field_name = ref_d.fieldname
d.property = prop
d.value = value
d.property_type = self.defaults[prop]['fieldtype']
#d.default_value = self.defaults[prop]['default']
if delete: d.delete = 1
if d.select_item:
d.select_item = self.remove_forbidden(d.select_item)
# return the property setter doc
return d
else: return None
def set_properties(self, ps_doclist):
"""
* Delete a property setter entry
+ if it already exists
+ if marked for deletion
* Save the property setter doc in the list
"""
for d in ps_doclist:
# Delete existing property setter entry
if not d.fields.get("field_name"):
webnotes.conn.sql("""
DELETE FROM `tabProperty Setter`
WHERE doc_type = %(doc_type)s
AND property = %(property)s""", d.fields)
else:
webnotes.conn.sql("""
DELETE FROM `tabProperty Setter`
WHERE doc_type = %(doc_type)s
AND field_name = %(field_name)s
AND property = %(property)s""", d.fields)
# Save the property setter doc if not marked for deletion i.e. delete=0
if not d.delete:
d.save(1)
def delete(self):
"""
Deletes all property setter entries for the selected doctype
and resets it to standard
"""
if self.doc.doc_type:
webnotes.conn.sql("""
DELETE FROM `tabProperty Setter`
WHERE doc_type = %s""", self.doc.doc_type)
webnotes.clear_cache(doctype=self.doc.doc_type)
self.get()
def remove_forbidden(self, string):
"""
Replace forbidden characters with a space
"""
forbidden = ['%', "'", '"', '#', '*', '?', '`']
		for f in forbidden:
			string = string.replace(f, ' ')
		return string
|
gangadhar-kadam/sapphite_lib
|
core/doctype/customize_form/customize_form.py
|
Python
|
mit
| 8,959 | 0.041299 |
"""
Copyright 2020 Google LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from typing import Callable, List
import tensorflow as tf
import tensorflow_hub as tfhub
from discretezoo import attack_setup
class EmbeddedCosineDistance:
"""EmbeddedCosineDistance calculates cosine distance in embedding space.
Attributes:
embeddings: A tensor containing an embedding vector for each index in vocab.
<float32>[vocab_size, embedding_dimension]
"""
def __init__(self, embeddings: tf.Tensor):
"""Initializes EmbeddedCosineDistance with embeddings.
Arguments:
embeddings: A tensor containing an embedding for each index in vocab.
<float32>[vocab_size, embedding_dimension]
"""
assert embeddings.ndim == 2, (
'Embeddings are expected to have 2 dimensions'
f' but you passed a tensor with {embeddings.ndim}.')
self._embeddings = embeddings
@tf.function
def __call__(self, original_sentences: tf.Tensor,
adversarial_sentences: tf.Tensor) -> tf.Tensor:
r"""Calculates cosine distance between reduced embedded sentences.
Sentences are embedded and then reduced by summing them up.
Cosine similarity is then given by \frac{v_{original} \cdot v_{adversarial}}
{|v_{original}| \times |v_{adversarial|}}.
Cosine distance is defined as 1 - similarity.
Arguments:
original_sentences: A tensor of token indices in the original sentences.
<int32>[batch_size, sentence_length]
adversarial_sentences: A tensor of token indices in the adversarial
sentences. <int32>[batch_size, sentence_length]
Returns:
A tensor <float32>[batch_size, 1] of cosine distances between original and
adversarial sentences. Return values are in the range [0, 2]
https://www.tensorflow.org/api_docs/python/tf/keras/losses/cosine_similarity
is used internally, which computes negative similarity, and 1 is added.
"""
original_sentences_embedded = tf.nn.embedding_lookup(
self._embeddings, original_sentences)
adversarial_sentences_embedded = tf.nn.embedding_lookup(
self._embeddings, adversarial_sentences)
original_sentences_reduced = tf.math.reduce_sum(original_sentences_embedded,
axis=1)
adversarial_sentences_reduced = tf.math.reduce_sum(
adversarial_sentences_embedded, axis=1)
# Unintuitively, tf.keras.losses.cosine_similarity returns negative cosine
# similarity. Adding 1 means that two vectors will have 0 as a minimum
# distance instead of -1, which is helpful in later loss computation.
distance = 1 + tf.keras.losses.cosine_similarity(
original_sentences_reduced, adversarial_sentences_reduced)
return tf.expand_dims(distance, 1)
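# Hedged usage sketch (added for illustration; the tensor names and shapes are
# placeholders, not part of the original module):
#
#     embeddings = tf.random.uniform((vocab_size, 300))
#     distance_fn = EmbeddedCosineDistance(embeddings)
#     distances = distance_fn(original_ids, adversarial_ids)  # [batch_size, 1]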
class EmbeddedEuclideanDistance:
"""EmbeddedEuclideanDistance calculates euclidean distance in embedding space.
Attributes:
embeddings: A tensor containing an embedding vector for each index in vocab.
<float32>[vocab_size, embedding_dimension]
reduce_mean: This is a boolean flag that signals how embedded sentences will
be reduced to a single vector. True for mean, False for sum.
"""
def __init__(self, embeddings: tf.Tensor, reduce_mean: bool = True):
"""Initializes EmbeddedEuclideanDistance with embeddings and reduction type.
Arguments:
embeddings: A tensor containing an embedding for each index in vocab.
<float32>[vocab_size, embedding_dimension]
reduce_mean: This boolean flag signals how embedded sentences will be
reduced to a single vector. True for mean, False for sum.
"""
assert embeddings.ndim == 2, (
'Embeddings are expected to have 2 dimensions'
f' but you passed a tensor with {embeddings.ndim}.')
self._embeddings = embeddings
self._reduce_mean = reduce_mean
@tf.function
def __call__(self, original_sentences: tf.Tensor,
adversarial_sentences: tf.Tensor) -> tf.Tensor:
"""Calculates euclidean distances between reduced embedded sentences.
Arguments:
original_sentences: A tensor of token indices in the original sentences.
<int32>[batch_size, sentence_length]
adversarial_sentences: A tensor of token indices in the adversarial
sentences. <int32>[batch_size, sentence_length]
Returns:
A tensor <float32>[batch_size, 1] of euclidean distances between original
and adversarial sentences.
"""
original_sentences_embedded = tf.nn.embedding_lookup(
self._embeddings, original_sentences)
adversarial_sentences_embedded = tf.nn.embedding_lookup(
self._embeddings, adversarial_sentences)
if self._reduce_mean:
original_sentences_reduced = tf.math.reduce_mean(
original_sentences_embedded, axis=1)
adversarial_sentences_reduced = tf.math.reduce_mean(
adversarial_sentences_embedded, axis=1)
else:
original_sentences_reduced = tf.math.reduce_sum(
original_sentences_embedded, axis=1)
adversarial_sentences_reduced = tf.math.reduce_sum(
adversarial_sentences_embedded, axis=1)
difference_vector = tf.math.subtract(original_sentences_reduced,
adversarial_sentences_reduced)
distance = tf.norm(difference_vector, axis=-1, keepdims=True)
return distance
class UniversalSentenceEncoderDistance:
"""Wraps the Universal Sentence Encoder and converts tensors to strings.
The Universal Sentence Encoder expects python strings as input and includes
its own tokenizer. The attack functions on tensors, so we need to convert
vocab indices to tokens and then detokenize the text back into strings.
Attributes:
detokenizer: Detokenizer accepts a list of tokens, joins them by whitespace,
and then undoes the regexes used to tokenize text.
vocab: A list of tokens in the vocabulary.
padding_index: An integer indicating which vocab entry is the padding token.
encoder: This is a tensorflow hub module corresponding to the Universal
Sentence Encoder.
"""
def __init__(
self,
detokenizer: Callable[[List[str]], str],
vocab: List[str],
padding_index: int = 0,
use_tfhub_url:
str = 'https://tfhub.dev/google/universal-sentence-encoder-large/5'):
"""Initializes the UniversalSentenceEncoderDistance class.
Arguments:
detokenizer: Detokenizer accepts a list of tokens, joins them by whitespace,
and then undoes the regexes used to tokenize text.
vocab: A list of tokens in the vocabulary.
padding_index: An integer indicating which vocab entry is the padding token.
use_tfhub_url: The URL to the Universal Sentence Encoder on the Tensorflow
Hub. The default value corresponds to the Transformer based model, but
Deep Averaging Networks and multilingual versions are also available.
"""
self._vocab = vocab
self._padding_index = padding_index
self._detokenizer = detokenizer
self._encoder = tfhub.load(use_tfhub_url)
def __call__(self, original_sentences: tf.Tensor,
adversarial_sentences: tf.Tensor) -> tf.Tensor:
"""Converts tensors of vocabulary indices to strings and calls the encoder.
Arguments:
original_sentences: A tensor of token indices in the original sentences.
<int32>[batch_size, sentence_length]
adversarial_sentences: A tensor of token indices in the adversarial
sentences. <int32>[batch_size, sentence_length]
Returns:
A tensor <float32>[batch_size, 1] of cosine distances between original
and adversarial sentences encoded by the Universal Sentence Encoder.
"""
original_sentence_strings = attack_setup.tensor_to_strings(
original_sentences, self._vocab, self._detokenizer, self._padding_index)
adversarial_sentence_strings = attack_setup.tensor_to_strings(
adversarial_sentences, self._vocab, self._detokenizer,
self._padding_index)
original_sentence_embedding = self._encoder(original_sentence_strings)
adversarial_sentence_embedding = self._encoder(adversarial_sentence_strings)
    # Unintuitively, tf.keras.losses.cosine_similarity returns the *negative*
    # cosine similarity, so its value lies in [-1, 1] with -1 for identical
    # vectors. Adding 1 shifts the range to [0, 2], so two identical sentences
    # get a distance of 0 instead of -1, which is helpful in later loss
    # computation.
distance = 1 + tf.keras.losses.cosine_similarity(
original_sentence_embedding, adversarial_sentence_embedding)
return tf.expand_dims(distance, 1)
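# Illustrative sketch (not part of the original module): because
# tf.keras.losses.cosine_similarity returns the negative cosine similarity,
# the "1 +" shift above maps identical sentence embeddings to a distance of 0
# and orthogonal ones to 1. The vectors below are hypothetical.
def _example_cosine_distance_shift():
  a = tf.constant([[1., 0.]])
  b = tf.constant([[0., 1.]])
  identical = 1 + tf.keras.losses.cosine_similarity(a, a)   # ~[0.]
  orthogonal = 1 + tf.keras.losses.cosine_similarity(a, b)  # ~[1.]
  return identical, orthogonal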
|
googleinterns/adversarial-0th-order-optimization
|
discretezoo/loss/semantic_similarity.py
|
Python
|
apache-2.0
| 9,072 | 0.003307 |
# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from solar.orchestration.runner import app
from solar.system_log.operations import set_error, move_to_commited
__all__ = ['error_logitem', 'commit_logitem']
@app.task(name='error_logitem')
def error_logitem(task_uuid):
return set_error(task_uuid.rsplit(':', 1)[-1])
@app.task(name='commit_logitem')
def commit_logitem(task_uuid):
return move_to_commited(task_uuid.rsplit(':', 1)[-1])
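# Illustrative note (not part of the original module): both tasks receive a
# task identifier of the (hypothetical) form "<task name>:<log item uuid>" and
# forward only the trailing uuid, e.g. "run:1234".rsplit(':', 1)[-1] == "1234".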
|
torgartor21/solar
|
solar/solar/system_log/tasks.py
|
Python
|
apache-2.0
| 1,007 | 0 |
import numpy
import data_generator
class Generator:
def __init__(self):
self.combinations = data_generator.generateAllByteToAx25DataCombinations()
self.frameSeparatorOne = data_generator.calculateNewAx25DataFromOldImpl(1, 0, 0x7E, False)
self.frameSeparatorZero = data_generator.calculateNewAx25DataFromOldImpl(0, 0, 0x7E, False)
def generateDefinitionsHeader(self, filePath):
text = '''#pragma once
#include <stdint.h>
typedef struct AX25EncodedData_t
{
uint16_t dataGivenThatPreviousBitWasZero;
uint8_t dataBitsCount;
uint8_t newNumberOfOnes;
} AX25EncodedData;
//
// To figure out what those values mean see ax25-utils Python project,
// code_generation_v2.py file
//
extern const AX25EncodedData byte2ax25EncodedData[];
#define FRAME_SEPARATOR_GIVEN_THAT_PREVIOUS_BIT_WAS_ZERO ''' + str(self.frameSeparatorZero[0]) + '''
#define FRAME_SEPARATOR_GIVEN_THAT_PREVIOUS_BIT_WAS_ONE ''' + str(self.frameSeparatorOne[0]) + '''
#define GET_VALUE_IF_LAST_BIT_IS_ONE(pAx25EncodedData) \\
((~(pAx25EncodedData)->dataGivenThatPreviousBitWasZero) & ((1 << ((pAx25EncodedData)->dataBitsCount)) - 1))
#define GET_VALUE_IF_LAST_BIT_IS_ZERO(pAx25EncodedData) \\
((pAx25EncodedData)->dataGivenThatPreviousBitWasZero)
#define GET_LAST_BIT(value, pAx25EncodedData) \\
    (((value) >> ((pAx25EncodedData)->dataBitsCount - 1)) & 1)
#define GENERATE_AX25_TABLE_INDEX(currentNumberOfOnes, byte) \\
(((currentNumberOfOnes) << 8) + (byte))
#define GET_AX25_ENCODED_DATA_FOR_BYTE(currentNumberOfOnes, byte) \\
&byte2ax25EncodedData[GENERATE_AX25_TABLE_INDEX((currentNumberOfOnes), (byte))];
'''
with open(filePath, 'w+') as f:
f.write(text)
def generateSource(self, filePath):
text = '''#include "ax25.h"
const AX25EncodedData byte2ax25EncodedData[] =
{
'''
i = 0
for (oldNumberOfOnes, byte2Encode, newDataGiventLastBitWasZero, newLastBitGiventLastBitWasZero, newDataGiventLastBitWasOne, newLastBitGiventLastBitWasOne, newDataNumberOfBits, newNumberOfOnes) in self.combinations:
text += ' {' + '{:>3}'.format(newDataGiventLastBitWasZero) + ', ' + '{:>2}'.format(newDataNumberOfBits) + ', ' + '{:>2}'.format(newNumberOfOnes) + '}, ' + \
'// idx = ' + '{:0>4}'.format(i) + ', oldNumberOfOnes = ' + str(oldNumberOfOnes) + ', byte2Encode = ' + '{:0>3}'.format(byte2Encode) + '\n'
i += 1
text += '''};
'''
with open(filePath, 'w+') as f:
f.write(text)
generator = Generator()
generator.generateDefinitionsHeader("../com-telemetry/src/aprs/generated/ax25.h")
generator.generateSource("../com-telemetry/src/aprs/generated/ax25.c")
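# Illustrative note (not part of the original script): the generated C table is
# a flattened two-dimensional lookup indexed by GENERATE_AX25_TABLE_INDEX, i.e.
# (currentNumberOfOnes << 8) + byte; for example 2 preceding ones and byte 0x7E
# land at index (2 << 8) + 126 == 638.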
|
far-far-away-science/hab-v2
|
software/ax25-utils/code_generation_v2.py
|
Python
|
gpl-3.0
| 2,693 | 0.004827 |
from documents.models import Document
from categories.models import Category
import os
def move_doc(doc_id, cat_id):
doc = Document.objects.get(pk=int(doc_id))
old_cat = doc.refer_category
new_cat = Category.objects.get(pk=int(cat_id))
for p in doc.pages.all():
cmd = "mv " + p.get_absolute_path() + " " + new_cat.get_absolute_path() + "/"
os.system(cmd)
doc.refer_category = new_cat
doc.save()
old_cat.documents.remove(doc)
new_cat.documents.add(doc)
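# Illustrative sketch (not part of the original script): each page move above
# shells out to "mv <page path> <new category path>/". The same move could be
# written without a shell, e.g. with shutil; the helper below is hypothetical.
def _example_move_page(page_path, category_dir):
    import shutil
    shutil.move(page_path, category_dir)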
|
Foxugly/MyTaxAccountant
|
scripts/move_document.py
|
Python
|
agpl-3.0
| 504 | 0.003968 |
from hypothesis import given
from hypothesis.strategies import binary
from msgpack import packb
from mitmproxy.contentviews import msgpack
from . import full_eval
def msgpack_encode(content):
return packb(content, use_bin_type=True)
def test_parse_msgpack():
assert msgpack.parse_msgpack(msgpack_encode({"foo": 1}))
assert msgpack.parse_msgpack(b"aoesuteoahu") is msgpack.PARSE_ERROR
assert msgpack.parse_msgpack(msgpack_encode({"foo": "\xe4\xb8\x96\xe7\x95\x8c"}))
def test_format_msgpack():
assert list(msgpack.format_msgpack({
"data": [
"str",
42,
True,
False,
None,
{},
[]
]
}))
def test_view_msgpack():
v = full_eval(msgpack.ViewMsgPack())
assert v(msgpack_encode({}))
assert not v(b"aoesuteoahu")
assert v(msgpack_encode([1, 2, 3, 4, 5]))
assert v(msgpack_encode({"foo": 3}))
assert v(msgpack_encode({"foo": True, "nullvalue": None}))
@given(binary())
def test_view_msgpack_doesnt_crash(data):
v = full_eval(msgpack.ViewMsgPack())
v(data)
def test_render_priority():
v = msgpack.ViewMsgPack()
assert v.render_priority(b"data", content_type="application/msgpack")
assert v.render_priority(b"data", content_type="application/x-msgpack")
assert not v.render_priority(b"data", content_type="text/plain")
|
mitmproxy/mitmproxy
|
test/mitmproxy/contentviews/test_msgpack.py
|
Python
|
mit
| 1,392 | 0.000718 |
# vim: set ts=2 expandtab:
'''
Module: read.py
Desc: unpack data from binary files
Author: John O'Neil
Email: oneil.john@gmail.com
DATE: Thursday, March 13th 2014
'''
import struct
DEBUG = False
class EOFError(Exception):
""" Custom exception raised when we read to EOF
"""
pass
def split_buffer(length, buf):
  '''split the provided list after the first length elements
'''
#print "split-buffer******"
a = []
if len(buf)<length:
return (a, buf)
#print "length of buf is" + str(len(buf))
for i in range(length):
a.append(buf.pop(0))
return (a,buf)
def dump_list(list):
print(u' '.join(u'{:#x}'.format(x) for x in list))
def ucb(f):
'''Read unsigned char byte from binary file
'''
if isinstance(f, list):
if len(f) < 1:
raise EOFError()
b, f = split_buffer(1, f)
return struct.unpack('B', ''.join(b))[0]
else:
_f = f.read(1)
if len(_f) < 1:
raise EOFError()
return struct.unpack('B', _f)[0]
def usb(f):
'''Read unsigned short from binary file
'''
if isinstance(f, list):
n, f = split_buffer(2, f)
return struct.unpack('>H', ''.join(n))[0]
else:
_f = f.read(2)
if DEBUG:
print("usb: " + hex(ord(_f[0])) + ":" + hex(ord(_f[1])))
if len(_f) < 2:
raise EOFError()
return struct.unpack('>H', _f)[0]
def ui3b(f):
  '''Read 3-byte (24 bit) big-endian unsigned integer from binary file
'''
if isinstance(f, list):
n, f = split_buffer(3, f)
return struct.unpack('>I', '\x00'+ ''.join(n))[0]
else:
_f = f.read(3)
if len(_f) < 3:
raise EOFError()
return struct.unpack('>I', '\x00'+ (_f))[0]
def uib(f):
  '''Read 4-byte (32 bit) big-endian unsigned integer from binary file
  '''
if isinstance(f, list):
n, f = split_buffer(4, f)
return struct.unpack('>L', ''.join(n))[0]
else:
_f = f.read(4)
if len(_f) < 4:
raise EOFError()
return struct.unpack('>L', _f)[0]
def ulb(f):
'''Read unsigned long long (64bit integer) from binary file
'''
if isinstance(f, list):
n, f = split_buffer(8, f)
return struct.unpack('>Q', ''.join(n))[0]
else:
_f = f.read(8)
if len(_f) < 8:
raise EOFError()
return struct.unpack('>Q', _f)[0]
def buffer(f, size):
'''Read N bytes from either a file or list
'''
if isinstance(f, list):
n, f = split_buffer(size, f)
return ''.join(n)
else:
_f = f.read(size)
if len(_f) < size:
raise EOFError()
return _f
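# Illustrative usage sketch (not part of the original module): reading a
# big-endian unsigned short and then a single byte from an in-memory buffer.
# The byte values below are hypothetical.
def _example_read_usage():
  import io
  f = io.BytesIO(b'\x01\x02\x03')
  first = usb(f)   # 0x0102 == 258
  second = ucb(f)  # 0x03 == 3
  return first, second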
|
johnoneil/arib
|
arib/read.py
|
Python
|
apache-2.0
| 2,368 | 0.022382 |
# Copyright 2013 NEC Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from tempest.api.compute import base
from tempest import exceptions
from tempest import test
class TenantUsagesNegativeTestJSON(base.BaseV2ComputeAdminTest):
@classmethod
def resource_setup(cls):
super(TenantUsagesNegativeTestJSON, cls).resource_setup()
cls.adm_client = cls.os_adm.tenant_usages_client
cls.client = cls.os.tenant_usages_client
cls.identity_client = cls._get_identity_admin_client()
now = datetime.datetime.now()
cls.start = cls._parse_strtime(now - datetime.timedelta(days=1))
cls.end = cls._parse_strtime(now + datetime.timedelta(days=1))
@classmethod
def _parse_strtime(cls, at):
# Returns formatted datetime
return at.strftime('%Y-%m-%dT%H:%M:%S.%f')
@test.attr(type=['negative', 'gate'])
def test_get_usage_tenant_with_empty_tenant_id(self):
# Get usage for a specific tenant empty
params = {'start': self.start,
'end': self.end}
self.assertRaises(exceptions.NotFound,
self.adm_client.get_tenant_usage,
'', params)
@test.attr(type=['negative', 'gate'])
def test_get_usage_tenant_with_invalid_date(self):
# Get usage for tenant with invalid date
params = {'start': self.end,
'end': self.start}
self.assertRaises(exceptions.BadRequest,
self.adm_client.get_tenant_usage,
self.client.tenant_id, params)
@test.attr(type=['negative', 'gate'])
def test_list_usage_all_tenants_with_non_admin_user(self):
# Get usage for all tenants with non admin user
params = {'start': self.start,
'end': self.end,
'detailed': int(bool(True))}
self.assertRaises(exceptions.Unauthorized,
self.client.list_tenant_usages, params)
class TenantUsagesNegativeTestXML(TenantUsagesNegativeTestJSON):
_interface = 'xml'
|
queria/my-tempest
|
tempest/api/compute/admin/test_simple_tenant_usage_negative.py
|
Python
|
apache-2.0
| 2,656 | 0 |
"""Tests the surveytools.footprint module."""
import numpy as np
from surveytools.footprint import VphasFootprint, VphasOffset
def test_vphas_offset_coordinates():
"""Test the offset pattern, which is expected to equal
ra -0, dec +0 arcsec for the "a" pointing;
ra -588, dec +660 arcsec for the "b" pointing;
ra -300, dec +350 arcsec for the "c" pointing.
"""
vf = VphasFootprint()
np.testing.assert_almost_equal(vf.offsets['0001a']['ra'], 97.2192513369)
np.testing.assert_almost_equal(vf.offsets['0001a']['dec'], 0)
np.testing.assert_almost_equal(vf.offsets['0001b']['ra'], 97.2192513369 - 588/3600.)
np.testing.assert_almost_equal(vf.offsets['0001b']['dec'], 0 + 660/3600.)
np.testing.assert_almost_equal(vf.offsets['0001c']['ra'], 97.2192513369 - 300/3600.)
np.testing.assert_almost_equal(vf.offsets['0001c']['dec'], 0 + 350/3600.)
def test_vphas_offset_pattern():
vf = VphasFootprint()
for field in ['0500', '1000', '2000']:
ra, dec = vf.offsets[field+'a']['ra'], vf.offsets[field+'a']['dec']
np.testing.assert_almost_equal(vf.offsets[field+'b']['ra'],
ra - (588/3600.) / np.cos(np.radians(dec)))
np.testing.assert_almost_equal(vf.offsets[field+'b']['dec'],
dec + 660/3600.)
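# Illustrative note (not part of the original module): 588 arcsec is
# 588 / 3600 ~= 0.1633 deg of RA at dec = 0. At a hypothetical dec of -30 deg
# the same on-sky offset requires 0.1633 / cos(30 deg) ~= 0.1886 deg of RA,
# which is the cos(dec) correction applied in the assertions above.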
def test_vphas_filenames():
"""Ensure the right filename is returned for a given band/offset."""
assert VphasOffset('1122a').image_filenames['ha'] == 'o20120330_00032.fit'
assert VphasOffset('1122b').image_filenames['ha'] == 'o20120330_00034.fit'
assert VphasOffset('1122c').image_filenames['ha'] == 'o20120330_00033.fit'
assert VphasOffset('1842a').image_filenames['r'] == 'o20130314_00061.fit'
assert VphasOffset('1842b').image_filenames['r'] == 'o20130314_00062.fit'
assert VphasOffset('0765a').image_filenames['g'] == 'o20130413_00024.fit'
assert VphasOffset('0765b').image_filenames['g'] == 'o20130413_00026.fit'
assert VphasOffset('0765c').image_filenames['g'] == 'o20130413_00025.fit'
if __name__ == '__main__':
test_vphas_filenames()
|
barentsen/surveytools
|
surveytools/tests/test_footprint.py
|
Python
|
mit
| 2,130 | 0.002817 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# This file is part of agora-tools.
# Copyright (C) 2014-2016 Agora Voting SL <agora@agoravoting.com>
# agora-tools is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License.
# agora-tools is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with agora-tools. If not, see <http://www.gnu.org/licenses/>.
import json
import csv
import os
import copy
import operator
import argparse
from datetime import datetime, timedelta
from utils.csvblocks import csv_to_blocks
from utils.json_serialize import serialize
def iget(d, key, default):
    '''
    Case-insensitive get.
    This function behaves like dict.get, except that the key does not need
    to match the dict key exactly: before the real **get** we scan the dict
    keys for a case-insensitive match, so the key param may differ from the
    dict key in upper or lower case.
    :param d: the dict to search in
    :param key: the key to search for
    :param default: the default value to return if the key isn't in the dict
    '''
real_key = key
keyl = key.lower()
for k in d.keys():
if k.lower() == keyl:
real_key = k
return d.get(real_key, default)
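# Illustrative sketch (not part of the original script): iget matches keys
# case-insensitively, so a spreadsheet header typed as "Login Link on Home"
# still satisfies a lookup for "login link on home". The values below are
# hypothetical.
def _example_iget():
    d = {'Login Link on Home': 'TRUE'}
    assert iget(d, 'login link on home', False) == 'TRUE'
    assert iget(d, 'missing key', 'default') == 'default'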
BASE_ELECTION = {
"id": -1,
"title": "",
"description": "",
"layout": "",
"presentation": {
"share_text": [
{
"network": "Twitter",
"button_text": "",
"social_message": "I have just voted in election __URL__, you can too! #nvotes"
},
{
"network": "Facebook",
"button_text": "",
"social_message": "__URL__"
}
],
"theme": 'default',
"urls": [],
"theme_css": "",
"extra_options": {}
},
"end_date": "",
"start_date": "",
"real": True,
"questions": []
}
BASE_QUESTION = {
"description": "",
"layout": 'simple',
"max": 1,
"min": 0,
"num_winners": 1,
"title": "",
"randomize_answer_order": True,
"tally_type": "plurality-at-large",
"answer_total_votes_percentage": "over-total-votes",
"extra_options": {},
"answers": []
}
BASE_ANSWER = {
"id": -1,
"category": '',
"details": "",
"sort_order": -1,
"urls": [],
"text": ""
}
def parse_int(s):
return int(s)
def parse_list(s):
return s.split(",")
def parse_bool(s):
return s == "TRUE"
def parse_extra(q):
val = dict(
(key.replace("extra: ", ""), value)
for key, value in q.items() if key.startswith("extra: ")
)
if "success_screen__hide_download_ballot_ticket" in val:
val["success_screen__hide_download_ballot_ticket"] = parse_bool(
val["success_screen__hide_download_ballot_ticket"]
)
if "shuffle_category_list" in val:
val["shuffle_category_list"] = parse_list(val["shuffle_category_list"])
if "shuffle_categories" in val:
val["shuffle_categories"] = parse_bool(val["shuffle_categories"])
if "shuffle_all_options" in val:
val["shuffle_all_options"] = parse_bool(val["shuffle_all_options"])
if "select_all_category_clicks" in val:
val["select_all_category_clicks"] = parse_int(val["select_all_category_clicks"])
if "answer_group_columns_size" in val:
val["answer_group_columns_size"] = parse_int(val["answer_group_columns_size"])
if "answer_columns_size" in val:
val["answer_columns_size"] = parse_int(val["answer_columns_size"])
return val
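# Illustrative sketch (not part of the original script): parse_extra keeps only
# the "extra: " columns, strips the prefix and coerces the known keys. The
# input row below is hypothetical.
def _example_parse_extra():
    row = {
        'Title': 'ignored',
        'extra: shuffle_all_options': 'TRUE',
        'extra: select_all_category_clicks': '2',
    }
    # -> {'shuffle_all_options': True, 'select_all_category_clicks': 2}
    return parse_extra(row)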
def blocks_to_election(blocks, config, add_to_id=0):
'''
Parses a list of blocks into an election
'''
# convert blocks into a more convenient structure
election = blocks[0]['values']
blocks.pop(0)
questions = []
def get_answer_id(answer):
return answer['Id']
def get_description(answer):
return answer.get('Description', '').replace('\n', '<br/>')
def get_url(key, value):
if key in ['Gender', 'Tag', 'Support']:
return "https://agoravoting.com/api/%s/%s" % (key.lower(), value)
elif value.startswith('http://') or value.startswith('https://'):
return value.strip()
return key + value.strip()
for question, options in zip(blocks[0::2], blocks[1::2]):
q = question['values']
q['options'] = options['values']
data = {
"description": q.get("Description", ''),
"layout": q.get("Layout", 'simple'),
"max": int(q["Maximum choices"]),
"min": int(q["Minimum choices"]),
"num_winners": int(q["Number of winners"]),
"title": q["Title"],
"randomize_answer_order": parse_bool(q.get("Randomize options order", False)),
"tally_type": q.get("Voting system", "plurality-at-large"),
"answer_total_votes_percentage": q["Totals"],
"extra_options": parse_extra(q),
"answers": [
{
"id": int(get_answer_id(answer)),
"category": answer.get("Category", ''),
"details": get_description(answer),
"sort_order": index,
"urls": [
{
'title': url_key,
'url': get_url(url_key, url_val)
}
for url_key, url_val in answer.items()
if url_key in ['Image URL', 'URL', 'Gender', 'Tag', 'Support'] and\
len(url_val.strip()) > 0
],
"text": answer['Text'],
}
for answer, index in zip(q['options'], range(len(q['options'])))
if len("".join(answer.values()).strip()) > 0
]
}
# check answers
try:
assert len(data['answers']) == len(set(list(map(operator.itemgetter('text'), data['answers']))))
except Exception as e:
print("duplicated options in question '%s':" % q["Title"])
l = list(map(operator.itemgetter('text'), data['answers']))
print(set([x for x in l if l.count(x) > 1]))
raise e
data['max'] = min(data['max'], len(data['answers']))
data['num_winners'] = min(data['num_winners'], len(data['answers']))
for answ in data['answers']:
try:
assert answ['id'] == answ['sort_order']
except:
print(answ)
questions.append(data)
def get_def(dictionary, key, default_value):
if key not in dictionary or len(dictionary[key]) == 0:
return default_value
return dictionary[key]
start_date = datetime.strptime("10/10/2015 10:10", "%d/%m/%Y %H:%M")
if len(election["Start date time"]) > 0:
try:
start_date = datetime.strptime(election["Start date time"], "%d/%m/%Y %H:%M:%S")
except:
start_date = datetime.strptime(election["Start date time"], "%d/%m/%Y %H:%M")
ret = {
"id": int(election['Id']) + add_to_id,
"authorities": config['authorities'],
"director": config['director'],
"title": election['Title'],
"description": election['Description'],
"layout": election.get('Layout', ''),
"presentation": {
"share_text": [
{
"network": "Twitter",
"button_text": "",
"social_message": election.get('Share Text', '')
},
{
"network": "Facebook",
"button_text": "",
"social_message": "__URL__"
}
],
"theme": election.get('Theme', 'default'),
"urls": [],
"theme_css": "",
"extra_options": parse_extra(election),
"show_login_link_on_home": parse_bool(iget(election, 'login link on home', False)),
},
"end_date": (start_date + timedelta(hours=int(get_def(election, 'Duration in hours', '24')))).isoformat() + ".001",
"start_date": start_date.isoformat() + ".001",
"questions": questions,
"real": True
}
return ret
def form_to_elections(path, separator, config, add_to_id):
'''
Converts the google forms into election configurations
'''
election_funcs = {
"Título": lambda d: ["title", d],
"Descripción": lambda d: ["description", d],
"Comienzo": lambda d: ["start_date", datetime.strptime(d, "%m/%d/%Y %H:%M:%S").isoformat()+ ".001"],
"Final": lambda d: ["end_date", datetime.strptime(d, "%m/%d/%Y %H:%M:%S").isoformat()+ ".001"],
}
census_key = "Censo"
more_keys = {
"¿Más preguntas?": lambda v: "No" not in v
}
auth_method = config['authapi']['event_config']['auth_method']
question_options_key = "Opciones"
question_funcs = {
"Título": lambda d: ["title", d],
"Descripción": lambda d: ["description", d],
"Número de ganadores": lambda d: ["num_winners", int(d)],
"Número máximo de opciones": lambda d: ["max", int(d)],
"Número mínimo de opciones": lambda d: ["min", int(d)],
"Orden aleatorio": lambda d: ["randomize_answer_order", d == "Aleatorio"],
"Resultados": lambda d: ["answer_total_votes_percentage", "over-total-votes" if d == "Sobre votos totales" else "over-total-valid-votes"]
}
elections = []
base_election = copy.deepcopy(BASE_ELECTION)
base_election['director'] = config['director']
base_election['authorities'] = config['authorities']
with open(path, mode='r', encoding="utf-8", errors='strict') as f:
fcsv = csv.reader(f, delimiter=',', quotechar='"')
keys = fcsv.__next__()
for values in fcsv:
if len(values) == 0:
continue
question_num = -1
election = copy.deepcopy(base_election)
election['id'] = add_to_id + len(elections)
question = None
for key, value, index in zip(keys, values, range(len(values))):
if question_num == -1 and key not in more_keys.keys() and key in election_funcs.keys():
dest_key, dest_value = election_funcs[key](value)
election[dest_key] = dest_value
elif key == census_key:
if auth_method == "sms":
election['census'] = [{"tlf": item} for item in value.split("\n")]
else: # email
election['census'] = [{"email": item} for item in value.split("\n")]
question_num += 1
question = copy.deepcopy(BASE_QUESTION)
elif question_num >= 0 and key in question_funcs.keys():
dest_key, dest_value = question_funcs[key](value)
question[dest_key] = dest_value
elif question_num >= 0 and key == question_options_key:
options = value.strip().split("\n")
question['answers'] = [{
"id": opt_id,
"category": '',
"details": '',
"sort_order": opt_id,
"urls": [],
"text": opt
}
for opt, opt_id in zip(options, range(len(options)))]
elif question_num >= 0 and key in more_keys.keys():
question_num += 1
election['questions'].append(question)
question = copy.deepcopy(BASE_QUESTION)
if not more_keys[key](value):
elections.append(election)
break
return elections
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Converts a CSV into the json to create an election.')
parser.add_argument('-c', '--config-path', help='default config for the election')
parser.add_argument('-i', '--input-path', help='input file or directory')
parser.add_argument('-o', '--output-path', help='output file or directory')
parser.add_argument('-A', '--admin-format', help='use create format for agora-admin instead of agora-elections', action="store_true")
parser.add_argument('-a', '--add-to-id', type=int, help='add an int number to the id', default=0)
parser.add_argument(
'-f', '--format',
choices=['csv-blocks', 'tsv-blocks', 'csv-google-forms'],
default="csv-blocks",
help='output file or directory')
args = parser.parse_args()
if not os.access(args.input_path, os.R_OK):
print("can't read %s" % args.input_path)
exit(2)
if os.path.isdir(args.output_path) and not os.access(args.output_path, os.W_OK):
print("can't write to %s" % args.output_path)
exit(2)
if not os.access(args.config_path, os.R_OK):
print("can't read %s" % args.config_path)
exit(2)
config = None
with open(args.config_path, mode='r', encoding="utf-8", errors='strict') as f:
config = json.loads(f.read())
try:
if args.format == "csv-blocks" or args.format == "tsv-blocks":
separator = {
"csv-blocks": ",",
"tsv-blocks": "\t"
}[args.format]
extension = {
"csv-blocks": ".csv",
"tsv-blocks": ".tsv"
}[args.format]
if os.path.isdir(args.input_path):
if not os.path.exists(args.output_path):
os.makedirs(args.output_path)
i = 0
files = sorted([name for name in os.listdir(args.input_path)
if os.path.isfile(os.path.join(args.input_path, name)) and name.endswith(extension)])
for name in files:
print("importing %s" % name)
file_path = os.path.join(args.input_path, name)
blocks = csv_to_blocks(path=file_path, separator=separator)
election = blocks_to_election(blocks, config, args.add_to_id)
if str(election['id']) + extension != name:
print("WARNING: election id %i doesn't match filename %s" % (election['id'], name))
if not args.admin_format:
output_path = os.path.join(args.output_path, str(election['id']) + ".config.json")
else:
output_path = os.path.join(args.output_path, str(i) + ".json")
auth_config_path = os.path.join(args.output_path, str(i) + ".config.json")
auth_config = config['authapi']['event_config']
with open(auth_config_path, mode='w', encoding="utf-8", errors='strict') as f:
f.write(serialize(auth_config))
auth_census_path = os.path.join(args.output_path, str(i) + ".census.json")
census_config = config['authapi'].get('census_data', [])
with open(auth_census_path, mode='w', encoding="utf-8", errors='strict') as f:
f.write(serialize(census_config))
with open(output_path, mode='w', encoding="utf-8", errors='strict') as f:
f.write(serialize(election))
if config.get('agora_results_config', None) is not None:
if not args.admin_format:
results_conf_path = os.path.join(args.output_path, str(election['id']) + ".config.results.json")
else:
results_conf_path = os.path.join(args.output_path, str(i) + ".config.results.json")
with open(
results_conf_path,
mode='w',
encoding="utf-8",
errors='strict') as f:
f.write(serialize(
dict(
version="1.0",
pipes=config['agora_results_config']
)
))
i += 1
else:
blocks = csv_to_blocks(path=args.input_path, separator=separator)
election = blocks_to_election(blocks, config, args.add_to_id)
if str(election['id']) + extension != os.path.basename(args.input_path):
print("WARNING: election id %i doesn't match filename %s" % (election['id'], os.path.basename(args.input_path)))
with open(args.output_path, mode='w', encoding="utf-8", errors='strict') as f:
f.write(serialize(election))
else:
if not os.path.exists(args.output_path):
os.makedirs(args.output_path)
elif not os.path.isdir(args.output_path):
print("output path must be a directory")
exit(2)
elections = form_to_elections(path=args.input_path,
separator="\t",
config=config,
add_to_id=args.add_to_id)
for election in elections:
fpath = os.path.join(args.output_path, "%d.census.json" % election["id"])
with open(fpath, mode='w', encoding="utf-8", errors='strict') as f:
f.write(serialize(election['census']))
del election['census']
fpath = os.path.join(args.output_path, "%d.json" % election["id"])
with open(fpath, mode='w', encoding="utf-8", errors='strict') as f:
f.write(serialize(election))
fpath = os.path.join(args.output_path, "%d.config.json" % election["id"])
with open(fpath, mode='w', encoding="utf-8", errors='strict') as f:
f.write(serialize(config['authapi']['event_config']))
except:
print("malformed CSV")
import traceback
traceback.print_exc()
exit(3)
|
agoravoting/agora-tools
|
import_election_csv.py
|
Python
|
agpl-3.0
| 18,685 | 0.005516 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import base64
from StringIO import StringIO
from openerp.modules.module import get_module_resource
import openerp.modules.registry
from openerp.osv import osv
from openerp_sxw2rml import sxw2rml
class report_xml(osv.osv):
_inherit = 'ir.actions.report.xml'
def sxwtorml(self, cr, uid, file_sxw, file_type):
'''
        Convert a base64-encoded SXW or ODT report into its RML equivalent.
'''
sxwval = StringIO(base64.decodestring(file_sxw))
if file_type=='sxw':
fp = open(get_module_resource('base_report_designer','openerp_sxw2rml', 'normalized_oo2rml.xsl'),'rb')
if file_type=='odt':
fp = open(get_module_resource('base_report_designer','openerp_sxw2rml', 'normalized_odt2rml.xsl'),'rb')
return {'report_rml_content': str(sxw2rml(sxwval, xsl=fp.read()))}
def upload_report(self, cr, uid, report_id, file_sxw, file_type, context=None):
'''
Untested function
'''
sxwval = StringIO(base64.decodestring(file_sxw))
if file_type=='sxw':
fp = open(get_module_resource('base_report_designer','openerp_sxw2rml', 'normalized_oo2rml.xsl'),'rb')
if file_type=='odt':
fp = open(get_module_resource('base_report_designer','openerp_sxw2rml', 'normalized_odt2rml.xsl'),'rb')
report = self.pool['ir.actions.report.xml'].write(cr, uid, [report_id], {
'report_sxw_content': base64.decodestring(file_sxw),
'report_rml_content': str(sxw2rml(sxwval, xsl=fp.read())),
})
return True
def report_get(self, cr, uid, report_id, context=None):
if context is None:
context = {}
# skip osv.fields.sanitize_binary_value() because we want the raw bytes in all cases
context.update(bin_raw=True)
report = self.browse(cr, uid, report_id, context=context)
sxw_data = report.report_sxw_content
rml_data = report.report_rml_content
if isinstance(sxw_data, unicode):
sxw_data = sxw_data.encode("iso-8859-1", "replace")
if isinstance(rml_data, unicode):
rml_data = rml_data.encode("iso-8859-1", "replace")
return {
'file_type' : report.report_type,
'report_sxw_content': sxw_data and base64.encodestring(sxw_data) or False,
'report_rml_content': rml_data and base64.encodestring(rml_data) or False
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
jmesteve/saas3
|
openerp/addons/base_report_designer/base_report_designer.py
|
Python
|
agpl-3.0
| 3,471 | 0.007491 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import random
from datetime import datetime
from werkzeug import cached_property
from flask import url_for, Markup
from flask.ext.sqlalchemy import BaseQuery
from flask.ext.principal import Permission, UserNeed, Denial
from newsmeme.extensions import db
from newsmeme.helpers import slugify, domain, markdown
from newsmeme.permissions import auth, moderator
from newsmeme.models.types import DenormalizedText
from newsmeme.models.users import User
class PostQuery(BaseQuery):
def jsonify(self):
for post in self.all():
yield post.json
def as_list(self):
"""
Return restricted list of columns for list queries
"""
deferred_cols = ("description",
"tags",
"author.email",
"author.password",
"author.activation_key",
"author.openid",
"author.date_joined",
"author.receive_email",
"author.email_alerts",
"author.followers",
"author.following")
options = [db.defer(col) for col in deferred_cols]
return self.options(*options)
def deadpooled(self):
return self.filter(Post.score <= 0)
def popular(self):
return self.filter(Post.score > 0)
def hottest(self):
return self.order_by(Post.num_comments.desc(),
Post.score.desc(),
Post.id.desc())
def public(self):
return self.filter(Post.access == Post.PUBLIC)
def restricted(self, user=None):
"""
        Returns posts restricted to a) public posts, b) posts authored by
        the user, or c) posts authored by the user's friends
"""
if user and user.is_moderator:
return self
criteria = [Post.access == Post.PUBLIC]
if user:
criteria.append(Post.author_id == user.id)
if user.friends:
criteria.append(db.and_(Post.access == Post.FRIENDS,
Post.author_id.in_(user.friends)))
return self.filter(reduce(db.or_, criteria))
def search(self, keywords):
criteria = []
for keyword in keywords.split():
keyword = '%' + keyword + '%'
criteria.append(db.or_(Post.title.ilike(keyword),
Post.description.ilike(keyword),
Post.link.ilike(keyword),
Post.tags.ilike(keyword),
User.username.ilike(keyword)))
q = reduce(db.and_, criteria)
return self.filter(q).join(User).distinct()
class Post(db.Model):
__tablename__ = "posts"
PUBLIC = 100
FRIENDS = 200
PRIVATE = 300
PER_PAGE = 40
query_class = PostQuery
id = db.Column(db.Integer, primary_key=True)
author_id = db.Column(db.Integer,
db.ForeignKey(User.id, ondelete='CASCADE'),
nullable=False)
title = db.Column(db.Unicode(200))
description = db.Column(db.UnicodeText)
link = db.Column(db.String(250))
date_created = db.Column(db.DateTime, default=datetime.utcnow)
score = db.Column(db.Integer, default=1)
num_comments = db.Column(db.Integer, default=0)
votes = db.Column(DenormalizedText)
access = db.Column(db.Integer, default=PUBLIC)
_tags = db.Column("tags", db.UnicodeText)
author = db.relation(User, innerjoin=True, lazy="joined")
__mapper_args__ = {'order_by': id.desc()}
class Permissions(object):
def __init__(self, obj):
self.obj = obj
@cached_property
def default(self):
return Permission(UserNeed(self.obj.author_id)) & moderator
@cached_property
def view(self):
if self.obj.access == Post.PUBLIC:
return Permission()
if self.obj.access == Post.FRIENDS:
needs = [UserNeed(user_id) for user_id in
self.obj.author.friends]
return self.default & Permission(*needs)
return self.default
@cached_property
def edit(self):
return self.default
@cached_property
def delete(self):
return self.default
@cached_property
def vote(self):
needs = [UserNeed(user_id) for user_id in self.obj.votes]
needs.append(UserNeed(self.obj.author_id))
return auth & Denial(*needs)
@cached_property
def comment(self):
return auth
def __init__(self, *args, **kwargs):
super(Post, self).__init__(*args, **kwargs)
self.votes = self.votes or set()
self.access = self.access or self.PUBLIC
def __str__(self):
return self.title
def __repr__(self):
return "<%s>" % self
@cached_property
def permissions(self):
return self.Permissions(self)
def vote(self, user):
self.votes.add(user.id)
def _get_tags(self):
return self._tags
def _set_tags(self, tags):
self._tags = tags
if self.id:
# ensure existing tag references are removed
d = db.delete(post_tags, post_tags.c.post_id == self.id)
db.engine.execute(d)
for tag in set(self.taglist):
slug = slugify(tag)
tag_obj = Tag.query.filter(Tag.slug == slug).first()
if tag_obj is None:
tag_obj = Tag(name=tag, slug=slug)
db.session.add(tag_obj)
if self not in tag_obj.posts:
tag_obj.posts.append(self)
tags = db.synonym("_tags", descriptor=property(_get_tags, _set_tags))
@property
def taglist(self):
if self.tags is None:
return []
tags = [t.strip() for t in self.tags.split(",")]
return [t for t in tags if t]
@cached_property
def linked_taglist(self):
"""
Returns the tags in the original order and format,
with link to tag page
"""
return [(tag, url_for('frontend.tag',
slug=slugify(tag)))
for tag in self.taglist]
@cached_property
def domain(self):
if not self.link:
return ''
return domain(self.link)
@cached_property
def json(self):
"""
Returns dict of safe attributes for passing into
a JSON request.
"""
return dict(post_id=self.id,
score=self.score,
title=self.title,
link=self.link,
description=self.description,
num_comments=self.num_comments,
author=self.author.username)
@cached_property
def access_name(self):
return {
Post.PUBLIC: "public",
Post.FRIENDS: "friends",
Post.PRIVATE: "private"
}.get(self.access, "public")
def can_access(self, user=None):
if self.access == self.PUBLIC:
return True
if user is None:
return False
if user.is_moderator or user.id == self.author_id:
return True
return self.access == self.FRIENDS and self.author_id in user.friends
@cached_property
def comments(self):
"""
        Returns the post's comments as a tree. Each parent comment gets a
        "comments" attribute holding its child comments and a "depth" attribute.
"""
from newsmeme.models.comments import Comment
comments = Comment.query.filter(Comment.post_id == self.id).all()
def _get_comments(parent, depth):
parent.comments = []
parent.depth = depth
for comment in comments:
if comment.parent_id == parent.id:
parent.comments.append(comment)
_get_comments(comment, depth + 1)
parents = [c for c in comments if c.parent_id is None]
for parent in parents:
_get_comments(parent, 0)
return parents
def _url(self, _external=False):
return url_for('post.view',
post_id=self.id,
slug=self.slug,
_external=_external)
@cached_property
def url(self):
return self._url()
@cached_property
def permalink(self):
return self._url(True)
@cached_property
def markdown(self):
return Markup(markdown(self.description or ''))
@cached_property
def slug(self):
return slugify(self.title or '')[:80]
post_tags = db.Table("post_tags", db.Model.metadata,
db.Column("post_id", db.Integer,
db.ForeignKey('posts.id', ondelete='CASCADE'),
primary_key=True),
db.Column("tag_id", db.Integer,
db.ForeignKey('tags.id', ondelete='CASCADE'),
primary_key=True))
class TagQuery(BaseQuery):
def cloud(self):
tags = self.filter(Tag.num_posts > 0).all()
if not tags:
return []
max_posts = max(t.num_posts for t in tags)
min_posts = min(t.num_posts for t in tags)
diff = (max_posts - min_posts) / 10.0
if diff < 0.1:
diff = 0.1
for tag in tags:
tag.size = int(tag.num_posts / diff)
if tag.size < 1:
tag.size = 1
random.shuffle(tags)
return tags
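# Illustrative note (not part of the original module): cloud() buckets tags
# into roughly ten sizes. With hypothetical counts of min_posts = 1 and
# max_posts = 21, diff is (21 - 1) / 10.0 == 2.0, so a tag used 9 times gets
# size int(9 / 2.0) == 4 and the most used tag gets size int(21 / 2.0) == 10.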
class Tag(db.Model):
__tablename__ = "tags"
query_class = TagQuery
id = db.Column(db.Integer, primary_key=True)
slug = db.Column(db.Unicode(80), unique=True)
posts = db.dynamic_loader(Post, secondary=post_tags, query_class=PostQuery)
_name = db.Column("name", db.Unicode(80), unique=True)
def __str__(self):
return self.name
def _get_name(self):
return self._name
def _set_name(self, name):
self._name = name.lower().strip()
self.slug = slugify(name)
name = db.synonym("_name", descriptor=property(_get_name, _set_name))
@cached_property
def url(self):
return url_for("frontend.tag", slug=self.slug)
num_posts = db.column_property(
db.select([db.func.count(post_tags.c.post_id)]).
where(db.and_(post_tags.c.tag_id == id,
Post.id == post_tags.c.post_id,
Post.access == Post.PUBLIC)).as_scalar())
|
sixu05202004/newsmeme
|
newsmeme/newsmeme/models/posts.py
|
Python
|
bsd-3-clause
| 10,746 | 0.000279 |
# coding=utf-8
# Copyright 2020 The TF-Agents Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test for tf_agents.utils.random_py_policy."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf # pylint: disable=g-explicit-tensorflow-version-import
from tf_agents.policies import random_py_policy
from tf_agents.specs import array_spec
from tf_agents.trajectories import time_step
from tf_agents.utils import test_utils
class RandomPyPolicyTest(test_utils.TestCase):
def setUp(self):
super(RandomPyPolicyTest, self).setUp()
self._time_step_spec = time_step.time_step_spec(
observation_spec=array_spec.ArraySpec((1,), np.int32))
self._time_step = time_step.restart(observation=np.array([1]))
def testGeneratesActions(self):
action_spec = [
array_spec.BoundedArraySpec((2, 3), np.int32, -10, 10),
array_spec.BoundedArraySpec((1, 2), np.int32, -10, 10)
]
policy = random_py_policy.RandomPyPolicy(
time_step_spec=self._time_step_spec, action_spec=action_spec)
action_step = policy.action(self._time_step)
tf.nest.assert_same_structure(action_spec, action_step.action)
self.assertTrue(np.all(action_step.action[0] >= -10))
self.assertTrue(np.all(action_step.action[0] <= 10))
self.assertTrue(np.all(action_step.action[1] >= -10))
self.assertTrue(np.all(action_step.action[1] <= 10))
def testGeneratesBatchedActions(self):
action_spec = [
array_spec.BoundedArraySpec((2, 3), np.int32, -10, 10),
array_spec.BoundedArraySpec((1, 2), np.int32, -10, 10)
]
policy = random_py_policy.RandomPyPolicy(
time_step_spec=self._time_step_spec,
action_spec=action_spec,
outer_dims=(3,))
action_step = policy.action(self._time_step)
tf.nest.assert_same_structure(action_spec, action_step.action)
self.assertEqual((3, 2, 3), action_step.action[0].shape)
self.assertEqual((3, 1, 2), action_step.action[1].shape)
self.assertTrue(np.all(action_step.action[0] >= -10))
self.assertTrue(np.all(action_step.action[0] <= 10))
self.assertTrue(np.all(action_step.action[1] >= -10))
self.assertTrue(np.all(action_step.action[1] <= 10))
def testGeneratesBatchedActionsWithoutSpecifyingOuterDims(self):
action_spec = [
array_spec.BoundedArraySpec((2, 3), np.int32, -10, 10),
array_spec.BoundedArraySpec((1, 2), np.int32, -10, 10)
]
time_step_spec = time_step.time_step_spec(
observation_spec=array_spec.ArraySpec((1,), np.int32))
policy = random_py_policy.RandomPyPolicy(
time_step_spec=time_step_spec, action_spec=action_spec)
action_step = policy.action(
time_step.restart(np.array([[1], [2], [3]], dtype=np.int32)))
tf.nest.assert_same_structure(action_spec, action_step.action)
self.assertEqual((3, 2, 3), action_step.action[0].shape)
self.assertEqual((3, 1, 2), action_step.action[1].shape)
self.assertTrue(np.all(action_step.action[0] >= -10))
self.assertTrue(np.all(action_step.action[0] <= 10))
self.assertTrue(np.all(action_step.action[1] >= -10))
self.assertTrue(np.all(action_step.action[1] <= 10))
def testPolicyStateSpecIsEmpty(self):
policy = random_py_policy.RandomPyPolicy(
time_step_spec=self._time_step_spec, action_spec=[])
self.assertEqual(policy.policy_state_spec, ())
def testMasking(self):
batch_size = 1000
time_step_spec = time_step.time_step_spec(
observation_spec=array_spec.ArraySpec((1,), np.int32))
action_spec = array_spec.BoundedArraySpec((), np.int64, -5, 5)
# We create a fixed mask here for testing purposes. Normally the mask would
# be part of the observation.
mask = [0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0]
np_mask = np.array(mask)
batched_mask = np.array([mask for _ in range(batch_size)])
policy = random_py_policy.RandomPyPolicy(
time_step_spec=time_step_spec,
action_spec=action_spec,
observation_and_action_constraint_splitter=(
lambda obs: (obs, batched_mask)))
my_time_step = time_step.restart(time_step_spec, batch_size)
action_step = policy.action(my_time_step)
tf.nest.assert_same_structure(action_spec, action_step.action)
# Sample from the policy 1000 times, and ensure that actions considered
# invalid according to the mask are never chosen.
action_ = self.evaluate(action_step.action)
self.assertTrue(np.all(action_ >= -5))
self.assertTrue(np.all(action_ <= 5))
self.assertAllEqual(np_mask[action_ - action_spec.minimum],
np.ones([batch_size]))
# Ensure that all valid actions occur somewhere within the batch. Because we
# sample 1000 times, the chance of this failing for any particular action is
# (2/3)^1000, roughly 1e-176.
for index in range(action_spec.minimum, action_spec.maximum + 1):
if np_mask[index - action_spec.minimum]:
self.assertIn(index, action_)
if __name__ == '__main__':
test_utils.main()
|
tensorflow/agents
|
tf_agents/policies/random_py_policy_test.py
|
Python
|
apache-2.0
| 5,609 | 0.001783 |
# coding: utf-8
"""
regex.tests
~~~~~~~~~~~
:copyright: 2012 by Daniel Neuhäuser
:license: BSD, see LICENSE.rst
"""
from unittest import TestCase
from itertools import izip
from contextlib import contextmanager
from regex.parser import (
parse, ParserError, Parser, DEFAULT_ALPHABET, DEFAULT_LANGUAGE
)
from regex.ast import (
Epsilon, Character, Concatenation, Union, Repetition, Group, Either,
Neither, Range, Any
)
from regex.matcher import Find, Span
from regex.tokenizer import Tokenizer, Token, TokenizerError
class TestParser(TestCase):
def test_epsilon(self):
self.assertEqual(parse(u""), Epsilon())
def test_character(self):
self.assertEqual(parse(u"a"), Character(u"a"))
def test_concatenation(self):
self.assertEqual(
parse(u"ab"),
Concatenation(Character(u"a"), Character(u"b"))
)
def test_union(self):
self.assertEqual(
parse(u"a|b"),
Union(Character(u"a"), Character(u"b"))
)
def test_zero_or_more(self):
self.assertEqual(
parse(u"a*"),
Repetition(Character(u"a"))
)
def test_zero_or_more_missing_repeatable(self):
with self.assertRaises(ParserError) as context:
parse(u"*")
exception = context.exception
self.assertEqual(
exception.reason,
u"* is not preceded by a repeatable expression"
)
self.assertEqual(exception.annotation, (
u"*\n"
u"^"
))
def test_one_or_more(self):
self.assertEqual(
parse(u"a+"),
Concatenation(Character(u"a"), Repetition(Character(u"a")))
)
def test_one_or_more_missing_repeatable(self):
with self.assertRaises(ParserError) as context:
parse(u"+")
exception = context.exception
self.assertEqual(
exception.reason,
u"+ is not preceded by a repeatable expression",
)
self.assertEqual(
exception.annotation,
(
u"+\n"
u"^"
)
)
def test_group(self):
self.assertEqual(
parse(u"(a)"),
Group(Character(u"a"))
)
def test_group_missing_begin(self):
with self.assertRaises(ParserError) as context:
parse(u"a)")
exception = context.exception
self.assertEqual(
exception.reason,
u"found unmatched )"
)
self.assertEqual(
exception.annotation,
(
u"a)\n"
u" ^"
)
)
def test_group_missing_end(self):
with self.assertRaises(ParserError) as context:
parse(u"(a")
exception = context.exception
self.assertEqual(
exception.reason,
u"unexpected end of string, expected ) corresponding to ("
)
self.assertEqual(
exception.annotation,
(
u"(a\n"
u"^-^"
)
)
def test_either(self):
self.assertEqual(
parse(u"[ab]"),
Either(frozenset(map(Character, u"ab")))
)
def test_either_missing_begin(self):
with self.assertRaises(ParserError) as context:
parse(u"ab]")
exception = context.exception
self.assertEqual(
exception.reason,
u"found unmatched ]"
)
self.assertEqual(
exception.annotation,
(
u"ab]\n"
u" ^"
)
)
def test_either_missing_end(self):
with self.assertRaises(ParserError) as context:
parse(u"[ab")
exception = context.exception
self.assertEqual(
exception.reason,
u"unexpected end of string, expected ] corresponding to ["
)
self.assertEqual(
exception.annotation,
(
u"[ab\n"
u"^--^"
)
)
def test_neither(self):
self.assertEqual(
parse(u"[^ab]"),
Neither(frozenset(map(Character, u"ab")), DEFAULT_ALPHABET)
)
def test_range(self):
self.assertEqual(
parse(u"[a-c]"),
Either(frozenset([Range(
Character(u"a"),
Character(u"c"),
DEFAULT_ALPHABET
)]))
)
def test_range_missing_start(self):
with self.assertRaises(ParserError) as context:
parse(u"[-c]")
exception = context.exception
self.assertEqual(exception.reason, u"range is missing start")
self.assertEqual(
exception.annotation,
(
u"[-c]\n"
u"^"
)
)
def test_range_missing_end(self):
with self.assertRaises(ParserError) as context:
parse(u"[a-]")
exception = context.exception
self.assertEqual(
exception.reason,
u"expected character, found instruction: ]"
)
self.assertEqual(
exception.annotation,
(
u"[a-]\n"
u" ^"
)
)
def test_any(self):
parser = Parser(DEFAULT_LANGUAGE, alphabet=frozenset(u"ab"))
self.assertEqual(
parser.parse(u"."),
Any(frozenset(u"ab"))
)
class RegexTestWrapper(object):
def __init__(self, regex):
self.regex = regex
self.ast = parse(regex)
@property
def nfa(self):
if not hasattr(self, "_nfa"):
self._nfa = self.ast.to_nfa()
return self._nfa
@property
def dfa(self):
if not hasattr(self, "_dfa"):
self._dfa = self.ast.to_dfa()
return self._dfa
@property
def dfa_table(self):
if not hasattr(self, "_dfa_table"):
self._dfa_table = self.dfa.to_dfa_table()
return self._dfa_table
@property
def matchers(self):
if hasattr(self, "_matchers"):
return self._matchers
return self._iter_matchers()
def _iter_matchers(self):
self._matchers = []
matcher = lambda x: self._matchers.append(x) or x
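        # list.append returns None, so "append(x) or x" memoizes the matcher
        # in self._matchers and still returns it for the yield below.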
yield matcher(self.nfa)
yield matcher(self.dfa)
yield matcher(self.dfa_table)
def assertMatches(self, string, expected_end):
for matcher in self.matchers:
end = matcher.match(string)
assert end == expected_end, end
def assertAllMatches(self, matches):
for string, end in matches:
self.assertMatches(string, end)
def assertNotMatches(self, string):
for matcher in self.matchers:
end = matcher.match(string)
assert end is None, end
def assertNotMatchesAny(self, strings):
for string in strings:
self.assertNotMatches(string)
def assertFindEqual(self, string, span):
for matcher in self.matchers:
find = matcher.find(string)
assert find == Find(string, span), find
def assertAllFinds(self, finds):
for string, span in finds:
self.assertFindEqual(string, span)
def assertFindAllEqual(self, string, spans):
for matcher in self.matchers:
finds = matcher.find_all(string)
for find, span in izip(finds, spans):
assert find == Find(string, span), find
try:
find = finds.next()
raise AssertionError("unexpected find: %r" % find)
except StopIteration:
pass
def assertSub(self, string, sub, expected_result):
for matcher in self.matchers:
result = matcher.subn(string, sub)
assert result == expected_result, result
assert matcher.sub(string, sub) == expected_result[0]
class TestMatcher(TestCase):
compilers = ["to_nfa", "to_dfa", "to_dfa_table"]
@contextmanager
def regex(self, regex):
yield RegexTestWrapper(regex)
def test_epsilon(self):
with self.regex(u"") as regex:
regex.assertMatches(u"", 0)
regex.assertNotMatches(u"a")
regex.assertAllFinds([
(u"", Span(0, 0)),
(u"a", Span(1, 1))
])
regex.assertSub(u"", u"a", (u"a", 1))
def test_any(self):
with self.regex(u".") as regex:
regex.assertMatches(u"a", 1)
regex.assertFindEqual(u"a", Span(0, 1))
regex.assertFindAllEqual(u"aa", [
Span(0, 1),
Span(1, 2)
])
regex.assertSub(u"a", u"b", (u"b", 1))
regex.assertSub(u"aa", u"b", (u"bb", 2))
def test_character(self):
with self.regex(u"a") as regex:
regex.assertMatches(u"a", 1)
regex.assertMatches(u"aa", 1)
regex.assertAllFinds([
(u"a", Span(0, 1)),
(u"ba", Span(1, 2))
])
regex.assertFindAllEqual(u"aa", [
Span(0, 1),
Span(1, 2)
])
regex.assertFindAllEqual(u"aba", [
Span(0, 1),
Span(2, 3)
])
regex.assertSub(u"a", u"b", (u"b", 1))
regex.assertSub(u"ab", u"b", (u"bb", 1))
regex.assertSub(u"aa", u"b", (u"bb", 2))
regex.assertSub(u"bab", u"b", (u"bbb", 1))
def test_concatenation(self):
with self.regex(u"ab") as regex:
regex.assertMatches(u"ab", 2)
regex.assertMatches(u"abab", 2)
regex.assertAllFinds([
(u"ab", Span(0, 2)),
(U"cab", Span(1, 3))
])
regex.assertFindAllEqual(u"abab", [
Span(0, 2),
Span(2, 4)
])
regex.assertFindAllEqual(u"abcab", [
Span(0, 2),
Span(3, 5)
])
regex.assertSub(u"ab", u"c", (u"c", 1))
regex.assertSub(u"abab", u"c", (u"cc", 2))
regex.assertSub(u"dabdabd", u"c", (u"dcdcd", 2))
def test_union(self):
with self.regex(u"a|b") as regex:
for string in [u"a", u"b", u"aa", u"bb"]:
regex.assertMatches(string, 1)
for string in [u"a", u"b"]:
regex.assertFindEqual(string, Span(0, 1))
for string in [u"ca", u"cb"]:
regex.assertFindEqual(string, Span(1, 2))
for string in [u"aa", u"bb", u"ab"]:
regex.assertFindAllEqual(string, [
Span(0, 1),
Span(1, 2)
])
for string in [u"aca", u"bcb"]:
regex.assertFindAllEqual(string, [
Span(0, 1),
Span(2, 3)
])
regex.assertSub(u"a", u"c", (u"c", 1))
regex.assertSub(u"b", u"c", (u"c", 1))
regex.assertSub(u"ab", u"c", (u"cc", 2))
regex.assertSub(u"dadbd", u"c", (u"dcdcd", 2))
def test_zero_or_more(self):
with self.regex(u"a*") as regex:
regex.assertAllMatches([(u"", 0), (u"a", 1), (u"aa", 2)])
for string in [u"", u"a", u"aa"]:
regex.assertFindEqual(string, Span(0, len(string)))
for string in [u"b", u"ba", u"baa"]:
regex.assertFindEqual(string, Span(1, len(string)))
regex.assertFindAllEqual(u"aba", [
Span(0, 1),
Span(2, 3)
])
regex.assertFindAllEqual(u"aabaa", [
Span(0, 2),
Span(3, 5)
])
regex.assertSub(u"", u"b", (u"b", 1))
regex.assertSub(u"cac", u"b", (u"cbc", 1))
regex.assertSub(u"caac", u"b", (u"cbc", 1))
def test_one_or_more(self):
with self.regex(u"a+") as regex:
regex.assertAllMatches([(u"a", 1), (u"aa", 2)])
for string in [u"a", u"aa"]:
regex.assertFindEqual(string, Span(0, len(string)))
for string in [u"ba", u"baa"]:
regex.assertFindEqual(string, Span(1, len(string)))
regex.assertFindAllEqual(u"aba", [
Span(0, 1),
Span(2, 3)
])
regex.assertFindAllEqual(u"aabaa", [
Span(0, 2),
Span(3, 5)
])
regex.assertSub(u"cac", u"b", (u"cbc", 1))
regex.assertSub(u"caac", u"b", (u"cbc", 1))
def test_group(self):
with self.regex(u"(ab)") as ab:
for string in [u"ab", u"abab", u"ababab"]:
ab.assertMatches(string, 2)
ab.assertAllFinds([
(u"ab", Span(0, 2)),
(u"cab", Span(1, 3))
])
ab.assertFindAllEqual(u"abab", [
Span(0, 2),
Span(2, 4)
])
ab.assertFindAllEqual(u"abcab", [
Span(0, 2),
Span(3, 5)
])
ab.assertSub(u"dabd", u"c", (u"dcd", 1))
ab.assertSub(u"dababd", u"c", (u"dccd", 2))
with self.regex(u"(ab)+") as abp:
abp.assertAllMatches([
(u"ab", 2),
(u"abab", 4),
(u"ababab", 6)
])
for string in [u"ab", u"abab"]:
abp.assertFindEqual(string, Span(0, len(string)))
for string in [u"cab", u"cabab"]:
abp.assertFindEqual(string, Span(1, len(string)))
abp.assertFindAllEqual(u"abcab", [
Span(0, 2),
Span(3, 5)
])
abp.assertFindAllEqual(u"ababcabab", [
Span(0, 4),
Span(5, 9)
])
abp.assertSub(u"dabd", u"c", (u"dcd", 1))
abp.assertSub(u"dababd", u"c", (u"dcd", 1))
def test_either(self):
with self.regex(u"[ab]") as regex:
for string in [u"a", u"b", u"aa", u"bb", u"ab", u"ba"]:
regex.assertMatches(string, 1)
for string in [u"a", u"b"]:
regex.assertFindEqual(string, Span(0, 1))
for string in [u"ca", u"cb"]:
regex.assertFindEqual(string, Span(1, 2))
for string in [u"aa", u"bb", u"ab", u"ba"]:
regex.assertFindAllEqual(string, [
Span(0, 1),
Span(1, 2)
])
for string in [u"aca", u"bcb", u"acb", u"bca"]:
regex.assertFindAllEqual(string, [
Span(0, 1),
Span(2, 3)
])
regex.assertSub(u"a", u"c", (u"c", 1))
regex.assertSub(u"b", u"c", (u"c", 1))
regex.assertSub(u"dadbd", u"c", (u"dcdcd", 2))
def test_neither(self):
with self.regex(u"[^ab]") as regex:
regex.assertMatches(u"c", 1)
regex.assertNotMatchesAny([u"a", u"b"])
regex.assertAllFinds([
(u"c", Span(0, 1)),
(u"ac", Span(1, 2)),
(u"bc", Span(1, 2))
])
for string in [u"cac", u"cbc"]:
regex.assertFindAllEqual(string, [
Span(0, 1),
Span(2, 3)
])
regex.assertSub(u"bcb", u"a", (u"bab", 1))
regex.assertSub(u"bcbcb", u"a", (u"babab", 2))
def test_range(self):
with self.regex(u"[a-c]") as regex:
for string in [u"a", u"aa", u"b", u"bb", u"c", u"cc"]:
regex.assertMatches(string, 1)
for string in [u"a", u"b", u"c"]:
regex.assertFindEqual(string, Span(0, 1))
for string in [u"da", u"db", u"dc"]:
regex.assertFindEqual(string, Span(1, 2))
for string in [u"ada", u"bdb", u"cdc"]:
regex.assertFindAllEqual(string, [
Span(0, 1),
Span(2, 3)
])
regex.assertSub(u"faf", u"e", (u"fef", 1))
regex.assertSub(u"fbf", u"e", (u"fef", 1))
regex.assertSub(u"fcf", u"e", (u"fef", 1))
regex.assertSub(u"fafbf", u"e", (u"fefef", 2))
regex.assertSub(u"fafbfcf", u"e", (u"fefefef", 3))
class TestTokenizer(TestCase):
def runTest(self):
class A(Token):
pass
class B(Token):
pass
class AB(Token):
pass
tokenizer = Tokenizer([
(u"ab+", AB),
(u"a+", A),
(u"b+", B)
])
self.assertEqual(list(tokenizer(u"ababaab")), [
AB(u"abab", Span(0, 4)),
A(u"aa", Span(4, 6)),
B(u"b", Span(6, 7))
])
string = u"ababaabbcaa"
with self.assertRaises(TokenizerError) as context:
list(tokenizer(string))
exception = context.exception
self.assertEqual(
exception.reason,
"string cannot be further consumed at position 8"
)
self.assertEqual(exception.position, 8)
self.assertEqual(string[exception.position], u"c")
|
DasIch/editor
|
prototypes/regex/tests.py
|
Python
|
bsd-3-clause
| 17,417 | 0.000172 |
# -*- coding: utf-8 -*-
#
# This file is part of GetTor, a Tor Browser distribution system.
#
# :authors: Israel Leiva <ilv@riseup.net>
# see also AUTHORS file
#
# :copyright: (c) 2008-2014, The Tor Project, Inc.
# (c) 2014, Israel Leiva
#
# :license: This is Free Software. See LICENSE for license information.
import os
import re
import sys
import time
import email
import gettext
import logging
import smtplib
import datetime
import ConfigParser
from email import Encoders
from email.MIMEBase import MIMEBase
from email.mime.text import MIMEText
from email.MIMEMultipart import MIMEMultipart
import core
import utils
import blacklist
"""SMTP module for processing email requests."""
OS = {
'osx': 'Mac OS X',
'linux': 'Linux',
'windows': 'Windows'
}
class ConfigError(Exception):
pass
class AddressError(Exception):
pass
class SendEmailError(Exception):
pass
class InternalError(Exception):
pass
class SMTP(object):
"""Receive and reply requests by email.
Public methods:
process_email(): Process the email received.
Exceptions:
ConfigError: Bad configuration.
AddressError: Address of the sender malformed.
SendEmailError: SMTP server not responding.
InternalError: Something went wrong internally.
"""
def __init__(self, cfg=None):
"""Create new object by reading a configuration file.
:param: cfg (string) path of the configuration file.
"""
default_cfg = 'smtp.cfg'
config = ConfigParser.ConfigParser()
if cfg is None or not os.path.isfile(cfg):
cfg = default_cfg
try:
with open(cfg) as f:
config.readfp(f)
except IOError:
raise ConfigError("File %s not found!" % cfg)
try:
self.our_domain = config.get('general', 'our_domain')
self.mirrors = config.get('general', 'mirrors')
self.i18ndir = config.get('i18n', 'dir')
logdir = config.get('log', 'dir')
logfile = os.path.join(logdir, 'smtp.log')
loglevel = config.get('log', 'level')
blacklist_cfg = config.get('blacklist', 'cfg')
self.bl = blacklist.Blacklist(blacklist_cfg)
self.bl_max_req = config.get('blacklist', 'max_requests')
self.bl_max_req = int(self.bl_max_req)
self.bl_wait_time = config.get('blacklist', 'wait_time')
self.bl_wait_time = int(self.bl_wait_time)
core_cfg = config.get('general', 'core_cfg')
self.core = core.Core(core_cfg)
except ConfigParser.Error as e:
raise ConfigError("Configuration error: %s" % str(e))
except blacklist.ConfigError as e:
raise InternalError("Blacklist error: %s" % str(e))
except core.ConfigError as e:
raise InternalError("Core error: %s" % str(e))
# logging
log = logging.getLogger(__name__)
logging_format = utils.get_logging_format()
date_format = utils.get_date_format()
formatter = logging.Formatter(logging_format, date_format)
log.info('Redirecting SMTP logging to %s' % logfile)
logfileh = logging.FileHandler(logfile, mode='a+')
logfileh.setFormatter(formatter)
logfileh.setLevel(logging.getLevelName(loglevel))
log.addHandler(logfileh)
# stop logging on stdout from now on
log.propagate = False
self.log = log
def _is_blacklisted(self, addr):
"""Check if a user is blacklisted.
:param: addr (string) the hashed address of the user.
        :return: true if the address is blacklisted, false otherwise.
"""
try:
self.bl.is_blacklisted(
addr, 'SMTP', self.bl_max_req, self.bl_wait_time
)
return False
except blacklist.BlacklistError as e:
return True
def _get_lc(self, addr):
"""Get the locale from an email address.
Process the email received and look for the locale in the recipient
        address (e.g. gettor+en@torproject.org). If no locale is found,
        English is used by default.
:param: (string) the email address we want to get the locale from.
:return: (string) the locale (english if none).
"""
# if no match found, english by default
lc = 'en'
# look for gettor+locale@torproject.org
        m = re.match(r'gettor\+(\w\w)@\w+\.\w+', addr)
        if m:
            # we found a request for locale lc
            lc = m.group(1)
return lc.lower()
def _get_normalized_address(self, addr):
"""Get normalized address.
We look for anything inside the last '<' and '>'. Code taken from
the old GetTor (utils.py).
:param: addr (string) the address we want to normalize.
:raise: AddressError if the address can't be normalized.
:return: (string) the normalized address.
"""
if '<' in addr:
idx = addr.rindex('<')
addr = addr[idx:]
m = re.search(r'<([^>]*)>', addr)
if m is None:
# malformed address
raise AddressError("Couldn't extract normalized address "
"from %s" % self_get_sha256(addr))
addr = m.group(1)
return addr
def _get_content(self, email):
"""Get the body content of an email.
:param: email (object) the email object to extract the content from.
:return: (string) body of the message.
"""
# get the body content of the email
maintype = email.get_content_maintype()
if maintype == 'multipart':
for part in email.get_payload():
if part.get_content_maintype() == 'text':
return part.get_payload()
elif maintype == 'text':
return email.get_payload()
def _get_msg(self, msgid, lc):
"""Get message identified by msgid in a specific locale.
:param: msgid (string) the identifier of a string.
:param: lc (string) the locale.
:return: (string) the message from the .po file.
"""
# obtain the content in the proper language
try:
t = gettext.translation(lc, self.i18ndir, languages=[lc])
_ = t.ugettext
msgstr = _(msgid)
return msgstr
except IOError as e:
raise ConfigError("%s" % str(e))
def _parse_email(self, msg, addr):
"""Parse the email received.
Get the locale and parse the text for the rest of the info.
:param: msg (string) the content of the email to be parsed.
:param: addr (string) the address of the recipient (i.e. us).
        :return: (dict) with the locale, os and type of request.
"""
req = self._parse_text(msg)
lc = self._get_lc(addr)
supported_lc = self.core.get_supported_lc()
if lc in supported_lc:
req['lc'] = lc
else:
req['lc'] = 'en'
return req
def _parse_text(self, msg):
"""Parse the text part of the email received.
Try to figure out what the user is asking, namely, the type
of request, the package and os required (if applies).
:param: msg (string) the content of the email to be parsed.
        :return: (dict) with the type of request and the os (if it applies).
"""
        # by default we assume the request is asking for help
req = {}
req['type'] = 'help'
req['os'] = None
# core knows what OS are supported
supported_os = self.core.get_supported_os()
# search for OS or mirrors request
# if nothing is found, help by default
found_request = False
        words = re.split(r'\s+', msg.strip())
for word in words:
if not found_request:
# OS first
for os in supported_os:
if re.match(os, word, re.IGNORECASE):
req['os'] = os
req['type'] = 'links'
found_request = True
break
# mirrors
if re.match("mirrors?", word, re.IGNORECASE):
req['type'] = 'mirrors'
found_request = True
else:
break
return req
def _create_email(self, from_addr, to_addr, subject, msg):
"""Create an email object.
This object will be used to construct the reply.
:param: from_addr (string) the address of the sender.
:param: to_addr (string) the address of the recipient.
:param: subject (string) the subject of the email.
:param: msg (string) the content of the email.
:return: (object) the email object.
"""
email_obj = MIMEMultipart()
email_obj.set_charset("utf-8")
email_obj['Subject'] = subject
email_obj['From'] = from_addr
email_obj['To'] = to_addr
msg_attach = MIMEText(msg, 'plain')
email_obj.attach(msg_attach)
return email_obj
def _send_email(self, from_addr, to_addr, subject, msg, attach=None):
"""Send an email.
        Take 'from' and 'to' addresses, a subject and the content, create
        the email and send it.
:param: from_addr (string) the address of the sender.
:param: to_addr (string) the address of the recipient.
:param: subject (string) the subject of the email.
:param: msg (string) the content of the email.
:param: attach (string) the path of the mirrors list.
"""
email_obj = self._create_email(from_addr, to_addr, subject, msg)
        if attach:
# for now, the only email with attachment is the one for mirrors
try:
part = MIMEBase('application', "octet-stream")
part.set_payload(open(attach, "rb").read())
Encoders.encode_base64(part)
part.add_header(
'Content-Disposition',
'attachment; filename="mirrors.txt"'
)
email_obj.attach(part)
except IOError as e:
raise SendEmailError('Error with mirrors: %s' % str(e))
try:
s = smtplib.SMTP("localhost")
s.sendmail(from_addr, to_addr, email_obj.as_string())
s.quit()
except smtplib.SMTPException as e:
raise SendEmailError("Error with SMTP: %s" % str(e))
def _send_links(self, links, lc, os, from_addr, to_addr):
"""Send links to the user.
Get the message in the proper language (according to the locale),
replace variables and send the email.
:param: links (string) the links to be sent.
:param: lc (string) the locale.
:param: os (string) the operating system.
:param: from_addr (string) the address of the sender.
:param: to_addr (string) the address of the recipient.
"""
# obtain the content in the proper language and send it
try:
links_subject = self._get_msg('links_subject', 'en')
links_msg = self._get_msg('links_msg', 'en')
links_msg = links_msg % (OS[os], links)
self._send_email(
from_addr,
to_addr,
links_subject,
links_msg,
None
)
except ConfigError as e:
raise InternalError("Error while getting message %s" % str(e))
except SendEmailError as e:
raise InternalError("Error while sending links message")
def _send_mirrors(self, lc, from_addr, to_addr):
"""Send mirrors message.
Get the message in the proper language (according to the locale),
replace variables (if any) and send the email.
:param: lc (string) the locale.
:param: from_addr (string) the address of the sender.
:param: to_addr (string) the address of the recipient.
"""
# obtain the content in the proper language and send it
try:
mirrors_subject = self._get_msg('mirrors_subject', lc)
mirrors_msg = self._get_msg('mirrors_msg', lc)
self._send_email(
from_addr, to_addr, mirrors_subject, mirrors_msg, self.mirrors
)
except ConfigError as e:
raise InternalError("Error while getting message %s" % str(e))
except SendEmailError as e:
raise InternalError("Error while sending mirrors message")
def _send_help(self, lc, from_addr, to_addr):
"""Send help message.
Get the message in the proper language (according to the locale),
replace variables (if any) and send the email.
:param: lc (string) the locale.
:param: from_addr (string) the address of the sender.
:param: to_addr (string) the address of the recipient.
"""
# obtain the content in the proper language and send it
try:
help_subject = self._get_msg('help_subject', lc)
help_msg = self._get_msg('help_msg', lc)
self._send_email(from_addr, to_addr, help_subject, help_msg, None)
except ConfigError as e:
raise InternalError("Error while getting message %s" % str(e))
except SendEmailError as e:
raise InternalError("Error while sending help message")
def process_email(self, raw_msg):
"""Process the email received.
        Create an email object from the string received. The processing
        flow is as follows:
- check for blacklisted address.
- parse the email.
- check the type of request.
- send reply.
:param: raw_msg (string) the email received.
:raise: InternalError if something goes wrong while asking for the
links to the Core module.
"""
self.log.debug("Processing email")
parsed_msg = email.message_from_string(raw_msg)
content = self._get_content(parsed_msg)
from_addr = parsed_msg['From']
to_addr = parsed_msg['To']
bogus_request = False
status = ''
req = None
try:
# two ways for a request to be bogus: address malformed or
# blacklisted
try:
self.log.debug("Normalizing address...")
norm_from_addr = self._get_normalized_address(from_addr)
            except AddressError as e:
                bogus_request = True
                # keep norm_from_addr defined so the check below doesn't fail
                norm_from_addr = None
                self.log.info('invalid; none; none')
if norm_from_addr:
anon_addr = utils.get_sha256(norm_from_addr)
if self._is_blacklisted(anon_addr):
bogus_request = True
self.log.info('blacklist; none; none')
if not bogus_request:
# try to figure out what the user is asking
self.log.debug("Request seems legit; parsing it...")
req = self._parse_email(content, to_addr)
# our address should have the locale requested
our_addr = "gettor+%s@%s" % (req['lc'], self.our_domain)
# possible options: help, links, mirrors
if req['type'] == 'help':
self.log.debug("Trying to send help...")
self.log.info('help; none; %s' % req['lc'])
# make sure we can send emails
try:
self._send_help('en', our_addr, norm_from_addr)
except SendEmailError as e:
self.log.debug("FAILED: %s" % str(e))
raise InternalError("Something's wrong with the SMTP "
"server: %s" % str(e))
elif req['type'] == 'mirrors':
self.log.debug("Trying to send the mirrors...")
self.log.info('mirrors; none; %s' % req['lc'])
# make sure we can send emails
try:
self._send_mirrors('en', our_addr, norm_from_addr)
except SendEmailError as e:
self.log.debug("FAILED: %s" % str(e))
raise SendEmailError("Something's wrong with the SMTP "
"server: %s" % str(e))
elif req['type'] == 'links':
self.log.debug("Trying to obtain the links...")
self.log.info('links; %s; %s' % (req['os'], req['lc']))
try:
links = self.core.get_links(
'SMTP', req['os'], req['lc']
)
# if core fails, we fail too
except (core.InternalError, core.ConfigError) as e:
self.log.debug("FAILED: %s" % str(e))
# something went wrong with the core
raise InternalError("Error obtaining the links")
# make sure we can send emails
self.log.debug("Trying to send the links...")
try:
self._send_links(links, req['lc'], req['os'], our_addr,
norm_from_addr)
except SendEmailError as e:
self.log.debug("FAILED: %s" % str(e))
raise SendEmailError("Something's wrong with the SMTP "
"server: %s" % str(e))
finally:
self.log.debug("Request processed")
| ilv/gettor | gettor/smtp.py | Python | bsd-3-clause | 17,953 | 0.000501 |
# --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# --------------------------------------------------------
"""Fast R-CNN config system.
This file specifies default config options for Fast R-CNN. You should not
change values in this file. Instead, you should write a config file (in yaml)
and use cfg_from_file(yaml_file) to load it and override the default options.
Most tools in $ROOT/tools take a --cfg option to specify an override file.
- See tools/{train,test}_net.py for example code that uses cfg_from_file()
- See experiments/cfgs/*.yml for example YAML config override files
"""
import os
import os.path as osp
import numpy as np
import math
# `pip install easydict` if you don't have it
from easydict import EasyDict as edict
__C = edict()
# Consumers can get config by:
# from fast_rcnn_config import cfg
cfg = __C
# region proposal network (RPN) or not
__C.IS_RPN = False
__C.FLIP_X = False
__C.INPUT = 'COLOR'
# multiscale training and testing
__C.IS_MULTISCALE = True
__C.IS_EXTRAPOLATING = True
#
__C.REGION_PROPOSAL = 'RPN'
__C.NET_NAME = 'CaffeNet'
__C.SUBCLS_NAME = 'voxel_exemplars'
#
# Training options
#
__C.TRAIN = edict()
__C.TRAIN.VISUALIZE = False
__C.TRAIN.VERTEX_REG = False
__C.TRAIN.GRID_SIZE = 256
__C.TRAIN.CHROMATIC = False
# Scales to compute real features
__C.TRAIN.SCALES_BASE = (0.25, 0.5, 1.0, 2.0, 3.0)
# The number of scales per octave in the image pyramid
# An octave is the set of scales up to half of the initial scale
__C.TRAIN.NUM_PER_OCTAVE = 4
# parameters for ROI generating
__C.TRAIN.SPATIAL_SCALE = 0.0625
__C.TRAIN.KERNEL_SIZE = 5
# Aspect ratio to use during training
__C.TRAIN.ASPECTS = (1, 0.75, 0.5, 0.25)
# Images to use per minibatch
__C.TRAIN.IMS_PER_BATCH = 2
# Minibatch size (number of regions of interest [ROIs])
__C.TRAIN.BATCH_SIZE = 128
# Fraction of minibatch that is labeled foreground (i.e. class > 0)
__C.TRAIN.FG_FRACTION = 0.25
# Overlap threshold for a ROI to be considered foreground (if >= FG_THRESH)
__C.TRAIN.FG_THRESH = (0.5,)
# Overlap threshold for a ROI to be considered background (class = 0 if
# overlap in [LO, HI))
__C.TRAIN.BG_THRESH_HI = (0.5,)
__C.TRAIN.BG_THRESH_LO = (0.1,)
# Use horizontally-flipped images during training?
__C.TRAIN.USE_FLIPPED = True
# Train bounding-box regressors
__C.TRAIN.BBOX_REG = True
# Overlap required between a ROI and ground-truth box in order for that ROI to
# be used as a bounding-box regression training example
__C.TRAIN.BBOX_THRESH = (0.5,)
# Iterations between snapshots
__C.TRAIN.SNAPSHOT_ITERS = 10000
# solver.prototxt specifies the snapshot path prefix, this adds an optional
# infix to yield the path: <prefix>[_<infix>]_iters_XYZ.caffemodel
__C.TRAIN.SNAPSHOT_INFIX = ''
# Use a prefetch thread in roi_data_layer.layer
# So far I haven't found this useful; likely more engineering work is required
__C.TRAIN.USE_PREFETCH = False
# Train using subclasses
__C.TRAIN.SUBCLS = True
# Train using viewpoint
__C.TRAIN.VIEWPOINT = False
# Threshold of ROIs in training RCNN
__C.TRAIN.ROI_THRESHOLD = 0.1
# IOU >= thresh: positive example
__C.TRAIN.RPN_POSITIVE_OVERLAP = 0.7
# IOU < thresh: negative example
__C.TRAIN.RPN_NEGATIVE_OVERLAP = 0.3
# If an anchor statisfied by positive and negative conditions set to negative
__C.TRAIN.RPN_CLOBBER_POSITIVES = False
# Max number of foreground examples
__C.TRAIN.RPN_FG_FRACTION = 0.5
# Total number of examples
__C.TRAIN.RPN_BATCHSIZE = 256
# NMS threshold used on RPN proposals
__C.TRAIN.RPN_NMS_THRESH = 0.7
# Number of top scoring boxes to keep before apply NMS to RPN proposals
__C.TRAIN.RPN_PRE_NMS_TOP_N = 12000
# Number of top scoring boxes to keep after applying NMS to RPN proposals
__C.TRAIN.RPN_POST_NMS_TOP_N = 2000
# Proposal height and width both need to be greater than RPN_MIN_SIZE (at orig image scale)
__C.TRAIN.RPN_MIN_SIZE = 16
# Deprecated (outside weights)
__C.TRAIN.RPN_BBOX_INSIDE_WEIGHTS = (1.0, 1.0, 1.0, 1.0)
# Give the positive RPN examples weight of p * 1 / {num positives}
# and give negatives a weight of (1 - p)
# Set to -1.0 to use uniform example weighting
__C.TRAIN.RPN_POSITIVE_WEIGHT = -1.0
__C.TRAIN.RPN_BASE_SIZE = 16
__C.TRAIN.RPN_ASPECTS = [0.25, 0.5, 0.75, 1, 1.5, 2, 3] # 7 aspects
__C.TRAIN.RPN_SCALES = [2, 2.82842712, 4, 5.65685425, 8, 11.3137085, 16, 22.627417, 32, 45.254834] # 2**np.arange(1, 6, 0.5), 10 scales
#
# Testing options
#
__C.TEST = edict()
__C.TEST.IS_PATCH = False
__C.TEST.VERTEX_REG = False
__C.TEST.VISUALIZE = False
# Scales to compute real features
__C.TEST.SCALES_BASE = (0.25, 0.5, 1.0, 2.0, 3.0)
# The number of scales per octave in the image pyramid
# An octave is the set of scales up to half of the initial scale
__C.TEST.NUM_PER_OCTAVE = 4
# Aspect ratio to use during testing
__C.TEST.ASPECTS = (1, 0.75, 0.5, 0.25)
# parameters for ROI generating
__C.TEST.SPATIAL_SCALE = 0.0625
__C.TEST.KERNEL_SIZE = 5
# Overlap threshold used for non-maximum suppression (suppress boxes with
# IoU >= this threshold)
__C.TEST.NMS = 0.5
# Experimental: treat the (K+1) units in the cls_score layer as linear
# predictors (trained, eg, with one-vs-rest SVMs).
__C.TEST.SVM = False
# Test using bounding-box regressors
__C.TEST.BBOX_REG = True
# Test using subclass
__C.TEST.SUBCLS = True
# Train using viewpoint
__C.TEST.VIEWPOINT = False
# Threshold of ROIs in testing
__C.TEST.ROI_THRESHOLD = 0.1
__C.TEST.ROI_THRESHOLD_NUM = 80000
__C.TEST.ROI_NUM = 2000
__C.TEST.DET_THRESHOLD = 0.0001
## NMS threshold used on RPN proposals
__C.TEST.RPN_NMS_THRESH = 0.7
## Number of top scoring boxes to keep before apply NMS to RPN proposals
__C.TEST.RPN_PRE_NMS_TOP_N = 6000
## Number of top scoring boxes to keep after applying NMS to RPN proposals
__C.TEST.RPN_POST_NMS_TOP_N = 300
# Proposal height and width both need to be greater than RPN_MIN_SIZE (at orig image scale)
__C.TEST.RPN_MIN_SIZE = 16
#
# MISC
#
# The mapping from image coordinates to feature map coordinates might cause
# some boxes that are distinct in image space to become identical in feature
# coordinates. If DEDUP_BOXES > 0, then DEDUP_BOXES is used as the scale factor
# for identifying duplicate boxes.
# 1/16 is correct for {Alex,Caffe}Net, VGG_CNN_M_1024, and VGG16
__C.DEDUP_BOXES = 1./16.
# Pixel mean values (BGR order) as a (1, 1, 3) array
# These are the values originally used for training VGG16
__C.PIXEL_MEANS = np.array([[[102.9801, 115.9465, 122.7717]]])
# For reproducibility
__C.RNG_SEED = 3
# A small number that's used many times
__C.EPS = 1e-14
# Root directory of project
__C.ROOT_DIR = osp.abspath(osp.join(osp.dirname(__file__), '..', '..'))
# Place outputs under an experiments directory
__C.EXP_DIR = 'default'
# Use GPU implementation of non-maximum suppression
__C.USE_GPU_NMS = True
# Default GPU device id
__C.GPU_ID = 0
def get_output_dir(imdb, net):
"""Return the directory where experimental artifacts are placed.
A canonical path is built using the name from an imdb and a network
(if not None).
"""
path = osp.abspath(osp.join(__C.ROOT_DIR, 'output', __C.EXP_DIR, imdb.name))
if net is None:
return path
else:
return osp.join(path, net.name)
def _add_more_info(is_train):
# compute all the scales
if is_train:
scales_base = __C.TRAIN.SCALES_BASE
num_per_octave = __C.TRAIN.NUM_PER_OCTAVE
else:
scales_base = __C.TEST.SCALES_BASE
num_per_octave = __C.TEST.NUM_PER_OCTAVE
num_scale_base = len(scales_base)
num = (num_scale_base - 1) * num_per_octave + 1
scales = []
for i in xrange(num):
index_scale_base = i / num_per_octave
sbase = scales_base[index_scale_base]
j = i % num_per_octave
if j == 0:
scales.append(sbase)
else:
sbase_next = scales_base[index_scale_base+1]
step = (sbase_next - sbase) / num_per_octave
scales.append(sbase + j * step)
if is_train:
__C.TRAIN.SCALES = scales
else:
__C.TEST.SCALES = scales
print scales
# map the scales to scales for RoI pooling of classification
if is_train:
kernel_size = __C.TRAIN.KERNEL_SIZE / __C.TRAIN.SPATIAL_SCALE
else:
kernel_size = __C.TEST.KERNEL_SIZE / __C.TEST.SPATIAL_SCALE
area = kernel_size * kernel_size
scales = np.array(scales)
areas = np.repeat(area, num) / (scales ** 2)
scaled_areas = areas[:, np.newaxis] * (scales[np.newaxis, :] ** 2)
diff_areas = np.abs(scaled_areas - 224 * 224)
levels = diff_areas.argmin(axis=1)
if is_train:
__C.TRAIN.SCALE_MAPPING = levels
else:
__C.TEST.SCALE_MAPPING = levels
# compute width and height of grid box
if is_train:
area = __C.TRAIN.KERNEL_SIZE * __C.TRAIN.KERNEL_SIZE
aspect = __C.TRAIN.ASPECTS # height / width
else:
area = __C.TEST.KERNEL_SIZE * __C.TEST.KERNEL_SIZE
aspect = __C.TEST.ASPECTS # height / width
num_aspect = len(aspect)
widths = np.zeros((num_aspect), dtype=np.float32)
heights = np.zeros((num_aspect), dtype=np.float32)
for i in xrange(num_aspect):
widths[i] = math.sqrt(area / aspect[i])
heights[i] = widths[i] * aspect[i]
if is_train:
__C.TRAIN.ASPECT_WIDTHS = widths
__C.TRAIN.ASPECT_HEIGHTS = heights
__C.TRAIN.RPN_SCALES = np.array(__C.TRAIN.RPN_SCALES)
else:
__C.TEST.ASPECT_WIDTHS = widths
__C.TEST.ASPECT_HEIGHTS = heights
def _merge_a_into_b(a, b):
"""Merge config dictionary a into config dictionary b, clobbering the
options in b whenever they are also specified in a.
"""
if type(a) is not edict:
return
for k, v in a.iteritems():
# a must specify keys that are in b
if not b.has_key(k):
raise KeyError('{} is not a valid config key'.format(k))
# the types must match, too
if type(b[k]) is not type(v):
raise ValueError(('Type mismatch ({} vs. {}) '
'for config key: {}').format(type(b[k]),
type(v), k))
# recursively merge dicts
if type(v) is edict:
try:
_merge_a_into_b(a[k], b[k])
except:
print('Error under config key: {}'.format(k))
raise
else:
b[k] = v
def cfg_from_file(filename):
"""Load a config file and merge it into the default options."""
import yaml
with open(filename, 'r') as f:
yaml_cfg = edict(yaml.load(f))
_merge_a_into_b(yaml_cfg, __C)
_add_more_info(1)
_add_more_info(0)
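# ---------------------------------------------------------------------------
# Usage sketch (not part of the original file): tools layer a YAML override on
# top of these defaults via cfg_from_file(), as described in the module
# docstring. The path handling below is illustrative; see experiments/cfgs/
# for real override files and tools/{train,test}_net.py for the real entry
# points.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    import sys
    if len(sys.argv) > 1:
        cfg_from_file(sys.argv[1])
    # a couple of options that override files commonly change
    print cfg.TRAIN.BATCH_SIZE
    print cfg.TEST.NMS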
| yuxng/Deep_ISM | ISM/lib/ism/config.py | Python | mit | 10,824 | 0.001663 |
"""
Parse spotify URLs
"""
from __future__ import unicode_literals
import re
import logging
log = logging.getLogger('spotify')
def handle_privmsg(bot, user, channel, args):
"""Grab Spotify URLs from the messages and handle them"""
m = re.match(".*(http:\/\/open.spotify.com\/|spotify:)(?P<item>album|artist|track|user[:\/]\S+[:\/]playlist)[:\/](?P<id>[a-zA-Z0-9]+)\/?.*", args)
if not m:
return None
spotify_id = m.group('id')
item = m.group('item').replace(':', '/').split('/')
item[0] += 's'
if item[0] == 'users':
# All playlists seem to return 401 at the time, even the public ones
return None
apiurl = "https://api.spotify.com/v1/%s/%s" % ('/'.join(item), spotify_id)
r = bot.get_url(apiurl)
if r.status_code != 200:
if r.status_code not in [401, 403]:
            log.warning('Spotify API returned %s while trying to fetch %s', r.status_code, apiurl)
return
data = r.json()
title = '[Spotify] '
if item[0] in ['albums', 'tracks']:
artists = []
for artist in data['artists']:
artists.append(artist['name'])
title += ', '.join(artists)
if item[0] == 'albums':
title += ' - %s (%s)' % (data['name'], data['release_date'])
if item[0] == 'artists':
title += data['name']
genres_n = len(data['genres'])
if genres_n > 0:
genitive = 's' if genres_n > 1 else ''
genres = data['genres'][0:4]
            # only the first four genres are shown, so report how many were omitted
            more = ' +%s more' % (genres_n - 4) if genres_n > 4 else ''
title += ' (Genre%s: %s%s)' % (genitive, ', '.join(genres), more)
if item[0] == 'tracks':
title += ' - %s - %s' % (data['album']['name'], data['name'])
return bot.say(channel, title)
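# ---------------------------------------------------------------------------
# Self-test sketch (not part of pyfibot, illustration only): a stub bot that
# provides just the two methods handle_privmsg() actually calls, plus a canned
# Spotify API payload. The track data below is made up.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    class _FakeResponse(object):
        status_code = 200

        def json(self):
            return {'name': 'Some Track',
                    'artists': [{'name': 'Some Artist'}],
                    'album': {'name': 'Some Album'}}

    class _FakeBot(object):
        def get_url(self, url):
            return _FakeResponse()

        def say(self, channel, message):
            print '%s: %s' % (channel, message)

    handle_privmsg(_FakeBot(), 'nick', '#channel',
                   u'check this out spotify:track:6rqhFgbbKwnb9MLmUQDhG6')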
| EArmour/pyfibot | pyfibot/modules/module_spotify.py | Python | bsd-3-clause | 1,777 | 0.005627 |
from django import template
from django.template.loader import render_to_string
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.contrib import comments
from django.utils.encoding import smart_unicode
register = template.Library()
class BaseCommentNode(template.Node):
"""
Base helper class (abstract) for handling the get_comment_* template tags.
Looks a bit strange, but the subclasses below should make this a bit more
obvious.
"""
#@classmethod
def handle_token(cls, parser, token):
"""Class method to parse get_comment_list/count/form and return a Node."""
tokens = token.contents.split()
if tokens[1] != 'for':
raise template.TemplateSyntaxError("Second argument in %r tag must be 'for'" % tokens[0])
# {% get_whatever for obj as varname %}
if len(tokens) == 5:
if tokens[3] != 'as':
raise template.TemplateSyntaxError("Third argument in %r must be 'as'" % tokens[0])
return cls(
object_expr = parser.compile_filter(tokens[2]),
as_varname = tokens[4],
)
# {% get_whatever for app.model pk as varname %}
elif len(tokens) == 6:
if tokens[4] != 'as':
raise template.TemplateSyntaxError("Fourth argument in %r must be 'as'" % tokens[0])
return cls(
ctype = BaseCommentNode.lookup_content_type(tokens[2], tokens[0]),
object_pk_expr = parser.compile_filter(tokens[3]),
as_varname = tokens[5]
)
else:
raise template.TemplateSyntaxError("%r tag requires 4 or 5 arguments" % tokens[0])
handle_token = classmethod(handle_token)
#@staticmethod
def lookup_content_type(token, tagname):
try:
app, model = token.split('.')
return ContentType.objects.get(app_label=app, model=model)
except ValueError:
raise template.TemplateSyntaxError("Third argument in %r must be in the format 'app.model'" % tagname)
except ContentType.DoesNotExist:
raise template.TemplateSyntaxError("%r tag has non-existant content-type: '%s.%s'" % (tagname, app, model))
lookup_content_type = staticmethod(lookup_content_type)
def __init__(self, ctype=None, object_pk_expr=None, object_expr=None, as_varname=None, comment=None):
if ctype is None and object_expr is None:
raise template.TemplateSyntaxError("Comment nodes must be given either a literal object or a ctype and object pk.")
self.comment_model = comments.get_model()
self.as_varname = as_varname
self.ctype = ctype
self.object_pk_expr = object_pk_expr
self.object_expr = object_expr
self.comment = comment
def render(self, context):
qs = self.get_query_set(context)
context[self.as_varname] = self.get_context_value_from_queryset(context, qs)
return ''
def get_query_set(self, context):
ctype, object_pk = self.get_target_ctype_pk(context)
if not object_pk:
return self.comment_model.objects.none()
qs = self.comment_model.objects.filter(
content_type = ctype,
object_pk = smart_unicode(object_pk),
site__pk = settings.SITE_ID,
)
# The is_public and is_removed fields are implementation details of the
# built-in comment model's spam filtering system, so they might not
# be present on a custom comment model subclass. If they exist, we
# should filter on them.
field_names = [f.name for f in self.comment_model._meta.fields]
if 'is_public' in field_names:
qs = qs.filter(is_public=True)
if getattr(settings, 'COMMENTS_HIDE_REMOVED', True) and 'is_removed' in field_names:
qs = qs.filter(is_removed=False)
return qs
def get_target_ctype_pk(self, context):
if self.object_expr:
try:
obj = self.object_expr.resolve(context)
except template.VariableDoesNotExist:
return None, None
return ContentType.objects.get_for_model(obj), obj.pk
else:
return self.ctype, self.object_pk_expr.resolve(context, ignore_failures=True)
def get_context_value_from_queryset(self, context, qs):
"""Subclasses should override this."""
raise NotImplementedError
class CommentListNode(BaseCommentNode):
"""Insert a list of comments into the context."""
def get_context_value_from_queryset(self, context, qs):
return list(qs)
class CommentCountNode(BaseCommentNode):
"""Insert a count of comments into the context."""
def get_context_value_from_queryset(self, context, qs):
return qs.count()
class CommentFormNode(BaseCommentNode):
"""Insert a form for the comment model into the context."""
def get_form(self, context):
ctype, object_pk = self.get_target_ctype_pk(context)
if object_pk:
return comments.get_form()(ctype.get_object_for_this_type(pk=object_pk))
else:
return None
def render(self, context):
context[self.as_varname] = self.get_form(context)
return ''
class RenderCommentFormNode(CommentFormNode):
"""Render the comment form directly"""
#@classmethod
def handle_token(cls, parser, token):
"""Class method to parse render_comment_form and return a Node."""
tokens = token.contents.split()
if tokens[1] != 'for':
raise template.TemplateSyntaxError("Second argument in %r tag must be 'for'" % tokens[0])
# {% render_comment_form for obj %}
if len(tokens) == 3:
return cls(object_expr=parser.compile_filter(tokens[2]))
# {% render_comment_form for app.models pk %}
elif len(tokens) == 4:
return cls(
ctype = BaseCommentNode.lookup_content_type(tokens[2], tokens[0]),
object_pk_expr = parser.compile_filter(tokens[3])
)
handle_token = classmethod(handle_token)
def render(self, context):
ctype, object_pk = self.get_target_ctype_pk(context)
if object_pk:
template_search_list = [
"comments/%s/%s/form.html" % (ctype.app_label, ctype.model),
"comments/%s/form.html" % ctype.app_label,
"comments/form.html"
]
context.push()
formstr = render_to_string(template_search_list, {"form" : self.get_form(context)}, context)
context.pop()
return formstr
else:
return ''
class RenderCommentListNode(CommentListNode):
"""Render the comment list directly"""
#@classmethod
def handle_token(cls, parser, token):
"""Class method to parse render_comment_list and return a Node."""
tokens = token.contents.split()
if tokens[1] != 'for':
raise template.TemplateSyntaxError("Second argument in %r tag must be 'for'" % tokens[0])
# {% render_comment_list for obj %}
if len(tokens) == 3:
return cls(object_expr=parser.compile_filter(tokens[2]))
# {% render_comment_list for app.models pk %}
elif len(tokens) == 4:
return cls(
ctype = BaseCommentNode.lookup_content_type(tokens[2], tokens[0]),
object_pk_expr = parser.compile_filter(tokens[3])
)
handle_token = classmethod(handle_token)
def render(self, context):
ctype, object_pk = self.get_target_ctype_pk(context)
if object_pk:
template_search_list = [
"comments/%s/%s/list.html" % (ctype.app_label, ctype.model),
"comments/%s/list.html" % ctype.app_label,
"comments/list.html"
]
qs = self.get_query_set(context)
context.push()
liststr = render_to_string(template_search_list, {
"comment_list" : self.get_context_value_from_queryset(context, qs)
}, context)
context.pop()
return liststr
else:
return ''
# We could just register each classmethod directly, but then we'd lose out on
# the automagic docstrings-into-admin-docs tricks. So each node gets a cute
# wrapper function that just exists to hold the docstring.
#@register.tag
def get_comment_count(parser, token):
"""
Gets the comment count for the given params and populates the template
context with a variable containing that value, whose name is defined by the
'as' clause.
Syntax::
{% get_comment_count for [object] as [varname] %}
{% get_comment_count for [app].[model] [object_id] as [varname] %}
Example usage::
{% get_comment_count for event as comment_count %}
{% get_comment_count for calendar.event event.id as comment_count %}
{% get_comment_count for calendar.event 17 as comment_count %}
"""
return CommentCountNode.handle_token(parser, token)
#@register.tag
def get_comment_list(parser, token):
"""
Gets the list of comments for the given params and populates the template
context with a variable containing that value, whose name is defined by the
'as' clause.
Syntax::
{% get_comment_list for [object] as [varname] %}
{% get_comment_list for [app].[model] [object_id] as [varname] %}
Example usage::
{% get_comment_list for event as comment_list %}
{% for comment in comment_list %}
...
{% endfor %}
"""
return CommentListNode.handle_token(parser, token)
#@register.tag
def render_comment_list(parser, token):
"""
Render the comment list (as returned by ``{% get_comment_list %}``)
through the ``comments/list.html`` template
Syntax::
{% render_comment_list for [object] %}
{% render_comment_list for [app].[model] [object_id] %}
Example usage::
{% render_comment_list for event %}
"""
return RenderCommentListNode.handle_token(parser, token)
#@register.tag
def get_comment_form(parser, token):
"""
Get a (new) form object to post a new comment.
Syntax::
{% get_comment_form for [object] as [varname] %}
{% get_comment_form for [app].[model] [object_id] as [varname] %}
"""
return CommentFormNode.handle_token(parser, token)
#@register.tag
def render_comment_form(parser, token):
"""
Render the comment form (as returned by ``{% render_comment_form %}``) through
the ``comments/form.html`` template.
Syntax::
{% render_comment_form for [object] %}
{% render_comment_form for [app].[model] [object_id] %}
"""
return RenderCommentFormNode.handle_token(parser, token)
#@register.simple_tag
def comment_form_target():
"""
Get the target URL for the comment form.
Example::
<form action="{% comment_form_target %}" method="post">
"""
return comments.get_form_target()
#@register.simple_tag
def get_comment_permalink(comment, anchor_pattern=None):
"""
Get the permalink for a comment, optionally specifying the format of the
named anchor to be appended to the end of the URL.
Example::
{{ get_comment_permalink comment "#c%(id)s-by-%(user_name)s" }}
"""
if anchor_pattern:
return comment.get_absolute_url(anchor_pattern)
return comment.get_absolute_url()
register.tag(get_comment_count)
register.tag(get_comment_list)
register.tag(get_comment_form)
register.tag(render_comment_form)
register.simple_tag(comment_form_target)
register.simple_tag(get_comment_permalink)
register.tag(render_comment_list)
| hunch/hunch-gift-app | django/contrib/comments/templatetags/comments.py | Python | mit | 12,178 | 0.006077 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from novel import serial, utils
BASE_URL = 'http://www.quanben5.com/n/{}/xiaoshuo.html'
class Quanben5(serial.SerialNovel):
def __init__(self, tid):
super().__init__(utils.base_to_url(BASE_URL, tid), '#content',
intro_sel='.description',
chap_type=serial.ChapterType.path,
chap_sel='.list li',
tid=tid)
def get_title_and_author(self):
name = self.doc('h1').text()
author = self.doc('.author').text()
return name, author
| wangjiezhe/FetchNovels | novel/sources/quanben5.py | Python | gpl-3.0 | 612 | 0 |