text stringlengths 6-947k | repo_name stringlengths 5-100 | path stringlengths 4-231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6-947k | score float64 0-0.34 |
---|---|---|---|---|---|---|
import json
from django.db import models
from django.conf import settings
from django.utils.six import with_metaclass, text_type
from django.utils.translation import ugettext_lazy as _
from . import SirTrevorContent
from .forms import SirTrevorFormField
class SirTrevorField(with_metaclass(models.SubfieldBase, models.Field)):
description = _("TODO")
def get_internal_type(self):
return 'TextField'
def formfield(self, **kwargs):
defaults = {
'form_class': SirTrevorFormField
}
defaults.update(kwargs)
return super(SirTrevorField, self).formfield(**defaults)
def to_python(self, value):
return SirTrevorContent(value)
def get_db_prep_value(self, value, connection, prepared=False):
return text_type(value)
if 'south' in settings.INSTALLED_APPS:
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ["^sirtrevor\.fields\.SirTrevorField"])
| zerc/django-sirtrevor | sirtrevor/fields.py | Python | mit | 974 | 0.00308 |
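For context, a minimal sketch of how a custom field like the SirTrevorField above would typically be attached to a Django model. The model and field names below are hypothetical, not part of the original file.
# Hypothetical usage sketch for the SirTrevorField defined above (model name assumed).
from django.db import models
from sirtrevor.fields import SirTrevorField

class Article(models.Model):
    title = models.CharField(max_length=200)
    # Stored as a text column (get_internal_type() returns 'TextField');
    # to_python() wraps the value in SirTrevorContent and formfield()
    # renders it with SirTrevorFormField in forms and the admin.
    content = SirTrevorField(blank=True)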
import sys
from resources.datatables import WeaponType
def setup(core, object):
object.setStfFilename('static_item_n')
object.setStfName('weapon_pistol_trader_roadmap_01_02')
object.setDetailFilename('static_item_d')
object.setDetailName('weapon_pistol_trader_roadmap_01_02')
object.setStringAttribute('class_required', 'Trader')
object.setIntAttribute('required_combat_level', 62)
object.setAttackSpeed(0.4);
object.setMaxRange(35);
object.setDamageType("energy");
object.setMinDamage(250);
object.setMaxDamage(500);
object.setWeaponType(WeaponType.Pistol);
return
| ProjectSWGCore/NGECore2 | scripts/object/weapon/ranged/pistol/weapon_pistol_trader_roadmap_01_02.py | Python | lgpl-3.0 | 580 | 0.037931 |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import warnings
warnings.warn("the ``irsa_dust`` module has been moved to "
"astroquery.ipac.irsa.irsa_dust, "
"please update your imports.", DeprecationWarning, stacklevel=2)
from astroquery.ipac.irsa.irsa_dust import *
| ceb8/astroquery | astroquery/irsa_dust/__init__.py | Python | bsd-3-clause | 317 | 0.003155 |
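The module above is only a deprecation shim: importing it re-exports everything from the new location and emits a DeprecationWarning. A small sketch of how a caller could surface that warning in a fresh interpreter; the warning-filter setup here is an assumption for illustration, not part of astroquery.
# Sketch: making the shim's DeprecationWarning visible (filter setup is an assumption).
import warnings

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always", DeprecationWarning)
    import astroquery.irsa_dust  # triggers warnings.warn() in __init__.py on first import
    assert any(issubclass(w.category, DeprecationWarning) for w in caught)
# New code should import from astroquery.ipac.irsa.irsa_dust directly.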
"""
Unit tests over SQLite backend for Crash Database
"""
from apport.report import Report
import os
from unittest import TestCase
from sqlite import CrashDatabase
class CrashDatabaseTestCase(TestCase):
def setUp(self):
self.crash_base = os.path.sep + 'tmp'
self.crash_base_url = 'file://' + self.crash_base + '/'
self.crash_path = os.path.join(self.crash_base, 'test.crash')
self.r = Report()
self.r['ExecutablePath'] = '/usr/bin/napoleon-solod'
self.r['Package'] = 'libnapoleon-solo1 1.2-1'
self.r['Signal'] = '11'
self.r['StacktraceTop'] = """foo_bar (x=2) at crash.c:28
d01 (x=3) at crash.c:29
raise () from /lib/libpthread.so.0
<signal handler called>
__frob (x=4) at crash.c:30"""
def tearDown(self):
if os.path.exists(self.crash_path):
os.unlink(self.crash_path)
exe_crash_base = os.path.join(self.crash_base, '1_usr_bin_napoleon-solod')
if os.path.exists(exe_crash_base):
os.unlink(exe_crash_base)
def test_create_db_default(self):
try:
CrashDatabase(None, {})
self.assertTrue(os.path.isfile(os.path.expanduser('~/crashdb.sqlite')))
finally:
os.unlink(os.path.expanduser('~/crashdb.sqlite'))
def test_crashes_base_url(self):
cb = CrashDatabase(None, {'dbfile': ':memory:', 'crashes_base_url': self.crash_base_url})
self.assertEqual(cb.base_url, self.crash_base_url)
def test_crashes_base_url_is_none(self):
cb = CrashDatabase(None, {'dbfile': ':memory:'})
self.assertIsNone(cb.base_url)
def test_upload_download(self):
cb = CrashDatabase(None, {'dbfile': ':memory:', 'crashes_base_url': self.crash_base_url})
crash_id = cb.upload(self.r)
self.assertEqual(crash_id, 1)
report = cb.download(1)
self.assertIsInstance(report, Report)
self.assertIn('Signal', report)
self.assertEqual(report['Signal'], '11')
def test_failed_upload_no_URL(self):
cb = CrashDatabase(None, {'dbfile': ':memory:'})
self.assertRaises(ValueError, cb.upload, self.r)
def test_failed_upload_invalid_URL_scheme(self):
cb = CrashDatabase(None, {'dbfile': ':memory:'})
self.r['_URL'] = 'invalid://scheme/path'
self.assertRaises(ValueError, cb.upload, self.r)
def test_failed_download(self):
cb = CrashDatabase(None, {'dbfile': ':memory:'})
self.assertRaises(Exception, cb.download, 23232)
def test_get_id_url(self):
cb = CrashDatabase(None, {'dbfile': ':memory:'})
self.assertEqual("#1", cb.get_id_url(None, 1))
self.assertEqual("#1: napoleon-solod crashed with SIGSEGV in foo_bar()",
cb.get_id_url(self.r, 1))
def test_update(self):
"""
Test complete update
"""
cb = CrashDatabase(None, {'dbfile': ':memory:', 'crashes_base_url': self.crash_base_url})
crash_id = cb.upload(self.r)
self.r['SourcePackage'] = 'adios'
self.r['Signal'] = u'9'
cb.update(crash_id, self.r, 'a comment to add')
report = cb.download(crash_id)
self.assertIn('SourcePackage', report)
self.assertEqual(report['Signal'], u'9')
def test_update_with_key_filter(self):
"""
Test a partial update
"""
cb = CrashDatabase(None, {'dbfile': ':memory:', 'crashes_base_url': self.crash_base_url})
crash_id = cb.upload(self.r)
self.r['SourcePackage'] = 'adios'
self.r['Signal'] = u'9'
cb.update(crash_id, self.r, 'a comment to add', key_filter=('Package', 'SourcePackage'))
report = cb.download(crash_id)
self.assertIn('SourcePackage', report)
self.assertNotEqual(report['Signal'], u'9')
def test_failed_update_no_URL(self):
cb = CrashDatabase(None, {'dbfile': ':memory:'})
self.r['_URL'] = self.crash_base_url + 'test.crash'
crash_id = cb.upload(self.r)
del self.r['_URL']
self.assertRaises(ValueError, cb.update, *(crash_id, self.r, 'comment'))
def test_get_distro_release(self):
cb = CrashDatabase(None, {'dbfile': ':memory:', 'crashes_base_url': self.crash_base_url})
crash_id = cb.upload(self.r)
self.assertIsNone(cb.get_distro_release(crash_id))
self.r['DistroRelease'] = 'Ubuntu 14.04'
crash_id = cb.upload(self.r)
self.assertEqual(cb.get_distro_release(crash_id), 'Ubuntu 14.04')
def test_get_unretraced(self):
cb = CrashDatabase(None, {'dbfile': ':memory:', 'crashes_base_url': self.crash_base_url})
self.assertEqual(cb.get_unretraced(), [])
crash_id = cb.upload(self.r)
self.assertEqual(cb.get_unretraced(), [crash_id])
self.r['Stacktrace'] = """
#0 0x00007f96dcfb9f77 in __GI_raise (sig=sig@entry=6) at ../nptl/sysdeps/unix/sysv/linux/raise.c:56
resultvar = 0
pid = 1427
selftid = 1427
#1 0x00007f96dcfbd5e8 in __GI_abort () at abort.c:90
save_stage = 2
act = {__sigaction_handler = {sa_handler = 0x0, sa_sigaction = 0x0}, sa_mask = {__val = {140286034336064, 140285996709792, 140285998988405, 5, 0, 752786625060479084, 140285929102568, 140285994568476, 140285996709792, 140285459489344, 140285999015717, 140285994520128, 140285996776629, 140285996776368, 140733249635424, 6}}, sa_flags = 56247888, sa_restorer = 0x18}
sigs = {__val = {32, 0 <repeats 15 times>}}
#2 0x00007f96e0deccbc in smb_panic_default (why=0x7f96e0df8b1c "internal error") at ../lib/util/fault.c:149
No locals.
#3 smb_panic (why=why@entry=0x7f96e0df8b1c "internal error") at ../lib/util/fault.c:162
No locals.
#4 0x00007f96e0dece76 in fault_report (sig=<optimized out>) at ../lib/util/fault.c:77
counter = 1
#5 sig_fault (sig=<optimized out>) at ../lib/util/fault.c:88
No locals.
#6 <signal handler called>
No locals.
#7 0x00007f96b9bae711 in sarray_get_safe (indx=<optimized out>, array=<optimized out>) at /build/buildd/gcc-4.8-4.8.1/src/libobjc/objc-private/sarray.h:237
No locals.
#8 objc_msg_lookup (receiver=0x7f96e3485278, op=0x7f96c0fae240 <_OBJC_SELECTOR_TABLE+128>) at /build/buildd/gcc-4.8-4.8.1/src/libobjc/sendmsg.c:448
No locals.
#9 0x00007f96c0da737a in sogo_table_get_row (table_object=<optimized out>, mem_ctx=0x7f96e33e5940, query_type=MAPISTORE_PREFILTERED_QUERY, row_id=1, data=0x7fff035a4e00) at MAPIStoreSOGo.m:1464
e = <optimized out>
ret = MAPISTORE_SUCCESS
wrapper = <optimized out>
pool = 0x7f96e3485278
table = <optimized out>
rc = 0
__FUNCTION__ = "sogo_table_get_row"
__PRETTY_FUNCTION__ = "sogo_table_get_row"
"""
cb.update(crash_id, self.r, "")
self.assertEqual(cb.get_unretraced(), [])
self.r['Stacktrace'] = """#8 0x00007ff5aae8e159 in ldb_msg_find_ldb_val (msg=<optimised out>, attr_name=<optimised out>) at ../common/ldb_msg.c:399
el = <optimised out>
#9 0x00007ff5aae8e669 in ldb_msg_find_attr_as_string (msg=<optimised out>, attr_name=<optimised out>, default_value=0x0) at ../common/ldb_msg.c:584
v = <optimised out>
#10 0x00007ff5905d0e5f in ?? ()
No symbol table info available.
#11 0x0000000000000081 in ?? ()
No symbol table info available.
#12 0x0000000000000000 in ?? ()
No symbol table info available."""
cb.update(crash_id, self.r, "")
self.assertEqual(cb.get_unretraced(), [crash_id])
def test_get_unfixed(self):
cb = CrashDatabase(None, {'dbfile': ':memory:', 'crashes_base_url': self.crash_base_url})
self.assertEqual(cb.get_unfixed(), set())
crash_id = cb.upload(self.r)
self.assertEqual(cb.get_unfixed(), set([crash_id]))
cb.close_duplicate(self.r, crash_id, crash_id)
self.assertEqual(cb.get_unfixed(), set())
def test_close_duplicate(self):
cb = CrashDatabase(None, {'dbfile': ':memory:', 'crashes_base_url': self.crash_base_url})
crash_id = cb.upload(self.r)
self.assertIsNone(cb.duplicate_of(crash_id))
crash_id2 = cb.upload(self.r)
self.assertIsNone(cb.duplicate_of(crash_id2))
cb.close_duplicate(self.r, crash_id2, crash_id)
self.assertEqual(cb.duplicate_of(crash_id2), crash_id)
# Remove current duplicate thing
cb.close_duplicate(self.r, crash_id2, None)
self.assertIsNone(cb.duplicate_of(crash_id2))
# Tests related with components
def test_app_components_get_set(self):
cb = CrashDatabase(None, {'dbfile': ':memory:', 'crashes_base_url': self.crash_base_url})
crash_id = cb.upload(self.r)
self.assertEqual(cb.get_app_components(crash_id), [])
cb.set_app_components(crash_id, ['sand'])
self.assertEqual(cb.get_app_components(crash_id), ['sand'])
cb.set_app_components(crash_id, ['sand'])
self.assertEqual(cb.get_app_components(crash_id), ['sand'])
def test_app_components_remove(self):
cb = CrashDatabase(None, {'dbfile': ':memory:', 'crashes_base_url': self.crash_base_url})
crash_id = cb.upload(self.r)
self.assertRaises(ValueError, cb.remove_app_component, *(crash_id, 'sand'))
self.assertIsNone(cb.remove_app_component(crash_id))
cb.set_app_components(crash_id, ['sand'])
self.assertIsNone(cb.remove_app_component(crash_id, 'sand'))
self.assertEqual(cb.get_app_components(crash_id), [])
cb.set_app_components(crash_id, ['sand'])
self.assertIsNone(cb.remove_app_component(crash_id))
self.assertEqual(cb.get_app_components(crash_id), [])
| icandigitbaby/openchange | script/bug-analysis/test_sqlite.py | Python | gpl-3.0 | 9,687 | 0.002684 |
# Copyright (c) 2012 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import os
import mock
from oslo.config import cfg
from neutron.agent.linux import ip_lib
from neutron.agent.linux import utils
from neutron.common import constants
from neutron.common import exceptions
from neutron.plugins.common import constants as p_const
from neutron.plugins.linuxbridge.agent import linuxbridge_neutron_agent
from neutron.plugins.linuxbridge.common import constants as lconst
from neutron.tests import base
LOCAL_IP = '192.168.0.33'
DEVICE_1 = 'tapabcdef01-12'
class FakeIpLinkCommand(object):
def set_up(self):
pass
class FakeIpDevice(object):
def __init__(self):
self.link = FakeIpLinkCommand()
class TestLinuxBridge(base.BaseTestCase):
def setUp(self):
super(TestLinuxBridge, self).setUp()
interface_mappings = {'physnet1': 'eth1'}
root_helper = cfg.CONF.AGENT.root_helper
self.linux_bridge = linuxbridge_neutron_agent.LinuxBridgeManager(
interface_mappings, root_helper)
def test_ensure_physical_in_bridge_invalid(self):
result = self.linux_bridge.ensure_physical_in_bridge('network_id',
p_const.TYPE_VLAN,
'physnetx',
7)
self.assertFalse(result)
def test_ensure_physical_in_bridge_flat(self):
with mock.patch.object(self.linux_bridge,
'ensure_flat_bridge') as flat_bridge_func:
self.linux_bridge.ensure_physical_in_bridge(
'network_id', p_const.TYPE_FLAT, 'physnet1', None)
self.assertTrue(flat_bridge_func.called)
def test_ensure_physical_in_bridge_vlan(self):
with mock.patch.object(self.linux_bridge,
'ensure_vlan_bridge') as vlan_bridge_func:
self.linux_bridge.ensure_physical_in_bridge(
'network_id', p_const.TYPE_VLAN, 'physnet1', 7)
self.assertTrue(vlan_bridge_func.called)
def test_ensure_physical_in_bridge_vxlan(self):
self.linux_bridge.vxlan_mode = lconst.VXLAN_UCAST
with mock.patch.object(self.linux_bridge,
'ensure_vxlan_bridge') as vxlan_bridge_func:
self.linux_bridge.ensure_physical_in_bridge(
'network_id', 'vxlan', 'physnet1', 7)
self.assertTrue(vxlan_bridge_func.called)
class TestLinuxBridgeAgent(base.BaseTestCase):
LINK_SAMPLE = [
'1: lo: <LOOPBACK,UP,LOWER_UP> mtu 16436 qdisc noqueue \\'
'state UNKNOWN \\'
'link/loopback 00:00:00:00:00:00 brd 00:00:00:00:00:00',
'2: eth77: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 \\'
'qdisc mq state UP qlen 1000\ link/ether \\'
'cc:dd:ee:ff:ab:cd brd ff:ff:ff:ff:ff:ff']
def setUp(self):
super(TestLinuxBridgeAgent, self).setUp()
# disable setting up periodic state reporting
cfg.CONF.set_override('report_interval', 0, 'AGENT')
cfg.CONF.set_default('firewall_driver',
'neutron.agent.firewall.NoopFirewallDriver',
group='SECURITYGROUP')
self.execute_p = mock.patch.object(ip_lib.IPWrapper, '_execute')
self.execute = self.execute_p.start()
self.execute.return_value = '\n'.join(self.LINK_SAMPLE)
self.get_mac_p = mock.patch('neutron.agent.linux.utils.'
'get_interface_mac')
self.get_mac = self.get_mac_p.start()
self.get_mac.return_value = '00:00:00:00:00:01'
self.agent = linuxbridge_neutron_agent.LinuxBridgeNeutronAgentRPC({},
0,
None)
def test_treat_devices_removed_with_existed_device(self):
agent = linuxbridge_neutron_agent.LinuxBridgeNeutronAgentRPC({},
0,
None)
devices = [DEVICE_1]
with contextlib.nested(
mock.patch.object(agent.plugin_rpc, "update_device_down"),
mock.patch.object(agent, "remove_devices_filter")
) as (fn_udd, fn_rdf):
fn_udd.return_value = {'device': DEVICE_1,
'exists': True}
with mock.patch.object(linuxbridge_neutron_agent.LOG,
'info') as log:
resync = agent.treat_devices_removed(devices)
self.assertEqual(2, log.call_count)
self.assertFalse(resync)
self.assertTrue(fn_udd.called)
self.assertTrue(fn_rdf.called)
def test_treat_devices_removed_with_not_existed_device(self):
agent = linuxbridge_neutron_agent.LinuxBridgeNeutronAgentRPC({},
0,
None)
devices = [DEVICE_1]
with contextlib.nested(
mock.patch.object(agent.plugin_rpc, "update_device_down"),
mock.patch.object(agent, "remove_devices_filter")
) as (fn_udd, fn_rdf):
fn_udd.return_value = {'device': DEVICE_1,
'exists': False}
with mock.patch.object(linuxbridge_neutron_agent.LOG,
'debug') as log:
resync = agent.treat_devices_removed(devices)
self.assertEqual(1, log.call_count)
self.assertFalse(resync)
self.assertTrue(fn_udd.called)
self.assertTrue(fn_rdf.called)
def test_treat_devices_removed_failed(self):
agent = linuxbridge_neutron_agent.LinuxBridgeNeutronAgentRPC({},
0,
None)
devices = [DEVICE_1]
with contextlib.nested(
mock.patch.object(agent.plugin_rpc, "update_device_down"),
mock.patch.object(agent, "remove_devices_filter")
) as (fn_udd, fn_rdf):
fn_udd.side_effect = Exception()
with mock.patch.object(linuxbridge_neutron_agent.LOG,
'debug') as log:
resync = agent.treat_devices_removed(devices)
self.assertEqual(2, log.call_count)
self.assertTrue(resync)
self.assertTrue(fn_udd.called)
self.assertTrue(fn_rdf.called)
def _test_scan_devices(self, previous, updated,
fake_current, expected, sync):
self.agent.br_mgr = mock.Mock()
self.agent.br_mgr.get_tap_devices.return_value = fake_current
self.agent.updated_devices = updated
results = self.agent.scan_devices(previous, sync)
self.assertEqual(expected, results)
def test_scan_devices_no_changes(self):
previous = {'current': set([1, 2]),
'updated': set(),
'added': set(),
'removed': set()}
fake_current = set([1, 2])
updated = set()
expected = {'current': set([1, 2]),
'updated': set(),
'added': set(),
'removed': set()}
self._test_scan_devices(previous, updated, fake_current, expected,
sync=False)
def test_scan_devices_added_removed(self):
previous = {'current': set([1, 2]),
'updated': set(),
'added': set(),
'removed': set()}
fake_current = set([2, 3])
updated = set()
expected = {'current': set([2, 3]),
'updated': set(),
'added': set([3]),
'removed': set([1])}
self._test_scan_devices(previous, updated, fake_current, expected,
sync=False)
def test_scan_devices_removed_retried_on_sync(self):
previous = {'current': set([2, 3]),
'updated': set(),
'added': set(),
'removed': set([1])}
fake_current = set([2, 3])
updated = set()
expected = {'current': set([2, 3]),
'updated': set(),
'added': set([2, 3]),
'removed': set([1])}
self._test_scan_devices(previous, updated, fake_current, expected,
sync=True)
def test_scan_devices_vanished_removed_on_sync(self):
previous = {'current': set([2, 3]),
'updated': set(),
'added': set(),
'removed': set([1])}
# Device 2 disappeared.
fake_current = set([3])
updated = set()
# Device 1 should be retried.
expected = {'current': set([3]),
'updated': set(),
'added': set([3]),
'removed': set([1, 2])}
self._test_scan_devices(previous, updated, fake_current, expected,
sync=True)
def test_scan_devices_updated(self):
previous = {'current': set([1, 2]),
'updated': set(),
'added': set(),
'removed': set()}
fake_current = set([1, 2])
updated = set([1])
expected = {'current': set([1, 2]),
'updated': set([1]),
'added': set(),
'removed': set()}
self._test_scan_devices(previous, updated, fake_current, expected,
sync=False)
def test_scan_devices_updated_non_existing(self):
previous = {'current': set([1, 2]),
'updated': set(),
'added': set(),
'removed': set()}
fake_current = set([1, 2])
updated = set([3])
expected = {'current': set([1, 2]),
'updated': set(),
'added': set(),
'removed': set()}
self._test_scan_devices(previous, updated, fake_current, expected,
sync=False)
def test_scan_devices_updated_on_sync(self):
previous = {'current': set([1, 2]),
'updated': set([1]),
'added': set(),
'removed': set()}
fake_current = set([1, 2])
updated = set([2])
expected = {'current': set([1, 2]),
'updated': set([1, 2]),
'added': set([1, 2]),
'removed': set()}
self._test_scan_devices(previous, updated, fake_current, expected,
sync=True)
def test_process_network_devices(self):
agent = self.agent
device_info = {'current': set(),
'added': set(['tap3', 'tap4']),
'updated': set(['tap2', 'tap3']),
'removed': set(['tap1'])}
agent.prepare_devices_filter = mock.Mock()
agent.refresh_firewall = mock.Mock()
agent.treat_devices_added_updated = mock.Mock(return_value=False)
agent.treat_devices_removed = mock.Mock(return_value=False)
agent.process_network_devices(device_info)
agent.prepare_devices_filter.assert_called_with(set(['tap3', 'tap4']))
self.assertTrue(agent.refresh_firewall.called)
agent.treat_devices_added_updated.assert_called_with(set(['tap2',
'tap3',
'tap4']))
agent.treat_devices_removed.assert_called_with(set(['tap1']))
def test_treat_devices_added_updated_admin_state_up_true(self):
agent = self.agent
mock_details = {'device': 'dev123',
'port_id': 'port123',
'network_id': 'net123',
'admin_state_up': True,
'network_type': 'vlan',
'segmentation_id': 100,
'physical_network': 'physnet1'}
agent.plugin_rpc = mock.Mock()
agent.plugin_rpc.get_devices_details_list.return_value = [mock_details]
agent.br_mgr = mock.Mock()
agent.br_mgr.add_interface.return_value = True
resync_needed = agent.treat_devices_added_updated(set(['tap1']))
self.assertFalse(resync_needed)
agent.br_mgr.add_interface.assert_called_with('net123', 'vlan',
'physnet1', 100,
'port123')
self.assertTrue(agent.plugin_rpc.update_device_up.called)
def test_treat_devices_added_updated_admin_state_up_false(self):
agent = self.agent
mock_details = {'device': 'dev123',
'port_id': 'port123',
'network_id': 'net123',
'admin_state_up': False,
'network_type': 'vlan',
'segmentation_id': 100,
'physical_network': 'physnet1'}
agent.plugin_rpc = mock.Mock()
agent.plugin_rpc.get_devices_details_list.return_value = [mock_details]
agent.remove_port_binding = mock.Mock()
resync_needed = agent.treat_devices_added_updated(set(['tap1']))
self.assertFalse(resync_needed)
agent.remove_port_binding.assert_called_with('net123', 'port123')
self.assertFalse(agent.plugin_rpc.update_device_up.called)
class TestLinuxBridgeManager(base.BaseTestCase):
def setUp(self):
super(TestLinuxBridgeManager, self).setUp()
self.interface_mappings = {'physnet1': 'eth1'}
self.root_helper = cfg.CONF.AGENT.root_helper
self.lbm = linuxbridge_neutron_agent.LinuxBridgeManager(
self.interface_mappings, self.root_helper)
def test_interface_exists_on_bridge(self):
with mock.patch.object(os, 'listdir') as listdir_fn:
listdir_fn.return_value = ["abc"]
self.assertTrue(
self.lbm.interface_exists_on_bridge("br-int", "abc")
)
self.assertFalse(
self.lbm.interface_exists_on_bridge("br-int", "abd")
)
def test_get_bridge_name(self):
nw_id = "123456789101112"
self.assertEqual(self.lbm.get_bridge_name(nw_id),
"brq" + nw_id[0:11])
nw_id = ""
self.assertEqual(self.lbm.get_bridge_name(nw_id),
"brq")
def test_get_subinterface_name(self):
self.assertEqual(self.lbm.get_subinterface_name("eth0", "0"),
"eth0.0")
self.assertEqual(self.lbm.get_subinterface_name("eth0", ""),
"eth0.")
def test_get_tap_device_name(self):
if_id = "123456789101112"
self.assertEqual(self.lbm.get_tap_device_name(if_id),
constants.TAP_DEVICE_PREFIX + if_id[0:11])
if_id = ""
self.assertEqual(self.lbm.get_tap_device_name(if_id),
constants.TAP_DEVICE_PREFIX)
def test_get_vxlan_device_name(self):
vn_id = constants.MAX_VXLAN_VNI
self.assertEqual(self.lbm.get_vxlan_device_name(vn_id),
"vxlan-" + str(vn_id))
self.assertIsNone(self.lbm.get_vxlan_device_name(vn_id + 1))
def test_get_all_neutron_bridges(self):
br_list = ["br-int", "brq1", "brq2", "br-ex"]
with mock.patch.object(os, 'listdir') as listdir_fn:
listdir_fn.return_value = br_list
self.assertEqual(self.lbm.get_all_neutron_bridges(),
br_list[1:3])
self.assertTrue(listdir_fn.called)
def test_get_interfaces_on_bridge(self):
with contextlib.nested(
mock.patch.object(utils, 'execute'),
mock.patch.object(os, 'listdir'),
mock.patch.object(ip_lib, 'device_exists', return_value=True)
) as (exec_fn, listdir_fn, dev_exists_fn):
listdir_fn.return_value = ["qbr1"]
self.assertEqual(self.lbm.get_interfaces_on_bridge("br0"),
["qbr1"])
def test_get_interfaces_on_bridge_not_existing(self):
with mock.patch.object(ip_lib, 'device_exists', return_value=False):
self.assertEqual([], self.lbm.get_interfaces_on_bridge("br0"))
def test_get_tap_devices_count(self):
with mock.patch.object(os, 'listdir') as listdir_fn:
listdir_fn.return_value = ['tap2101', 'eth0.100', 'vxlan-1000']
self.assertEqual(self.lbm.get_tap_devices_count('br0'), 1)
listdir_fn.side_effect = OSError()
self.assertEqual(self.lbm.get_tap_devices_count('br0'), 0)
def test_get_interface_by_ip(self):
with contextlib.nested(
mock.patch.object(ip_lib.IPWrapper, 'get_devices'),
mock.patch.object(ip_lib.IpAddrCommand, 'list')
) as (get_dev_fn, ip_list_fn):
device = mock.Mock()
device.name = 'dev_name'
get_dev_fn.return_value = [device]
ip_list_fn.return_value = mock.Mock()
self.assertEqual(self.lbm.get_interface_by_ip(LOCAL_IP),
'dev_name')
def test_get_bridge_for_tap_device(self):
with contextlib.nested(
mock.patch.object(self.lbm, "get_all_neutron_bridges"),
mock.patch.object(self.lbm, "get_interfaces_on_bridge")
) as (get_all_qbr_fn, get_if_fn):
get_all_qbr_fn.return_value = ["br-int", "br-ex"]
get_if_fn.return_value = ["tap1", "tap2", "tap3"]
self.assertEqual(self.lbm.get_bridge_for_tap_device("tap1"),
"br-int")
self.assertIsNone(self.lbm.get_bridge_for_tap_device("tap4"))
def test_is_device_on_bridge(self):
self.assertTrue(not self.lbm.is_device_on_bridge(""))
with mock.patch.object(os.path, 'exists') as exists_fn:
exists_fn.return_value = True
self.assertTrue(self.lbm.is_device_on_bridge("tap1"))
exists_fn.assert_called_with(
"/sys/devices/virtual/net/tap1/brport"
)
def test_get_interface_details(self):
with contextlib.nested(
mock.patch.object(ip_lib.IpAddrCommand, 'list'),
mock.patch.object(ip_lib.IpRouteCommand, 'get_gateway')
) as (list_fn, getgw_fn):
gwdict = dict(gateway='1.1.1.1')
getgw_fn.return_value = gwdict
ipdict = dict(cidr='1.1.1.1/24',
broadcast='1.1.1.255',
scope='global',
ip_version=4,
dynamic=False)
list_fn.return_value = ipdict
ret = self.lbm.get_interface_details("eth0")
self.assertTrue(list_fn.called)
self.assertTrue(getgw_fn.called)
self.assertEqual(ret, (ipdict, gwdict))
def test_ensure_flat_bridge(self):
with contextlib.nested(
mock.patch.object(ip_lib.IpAddrCommand, 'list'),
mock.patch.object(ip_lib.IpRouteCommand, 'get_gateway')
) as (list_fn, getgw_fn):
gwdict = dict(gateway='1.1.1.1')
getgw_fn.return_value = gwdict
ipdict = dict(cidr='1.1.1.1/24',
broadcast='1.1.1.255',
scope='global',
ip_version=4,
dynamic=False)
list_fn.return_value = ipdict
with mock.patch.object(self.lbm, 'ensure_bridge') as ens:
self.assertEqual(
self.lbm.ensure_flat_bridge("123", "eth0"),
"eth0"
)
self.assertTrue(list_fn.called)
self.assertTrue(getgw_fn.called)
ens.assert_called_once_with("brq123", "eth0",
ipdict, gwdict)
def test_ensure_vlan_bridge(self):
with contextlib.nested(
mock.patch.object(self.lbm, 'ensure_vlan'),
mock.patch.object(self.lbm, 'ensure_bridge'),
mock.patch.object(self.lbm, 'get_interface_details'),
) as (ens_vl_fn, ens, get_int_det_fn):
ens_vl_fn.return_value = "eth0.1"
get_int_det_fn.return_value = (None, None)
self.assertEqual(self.lbm.ensure_vlan_bridge("123", "eth0", "1"),
"eth0.1")
ens.assert_called_with("brq123", "eth0.1", None, None)
get_int_det_fn.return_value = ("ips", "gateway")
self.assertEqual(self.lbm.ensure_vlan_bridge("123", "eth0", "1"),
"eth0.1")
ens.assert_called_with("brq123", "eth0.1", "ips", "gateway")
def test_ensure_local_bridge(self):
with mock.patch.object(self.lbm, 'ensure_bridge') as ens_fn:
self.lbm.ensure_local_bridge("54321")
ens_fn.assert_called_once_with("brq54321")
def test_ensure_vlan(self):
with mock.patch.object(ip_lib, 'device_exists') as de_fn:
de_fn.return_value = True
self.assertEqual(self.lbm.ensure_vlan("eth0", "1"), "eth0.1")
de_fn.return_value = False
with mock.patch.object(utils, 'execute') as exec_fn:
exec_fn.return_value = False
self.assertEqual(self.lbm.ensure_vlan("eth0", "1"), "eth0.1")
# FIXME(kevinbenton): validate the params to the exec_fn calls
self.assertEqual(exec_fn.call_count, 2)
exec_fn.return_value = True
self.assertIsNone(self.lbm.ensure_vlan("eth0", "1"))
self.assertEqual(exec_fn.call_count, 3)
def test_ensure_vxlan(self):
seg_id = "12345678"
self.lbm.local_int = 'eth0'
self.lbm.vxlan_mode = lconst.VXLAN_MCAST
with mock.patch.object(ip_lib, 'device_exists') as de_fn:
de_fn.return_value = True
self.assertEqual(self.lbm.ensure_vxlan(seg_id), "vxlan-" + seg_id)
de_fn.return_value = False
with mock.patch.object(self.lbm.ip,
'add_vxlan') as add_vxlan_fn:
add_vxlan_fn.return_value = FakeIpDevice()
self.assertEqual(self.lbm.ensure_vxlan(seg_id),
"vxlan-" + seg_id)
add_vxlan_fn.assert_called_with("vxlan-" + seg_id, seg_id,
group="224.0.0.1",
dev=self.lbm.local_int)
cfg.CONF.set_override('l2_population', 'True', 'VXLAN')
self.assertEqual(self.lbm.ensure_vxlan(seg_id),
"vxlan-" + seg_id)
add_vxlan_fn.assert_called_with("vxlan-" + seg_id, seg_id,
group="224.0.0.1",
dev=self.lbm.local_int,
proxy=True)
def test_update_interface_ip_details(self):
gwdict = dict(gateway='1.1.1.1',
metric=50)
ipdict = dict(cidr='1.1.1.1/24',
broadcast='1.1.1.255',
scope='global',
ip_version=4,
dynamic=False)
with contextlib.nested(
mock.patch.object(ip_lib.IpAddrCommand, 'add'),
mock.patch.object(ip_lib.IpAddrCommand, 'delete')
) as (add_fn, del_fn):
self.lbm.update_interface_ip_details("br0", "eth0",
[ipdict], None)
self.assertTrue(add_fn.called)
self.assertTrue(del_fn.called)
with contextlib.nested(
mock.patch.object(ip_lib.IpRouteCommand, 'add_gateway'),
mock.patch.object(ip_lib.IpRouteCommand, 'delete_gateway')
) as (addgw_fn, delgw_fn):
self.lbm.update_interface_ip_details("br0", "eth0",
None, gwdict)
self.assertTrue(addgw_fn.called)
self.assertTrue(delgw_fn.called)
def test_bridge_exists_and_ensure_up(self):
ip_lib_mock = mock.Mock()
with mock.patch.object(ip_lib, 'IPDevice', return_value=ip_lib_mock):
# device exists
self.assertTrue(self.lbm._bridge_exists_and_ensure_up("br0"))
self.assertTrue(ip_lib_mock.link.set_up.called)
# device doesn't exists
ip_lib_mock.link.set_up.side_effect = RuntimeError
self.assertFalse(self.lbm._bridge_exists_and_ensure_up("br0"))
def test_ensure_bridge(self):
with contextlib.nested(
mock.patch.object(self.lbm, '_bridge_exists_and_ensure_up'),
mock.patch.object(utils, 'execute'),
mock.patch.object(self.lbm, 'update_interface_ip_details'),
mock.patch.object(self.lbm, 'interface_exists_on_bridge'),
mock.patch.object(self.lbm, 'is_device_on_bridge'),
mock.patch.object(self.lbm, 'get_bridge_for_tap_device'),
) as (de_fn, exec_fn, upd_fn, ie_fn, if_br_fn, get_if_br_fn):
de_fn.return_value = False
exec_fn.return_value = False
self.assertEqual(self.lbm.ensure_bridge("br0", None), "br0")
ie_fn.return_value = False
self.lbm.ensure_bridge("br0", "eth0")
upd_fn.assert_called_with("br0", "eth0", None, None)
ie_fn.assert_called_with("br0", "eth0")
self.lbm.ensure_bridge("br0", "eth0", "ips", "gateway")
upd_fn.assert_called_with("br0", "eth0", "ips", "gateway")
ie_fn.assert_called_with("br0", "eth0")
exec_fn.side_effect = Exception()
de_fn.return_value = True
self.lbm.ensure_bridge("br0", "eth0")
ie_fn.assert_called_with("br0", "eth0")
exec_fn.reset_mock()
exec_fn.side_effect = None
de_fn.return_value = True
ie_fn.return_value = False
get_if_br_fn.return_value = "br1"
self.lbm.ensure_bridge("br0", "eth0")
expected = [
mock.call(['brctl', 'delif', 'br1', 'eth0'],
root_helper=self.root_helper),
mock.call(['brctl', 'addif', 'br0', 'eth0'],
root_helper=self.root_helper),
]
exec_fn.assert_has_calls(expected)
def test_ensure_physical_in_bridge(self):
self.assertFalse(
self.lbm.ensure_physical_in_bridge("123", p_const.TYPE_VLAN,
"phys", "1")
)
with mock.patch.object(self.lbm, "ensure_flat_bridge") as flbr_fn:
self.assertTrue(
self.lbm.ensure_physical_in_bridge("123", p_const.TYPE_FLAT,
"physnet1", None)
)
self.assertTrue(flbr_fn.called)
with mock.patch.object(self.lbm, "ensure_vlan_bridge") as vlbr_fn:
self.assertTrue(
self.lbm.ensure_physical_in_bridge("123", p_const.TYPE_VLAN,
"physnet1", "1")
)
self.assertTrue(vlbr_fn.called)
with mock.patch.object(self.lbm, "ensure_vxlan_bridge") as vlbr_fn:
self.lbm.vxlan_mode = lconst.VXLAN_MCAST
self.assertTrue(
self.lbm.ensure_physical_in_bridge("123", p_const.TYPE_VXLAN,
"physnet1", "1")
)
self.assertTrue(vlbr_fn.called)
def test_add_tap_interface(self):
with mock.patch.object(ip_lib, "device_exists") as de_fn:
de_fn.return_value = False
self.assertFalse(
self.lbm.add_tap_interface("123", p_const.TYPE_VLAN,
"physnet1", "1", "tap1")
)
de_fn.return_value = True
with contextlib.nested(
mock.patch.object(self.lbm, "ensure_local_bridge"),
mock.patch.object(utils, "execute"),
mock.patch.object(self.lbm, "get_bridge_for_tap_device")
) as (en_fn, exec_fn, get_br):
exec_fn.return_value = False
get_br.return_value = True
self.assertTrue(self.lbm.add_tap_interface("123",
p_const.TYPE_LOCAL,
"physnet1", None,
"tap1"))
en_fn.assert_called_with("123")
get_br.return_value = False
exec_fn.return_value = True
self.assertFalse(self.lbm.add_tap_interface("123",
p_const.TYPE_LOCAL,
"physnet1", None,
"tap1"))
with mock.patch.object(self.lbm,
"ensure_physical_in_bridge") as ens_fn:
ens_fn.return_value = False
self.assertFalse(self.lbm.add_tap_interface("123",
p_const.TYPE_VLAN,
"physnet1", "1",
"tap1"))
def test_add_interface(self):
with mock.patch.object(self.lbm, "add_tap_interface") as add_tap:
self.lbm.add_interface("123", p_const.TYPE_VLAN, "physnet-1",
"1", "234")
add_tap.assert_called_with("123", p_const.TYPE_VLAN, "physnet-1",
"1", "tap234")
def test_delete_vlan_bridge(self):
with contextlib.nested(
mock.patch.object(ip_lib, "device_exists"),
mock.patch.object(self.lbm, "get_interfaces_on_bridge"),
mock.patch.object(self.lbm, "remove_interface"),
mock.patch.object(self.lbm, "get_interface_details"),
mock.patch.object(self.lbm, "update_interface_ip_details"),
mock.patch.object(self.lbm, "delete_vxlan"),
mock.patch.object(utils, "execute")
) as (de_fn, getif_fn, remif_fn, if_det_fn,
updif_fn, del_vxlan, exec_fn):
de_fn.return_value = False
self.lbm.delete_vlan_bridge("br0")
self.assertFalse(getif_fn.called)
de_fn.return_value = True
getif_fn.return_value = ["eth0", "eth1", "vxlan-1002"]
if_det_fn.return_value = ("ips", "gateway")
exec_fn.return_value = False
self.lbm.delete_vlan_bridge("br0")
updif_fn.assert_called_with("eth1", "br0", "ips", "gateway")
del_vxlan.assert_called_with("vxlan-1002")
def test_delete_vlan_bridge_with_ip(self):
with contextlib.nested(
mock.patch.object(ip_lib, "device_exists"),
mock.patch.object(self.lbm, "get_interfaces_on_bridge"),
mock.patch.object(self.lbm, "remove_interface"),
mock.patch.object(self.lbm, "get_interface_details"),
mock.patch.object(self.lbm, "update_interface_ip_details"),
mock.patch.object(self.lbm, "delete_vlan"),
mock.patch.object(utils, "execute")
) as (de_fn, getif_fn, remif_fn, if_det_fn,
updif_fn, del_vlan, exec_fn):
de_fn.return_value = True
getif_fn.return_value = ["eth0", "eth1.1"]
if_det_fn.return_value = ("ips", "gateway")
exec_fn.return_value = False
self.lbm.delete_vlan_bridge("br0")
updif_fn.assert_called_with("eth1.1", "br0", "ips", "gateway")
self.assertFalse(del_vlan.called)
def test_delete_vlan_bridge_no_ip(self):
with contextlib.nested(
mock.patch.object(ip_lib, "device_exists"),
mock.patch.object(self.lbm, "get_interfaces_on_bridge"),
mock.patch.object(self.lbm, "remove_interface"),
mock.patch.object(self.lbm, "get_interface_details"),
mock.patch.object(self.lbm, "update_interface_ip_details"),
mock.patch.object(self.lbm, "delete_vlan"),
mock.patch.object(utils, "execute")
) as (de_fn, getif_fn, remif_fn, if_det_fn,
updif_fn, del_vlan, exec_fn):
de_fn.return_value = True
getif_fn.return_value = ["eth0", "eth1.1"]
exec_fn.return_value = False
if_det_fn.return_value = ([], None)
self.lbm.delete_vlan_bridge("br0")
del_vlan.assert_called_with("eth1.1")
self.assertFalse(updif_fn.called)
def test_delete_vxlan_bridge_no_int_mappings(self):
interface_mappings = {}
lbm = linuxbridge_neutron_agent.LinuxBridgeManager(
interface_mappings, self.root_helper)
with contextlib.nested(
mock.patch.object(ip_lib, "device_exists"),
mock.patch.object(lbm, "get_interfaces_on_bridge"),
mock.patch.object(lbm, "remove_interface"),
mock.patch.object(lbm, "delete_vxlan"),
mock.patch.object(utils, "execute")
) as (de_fn, getif_fn, remif_fn, del_vxlan, exec_fn):
de_fn.return_value = False
lbm.delete_vlan_bridge("br0")
self.assertFalse(getif_fn.called)
de_fn.return_value = True
getif_fn.return_value = ["vxlan-1002"]
exec_fn.return_value = False
lbm.delete_vlan_bridge("br0")
del_vxlan.assert_called_with("vxlan-1002")
def test_remove_empty_bridges(self):
self.lbm.network_map = {'net1': mock.Mock(), 'net2': mock.Mock()}
def tap_count_side_effect(*args):
return 0 if args[0] == 'brqnet1' else 1
with contextlib.nested(
mock.patch.object(self.lbm, "delete_vlan_bridge"),
mock.patch.object(self.lbm, "get_tap_devices_count",
side_effect=tap_count_side_effect),
) as (del_br_fn, count_tap_fn):
self.lbm.remove_empty_bridges()
del_br_fn.assert_called_once_with('brqnet1')
def test_remove_interface(self):
with contextlib.nested(
mock.patch.object(ip_lib, "device_exists"),
mock.patch.object(self.lbm, "is_device_on_bridge"),
mock.patch.object(utils, "execute")
) as (de_fn, isdev_fn, exec_fn):
de_fn.return_value = False
self.assertFalse(self.lbm.remove_interface("br0", "eth0"))
self.assertFalse(isdev_fn.called)
de_fn.return_value = True
isdev_fn.return_value = False
self.assertTrue(self.lbm.remove_interface("br0", "eth0"))
isdev_fn.return_value = True
exec_fn.return_value = True
self.assertFalse(self.lbm.remove_interface("br0", "eth0"))
exec_fn.return_value = False
self.assertTrue(self.lbm.remove_interface("br0", "eth0"))
def test_delete_vlan(self):
with contextlib.nested(
mock.patch.object(ip_lib, "device_exists"),
mock.patch.object(utils, "execute")
) as (de_fn, exec_fn):
de_fn.return_value = False
self.lbm.delete_vlan("eth1.1")
self.assertFalse(exec_fn.called)
de_fn.return_value = True
exec_fn.return_value = False
self.lbm.delete_vlan("eth1.1")
self.assertTrue(exec_fn.called)
def _check_vxlan_support(self, expected, vxlan_module_supported,
vxlan_ucast_supported, vxlan_mcast_supported):
with contextlib.nested(
mock.patch.object(self.lbm, 'vxlan_module_supported',
return_value=vxlan_module_supported),
mock.patch.object(self.lbm, 'vxlan_ucast_supported',
return_value=vxlan_ucast_supported),
mock.patch.object(self.lbm, 'vxlan_mcast_supported',
return_value=vxlan_mcast_supported)):
if expected == lconst.VXLAN_NONE:
self.assertRaises(exceptions.VxlanNetworkUnsupported,
self.lbm.check_vxlan_support)
self.assertEqual(expected, self.lbm.vxlan_mode)
else:
self.lbm.check_vxlan_support()
self.assertEqual(expected, self.lbm.vxlan_mode)
def test_check_vxlan_support(self):
self._check_vxlan_support(expected=lconst.VXLAN_UCAST,
vxlan_module_supported=True,
vxlan_ucast_supported=True,
vxlan_mcast_supported=True)
self._check_vxlan_support(expected=lconst.VXLAN_MCAST,
vxlan_module_supported=True,
vxlan_ucast_supported=False,
vxlan_mcast_supported=True)
self._check_vxlan_support(expected=lconst.VXLAN_NONE,
vxlan_module_supported=False,
vxlan_ucast_supported=False,
vxlan_mcast_supported=False)
self._check_vxlan_support(expected=lconst.VXLAN_NONE,
vxlan_module_supported=True,
vxlan_ucast_supported=False,
vxlan_mcast_supported=False)
def _check_vxlan_module_supported(self, expected, execute_side_effect):
with mock.patch.object(
utils, 'execute',
side_effect=execute_side_effect):
self.assertEqual(expected, self.lbm.vxlan_module_supported())
def test_vxlan_module_supported(self):
self._check_vxlan_module_supported(
expected=True,
execute_side_effect=None)
self._check_vxlan_module_supported(
expected=False,
execute_side_effect=RuntimeError())
def _check_vxlan_ucast_supported(
self, expected, l2_population, iproute_arg_supported, fdb_append):
cfg.CONF.set_override('l2_population', l2_population, 'VXLAN')
with contextlib.nested(
mock.patch.object(
ip_lib, 'device_exists', return_value=False),
mock.patch.object(self.lbm, 'delete_vxlan', return_value=None),
mock.patch.object(self.lbm, 'ensure_vxlan', return_value=None),
mock.patch.object(
utils, 'execute',
side_effect=None if fdb_append else RuntimeError()),
mock.patch.object(
ip_lib, 'iproute_arg_supported',
return_value=iproute_arg_supported)):
self.assertEqual(expected, self.lbm.vxlan_ucast_supported())
def test_vxlan_ucast_supported(self):
self._check_vxlan_ucast_supported(
expected=False,
l2_population=False, iproute_arg_supported=True, fdb_append=True)
self._check_vxlan_ucast_supported(
expected=False,
l2_population=True, iproute_arg_supported=False, fdb_append=True)
self._check_vxlan_ucast_supported(
expected=False,
l2_population=True, iproute_arg_supported=True, fdb_append=False)
self._check_vxlan_ucast_supported(
expected=True,
l2_population=True, iproute_arg_supported=True, fdb_append=True)
def _check_vxlan_mcast_supported(
self, expected, vxlan_group, iproute_arg_supported):
cfg.CONF.set_override('vxlan_group', vxlan_group, 'VXLAN')
with mock.patch.object(
ip_lib, 'iproute_arg_supported',
return_value=iproute_arg_supported):
self.assertEqual(expected, self.lbm.vxlan_mcast_supported())
def test_vxlan_mcast_supported(self):
self._check_vxlan_mcast_supported(
expected=False,
vxlan_group='',
iproute_arg_supported=True)
self._check_vxlan_mcast_supported(
expected=False,
vxlan_group='224.0.0.1',
iproute_arg_supported=False)
self._check_vxlan_mcast_supported(
expected=True,
vxlan_group='224.0.0.1',
iproute_arg_supported=True)
class TestLinuxBridgeRpcCallbacks(base.BaseTestCase):
def setUp(self):
cfg.CONF.set_override('local_ip', LOCAL_IP, 'VXLAN')
super(TestLinuxBridgeRpcCallbacks, self).setUp()
self.u_execute_p = mock.patch('neutron.agent.linux.utils.execute')
self.u_execute = self.u_execute_p.start()
class FakeLBAgent(object):
def __init__(self):
self.agent_id = 1
self.br_mgr = (linuxbridge_neutron_agent.
LinuxBridgeManager({'physnet1': 'eth1'},
cfg.CONF.AGENT.root_helper))
self.br_mgr.vxlan_mode = lconst.VXLAN_UCAST
segment = mock.Mock()
segment.network_type = 'vxlan'
segment.segmentation_id = 1
self.br_mgr.network_map['net_id'] = segment
self.lb_rpc = linuxbridge_neutron_agent.LinuxBridgeRpcCallbacks(
object(),
FakeLBAgent()
)
self.root_helper = cfg.CONF.AGENT.root_helper
def test_network_delete(self):
with contextlib.nested(
mock.patch.object(self.lb_rpc.agent.br_mgr, "get_bridge_name"),
mock.patch.object(self.lb_rpc.agent.br_mgr, "delete_vlan_bridge")
) as (get_br_fn, del_fn):
get_br_fn.return_value = "br0"
self.lb_rpc.network_delete("anycontext", network_id="123")
get_br_fn.assert_called_with("123")
del_fn.assert_called_with("br0")
def test_fdb_add(self):
fdb_entries = {'net_id':
{'ports':
{'agent_ip': [constants.FLOODING_ENTRY,
['port_mac', 'port_ip']]},
'network_type': 'vxlan',
'segment_id': 1}}
with mock.patch.object(utils, 'execute',
return_value='') as execute_fn:
self.lb_rpc.fdb_add(None, fdb_entries)
expected = [
mock.call(['bridge', 'fdb', 'show', 'dev', 'vxlan-1'],
root_helper=self.root_helper),
mock.call(['bridge', 'fdb', 'add',
constants.FLOODING_ENTRY[0],
'dev', 'vxlan-1', 'dst', 'agent_ip'],
root_helper=self.root_helper,
check_exit_code=False),
mock.call(['ip', 'neigh', 'replace', 'port_ip', 'lladdr',
'port_mac', 'dev', 'vxlan-1', 'nud', 'permanent'],
root_helper=self.root_helper,
check_exit_code=False),
mock.call(['bridge', 'fdb', 'add', 'port_mac', 'dev',
'vxlan-1', 'dst', 'agent_ip'],
root_helper=self.root_helper,
check_exit_code=False),
]
execute_fn.assert_has_calls(expected)
def test_fdb_ignore(self):
fdb_entries = {'net_id':
{'ports':
{LOCAL_IP: [constants.FLOODING_ENTRY,
['port_mac', 'port_ip']]},
'network_type': 'vxlan',
'segment_id': 1}}
with mock.patch.object(utils, 'execute',
return_value='') as execute_fn:
self.lb_rpc.fdb_add(None, fdb_entries)
self.lb_rpc.fdb_remove(None, fdb_entries)
self.assertFalse(execute_fn.called)
fdb_entries = {'other_net_id':
{'ports':
{'192.168.0.67': [constants.FLOODING_ENTRY,
['port_mac', 'port_ip']]},
'network_type': 'vxlan',
'segment_id': 1}}
with mock.patch.object(utils, 'execute',
return_value='') as execute_fn:
self.lb_rpc.fdb_add(None, fdb_entries)
self.lb_rpc.fdb_remove(None, fdb_entries)
self.assertFalse(execute_fn.called)
def test_fdb_remove(self):
fdb_entries = {'net_id':
{'ports':
{'agent_ip': [constants.FLOODING_ENTRY,
['port_mac', 'port_ip']]},
'network_type': 'vxlan',
'segment_id': 1}}
with mock.patch.object(utils, 'execute',
return_value='') as execute_fn:
self.lb_rpc.fdb_remove(None, fdb_entries)
expected = [
mock.call(['bridge', 'fdb', 'del',
constants.FLOODING_ENTRY[0],
'dev', 'vxlan-1', 'dst', 'agent_ip'],
root_helper=self.root_helper,
check_exit_code=False),
mock.call(['ip', 'neigh', 'del', 'port_ip', 'lladdr',
'port_mac', 'dev', 'vxlan-1'],
root_helper=self.root_helper,
check_exit_code=False),
mock.call(['bridge', 'fdb', 'del', 'port_mac',
'dev', 'vxlan-1', 'dst', 'agent_ip'],
root_helper=self.root_helper,
check_exit_code=False),
]
execute_fn.assert_has_calls(expected)
def test_fdb_update_chg_ip(self):
fdb_entries = {'chg_ip':
{'net_id':
{'agent_ip':
{'before': [['port_mac', 'port_ip_1']],
'after': [['port_mac', 'port_ip_2']]}}}}
with mock.patch.object(utils, 'execute',
return_value='') as execute_fn:
self.lb_rpc.fdb_update(None, fdb_entries)
expected = [
mock.call(['ip', 'neigh', 'replace', 'port_ip_2', 'lladdr',
'port_mac', 'dev', 'vxlan-1', 'nud', 'permanent'],
root_helper=self.root_helper,
check_exit_code=False),
mock.call(['ip', 'neigh', 'del', 'port_ip_1', 'lladdr',
'port_mac', 'dev', 'vxlan-1'],
root_helper=self.root_helper,
check_exit_code=False)
]
execute_fn.assert_has_calls(expected)
| huaweiswitch/neutron | neutron/tests/unit/linuxbridge/test_lb_neutron_agent.py | Python | apache-2.0 | 48,351 | 0.000041 |
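The tests above lean heavily on contextlib.nested(), which exists only on Python 2 and was removed in Python 3. A rough, self-contained sketch of the same multi-patch pattern using contextlib.ExitStack; the patched targets below (os.listdir, os.path.exists) are illustrative, not taken from the neutron tests.
# Sketch: Python 3 replacement for contextlib.nested() using ExitStack (targets are illustrative).
from contextlib import ExitStack
from unittest import mock
import os

with ExitStack() as stack:
    listdir_fn = stack.enter_context(mock.patch.object(os, 'listdir'))
    exists_fn = stack.enter_context(mock.patch.object(os.path, 'exists'))
    listdir_fn.return_value = ['tap1']
    exists_fn.return_value = True
    # Both patches are active here and are unwound in reverse order on exit,
    # which is the behaviour contextlib.nested() provided on Python 2.
    assert os.listdir('/sys/class/net') == ['tap1']
    assert os.path.exists('/sys/devices/virtual/net/tap1/brport')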
import sys
import logging
import time
import requests
from biokbase.AbstractHandle.Client import AbstractHandle
def getStderrLogger(name, level=logging.INFO):
logger = logging.getLogger(name)
logger.setLevel(level)
# send messages to sys.stderr
streamHandler = logging.StreamHandler(sys.__stderr__)
formatter = logging.Formatter("%(asctime)s - %(filename)s - %(lineno)d - %(levelname)s - %(message)s")
formatter.converter = time.gmtime
streamHandler.setFormatter(formatter)
logger.addHandler(streamHandler)
return logger
def getHandles(logger,
shock_url = "https://kbase.us/services/shock-api/",
handle_url = "https://kbase.us/services/handle_service/",
shock_ids = None,
handle_ids = None,
token = None):
if token is None:
raise Exception("Authentication token required!")
hs = AbstractHandle(url=handle_url, token=token)
handles = list()
if shock_ids is not None:
header = dict()
header["Authorization"] = "Oauth {0}".format(token)
for sid in shock_ids:
info = None
try:
logger.info("Found shock id {0}, retrieving information about the data.".format(sid))
response = requests.get("{0}/node/{1}".format(shock_url, sid), headers=header, verify=True)
info = response.json()["data"]
except:
logger.error("There was an error retrieving information about the shock node id {0} from url {1}".format(sid, shock_url))
try:
logger.info("Retrieving a handle id for the data.")
handle_id = hs.persist_handle({"id" : sid,
"type" : "shock",
"url" : shock_url,
"file_name": info["file"]["name"],
"remote_md5": info["file"]["md5"]})
except:
try:
handle_id = hs.ids_to_handles([sid])[0]["hid"]
single_handle = hs.hids_to_handles([handle_id])
assert len(single_handle) != 0
if info is not None:
single_handle[0]["file_name"] = info["file"]["name"]
single_handle[0]["remote_md5"] = info["file"]["md5"]
print >> sys.stderr, single_handle
handles.append(single_handle[0])
except:
logger.error("The input shock node id {} is already registered or could not be registered".format(sid))
raise
elif handle_ids is not None:
for hid in handle_ids:
try:
single_handle = hs.hids_to_handles([hid])
assert len(single_handle) != 0
handles.append(single_handle[0])
except:
logger.error("Invalid handle id {0}".format(hid))
raise
return handles
| realmarcin/transform | lib/biokbase/Transform/script_utils.py | Python | mit | 3,187 | 0.011923 |
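A brief, hypothetical usage sketch for getStderrLogger() above. The logger name is arbitrary, the import path is inferred from the file location, and the getHandles() call is only described because it needs live KBase credentials.
# Hypothetical usage of getStderrLogger() (logger name arbitrary, module path inferred).
import logging
from biokbase.Transform.script_utils import getStderrLogger

logger = getStderrLogger("transform.example", level=logging.DEBUG)
logger.info("messages go to sys.stderr with a UTC timestamp prefix")
# getHandles(logger, shock_ids=[...], token=...) would then reuse this logger,
# but it requires a valid KBase auth token and a reachable handle service.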
#!/usr/bin/env python
import fileinput
import re
import sys
refs = {}
complete_file = ""
for line in open(sys.argv[1], 'r'):
complete_file += line
for m in re.findall('\[\[(.+)\]\]\n=+ ([^\n]+)', complete_file):
ref, title = m
refs["<<" + ref + ">>"] = "<<" + ref + ", " + title + ">>"
def translate(match):
try:
return refs[match.group(0)]
except KeyError:
return ""
rc = re.compile('|'.join(map(re.escape, sorted(refs, reverse=True))))
for line in open(sys.argv[1], 'r'):
print rc.sub(translate, line),
| tgraf/libnl | doc/resolve-asciidoc-refs.py | Python | lgpl-2.1 | 521 | 0.026871 |
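The script above scans an AsciiDoc file for [[anchor]] markers followed by a section title and rewrites bare <<anchor>> cross-references to include that title. A self-contained illustration of the same substitution, written for Python 3 (the original script is Python 2); the sample anchor and title are made up.
# Self-contained illustration of the substitution performed above (sample text is made up).
import re

doc = "[[core_attr]]\n== Attributes\nSee <<core_attr>> for details.\n"
refs = {"<<%s>>" % ref: "<<%s, %s>>" % (ref, title)
        for ref, title in re.findall(r'\[\[(.+)\]\]\n=+ ([^\n]+)', doc)}
rc = re.compile('|'.join(map(re.escape, sorted(refs, reverse=True))))
print(rc.sub(lambda m: refs[m.group(0)], doc))
# -> the cross-reference becomes <<core_attr, Attributes>>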
'''
import csv
from collections import Counter
counts = Counter()
with open ('zoo.csv') as fin:
cin = csv.reader(fin)
for num, row in enumerate(cin):
if num > 0:
counts[row[0]] += int (row[-1])
for animal, hush in counts.items():
print("%10s %10s" % (animal, hush))
'''
'''
import bubbles
p = bubbles.Pipeline()
p.source(bubbles.data_object('csv_source', 'zoo.csv', inter_field=True))
p.aggregate('animal', 'hush')
p.pretty_print()
'''
def display_shapefile(name, iwidth=500, iheight=500):
import shapefile
from PIL import Image, ImageDraw
r = shapefile.Reader(name)
mleft, mbottom, mright, mtop = r.bbox
# map units
mwidth = mright - mleft
mheight = mtop - mbottom
# scale map units to image units
hscale = iwidth/mwidth
vscale = iheight/mheight
img = Image.new("RGB", (iwidth, iheight), "white")
draw = ImageDraw.Draw(img)  # ImageDraw is a module; Draw() returns the drawing context
for shape in r.shapes():
pixels = [
(int(iwidth - ((mright - x) * hscale)), int((mtop - y) * vscale))
for x, y, in shape.points]
if shape.shapeType == shapefile.POLYGON:
draw.polygon(pixels, outline='black')
elif shape.shapeType == shapefile.POLYLINE:
draw.line(pixels, fill='black')
img.show()
if __name__=="__main__":
import sys
display_shapefile(sys.argv[1], 700, 700)
| serggrom/python-projects | Aplication_B.py | Python | gpl-3.0 | 1,409 | 0.003549 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
from holmes.validators.base import Validator
from holmes.utils import _
class ImageAltValidator(Validator):
@classmethod
def get_without_alt_parsed_value(cls, value):
result = []
for src, name in value:
data = '<a href="%s" target="_blank">%s</a>' % (src, name)
result.append(data)
return {'images': ', '.join(result)}
@classmethod
def get_alt_too_big_parsed_value(cls, value):
result = []
for src, name, alt in value['images']:
data = u'<a href="{}" alt="{}" target="_blank">{}</a>'.format(
src, alt, name
)
result.append(data)
return {
'max_size': value['max_size'],
'images': ', '.join(result)
}
@classmethod
def get_violation_definitions(cls):
return {
'invalid.images.alt': {
'title': _('Image(s) without alt attribute'),
'description': _(
'Images without alt text are not good for '
'Search Engines. Images without alt were '
'found for: %(images)s.'),
'value_parser': cls.get_without_alt_parsed_value,
'category': _('SEO'),
'generic_description': _(
'Images without alt attribute are not good for '
'search engines. They are searchable by the content '
'of this attribute, so if it\'s empty, it cause bad '
'indexing optimization.'
)
},
'invalid.images.alt_too_big': {
'title': _('Image(s) with alt attribute too big'),
'description': _(
'Images with alt text bigger than %(max_size)d chars are '
'not good for search engines. Images with a too big alt '
'were found for: %(images)s.'),
'value_parser': cls.get_alt_too_big_parsed_value,
'category': _('SEO'),
'generic_description': _(
'Images with alt text too long are not good to SEO. '
'This maximum value are configurable '
'by Holmes configuration.'
),
'unit': 'number'
}
}
@classmethod
def get_default_violations_values(cls, config):
return {
'invalid.images.alt_too_big': {
'value': config.MAX_IMAGE_ALT_SIZE,
'description': config.get_description('MAX_IMAGE_ALT_SIZE')
}
}
def validate(self):
max_alt_size = self.get_violation_pref('invalid.images.alt_too_big')
imgs = self.get_imgs()
result_no_alt = []
result_alt_too_big = []
for img in imgs:
src = img.get('src')
if not src:
continue
src = self.normalize_url(src)
img_alt = img.get('alt')
if src:
name = src.rsplit('/', 1)[-1]
if not img_alt:
result_no_alt.append((src, name))
elif len(img_alt) > max_alt_size:
result_alt_too_big.append((src, name, img_alt))
if result_no_alt:
self.add_violation(
key='invalid.images.alt',
value=result_no_alt,
points=20 * len(result_no_alt)
)
if result_alt_too_big:
self.add_violation(
key='invalid.images.alt_too_big',
value={
'images': result_alt_too_big,
'max_size': max_alt_size
},
points=20 * len(result_alt_too_big)
)
def get_imgs(self):
return self.review.data.get('page.all_images', None)
| holmes-app/holmes-api | holmes/validators/image_alt.py | Python | mit | 3,935 | 0 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class NetworkInterfaceLoadBalancersOperations:
"""NetworkInterfaceLoadBalancersOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2018_10_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
resource_group_name: str,
network_interface_name: str,
**kwargs: Any
) -> AsyncIterable["_models.NetworkInterfaceLoadBalancerListResult"]:
"""List all load balancers in a network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceLoadBalancerListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2018_10_01.models.NetworkInterfaceLoadBalancerListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceLoadBalancerListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-10-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceLoadBalancerListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/loadBalancers'} # type: ignore
| Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_10_01/aio/operations/_network_interface_load_balancers_operations.py | Python | mit | 5,732 | 0.004187 |
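The list() operation above returns an AsyncItemPaged rather than a plain list, so callers consume it with async for. A hedged sketch of calling it through the public async client; client construction, subscription id, and resource names are placeholders, not taken from the original file.
# Sketch only: consuming the AsyncItemPaged returned by list() (identifiers are placeholders).
import asyncio
from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.network.aio import NetworkManagementClient

async def main():
    credential = DefaultAzureCredential()
    client = NetworkManagementClient(credential, "<subscription-id>")
    async with client:
        # Each page is fetched lazily as the async iteration advances.
        async for lb in client.network_interface_load_balancers.list(
                "<resource-group>", "<network-interface-name>"):
            print(lb.name)
    await credential.close()

asyncio.run(main())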
# -*- coding: utf-8 -*-
# this default value is just for testing in a fake.
# pylint: disable=dangerous-default-value
"""Fake Module containing helper functions for the SQLite plugin"""
from plasoscaffolder.bll.services import base_sqlite_plugin_helper
from plasoscaffolder.bll.services import base_sqlite_plugin_path_helper
from plasoscaffolder.dal import base_sql_query_execution
from plasoscaffolder.dal import sql_query_data
from plasoscaffolder.model import sql_query_column_model
from plasoscaffolder.model import sql_query_column_model_data
class FakeSQLitePluginHelper(base_sqlite_plugin_helper.BaseSQLitePluginHelper):
"""Fake for the SQLite plugin helper"""
def __init__(
self, plugin_exists=False, folder_exists=False,
file_exists=False, valid_name=True,
change_bool_after_every_call_plugin_exists=False,
change_bool_after_every_call_folder_exists=False,
change_bool_after_every_call_file_exists=False,
change_bool_after_every_call_valid_name=False,
distinct_columns=None, valid_row_name=True,
change_bool_after_every_call_valid_row_name=False,
change_bool_after_every_call_valid_comma_separated_string=False,
valid_comma_separated_string=True,
columns_and_timestamp_column=([], []),
assumed_timestamps=[]):
""" Initializes the fake plugin helper
Args:
change_bool_after_every_call_plugin_exists (bool): if the function
boolean should change after every call.
change_bool_after_every_call_file_exists (bool): if the function
boolean should change after every call.
change_bool_after_every_call_folder_exists (bool): if the function
boolean should change after every call.
change_bool_after_every_call_valid_name (bool): if the function
boolean should change after every call.
change_bool_after_every_call_valid_comma_separated_string (bool): if
the function boolean should change after every call.
file_exists (bool): what the FileExists function should return
plugin_exists (bool): what the PluginExists function should return
folder_exists (bool): what the FolderExists function should return
valid_name (bool): what the IsValidPluginName function should return
distinct_columns ([]): what the GetDistinctColumnsFromSQLQueryData
function should return
valid_row_name (bool): if the row name is valid,
what the function isValidRowName will return
change_bool_after_every_call_valid_row_name (bool): if the function
boolean should change after every call.
columns_and_timestamp_column ([sql_query_column_model.SQLColumnModel],
[sql_query_column_model.SQLColumnModel]): what to return for the
method GetColumnsAndTimestampColumn
assumed_timestamps ([str]): what to return for the method
GetAssumedTimestamps
"""
self.change_valid_name = change_bool_after_every_call_valid_name
self.change_file_exists = change_bool_after_every_call_file_exists
self.change_folder_exists = change_bool_after_every_call_folder_exists
self.change_plugin_exists = change_bool_after_every_call_plugin_exists
self.change_valid_row_name = change_bool_after_every_call_valid_row_name
self.change_valid_comma_separated_string = (
change_bool_after_every_call_valid_comma_separated_string)
self.plugin_exists = plugin_exists
self.folder_exists = folder_exists
self.file_exists = file_exists
self.valid_name = valid_name
self.distinct_columns = distinct_columns
self.is_valid_row_name = valid_row_name
self.is_valid_comma_separated_string = valid_comma_separated_string
self.columns_and_timestamp_column = columns_and_timestamp_column
self.assumed_timestamps = assumed_timestamps
def PluginExists(self,
path: str,
plugin_name: str,
database_suffix: str,
path_helper:
base_sqlite_plugin_path_helper.BaseSQLitePluginPathHelper) \
-> bool:
if self.change_plugin_exists:
self.plugin_exists = not self.plugin_exists
return not self.plugin_exists
else:
return self.plugin_exists
def FileExists(self, path: str) -> bool:
"""will return true false true ... starting with the initial (against
loops while testing)"""
if self.change_file_exists:
self.file_exists = not self.file_exists
return not self.file_exists
else:
return self.file_exists
def FolderExists(self, path: str) -> bool:
"""will return true false true ... starting with the initial (against
loops while testing)"""
if self.change_folder_exists:
self.folder_exists = not self.folder_exists
return not self.folder_exists
else:
return self.folder_exists
def IsValidPluginName(self, plugin_name: str) -> bool:
"""will return true false true ... starting with the initial (against
loops while testing)"""
if self.change_valid_name:
self.valid_name = not self.valid_name
return not self.valid_name
else:
return self.valid_name
def IsValidRowName(self, row_name: str) -> bool:
"""will return true false true ... starting with the initial (against
loops while testing)"""
if self.change_valid_row_name:
self.is_valid_row_name = not self.is_valid_row_name
return not self.is_valid_row_name
else:
return self.is_valid_row_name
def IsValidCommaSeparatedString(self, text: str) -> bool:
"""will return true false true ... starting with the initial (against
loops while testing)"""
if self.change_valid_comma_separated_string:
self.is_valid_comma_separated_string = (
not self.is_valid_comma_separated_string)
return not self.is_valid_comma_separated_string
else:
return self.is_valid_comma_separated_string
def RunSQLQuery(self, query: str,
executor: base_sql_query_execution.BaseSQLQueryExecution()):
""" Validates the sql Query
Args:
executor (base_sql_query_execution.SQLQueryExection()) the sql executor
query (str): the sql Query
Returns:
base_sql_query_execution.SQLQueryData: the data to the executed Query
"""
return executor.ExecuteQuery(query)
def GetDistinctColumnsFromSQLQueryData(
self,
queries: [sql_query_data.SQLQueryData]) -> [str]:
"""
Get a distinct list of all attributes from multiple queries
Args:
queries ([base_sql_query_execution.SQLQueryData]): an array of multiple
sql query data objects
Returns:
[str]: a distinct list of all attributes used in the query
"""
return self.distinct_columns
def GetAssumedTimestamps(self, columns: [sql_query_column_model]) -> [str]:
"""Gets all columns assumed that they are timestamps
Args:
columns ([sql_query_column_model]): the columns from the query
Returns:
[str]: the names from the columns assumed they could be a timestamp
"""
return self.assumed_timestamps
def GetColumnsAndTimestampColumn(
self, columns: [sql_query_column_model.SQLColumnModel],
timestamps: [str], data: [str]
) -> ([sql_query_column_model_data.SQLColumnModelData],
[sql_query_column_model.SQLColumnModel]):
"""Splits the column list into a list of simple columns and a list for
timestamp event columns and adds the data to the simple columns
Args:
columns ([sql_query_column_model_data.SQLColumnModelData]): the columns
from the SQL query
timestamps ([str]): the timestamp events
data ([str]): the data from the cursor
Returns:
([sql_query_column_model_data.SQLColumnModelData],
[sql_query_column_model.SQLColumnModel): a tuple of columns,
the first are the normal columns, the second are the timestamp events
"""
return self.columns_and_timestamp_column
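# --- Illustrative usage sketch (hypothetical values, not used by the tests) ---
# A minimal example of the alternating-boolean behaviour of the fake: with
# ``change_bool_after_every_call_file_exists`` set, FileExists returns the
# initial value first and then flips on every subsequent call.
def _ExampleFakeUsage():
  helper = FakeSQLitePluginHelper(
      file_exists=True, change_bool_after_every_call_file_exists=True)
  first = helper.FileExists('some/path')   # True (the initial value)
  second = helper.FileExists('some/path')  # False (flipped on the next call)
  return first, second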
|
ClaudiaSaxer/PlasoScaffolder
|
src/tests/fake/fake_sqlite_plugin_helper.py
|
Python
|
apache-2.0
| 7,987 | 0.004132 |
#!/usr/bin/env python3
#* This file is part of the MOOSE framework
#* https://www.mooseframework.org
#*
#* All rights reserved, see COPYRIGHT for full restrictions
#* https://github.com/idaholab/moose/blob/master/COPYRIGHT
#*
#* Licensed under LGPL 2.1, please see LICENSE for details
#* https://www.gnu.org/licenses/lgpl-2.1.html
import unittest
import logging
from MooseDocs.test import MooseDocsTestCase
from MooseDocs.extensions import core
from MooseDocs import base
logging.basicConfig()
class TestCore(MooseDocsTestCase):
EXTENSIONS = [core]
def testCodeBlock(self):
text = "```\nint x = 0;\n```"
ast = self.tokenize(text)
self.assertToken(ast(0), 'Code', language='text', content='\nint x = 0;\n', escape=True)
def helper(r):
self.assertHTMLTag(r(0), 'pre', class_='moose-pre')
self.assertHTMLTag(r(0)(0), 'code', class_='language-text')
self.assertHTMLString(r(0)(0)(0), '\nint x = 0;\n', escape=True)
res = self.render(ast)
self.assertHTMLTag(res, 'body')
helper(res)
res = self.render(ast, renderer=base.MaterializeRenderer())
self.assertHTMLTag(res, 'div')
helper(res)
tex = self.render(ast, renderer=base.LatexRenderer())
self.assertLatex(tex(0), 'Environment', 'verbatim',
after_begin='\n', before_end='\n', escape=False)
self.assertLatexString(tex(0)(0), 'int x = 0;', escape=False)
def testLineBreak(self):
text = r'Break\\ this'
ast = self.tokenize(text)
self.assertToken(ast(0), 'Paragraph', size=3)
self.assertToken(ast(0)(0), 'Word', content='Break')
self.assertToken(ast(0)(1), 'LineBreak')
self.assertToken(ast(0)(2), 'Word', content='this')
text = r'''Break\\
this'''
ast = self.tokenize(text)
self.assertToken(ast(0), 'Paragraph', size=3)
self.assertToken(ast(0)(0), 'Word', content='Break')
self.assertToken(ast(0)(1), 'LineBreak')
self.assertToken(ast(0)(2), 'Word', content='this')
def testEscapeCharacter(self):
text = "No \[link\] and no \!\! comment"
ast = self.tokenize(text)
self.assertToken(ast(0), 'Paragraph', size=14)
self.assertToken(ast(0)(0), 'Word', content='No')
self.assertToken(ast(0)(1), 'Space', count=1)
self.assertToken(ast(0)(2), 'Punctuation', content='[')
self.assertToken(ast(0)(3), 'Word', content='link')
self.assertToken(ast(0)(4), 'Punctuation', content=']')
self.assertToken(ast(0)(5), 'Space', count=1)
self.assertToken(ast(0)(6), 'Word', content='and')
self.assertToken(ast(0)(7), 'Space', count=1)
self.assertToken(ast(0)(8), 'Word', content='no')
self.assertToken(ast(0)(9), 'Space', count=1)
self.assertToken(ast(0)(10), 'Punctuation', content='!')
self.assertToken(ast(0)(11), 'Punctuation', content='!')
self.assertToken(ast(0)(12), 'Space', count=1)
self.assertToken(ast(0)(13), 'Word', content='comment')
for c in ['!', '[', ']', '@', '^', '*', '+', '~', '-']:
text = r'foo \{} bar'.format(c)
ast = self.tokenize(text)
self.assertToken(ast(0), 'Paragraph', size=5)
self.assertToken(ast(0)(0), 'Word', content='foo')
self.assertToken(ast(0)(1), 'Space', count=1)
self.assertToken(ast(0)(2), 'Punctuation', content=c)
self.assertToken(ast(0)(3), 'Space', count=1)
self.assertToken(ast(0)(4), 'Word', content='bar')
if __name__ == '__main__':
unittest.main(verbosity=2)
|
nuclear-wizard/moose
|
python/MooseDocs/test/extensions/test_core.py
|
Python
|
lgpl-2.1
| 3,665 | 0.004093 |
"""Fixer that changes raw_input(...) into input(...)."""
# Author: Andre Roberge
# Local imports
from .. import fixer_base
from ..fixer_util import Name
class FixRawInput(fixer_base.BaseFix):
BM_compatible = True
PATTERN = """
power< name='raw_input' trailer< '(' [any] ')' > any* >
"""
def transform(self, node, results):
name = results["name"]
name.replace(Name("input", prefix=name.prefix))
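# Illustrative example (hedged): when run through 2to3, this fixer rewrites
#
#     name = raw_input("prompt: ")
#
# into
#
#     name = input("prompt: ")
#
# keeping the node's original prefix (leading whitespace and comments) intact.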
|
Orav/kbengine
|
kbe/src/lib/python/Lib/lib2to3/fixes/fix_raw_input.py
|
Python
|
lgpl-3.0
| 471 | 0.002123 |
#!/usr/bin/env python3
""" 2018 AOC Day 09 """
import argparse
import typing
import unittest
class Node(object):
''' Class representing node in cyclic linked list '''
def __init__(self, prev: 'Node', next: 'Node', value: int):
''' Create a node with explicit parameters '''
self._prev = prev
self._next = next
self._value = value
@staticmethod
def default() -> 'Node':
''' Create a node linked to itself with value 0 '''
node = Node(None, None, 0) # type: ignore
node._prev = node
node._next = node
return node
def forward(self, n: int = 1) -> 'Node':
''' Go forward n nodes '''
current = self
for _ in range(n):
current = current._next
return current
def back(self, n: int = 1) -> 'Node':
''' Go backward n nodes '''
current = self
for _ in range(n):
current = current._prev
return current
def insert(self, value: int) -> 'Node':
''' Insert new node after current node with given value, and return newly inserted Node '''
new_node = Node(self, self._next, value)
self._next._prev = new_node
self._next = new_node
return self._next
def remove(self) -> 'Node':
''' Remove current Node and return the following Node '''
self._prev._next = self._next
self._next._prev = self._prev
return self._next
def value(self) -> int:
''' Get value '''
return self._value
def chain_values(self):
values = [self.value()]
current = self.forward()
while current != self:
values.append(current.value())
current = current.forward()
return values
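# --- Illustrative sketch (not part of the puzzle solution) ---
# A minimal example of the cyclic linked list used below; values are arbitrary.
def _example_node_usage():
    node = Node.default()        # single node holding 0, linked to itself
    node = node.insert(1)        # cycle 0 -> 1, now positioned at 1
    node = node.insert(2)        # cycle 0 -> 1 -> 2, now positioned at 2
    assert node.back(2).value() == 0
    assert node.forward().value() == 0   # forward from 2 wraps around to 0
    return node.chain_values()   # [2, 0, 1], starting from the current node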
def part1(nplayers: int, highest_marble: int) -> int:
""" Solve part 1 """
current = Node.default()
player = 0
scores = {p: 0 for p in range(nplayers)}
for idx in range(1, highest_marble + 1):
if idx % 23 == 0:
scores[player] += idx
current = current.back(7)
scores[player] += current.value()
current = current.remove()
else:
current = current.forward().insert(idx)
player = (player + 1) % nplayers
return max(scores.values())
def part2(nplayers: int, highest_node: int) -> int:
""" Solve part 2 """
return part1(nplayers, highest_node)
def main():
""" Run 2018 Day 09 """
parser = argparse.ArgumentParser(description='Advent of Code 2018 Day 09')
parser.add_argument('nplayers', type=int, help='# of players')
parser.add_argument(
'highest_marble',
type=int,
help='highest-valued marble',
)
opts = parser.parse_args()
print('Part 1:', part1(opts.nplayers, opts.highest_marble))
print('Part 2:', part2(opts.nplayers, opts.highest_marble * 100))
if __name__ == '__main__':
main()
class ExampleTest(unittest.TestCase):
def test_part1(self):
examples = {
(9, 25): 32,
(10, 1618): 8317,
(13, 7999): 146373,
(17, 1104): 2764,
(21, 6111): 54718,
(30, 5807): 37305,
}
for example, expected in examples.items():
self.assertEqual(part1(*example), expected)
|
devonhollowood/adventofcode
|
2018/day09.py
|
Python
|
mit
| 3,328 | 0.0003 |
# -----------------------------------------------------------------------------
# Copyright (c) 2014--, The Qiita Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
from os.path import join, basename
from os import environ
from qp_shogun.utils import (
_format_params, make_read_pairs_per_sample,
_run_commands, _per_sample_ainfo)
DIR = environ["QC_SORTMERNA_DB_DP"]
RNA_REF_DB = (
'{0}silva-arc-23s-id98.fasta,'
'{0}silva-arc-23s-id98.idx:'
'{0}silva-bac-16s-id90.fasta,'
'{0}silva-bac-16s-id90.idx:'
'{0}silva-bac-23s-id98.fasta,'
'{0}silva-bac-23s-id98.idx:'
'{0}silva-arc-16s-id95.fasta,'
'{0}silva-arc-16s-id95.idx:'
'{0}silva-euk-18s-id95.fasta,'
'{0}silva-euk-18s-id95.idx:'
'{0}silva-euk-28s-id98.fasta,'
'{0}silva-euk-28s-id98.idx:'
'{0}rfam-5s-database-id98.fasta,'
'{0}rfam-5s-database-id98.idx:'
'{0}rfam-5.8s-database-id98.fasta,'
'{0}rfam-5.8s-database-id98.idx'
).format(DIR)
SORTMERNA_PARAMS = {
'blast': 'Output blast format',
'num_alignments': 'Number of alignments',
'a': 'Number of threads',
'm': 'Memory'}
def generate_sortmerna_commands(forward_seqs, reverse_seqs, map_file,
out_dir, parameters):
"""Generates the Sortmerna commands
Parameters
----------
forward_seqs : list of str
The list of forward seqs filepaths
reverse_seqs : list of str
The list of reverse seqs filepaths
map_file : str
The path to the mapping file
out_dir : str
The job output directory
parameters : dict
The command's parameters, keyed by parameter name
Returns
-------
cmds: list of str
The Sortmerna commands
    samples: list of tuple
list of 4-tuples with run prefix, sample name, fwd read fp, rev read fp
Notes
-----
    Currently this requires matched pairs in the make_read_pairs_per_sample
    step but implicitly allows empty reverse reads in the actual command
    generation. This behavior may allow support of situations with empty
    reverse reads in some samples, for example after trimming and QC.
"""
# matching filenames, samples, and run prefixes
samples = make_read_pairs_per_sample(forward_seqs, reverse_seqs, map_file)
cmds = []
param_string = _format_params(parameters, SORTMERNA_PARAMS)
threads = parameters['Number of threads']
# Sortmerna 2.1 does not support direct processing of
# compressed files currently
# note SMR auto-detects file type and adds .fastq extension
# to the generated output files
template = ("unpigz -p {thrds} -c {ip} > {ip_unpigz} && "
"sortmerna --ref {ref_db} --reads {ip_unpigz} "
"--aligned {smr_r_op} --other {smr_nr_op} "
"--fastx {params} && "
"pigz -p {thrds} -c {smr_r_op}.fastq > {smr_r_op_gz} && "
"pigz -p {thrds} -c {smr_nr_op}.fastq > {smr_nr_op_gz};"
)
arguments = {'thrds': threads,
'ref_db': RNA_REF_DB, 'params': param_string}
for run_prefix, sample, f_fp, r_fp in samples:
prefix_path = join(out_dir, run_prefix)
for index, fp in enumerate([f_fp, r_fp]):
# if reverse filepath is not present ignore it
if fp is None:
continue
arguments['ip'] = fp
arguments['ip_unpigz'] = join(
out_dir, basename(fp.replace('.fastq.gz', '.fastq')))
arguments['smr_r_op'] = prefix_path + '.ribosomal.R%d'\
% (index + 1)
arguments['smr_nr_op'] = prefix_path + '.nonribosomal.R%d'\
% (index + 1)
arguments['smr_r_op_gz'] = arguments['smr_r_op'] + '.fastq.gz'
arguments['smr_nr_op_gz'] = arguments['smr_nr_op'] + '.fastq.gz'
cmds.append(template.format(**arguments))
return cmds, samples
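# --- Illustrative usage sketch (hypothetical file names and parameter keys) ---
# The parameter keys below are assumptions based on SORTMERNA_PARAMS above;
# only 'Number of threads' is read directly in this module, the rest are
# passed through _format_params. Nothing here is executed by the plugin.
def _example_generate_sortmerna_commands():
    cmds, samples = generate_sortmerna_commands(
        ['sample1_R1.fastq.gz'], ['sample1_R2.fastq.gz'],
        'qiime_mapping_file.txt', '/tmp/job_output',
        {'Number of threads': 4, 'Memory': '8gb',
         'Output blast format': '1', 'Number of alignments': 1})
    # cmds: list of shell pipelines (unpigz -> sortmerna -> pigz per read file)
    # samples: list of (run_prefix, sample_name, forward_fp, reverse_fp) tuples
    return cmds, samples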
# In this version I have not added a summary file or sam file
def sortmerna(qclient, job_id, parameters, out_dir):
"""Run Sortmerna with the given parameters
Parameters
----------
qclient : tgp.qiita_client.QiitaClient
The Qiita server client
job_id : str
The job id
parameters : dict
The parameter values
out_dir : str
The path to the job's output directory
Returns
-------
bool, list, str
The results of the job
"""
# Step 1 get the rest of the information need to run Sortmerna
qclient.update_job_step(job_id, "Step 1 of 4: Collecting information")
artifact_id = parameters['input']
del parameters['input']
# Get the artifact filepath information
artifact_info = qclient.get("/qiita_db/artifacts/%s/" % artifact_id)
fps = artifact_info['files']
# Get the artifact metadata
prep_info = qclient.get('/qiita_db/prep_template/%s/'
% artifact_info['prep_information'][0])
qiime_map = prep_info['qiime-map']
# Step 2 generating command for Sortmerna
qclient.update_job_step(job_id, "Step 2 of 4: Generating"
" SortMeRNA commands")
rs = fps['raw_reverse_seqs'] if 'raw_reverse_seqs' in fps else []
commands, samples = generate_sortmerna_commands(
fps['raw_forward_seqs'],
rs, qiime_map, out_dir,
parameters)
# Step 3 executing Sortmerna
len_cmd = len(commands)
msg = "Step 3 of 4: Executing ribosomal filtering (%d/{0})".format(len_cmd)
success, msg = _run_commands(qclient, job_id,
commands, msg, 'QC_Sortmerna')
if not success:
return False, None, msg
ainfo = []
# Generates 2 artifacts: one for the ribosomal
# reads and other for the non-ribosomal reads
# Step 4 generating artifacts for Nonribosomal reads
msg = ("Step 4 of 5: Generating artifacts "
"for Nonribosomal reads (%d/{0})").format(len_cmd)
suffixes = ['%s.nonribosomal.R1.fastq.gz', '%s.nonribosomal.R2.fastq.gz']
prg_name = 'Sortmerna'
file_type_name = 'Non-ribosomal reads'
ainfo.extend(_per_sample_ainfo(
out_dir, samples, suffixes, prg_name, file_type_name, bool(rs)))
# Step 5 generating artifacts for Ribosomal reads
msg = ("Step 5 of 5: Generating artifacts "
"for Ribosomal reads (%d/{0})").format(len_cmd)
suffixes = ['%s.ribosomal.R1.fastq.gz', '%s.ribosomal.R2.fastq.gz']
prg_name = 'Sortmerna'
file_type_name = 'Ribosomal reads'
ainfo.extend(_per_sample_ainfo(
out_dir, samples, suffixes, prg_name, file_type_name, bool(rs)))
return True, ainfo, ""
|
antgonza/qp-shotgun
|
qp_shogun/sortmerna/sortmerna.py
|
Python
|
bsd-3-clause
| 6,980 | 0 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2018-2022 F4PGA Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
import time
import os
class Timed:
def __init__(self, t, name, unprinted_runtime=False):
self.t = t
self.name = name
self.start = None
self.unprinted_runtime = unprinted_runtime
def __enter__(self):
self.start = time.time()
def __exit__(self, type, value, traceback):
end = time.time()
self.t.add_runtime(
self.name,
end - self.start,
unprinted_runtime=self.unprinted_runtime
)
def get_vivado_max_freq(report_file):
processing = False
group = ""
delay = ""
freq = 0
freqs = dict()
path_type = None
with open(report_file, 'r') as fp:
for l in fp:
if l.startswith("Slack"):
if '(MET)' in l:
violation = 0.0
else:
violation = float(
l.split(':')[1].split()[0].strip().strip('ns')
)
processing = True
if processing is True:
fields = l.split()
if len(fields) > 1 and fields[1].startswith('----'):
processing = False
# check if this is a timing we want
if group not in requirement.split():
continue
if group not in freqs:
freqs[group] = dict()
freqs[group]['actual'] = freq
freqs[group]['requested'] = requested_freq
freqs[group]['met'] = freq >= requested_freq
freqs[group]['{}_violation'.format(path_type.lower())
] = violation
path_type = None
if path_type is not None:
freqs[group]['{}_violation'.format(path_type.lower())
] = violation
data = l.split(':')
if len(data) > 1:
if data[0].strip() == 'Data Path Delay':
delay = data[1].split()[0].strip('ns')
freq = 1e9 / float(delay)
if data[0].strip() == 'Path Group':
group = data[1].strip()
if data[0].strip() == 'Requirement':
requirement = data[1].strip()
r = float(requirement.split()[0].strip('ns'))
if r != 0.0:
requested_freq = 1e9 / r
if data[0].strip() == 'Path Type':
ptype = data[1].strip()
if path_type != ptype.split()[0]:
path_type = ptype.split()[0]
for cd in freqs:
freqs[cd]['actual'] = float("{:.3f}".format(freqs[cd]['actual'] / 1e6))
freqs[cd]['requested'] = float(
"{:.3f}".format(freqs[cd]['requested'] / 1e6)
)
return freqs
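# Illustrative example of the returned structure (hedged; the actual group
# names and path types depend on the Vivado timing report being parsed):
#
#   {'clk': {'actual': 102.5,          # achieved Fmax in MHz
#            'requested': 100.0,       # requested frequency in MHz
#            'met': True,              # actual >= requested
#            'setup_violation': 0.0,   # worst setup slack violation in ns
#            'hold_violation': 0.0}}   # worst hold slack violation in ns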
def get_yosys_resources(yosys_log):
with open(yosys_log, "r") as f:
data = f.readlines()
resources = dict()
print_stats = False
proc_cells = False
for line in data:
print_stats = "Printing statistics" in line or print_stats
if not print_stats:
continue
if proc_cells and line.strip():
cell, count = line.split()
resources[cell] = count
proc_cells = ("Number of cells" in line or proc_cells) and line.strip()
return resources
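# Illustrative example (hedged): for a yosys log whose statistics section
# contains lines such as "$_DFF_P_  12" and "LUT4  345" after the
# "Number of cells" header, the returned dict would be
# {'$_DFF_P_': '12', 'LUT4': '345'} -- note the counts stay as strings.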
def have_exec(mybin):
    return which(mybin) is not None
# https://stackoverflow.com/questions/377017/test-if-executable-exists-in-python
def which(program, get_dir=False):
def is_exe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
fpath, fname = os.path.split(program)
if fpath:
if is_exe(program):
return program
else:
for path in os.environ["PATH"].split(os.pathsep):
exe_file = os.path.join(path, program)
if is_exe(exe_file):
if get_dir:
return path
else:
return exe_file
return None
def safe_get_dict_value(dict, key, default):
if key in dict:
return dict[key]
else:
return default
def get_file_dict(file_name, file_type):
return dict(name=os.path.realpath(file_name), file_type=file_type)
|
SymbiFlow/fpga-tool-perf
|
utils/utils.py
|
Python
|
isc
| 5,117 | 0.000391 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2013 by Erwin Marsi and TST-Centrale
#
# This file is part of the DAESO Framework.
#
# The DAESO Framework is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# The DAESO Framework is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
distutils setup script for distributing Timbl Tools
"""
# TODO:
# - docs, data and test are not installed when using bdist_wininst...
__authors__ = "Erwin Marsi <e.marsi@gmail.com>"
from distutils.core import setup
from glob import glob
from os import walk, path, remove
from os.path import basename, isdir, join, exists
from shutil import rmtree
if exists('MANIFEST'): remove('MANIFEST')
if exists("build"): rmtree("build")
name = "timbl-tools"
version = "0.5.0"
description = """Timbl Tools is a collection of Python modules and scripts for
working with TiMBL, the Tilburg Memory-based Learner."""
long_description = """
Timbl Tools is a collection of Python modules and scripts for working with
TiMBL, the Tilburg Memory-based Learner. It provides support for:
* creating Timbl servers and clients
* running (cross-validated) experiments
* lazy parsing of verbose Timbl ouput (e.g. NN distributions)
* down-sampling of instances
* writing ascii graphs of the feature weights
"""
packages = [ root[4:]
for (root, dirs, files) in walk("lib")
if not ".svn" in root ]
def get_data_files(data_dir_prefix, dir):
# data_files specifies a sequence of (directory, files) pairs
# Each (directory, files) pair in the sequence specifies the installation directory
# and the files to install there.
data_files = []
for base, subdirs, files in walk(dir):
install_dir = join(data_dir_prefix, base)
files = [ join(base, f) for f in files
if not f.endswith(".pyc") and not f.endswith("~") ]
data_files.append((install_dir, files))
if '.svn' in subdirs:
subdirs.remove('.svn') # ignore svn directories
return data_files
# data files are installed under sys.prefix/share/pycornetto-%(version)
data_dir = join("share", "%s-%s" % (name, version))
data_files = [(data_dir, ['CHANGES', 'COPYING', 'INSTALL', 'README'])]
data_files += get_data_files(data_dir, "doc")
data_files += get_data_files(data_dir, "data")
sdist_options = dict(
formats=["zip","gztar","bztar"])
setup(
name = name,
version = version,
description = description,
long_description = long_description,
license = "GNU Public License v3",
author = "Erwin Marsi",
author_email = "e.marsi@gmail.com",
url = "https://github.com/emsrc/timbl-tools",
requires = ["networkx"],
provides = ["tt (%s)" % version],
package_dir = {"": "lib"},
packages = packages,
scripts = glob(join("bin","*.py")),
data_files = data_files,
platforms = "POSIX, Mac OS X, MS Windows",
keywords = [
"TiMBL"],
classifiers = [
"Development Status :: 4 - Beta",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: GNU Public License (GPL)",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Scientific/Engineering :: Artificial Intelligence",
"Natural Language :: English"
],
options = dict(sdist=sdist_options)
)
|
emsrc/timbl-tools
|
setup.py
|
Python
|
gpl-3.0
| 3,905 | 0.014597 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .operations import Operations
from .components_operations import ComponentsOperations
from .web_tests_operations import WebTestsOperations
from .export_configurations_operations import ExportConfigurationsOperations
from .proactive_detection_configurations_operations import ProactiveDetectionConfigurationsOperations
from .component_current_billing_features_operations import ComponentCurrentBillingFeaturesOperations
from .component_quota_status_operations import ComponentQuotaStatusOperations
from .api_keys_operations import APIKeysOperations
__all__ = [
'Operations',
'ComponentsOperations',
'WebTestsOperations',
'ExportConfigurationsOperations',
'ProactiveDetectionConfigurationsOperations',
'ComponentCurrentBillingFeaturesOperations',
'ComponentQuotaStatusOperations',
'APIKeysOperations',
]
|
lmazuel/azure-sdk-for-python
|
azure-mgmt-applicationinsights/azure/mgmt/applicationinsights/operations/__init__.py
|
Python
|
mit
| 1,314 | 0.001522 |
from extract_feature_lib import *
from sys import argv
from dga_model_eval import *
from __init__ import *
def clear_cache(index, cache):
print "clear cache", index
for tmp in cache:
client[db_name][coll_name_list[index]+"_matrix"].insert(cache[tmp])
def extract_domain_feature(index):
#cursor = client[db_name].domain.find({"_id":{"$in": ["emltrk.com", "weminemnc.com"]}})
cursor = client[db_name][coll_name_list[index]].find(timeout=False)
outlier = {"", "ntp.org", "isipp.com", "gccdn.net", "cdngc.net", "gstatic.com", "cloudfront.net"}
cache = {}
num = 1
print index
for row in cursor:
if get_tail(row["_id"]) in outlier:
continue
# if index == 0 and np.random.randint(1, 100) != 7:
# continue
if index == 0:
if np.random.randint(1, 50) != 7:
flag = False
this_db_name = db_name
for m in range(0, 5):
pre_db_name = "p" + (datetime.strptime(this_db_name[1:], "%y%m%d") - timedelta(days=1)).strftime("%y%m%d")
if client[pre_db_name][coll_name_list[index]+"_matrix"].find_one({"_id": row["_id"]}):
flag = True
break
this_db_name = pre_db_name
if not flag:
continue
num += 1
print "one more", num, row["_id"]
ip_count = len(row["ITEMS"])
min_ttl = min(row["TTLS"])
max_ttl = max(row["TTLS"])
lifetime = int(row["LAST_SEEN"] - row["FIRST_SEEN"])/(60*60*24)
p16_entropy = ip_diversity(row["ITEMS"])
if index == 2 and (ip_count < 2 or min_ttl > 20000 or p16_entropy < 0.08):
continue
gro = growth(row["_id"], row["ITEMS"], row["SUBDOMAINS"], db_name)
relative = relative_domain(row["ITEMS"], db_name)
ipinfo = ip_info(row["ITEMS"], db_name)
if ipinfo[0] == -1:
print "no ip", row["_id"], index, db_name
continue
subdomain = subdomain_diversity(row["SUBDOMAINS"])
cache[row["_id"]] = {"ip_count": ip_count, "p16_entropy": p16_entropy,
"relative": relative,
"subdomain": subdomain,
"growth": gro,
"ipinfo": ipinfo,
"ttl": [min_ttl, max_ttl, max_ttl - min_ttl], "lifetime": lifetime, "_id": row["_id"]}
# client[db_name][coll_name_list[index]+"_matrix"].insert(tmp)
clear_cache(index, cache)
def extract_ip_feature(index):
model = init_dga()
cursor = client[db_name][coll_name_list[index]].find(timeout=False)
cache = {}
print index
for row in cursor:
if not ip_p.match(str(row["_id"])):
continue
number = len(row["ITEMS"])
min_ttl = min(row["TTLS"])
max_ttl = max(row["TTLS"])
lifetime = int(row["LAST_SEEN"] - row["FIRST_SEEN"])/(60*60*24)
dd = domain_diversity(row["SUBDOMAINS"], row["ITEMS"])
ips = ip_pool_stability(row["ITEMS"], db_name)
tmp_counter = collections.Counter(evaluate_url_list(model, row["ITEMS"]))
dga = round(tmp_counter['dga']/float(number), 3)
cache[row["_id"]] = {"number": number, "dd": dd, "ips": ips, "dga": dga,
"ttl": [min_ttl, max_ttl], "lifetime": lifetime, "_id": row["_id"]}
# client[db_name][coll_name_list[index]+"_matrix"].insert(tmp)
clear_cache(index, cache)
def main(index):
#index = int(raw_input())
if index < 4:
extract_domain_feature(index)
elif index < 8:
extract_ip_feature(index)
else:
for i in range(4, 8):
extract_ip_feature(i)
for i in range(0, 4):
extract_domain_feature(i)
if __name__ == '__main__':
script, db_name = argv
main(8)
|
whodewho/FluxEnder
|
src/extract_feature.py
|
Python
|
gpl-2.0
| 3,943 | 0.003297 |
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2011 OpenERP Italian Community (<http://www.openerp-italia.org>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from report import report_sxw
class Parser(report_sxw.rml_parse):
def __init__(self, cr, uid, name, context):
super(Parser, self).__init__(cr, uid, name, context)
self.localcontext.update({
'time': time,
})
|
luca-vercelli/l10n-italy
|
l10n_it_invoice_report/invoice.py
|
Python
|
agpl-3.0
| 1,213 | 0.004122 |
# Copyright (c) 2016 Lee Cannon
# Licensed under the MIT License, see included LICENSE File
import click
import os
import re
from datetime import datetime
def _is_file_modnet(file_name: str) -> bool:
"""Returns True if the filename contains Modnet.
:param file_name: The filename to check.
:type file_name: str
:return: If the filename contains Modnet
:rtype: bool
"""
if file_name.find('Modnet') != -1:
return True
else:
return False
def _is_file_merge(file_name: str) -> bool:
"""Returns True if the filename contains Merge.
:param file_name: The filename to check.
:type file_name: str
:return: If the filename contains Merge
:rtype: bool
"""
if file_name.find('Merge') != -1:
return True
else:
return False
def correct_html(file: str, output_folder: str, date: str) -> str:
"""Correct the HTML output of Jupyter Notebook.
    :param file: The Notebook HTML output file
:type file: str
:param output_folder: The folder to output the corrected HTML to.
:type output_folder: str
:param date: The date the report is generated for.
:type date: str
:return: The output file.
:rtype: str
"""
includes = os.path.dirname(os.path.abspath(__file__)) + os.sep + 'includes' + os.sep
with open(file, 'r') as f:
contents = f.read()
is_modnet = _is_file_modnet(file)
is_merge = _is_file_merge(file)
# Replace require.js CDN with full code.
with open(includes + 'require.min.js', 'r') as f:
require = f.read()
contents = contents.replace(
'<script src="https://cdnjs.cloudflare.com/ajax/libs/require.js/2.1.10/require.min.js"></script>',
'<script>' + require + '</script>')
# Replace jQuery CDN with full code.
with open(includes + 'jquery.min.js', 'r') as f:
jquery = f.read()
contents = contents.replace(
'<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/2.0.3/jquery.min.js"></script>',
'<script>' + jquery + '</script>')
# Remove MathJax CDN as it is unnecessary
contents = contents.replace(
'<script src="https://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS_HTML"></script>', '')
# Inject jQuery code to hide the code input boxes
contents = contents.replace(
'</body>', "<script>$('div.input').hide();</script></body>")
# Deal with how crap IE is.
contents = contents.replace(
'</title>', '</title>\n<meta http-equiv="x-ua-compatible" content="IE=9">')
if str(file).find('Daily') != -1:
if is_modnet:
output_file = output_folder + 'modnet-daily-report-' + date + '.html'
elif is_merge:
output_file = output_folder + 'merge-daily-report-' + date + '.html'
else:
output_file = output_folder + 'daily-report-' + date + '.html'
elif str(file).find('Weekly') != -1:
if is_modnet:
output_file = output_folder + 'modnet-weekly-report-' + date + '.html'
elif is_merge:
output_file = output_folder + 'merge-weekly-report-' + date + '.html'
else:
output_file = output_folder + 'weekly-report-' + date + '.html'
elif str(file).find('Monthly') != -1:
if is_modnet:
output_file = output_folder + 'modnet-monthly-report-' + date + '.html'
elif is_merge:
output_file = output_folder + 'merge-monthly-report-' + date + '.html'
else:
output_file = output_folder + 'monthly-report-' + date + '.html'
else:
raise TypeError('The only supported report types are "daily" and "weekly')
with open(output_file, 'w') as f:
f.write(contents)
return output_file
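# --- Illustrative usage sketch (hypothetical paths) ---
# Not wired into the CLI below; it simply shows the expected arguments.
def _example_correct_html():
    # The input file name must contain 'Daily', 'Weekly' or 'Monthly', and the
    # output folder string is used as a prefix, so keep the trailing separator.
    return correct_html('exports/Daily Report.html', 'reports/', '2018-01-15')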
@click.group()
@click.version_option(version=0.1)
def main():
"""A command line wrapper around the trending module."""
pass
@main.command(short_help='correct notebook output')
@click.argument('file', type=click.Path(exists=True))
@click.argument('output_folder', type=click.Path(exists=True))
@click.argument('date')
def clean_html(file, output_folder, date):
"""FILE: The path to the output of the notebook
    DATE: The date the report is for. For weekly reports, the date of the Monday is recommended."""
if not re.search('[0-9]{4}-[0-9]{2}-[0-9]{2}', date):
click.echo('The date must be in "yyyy-mm-dd" format.')
exit()
output_file = correct_html(file, output_folder, date)
if click.confirm('Do you want to view the corrected output?'):
click.launch(output_file)
@main.command(short_help='generate the daily report')
@click.argument('file', type=click.Path(exists=True))
def daily(file):
"""FILE: The path to that days data"""
raise NotImplementedError
@main.command(short_help='generate the weekly report')
@click.argument('monday_file', type=click.Path(exists=True))
def weekly(monday_file):
"""MONDAY_FILE: The path to the monday data"""
raise NotImplementedError
@main.command(short_help='generate the monthly report')
@click.argument('month_number')
@click.argument('year', default=str(datetime.now().year))
def monthly(month_number, year):
"""MONTH_NUMBER: The number of the month
YEAR: The year to grab the data from"""
try:
month_number = int(month_number)
except ValueError:
click.echo('The month entered is invalid')
exit()
if month_number < 1 or month_number > 12:
click.echo('The month entered is invalid')
exit()
try:
year = int(year)
except ValueError:
click.echo('The year entered is invalid')
exit()
if year < 2015:
click.echo('No data from before 2015 is present')
exit()
raise NotImplementedError
|
leecannon/trending
|
trending/command_line.py
|
Python
|
mit
| 5,764 | 0.002255 |
from testmodule import *
import sys
class TestWrites(TestRunner):
def __init__(self):
super().__init__()
def mthd(self):
import pysharkbite
securityOps = super().getSecurityOperations()
securityOps.create_user("testUser","password")
## validate that we DON'T see the permissions
assert( False == securityOps.has_system_permission("testUser",pysharkbite.SystemPermissions.CREATE_TABLE) )
securityOps.grant_system_permission("testUser",pysharkbite.SystemPermissions.CREATE_TABLE )
securityOps.grant_system_permission("testUser",pysharkbite.SystemPermissions.DROP_TABLE )
## validate that we DO see the permissions
assert( True == securityOps.has_system_permission("testUser",pysharkbite.SystemPermissions.CREATE_TABLE) )
auths = pysharkbite.Authorizations()
auths.addAuthorization("blah1")
auths.addAuthorization("blah2")
securityOps.grantAuthorizations(auths,"testUser")
tableOperations = super().getTableOperations()
tableOperations.create(False)
## validate that we DO see the permissions
securityOps.grant_table_permission("testUser",super().getTable(),pysharkbite.TablePermissions.READ )
securityOps.grant_table_permission("testUser",super().getTable(),pysharkbite.TablePermissions.WRITE )
securityOps.grant_table_permission("testUser",super().getTable(),pysharkbite.TablePermissions.DROP_TABLE )
super().setUser("testUser","password")
super().inity(replace=True)
tableOperations = super().getTableOperations()
securityOps = super().getConnector().securityOps()
""" Add authorizations """
""" mutation.put("cf","cq","cv",1569786960) """
with tableOperations.createWriter(auths, 10) as writer:
mutation = pysharkbite.Mutation("row2")
mutation.put("cf","cq","blah1",1569786960, "value")
mutation.put("cf2","cq2","blah1",1569786960, "value2")
""" no value """
mutation.put("cf3","cq3","blah2",1569786960, "")
writer.addMutation( mutation )
auths = pysharkbite.Authorizations()
auths.addAuthorization("blah1")
scanner = tableOperations.createScanner(auths, 2)
startKey = pysharkbite.Key()
endKey = pysharkbite.Key()
startKey.setRow("row")
endKey.setRow("row3")
range = pysharkbite.Range(startKey,True,endKey,False)
scanner.addRange( range )
resultset = scanner.getResultSet()
for keyvalue in resultset:
key = keyvalue.getKey()
assert( "row2" == key.getRow() )
value = keyvalue.getValue()
if "cf" == key.getColumnFamily():
assert( "value" == value.get() )
if ("cf2" == key.getColumnFamily() ):
assert( "value2" == value.get() )
if ("cf3" == key.getColumnFamily() ):
print("Unexpected column cf3")
sys.exit(1)
""" delete your table if user did not create temp """
tableOperations.remove()
super().setUser("root","secret")
super().inity(replace=True)
tableOperations = super().getTableOperations()
securityOps = super().getSecurityOperations()
securityOps.remove_user("testUser")
try:
super().setUser("testUser","password")
super().inity(replace=True)
print("Expected failure when setting user")
sys.exit(1)
except pysharkbite.ClientException:
print("caught expected exception")
pass
runner = TestWrites()
runner.mthd()
|
phrocker/sharkbite
|
test/python/TestSecurityOperations.py
|
Python
|
apache-2.0
| 3,290 | 0.061398 |
#!/usr/bin/env python
# encoding: utf-8
def run(whatweb, pluginname):
whatweb.recog_from_header(pluginname, "X-Cache")
|
cflq3/getcms
|
plugins/jiasule_cloudsec.py
|
Python
|
mit
| 126 | 0.007937 |
# -*- coding: UTF-8 -*-
#######################################################################
# ----------------------------------------------------------------------------
# "THE BEER-WARE LICENSE" (Revision 42):
# @tantrumdev wrote this file. As long as you retain this notice you
# can do whatever you want with this stuff. If we meet some day, and you think
# this stuff is worth it, you can buy me a beer in return. - Muad'Dib
# ----------------------------------------------------------------------------
#######################################################################
# Addon Name: Placenta
# Addon id: plugin.video.placenta
# Addon Provider: Mr.Blamo
import re
import urllib
import urlparse
import json
import base64
from resources.lib.modules import client, cleantitle, directstream, dom_parser2, source_utils, log_utils
class source:
def __init__(self):
self.priority = 1
self.language = ['en']
self.domains = ['savaze.com']
self.base_link = 'http://www.savaze.com'
self.movies_search_path = ('links/%s')
def movie(self, imdb, title, localtitle, aliases, year):
try:
urls = []
lst = ['1080p','720p','bluray-2','bluray']
for i in lst:
url = urlparse.urljoin(self.base_link, self.movies_search_path % (imdb) + '-%s' % i)
r = client.request(url)
if r: urls.append(url)
url = urlparse.urljoin(self.base_link, self.movies_search_path % (imdb))
url = client.request(url, output='geturl')
if '-1080p' not in url and '-720p' not in url and '-bluray' not in url:
r = client.request(url)
if r: urls.append(url)
if not urls: return
return urls
except Exception:
return
def tvshow(self, imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year):
return ''
def episode(self, url, imdb, tvdb, title, premiered, season, episode):
try:
urls = []
lst = ['1080p','720p','bluray-2','bluray']
clean_season = season if len(season) >= 2 else '0' + season
clean_episode = episode if len(episode) >= 2 else '0' + episode
for i in lst:
url = urlparse.urljoin(self.base_link, self.movies_search_path % (imdb) + '-s%se%s-%s' % (clean_season, clean_episode, i))
r = client.request(url)
if r: urls.append(url)
url = urlparse.urljoin(self.base_link, self.movies_search_path % (imdb))
url = client.request(url, output='geturl')
if '-1080p' not in url and '-720p' not in url and '-bluray' not in url:
r = client.request(url)
if r: urls.append(url)
if not urls: return
return urls
except Exception:
return
def sources(self, url, hostDict, hostprDict):
try:
sources = []
for u in url:
hostDict += [('clicknupload.org')]
quality = '1080p' if '-1080p' in u or 'bluray-2' in u else '720p' if '-720p' in u or 'bluray' in u else 'SD'
r = client.request(u)
r = dom_parser2.parse_dom(r, 'ul', {'class': 'download-links'})
r = dom_parser2.parse_dom(r, 'a', req=['href'])
r = [i.attrs['href'] for i in r if i]
for i in r:
try:
valid, host = source_utils.is_host_valid(i, hostDict)
if not valid: continue
sources.append({
'source': host,
'quality': quality,
'language': 'en',
'url': i,
'direct': False,
'debridonly': False
})
except: pass
return sources
except Exception:
return
def resolve(self, url):
return url
|
felipenaselva/felipe.repository
|
script.module.placenta/lib/resources/lib/sources/en/to_be_fixed/sitedown/savaze.py
|
Python
|
gpl-2.0
| 4,194 | 0.010491 |
#
# Copyright (C) 2013-2014 Emerson Max de Medeiros Silva
#
# This file is part of ippl.
#
# ippl is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ippl is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ippl. If not, see <http://www.gnu.org/licenses/>.
#
import math
from ippl.shape import *
from ippl.render import *
if __name__ == "__main__":
s = Shape()
# IV - Lines
l = Line(Point(0, 0), Point(50, 25))
s.outer_loop.append(l)
l = Line(Point(50, 25), Point(0, 0))
l.move(0, 30)
s.outer_loop.append(l)
l = Line(Point(0, 25), Point(50, 0))
l.move(55, 0)
s.outer_loop.append(l)
l = Line(Point(50, 0), Point(0, 25))
l.move(55, 30)
s.outer_loop.append(l)
aabb = s.bounds()
size = aabb.size()
size = (int(size[0]) + 1, int(size[1]) + 1)
r = Render()
r.draw_bounds = True
r.image_size = size
r.initialize()
r.shape(s)
r.save("render_test.png")
|
emersonmx/ippl
|
ippl/test/render.py
|
Python
|
gpl-3.0
| 1,381 | 0.003621 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Linter to verify that all flags reported by GHC's --show-options mode
are documented in the user's guide.
"""
import sys
import subprocess
from typing import Set
from pathlib import Path
# A list of known-undocumented flags. This should be considered to be a to-do
# list of flags that need to be documented.
EXPECTED_UNDOCUMENTED_PATH = \
Path(__file__).parent / 'expected-undocumented-flags.txt'
EXPECTED_UNDOCUMENTED = \
{line for line in open(EXPECTED_UNDOCUMENTED_PATH).read().split()}
def expected_undocumented(flag: str) -> bool:
if flag in EXPECTED_UNDOCUMENTED:
return True
if flag.startswith('-Werror'):
return True
if flag.startswith('-Wno-') \
or flag.startswith('-dno') \
or flag.startswith('-fno') \
or flag.startswith('-XNo'):
return True
if flag.startswith('-Wwarn=') \
or flag.startswith('-Wno-warn='):
return True
return False
def read_documented_flags(doc_flags) -> Set[str]:
# Map characters that mark the end of a flag
# to whitespace.
trans = str.maketrans({
'=': ' ',
'[': ' ',
'⟨': ' ',
})
return {line.translate(trans).split()[0]
for line in doc_flags.read().split('\n')
if line != ''}
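# --- Illustrative example (hedged, not called by main) ---
def _example_read_documented_flags():
    import io
    # '=', '[' and '⟨' are mapped to whitespace, so only the flag name is kept.
    doc = io.StringIO("-fmax-errors=⟨n⟩\n-Wunused-imports\n")
    return read_documented_flags(doc)  # {'-fmax-errors', '-Wunused-imports'}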
def read_ghc_flags(ghc_path: str) -> Set[str]:
ghc_output = subprocess.check_output([ghc_path, '--show-options'],
encoding='UTF-8')
return {flag
for flag in ghc_output.split('\n')
if not expected_undocumented(flag)
if flag != ''}
def main() -> None:
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--ghc', type=argparse.FileType('r'),
help='path of GHC executable')
parser.add_argument('--doc-flags', type=argparse.FileType('r'),
help='path of ghc-flags.txt output from Sphinx')
args = parser.parse_args()
doc_flags = read_documented_flags(args.doc_flags)
ghc_flags = read_ghc_flags(args.ghc.name)
failed = False
undocumented = ghc_flags - doc_flags
if len(undocumented) > 0:
print(f'Found {len(undocumented)} flags not documented in the users guide:')
print('\n'.join(f' {flag}' for flag in sorted(undocumented)))
print()
failed = True
now_documented = EXPECTED_UNDOCUMENTED.intersection(doc_flags)
if len(now_documented) > 0:
print(f'Found flags that are documented yet listed in {EXPECTED_UNDOCUMENTED_PATH}:')
print('\n'.join(f' {flag}' for flag in sorted(now_documented)))
print()
failed = True
if failed:
sys.exit(1)
if __name__ == '__main__':
main()
|
sdiehl/ghc
|
docs/users_guide/compare-flags.py
|
Python
|
bsd-3-clause
| 2,799 | 0.002145 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
=========================================================================
Program: Visualization Toolkit
Module: TestNamedColorsIntegration.py
Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
All rights reserved.
See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
This software is distributed WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the above copyright notice for more information.
=========================================================================
'''
import vtk
import vtk.test.Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
class reconstructSurface(vtk.test.Testing.vtkTest):
def testReconstructSurface(self):
# Read some points. Use a programmable filter to read them.
#
pointSource = vtk.vtkProgrammableSource()
def readPoints():
fp = open(VTK_DATA_ROOT + "/Data/cactus.3337.pts", "r")
points = vtk.vtkPoints()
while True:
line = fp.readline().split()
if len(line) == 0:
break
if line[0] == "p":
points.InsertNextPoint(float(line[1]), float(line[2]), float(line[3]))
pointSource.GetPolyDataOutput().SetPoints(points)
pointSource.SetExecuteMethod(readPoints)
# Construct the surface and create isosurface
#
surf = vtk.vtkSurfaceReconstructionFilter()
surf.SetInputConnection(pointSource.GetOutputPort())
cf = vtk.vtkContourFilter()
cf.SetInputConnection(surf.GetOutputPort())
cf.SetValue(0, 0.0)
reverse = vtk.vtkReverseSense()
reverse.SetInputConnection(cf.GetOutputPort())
reverse.ReverseCellsOn()
reverse.ReverseNormalsOn()
map = vtk.vtkPolyDataMapper()
map.SetInputConnection(reverse.GetOutputPort())
map.ScalarVisibilityOff()
surfaceActor = vtk.vtkActor()
surfaceActor.SetMapper(map)
surfaceActor.GetProperty().SetDiffuseColor(1.0000, 0.3882, 0.2784)
surfaceActor.GetProperty().SetSpecularColor(1, 1, 1)
surfaceActor.GetProperty().SetSpecular(.4)
surfaceActor.GetProperty().SetSpecularPower(50)
# Create the RenderWindow, Renderer and both Actors
#
ren = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren)
# Add the actors to the renderer, set the background and size
#
ren.AddActor(surfaceActor)
ren.SetBackground(1, 1, 1)
renWin.SetSize(300, 300)
ren.GetActiveCamera().SetFocalPoint(0, 0, 0)
ren.GetActiveCamera().SetPosition(1, 0, 0)
ren.GetActiveCamera().SetViewUp(0, 0, 1)
ren.ResetCamera()
ren.GetActiveCamera().Azimuth(20)
ren.GetActiveCamera().Elevation(30)
ren.GetActiveCamera().Dolly(1.2)
ren.ResetCameraClippingRange()
# render and interact with data
iRen = vtk.vtkRenderWindowInteractor()
iRen.SetRenderWindow(renWin);
renWin.Render()
img_file = "reconstructSurface.png"
vtk.test.Testing.compareImage(iRen.GetRenderWindow(), vtk.test.Testing.getAbsImagePath(img_file), threshold=25)
vtk.test.Testing.interact()
if __name__ == "__main__":
vtk.test.Testing.main([(reconstructSurface, 'test')])
|
hlzz/dotfiles
|
graphics/VTK-7.0.0/Imaging/Core/Testing/Python/reconstructSurface.py
|
Python
|
bsd-3-clause
| 3,635 | 0.001926 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.9 on 2016-09-01 11:41
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('profiles', '0006_add_show_security_question_field'),
]
operations = [
migrations.AddField(
model_name='userprofilessettings',
name='num_security_questions',
field=models.PositiveSmallIntegerField(default=3, verbose_name='Number of security questions asked for password recovery'),
),
migrations.AddField(
model_name='userprofilessettings',
name='password_recovery_retries',
field=models.PositiveSmallIntegerField(default=5, verbose_name='Max number of password recovery retries before lockout'),
),
]
|
praekelt/molo.profiles
|
molo/profiles/migrations/0007_add_password_recovery_retries.py
|
Python
|
bsd-2-clause
| 845 | 0.002367 |
from flask import render_template, redirect, url_for, request
from flask.views import MethodView
from nastradini import mongo, utils
from positionform import PositionForm
class Position(MethodView):
methods = ['GET', 'POST']
def get(self):
form = PositionForm()
return render_template('position.html', form=form)
def post(self):
# First, let's get the doc id.
doc_id = utils.get_doc_id()
# Create position info object.
form = PositionForm(request.form)
json = form.data
# Store the document.
mongo.db.positions.update({'_id': doc_id}, json, True)
return redirect(url_for('browse_internal_positions'))
|
assemblio/project-nastradin
|
nastradini/views/forms/position.py
|
Python
|
gpl-2.0
| 700 | 0 |
#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Given the output of -t commands from a ninja build for a gyp and GN generated
build, report on differences between the command lines."""
import os
import shlex
import subprocess
import sys
# Must be in src/.
os.chdir(os.path.join(os.path.dirname(__file__), '..', '..', '..'))
g_total_differences = 0
def FindAndRemoveArgWithValue(command_line, argname):
"""Given a command line as a list, remove and return the value of an option
that takes a value as a separate entry.
Modifies |command_line| in place.
"""
if argname not in command_line:
return ''
location = command_line.index(argname)
value = command_line[location + 1]
command_line[location:location + 2] = []
return value
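# Illustrative example (hedged, not called anywhere in this script):
#   args = ['-c', 'foo.cc', '-o', 'foo.o']
#   FindAndRemoveArgWithValue(args, '-o')  # returns 'foo.o'
#   # args is now ['-c', 'foo.cc'] because the option and its value are removed.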
def MergeSpacedArgs(command_line, argname):
"""Combine all arguments |argname| with their values, separated by a space."""
i = 0
result = []
while i < len(command_line):
arg = command_line[i]
if arg == argname:
result.append(arg + ' ' + command_line[i + 1])
i += 1
else:
result.append(arg)
i += 1
return result
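# Illustrative example (hedged):
#   MergeSpacedArgs(['-Xclang', 'foo', '-c'], '-Xclang')
#   # returns ['-Xclang foo', '-c'] -- the flag and its value become one entry.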
def NormalizeSymbolArguments(command_line):
"""Normalize -g arguments.
If there's no -g args, it's equivalent to -g0. -g2 is equivalent to -g.
Modifies |command_line| in place.
"""
# Strip -g0 if there's no symbols.
have_some_symbols = False
for x in command_line:
if x.startswith('-g') and x != '-g0':
have_some_symbols = True
if not have_some_symbols and '-g0' in command_line:
command_line.remove('-g0')
# Rename -g2 to -g.
if '-g2' in command_line:
    command_line[command_line.index('-g2')] = '-g'
def GetFlags(lines):
"""Turn a list of command lines into a semi-structured dict."""
flags_by_output = {}
for line in lines:
# TODO(scottmg): Hacky way of getting only cc for now.
if 'clang' not in line:
continue
command_line = shlex.split(line.strip())[1:]
output_name = FindAndRemoveArgWithValue(command_line, '-o')
dep_name = FindAndRemoveArgWithValue(command_line, '-MF')
NormalizeSymbolArguments(command_line)
command_line = MergeSpacedArgs(command_line, '-Xclang')
defines = [x for x in command_line if x.startswith('-D')]
include_dirs = [x for x in command_line if x.startswith('-I')]
dash_f = [x for x in command_line if x.startswith('-f')]
warnings = [x for x in command_line if x.startswith('-W')]
cc_file = [x for x in command_line if x.endswith('.cc') or
x.endswith('.c') or
x.endswith('.cpp')]
if len(cc_file) != 1:
print 'Skipping %s' % command_line
continue
assert len(cc_file) == 1
others = [x for x in command_line if x not in defines and \
x not in include_dirs and \
x not in dash_f and \
x not in warnings and \
x not in cc_file]
# Filter for libFindBadConstructs.so having a relative path in one and
# absolute path in the other.
others_filtered = []
for x in others:
if x.startswith('-Xclang ') and x.endswith('libFindBadConstructs.so'):
others_filtered.append(
'-Xclang ' +
os.path.join(os.getcwd(),
os.path.normpath(
os.path.join('out/gn_flags', x.split(' ', 1)[1]))))
elif x.startswith('-B'):
others_filtered.append(
'-B' +
os.path.join(os.getcwd(),
os.path.normpath(os.path.join('out/gn_flags', x[2:]))))
else:
others_filtered.append(x)
others = others_filtered
flags_by_output[cc_file[0]] = {
'output': output_name,
'depname': dep_name,
'defines': sorted(defines),
'include_dirs': sorted(include_dirs), # TODO(scottmg): This is wrong.
'dash_f': sorted(dash_f),
'warnings': sorted(warnings),
'other': sorted(others),
}
return flags_by_output
def CompareLists(gyp, gn, name, dont_care_gyp=None, dont_care_gn=None):
"""Return a report of any differences between gyp and gn lists, ignoring
anything in |dont_care_{gyp|gn}| respectively."""
global g_total_differences
if not dont_care_gyp:
dont_care_gyp = []
if not dont_care_gn:
dont_care_gn = []
output = ''
if gyp[name] != gn[name]:
gyp_set = set(gyp[name])
gn_set = set(gn[name])
missing_in_gyp = gyp_set - gn_set
missing_in_gn = gn_set - gyp_set
missing_in_gyp -= set(dont_care_gyp)
missing_in_gn -= set(dont_care_gn)
if missing_in_gyp or missing_in_gn:
output += ' %s differ:\n' % name
if missing_in_gyp:
output += ' In gyp, but not in GN:\n %s' % '\n '.join(
sorted(missing_in_gyp)) + '\n'
g_total_differences += len(missing_in_gyp)
if missing_in_gn:
output += ' In GN, but not in gyp:\n %s' % '\n '.join(
sorted(missing_in_gn)) + '\n\n'
g_total_differences += len(missing_in_gn)
return output
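# Illustrative sketch (added, not in the original): with gyp = {'defines': ['-DA', '-DB']}
# and gn = {'defines': ['-DA']}, CompareLists(gyp, gn, 'defines') reports '-DB' under
# "In gyp, but not in GN" and increments g_total_differences by one.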
def Run(command_line):
"""Run |command_line| as a subprocess and return stdout. Raises on error."""
return subprocess.check_output(command_line, shell=True)
def main():
if len(sys.argv) != 2 and len(sys.argv) != 3:
print 'usage: %s gyp_target gn_target' % __file__
print ' or: %s target' % __file__
return 1
if len(sys.argv) == 2:
sys.argv.append(sys.argv[1])
print >>sys.stderr, 'Regenerating...'
# Currently only Release, non-component.
Run('gn gen out/gn_flags --args="is_debug=false is_component_build=false"')
os.environ.pop('GYP_DEFINES', None)
Run('python build/gyp_chromium -Goutput_dir=out_gyp_flags -Gconfig=Release')
gn = Run('ninja -C out/gn_flags -t commands %s' % sys.argv[2])
gyp = Run('ninja -C out_gyp_flags/Release -t commands %s' % sys.argv[1])
all_gyp_flags = GetFlags(gyp.splitlines())
all_gn_flags = GetFlags(gn.splitlines())
gyp_files = set(all_gyp_flags.keys())
gn_files = set(all_gn_flags.keys())
different_source_list = gyp_files != gn_files
if different_source_list:
print 'Different set of sources files:'
print ' In gyp, not in GN:\n %s' % '\n '.join(
sorted(gyp_files - gn_files))
print ' In GN, not in gyp:\n %s' % '\n '.join(
sorted(gn_files - gyp_files))
print '\nNote that flags will only be compared for files in both sets.\n'
file_list = gyp_files & gn_files
files_with_given_differences = {}
for filename in sorted(file_list):
gyp_flags = all_gyp_flags[filename]
gn_flags = all_gn_flags[filename]
differences = CompareLists(gyp_flags, gn_flags, 'dash_f')
differences += CompareLists(gyp_flags, gn_flags, 'defines')
differences += CompareLists(gyp_flags, gn_flags, 'include_dirs')
differences += CompareLists(gyp_flags, gn_flags, 'warnings', dont_care_gn=[
# More conservative warnings in GN we consider to be OK.
'-Wendif-labels',
'-Wextra',
'-Wsign-compare',
])
differences += CompareLists(gyp_flags, gn_flags, 'other')
if differences:
files_with_given_differences.setdefault(differences, []).append(filename)
for diff, files in files_with_given_differences.iteritems():
print '\n'.join(sorted(files))
print diff
print 'Total differences:', g_total_differences
# TODO(scottmg): Return failure on difference once we're closer to identical.
return 0
if __name__ == '__main__':
sys.exit(main())
|
M4sse/chromium.src
|
tools/gn/bin/gyp_flag_compare.py
|
Python
|
bsd-3-clause
| 7,719 | 0.010882 |
from sender import *
if __name__ == '__main__':
connection = Connection().initialize()
connection.send('Default exchange message!')
connection.destroy()
|
harunyasar/rabbitmq_playground
|
default_exchange_sender.py
|
Python
|
gpl-3.0
| 166 | 0 |
import base64
import collections
import errno
import gevent
import os
import socket
import sys
import traceback
from azure import WindowsAzureMissingResourceError
from azure.storage import BlobService
from . import calling_format
from hashlib import md5
from urlparse import urlparse
from wal_e import log_help
from wal_e.pipeline import get_download_pipeline
from wal_e.piper import PIPE
from wal_e.retries import retry, retry_with_count
assert calling_format
logger = log_help.WalELogger(__name__)
_Key = collections.namedtuple('_Key', ['size'])
WABS_CHUNK_SIZE = 4 * 1024 * 1024
def uri_put_file(creds, uri, fp, content_encoding=None):
assert fp.tell() == 0
assert uri.startswith('wabs://')
def log_upload_failures_on_error(exc_tup, exc_processor_cxt):
def standard_detail_message(prefix=''):
return (prefix + ' There have been {n} attempts to upload '
'file {url} so far.'.format(n=exc_processor_cxt, url=uri))
typ, value, tb = exc_tup
del exc_tup
# Screen for certain kinds of known-errors to retry from
if issubclass(typ, socket.error):
socketmsg = value[1] if isinstance(value, tuple) else value
logger.info(
msg='Retrying upload because of a socket error',
detail=standard_detail_message(
"The socket error's message is '{0}'."
.format(socketmsg)))
else:
# For all otherwise untreated exceptions, report them as a
# warning and retry anyway -- all exceptions that can be
# justified should be treated and have error messages
# listed.
logger.warning(
msg='retrying file upload from unexpected exception',
detail=standard_detail_message(
'The exception type is {etype} and its value is '
'{evalue} and its traceback is {etraceback}'
.format(etype=typ, evalue=value,
etraceback=''.join(traceback.format_tb(tb)))))
# Help Python GC by resolving possible cycles
del tb
# Because we're uploading in chunks, catch rate limiting and
# connection errors which occur for each individual chunk instead of
# failing the whole file and restarting.
@retry(retry_with_count(log_upload_failures_on_error))
def upload_chunk(chunk, block_id):
check_sum = base64.encodestring(md5(chunk).digest()).strip('\n')
conn.put_block(url_tup.netloc, url_tup.path, chunk,
block_id, content_md5=check_sum)
url_tup = urlparse(uri)
kwargs = dict(x_ms_blob_type='BlockBlob')
if content_encoding is not None:
kwargs['x_ms_blob_content_encoding'] = content_encoding
conn = BlobService(creds.account_name, creds.account_key, protocol='https')
conn.put_blob(url_tup.netloc, url_tup.path, '', **kwargs)
# WABS requires large files to be uploaded in 4MB chunks
block_ids = []
length, index = 0, 0
pool_size = os.getenv('WABS_UPLOAD_POOL_SIZE', 5)
p = gevent.pool.Pool(size=pool_size)
while True:
data = fp.read(WABS_CHUNK_SIZE)
if data:
length += len(data)
block_id = base64.b64encode(str(index))
p.wait_available()
p.spawn(upload_chunk, data, block_id)
block_ids.append(block_id)
index += 1
else:
p.join()
break
conn.put_block_list(url_tup.netloc, url_tup.path, block_ids)
# To maintain consistency with the S3 version of this function we must
# return an object with a certain set of attributes. Currently, that set
# of attributes consists of only 'size'
    return _Key(size=length)
def uri_get_file(creds, uri, conn=None):
assert uri.startswith('wabs://')
url_tup = urlparse(uri)
if conn is None:
conn = BlobService(creds.account_name, creds.account_key,
protocol='https')
    # Determine the size of the target blob
props = conn.get_blob_properties(url_tup.netloc, url_tup.path)
blob_size = int(props['content-length'])
ret_size = 0
data = ''
# WABS requires large files to be downloaded in 4MB chunks
while ret_size < blob_size:
ms_range = 'bytes={}-{}'.format(ret_size,
ret_size + WABS_CHUNK_SIZE - 1)
while True:
# Because we're downloading in chunks, catch rate limiting and
# connection errors here instead of letting them bubble up to the
# @retry decorator so that we don't have to start downloading the
# whole file over again.
try:
part = conn.get_blob(url_tup.netloc,
url_tup.path,
x_ms_range=ms_range)
except EnvironmentError as e:
if e.errno in (errno.EBUSY, errno.ECONNRESET):
logger.warning(
msg="retrying after encountering exception",
detail=("Exception traceback:\n{0}".format(
traceback.format_exception(*sys.exc_info()))),
hint="")
gevent.sleep(30)
else:
raise
else:
break
length = len(part)
ret_size += length
data += part
if length > 0 and length < WABS_CHUNK_SIZE:
break
elif length == 0:
break
return data
def do_lzop_get(creds, url, path, decrypt):
"""
Get and decompress a S3 URL
This streams the content directly to lzop; the compressed version
is never stored on disk.
"""
assert url.endswith('.lzo'), 'Expect an lzop-compressed file'
assert url.startswith('wabs://')
conn = BlobService(creds.account_name, creds.account_key, protocol='https')
def log_wal_fetch_failures_on_error(exc_tup, exc_processor_cxt):
def standard_detail_message(prefix=''):
return (prefix + ' There have been {n} attempts to fetch wal '
'file {url} so far.'.format(n=exc_processor_cxt, url=url))
typ, value, tb = exc_tup
del exc_tup
# Screen for certain kinds of known-errors to retry from
if issubclass(typ, socket.error):
socketmsg = value[1] if isinstance(value, tuple) else value
logger.info(
msg='Retrying fetch because of a socket error',
detail=standard_detail_message(
"The socket error's message is '{0}'."
.format(socketmsg)))
else:
# For all otherwise untreated exceptions, report them as a
# warning and retry anyway -- all exceptions that can be
# justified should be treated and have error messages
# listed.
logger.warning(
msg='retrying WAL file fetch from unexpected exception',
detail=standard_detail_message(
'The exception type is {etype} and its value is '
'{evalue} and its traceback is {etraceback}'
.format(etype=typ, evalue=value,
etraceback=''.join(traceback.format_tb(tb)))))
# Help Python GC by resolving possible cycles
del tb
@retry(retry_with_count(log_wal_fetch_failures_on_error))
def download():
with open(path, 'wb') as decomp_out:
with get_download_pipeline(PIPE, decomp_out, decrypt) as pl:
g = gevent.spawn(write_and_return_error, url, conn, pl.stdin)
try:
# Raise any exceptions guarded by
# write_and_return_error.
exc = g.get()
if exc is not None:
raise exc
except WindowsAzureMissingResourceError:
# Short circuit any re-try attempts under certain race
# conditions.
pl.abort()
logger.warning(
msg=('could no longer locate object while '
'performing wal restore'),
detail=('The absolute URI that could not be '
'located is {url}.'.format(url=url)),
hint=('This can be normal when Postgres is trying '
'to detect what timelines are available '
'during restoration.'))
return False
logger.info(
msg='completed download and decompression',
detail='Downloaded and decompressed "{url}" to "{path}"'
.format(url=url, path=path))
return True
return download()
def write_and_return_error(url, conn, stream):
try:
data = uri_get_file(None, url, conn=conn)
stream.write(data)
stream.flush()
except Exception, e:
return e
finally:
stream.close()
|
modulexcite/wal-e
|
wal_e/blobstore/wabs/wabs_util.py
|
Python
|
bsd-3-clause
| 9,211 | 0 |
from django.urls import path
from . import dashboard_views
app_name = 'exam'
urlpatterns = [
path('assignment/new/', dashboard_views.MakeAssignmentView.as_view(),
name='assignment_new'),
path('assignment/success/',
dashboard_views.MakeAssignmentSuccess.as_view(),
name='assignment_success'),
path('assignment/<int:assignment_id>/name_list/',
dashboard_views.AssignmentNameListView.as_view(),
name='assignment_name_list'),
]
|
d120/pyophase
|
exam/dashboard_urls.py
|
Python
|
agpl-3.0
| 483 | 0 |
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: pogoprotos/networking/requests/messages/release_pokemon_message.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='pogoprotos/networking/requests/messages/release_pokemon_message.proto',
package='pogoprotos.networking.requests.messages',
syntax='proto3',
serialized_pb=_b('\nEpogoprotos/networking/requests/messages/release_pokemon_message.proto\x12\'pogoprotos.networking.requests.messages\"@\n\x15ReleasePokemonMessage\x12\x12\n\npokemon_id\x18\x01 \x01(\x06\x12\x13\n\x0bpokemon_ids\x18\x02 \x03(\x06\x62\x06proto3')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_RELEASEPOKEMONMESSAGE = _descriptor.Descriptor(
name='ReleasePokemonMessage',
full_name='pogoprotos.networking.requests.messages.ReleasePokemonMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='pokemon_id', full_name='pogoprotos.networking.requests.messages.ReleasePokemonMessage.pokemon_id', index=0,
number=1, type=6, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='pokemon_ids', full_name='pogoprotos.networking.requests.messages.ReleasePokemonMessage.pokemon_ids', index=1,
number=2, type=6, cpp_type=4, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=114,
serialized_end=178,
)
DESCRIPTOR.message_types_by_name['ReleasePokemonMessage'] = _RELEASEPOKEMONMESSAGE
ReleasePokemonMessage = _reflection.GeneratedProtocolMessageType('ReleasePokemonMessage', (_message.Message,), dict(
DESCRIPTOR = _RELEASEPOKEMONMESSAGE,
__module__ = 'pogoprotos.networking.requests.messages.release_pokemon_message_pb2'
# @@protoc_insertion_point(class_scope:pogoprotos.networking.requests.messages.ReleasePokemonMessage)
))
_sym_db.RegisterMessage(ReleasePokemonMessage)
# @@protoc_insertion_point(module_scope)
|
bellowsj/aiopogo
|
aiopogo/pogoprotos/networking/requests/messages/release_pokemon_message_pb2.py
|
Python
|
mit
| 2,798 | 0.007505 |
import os
import sys
import textwrap
from collections import OrderedDict
from argparse import ArgumentParser, RawDescriptionHelpFormatter
from faice.tools.run.__main__ import main as run_main
from faice.tools.run.__main__ import DESCRIPTION as RUN_DESCRIPTION
from faice.tools.vagrant.__main__ import main as vagrant_main
from faice.tools.vagrant.__main__ import DESCRIPTION as VAGRANT_DESCRIPTION
VERSION = '1.2'
TOOLS = OrderedDict([
('run', run_main),
('vagrant', vagrant_main)
])
def main():
description = [
'FAICE Copyright (C) 2017 Christoph Jansen',
'',
        'This program comes with ABSOLUTELY NO WARRANTY. This is free software, and you are welcome to redistribute it '
'under certain conditions. See the LICENSE file distributed with this software for details.',
]
parser = ArgumentParser(
description=os.linesep.join([textwrap.fill(block) for block in description]),
formatter_class=RawDescriptionHelpFormatter
)
parser.add_argument(
'-v', '--version', action='version', version=VERSION
)
subparsers = parser.add_subparsers(title="tools")
sub_parser = subparsers.add_parser('run', help=RUN_DESCRIPTION, add_help=False)
_ = subparsers.add_parser('vagrant', help=VAGRANT_DESCRIPTION, add_help=False)
if len(sys.argv) < 2:
parser.print_help()
exit()
_ = parser.parse_known_args()
sub_args = sub_parser.parse_known_args()
tool = TOOLS[sub_args[1][0]]
sys.argv[0] = 'faice {}'.format(sys.argv[1])
del sys.argv[1]
exit(tool())
if __name__ == '__main__':
main()
|
curious-containers/faice
|
faice/__main__.py
|
Python
|
gpl-3.0
| 1,622 | 0.003083 |
from django.contrib.gis.db import models
# Create your models here.
class GeoWaterUse(models.Model):
id = models.AutoField(primary_key=True)
geometry = models.PointField()
api = models.CharField(max_length=20, null=False)
well_name = models.CharField(max_length=100, null=True)
frac_date = models.DateField(auto_now=False, auto_now_add=False)
state = models.CharField(max_length=20, null=True)
county = models.CharField(max_length=20, null=True)
latitude = models.DecimalField(max_digits=20, decimal_places=6)
longitude = models.DecimalField(max_digits=20, decimal_places=6)
horizontal_length = models.DecimalField(max_digits=20, decimal_places=3)
water_use = models.DecimalField(max_digits=20, decimal_places=3)
objects = models.GeoManager()
class Meta:
ordering = ["api"]
class GeoProducedWater(models.Model):
id = models.AutoField(primary_key=True)
geometry = models.PointField()
api = models.CharField(max_length=20, null=False)
well_name = models.CharField(max_length=100, null=True)
latitude = models.DecimalField(max_digits=20, decimal_places=6)
longitude = models.DecimalField(max_digits=20, decimal_places=6)
volume_date = models.DateField(auto_now=False, auto_now_add=False)
h2o_volume = models.DecimalField(max_digits=10, decimal_places=2)
days_on = models.PositiveIntegerField()
is_prediction = models.BooleanField()
objects = models.GeoManager()
class Meta:
ordering = ["api"]
|
tcqiuyu/aquam
|
aquam/apps/geoanalytics/models.py
|
Python
|
mit
| 1,526 | 0.003277 |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# random code that helps with debugging/testing the python interfaces and examples
# this is not meant to be run by normal users
from __future__ import with_statement # for python 2.5
__copyright__ = 'Copyright (C) 2009-2010'
__license__ = 'Apache License, Version 2.0'
from openravepy import *
from numpy import *
from itertools import izip
import time
def test_drawjoints(robot):
"""draws the joint axes of the robot
"""
env = robot.GetEnv()
h = None
try:
while True:
h = [env.drawlinelist(array([j.GetAnchor()-j.GetAxis(0),j.GetAnchor()+j.GetAxis(0)]),5,array([0,0,float(j.GetDOFIndex())/robot.GetDOF()])) for j in robot.GetJoints() if not j.IsStatic()]
h += [env.drawlinelist(array([j.GetAnchor()-0.25*j.GetAxis(0),j.GetAnchor()+0.25*j.GetAxis(0)]),20,array([0,float(j.GetDOFIndex())/robot.GetDOF(),0])) for j in robot.GetPassiveJoints() if not j.IsStatic()]
time.sleep(0.1)
finally:
h = None
def test_drawmanip(manip):
robot=manip.GetRobot()
env=robot.GetEnv()
while True:
h = None
joints = [robot.GetJoints()[i] for i in manip.GetArmJoints()]
h = [env.drawlinelist(array([j.GetAnchor()-j.GetAxis(0),j.GetAnchor()+j.GetAxis(0)]),5,array([0,0,i/8.0])) for i,j in enumerate(joints)]
time.sleep(0.1)
def derive_normalizeAxisRotation():
"""Find the rotation theta around axis v such that rot(v,theta) * q is closest to the identity"""
from sympy import *
vx,vy,vz = Symbol('vx'),Symbol('vy'),Symbol('vz')
v = Matrix(3,1,[vx,vy,vz])
theta = Symbol('theta')
q0 = Matrix(4,1,[cos(theta/2),sin(theta/2)*v[0],sin(theta/2)*v[1],sin(theta/2)*v[2]])
q0dtheta = Matrix(4,1,[-sin(theta/2)/2,cos(theta/2)*v[0]/2,cos(theta/2)*v[1]/2,cos(theta/2)*v[2]/2])
qx,qy,qz,qw = Symbol('qx'),Symbol('qy'),Symbol('qz'),Symbol('qw')
q1 = Matrix(4,1,[qx,qy,qz,qw])
qidentity = Matrix(4,1,[S.One,S.Zero,S.Zero,S.Zero])
qfinal = Matrix(4,1,[q0[0]*q1[0] - q0[1]*q1[1] - q0[2]*q1[2] - q0[3]*q1[3],
q0[0]*q1[1] + q0[1]*q1[0] + q0[2]*q1[3] - q0[3]*q1[2],
q0[0]*q1[2] + q0[2]*q1[0] + q0[3]*q1[1] - q0[1]*q1[3],
q0[0]*q1[3] + q0[3]*q1[0] + q0[1]*q1[2] - q0[2]*q1[1]])
qfinaldtheta = Matrix(4,1,[q0dtheta[0]*q1[0] - q0dtheta[1]*q1[1] - q0dtheta[2]*q1[2] - q0dtheta[3]*q1[3],
q0dtheta[0]*q1[1] + q0dtheta[1]*q1[0] + q0dtheta[2]*q1[3] - q0dtheta[3]*q1[2],
q0dtheta[0]*q1[2] + q0dtheta[2]*q1[0] + q0dtheta[3]*q1[1] - q0dtheta[1]*q1[3],
q0dtheta[0]*q1[3] + q0dtheta[3]*q1[0] + q0dtheta[1]*q1[2] - q0dtheta[2]*q1[1]])
solveeq = qfinaldtheta.dot(qidentity-qfinal).expand()
sthetad2 = Symbol('sthetad2') # sin(theta/2)
cthetad2 = Symbol('cthetad2') # cos(theta/2)
finaleq = Poly(solveeq.subs([(sin(theta/2),sthetad2),(cos(theta/2),cthetad2)]),sthetad2,cthetad2)
# should be:
# Poly((qw**2/2 + qx**2/2 + qy**2/2 + qz**2/2 - qw**2*vx**2/2 - qw**2*vy**2/2 - qw**2*vz**2/2 - qx**2*vx**2/2 - qx**2*vy**2/2 - qx**2*vz**2/2 - qy**2*vx**2/2 - qy**2*vy**2/2 - qy**2*vz**2/2 - qz**2*vx**2/2 - qz**2*vy**2/2 - qz**2*vz**2/2)*sthetad2*cthetad2 - qx/2*sthetad2 + (-qw*vz/2 - qy*vx/2 - qz*vy/2)*cthetad2, sthetad2, cthetad2)
# sthetad2*cthetad2 coefficient reduces to 0
# Poly(- qx/2*sthetad2 + (-qw*vz/2 - qy*vx/2 - qz*vy/2)*cthetad2, sthetad2, cthetad2)
# theta = 2*atan2(-qw*vz-qz*vy-qy*vx,qx)
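# Added sketch (not in the original file): a small numeric check of the closed-form
# result above, using only numpy from the wildcard import; the helper name is
# illustrative and the quaternion layout [scalar, x, y, z] matches the derivation.
def check_normalizeAxisRotation(numtrials=10):
    """Verify numerically that theta = 2*atan2(-dot(v, qvec), qscalar) maximizes the
    scalar part of rot(v,theta)*q, i.e. brings the product closest to the identity."""
    def qmult(a, b):
        # Hamilton product with the scalar component first, as in the sympy derivation
        return array([a[0]*b[0] - a[1]*b[1] - a[2]*b[2] - a[3]*b[3],
                      a[0]*b[1] + a[1]*b[0] + a[2]*b[3] - a[3]*b[2],
                      a[0]*b[2] + a[2]*b[0] + a[3]*b[1] - a[1]*b[3],
                      a[0]*b[3] + a[3]*b[0] + a[1]*b[2] - a[2]*b[1]])
    for _ in range(numtrials):
        q = random.rand(4) - 0.5
        q /= linalg.norm(q)
        v = random.rand(3) - 0.5
        v /= linalg.norm(v)
        theta = 2.0*arctan2(-dot(v, q[1:]), q[0])
        def closeness(t):
            # scalar part of rot(v,t)*q; larger means closer to the identity
            return qmult(r_[cos(t/2.0), sin(t/2.0)*v], q)[0]
        # the derived theta should not be improved by nudging it either way
        assert closeness(theta) >= closeness(theta + 1e-3)
        assert closeness(theta) >= closeness(theta - 1e-3)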
|
vitan/openrave
|
sandbox/debugkinbody.py
|
Python
|
lgpl-3.0
| 4,041 | 0.022272 |
# -*- coding: utf-8 -*-
#
# codimension - graphics python two-way code editor and analyzer
# Copyright (C) 2010-2017 Sergey Satskiy <sergey.satskiy@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""A few constants which do not depend on other project files"""
# Default encoding for the cases when:
# - the encoding could not be detected
# - replaces ascii to be on the safe side
DEFAULT_ENCODING = 'utf-8'
# File encoding used for various settings and project files
SETTINGS_ENCODING = 'utf-8'
# Directory to store Codimension settings and projects
CONFIG_DIR = '.codimension3'
|
SergeySatskiy/codimension
|
codimension/utils/config.py
|
Python
|
gpl-3.0
| 1,187 | 0 |
q1_start = 0
q1_end = 1
N_q1 = 128
q2_start = 0
q2_end = 1
N_q2 = 3
p1_start = -4
p1_end = 4
N_p1 = 4
p2_start = -0.5
p2_end = 0.5
N_p2 = 1
p3_start = -0.5
p3_end = 0.5
N_p3 = 1
N_ghost = 3
|
ShyamSS-95/Bolt
|
example_problems/nonrelativistic_boltzmann/beam_test/1D/domain.py
|
Python
|
gpl-3.0
| 225 | 0.044444 |
from ..provider.constants import Provider, string_to_provider
from ..services.base import Service
from .context import DisconnectOnException
from .errors import (
AlreadyConnectedException,
ClusterError,
MultipleClustersConnectionError,
NotConnectedError,
PleaseDisconnectError,
)
class ClusterService(Service):
def connected_clusters(self):
return self.services.kubernetes_service.get_cluster_names()
def assert_is_connected(self):
connected_clusters = self.connected_clusters()
if not connected_clusters:
raise NotConnectedError()
if len(connected_clusters) > 1:
raise MultipleClustersConnectionError(connected_clusters)
return connected_clusters[0]
def assert_is_disconnected(self):
connected_clusters = self.connected_clusters()
if connected_clusters:
if len(connected_clusters) == 1:
raise PleaseDisconnectError(connected_clusters[0])
raise MultipleClustersConnectionError(connected_clusters)
def connect(self, cluster_name, provider_string, kubeconfig, registry):
try:
self.assert_is_disconnected()
except PleaseDisconnectError as e:
if e.current_cluster_name == cluster_name:
raise AlreadyConnectedException(e.current_cluster_name) from e
raise
provider = string_to_provider(provider_string)
provider_service = self.services.provider_broker.get_provider_service(provider)
if kubeconfig is None:
kubeconfig = provider_service.create_kubeconfig(cluster_name)
else:
assert provider == Provider.CUSTOM, "Must use --provider custom to connect with a kubeconfig"
with DisconnectOnException(cluster_name, self.services):
self.services.kubernetes_service.write_config(cluster_name, kubeconfig)
self.services.kubernetes_service.ensure_orchestrate_namespace()
cluster = provider_service.create_cluster_object(
services=self.services,
name=cluster_name,
registry=registry,
)
self.services.cluster_metadata_service.write_metadata(cluster)
return self.test()
def create(self, options):
try:
self.assert_is_disconnected()
except PleaseDisconnectError as e:
if e.current_cluster_name == options.get('cluster_name', ''):
raise AlreadyConnectedException(e.current_cluster_name) from e
raise
self.services.options_validator_service.validate_cluster_options(**options)
cluster_name = options.get('cluster_name', '')
provider_string = options.get('provider', '')
provider = string_to_provider(provider_string)
provider_service = self.services.provider_broker.get_provider_service(provider)
with DisconnectOnException(cluster_name, self.services):
cluster = provider_service.create_kubernetes_cluster(options)
self.services.kubernetes_service.ensure_orchestrate_namespace()
self.services.cluster_metadata_service.write_metadata(cluster)
self.services.kubernetes_service.wait_until_nodes_are_ready()
return cluster.name
def update(self, options):
self.services.options_validator_service.validate_cluster_options(**options)
cluster_name = options.get('cluster_name', '')
provider_string = options.get('provider', '')
provider = string_to_provider(provider_string)
provider_service = self.services.provider_broker.get_provider_service(provider)
with DisconnectOnException(cluster_name, self.services):
cluster = provider_service.update_kubernetes_cluster(options)
self.services.kubernetes_service.ensure_orchestrate_namespace()
self.services.kubernetes_service.wait_until_nodes_are_ready()
return cluster.name
def destroy(self, cluster_name, provider_string):
provider = string_to_provider(provider_string)
provider_service = self.services.provider_broker.get_provider_service(provider)
provider_service.destroy_kubernetes_cluster(cluster_name=cluster_name)
self.services.cluster_metadata_service.ensure_metadata_deleted(cluster_name=cluster_name)
def disconnect(self, cluster_name, disconnect_all):
if (cluster_name and disconnect_all) or (not cluster_name and not disconnect_all):
raise ClusterError('Must provide exactly one of --cluster-name <cluster_name> and --all')
try:
current_cluster_name = self.assert_is_connected()
if cluster_name is not None and current_cluster_name != cluster_name:
raise PleaseDisconnectError(current_cluster_name)
except MultipleClustersConnectionError:
if not disconnect_all:
raise
for cname in self.connected_clusters():
try:
self.services.cluster_metadata_service.ensure_metadata_deleted(cluster_name=cname)
self.services.kubernetes_service.ensure_config_deleted(cluster_name=cname)
self.services.logging_service.warning(f'Successfully disconnected from {cname}')
except Exception as e:
raise ClusterError(
f'Looks like an error occured while attempting to disconnect from cluster "{cname}".'
) from e
def get_connected_cluster(self):
cluster_name = self.assert_is_connected()
return self.services.cluster_metadata_service.read_metadata(cluster_name)
def test(self):
cluster = self.get_connected_cluster()
provider_service = self.services.provider_broker.get_provider_service(cluster.provider)
try:
provider_service.test_kubernetes_cluster(cluster_name=cluster.name)
except Exception as e:
raise ClusterError(
f'Looks like an error occured while testing cluster "{cluster.name}".'
) from e
return cluster
|
sigopt/sigopt-python
|
sigopt/orchestrate/cluster/service.py
|
Python
|
mit
| 5,588 | 0.0102 |
import numpy as np
from scipy.integrate import odeint
from scipy.integrate import ode
import matplotlib.pylab as plt
import csv
import time
endpoint = 1000000000; # integration range
dx = 10.0; # step size
lam0 = 0.845258; # in unit of omegam, omegam = 3.66619*10^-17
dellam = np.array([0.00003588645221954444, 0.06486364865874367]); # deltalambda/omegam
ks = [1.0,1.0/90]; # two k's
thm = 0.16212913985547778; # theta_m
psi0, x0 = [1.0+0.j, 0.0], 0 # initial condition
savestep = 100000; # save to file every savestep steps
xlin = np.arange(dx,endpoint+1*dx, dx)
psi = np.zeros([len(xlin) , 2], dtype='complex_')
xlinsave = np.zeros(len(xlin)/savestep);
psisave = np.zeros([len(xlinsave) , 2], dtype='complex_')
probsave = np.zeros([len(xlinsave) , 3])
def hamiltonian(x, deltalambda, k, thetam):
return [[ 0, 0.5* np.sin(2*thetam) * ( deltalambda[0] * np.sin(k[0]*x) + deltalambda[1] * np.sin(k[1]*x) ) * np.exp( 1.0j * ( - x - np.cos(2*thetam) * ( ( deltalambda[0]/k[0] * np.cos(k[0]*x) + deltalambda[1]/k[1] * np.cos(k[1]*x) ) ) ) ) ], [ 0.5* np.sin(2*thetam) * ( deltalambda[0] * np.sin(k[0]*x) + deltalambda[1] * np.sin(k[1]*x) ) * np.exp( -1.0j * ( - x - np.cos(2*thetam) * ( deltalambda[0] /k[0] * np.cos(k[0]*x) + deltalambda[1] /k[1] * np.cos(k[1]*x) ) ) ), 0 ]] # Hamiltonian for double frequency
def deripsi(t, psi, deltalambda, k , thetam):
return -1.0j * np.dot( hamiltonian(t, deltalambda,k,thetam), [psi[0], psi[1]] )
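# Quick sanity check (illustrative comment, not part of the original script): at x=0
# the off-diagonal coupling vanishes because sin(k*0)=0, so
#   deripsi(0, psi0, dellam, ks, thm)
# evaluates to the zero vector; the state only starts to mix once x > 0.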
sol = ode(deripsi).set_integrator('zvode', method='bdf', atol=1e-8, with_jacobian=False)
sol.set_initial_value(psi0, x0).set_f_params(dellam,ks,thm)
flag = 0
flagsave = 0
timestampstr = time.strftime("%Y%m%d-%H%M%S")
print timestampstr
while sol.successful() and sol.t < endpoint:
sol.integrate(xlin[flag])
if np.mod(flag,savestep)==0:
probsave[flagsave] = [sol.t, np.absolute(sol.y[1])**2, np.absolute(sol.y[0])**2]
with open(r'probtrans-test-'+timestampstr+'.csv', 'a') as f_handle:
np.savetxt(f_handle, probsave[flagsave])
flagsave = flagsave + 1
flag = flag + 1
print "CONGRATS"
# # ploting using probsave array inside file
# plt.figure(figsize=(18,13))
# plt.plot(probsave[:,0], probsave[:,1],'-')
# plt.title("Probabilities",fontsize=20)
# plt.xlabel("$\hat x$",fontsize=20)
# plt.ylabel("Probability",fontsize=20)
# plt.show()
# # Template for reading the csv file
# # Ploting using data file
# probsavefromfile = np.loadtxt("probtrans-test-"+timestampstr+".csv")
# # print test
# # print len(test[1::2]), test[1::2], len(test[::2]), test[::2]
# plt.figure(figsize=(18,13))
# plt.plot(probsavefromfile[::2], probsavefromfile[1::2],'-')
# plt.title("Probabilities",fontsize=20)
# plt.xlabel("$\hat x$",fontsize=20)
# plt.ylabel("Probability",fontsize=20)
# plt.show()
|
NeuPhysics/codebase
|
ipynb/matter/py-server/save-data-on-site.py
|
Python
|
mit
| 2,819 | 0.023058 |
#!/usr/bin/env python
# Usage parse_shear sequences.fna a2t.txt emb_output.b6
import sys
import csv
from collections import Counter, defaultdict
sequences = sys.argv[1]
accession2taxonomy = sys.argv[2]
alignment = sys.argv[3]
with open(accession2taxonomy) as inf:
next(inf)
csv_inf = csv.reader(inf, delimiter="\t")
a2t = dict(('_'.join(row[0].split()[0].split('_')[:-1]).split('.')[0], row[-1]) for row in csv_inf)
print("Loaded accession2taxonomy.")
reads_counter = Counter()
with open(sequences) as inf:
for i, line in enumerate(inf):
if i % 100000 == 0:
print("Processed %d lines" % i)
print(line)
if line.startswith('>'):
name = '_'.join(line.split()[0][1:].split('_')[:-1]).split('.')[0]
if name in a2t:
species = a2t[name]
reads_counter.update([species])
print("Loaded read counter")
counts_dict = defaultdict(Counter)
with open(alignment) as inf:
csv_inf = csv.reader(inf, delimiter="\t")
for i, row in enumerate(csv_inf):
if i % 100000 == 0:
print("Processed %d records" % i)
print(row)
if row[-1].startswith('k'):
read = row[0]
read = "_".join(read.split('_')[:-1]).split('.')[0]
if read in a2t:
species = a2t[read]
tax = row[-1]
counts_dict[species].update([tax])
print("Loaded counts_dict.")
with open("sheared_bayes.txt", "w") as outf:
for i, species in enumerate(counts_dict.keys()):
row = [0] * 10
row[-1] = reads_counter[species]
row[0] = species
counts = counts_dict[species]
if i % 10000 == 0:
print("Processed %d records" % i)
print(counts)
for j in counts.keys():
c = j.count(';')
row[c+1] = counts[j]
row = list(map(str, row))
outf.write("\t".join(row) + "\n")
|
knights-lab/analysis_SHOGUN
|
scripts/parse_shear.py
|
Python
|
mit
| 2,010 | 0.000498 |
#!/usr/bin/env python
#coding:utf-8
"""
Author: --<v1ll4n>
Purpose: Provide some useful thread utils
Created: 2016/10/29
"""
import unittest
#import multiprocessing
from pprint import pprint
from time import sleep
try:
from Queue import Full, Empty, Queue
except:
from queue import Full, Empty, Queue
#from random import choice
#from traceback import format_exc
from threading import Thread, Lock
#from multiprocessing import Process, Lock
from uuid import uuid1
########################################################################
class TaskError(Exception):
""""""
pass
########################################################################
class LaborThread(Thread):
""""""
#----------------------------------------------------------------------
def __init__(self, result_queue, master, clean_mod=True, *args, **kargs):
"""Constructor"""
Thread.__init__(self, name='ThreadPool-Labor-'+uuid1().hex,
*args, **kargs)
self._master = master
self._clean_mod = clean_mod
self._result_queue = result_queue
self._startworkingflag_ = True
self._task_queue = Queue(1)
self._count_lock = Lock()
#----------------------------------------------------------------------
def get_result_queue(self):
""""""
return self._result_queue
#----------------------------------------------------------------------
def get_task_queue(self):
""""""
return self._task_queue
#----------------------------------------------------------------------
def feed(self, function, *vargs, **kwargs):
""""""
try:
self._task_queue.put_nowait(tuple([function, vargs, kwargs]))
return True
except Full:
#format_exc()
return False
#----------------------------------------------------------------------
def run(self):
""""""
while self._startworkingflag_:
#pprint('Running')
try:
_task = self._task_queue.get(timeout=3)
result = {}
result['from'] = self.name
result['state'] = False
result['result'] = None
result['current_task'] = _task.__str__()
result['exception'] = tuple()
try:
ret = self._process_task(_task)
result['state'] = True
result['result'] = ret
#self._result_queue.put(result)
except Exception as e:
result['state'] = False
result['result'] = None
exception_i = (str(type(e)), str(e))
result['exception'] = exception_i
finally:
if self._clean_mod:
_result = {}
_result['state'] = result['state']
_result['result'] = result['result']
result = _result
self._result_queue.put(result)
self._count_lock.acquire()
self._master._executed_task_count = \
self._master._executed_task_count + 1
self._count_lock.release()
except Empty:
pass
#----------------------------------------------------------------------
def _process_task(self, task):
""""""
try:
ret = task[0](*task[1], **task[2])
return ret
except Exception as e:
raise e
#----------------------------------------------------------------------
def stop(self):
""""""
#self.stop()
self._startworkingflag_ = False
#----------------------------------------------------------------------
def __del__(self):
""""""
self.stop()
#----------------------------------------------------------------------
def _exception_process(self):
""""""
########################################################################
class Pool(object):
""""""
#----------------------------------------------------------------------
def __init__(self, thread_max=30, clean_mod=True):
"""Constructor"""
self.thread_max = thread_max
self._current_thread = []
self._daemon_thread = []
self._clean_mod = clean_mod
self._result_queue = Queue()
self._task_queue = Queue()
self.is_alive = True
self._executed_task_count = 0
self._task_count = 0
#----------------------------------------------------------------------
def _restart_thread_daemon(self):
""""""
#pprint('threads daemon started!')
while self.is_alive:
if len(self._current_thread) < self.thread_max:
self._start_new_labor()
else:
sleep(0.5)
#----------------------------------------------------------------------
def _start_new_labor(self):
""""""
#pprint('start new labor')
_tmp_labor = LaborThread(result_queue=self._result_queue, master=self,
clean_mod=self._clean_mod)
_tmp_labor.daemon = True
_tmp_labor.start()
self._current_thread.append(_tmp_labor)
#----------------------------------------------------------------------
def feed(self, target_func, *vargs, **kwargs):
""""""
self._task_queue.put(tuple([target_func, vargs, kwargs]))
self._task_count = self._task_count + 1
#----------------------------------------------------------------------
def _dispatcher(self):
""""""
#pprint('dispatcher start!')
while self.is_alive:
try:
ret = self._task_queue.get()
while True:
availible_threads = [None if x.get_task_queue().full() \
else x for x in self._current_thread]
for i in availible_threads:
if i == None:
pass
else:
i.feed(ret[0], *ret[1], **ret[2])
ret = None
break
if ret == None:
break
else:
continue
except Empty:
                sleep(0.5)
#----------------------------------------------------------------------
def stop(self):
""""""
for i in self._current_thread:
i.stop()
del i
self.is_alive = False
#----------------------------------------------------------------------
def start(self):
""""""
self.is_alive = True
_ = Thread(name='restart_labor_daemon', target=self._restart_thread_daemon)
_.daemon = True
_.start()
self._daemon_thread.append(_)
_ = Thread(name='dispatcher_daemon', target=self._dispatcher)
_.daemon = True
_.start()
#----------------------------------------------------------------------
def get_result_queue(self):
""""""
return self._result_queue
#----------------------------------------------------------------------
def get_task_queue(self):
""""""
return self._task_queue
#----------------------------------------------------------------------
def get_result_generator(self):
""""""
while True:
try:
ret = self._result_queue.get(timeout=1)
yield ret
except Empty:
if self._task_count == self._executed_task_count:
break
else:
pass
#----------------------------------------------------------------------
@property
def task_count(self):
"""The amount of tasks"""
return self._task_count
#----------------------------------------------------------------------
@property
def executed_task_count(self):
""""""
return self._executed_task_count
#----------------------------------------------------------------------
@property
def percent(self):
""""""
return float(self._executed_task_count)/float(self._task_count)
########################################################################
class PoolTest(unittest.case.TestCase):
""""""
#----------------------------------------------------------------------
def runTest(self):
"""Constructor"""
self.test_laborprocess()
#----------------------------------------------------------------------
def test_pool(self):
""""""
def func1(arg1):
print('func1 called!')
return arg1
pool = Pool()
pool.start()
pool.feed(func1, 12345)
for i in range(10):
pool.feed(func1, i)
sleep(3)
while True:
try:
pprint(pool.get_result_queue().get(timeout=5))
except Empty:
break
pool.stop()
if __name__ == "__main__":
unittest.main()
|
VillanCh/g3ar
|
g3ar/threadutils/thread_pool.py
|
Python
|
bsd-2-clause
| 9,669 | 0.005378 |
import spade
import time
class MyAgent(spade.Agent.Agent):
class ReceiveBehav(spade.Behaviour.Behaviour):
"""This behaviour will receive all kind of messages"""
def _process(self):
self.msg = None
# Blocking receive for 10 seconds
self.msg = self._receive(True, 10)
# Check wether the message arrived
if self.msg:
print "I got a message!"
else:
print "I waited but got no message"
def _setup(self):
print "MyAgent starting . . ."
# Add the "ReceiveBehav" as the default behaviour
rb = self.ReceiveBehav()
self.setDefaultBehaviour(rb)
if __name__ == "__main__":
a = MyAgent("agent2@127.0.0.1", "secret")
a.start()
|
vportascarta/UQAC-8INF844-SPHERO
|
agents/ExempleAgentReceveur.py
|
Python
|
gpl-3.0
| 789 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-09-20 15:04
from __future__ import unicode_literals
from django.db import migrations
from django.db import models
class Migration(migrations.Migration):
dependencies = [
('meinberlin_plans', '0017_rename_cost_field'),
]
operations = [
migrations.AlterField(
model_name='plan',
name='point_label',
field=models.CharField(default='Label of the location', help_text='This could be an address or the name of a landmark.', max_length=255, verbose_name='Label of the location'),
),
]
|
liqd/a4-meinberlin
|
meinberlin/apps/plans/migrations/0018_point_label_required.py
|
Python
|
agpl-3.0
| 621 | 0.00161 |
from unittest.mock import Mock
from django.db.models import QuerySet
from datagrowth.resources import HttpResource
from core.tests.mocks.requests import MockRequests
MockErrorQuerySet = Mock(QuerySet)
MockErrorQuerySet.count = Mock(return_value=0)
class HttpResourceMock(HttpResource):
URI_TEMPLATE = "http://localhost:8000/{}/?q={}"
PARAMETERS = {
"param": 1
}
HEADERS = {
"Accept": "application/json"
}
GET_SCHEMA = {
"args": {
"title": "resource mock arguments",
"type": "array", # a single alphanumeric element
"items": [
{
"type": "string",
"enum": ["en", "nl"]
},
{
"type": "string",
"pattern": "[A-Za-z0-9]+"
}
],
"additionalItems": False,
"minItems": 2
},
"kwargs": None # not allowed
}
POST_SCHEMA = {
"args": {
"title": "resource mock arguments",
"type": "array", # a single alphanumeric element
"items": [
{
"type": "string",
"enum": ["en", "nl"]
},
{
"type": "string",
"pattern": "[A-Za-z0-9]+"
}
],
"additionalItems": False,
"minItems": 2
},
"kwargs": {
"title": "resource mock keyword arguments",
"type": "object",
"properties": {
"query": {"type": "string"}
},
"required": ["query"]
}
}
CONFIG_NAMESPACE = "mock"
def __init__(self, *args, **kwargs):
super(HttpResourceMock, self).__init__(*args, **kwargs)
self.session = MockRequests
self.session.send.reset_mock()
def send(self, method, *args, **kwargs):
if method == "post":
query = kwargs.get("query")
if query:
args += (query,)
args = (self.config.source_language,) + args
elif method == "get":
args = (self.config.source_language,) + args
return super(HttpResourceMock, self).send(method, *args, **kwargs)
def auth_parameters(self):
return {
"auth": 1,
"key": self.config.secret
}
def next_parameters(self):
content_type, data = self.content
try:
nxt = data["next"]
except (KeyError, TypeError):
return {}
return {"next": nxt}
@property
def meta(self):
return self.variables()["meta"]
def data(self, **kwargs):
return {
"test": kwargs.get("query")
}
def variables(self, *args):
args = args or (self.request["args"] if self.request else tuple())
return {
"url": args,
"meta": args[1] if len(args) > 1 else None
}
|
fako/datascope
|
src/core/tests/mocks/http.py
|
Python
|
gpl-3.0
| 3,044 | 0 |
from pymol.cgo import *
from pymol import cmd
from pymol.vfont import plain
# create the axes object, draw axes with cylinders coloured red, green,
# blue for X, Y and Z
obj = [
CYLINDER, 0., 0., 0., 10., 0., 0., 0.2, 1.0, 1.0, 1.0, 1.0, 0.0, 0.,
CYLINDER, 0., 0., 0., 0., 10., 0., 0.2, 1.0, 1.0, 1.0, 0., 1.0, 0.,
CYLINDER, 0., 0., 0., 0., 0., 10., 0.2, 1.0, 1.0, 1.0, 0., 0.0, 1.0,
]
# add labels to axes object
cyl_text(obj,plain,[-5.,-5.,-1],'Origin',0.20,axes=[[3.0,0.0,0.0],[0.0,3.0,0.0],[0.0,0.0,3.0]])
cyl_text(obj,plain,[10.,0.,0.],'X',0.20,axes=[[3.0,0.0,0.0],[0.0,3.0,0.0],[0.0,0.0,3.0]])
cyl_text(obj,plain,[0.,10.,0.],'Y',0.20,axes=[[3.0,0.0,0.0],[0.0,3.0,0.0],[0.0,0.0,3.0]])
cyl_text(obj,plain,[0.,0.,10.],'Z',0.20,axes=[[3.0,0.0,0.0],[0.0,3.0,0.0],[0.0,0.0,3.0]])
# then we load it into PyMOL
cmd.load_cgo(obj,'axes')
|
weitzner/Dotfiles
|
pymol_scripts/axes_cyl.py
|
Python
|
mit
| 853 | 0.078546 |
import multiprocessing
import warnings
import six
from chainer.backends import cuda
from chainer.dataset import convert
from chainer import reporter
from chainer.training.updaters import standard_updater
try:
from cupy.cuda import nccl
_available = True
except ImportError:
_available = False
import numpy
class _Worker(multiprocessing.Process):
def __init__(self, proc_id, pipe, master):
super(_Worker, self).__init__()
self.proc_id = proc_id
self.pipe = pipe
self.converter = master.converter
self.model = master._master
self.device = master._devices[proc_id]
self.iterator = master._mpu_iterators[proc_id]
self.n_devices = len(master._devices)
def setup(self):
_, comm_id = self.pipe.recv()
self.comm = nccl.NcclCommunicator(self.n_devices, comm_id,
self.proc_id)
self.model.to_gpu(self.device)
self.reporter = reporter.Reporter()
self.reporter.add_observer('main', self.model)
self.reporter.add_observers('main',
self.model.namedlinks(skipself=True))
def run(self):
dev = cuda.Device(self.device)
dev.use()
self.setup()
gp = None
while True:
job, data = self.pipe.recv()
if job == 'finalize':
dev.synchronize()
break
if job == 'update':
# For reducing memory
self.model.cleargrads()
batch = self.converter(self.iterator.next(), self.device)
observation = {}
with self.reporter.scope(observation):
loss = _calc_loss(self.model, batch)
self.model.cleargrads()
loss.backward()
del loss
gg = gather_grads(self.model)
nccl_data_type = _get_nccl_data_type(gg.dtype)
null_stream = cuda.Stream.null
self.comm.reduce(gg.data.ptr, gg.data.ptr, gg.size,
nccl_data_type, nccl.NCCL_SUM, 0,
null_stream.ptr)
del gg
self.model.cleargrads()
gp = gather_params(self.model)
nccl_data_type = _get_nccl_data_type(gp.dtype)
self.comm.bcast(gp.data.ptr, gp.size, nccl_data_type, 0,
null_stream.ptr)
scatter_params(self.model, gp)
gp = None
class MultiprocessParallelUpdater(standard_updater.StandardUpdater):
"""Implementation of a multiprocess parallel GPU Updater.
This is an implementation of :class:`Updater` that uses multiple GPUs
with multi-process data parallelism. It uses Nvidia NCCL for communication
between multiple GPUs.
It behaves similarly to
:class:`~chainer.training.updaters.StandardUpdater`.
The update routine is modified to support data-parallel
computation on multiple GPUs in one machine.
It is based on synchronous parallel SGD: it
parallelizes the gradient computation over a mini-batch, and updates the
parameters only in the main device.
It does not transfer the values collected by :class:`Reporter` in the sub
devices to the main device. So you can only see the reported values in
the main device.
Args:
iterators: List of dataset iterator for the training dataset. The
number of the iterators must be same to the number of GPUs you use.
optimizer: Optimizer to update parameters. The model should be attached
to the optimizer.
converter: Converter function to build input arrays. Each batch
extracted by the iterator is split equally between the devices and
then passed with corresponding ``device`` option to this function.
:func:`~chainer.dataset.concat_examples` is used by default.
devices: Dictionary or list of devices to which the training data is
sent. The master device will be the first one in the list or the
value attached to the key ``'main'``.
"""
def __init__(self, iterators, optimizer, converter=convert.concat_examples,
devices=None):
if not MultiprocessParallelUpdater.available():
raise Exception(
'NCCL is not enabled. MultiprocessParallelUpdater '
'requires NCCL.\n'
'Please reinstall chainer after you install NCCL.\n'
'(see https://github.com/chainer/chainer#installation).')
assert len(iterators) == len(devices)
for iterator in iterators[1:]:
assert len(iterator.dataset) == len(iterators[0].dataset)
# Correct optimizer parameters for new minibatch size
optim = optimizer.__class__.__name__
if optim in ('Adam', 'AdaGrad', 'RMSprop'):
optimizer.eps *= len(devices)
warnings.warn('optimizer.eps is changed to {} '
'by MultiprocessParallelUpdater for new batch size.'.
format(optimizer.eps))
elif optim in ('RMSpropGraves', 'AdaDelta'):
optimizer.eps *= len(devices) ** 2 # not quite right for AdaDelta
warnings.warn('optimizer.eps is changed to {} '
'by MultiprocessParallelUpdater for new batch size.'.
format(optimizer.eps))
elif hasattr(optimizer, 'lr'):
optimizer.lr /= len(devices)
warnings.warn('optimizer.lr is changed to {} '
'by MultiprocessParallelUpdater for new batch size.'.
format(optimizer.lr))
super(MultiprocessParallelUpdater, self).__init__(
iterator=iterators[0],
optimizer=optimizer,
converter=converter
)
if isinstance(devices, dict):
main = devices.pop('main')
devices = list(six.itervalues(devices))
devices = [main] + devices
if devices is None or any(device is None for device in devices):
raise ValueError('must specify GPU devices')
self._master = optimizer.target
self._devices = devices
self._mpu_iterators = iterators
self._initialized = False
self._pipes = []
self._workers = []
self.comm = None
@staticmethod
def available():
return _available
def _send_message(self, message):
for pipe in self._pipes:
pipe.send(message)
def setup_workers(self):
if self._initialized:
return
self._initialized = True
self._master.cleargrads()
for i in six.moves.range(1, len(self._devices)):
pipe, worker_end = multiprocessing.Pipe()
worker = _Worker(i, worker_end, self)
worker.start()
self._workers.append(worker)
self._pipes.append(pipe)
with cuda.Device(self._devices[0]):
self._master.to_gpu(self._devices[0])
if len(self._devices) > 1:
comm_id = nccl.get_unique_id()
self._send_message(("set comm_id", comm_id))
self.comm = nccl.NcclCommunicator(len(self._devices),
comm_id, 0)
def update_core(self):
self.setup_workers()
self._send_message(('update', None))
with cuda.Device(self._devices[0]):
# For reducing memory
self._master.cleargrads()
optimizer = self.get_optimizer('main')
batch = self.get_iterator('main').next()
batch = self.converter(batch, self._devices[0])
loss = _calc_loss(self._master, batch)
self._master.cleargrads()
loss.backward()
# NCCL: reduce grads
null_stream = cuda.Stream.null
if self.comm is not None:
gg = gather_grads(self._master)
nccl_data_type = _get_nccl_data_type(gg.dtype)
self.comm.reduce(gg.data.ptr, gg.data.ptr, gg.size,
nccl_data_type, nccl.NCCL_SUM,
0, null_stream.ptr)
scatter_grads(self._master, gg)
del gg
optimizer.update()
if self.comm is not None:
gp = gather_params(self._master)
nccl_data_type = _get_nccl_data_type(gp.dtype)
self.comm.bcast(gp.data.ptr, gp.size, nccl_data_type,
0, null_stream.ptr)
def finalize(self):
self._send_message(('finalize', None))
for worker in self._workers:
worker.join()
def _calc_loss(model, in_arrays):
if isinstance(in_arrays, tuple):
return model(*in_arrays)
elif isinstance(in_arrays, dict):
return model(**in_arrays)
else:
return model(in_arrays)
def size_num_grads(link):
"""Count total size of all gradient arrays of a given link
Args:
link (chainer.link.Link): Target link object.
"""
size = 0
num = 0
for param in link.params():
if param.size == 0:
continue
size += param.size
num += 1
return size, num
def _memcpy_gather():
return cuda.cupy.ElementwiseKernel(
'raw T ptrs, raw X dtypes, raw Y info',
'raw float32 dst',
'''
int id_min = id_pre;
int id_max = num_src;
while (id_max - id_min > 1) {
int id = (id_max + id_min) / 2;
if (i < info[id]) id_max = id;
else id_min = id;
}
int id = id_min;
int i_dst = i;
int i_src = i;
if (id > 0) i_src -= info[id];
dst[i_dst] = 0;
if (ptrs[id] != NULL) {
if (dtypes[id] == 0) { // fp32
float *src = reinterpret_cast<float *>(ptrs[id]);
dst[i_dst] = src[i_src];
}
else { // fp16
float16 *src = reinterpret_cast<float16 *>(ptrs[id]);
dst[i_dst] = static_cast<float>(src[i_src]);
}
}
id_pre = id;
''',
'_memcpy_gather',
loop_prep='''
int num_src = info[0];
int id_pre = 0;
''')
def _gather(link, target):
size, num = size_num_grads(link)
ptrs = numpy.empty(num, dtype=numpy.uint64)
dtypes = numpy.empty(num, dtype=numpy.int8)
info = numpy.empty(num + 1, dtype=numpy.int32)
info[0] = 0
i = 0
for _, param in sorted(link.namedparams()):
if param.size == 0:
continue
ptrs[i] = 0 # NULL pointer
d = getattr(param, target)
if d is not None:
ptrs[i] = d.data.ptr
dtypes[i] = 0 # fp32
if param.dtype == numpy.float16:
dtypes[i] = 1 # fp16
info[i + 1] = info[i] + param.size
i += 1
info[0] = num
ptrs = cuda.to_gpu(ptrs)
dtypes = cuda.to_gpu(dtypes)
info = cuda.to_gpu(info)
return _memcpy_gather()(ptrs, dtypes, info, size=size)
def gather_grads(link):
"""Put together all gradient arrays and make a single array
Args:
link (chainer.link.Link): Target link object.
Return:
cupy.ndarray
"""
if link.xp is numpy:
raise RuntimeError('gather_grads works only on GPU.')
return _gather(link, "grad")
def gather_params(link):
"""Put together all gradient arrays and make a single array
Args:
link (chainer.link.Link): Target link object.
Return:
cupy.ndarray
"""
if link.xp is numpy:
raise RuntimeError('Link.gather_params works only on GPU.')
return _gather(link, "data")
def _memcpy_scatter():
return cuda.cupy.ElementwiseKernel(
'raw T ptrs, raw X dtypes, raw Y info, raw float32 array',
'',
'''
int id_min = id_pre;
int id_max = num_src;
while (id_max - id_min > 1) {
int id = (id_max + id_min) / 2;
if (i < info[id]) id_max = id;
else id_min = id;
}
int id = id_min;
int i_src = i;
int i_dst = i;
if (id > 0) i_dst -= info[id];
if (ptrs[id] != NULL) {
if (dtypes[id] == 0) { // fp32
float *dst = reinterpret_cast<float *>(ptrs[id]);
dst[i_dst] = array[i_src];
}
else { // fp16
float16 *dst = reinterpret_cast<float16 *>(ptrs[id]);
dst[i_dst] = static_cast<float16>(array[i_src]);
}
}
id_pre = id;
''',
'_memcpy_scatter',
loop_prep='''
int num_src = info[0];
int id_pre = 0;
''')
def _scatter(link, array, target):
size, num = size_num_grads(link)
ptrs = numpy.zeros(num, dtype=numpy.uint64)
dtypes = numpy.zeros(num, dtype=numpy.int8)
info = numpy.zeros(num + 1, dtype=numpy.int32)
info[0] = 0
i = 0
for _, param in sorted(link.namedparams()):
if param.size == 0:
continue
ptrs[i] = 0 # NULL pointer
d = getattr(param, target)
if d is None:
d = cuda.cupy.zeros(param.shape, dtype=param.dtype)
setattr(param, target, d)
ptrs[i] = d.data.ptr
dtypes[i] = 0 # fp32
if param.dtype == numpy.float16:
dtypes[i] = 1 # fp16
info[i + 1] = info[i] + param.size
i += 1
if i != num:
        raise ValueError('number of gathered parameters does not match the link')
info[0] = num
ptrs = cuda.to_gpu(ptrs)
dtypes = cuda.to_gpu(dtypes)
info = cuda.to_gpu(info)
return _memcpy_scatter()(ptrs, dtypes, info, array, size=size)
def scatter_grads(link, array):
"""Put back contents of the specified array to the related gradient arrays
Args:
link (chainer.link.Link): Target link object.
array (cupy.ndarray): gathered array created by gather_grads()
"""
return _scatter(link, array, "grad")
def scatter_params(link, array):
"""Put back contents of the specified array to the related gradient arrays
Args:
link (chainer.link.Link): Target link object.
array (cupy.ndarray): gathered array created by gather_params()
"""
return _scatter(link, array, "data")
def _get_nccl_data_type(dtype):
"""Get data type for NCCL"""
if dtype == numpy.float32:
nccl_data_type = nccl.NCCL_FLOAT
elif dtype == numpy.float16:
nccl_data_type = nccl.NCCL_HALF
elif dtype == numpy.float64:
nccl_data_type = nccl.NCCL_DOUBLE
else:
raise RuntimeError('Unexpected data type:{}'.format(dtype))
return nccl_data_type
|
aonotas/chainer
|
chainer/training/updaters/multiprocess_parallel_updater.py
|
Python
|
mit
| 15,115 | 0 |
#Ret Samys, creator of this program, can be found at RetSamys.deviantArt.com
#Please feel free to change anything or to correct me or to make requests... I'm a really bad coder. =)
#Watch Andrew Huang's video here: https://www.youtube.com/watch?v=4IAZY7JdSHU
changecounter=0
path="for_elise_by_beethoven.mid"
print """Welcome to my horribly inefficient program to tonal invert MIDI files according to Andrew Huang's #MIDIFLIP challenge as seen on https://www.youtube.com/watch?v=4IAZY7JdSHU
"""
pth=raw_input("Please enter your MIDI file's path here (save the file in the same directory as this program if you want to avoid typing the entire path): ")
if pth!="":path=pth
try:
f=open(path,"rb")
except:
try:
f=open(path+".mid","rb")
except:
print "Sorry, but are you sure this is where the file is?"
cset=raw_input("As a standard setting, this program will flip all notes around C', which will flip the 'hands'. To use this mode press enter. You can use the old mode, which keeps the 'hands' where they are, but also creates a whole bunch of errors, by entering anything at all: ")
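# Worked example of the flip (illustrative comment, not in the original): with the
# default C' pivot a note-on at MIDI pitch 64 gets offset=(60-64)*2=-8 and is
# rewritten to 56; in general every pitch p is mirrored around 60, ending up at 120-p.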
print "Program running"
print "You may abort any time by hitting CTRL+C"
midi=f.read()
writeme="".join(midi.split("MTrk")[0]) #final string to be written into new file
for i in midi.split("MTrk")[1:]: #skip header chunk and jump directly into track chunk
print "Editing Track "+str(midi.split("MTrk").index(i))+" of "+str(len(midi.split("MTrk"))-1)
lowcount=0
highcount=0
i=list(i) #split string into list of characters
delta=True #default value for checking delta_time
offset=0 #default value for flipping pitch according to last event - since there is no such event at the beginning of a track
for byte in range(len(i[4:])): #skip length bytes
if delta: #delta_time checking mode
if ord(i[4:][byte])>127: #determine if this is the last byte of the variable-length quantity for delta_time
delta=False #found last byte! next byte should be event
else:
pass
else: #event checking mode
if ord(i[4:][byte])==255 and ord(i[4:][byte+1])==81 and ord(i[4:][byte+2])==3: #check for set tempo meta-event
byte+=5 #skip set tempo meta-event
elif ord(i[4:][byte])>=144 and ord(i[4:][byte])<=159: #check for note on event
byte+=1 #go to note byte
if cset=="":
offset=(60-ord(i[4:][byte]))*2 #calculate offset to c'
else:
try: #skipped if lastnote is not defined
offset+=(ord(lastnote)-ord(i[4:][byte]))*2 #calculate offset
except NameError:
pass
lastnote=i[4:][byte] #set current note to compare to next note before it's changed!
try:i[byte+4]=chr(ord(i[4:][byte])+offset) #change note
except:
if ord(i[4:][byte])+offset>127:
i[byte+4]=chr(127)
highcount+=1
else:
i[byte+4]=chr(0)
lowcount+=1
#journey to note off starts here
for offbyte in range(len(i[byte+4:])):
					if ord(i[byte+4:][offbyte])==255 and ord(i[byte+4:][offbyte+1])==81 and ord(i[byte+4:][offbyte+2])==3: #check for set tempo meta-event
offbyte+=5 #skip set tempo meta-event
elif ord(i[byte+4:][offbyte])>=128 and ord(i[byte+4:][offbyte])<=137 and i[byte+4:][offbyte+1]==lastnote: #check if the same note is off
try:i[byte+4+offbyte+1]=chr(ord(i[byte+4:][offbyte+1])+offset) #change note
except:
							if ord(i[byte+4:][offbyte+1])+offset>127:
								i[byte+4+offbyte+1]=chr(127)
							else:
								i[byte+4+offbyte+1]=chr(0)
changecounter+=1
break
elif ord(i[byte+4:][offbyte])==123: #all notes off
changecounter+=1
break
elif ord(i[byte+4:][offbyte])>=160 and ord(i[byte+4:][offbyte])<=175 and i[byte+4:][offbyte+1]==lastnote: #polyphonic aftertouch - just in case? Urgh, I don't actually understand this enough, when is this activated and is there a way to deactivate!?!?
try:i[offbyte+1+byte+4]=chr(ord(i[byte+4:][offbyte+1])+offset) #change note
except:
i[offbyte+1+byte+4]=chr(127)
else:
pass
byte+=1 #skip velocity byte
else:
pass
if lowcount or highcount:print "WARNING: There were notes out of range: "+str(lowcount)+" too low and "+str(highcount)+" too high."
writeme=writeme+"MTrk"+"".join(i) #join list of characters to final string
counter=1
path=path.replace(".mid","")
while True:
try:
newfile = open(path+"_midiflip_"+str(counter)+".mid")
newfile.close()
counter+=1
except IOError as e:
newfile = open(path+"_midiflip_"+str(counter)+".mid","wb")
newfile.write(writeme)
newfile.close()
break
print "End of the line..."
print str(changecounter)+" notes changed"
|
RetSamys/midiflip
|
midiflip.py
|
Python
|
gpl-3.0
| 5,501 | 0.028722 |
# -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from synapse.types import EventID, RoomID, UserID
from synapse.api.errors import SynapseError
from synapse.api.constants import EventTypes, Membership
class EventValidator(object):
def validate(self, event):
EventID.from_string(event.event_id)
RoomID.from_string(event.room_id)
required = [
# "auth_events",
"content",
# "hashes",
"origin",
# "prev_events",
"sender",
"type",
]
for k in required:
if not hasattr(event, k):
raise SynapseError(400, "Event does not have key %s" % (k,))
# Check that the following keys have string values
strings = [
"origin",
"sender",
"type",
]
if hasattr(event, "state_key"):
strings.append("state_key")
for s in strings:
if not isinstance(getattr(event, s), basestring):
raise SynapseError(400, "Not '%s' a string type" % (s,))
if event.type == EventTypes.Member:
if "membership" not in event.content:
raise SynapseError(400, "Content has not membership key")
if event.content["membership"] not in Membership.LIST:
raise SynapseError(400, "Invalid membership key")
# Check that the following keys have dictionary values
# TODO
# Check that the following keys have the correct format for DAGs
# TODO
def validate_new(self, event):
self.validate(event)
UserID.from_string(event.sender)
if event.type == EventTypes.Message:
strings = [
"body",
"msgtype",
]
self._ensure_strings(event.content, strings)
elif event.type == EventTypes.Topic:
self._ensure_strings(event.content, ["topic"])
elif event.type == EventTypes.Name:
self._ensure_strings(event.content, ["name"])
def _ensure_strings(self, d, keys):
for s in keys:
if s not in d:
raise SynapseError(400, "'%s' not in content" % (s,))
if not isinstance(d[s], basestring):
raise SynapseError(400, "Not '%s' a string type" % (s,))
|
illicitonion/synapse
|
synapse/events/validator.py
|
Python
|
apache-2.0
| 2,913 | 0 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# RawSpeed documentation build configuration file, created by
# sphinx-quickstart on Mon Aug 14 18:30:09 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
import os
import sys
sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.githubpages', 'sphinx-pyexec']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'RawSpeed'
copyright = '2009-2016 Klaus Post, 2016-2019 Roman Lebedev'
author = '(c) Authors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = ''
# The full version, including alpha/beta/rc tags.
release = ''
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
|
aferrero2707/PhotoFlow
|
src/external/rawspeed/docs/conf.py
|
Python
|
gpl-3.0
| 3,214 | 0 |
"""
Functions for calculating statistics and handling uncertainties.
(c) Oscar Branson : https://github.com/oscarbranson
"""
import numpy as np
import uncertainties.unumpy as un
import scipy.interpolate as interp
from scipy.stats import pearsonr
def nan_pearsonr(x, y):
xy = np.vstack([x, y])
xy = xy[:, ~np.any(np.isnan(xy),0)]
n = len(x)
if xy.shape[-1] < n // 2:
return np.nan, np.nan
return pearsonr(xy[0], xy[1])
def R2calc(meas, model, force_zero=False):
if force_zero:
SStot = np.sum(meas**2)
else:
SStot = np.sum((meas - np.nanmean(meas))**2)
SSres = np.sum((meas - model)**2)
return 1 - (SSres / SStot)
# uncertainties unpackers
def unpack_uncertainties(uarray):
"""
Convenience function to unpack nominal values and uncertainties from an
``uncertainties.uarray``.
Returns:
(nominal_values, std_devs)
"""
try:
return un.nominal_values(uarray), un.std_devs(uarray)
except:
return uarray, None
def nominal_values(a):
try:
return un.nominal_values(a)
except:
return a
def std_devs(a):
try:
return un.std_devs(a)
except:
return a
def gauss_weighted_stats(x, yarray, x_new, fwhm):
"""
    Calculate gaussian weighted moving mean, SD and SE.
Parameters
----------
x : array-like
The independent variable
yarray : (n,m) array
Where n = x.size, and m is the number of
dependent variables to smooth.
x_new : array-like
The new x-scale to interpolate the data
fwhm : int
FWHM of the gaussian kernel.
Returns
-------
(mean, std, se) : tuple
"""
sigma = fwhm / (2 * np.sqrt(2 * np.log(2)))
# create empty mask array
mask = np.zeros((x.size, yarray.shape[1], x_new.size))
# fill mask
for i, xni in enumerate(x_new):
mask[:, :, i] = gauss(x[:, np.newaxis], 1, xni, sigma)
# normalise mask
nmask = mask / mask.sum(0) # sum of each gaussian = 1
# calculate moving average
av = (nmask * yarray[:, :, np.newaxis]).sum(0) # apply mask to data
# sum along xn axis to get means
# calculate moving sd
diff = np.power(av - yarray[:, :, np.newaxis], 2)
std = np.sqrt((diff * nmask).sum(0))
# sqrt of weighted average of data-mean
# calculate moving se
se = std / np.sqrt(mask.sum(0))
# max amplitude of weights is 1, so sum of weights scales
# a fn of how many points are nearby. Use this as 'n' in
# SE calculation.
return av, std, se
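# Usage sketch (values are illustrative, not from the original module): smooth two
# noisy traces sampled at `x` onto a coarser grid `x_new` with a 5-unit FWHM kernel.
#
#     x = np.linspace(0, 100, 500)
#     y = np.vstack([np.sin(x / 10), np.cos(x / 10)]).T   # shape (n, m) = (500, 2)
#     x_new = np.linspace(0, 100, 50)
#     av, std, se = gauss_weighted_stats(x, y, x_new, fwhm=5.)
#     # av, std and se each have shape (m, x_new.size) = (2, 50)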
def gauss(x, *p):
""" Gaussian function.
Parameters
----------
x : array_like
Independent variable.
*p : parameters unpacked to A, mu, sigma
A = amplitude, mu = centre, sigma = width
    Returns
    -------
array_like
        gaussian described by *p.
"""
A, mu, sigma = p
return A * np.exp(-0.5 * (-mu + x)**2 / sigma**2)
# Statistical Functions
def stderr(a):
"""
Calculate the standard error of a.
"""
return np.nanstd(a) / np.sqrt(sum(np.isfinite(a)))
# Robust Statistics. See:
# - https://en.wikipedia.org/wiki/Robust_statistics
# - http://www.cscjp.co.jp/fera/document/ANALYSTVol114Decpgs1693-97_1989.pdf
# - http://www.rsc.org/images/robust-statistics-technical-brief-6_tcm18-214850.pdf
# - http://www.itl.nist.gov/div898/software/dataplot/refman2/auxillar/h15.htm
def H15_mean(x):
"""
Calculate the Huber (H15) Robust mean of x.
For details, see:
http://www.cscjp.co.jp/fera/document/ANALYSTVol114Decpgs1693-97_1989.pdf
http://www.rsc.org/images/robust-statistics-technical-brief-6_tcm18-214850.pdf
"""
mu = np.nanmean(x)
sd = np.nanstd(x) * 1.134
sig = 1.5
hi = x > mu + sig * sd
lo = x < mu - sig * sd
if any(hi | lo):
x[hi] = mu + sig * sd
x[lo] = mu - sig * sd
return H15_mean(x)
else:
return mu
def H15_std(x):
"""
Calculate the Huber (H15) Robust standard deviation of x.
For details, see:
http://www.cscjp.co.jp/fera/document/ANALYSTVol114Decpgs1693-97_1989.pdf
http://www.rsc.org/images/robust-statistics-technical-brief-6_tcm18-214850.pdf
"""
mu = np.nanmean(x)
sd = np.nanstd(x) * 1.134
sig = 1.5
hi = x > mu + sig * sd
lo = x < mu - sig * sd
if any(hi | lo):
x[hi] = mu + sig * sd
x[lo] = mu - sig * sd
return H15_std(x)
else:
return sd
def H15_se(x):
"""
Calculate the Huber (H15) Robust standard deviation of x.
For details, see:
http://www.cscjp.co.jp/fera/document/ANALYSTVol114Decpgs1693-97_1989.pdf
http://www.rsc.org/images/robust-statistics-technical-brief-6_tcm18-214850.pdf
"""
sd = H15_std(x)
return sd / np.sqrt(sum(np.isfinite(x)))
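# Usage sketch (data are illustrative): the H15 estimators iteratively winsorise
# values beyond 1.5 robust standard deviations, so a single gross outlier barely
# moves the result compared with the plain mean/std. They clip in place, hence the
# copies.
#
#     a = np.array([9.8, 10.1, 10.0, 9.9, 10.2, 25.0])
#     H15_mean(a.copy())   # close to 10, unlike np.mean(a)
#     H15_std(a.copy())    # close to the spread of the inliers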
def get_total_n_points(d):
"""
Returns the total number of data points in values of dict.
    Parameters
    ----------
d : dict
"""
n = 0
for di in d.values():
n += len(di)
return n
def get_total_time_span(d):
"""
Returns total length of analysis.
"""
tmax = 0
for di in d.values():
if di.uTime.max() > tmax:
tmax = di.uTime.max()
return tmax
class un_interp1d(object):
"""
object for handling interpolation of values with uncertainties.
"""
def __init__(self, x, y, fill_value=np.nan, **kwargs):
if isinstance(fill_value, tuple):
nom_fill = tuple([un.nominal_values(v) for v in fill_value])
std_fill = tuple([un.std_devs(v) for v in fill_value])
else:
nom_fill = std_fill = fill_value
self.nom_interp = interp.interp1d(un.nominal_values(x),
un.nominal_values(y),
fill_value=nom_fill, **kwargs)
self.std_interp = interp.interp1d(un.nominal_values(x),
un.std_devs(y),
fill_value=std_fill, **kwargs)
def new(self, xn):
yn = self.nom_interp(xn)
yn_err = self.std_interp(xn)
return un.uarray(yn, yn_err)
def new_nom(self, xn):
return self.nom_interp(xn)
def new_std(self, xn):
return self.std_interp(xn)
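# Usage sketch (values are illustrative): interpolate a quantity that carries
# 1-sigma uncertainties.
#
#     x = np.array([0., 1., 2.])
#     y = un.uarray([0., 10., 20.], [0.1, 0.2, 0.3])
#     f = un_interp1d(x, y)
#     f.new(0.5)       # uarray: interpolated nominal value and uncertainty
#     f.new_nom(0.5)   # nominal value only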
def stack_keys(ddict, keys, extra=None):
"""
Combine elements of ddict into an array of shape (len(ddict[key]), len(keys)).
Useful for preparing data for sklearn.
Parameters
----------
ddict : dict
A dict containing arrays or lists to be stacked.
Must be of equal length.
keys : list or str
The keys of dict to stack. Must be present in ddict.
extra : list (optional)
A list of additional arrays to stack. Elements of extra
must be the same length as arrays in ddict.
Extras are inserted as the first columns of output.
"""
if isinstance(keys, str):
d = [ddict[keys]]
else:
d = [ddict[k] for k in keys]
if extra is not None:
d = extra + d
return np.vstack(d).T
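# Usage sketch (hypothetical dict): prepare analyte columns for sklearn.
#
#     d = {'Mg': np.array([1., 2., 3.]), 'Sr': np.array([4., 5., 6.])}
#     X = stack_keys(d, ['Mg', 'Sr'])                # shape (3, 2)
#     X2 = stack_keys(d, 'Mg', extra=[np.ones(3)])   # extras form the first column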
|
oscarbranson/latools
|
latools/helpers/stat_fns.py
|
Python
|
mit
| 7,150 | 0.002238 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2013-TODAY OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.addons.portal_project.tests.test_access_rights import TestPortalProjectBase
from openerp.exceptions import AccessError
from openerp.osv.orm import except_orm
from openerp.tools import mute_logger
class TestPortalProjectBase(TestPortalProjectBase):
def setUp(self):
super(TestPortalProjectBase, self).setUp()
cr, uid = self.cr, self.uid
# Useful models
self.project_issue = self.registry('project.issue')
# Various test issues
self.issue_1_id = self.project_issue.create(cr, uid, {
'name': 'Test1', 'user_id': False, 'project_id': self.project_pigs_id}, {'mail_create_nolog': True})
self.issue_2_id = self.project_issue.create(cr, uid, {
'name': 'Test2', 'user_id': False, 'project_id': self.project_pigs_id}, {'mail_create_nolog': True})
self.issue_3_id = self.project_issue.create(cr, uid, {
'name': 'Test3', 'user_id': False, 'project_id': self.project_pigs_id}, {'mail_create_nolog': True})
self.issue_4_id = self.project_issue.create(cr, uid, {
'name': 'Test4', 'user_id': self.user_projectuser_id, 'project_id': self.project_pigs_id}, {'mail_create_nolog': True})
self.issue_5_id = self.project_issue.create(cr, uid, {
'name': 'Test5', 'user_id': self.user_portal_id, 'project_id': self.project_pigs_id}, {'mail_create_nolog': True})
self.issue_6_id = self.project_issue.create(cr, uid, {
'name': 'Test6', 'user_id': self.user_public_id, 'project_id': self.project_pigs_id}, {'mail_create_nolog': True})
class TestPortalIssue(TestPortalProjectBase):
@mute_logger('openerp.addons.base.ir.ir_model', 'openerp.osv.orm')
def test_00_project_access_rights(self):
""" Test basic project access rights, for project and portal_project """
cr, uid, pigs_id = self.cr, self.uid, self.project_pigs_id
# ----------------------------------------
# CASE1: public project
# ----------------------------------------
# Do: Alfred reads project -> ok (employee ok public)
# Test: all project issues visible
issue_ids = self.project_issue.search(cr, self.user_projectuser_id, [('project_id', '=', pigs_id)])
test_issue_ids = set([self.issue_1_id, self.issue_2_id, self.issue_3_id, self.issue_4_id, self.issue_5_id, self.issue_6_id])
self.assertEqual(set(issue_ids), test_issue_ids,
'access rights: project user cannot see all issues of a public project')
# Test: all project issues readable
self.project_issue.read(cr, self.user_projectuser_id, issue_ids, ['name'])
# Test: all project issues writable
self.project_issue.write(cr, self.user_projectuser_id, issue_ids, {'description': 'TestDescription'})
# Do: Bert reads project -> crash, no group
# Test: no project issue visible
self.assertRaises(AccessError, self.project_issue.search, cr, self.user_none_id, [('project_id', '=', pigs_id)])
# Test: no project issue readable
self.assertRaises(AccessError, self.project_issue.read, cr, self.user_none_id, issue_ids, ['name'])
# Test: no project issue writable
self.assertRaises(AccessError, self.project_issue.write, cr, self.user_none_id, issue_ids, {'description': 'TestDescription'})
# Do: Chell reads project -> ok (portal ok public)
# Test: all project issues visible
issue_ids = self.project_issue.search(cr, self.user_portal_id, [('project_id', '=', pigs_id)])
self.assertEqual(set(issue_ids), test_issue_ids,
                         'access rights: portal user cannot see all issues of a public project')
# Test: all project issues readable
self.project_issue.read(cr, self.user_portal_id, issue_ids, ['name'])
# Test: no project issue writable
self.assertRaises(AccessError, self.project_issue.write, cr, self.user_portal_id, issue_ids, {'description': 'TestDescription'})
# Do: Donovan reads project -> ok (public ok public)
# Test: all project issues visible
issue_ids = self.project_issue.search(cr, self.user_public_id, [('project_id', '=', pigs_id)])
self.assertEqual(set(issue_ids), test_issue_ids,
                         'access rights: public user cannot see all issues of a public project')
# ----------------------------------------
# CASE2: portal project
# ----------------------------------------
self.project_project.write(cr, uid, [pigs_id], {'privacy_visibility': 'portal'})
# Do: Alfred reads project -> ok (employee ok public)
# Test: all project issues visible
issue_ids = self.project_issue.search(cr, self.user_projectuser_id, [('project_id', '=', pigs_id)])
self.assertEqual(set(issue_ids), test_issue_ids,
'access rights: project user cannot see all issues of a portal project')
# Do: Bert reads project -> crash, no group
# Test: no project issue searchable
self.assertRaises(AccessError, self.project_issue.search, cr, self.user_none_id, [('project_id', '=', pigs_id)])
# Data: issue follower
self.project_issue.message_subscribe_users(cr, self.user_projectuser_id, [self.issue_1_id, self.issue_3_id], [self.user_portal_id])
# Do: Chell reads project -> ok (portal ok public)
# Test: only followed project issues visible + assigned
issue_ids = self.project_issue.search(cr, self.user_portal_id, [('project_id', '=', pigs_id)])
test_issue_ids = set([self.issue_1_id, self.issue_3_id, self.issue_5_id])
self.assertEqual(set(issue_ids), test_issue_ids,
'access rights: portal user should see the followed issues of a portal project')
# Data: issue follower cleaning
self.project_issue.message_unsubscribe_users(cr, self.user_projectuser_id, [self.issue_1_id, self.issue_3_id], [self.user_portal_id])
# ----------------------------------------
# CASE3: employee project
# ----------------------------------------
self.project_project.write(cr, uid, [pigs_id], {'privacy_visibility': 'employees'})
# Do: Alfred reads project -> ok (employee ok employee)
# Test: all project issues visible
issue_ids = self.project_issue.search(cr, self.user_projectuser_id, [('project_id', '=', pigs_id)])
test_issue_ids = set([self.issue_1_id, self.issue_2_id, self.issue_3_id, self.issue_4_id, self.issue_5_id, self.issue_6_id])
self.assertEqual(set(issue_ids), test_issue_ids,
'access rights: project user cannot see all issues of an employees project')
# Do: Chell reads project -> ko (portal ko employee)
# Test: no project issue visible + assigned
issue_ids = self.project_issue.search(cr, self.user_portal_id, [('project_id', '=', pigs_id)])
self.assertFalse(issue_ids, 'access rights: portal user should not see issues of an employees project, even if assigned')
# ----------------------------------------
# CASE4: followers project
# ----------------------------------------
self.project_project.write(cr, uid, [pigs_id], {'privacy_visibility': 'followers'})
# Do: Alfred reads project -> ko (employee ko followers)
# Test: no project issue visible
issue_ids = self.project_issue.search(cr, self.user_projectuser_id, [('project_id', '=', pigs_id)])
test_issue_ids = set([self.issue_4_id])
self.assertEqual(set(issue_ids), test_issue_ids,
'access rights: employee user should not see issues of a not-followed followers project, only assigned')
# Do: Chell reads project -> ko (portal ko employee)
# Test: no project issue visible
issue_ids = self.project_issue.search(cr, self.user_portal_id, [('project_id', '=', pigs_id)])
test_issue_ids = set([self.issue_5_id])
self.assertEqual(set(issue_ids), test_issue_ids,
'access rights: portal user should not see issues of a not-followed followers project, only assigned')
# Data: subscribe Alfred, Chell and Donovan as follower
self.project_project.message_subscribe_users(cr, uid, [pigs_id], [self.user_projectuser_id, self.user_portal_id, self.user_public_id])
self.project_issue.message_subscribe_users(cr, self.user_manager_id, [self.issue_1_id, self.issue_3_id], [self.user_portal_id, self.user_projectuser_id])
# Do: Alfred reads project -> ok (follower ok followers)
# Test: followed + assigned issues visible
issue_ids = self.project_issue.search(cr, self.user_projectuser_id, [('project_id', '=', pigs_id)])
test_issue_ids = set([self.issue_1_id, self.issue_3_id, self.issue_4_id])
self.assertEqual(set(issue_ids), test_issue_ids,
                         'access rights: employee user should only see followed + assigned issues of a followers project')
# Do: Chell reads project -> ok (follower ok follower)
# Test: followed + assigned issues visible
issue_ids = self.project_issue.search(cr, self.user_portal_id, [('project_id', '=', pigs_id)])
test_issue_ids = set([self.issue_1_id, self.issue_3_id, self.issue_5_id])
self.assertEqual(set(issue_ids), test_issue_ids,
                         'access rights: portal user should only see followed + assigned issues of a followers project')
|
poiesisconsulting/openerp-restaurant
|
portal_project_issue/tests/test_access_rights.py
|
Python
|
agpl-3.0
| 10,548 | 0.004645 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Proprietary and confidential.
# Copyright 2011 Perfect Search Corporation.
# All rights reserved.
#
import sys
sys.dont_write_bytecode = True
#import clientplugin
import fastbranches
import serverplugin
|
perfectsearch/sandman
|
code/bzr-plugins/__init__.py
|
Python
|
mit
| 254 | 0.015748 |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import fnmatch
import functools
import itertools
import os
import pkg_resources
import six
import yaml
from jsonpath_rw_ext import parser
from oslo_config import cfg
from oslo_log import log
import oslo_messaging
from ceilometer.agent import plugin_base
from ceilometer.i18n import _LE, _LI
from ceilometer import sample
OPTS = [
cfg.StrOpt('meter_definitions_cfg_file',
default="meters.yaml",
help="Configuration file for defining meter notifications."
),
]
cfg.CONF.register_opts(OPTS, group='meter')
cfg.CONF.import_opt('disable_non_metric_meters', 'ceilometer.notification',
group='notification')
LOG = log.getLogger(__name__)
class MeterDefinitionException(Exception):
def __init__(self, message, definition_cfg):
super(MeterDefinitionException, self).__init__(message)
self.message = message
self.definition_cfg = definition_cfg
def __str__(self):
return '%s %s: %s' % (self.__class__.__name__,
self.definition_cfg, self.message)
class MeterDefinition(object):
JSONPATH_RW_PARSER = parser.ExtentedJsonPathParser()
REQUIRED_FIELDS = ['name', 'type', 'event_type', 'unit', 'volume',
'resource_id']
def __init__(self, definition_cfg):
self.cfg = definition_cfg
missing = [field for field in self.REQUIRED_FIELDS
if not self.cfg.get(field)]
if missing:
raise MeterDefinitionException(
_LE("Required fields %s not specified") % missing, self.cfg)
self._event_type = self.cfg.get('event_type')
if isinstance(self._event_type, six.string_types):
self._event_type = [self._event_type]
if ('type' not in self.cfg.get('lookup', []) and
self.cfg['type'] not in sample.TYPES):
raise MeterDefinitionException(
_LE("Invalid type %s specified") % self.cfg['type'], self.cfg)
self._field_getter = {}
for name, field in self.cfg.items():
if name in ["event_type", "lookup"] or not field:
continue
elif isinstance(field, six.integer_types):
self._field_getter[name] = field
elif isinstance(field, dict) and name == 'metadata':
meta = {}
for key, val in field.items():
parts = self.parse_jsonpath(val)
meta[key] = functools.partial(self._parse_jsonpath_field,
parts)
self._field_getter['metadata'] = meta
else:
parts = self.parse_jsonpath(field)
self._field_getter[name] = functools.partial(
self._parse_jsonpath_field, parts)
def parse_jsonpath(self, field):
try:
parts = self.JSONPATH_RW_PARSER.parse(field)
except Exception as e:
raise MeterDefinitionException(_LE(
"Parse error in JSONPath specification "
"'%(jsonpath)s': %(err)s")
% dict(jsonpath=field, err=e), self.cfg)
return parts
def match_type(self, meter_name):
for t in self._event_type:
if fnmatch.fnmatch(meter_name, t):
return True
def parse_fields(self, field, message, all_values=False):
getter = self._field_getter.get(field)
if not getter:
return
elif isinstance(getter, dict):
dict_val = {}
for key, val in getter.items():
dict_val[key] = val(message, all_values)
return dict_val
elif callable(getter):
return getter(message, all_values)
else:
return getter
@staticmethod
def _parse_jsonpath_field(parts, message, all_values):
values = [match.value for match in parts.find(message)
if match.value is not None]
if values:
if not all_values:
return values[0]
return values
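# Illustrative definition (hypothetical values, not taken from data/meters.yaml):
# each entry of the 'metric' list in the YAML file becomes a MeterDefinition, and
# its string fields are JSONPath expressions evaluated against the notification
# body, e.g.
#
#     MeterDefinition({
#         'name': 'image.size',
#         'event_type': 'image.upload',
#         'type': 'gauge',
#         'unit': 'B',
#         'volume': '$.payload.size',
#         'resource_id': '$.payload.id',
#         'project_id': '$.payload.owner',
#     })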
def get_config_file():
config_file = cfg.CONF.meter.meter_definitions_cfg_file
if not os.path.exists(config_file):
config_file = cfg.CONF.find_file(config_file)
if not config_file:
config_file = pkg_resources.resource_filename(
__name__, "data/meters.yaml")
return config_file
def setup_meters_config():
"""Setup the meters definitions from yaml config file."""
config_file = get_config_file()
if config_file is not None:
LOG.debug(_LE("Meter Definitions configuration file: %s"), config_file)
with open(config_file) as cf:
config = cf.read()
try:
meters_config = yaml.safe_load(config)
except yaml.YAMLError as err:
if hasattr(err, 'problem_mark'):
mark = err.problem_mark
errmsg = (_LE("Invalid YAML syntax in Meter Definitions file "
"%(file)s at line: %(line)s, column: %(column)s.")
% dict(file=config_file,
line=mark.line + 1,
column=mark.column + 1))
else:
errmsg = (_LE("YAML error reading Meter Definitions file "
"%(file)s")
% dict(file=config_file))
LOG.error(errmsg)
raise
else:
LOG.debug(_LE("No Meter Definitions configuration file found!"
" Using default config."))
meters_config = {}
LOG.info(_LI("Meter Definitions: %s"), meters_config)
return meters_config
def load_definitions(config_def):
if not config_def:
return []
meter_defs = []
for event_def in reversed(config_def['metric']):
try:
if (event_def['volume'] != 1 or
not cfg.CONF.notification.disable_non_metric_meters):
meter_defs.append(MeterDefinition(event_def))
except MeterDefinitionException as me:
errmsg = (_LE("Error loading meter definition : %(err)s")
% dict(err=me.message))
LOG.error(errmsg)
return meter_defs
class InvalidPayload(Exception):
pass
class ProcessMeterNotifications(plugin_base.NotificationBase):
event_types = []
def __init__(self, manager):
super(ProcessMeterNotifications, self).__init__(manager)
self.definitions = load_definitions(setup_meters_config())
def get_targets(self, conf):
"""Return a sequence of oslo_messaging.Target
It is defining the exchange and topics to be connected for this plugin.
:param conf: Configuration.
#TODO(prad): This should be defined in the notification agent
"""
targets = []
exchanges = [
conf.nova_control_exchange,
conf.cinder_control_exchange,
conf.glance_control_exchange,
conf.neutron_control_exchange,
conf.heat_control_exchange,
conf.keystone_control_exchange,
conf.sahara_control_exchange,
conf.trove_control_exchange,
conf.zaqar_control_exchange,
conf.swift_control_exchange,
conf.magnetodb_control_exchange,
conf.ceilometer_control_exchange,
conf.magnum_control_exchange,
]
for exchange in exchanges:
targets.extend(oslo_messaging.Target(topic=topic,
exchange=exchange)
for topic in conf.notification_topics)
return targets
@staticmethod
def _normalise_as_list(value, d, body, length):
values = d.parse_fields(value, body, True)
if not values:
if value in d.cfg.get('lookup'):
LOG.warning('Could not find %s values', value)
raise InvalidPayload
values = [d.cfg[value]]
elif value in d.cfg.get('lookup') and length != len(values):
LOG.warning('Not all fetched meters contain "%s" field', value)
raise InvalidPayload
return values if isinstance(values, list) else [values]
def process_notification(self, notification_body):
for d in self.definitions:
if d.match_type(notification_body['event_type']):
userid = self.get_user_id(d, notification_body)
projectid = self.get_project_id(d, notification_body)
resourceid = d.parse_fields('resource_id', notification_body)
ts = d.parse_fields('timestamp', notification_body)
metadata = d.parse_fields('metadata', notification_body)
if d.cfg.get('lookup'):
meters = d.parse_fields('name', notification_body, True)
if not meters: # skip if no meters in payload
break
try:
resources = self._normalise_as_list(
'resource_id', d, notification_body, len(meters))
volumes = self._normalise_as_list(
'volume', d, notification_body, len(meters))
units = self._normalise_as_list(
'unit', d, notification_body, len(meters))
types = self._normalise_as_list(
'type', d, notification_body, len(meters))
users = (self._normalise_as_list(
'user_id', d, notification_body, len(meters))
if 'user_id' in d.cfg['lookup'] else [userid])
projs = (self._normalise_as_list(
'project_id', d, notification_body, len(meters))
if 'project_id' in d.cfg['lookup']
else [projectid])
times = (self._normalise_as_list(
'timestamp', d, notification_body, len(meters))
if 'timestamp' in d.cfg['lookup'] else [ts])
except InvalidPayload:
break
for m, v, unit, t, r, p, user, ts in zip(
meters, volumes, itertools.cycle(units),
itertools.cycle(types), itertools.cycle(resources),
itertools.cycle(projs), itertools.cycle(users),
itertools.cycle(times)):
yield sample.Sample.from_notification(
name=m, type=t, unit=unit, volume=v,
resource_id=r, user_id=user, project_id=p,
message=notification_body, timestamp=ts,
metadata=metadata)
else:
yield sample.Sample.from_notification(
name=d.cfg['name'],
type=d.cfg['type'],
unit=d.cfg['unit'],
volume=d.parse_fields('volume', notification_body),
resource_id=resourceid,
user_id=userid,
project_id=projectid,
message=notification_body,
timestamp=ts, metadata=metadata)
@staticmethod
def get_user_id(d, notification_body):
return (d.parse_fields('user_id', notification_body) or
notification_body.get('_context_user_id') or
notification_body.get('_context_user', None))
@staticmethod
def get_project_id(d, notification_body):
return (d.parse_fields('project_id', notification_body) or
notification_body.get('_context_tenant_id') or
notification_body.get('_context_tenant', None))
|
cernops/ceilometer
|
ceilometer/meter/notifications.py
|
Python
|
apache-2.0
| 12,538 | 0 |
# -*- coding: utf-8 -*-
# ########################## Copyrights and license ############################
# #
# Copyright 2012 Vincent Jacques <vincent@vincent-jacques.net> #
# Copyright 2012 Zearin <zearin@gonk.net> #
# Copyright 2013 Vincent Jacques <vincent@vincent-jacques.net> #
# #
# This file is part of PyGithub. #
# http://pygithub.github.io/PyGithub/v1/index.html #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
# ##############################################################################
import datetime
import Framework
class Issue54(Framework.TestCase):
def setUp(self):
Framework.TestCase.setUp(self)
self.repo = self.g.get_user().get_repo("TestRepo")
def testConversion(self):
commit = self.repo.get_git_commit("73f320ae06cd565cf38faca34b6a482addfc721b")
self.assertEqual(commit.message, "Test commit created around Fri, 13 Jul 2012 18:43:21 GMT, that is vendredi 13 juillet 2012 20:43:21 GMT+2\n")
self.assertEqual(commit.author.date, datetime.datetime(2012, 7, 13, 18, 47, 10))
|
FireBladeNooT/Medusa_1_6
|
lib/github/tests/Issue54.py
|
Python
|
gpl-3.0
| 2,437 | 0.010259 |
import notify2
import os
from time import *
start_time = time()
notify2.init('')
r = notify2.Notification('', '')
while True:
for i in [ ('TO DO', 'Write JavaScript'),
('TO DO', 'Write Python'),
('Thought of the Day', 'Support Open Source'),
('Learn. . .', 'Use Linux'),
('Thought of the Day', 'Stay Cool'),
('Thought of the Day', 'Stop running for cheese'),
('Thought of the Day', 'You are cool')]:
r.update(i[0], i[1])
sleep(120)
x = int(time() - start_time)%120
if x == 119:
os.system('play --no-show-progress --null --channels 1 synth %s sine %f' % ( 0.5, 500))
r.show()
|
OpenC-IIIT/scriptonia
|
notif.py
|
Python
|
mit
| 695 | 0.021583 |
import statsmodels.tsa.stattools as st
import matplotlib.pylab as plt
import numpy as np
import pandas as pd
df = pd.read_csv('gld_uso.csv')
cols = ['GLD','USO']
df['hedgeRatio'] = df['USO'] / df['GLD']
data_mean = pd.rolling_mean(df['hedgeRatio'], window=20)
data_std = pd.rolling_std(df['hedgeRatio'], window=20)
df['numUnits'] = -1*(df['hedgeRatio']-data_mean) / data_std
positions = df[['numUnits','numUnits']].copy()
positions = positions * np.array([-1., 1.])
pnl = positions.shift(1) * np.array((df[cols] - df[cols].shift(1)) / df[cols].shift(1))
pnl = pnl.fillna(0).sum(axis=1)
ret=pnl / np.sum(np.abs(positions.shift(1)),axis=1)
print 'APR', ((np.prod(1.+ret))**(252./len(ret)))-1.
print 'Sharpe', np.sqrt(252.)*np.mean(ret)/np.std(ret)
|
burakbayramli/quant_at
|
book/Ratio.py
|
Python
|
gpl-3.0
| 750 | 0.008 |
#*************************************************************************
#* Dionaea
#* - catches bugs -
#*
#*
#*
# Copyright (c) 2009 Markus Koetter
# Copyright (c) 2001-2007 Twisted Matrix Laboratories.
# Copyright (c) 2001-2009
#
# Allen Short
# Andrew Bennetts
# Apple Computer, Inc.
# Benjamin Bruheim
# Bob Ippolito
# Canonical Limited
# Christopher Armstrong
# David Reid
# Donovan Preston
# Eric Mangold
# Itamar Shtull-Trauring
# James Knight
# Jason A. Mobarak
# Jean-Paul Calderone
# Jonathan Lange
# Jonathan D. Simms
# Juergen Hermann
# Kevin Turner
# Mary Gardiner
# Matthew Lefkowitz
# Massachusetts Institute of Technology
# Moshe Zadka
# Paul Swartz
# Pavel Pergamenshchik
# Ralph Meijer
# Sean Riley
# Software Freedom Conservancy
# Travis B. Hartwell
# Thomas Herve
# Eyal Lotem
# Antoine Pitrou
# Andy Gayton
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#*
#* contact nepenthesdev@gmail.com
#*
#*******************************************************************************/
# ftp server
from dionaea.core import connection, ihandler, g_dionaea, incident
import logging
import os
import urllib.parse
import tempfile
logger = logging.getLogger('ftp')
logger.setLevel(logging.DEBUG)
#
# the following definitions are taken from twisted
# MIT licensed code, gpl compatible
# Copyright (c) 2001-2007 Twisted Matrix Laboratories.
DATA_CNX_ALREADY_OPEN_START_XFR = "125"
FILE_STATUS_OK_OPEN_DATA_CNX = "150"
CMD_OK = "200.1"
TYPE_SET_OK = "200.2"
ENTERING_PORT_MODE = "200.3"
CMD_NOT_IMPLMNTD_SUPERFLUOUS = "202"
SYS_STATUS_OR_HELP_REPLY = "211"
DIR_STATUS = "212"
FILE_STATUS = "213"
HELP_MSG = "214"
NAME_SYS_TYPE = "215"
SVC_READY_FOR_NEW_USER = "220.1"
WELCOME_MSG = "220.2"
SVC_CLOSING_CTRL_CNX = "221"
GOODBYE_MSG = "221"
DATA_CNX_OPEN_NO_XFR_IN_PROGRESS = "225"
CLOSING_DATA_CNX = "226"
TXFR_COMPLETE_OK = "226"
ENTERING_PASV_MODE = "227"
ENTERING_EPSV_MODE = "229"
USR_LOGGED_IN_PROCEED = "230.1" # v1 of code 230
GUEST_LOGGED_IN_PROCEED = "230.2" # v2 of code 230
REQ_FILE_ACTN_COMPLETED_OK = "250"
PWD_REPLY = "257.1"
MKD_REPLY = "257.2"
USR_NAME_OK_NEED_PASS = "331.1" # v1 of Code 331
GUEST_NAME_OK_NEED_EMAIL = "331.2" # v2 of code 331
NEED_ACCT_FOR_LOGIN = "332"
REQ_FILE_ACTN_PENDING_FURTHER_INFO = "350"
SVC_NOT_AVAIL_CLOSING_CTRL_CNX = "421.1"
TOO_MANY_CONNECTIONS = "421.2"
CANT_OPEN_DATA_CNX = "425"
CNX_CLOSED_TXFR_ABORTED = "426"
REQ_ACTN_ABRTD_FILE_UNAVAIL = "450"
REQ_ACTN_ABRTD_LOCAL_ERR = "451"
REQ_ACTN_ABRTD_INSUFF_STORAGE = "452"
SYNTAX_ERR = "500"
SYNTAX_ERR_IN_ARGS = "501"
CMD_NOT_IMPLMNTD = "502"
BAD_CMD_SEQ = "503"
CMD_NOT_IMPLMNTD_FOR_PARAM = "504"
# v1 of code 530 - please log in
NOT_LOGGED_IN = "530.1"
# v2 of code 530 - authorization failure
AUTH_FAILURE = "530.2"
NEED_ACCT_FOR_STOR = "532"
# no such file or directory
FILE_NOT_FOUND = "550.1"
PERMISSION_DENIED = "550.2" # permission denied
# anonymous users can't alter filesystem
ANON_USER_DENIED = "550.3"
# rmd called on a path that is not a directory
IS_NOT_A_DIR = "550.4"
REQ_ACTN_NOT_TAKEN = "550.5"
FILE_EXISTS = "550.6"
IS_A_DIR = "550.7"
PAGE_TYPE_UNK = "551"
EXCEEDED_STORAGE_ALLOC = "552"
FILENAME_NOT_ALLOWED = "553"
RESPONSE = {
# -- 100's --
DATA_CNX_ALREADY_OPEN_START_XFR: '125 Data connection already open, starting transfer',
FILE_STATUS_OK_OPEN_DATA_CNX: '150 File status okay; about to open data connection.',
# -- 200's --
CMD_OK: '200 Command OK',
TYPE_SET_OK: '200 Type set to %s.',
ENTERING_PORT_MODE: '200 PORT OK',
SYS_STATUS_OR_HELP_REPLY: '211 System status reply',
DIR_STATUS: '212 %s',
FILE_STATUS: '213 %s',
HELP_MSG: '214 help: %s',
NAME_SYS_TYPE: '215 UNIX Type: L8',
WELCOME_MSG: "220 %s",
SVC_READY_FOR_NEW_USER: '220 Service ready',
GOODBYE_MSG: '221 Goodbye.',
DATA_CNX_OPEN_NO_XFR_IN_PROGRESS: '225 data connection open, no transfer in progress',
CLOSING_DATA_CNX: '226 Abort successful',
TXFR_COMPLETE_OK: '226 Transfer Complete.',
ENTERING_PASV_MODE: '227 Entering Passive Mode (%s).',
# where is epsv defined in the rfc's?
ENTERING_EPSV_MODE: '229 Entering Extended Passive Mode (|||%s|).',
USR_LOGGED_IN_PROCEED: '230 User logged in, proceed',
GUEST_LOGGED_IN_PROCEED: '230 Anonymous login ok, access restrictions apply.',
#i.e. CWD completed ok
REQ_FILE_ACTN_COMPLETED_OK: '250 Requested File Action Completed OK',
PWD_REPLY: '257 "%s"',
# -- 300's --
USR_NAME_OK_NEED_PASS: '331 Password required for %s.',
GUEST_NAME_OK_NEED_EMAIL: '331 Guest login ok, type your email address as password.',
REQ_FILE_ACTN_PENDING_FURTHER_INFO: '350 Requested file action pending further information.',
# -- 400's --
CANT_OPEN_DATA_CNX: "425 Can't open data connection.",
CNX_CLOSED_TXFR_ABORTED: '426 Transfer aborted. Data connection closed.',
# -- 500's --
SYNTAX_ERR: "500 Syntax error: %s",
SYNTAX_ERR_IN_ARGS: '501 syntax error in argument(s) %s.',
CMD_NOT_IMPLMNTD: "502 Command '%s' not implemented",
BAD_CMD_SEQ: '503 Incorrect sequence of commands: %s',
CMD_NOT_IMPLMNTD_FOR_PARAM: "504 Not implemented for parameter '%s'.",
NOT_LOGGED_IN: '530 Please login with USER and PASS.',
AUTH_FAILURE: '530 Sorry, Authentication failed.',
FILE_NOT_FOUND: '550 %s: No such file or directory.',
PERMISSION_DENIED: '550 %s: Permission denied.',
}
class ftpd(connection):
UNAUTH, INAUTH, AUTHED, RENAMING = range(4)
def __init__ (self, proto='tcp'):
connection.__init__(self, proto)
logger.debug("ftp test")
self.state = self.UNAUTH
self.user = 'bar'
self.dtp = None
self.cwd = '/'
self.basedir = '/tmp/ranz'
self.dtp = None
self.dtf = None
self.limits = {}#{ '_out' : 8192 }
def chroot(self, p):
self.basedir = p
def sendline(self, data):
self.send(data + '\r\n')
def reply(self, key, *args):
msg = RESPONSE[key] % args
self.sendline(msg)
def handle_origin(self, parent):
logger.debug("setting basedir to %s" % parent.basedir)
self.basedir = parent.basedir
def handle_established(self):
self.processors()
self.reply(WELCOME_MSG, "Welcome to the ftp service")
def handle_io_in(self, data):
# try:
# data = data.decode()
# except UnicodeDecodeError:
# logger.warn("error decoding")
# logger.debug("io_in" + data)
logger.debug(data)
lastsep = data.rfind(b"\n")
if lastsep == -1:
logger.debug("data without linebreak")
return 0
lastsep += 1 # add last \n
logger.debug("input size %i, can do %i" % (len(data), lastsep))
data = data[:lastsep]
lines = data.splitlines(0)
for line in lines:
logger.debug("processing line '%s'" % line)
if len(line) == 0:
continue
space = line.find(b' ')
if space != -1:
cmd = line[:space]
args = (line[space + 1:],)
else:
cmd = line
args = ()
logger.warn("cmd '%s'" % cmd)
r = self.processcmd(cmd, args)
if isinstance(r,tuple):
self.reply(*r)
elif r is not None:
self.reply(r)
return lastsep
def processcmd(self, cmd, args):
logger.debug("cmd '%s'" % cmd)
l = [i.decode() for i in args]
cmd = cmd.upper()
if self.state == self.UNAUTH:
if cmd != b'USER':
return NOT_LOGGED_IN
self.ftp_USER(*args)
elif self.state == self.INAUTH:
if cmd != b'PASS':
return (BAD_CMD_SEQ, "PASS required after USER")
self.ftp_PASS(*l)
method = getattr(self, "ftp_" + cmd.decode(), None)
if method is not None:
return method(*l)
else:
return (CMD_NOT_IMPLMNTD, cmd.decode())
def ftp_USER(self, username):
if not username:
return (SYNTAX_ERR, 'USER requires an argument')
self.state = self.INAUTH
self.user = username
if username == 'anonymous':
return GUEST_NAME_OK_NEED_EMAIL
else:
return (USR_NAME_OK_NEED_PASS, username)
def ftp_PASS(self, password):
if not password:
return (SYNTAX_ERR, 'PASS requires an argument')
self.state = self.AUTHED
if self.user == 'anonymous':
return GUEST_LOGGED_IN_PROCEED
else:
return USR_LOGGED_IN_PROCEED
def ftp_FEAT(self):
self.send('211-Features:\r\n' +
' PASV\r\n' +
' PORT\r\n' +
'211 End\r\n')
return None
def ftp_PORT(self, address):
if self.dtf:
self.dtf.close()
self.dtf = None
if self.dtp:
self.dtp.close()
self.dtp = None
addr = list(map(int, address.split(',')))
ip = '%d.%d.%d.%d' % tuple(addr[:4])
port = addr[4] << 8 | addr[5]
logger.debug("PORT cmd for port %i" % port)
if self.remote.host != ip and "::ffff:" + self.remote.host != ip:
logger.warn("Potential FTP Bounce Scan detected")
return None
self.dtp = ftpdataconnect(ip, port, self)
return None
def ftp_PASV(self):
if self.dtf:
self.dtf.close()
self.dtf = None
if self.dtp:
self.dtp.close()
self.dtp = None
self.dtf = ftpdatalisten(host=self.local.host, port=0, ctrl=self)
host = self.dtf.local.host
port = self.dtf.local.port
self.reply(ENTERING_PASV_MODE, encodeHostPort(host, port))
def ftp_QUIT(self):
self.reply(GOODBYE_MSG)
self.close()
def real_path(self, p=None):
if p:
name = os.path.join(self.cwd, p)
else:
name = self.cwd
if len(name) >= 1 and name[0] == '/':
name = name[1:]
name = os.path.join(self.basedir, name)
name = os.path.normpath(name)
return name
def ftp_RETR(self, p):
if not p:
            return (SYNTAX_ERR_IN_ARGS, 'RETR')
name = self.real_path(p)
if not name.startswith(self.basedir):
return (PERMISSION_DENIED, p)
if os.path.exists(name) and os.path.isfile(name):
if self.dtp:
if self.dtp.status == 'established':
self.reply(FILE_STATUS_OK_OPEN_DATA_CNX)
self.dtp.send_file(name)
else:
logger.warn("dtp state %s %s:%i <-> %s:%i!" %
(self.dtp.status,
self.dtp.remote.host, self.dtp.remote.port,
self.dtp.local.host, self.dtp.local.port))
else:
logger.warn("no dtp on %s:%i <-> %s:%i!" %
(self.dtp.remote.host, self.dtp.remote.port,
self.dtp.local.host, self.dtp.local.port))
else:
return (FILE_NOT_FOUND, p)
def ftp_STOR(self, p):
if not p:
            return (SYNTAX_ERR_IN_ARGS, 'STOR')
file = self.real_path(p)
if os.path.exists(file):
return (PERMISSION_DENIED, p)
if not file.startswith(self.basedir):
return (PERMISSION_DENIED, p)
if self.dtp:
if self.dtp.status == 'established':
self.reply(FILE_STATUS_OK_OPEN_DATA_CNX)
self.dtp.recv_file(file)
else:
logger.warn("dtp state %s %s:%i <-> %s:%i!" %
(self.dtp.status,
self.dtp.remote.host, self.dtp.remote.port,
self.dtp.local.host, self.dtp.local.port))
else:
logger.warn("no dtp on %s:%i <-> %s:%i!" %
(self.dtp.remote.host, self.dtp.remote.port,
self.dtp.local.host, self.dtp.local.port))
def ftp_TYPE(self, t):
if t == 'I':
return (TYPE_SET_OK, 'I')
else:
return (CMD_NOT_IMPLMNTD_FOR_PARAM, t)
def ftp_LIST(self, p=None):
name = self.real_path(p)
if not name.startswith(self.basedir):
return (FILE_NOT_FOUND, p)
if os.path.exists(name):
if self.dtp:
if self.dtp.status == 'established':
self.reply(FILE_STATUS_OK_OPEN_DATA_CNX)
self.dtp.send_list(name, len(name)+1)
else:
logger.warn("dtp state %s %s:%i <-> %s:%i!" %
(self.dtp.status,
self.dtp.remote.host, self.dtp.remote.port,
self.dtp.local.host, self.dtp.local.port))
else:
logger.warn("no dtp on %s:%i <-> %s:%i!" %
(self.dtp.remote.host, self.dtp.remote.port,
self.dtp.local.host, self.dtp.local.port))
else:
return (PERMISSION_DENIED, p)
def ftp_PWD(self):
return (PWD_REPLY, self.cwd)
def ftp_CWD(self, p):
cwd = self.real_path(p)
if not cwd.startswith(self.basedir):
return (FILE_NOT_FOUND, p)
else:
self.cwd = cwd[len(self.basedir):]
if self.cwd == "":
self.cwd = "/"
if os.path.exists(cwd) and os.path.isdir(cwd):
return REQ_FILE_ACTN_COMPLETED_OK
else:
return (PERMISSION_DENIED, p)
def ftp_PBSZ(self, arg):
return CMD_OK
def ftp_SYST(self):
return NAME_SYS_TYPE
def ftp_SIZE(self, p):
if not p:
return (FILE_NOT_FOUND,p)
file = self.real_path(p)
if not file.startswith(self.basedir):
return (FILE_NOT_FOUND, p)
if os.path.exists(file) and os.path.isfile(file):
return (FILE_STATUS, str(stat(file).st_size))
return (FILE_NOT_FOUND,p)
def ftp_MDTM(self, p):
if not p:
return (FILE_NOT_FOUND,p)
file = self.real_path(p)
if not file.startswith(self.basedir):
return (FILE_NOT_FOUND, p)
if os.path.exists(file) and os.path.isfile(file):
return (FILE_STATUS, time.strftime('%Y%m%d%H%M%S', time.gmtime(stat(file).st_mtime)))
return (FILE_NOT_FOUND,p)
def ftp_RMD(self, p):
if not p:
return (FILE_NOT_FOUND,p)
dir = self.real_path(p)
if not dir.startswith(self.basedir):
return (FILE_NOT_FOUND, p)
if os.path.exists(dir) and os.path.isdir(dir):
os.rmdir(dir)
return REQ_FILE_ACTN_COMPLETED_OK
return (FILE_NOT_FOUND,p)
def ftp_MKD(self, p):
if not p:
return (FILE_NOT_FOUND,p)
dir = self.real_path(p)
if not dir.startswith(self.basedir):
return (FILE_NOT_FOUND, p)
if os.path.isdir(dir):
return (PERMISSION_DENIED, p)
os.mkdir(dir)
return REQ_FILE_ACTN_COMPLETED_OK
def handle_error(self, err):
pass
def handle_disconnect(self):
if self.dtf:
self.dtf.close()
self.dtf = None
if self.dtp:
self.dtp.close()
self.dtp = None
return 0
def encodeHostPort(host, port):
numbers = host.split('.') + [str(port >> 8), str(port % 256)]
return ','.join(numbers)
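# Sketch of the PORT/PASV host-port encoding implemented above (values are
# illustrative): the port is split into its high and low bytes, so
# 20021 == 78 * 256 + 53 gives
#
#     encodeHostPort('10.0.0.5', 20021)   # -> '10,0,0,5,78,53'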
from os import stat
from stat import *
import time
import io
class ftpdatacon(connection):
def __init__ (self, ctrl=None):
connection.__init__(self,'tcp')
self.ctrl = ctrl
self.mode = None
def handle_error (self, err):
if self.ctrl:
self.ctrl.reply(CANT_OPEN_DATA_CNX)
def send_list(self, p, rm):
def ls(f, r):
logger.debug("stat %s" % f)
name = f[r:]
s=stat(f)
size = s.st_size
directory = S_ISDIR(s.st_mode)
permissions = S_IMODE(s[ST_MODE])
hardlinks = s.st_nlink
modified = s.st_mtime
owner = s.st_uid
group = s.st_gid
def formatMode(mode):
return ''.join([mode & (256 >> n) and 'rwx'[n % 3] or '-' for n in range(9)])
def formatDate(mtime):
now = time.gmtime()
info = {
'month': mtime.tm_mon,
'day': mtime.tm_mday,
'year': mtime.tm_year,
'hour': mtime.tm_hour,
'minute': mtime.tm_min
}
if now.tm_year != mtime.tm_year:
return '%(month)s %(day)02d %(year)5d' % info
else:
return '%(month)s %(day)02d %(hour)02d:%(minute)02d' % info
format = ('%(directory)s%(permissions)s%(hardlinks)4d '
'%(owner)-9s %(group)-9s %(size)15d %(date)12s '
'%(name)s')
return format % {
'directory': directory and 'd' or '-',
'permissions': formatMode(permissions),
'hardlinks': hardlinks,
'owner': owner,
'group': group,
'size': size,
'date': formatDate(time.gmtime(modified)),
'name': name}
self.mode = 'list'
if os.path.isdir(p):
self.data = [ls(os.path.join(p,f), rm) for f in os.listdir(p)]
elif os.path.isfile(p):
            self.data = [ls(p, rm)]
logger.debug("p %s len %i" % (p, len(self.data)) )
if len(self.data) > 0:
self.off = 0
self.off = self.off + 1
self.send(self.data[self.off-1] + '\r\n')
else:
self.close()
if self.ctrl:
self.ctrl.dtp = None
self.ctrl.reply(TXFR_COMPLETE_OK)
def recv_file(self, p):
logger.debug(p)
self.mode = 'recv_file'
self.file = io.open(p, 'wb+')
print(self.file)
def send_file(self, p):
self.mode = 'file'
self.file = io.open(p, 'rb')
w = self.file.read(1024)
self.send(w)
if len(w) < 1024:
self.file.close()
self.mode = None
self.close()
if self.ctrl:
self.ctrl.reply(TXFR_COMPLETE_OK)
self.ctrl.dtp = None
def handle_io_in(self, data):
if self.mode == "recv_file":
self.file.write(data)
return len(data)
def handle_io_out(self):
logger.debug("io_out")
if self.mode == 'list':
if self.off < len(self.data):
self.off = self.off + 1
self.send(self.data[self.off - 1] + '\r\n')
else:
self.close()
if self.ctrl:
self.ctrl.dtp = None
self.ctrl.reply(TXFR_COMPLETE_OK)
elif self.mode == 'file':
w = self.file.read(1024)
self.send(w)
if len(w) < 1024:
self.mode = None
self.close()
self.file.close()
if self.ctrl:
self.ctrl.dtp = None
self.ctrl.reply(TXFR_COMPLETE_OK)
def handle_disconnect(self):
if self.ctrl:
if self.ctrl.dtf:
self.ctrl.dtf = None
if self.ctrl.dtp:
self.ctrl.dtp = None
if self.mode == 'file' and self.file:
self.file.close()
if self.mode == 'recv_file' and self.file:
self.file.close()
self.ctrl.reply(TXFR_COMPLETE_OK)
return 0
def handle_origin(self, parent):
pass
# if parent.limits._out:
# self._out.limit = parent.limits._out
class ftpdataconnect(ftpdatacon):
def __init__ (self, host, port, ctrl):
ftpdatacon.__init__(self,ctrl)
self.connect(host,port)
def handle_established(self):
logger.debug("DATA connection established")
self.ctrl.reply(ENTERING_PORT_MODE)
class ftpdatalisten(ftpdatacon):
def __init__ (self, host=None, port=None, ctrl=None):
ftpdatacon.__init__(self,ctrl)
if host is not None:
self.bind(host,port)
self.listen(1)
if ctrl.limits:
self._out.throttle = ctrl.limits['_out']
def handle_established(self):
logger.debug("DATA connection established")
def handle_origin(self, parent):
ftpdatacon.handle_origin(self,parent)
logger.debug("Meeting parent")
self.ctrl = parent.ctrl
self.ctrl.dtp = self
self.ctrl.dtf = None
parent.ctrl = None
parent.close()
# ftp client
import re
import random
_linesep_regexp = re.compile(b"\r?\n")
class ftpctrl(connection):
def __init__(self, ftp):
connection.__init__(self, 'tcp')
self.ftp = ftp
self.state = 'NONE'
self.timeouts.sustain = 60
def handle_established(self):
logger.debug("FTP CTRL connection established")
def handle_io_in(self, data):
dlen = len(data)
lines = _linesep_regexp.split(data)#.decode('UTF-8'))
remain = lines.pop()
dlen = dlen - len(remain)
for line in lines:
logger.debug("FTP LINE: " + str(line))
c = int(line[:3])
s = line[3:4]
if self.state == 'NONE':
if c == 220 and s != b'-':
self.cmd('USER ' + self.ftp.user)
self.state = 'USER'
elif self.state == 'USER' or self.state == 'PASS':
if self.state == 'USER' and c == 331 and s != b'-':
self.cmd('PASS ' + self.ftp.passwd)
self.state = 'PASS'
if c == 230 and s != b'-':
if self.ftp.mode == 'binary':
self.cmd('TYPE I')
self.state = 'TYPE'
else:
port = self.ftp.makeport()
self.cmd('PORT ' + port)
self.state = 'PORT'
elif self.state == 'TYPE':
if (c >= 200 and c < 300) and s != b'-':
port = self.ftp.makeport()
self.cmd('PORT ' + port)
self.state = 'PORT'
elif self.state == 'PORT':
if c == 200 and s != b'-':
self.cmd('RETR ' + self.ftp.file)
self.state = 'RETR'
else:
logger.warn("PORT command failed")
elif self.state == 'RETR':
if (c > 200 and c < 300) and s != b'-':
self.cmd('QUIT')
self.state = 'QUIT'
self.ftp.ctrldone()
return dlen
def cmd(self, cmd):
logger.debug("FTP CMD: '" + cmd +"'")
self.send(cmd + '\r\n')
def handle_error(self, err):
self.ftp.fail()
return False
def handle_disconnect(self):
if self.state != 'QUIT':
self.ftp.fail()
return False
def handle_timeout_idle(self):
return False
def handle_timeout_sustain(self):
return False
class ftpdata(connection):
def __init__(self, ftp=None):
connection.__init__(self, 'tcp')
self.ftp = ftp
self.timeouts.listen = 10
def handle_established(self):
logger.debug("FTP DATA established")
self.timeouts.idle = 30
self.fileobj = tempfile.NamedTemporaryFile(delete=False, prefix='ftp-', suffix=g_dionaea.config(
)['downloads']['tmp-suffix'], dir=g_dionaea.config()['downloads']['dir'])
def handle_origin(self, parent):
self.ftp = parent.ftp
self.ftp.dataconn = self
self.ftp.datalistener.close()
self.ftp.datalistener = None
def handle_io_in(self, data):
self.fileobj.write(data)
return len(data)
def handle_timeout_idle(self):
self.fileobj.unlink(self.fileobj.name)
self.fileobj = None
self.ftp.fail()
return False
def handle_disconnect(self):
logger.debug("received %i bytes" %(self._in.accounting.bytes))
if hasattr(self, 'fileobj')and self.fileobj != None:
# print(type(self.file))
# print(self.file)
self.fileobj.close()
icd = incident("dionaea.download.complete")
icd.path = self.fileobj.name
icd.con = self.ftp.con
icd.url = self.ftp.url
icd.report()
self.fileobj.unlink(self.fileobj.name)
self.ftp.dataconn = None
self.ftp.datadone()
return False
def handle_timeout_listen(self):
self.ftp.fail()
return False
class ftp:
def __init__(self):
self.ctrl = ftpctrl(self)
def download(self, con, user, passwd, host, port, file, mode, url):
self.user = user
self.passwd = passwd
self.host = host
self.port = port
self.file = file
self.mode = mode
self.con = con
self.url = url
if con:
self.local = con.local.host
self.ctrl.bind(self.local, 0)
self.con.ref()
self.ctrl.connect(host, port)
self.dataconn = None
self.datalistener = None
if con:
i=incident("dionaea.connection.link")
i.parent = con
i.child = self.ctrl
i.report()
def makeport(self):
self.datalistener = ftpdata(ftp=self)
try:
portrange = g_dionaea.config()['modules']['python'][
'ftp']['active-ports']
(minport, maxport) = portrange.split('-')
minport = int(minport)
maxport = int(maxport)
except:
minport = 62001
maxport = 63000
try:
# for NAT setups
host = g_dionaea.config()['modules']['python'][
'ftp']['active-host']
if host == '0.0.0.0':
host = self.ctrl.local.host
logger.info("datalisten host %s" % host)
else:
import socket
host = socket.gethostbyname(host)
logger.info("resolved host %s" % host)
except:
host = self.ctrl.local.host
logger.info("except datalisten host %s" % self.ctrl.local.host)
# NAT, use a port range which is forwarded to your honeypot
ports = list(
filter(lambda port: ((port >> 4) & 0xf) != 0, range(minport, maxport)))
random.shuffle(ports)
port = None
for port in ports:
self.datalistener.bind(self.ctrl.local.host, port)
if self.datalistener.listen() == True:
port = self.datalistener.local.port
i=incident("dionaea.connection.link")
i.parent = self.ctrl
i.child = self.datalistener
i.report()
break
hbytes = host.split('.')
pbytes = [repr(port//256), repr(port%256)]
bytes = hbytes + pbytes
port = ','.join(bytes)
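        # Resulting PORT argument is "h1,h2,h3,h4,p1,p2", e.g. host 10.0.0.5
        # listening on 62001 becomes "10,0,0,5,242,49" (62001 == 242*256 + 49).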
logger.debug("PORT CMD %s" % (port))
return port
def ctrldone(self):
logger.info("SUCCESS DOWNLOADING FILE")
self.done()
def datadone(self):
logger.info("FILE received")
self.done()
def done(self):
if self.ctrl and self.ctrl.state == 'QUIT' and self.dataconn == None:
logger.info("proceed processing file!")
self.ctrl = None
self.finish()
def fail(self):
self.finish()
def finish(self):
if self.con:
self.con.unref()
self.con = None
        if self.ctrl is not None:
            self.ctrl.close()
            self.ctrl = None
        if self.datalistener is not None:
            self.datalistener.close()
            self.datalistener = None
        if self.dataconn is not None:
            self.dataconn.close()
            self.dataconn = None
class ftpdownloadhandler(ihandler):
def __init__(self, path):
logger.debug("%s ready!" % (self.__class__.__name__))
ihandler.__init__(self, path)
def handle_incident(self, icd):
url = icd.url
p = urllib.parse.urlsplit(url)
        logger.debug("parsed url: %s" % (p,))
if p.scheme == 'ftp':
logger.info("do download")
try:
con = icd.con
except AttributeError:
con = None
if hasattr(icd,'ftpmode'):
ftpmode = icd.ftpmode
else:
ftpmode = 'binary'
f = ftp()
f.download(
con, p.username, p.password, p.hostname, p.port, p.path, ftpmode, url)
|
GovCERT-CZ/dionaea
|
modules/python/scripts/ftp.py
|
Python
|
gpl-2.0
| 32,082 | 0.005143 |
import collections
import numpy as np
import sympy
from sym2num import function, var
def reload_all():
"""Reload modules for testing."""
import imp
for m in (var, function):
imp.reload(m)
if __name__ == '__main__':
reload_all()
g = var.UnivariateCallable('g')
h = var.UnivariateCallable('h')
from sympy.abc import t, w, x, y, z, m
output = [x**2 + sympy.erf(x) + g(x),
sympy.cos(y) + 2*t + sympy.GoldenRatio,
z*sympy.sqrt(sympy.sin(w)+2)*h(x, 2)]
obj = {'data': [w], 'extra': {'other': [m, z]}, 'gg': g}
arguments = function.Arguments(self=obj, t=t, state=[x, y], H=h)
f = function.FunctionPrinter('f', output, arguments)
print(f.print_def())
sf = function.SymbolicSubsFunction(function.Arguments(t=t, m=[x,y]), t**2+x)
print( "\n" + "*" * 80 + "\n")
print(sf(w**4, [2*x,3*z]))
|
dimasad/sym2num
|
examples/function_example.py
|
Python
|
mit
| 896 | 0.007813 |
#!/usr/bin/python
#======================================================================
#
# Project : hpp_IOStressTest
# File : IOST_WRun_CTRL.py
# Date : Oct 25, 2016
# Author : HuuHoang Nguyen
# Contact : hhnguyen@apm.com
# : hoangnh.hpp@gmail.com
# License : MIT License
# Copyright : 2016
# Description: The hpp_IOStressTest is under the MIT License, a copy of license which may be found in LICENSE
#
#======================================================================
import io
import os
import sys
import time
from IOST_Prepare import IOST_Prepare
from IOST_Config import *
from IOST_Basic import *
from IOST_Host import *
from IOST_Terminal import *
import gtk
import gobject
import gtk.glade
import vte
#======================================================================
IOST_WRun_CTRL_Debug_Enable = 1
#======================================================================
class IOST_WRun_CTRL():
"""
"""
#----------------------------------------------------------------------
def __init__(self, glade_filename, window_name, builder=None):
""
self.IOST_WRun_CTRL_window = window_name
self.WRun_Host = IOST_Host("", "IOST_Console")
# print self.WRun_Host.name
if not builder:
self.WRun_CTRL_Builder = gtk.Builder()
self.WRun_CTRL_Builder.add_from_file(glade_filename)
self.WRun_CTRL_Builder.connect_signals(self)
else:
self.WRun_CTRL_Builder = builder
def WRun_GetCTRL_Obj(self, window_name):
""
self.IOST_Objs[window_name][window_name+"_NoteBook"] = self.WRun_CTRL_Builder.get_object(self.IOST_Objs[window_name]["_NoteBook"])
self.IOST_Objs[window_name][window_name+"_Terminal_ScrolledWindow"] = self.WRun_CTRL_Builder.get_object(self.IOST_Objs[window_name]["_Terminal_ScrolledWindow"])
self.IOST_Objs[window_name][window_name+"_Terminal_L"] = self.WRun_CTRL_Builder.get_object(self.IOST_Objs[window_name]["_Terminal_L"])
def WRun_InitCTRL_Obj(self, window_name):
""
self.IOST_Objs[window_name][window_name+"_Terminal_ScrolledWindow"].connect('button_press_event', lambda *args: True)
self.WRun_Term = IOST_Terminal(self.IOST_Objs[window_name][window_name+"_NoteBook"], self.WRun_Host.name)
self.IOST_Objs[window_name][window_name+"_Terminal_ScrolledWindow"].add(self.WRun_Term.IOST_vte)
def WRun_IOST_VTE_show(self):
self.WRun_Term.IOST_vte.show()
|
HPPTECH/hpp_IOSTressTest
|
Refer/IOST_OLD_SRC/IOST_0.17/Libs/IOST_WRun_CTRL.py
|
Python
|
mit
| 2,555 | 0.009785 |
#!/usr/bin/env jython
from __future__ import with_statement
from contextlib import contextmanager
import logging
from plugins import __all__
log = logging.getLogger('kahuna')
class PluginManager:
""" Manages available plugins """
def __init__(self):
""" Initialize the plugin list """
self.__plugins = {}
def load_plugin(self, plugin_name):
""" Loads a single plugin given its name """
if not plugin_name in __all__:
raise KeyError("Plugin " + plugin_name + " not found")
try:
plugin = self.__plugins[plugin_name]
except KeyError:
# Load the plugin only if not loaded yet
log.debug("Loading plugin: %s" % plugin_name)
module = __import__("plugins." + plugin_name, fromlist=["plugins"])
plugin = module.load()
self.__plugins[plugin_name] = plugin
return plugin
def call(self, plugin_name, command_name, args):
""" Encapsulate the call into a context already loaded. """
try:
plugin = self.load_plugin(plugin_name)
        except KeyError:
            # Plugin not found, print generic help
            self.help_all()
            return
        if not command_name:
            self.help(plugin)
        else:
            try:
                command = plugin._commands()[command_name]
            except KeyError:
                # Command not found in plugin. Print only plugin help
                self.help(plugin)
                return
            with opencontext(plugin):
                return command(args)
def help(self, plugin):
""" Prints the help for the given plugin """
commands = plugin._commands()
plugin_name = plugin.__module__.split('.')[-1]
print "%s" % plugin.__doc__
for command in sorted(commands.iterkeys()):
print " %s %s\t%s" % (plugin_name, command,
commands[command].__doc__)
def help_all(self):
""" Prints the help for all registered plugins """
for name in sorted(__all__):
plugin = self.load_plugin(name)
self.help(plugin)
print
@contextmanager
def opencontext(plugin):
""" Loads the context each plugin needs to be initialized
in order to be executed """
plugin._load_context()
yield
plugin._close_context()
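# Usage sketch (plugin and command names here are hypothetical):
#   pm = PluginManager()
#   pm.call('vm', 'list', [])   # imports plugins/vm.py and runs its 'list' command
#   pm.help_all()               # prints the help of every registered plugin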
|
nacx/kahuna
|
kahuna/pluginmanager.py
|
Python
|
mit
| 2,347 | 0.000852 |
# -*- coding: utf-8 -*-
# Copyright 2016, 2017 Kevin Reid and the ShinySDR contributors
#
# This file is part of ShinySDR.
#
# ShinySDR is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ShinySDR is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ShinySDR. If not, see <http://www.gnu.org/licenses/>.
"""
Minimal units library.
Used only for expressing units for display. Does not provide calculation or dimensions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from collections import namedtuple as _namedtuple
from zope.interface import implementer as _implements
from shinysdr.i.json import IJsonSerializable as _IJsonSerializable
__all__ = [] # appended later
class Unit(_namedtuple('Unit', [
'symbol',
'si_prefix_ok'])): # TODO allow requesting binary prefixes?
_implements(_IJsonSerializable)
def to_json(self):
return {
'type': 'Unit',
'symbol': self.symbol,
'si_prefix_ok': self.si_prefix_ok
}
def __str__(self):
return self.symbol
__all__.append('Unit')
# TODO: reflectively put units into __all__
none = Unit('', True)
s = Unit('s', True)
degree = Unit('°', False) # degree of angle
degC = Unit('°C', False)
degF = Unit('°F', False)
dB = Unit('dB', False)
dBm = Unit('dBm', False)
dBFS = Unit('dBFS', False)
Hz = Unit('Hz', True)
MHz = Unit('MHz', False) # TODO: Remove or refine this when si_prefix_ok is actually used
ppm = Unit('ppm', False)
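# Example (sketch): Hz.to_json() returns
# {'type': 'Unit', 'symbol': 'Hz', 'si_prefix_ok': True}, and str(Hz) == 'Hz'.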
|
kpreid/shinysdr
|
shinysdr/units.py
|
Python
|
gpl-3.0
| 1,966 | 0.004585 |
# -*- coding: utf-8 -*-
'''
CloudStack Cloud Module
=======================
The CloudStack cloud module is used to control access to a CloudStack based
Public Cloud.
:depends: libcloud >= 0.15
Use of this module requires the ``apikey``, ``secretkey``, ``host`` and
``path`` parameters.
.. code-block:: yaml
my-cloudstack-cloud-config:
apikey: <your api key >
secretkey: <your secret key >
host: localhost
path: /client/api
driver: cloudstack
'''
# pylint: disable=invalid-name,function-redefined
# Import python libs
from __future__ import absolute_import
import pprint
import logging
# Import salt cloud libs
import salt.config as config
from salt.cloud.libcloudfuncs import * # pylint: disable=redefined-builtin,wildcard-import,unused-wildcard-import
from salt.utils import namespaced_function
from salt.exceptions import SaltCloudSystemExit
# CloudStackNetwork will be needed during creation of a new node
# pylint: disable=import-error
try:
from libcloud.compute.drivers.cloudstack import CloudStackNetwork
HAS_LIBS = True
except ImportError:
HAS_LIBS = False
# Get logging started
log = logging.getLogger(__name__)
# Redirect CloudStack functions to this module namespace
get_node = namespaced_function(get_node, globals())
get_size = namespaced_function(get_size, globals())
get_image = namespaced_function(get_image, globals())
avail_locations = namespaced_function(avail_locations, globals())
avail_images = namespaced_function(avail_images, globals())
avail_sizes = namespaced_function(avail_sizes, globals())
script = namespaced_function(script, globals())
list_nodes = namespaced_function(list_nodes, globals())
list_nodes_full = namespaced_function(list_nodes_full, globals())
list_nodes_select = namespaced_function(list_nodes_select, globals())
show_instance = namespaced_function(show_instance, globals())
__virtualname__ = 'cloudstack'
# Only load in this module if the CLOUDSTACK configurations are in place
def __virtual__():
'''
Set up the libcloud functions and check for CloudStack configurations.
'''
if get_configured_provider() is False:
return False
if get_dependencies() is False:
return False
return __virtualname__
def get_configured_provider():
'''
Return the first configured instance.
'''
return config.is_provider_configured(
__opts__,
__active_provider_name__ or __virtualname__,
('apikey', 'secretkey', 'host', 'path')
)
def get_dependencies():
'''
Warn if dependencies aren't met.
'''
return config.check_driver_dependencies(
__virtualname__,
{'libcloud': HAS_LIBS}
)
def get_conn():
'''
Return a conn object for the passed VM data
'''
driver = get_driver(Provider.CLOUDSTACK)
verify_ssl_cert = config.get_cloud_config_value('verify_ssl_cert',
get_configured_provider(),
__opts__,
default=True,
search_global=False)
if verify_ssl_cert is False:
try:
import libcloud.security
libcloud.security.VERIFY_SSL_CERT = False
except (ImportError, AttributeError):
raise SaltCloudSystemExit(
'Could not disable SSL certificate verification. '
'Not loading module.'
)
return driver(
key=config.get_cloud_config_value(
'apikey', get_configured_provider(), __opts__, search_global=False
),
secret=config.get_cloud_config_value(
'secretkey', get_configured_provider(), __opts__,
search_global=False
),
secure=config.get_cloud_config_value(
'secure', get_configured_provider(), __opts__,
default=True, search_global=False
),
host=config.get_cloud_config_value(
'host', get_configured_provider(), __opts__, search_global=False
),
path=config.get_cloud_config_value(
'path', get_configured_provider(), __opts__, search_global=False
),
port=config.get_cloud_config_value(
'port', get_configured_provider(), __opts__,
default=None, search_global=False
)
)
def get_location(conn, vm_):
'''
Return the node location to use
'''
locations = conn.list_locations()
# Default to Dallas if not otherwise set
loc = config.get_cloud_config_value('location', vm_, __opts__, default=2)
for location in locations:
if str(loc) in (str(location.id), str(location.name)):
return location
def get_password(vm_):
'''
Return the password to use
'''
return config.get_cloud_config_value(
'password', vm_, __opts__, default=config.get_cloud_config_value(
'passwd', vm_, __opts__, search_global=False
), search_global=False
)
def get_key():
'''
Returns the ssh private key for VM access
'''
return config.get_cloud_config_value(
'private_key', get_configured_provider(), __opts__, search_global=False
)
def get_keypair(vm_):
'''
Return the keypair to use
'''
keypair = config.get_cloud_config_value('keypair', vm_, __opts__)
if keypair:
return keypair
else:
return False
def get_ip(data):
'''
Return the IP address of the VM
If the VM has public IP as defined by libcloud module then use it
Otherwise try to extract the private IP and use that one.
'''
try:
ip = data.public_ips[0]
except Exception:
ip = data.private_ips[0]
return ip
def get_networkid(vm_):
'''
Return the networkid to use, only valid for Advanced Zone
'''
networkid = config.get_cloud_config_value('networkid', vm_, __opts__)
if networkid is not None:
return networkid
else:
return False
def get_project(conn, vm_):
'''
Return the project to use.
'''
try:
projects = conn.ex_list_projects()
except AttributeError:
# with versions <0.15 of libcloud this is causing an AttributeError.
log.warning('Cannot get projects, you may need to update libcloud to 0.15 or later')
return False
projid = config.get_cloud_config_value('projectid', vm_, __opts__)
if not projid:
return False
for project in projects:
if str(projid) in (str(project.id), str(project.name)):
return project
log.warning("Couldn't find project {0} in projects".format(projid))
return False
def create(vm_):
'''
Create a single VM from a data dict
'''
try:
# Check for required profile parameters before sending any API calls.
if vm_['profile'] and config.is_profile_configured(__opts__,
__active_provider_name__ or 'cloudstack',
vm_['profile'],
vm_=vm_) is False:
return False
except AttributeError:
pass
# Since using "provider: <provider-engine>" is deprecated, alias provider
# to use driver: "driver: <provider-engine>"
if 'provider' in vm_:
vm_['driver'] = vm_.pop('provider')
salt.utils.cloud.fire_event(
'event',
'starting create',
'salt/cloud/{0}/creating'.format(vm_['name']),
{
'name': vm_['name'],
'profile': vm_['profile'],
'provider': vm_['driver'],
},
transport=__opts__['transport']
)
log.info('Creating Cloud VM {0}'.format(vm_['name']))
conn = get_conn()
kwargs = {
'name': vm_['name'],
'image': get_image(conn, vm_),
'size': get_size(conn, vm_),
'location': get_location(conn, vm_),
}
if get_keypair(vm_) is not False:
kwargs['ex_keyname'] = get_keypair(vm_)
if get_networkid(vm_) is not False:
kwargs['networkids'] = get_networkid(vm_)
kwargs['networks'] = ( # The only attr that is used is 'id'.
CloudStackNetwork(None, None, None,
kwargs['networkids'],
None, None),
)
if get_project(conn, vm_) is not False:
kwargs['project'] = get_project(conn, vm_)
salt.utils.cloud.fire_event(
'event',
'requesting instance',
'salt/cloud/{0}/requesting'.format(vm_['name']),
{'kwargs': {'name': kwargs['name'],
'image': kwargs['image'].name,
'size': kwargs['size'].name}},
transport=__opts__['transport']
)
displayname = cloudstack_displayname(vm_)
if displayname:
kwargs['ex_displayname'] = displayname
else:
kwargs['ex_displayname'] = kwargs['name']
volumes = {}
ex_blockdevicemappings = block_device_mappings(vm_)
if ex_blockdevicemappings:
for ex_blockdevicemapping in ex_blockdevicemappings:
if 'VirtualName' not in ex_blockdevicemapping:
ex_blockdevicemapping['VirtualName'] = '{0}-{1}'.format(vm_['name'], len(volumes))
salt.utils.cloud.fire_event(
'event',
'requesting volume',
'salt/cloud/{0}/requesting'.format(ex_blockdevicemapping['VirtualName']),
{'kwargs': {'name': ex_blockdevicemapping['VirtualName'],
'device': ex_blockdevicemapping['DeviceName'],
'size': ex_blockdevicemapping['VolumeSize']}},
)
try:
volumes[ex_blockdevicemapping['DeviceName']] = conn.create_volume(
ex_blockdevicemapping['VolumeSize'],
ex_blockdevicemapping['VirtualName']
)
except Exception as exc:
log.error(
'Error creating volume {0} on CLOUDSTACK\n\n'
'The following exception was thrown by libcloud when trying to '
'requesting a volume: \n{1}'.format(
ex_blockdevicemapping['VirtualName'], exc
),
# Show the traceback if the debug logging level is enabled
exc_info_on_loglevel=logging.DEBUG
)
return False
else:
ex_blockdevicemapping = {}
try:
data = conn.create_node(**kwargs)
except Exception as exc:
log.error(
'Error creating {0} on CLOUDSTACK\n\n'
'The following exception was thrown by libcloud when trying to '
'run the initial deployment: \n{1}'.format(
vm_['name'], str(exc)
),
# Show the traceback if the debug logging level is enabled
exc_info_on_loglevel=logging.DEBUG
)
return False
for device_name in six.iterkeys(volumes):
try:
conn.attach_volume(data, volumes[device_name], device_name)
except Exception as exc:
log.error(
'Error attaching volume {0} on CLOUDSTACK\n\n'
'The following exception was thrown by libcloud when trying to '
'attach a volume: \n{1}'.format(
ex_blockdevicemapping.get('VirtualName', 'UNKNOWN'), exc
),
# Show the traceback if the debug logging level is enabled
exc_info=log.isEnabledFor(logging.DEBUG)
)
return False
ssh_username = config.get_cloud_config_value(
'ssh_username', vm_, __opts__, default='root'
)
vm_['ssh_host'] = get_ip(data)
vm_['password'] = data.extra['password']
vm_['key_filename'] = get_key()
ret = salt.utils.cloud.bootstrap(vm_, __opts__)
ret.update(data.__dict__)
if 'password' in data.extra:
del data.extra['password']
log.info('Created Cloud VM {0[name]!r}'.format(vm_))
log.debug(
'{0[name]!r} VM creation details:\n{1}'.format(
vm_, pprint.pformat(data.__dict__)
)
)
salt.utils.cloud.fire_event(
'event',
'created instance',
'salt/cloud/{0}/created'.format(vm_['name']),
{
'name': vm_['name'],
'profile': vm_['profile'],
'provider': vm_['driver'],
},
transport=__opts__['transport']
)
return ret
def destroy(name, conn=None, call=None):
'''
Delete a single VM, and all of its volumes
'''
if call == 'function':
raise SaltCloudSystemExit(
'The destroy action must be called with -d, --destroy, '
'-a or --action.'
)
salt.utils.cloud.fire_event(
'event',
'destroying instance',
'salt/cloud/{0}/destroying'.format(name),
{'name': name},
)
if not conn:
conn = get_conn() # pylint: disable=E0602
node = get_node(conn, name)
if node is None:
log.error('Unable to find the VM {0}'.format(name))
volumes = conn.list_volumes(node)
if volumes is None:
log.error('Unable to find volumes of the VM {0}'.format(name))
# TODO add an option like 'delete_sshkeys' below
for volume in volumes:
if volume.extra['volume_type'] != 'DATADISK':
log.info('Ignoring volume type {0}: {1}'.format(
volume.extra['volume_type'], volume.name)
)
continue
log.info('Detaching volume: {0}'.format(volume.name))
salt.utils.cloud.fire_event(
'event',
'detaching volume',
'salt/cloud/{0}/detaching'.format(volume.name),
{'name': volume.name},
)
if not conn.detach_volume(volume):
log.error('Failed to Detach volume: {0}'.format(volume.name))
return False
log.info('Detached volume: {0}'.format(volume.name))
salt.utils.cloud.fire_event(
'event',
'detached volume',
'salt/cloud/{0}/detached'.format(volume.name),
{'name': volume.name},
)
log.info('Destroying volume: {0}'.format(volume.name))
salt.utils.cloud.fire_event(
'event',
'destroying volume',
'salt/cloud/{0}/destroying'.format(volume.name),
{'name': volume.name},
)
if not conn.destroy_volume(volume):
log.error('Failed to Destroy volume: {0}'.format(volume.name))
return False
log.info('Destroyed volume: {0}'.format(volume.name))
salt.utils.cloud.fire_event(
'event',
'destroyed volume',
'salt/cloud/{0}/destroyed'.format(volume.name),
{'name': volume.name},
)
log.info('Destroying VM: {0}'.format(name))
ret = conn.destroy_node(node)
if not ret:
log.error('Failed to Destroy VM: {0}'.format(name))
return False
log.info('Destroyed VM: {0}'.format(name))
# Fire destroy action
event = salt.utils.event.SaltEvent('master', __opts__['sock_dir'])
salt.utils.cloud.fire_event(
'event',
'destroyed instance',
'salt/cloud/{0}/destroyed'.format(name),
{'name': name},
)
if __opts__['delete_sshkeys'] is True:
salt.utils.cloud.remove_sshkey(node.public_ips[0])
return True
def block_device_mappings(vm_):
'''
Return the block device mapping:
::
[{'DeviceName': '/dev/sdb', 'VirtualName': 'ephemeral0'},
{'DeviceName': '/dev/sdc', 'VirtualName': 'ephemeral1'}]
'''
return config.get_cloud_config_value(
'block_device_mappings', vm_, __opts__, search_global=True
)
def cloudstack_displayname(vm_):
'''
Return display name of VM:
::
"minion1"
'''
return config.get_cloud_config_value(
'cloudstack_displayname', vm_, __opts__, search_global=True
)
|
smallyear/linuxLearn
|
salt/salt/cloud/clouds/cloudstack.py
|
Python
|
apache-2.0
| 16,091 | 0.000559 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# -*- Author: ClarkYAN -*-
from get_connection import *
from get_key import *
import Tkinter
import tkMessageBox
class mainFrame:
def __init__(self):
self.root = Tkinter.Tk()
self.root.title('Secure Protocol Systems')
self.root.geometry('600x300')
self.root.resizable(width=False, height=True)
# self.scroll = Tkinter.Scrollbar(self.root).pack(side=Tkinter.RIGHT)
# TOP
self.frm = Tkinter.Frame(self.root)
labelTitle = Tkinter.Label(self.root, text="Data Owner 1", font=("Arial", 26))
labelTitle.pack()
# LEFT
self.frm_l = Tkinter.Frame(self.frm)
labelControl = Tkinter.Label(self.frm_l, text="Control Panel", font=("Arial", 20), height=2)
labelControl.pack()
buttonSetUp = Tkinter.Button(self.frm_l, text="Set Up Connection", font=("Arial", 18), height=2,
command=setUpConnection)
buttonSetUp.pack(side=Tkinter.LEFT)
buttonReceiveKey = Tkinter.Button(self.frm_l, text="Receive Public Key", font=("Arial", 18), height=2, command=getKey)
buttonReceiveKey.pack(side=Tkinter.LEFT)
self.frm_l.pack(side=Tkinter.TOP)
self.frm_r = Tkinter.Frame(self.frm)
buttonEncrypted = Tkinter.Button(self.frm_r, text="Encrypting Original data", font=("Arial", 18), height=2)
buttonEncrypted.pack(side=Tkinter.LEFT)
buttonSend = Tkinter.Button(self.frm_r, text="Send to Cloud", font=("Arial", 18), height=2)
buttonSend.pack(side=Tkinter.LEFT)
self.frm_r.pack(side=Tkinter.BOTTOM)
self.frm.pack(side=Tkinter.TOP)
self.root.mainloop()
def setUpConnection():
url = 'http://127.0.0.1:4000/'
sender = 'data_owner_1'
result = str(set_up_connection(url, sender))
tkMessageBox.showinfo("Recent Event", result)
print result
def getKey():
url = 'http://127.0.0.1:5000/key'
sender = 'data_owner_1'
result = send_info(url, sender)
tkMessageBox.showinfo("Recent Event", result)
print result
def main():
db1 = mainFrame()
if __name__ == "__main__":
main()
|
ClarkYan/msc-thesis
|
code/data_owner_1/interface.py
|
Python
|
apache-2.0
| 2,180 | 0.002752 |
from os.path import join, dirname
from setuptools import setup
setup(
name = 'xmppgcm',
packages = ['xmppgcm'], # this must be the same as the name above
version = '0.2.3',
description = 'Client Library for Firebase Cloud Messaging using XMPP',
long_description = open(join(dirname(__file__), 'README.txt')).read(),
install_requires=['sleekxmpp',],
author = 'Winster Jose',
author_email = 'wtjose@gmail.com',
url = 'https://github.com/winster/xmppgcm',
keywords = ['gcm', 'fcm', 'xmpp', 'xmppgcm', 'xmppfcm'], # arbitrary keywords
classifiers = [],
)
|
gamikun/xmppgcm
|
setup.py
|
Python
|
apache-2.0
| 574 | 0.04007 |
#!/usr/bin/env python
"""
Service Subpackage
"""
from . import test
from . import detect
from . import device
from . import object
from . import cov
from . import file
|
JoelBender/bacpypes
|
py34/bacpypes/service/__init__.py
|
Python
|
mit
| 171 | 0 |
#Pizza please
import pyaudiogame
from pyaudiogame import storage
spk = pyaudiogame.speak
MyApp = pyaudiogame.App("Pizza Please")
storage.screen = ["start"]
storage.toppings = ["cheese", "olives", "mushrooms", "Pepperoni", "french fries"]
storage.your_toppings = ["cheese"]
storage.did_run = False
def is_number(number, topping_list):
"""Will check that what the user enters is really a number and not a letter, also that it is within our list"""
if number in "0123456789":
number = int(number)
if number <= len(topping_list)-1:
return number
def say_message(message):
"""Will check if the message has been read and if so, passes. Else, it will read the message"""
if not storage.did_run:
spk(message)
storage.did_run = True
def add_topping(key):
"""Will add a topping to your pizza"""
number = is_number(key, storage.toppings)
if number or number == 0:
storage.your_toppings.append(storage.toppings[number])
spk("You added %s to your pizza. Your pizza currently has %s on top" % (storage.toppings[number], storage.your_toppings))
def remove_topping(key):
"""Removes toppings from the pizza"""
number = is_number(key, storage.your_toppings)
if number or number == 0:
t = storage.your_toppings.pop(number)
if t == "cheese":
spk("You can't remove cheese, what are you, Italian?")
storage.your_toppings.insert(0, "cheese")
else:
spk("You removed %s from your pizza. Now your pizza has %s on top" % (t, storage.your_toppings))
def logic(actions):
"""Press a and d to switch from adding and removing toppings, press 0-9 to deal with the toppings and press space to eat the pizza"""
key = actions['key']
if key == "d":
spk("Press a number to remove a topping from your pizza, press a to add toppings again")
storage.screen[0] = "remove"
storage.did_run = False
elif key == "a":
spk("Press a number to add a topping to your pizza. Press d to remove a topping you don't like")
storage.screen[0] = "add"
storage.did_run = False
elif key == "space":
spk("You sit down to enjoy a yummy pizza. You eat... eat... eat... eat... and are finally done. That was good! Now it's time for another!")
storage.your_toppings = ['cheese']
storage.did_run = False
elif storage.screen[0] == "start":
spk("Welcom to pizza madness! Here you can build your own pizza to eat! Press a to add toppings, press d to remove them and when you are done, press space to eat your yummy pizza!!!")
storage.screen.remove("start")
storage.screen.append("add")
elif storage.screen[0] == "add":
say_message("Please choose a number of toppings to add! Press d to start removing toppings. Toppings are %s" % storage.toppings)
if key:
add_topping(key)
elif storage.screen[0] == "remove" and key:
remove_topping(key)
MyApp.logic = logic
MyApp.run()
|
frastlin/PyAudioGame
|
examples/basic_tutorial/ex6.py
|
Python
|
mit
| 2,789 | 0.023664 |
from __future__ import unicode_literals
from django.db import models
try:
    from django.urls import reverse
except ImportError:  # Django < 1.10 keeps reverse in core.urlresolvers
    from django.core.urlresolvers import reverse
from modpacks.models.modpack import Modpack
class Server(models.Model):
""" Minecraft Server details for display on the server page """
name = models.CharField(verbose_name='Server Name',
max_length=200)
desc = models.TextField(verbose_name='Server Description',
blank=True)
modpack = models.ForeignKey(Modpack, verbose_name='Server Modpack')
address = models.CharField(verbose_name='Server Address',
max_length=200,
blank=True)
screenshot = models.ImageField(verbose_name='Screenshot',
blank=True)
dynmap = models.CharField(verbose_name='DynMap URL',
max_length=200,
blank=True)
slug = models.SlugField()
def get_absolute_url(self):
        # assumes the 'server' URL pattern takes the slug as its single positional argument
        return reverse("server", args=[self.slug])
def __str__(self):
return self.name
|
Jonpro03/Minecrunch_Web
|
src/servers/models.py
|
Python
|
mit
| 921 | 0.008686 |
#!/usr/bin/python3
# Copyright (c) 2018-2021 Dell Inc. or its subsidiaries.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import json
import logging
import os
import re
import sys
import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning
from credential_helper import CredentialHelper
from dell_nfv import ConfigOvercloud
from ironic_helper import IronicHelper
from logging_helper import LoggingHelper
from utils import Utils
logging.basicConfig()
logger = logging.getLogger(os.path.splitext(os.path.basename(sys.argv[0]))[0])
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
home_dir = os.path.expanduser('~')
UC_USERNAME = UC_PASSWORD = UC_PROJECT_ID = UC_AUTH_URL = ''
class ConfigEdge(ConfigOvercloud):
"""
    Description: Class responsible for edge site overcloud configuration.
"""
ironic = IronicHelper()
ironic_client = ironic.get_ironic_client()
nodes = ironic_client.node.list()
get_drac_credential = CredentialHelper()
def __init__(self, overcloud_name, node_type, node_type_data):
self.node_type = node_type
self.node_type_data = json.loads(node_type_data)
self.mtu = int(self.node_type_data["nfv_mtu"])
_dir = (re.sub(r'[^a-z0-9]', " ", node_type.lower()).replace(" ", "_"))
_ntl = re.sub(r'[^a-z0-9]', "", node_type.lower())
ne_name = "nic_environment_{}.yaml".format(_ntl)
instack_name = "instackenv_{}.json".format(_ntl)
nic_env_file = os.path.join(home_dir, _dir, ne_name)
instackenv_file = os.path.join(home_dir, _dir, instack_name)
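        # e.g. node_type "Edge Site-1" yields ~/edge_site_1/nic_environment_edgesite1.yaml
        # and ~/edge_site_1/instackenv_edgesite1.json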
self.instackenv = instackenv_file
self.nic_env = nic_env_file
super().__init__(overcloud_name)
def fetch_nfv_parameters(self):
logger.debug("Retrieving NFV parameters")
ntd = self.node_type_data
enable_hugepage = Utils.string_to_bool(ntd["hpg_enable"])
enable_numa = Utils.string_to_bool(ntd["numa_enable"])
nfv_type = self._get_nfv_type(ntd)
is_ovs_dpdk = bool(nfv_type and nfv_type in ["dpdk", "both"])
hostos_cpu_count = int(ntd["numa_hostos_cpu_count"])
_dir = (re.sub(r'[^a-z0-9]', " ",
self.node_type.lower()).replace(" ", "_"))
ntl = re.sub(r'[^a-z0-9]', "", self.node_type.lower())
_f_name = "nic_environment_{}.yaml".format(ntl)
nic_env_file = os.path.join(home_dir, _dir, _f_name)
params = {}
params_dell_env = params["dell_env"] = {}
kernel_args = "iommu=pt intel_iommu=on"
if enable_hugepage:
hpg_num = self.nfv_params.calculate_hugepage_count(
ntd["hpg_size"])
kernel_args += (" default_hugepagesz={} hugepagesz={}"
" hugepages={}").format(ntd["hpg_size"],
ntd["hpg_size"][0:-1],
str(hpg_num))
if enable_numa:
_, node_data = self.nfv_params.select_compute_node(self.node_type,
self.instackenv)
self.nfv_params.parse_data(node_data)
self.nfv_params.get_all_cpus()
self.nfv_params.get_host_cpus(hostos_cpu_count)
self.nfv_params.get_nova_cpus()
self.nfv_params.get_isol_cpus()
if is_ovs_dpdk:
dpdk_nics = self.find_ifaces_by_keyword(nic_env_file,
'Dpdk')
logger.debug("DPDK-NICs >>" + str(dpdk_nics))
self.nfv_params.get_pmd_cpus(self.mtu, dpdk_nics)
self.nfv_params.get_socket_memory(self.mtu, dpdk_nics)
kernel_args += " isolcpus={}".format(self.nfv_params.isol_cpus)
        # dell-environment role specific cpu parameters
params_dell_env["IsolCpusList"] = self.nfv_params.isol_cpus
params_dell_env["NovaComputeCpuDedicatedSet"] = self.nfv_params.nova_cpus
if is_ovs_dpdk:
params_dpdk = params["dpdk"] = {}
params_dpdk["OvsDpdkCoreList"] = self.nfv_params.host_cpus
params_dpdk["OvsPmdCoreList"] = self.nfv_params.pmd_cpus
params_dpdk["OvsDpdkSocketMemory"] = self.nfv_params.socket_mem
# params_dpdk["IsolCpusList"] = self.nfv_params.isol_cpus # Populated in dell_env file
# params_dpdk["NovaComputeCpuDedicatedSet"] = self.nfv_params.nova_cpus # Populated in dell_env file
# params_dpdk["NovaComputeCpuSharedSet"] = self.nfv_params.shared_cpus # Not used in current Architecture
params_dell_env["KernelArgs"] = kernel_args
return params
def _get_nfv_type(self, node_type_data):
if ("nfv_type" in node_type_data
and len(node_type_data["nfv_type"].strip()) != 0
and node_type_data["nfv_type"].strip() in ("dpdk",
"sriov", "both")):
return node_type_data["nfv_type"].strip()
return None
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--overcloud_name",
default=None,
help="The name of the overcloud")
parser.add_argument("--edge_site",
default=None,
dest="node_type",
help="The name of edge site being configured")
parser.add_argument("--edge_site_data",
default=None,
dest="node_type_data",
help="The edge site metadata")
parser.add_argument("--debug",
default=False,
action='store_true',
help="Turn on debugging for this script")
LoggingHelper.add_argument(parser)
args = parser.parse_args()
LoggingHelper.configure_logging(args.logging_level)
config_edge = ConfigEdge(args.overcloud_name, args.node_type,
args.node_type_data)
params = config_edge.fetch_nfv_parameters()
logger.debug(">>>>>> nfv parameters {}".format(str(params)))
return json.dumps(params)
if __name__ == "__main__":
res = main()
logger.debug(">>>>>> res {}".format(str(res)))
sys.stdout.write(res)
|
dsp-jetpack/JetPack
|
src/pilot/dell_nfv_edge.py
|
Python
|
apache-2.0
| 6,869 | 0.000728 |
from sqlalchemy import *
from test.lib import *
class FoundRowsTest(fixtures.TestBase, AssertsExecutionResults):
"""tests rowcount functionality"""
__requires__ = ('sane_rowcount', )
@classmethod
def setup_class(cls):
global employees_table, metadata
metadata = MetaData(testing.db)
employees_table = Table('employees', metadata,
Column('employee_id', Integer,
Sequence('employee_id_seq', optional=True),
primary_key=True),
Column('name', String(50)),
Column('department', String(1)),
)
metadata.create_all()
def setup(self):
global data
data = [ ('Angela', 'A'),
('Andrew', 'A'),
('Anand', 'A'),
('Bob', 'B'),
('Bobette', 'B'),
('Buffy', 'B'),
('Charlie', 'C'),
('Cynthia', 'C'),
('Chris', 'C') ]
i = employees_table.insert()
i.execute(*[{'name':n, 'department':d} for n, d in data])
def teardown(self):
employees_table.delete().execute()
@classmethod
def teardown_class(cls):
metadata.drop_all()
def testbasic(self):
s = employees_table.select()
r = s.execute().fetchall()
assert len(r) == len(data)
def test_update_rowcount1(self):
# WHERE matches 3, 3 rows changed
department = employees_table.c.department
r = employees_table.update(department=='C').execute(department='Z')
print "expecting 3, dialect reports %s" % r.rowcount
assert r.rowcount == 3
def test_update_rowcount2(self):
# WHERE matches 3, 0 rows changed
department = employees_table.c.department
r = employees_table.update(department=='C').execute(department='C')
print "expecting 3, dialect reports %s" % r.rowcount
assert r.rowcount == 3
def test_delete_rowcount(self):
# WHERE matches 3, 3 rows deleted
department = employees_table.c.department
r = employees_table.delete(department=='C').execute()
print "expecting 3, dialect reports %s" % r.rowcount
assert r.rowcount == 3
|
ioram7/keystone-federado-pgid2013
|
build/sqlalchemy/test/sql/test_rowcount.py
|
Python
|
apache-2.0
| 2,260 | 0.00531 |
#!/usr/bin/python
import RPi.GPIO as GPIO
import signal
import time
from on_off import *
class keypad():
def __init__(self, columnCount = 3):
GPIO.setmode(GPIO.BCM)
# CONSTANTS
if columnCount is 3:
self.KEYPAD = [
[1,2,3],
[4,5,6],
[7,8,9],
["*",0,"#"]
]
self.ROW = [27,23,22,24]
self.COLUMN = [10,25,9]
elif columnCount is 4:
self.KEYPAD = [
[1,2,3,"A"],
[4,5,6,"B"],
[7,8,9,"C"],
["*",0,"#","D"]
]
self.ROW = [18,23,24,25]
self.COLUMN = [4,17,22,21]
else:
return
def timer(self, sig, frame):
raise Exception('Time is up')
def KeyPadAuthor(self):
# Initialize the keypad class
kp = keypad()
arr = []
# Loop while waiting for a keypress
digit = None
while len(arr) < 5:
tempDigit = self.getKey()
if tempDigit != digit:
digit = tempDigit;
if digit != None:
# every time a key is pressed it resets the timer
signal.alarm(7)
arr.append(str(digit))
blinkKey()
print (digit)
# return the result
return (arr)
def getKey(self):
# Set all columns as output low
for j in range(len(self.COLUMN)):
GPIO.setup(self.COLUMN[j], GPIO.OUT)
GPIO.output(self.COLUMN[j], GPIO.LOW)
# Set all rows as input
for i in range(len(self.ROW)):
GPIO.setup(self.ROW[i], GPIO.IN, pull_up_down=GPIO.PUD_UP)
# Scan rows for pushed key/button
# A valid key press should set "rowVal" between 0 and 3.
rowVal = -1
for i in range(len(self.ROW)):
tmpRead = GPIO.input(self.ROW[i])
if tmpRead == 0:
rowVal = i
# if rowVal is not 0 thru 3 then no button was pressed and we can exit
if rowVal <0 or rowVal >3:
self.exit()
return
# Convert columns to input
for j in range(len(self.COLUMN)):
GPIO.setup(self.COLUMN[j], GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
# Switch the i-th row found from scan to output
GPIO.setup(self.ROW[rowVal], GPIO.OUT)
GPIO.output(self.ROW[rowVal], GPIO.HIGH)
# Scan columns for still-pushed key/button
# A valid key press should set "colVal" between 0 and 2.
colVal = -1
for j in range(len(self.COLUMN)):
tmpRead = GPIO.input(self.COLUMN[j])
if tmpRead == 1:
colVal=j
# if colVal is not 0 thru 2 then no button was pressed and we can exit
if colVal <0 or colVal >2:
self.exit()
return
# Return the value of the key pressed
self.exit()
return self.KEYPAD[rowVal][colVal]
def exit(self):
# Reinitialize all rows and columns as input at exit
for i in range(len(self.ROW)):
GPIO.setup(self.ROW[i], GPIO.IN, pull_up_down=GPIO.PUD_UP)
for j in range(len(self.COLUMN)):
GPIO.setup(self.COLUMN[j], GPIO.IN, pull_up_down=GPIO.PUD_UP)
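# Usage sketch: kp = keypad(columnCount=3); code = kp.KeyPadAuthor()
# returns a list of five pressed keys as strings once the user has entered them.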
|
gacosta1/CATS
|
Software/src/keypad.py
|
Python
|
gpl-3.0
| 3,522 | 0.012777 |
from __future__ import print_function
from builtins import range
import sys
sys.path.insert(1,"../../")
import h2o
from tests import pyunit_utils
import random
import os
def javapredict_dynamic_data():
# Generate random dataset
dataset_params = {}
dataset_params['rows'] = random.sample(list(range(5000,15001)),1)[0]
dataset_params['cols'] = random.sample(list(range(10,21)),1)[0]
dataset_params['categorical_fraction'] = round(random.random(),1)
left_over = (1 - dataset_params['categorical_fraction'])
dataset_params['integer_fraction'] = round(left_over - round(random.uniform(0,left_over),1),1)
if dataset_params['integer_fraction'] + dataset_params['categorical_fraction'] == 1:
if dataset_params['integer_fraction'] > dataset_params['categorical_fraction']:
dataset_params['integer_fraction'] = dataset_params['integer_fraction'] - 0.1
else:
dataset_params['categorical_fraction'] = dataset_params['categorical_fraction'] - 0.1
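    # keep the two fractions strictly below 1.0 so the frame still gets some real-valued columns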
dataset_params['missing_fraction'] = random.uniform(0,0.5)
dataset_params['has_response'] = True
dataset_params['randomize'] = True
dataset_params['factors'] = random.randint(2,2000)
print("Dataset parameters: {0}".format(dataset_params))
train = h2o.create_frame(**dataset_params)
print("Training dataset:")
print(train)
# Save dataset to results directory
results_dir = pyunit_utils.locate("results")
h2o.download_csv(train,os.path.join(results_dir,"kmeans_dynamic_training_dataset.log"))
# Generate random parameters
params = {}
params['k'] = random.sample(list(range(1,10)),1)[0]
if random.randint(0,1): params['max_iterations'] = random.sample(list(range(1,1000)),1)[0]
if random.randint(0,1): params['standardize'] = random.sample([True, False],1)[0]
if random.randint(0,1): params['seed'] = random.sample(list(range(1,1000)),1)[0]
if random.randint(0,1): params['init'] = random.sample(['Random','PlusPlus','Furthest'],1)[0]
print("Parameter list: {0}".format(params))
x = train.names
x.remove("response")
y = "response"
pyunit_utils.javapredict(algo="kmeans", equality=None, train=train, test=None, x=x, y=y, compile_only=True, **params)
if __name__ == "__main__":
pyunit_utils.standalone_test(javapredict_dynamic_data)
else:
javapredict_dynamic_data()
|
YzPaul3/h2o-3
|
h2o-py/tests/testdir_javapredict/pyunit_javapredict_dynamic_data_paramsKmeans.py
|
Python
|
apache-2.0
| 2,376 | 0.020202 |
# -*- coding: utf-8 -*-
"""
/***************************************************************************
VetEpiGIS-Stat
A QGIS plugin
Spatial functions for vet epidemiology
-------------------
begin : 2016-01-06
git sha : $Format:%H$
copyright : (C) 2016 by Norbert Solymosi
email : solymosi.norbert@gmail.com
***************************************************************************/
/***************************************************************************
The functions are based on the spdep R package: https://cran.r-project.org/web/packages/spdep
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from PyQt4.QtGui import *
from PyQt4.QtCore import SIGNAL, Qt, QSettings, QCoreApplication, QFile, QFileInfo, QDate, QVariant, \
pyqtSignal, QRegExp, QDateTime, QTranslator, QFile, QIODevice, QTextStream
from PyQt4.QtSql import *
from qgis.core import QgsField, QgsSpatialIndex, QgsMessageLog, QgsProject, \
QgsCoordinateTransform, QGis, QgsVectorFileWriter, QgsMapLayerRegistry, QgsFeature, \
QgsGeometry, QgsFeatureRequest, QgsPoint, QgsVectorLayer, QgsCoordinateReferenceSystem, \
QgsRectangle, QgsDataSourceURI, QgsDataProvider, QgsVectorDataProvider, QgsDistanceArea
from qgis.gui import QgsMapTool, QgsMapToolEmitPoint, QgsMessageBar, QgsRubberBand
from numpy import *
import itertools, math
from globalt_dialog import Ui_Dialog
class Dialog(QDialog, Ui_Dialog):
def __init__(self, lyr):
"""Constructor for the dialog.
"""
QDialog.__init__(self)
self.setupUi(self)
self.nb = []
self.lyr = lyr
flds = lyr.dataProvider().fields()
for fld in flds:
if fld.type()!=10:
self.comboBox.addItem(fld.name())
# self.comboBox.addItem('%s' % fld.type())
self.comboBox_2.addItem('B')
self.comboBox_2.addItem('C')
self.comboBox_2.addItem('S')
self.comboBox_2.addItem('U')
self.comboBox_2.addItem('W')
self.comboBox_3.addItem('greater')
self.comboBox_3.addItem('less')
self.comboBox_3.addItem('two sided')
self.comboBox_4.addItem('randomization')
self.comboBox_4.addItem('normality')
self.toolButton.clicked.connect(self.MoranI)
self.buttonBox.button(QDialogButtonBox.Save).clicked.connect(self.save)
self.comboBox_5.currentIndexChanged.connect(self.neightyper)
# self.comboBox_5.addItem('touch')
# self.comboBox_5.addItem('within distance')
self.comboBox_6.addItem('km')
self.comboBox_6.addItem('map unit')
def point2nb(self):
lst = []
# geoms = [geom.geometry() for geom in self.lyr.getFeatures()
# feats = self.lyr.getFeatures()
# for f1, f2 in itertools.product(feats, repeat=2):
# if f1!=f2:
# d = f1.geometry().asPoint().distance(f2.geometry().asPoint())
# self.plainTextEdit.insertPlainText("%s %s: %s\n" % (f1.id(), f2.id(), d))
featA = QgsFeature()
featsA = self.lyr.getFeatures()
trh = float(self.lineEdit.text())
if self.comboBox_6.currentText() == 'km':
# psrid = self.iface.mapCanvas().mapRenderer().destinationCrs().srsid()
prv = self.lyr.dataProvider()
psrid = prv.crs().srsid()
dist = QgsDistanceArea()
dist.setEllipsoid('WGS84')
dist.setEllipsoidalMode(True)
# self.plainTextEdit.insertPlainText("%s\n" % psrid)
if psrid!=3452:
trafo = QgsCoordinateTransform(psrid, 3452)
while featsA.nextFeature(featA):
featB = QgsFeature()
featsB = self.lyr.getFeatures()
sor = []
while featsB.nextFeature(featB):
if featA.id()!=featB.id():
tav = dist.measureLine(trafo.transform(featA.geometry().asPoint()), trafo.transform(featB.geometry().asPoint()))
# self.plainTextEdit.insertPlainText("%s %s %s\n" % (featA.id(), featB.id(), tav))
if (tav/1000.0) <= trh:
sor.append(featB.id())
lst.append(sor)
else:
while featsA.nextFeature(featA):
featB = QgsFeature()
featsB = self.lyr.getFeatures()
sor = []
while featsB.nextFeature(featB):
if featA.id()!=featB.id():
tav = dist.measureLine(featA.geometry().asPoint(), featB.geometry().asPoint())
# self.plainTextEdit.insertPlainText("%s %s %s\n" % (featA.id(), featB.id(), tav))
if (tav/1000.0) <= trh:
sor.append(featB.id())
lst.append(sor)
else:
while featsA.nextFeature(featA):
featB = QgsFeature()
featsB = self.lyr.getFeatures()
sor = []
while featsB.nextFeature(featB):
if featA.id() != featB.id():
tav = featA.geometry().asPoint().distance(featB.geometry().asPoint())
# self.plainTextEdit.insertPlainText("%s %s %s\n" % (featA.id(), featB.id(), tav))
if tav <= trh:
sor.append(featB.id())
lst.append(sor)
# self.plainTextEdit.insertPlainText("%s\n" % lst)
return lst
def neightyper(self):
if self.comboBox_5.currentText() == 'within distance':
self.lineEdit.setVisible(True)
self.comboBox_6.setVisible(True)
else:
self.lineEdit.setVisible(False)
self.comboBox_6.setVisible(False)
def save(self):
fileName = QFileDialog.getSaveFileName(self, caption='Save As...')
try:
file = QFile(fileName + '.txt')
file.open( QIODevice.WriteOnly | QIODevice.Text )
out = QTextStream(file)
out << self.plainTextEdit.toPlainText()
out.flush()
file.close()
self.close()
return True
except IOError:
return False
def MoranI(self):
QApplication.setOverrideCursor(Qt.WaitCursor)
if len(self.nb)==0:
if self.comboBox_5.currentText()!='within distance':
nb = self.poly2nb()
else:
if self.lineEdit.text() == '':
QApplication.restoreOverrideCursor()
QMessageBox.information(None, 'Missing data', 'Within distance must be set up!')
return
nb = self.point2nb()
self.nb = nb
else:
nb = self.nb
cardnb = self.card(nb)
zero = 0
if len(cardnb)==0:
zero += 1
return
glist = []
for m in cardnb:
s = []
if m>0:
s = [1]*m
glist.append(s)
n = len(cardnb)
effn = n-zero
if effn<1:
return
# vlist = [None]*n
# vlist = [[None]]*n
vlist = [[0]] * n
if self.comboBox_2.currentText()=='B':
for i in xrange(n):
g = glist[i]
if cardnb[i]>0:
vlist[i] = g
elif self.comboBox_2.currentText()=='C' or self.comboBox_2.currentText()=='U':
D = sum(list(itertools.chain.from_iterable(glist)))
if D<1:
return
if self.comboBox_2.currentText()=='C':
nu = float(effn)
else:
nu = 1.0
qr = nu/float(D)
for i in xrange(n):
if cardnb[i]>0:
vlist[i] = [x * qr for x in glist[i]]
elif self.comboBox_2.currentText()=='S':
q = []
for i in xrange(len(glist)):
gg = []
for j in xrange(len(glist[i])):
gg.append(power(2*glist[i][j],2))
q.append(sqrt(sum(gg)))
for i in xrange(n):
if cardnb[i]>0:
if q[i]>0:
mpl = (1.0/float(q[i]))
else:
mpl = 0.0
v = [x * mpl for x in glist[i]]
glist[i] = v
Q = sum(list(itertools.chain.from_iterable(glist)))
if Q<1:
return
qr = float(effn)/float(Q)
for i in xrange(n):
if cardnb[i]>0:
vlist[i] = [x * qr for x in glist[i]]
elif self.comboBox_2.currentText()=='W':
for i in xrange(n):
g = glist[i]
d = sum(g)
if cardnb[i]>0:
if d>0:
mpl = (1.0/float(d))
else:
mpl = 0.0
vlist[i] = [x * mpl for x in g]
listw = vlist
# self.plainTextEdit.insertPlainText("listw: %s\n" % listw)
# return
# S0 = sum(sum(filter(None, listw)))
S0 = sum(sum(listw))
S1 = 0
rS = [0]*len(nb)
cS = [0]*len(nb)
for i in xrange(len(nb)):
ij = nb[i]
wij = listw[i]
rS[i] = sum(wij)
for j in xrange(len(ij)):
dij = wij[j]
ijj = ij[j]
cS[ijj] = cS[ijj] + dij
try:
ijlkup = nb[ijj].index(i)
dji = listw[ijj][ijlkup]
except ValueError:
dji = 0
S1 = S1 + (dij * dij) + (dij * dji)
S2 = sum(power([x + y for x, y in zip(rS, cS)],2))
S02 = float(S0) * float(S0)
n1 = n-1
n2 = n-2
n3 = n-3
nn = n*n
x = self.datRead()
if len(x)!=len(nb):
return
x = array(x)
z = x-mean(x)
zz = sum(power(z,2))
K = (len(x)*sum(power(z,4)))/power(zz,2)
ans = empty([n])
for i in xrange(n):
if cardnb[i]==0:
ans[i] = 0
else:
sm = 0
for j in xrange(cardnb[i]):
k = int(nb[i][j])
wt = listw[i][j]
tmp = z[k]
sm = sm+(tmp*wt)
ans[i] = sm
lz = ans
I = (float(n)/float(S0)) * ((sum(z * lz))/float(zz))
EI = (-1.0)/float(n1)
if self.comboBox_4.currentText()=='randomization':
VI = float(n) * (float(S1) * (float(nn) - 3.0 * float(n) + 3.0) - float(n) * float(S2) + 3.0 * float(S02))
tmp = float(K) * (float(S1) * (float(nn) - float(n)) - 2.0 * float(n) * float(S2) + 6.0 * float(S02))
if tmp>VI:
self.plainTextEdit.insertPlainText('Kurtosis overflow, distribution of variable does not meet test assumptions\n')
VI = (VI - tmp)/(float(n1) * float(n2) * float(n3) * float(S02))
tmp = (VI - power(EI,2))
if tmp<0:
                self.plainTextEdit.insertPlainText('Negative variance, distribution of variable does not meet test assumptions\n')
VI = tmp
else:
VI = (float(nn) * float(S1) - float(n) * float(S2) + 3.0 * float(S02))/(float(S02) * (float(nn) - 1.0))
tmp = (VI - power(EI,2))
if tmp < 0:
                self.plainTextEdit.insertPlainText('Negative variance, distribution of variable does not meet test assumptions\n')
VI = tmp
ZI = (I - EI)/sqrt(VI)
if self.comboBox_3.currentText()=='less':
PrI = self.pnorm(ZI)
elif self.comboBox_3.currentText()=='greater':
PrI = 1.0-self.pnorm(ZI)
else:
PrI = 2.0*(1.0-self.pnorm(abs(ZI)))
self.plainTextEdit.insertPlainText("Moran's I: %s\n" % I)
self.plainTextEdit.insertPlainText("Expectation: %s\n" % EI)
self.plainTextEdit.insertPlainText("Variance: %s\n" % VI)
self.plainTextEdit.insertPlainText("Moran's I standard deviate: %s\n" % ZI)
self.plainTextEdit.insertPlainText("p-value: %s\n" % PrI)
ans = empty([n])
for i in xrange(n):
if cardnb[i]==0:
ans[i] = 0
else:
sm = 0
for j in xrange(cardnb[i]):
k = int(nb[i][j])
wt = listw[i][j]
diff = x[i]-x[k]
res = diff*diff
sm = sm+(res*wt)
ans[i] = sm
res = ans
C = (float(n1)/(2.0*float(S0))) * ((sum(res))/float(zz))
EC = 1.0
if self.comboBox_4.currentText()=='randomization':
VC = (float(n1) * float(S1) * (float(nn) - 3.0 * float(n) + 3.0 - float(K) * float(n1)))
VC = VC - ((1.0/4.0) * (float(n1) * float(S2) * (float(nn) + 3.0 * float(n) - 6.0 - float(K) * (float(nn) - float(n) + 2.0))))
VC = VC + (float(S02) * (float(nn) - 3.0 - float(K) * (power(n1,2))))
VC = VC/(float(n) * float(n2) * float(n3) * float(S02))
else:
VC = ((2.0 * float(S1) + float(S2)) * float(n1) - 4.0 * float(S02))/(2.0 * (float(n) + 1.0) * float(S02))
ZC = (EC - C)/sqrt(VC)
if self.comboBox_3.currentText()=='less':
PrI = self.pnorm(ZC)
elif self.comboBox_3.currentText()=='greater':
PrI = 1.0-self.pnorm(ZC)
else:
PrI = 2.0*(1.0-self.pnorm(abs(ZC)))
self.plainTextEdit.insertPlainText('\n\n')
self.plainTextEdit.insertPlainText("Geary's c: %s\n" % C)
self.plainTextEdit.insertPlainText("Expectation: %s\n" % EC)
self.plainTextEdit.insertPlainText("Variance: %s\n" % VC)
self.plainTextEdit.insertPlainText("Geary's c standard deviate: %s\n" % ZC)
self.plainTextEdit.insertPlainText("p-value: %s\n" % PrI)
QApplication.restoreOverrideCursor()
def pnorm(self, z):
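        # Standard normal cumulative distribution function, expressed with the error function.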
return (1.0 + math.erf(z / sqrt(2.0))) / 2.0
    def normpdf(self, x, mean, sd):
var = float(sd)**2
pi = 3.1415926
denom = (2*pi*var)**.5
num = math.exp(-(float(x)-float(mean))**2/(2*var))
return num/denom
# function (x, listw, n, S0, zero.policy = NULL, NAOK = FALSE)
# {
# if (is.null(zero.policy))
# zero.policy <- get("zeroPolicy", envir = .spdepOptions)
# stopifnot(is.logical(zero.policy))
# n1 <- length(listw$neighbours)
# x <- c(x)
# if (n1 != length(x))
# stop("objects of different length")
# xx <- mean(x, na.rm = NAOK)
# z <- x - xx
# zz <- sum(z^2, na.rm = NAOK)
# K <- (length(x) * sum(z^4, na.rm = NAOK))/(zz^2)
# lz <- lag.listw(listw, z, zero.policy = zero.policy, NAOK = NAOK)
# I <- (n/S0) * ((sum(z * lz, na.rm = NAOK))/zz)
# res <- list(I = I, K = K)
# res
# QApplication.setOverrideCursor(Qt.WaitCursor)
# nblst = self.nbCalc()
# # lst = nblst[0]
#
# x = self.datRead()
# x = array(x)
#
# sd = x[nblst[0]]
# for d in sd:
# self.plainTextEdit.insertPlainText('%s, ' % d)
#
# self.plainTextEdit.insertPlainText('\n\n')
#
# sd = x[nblst[1]]
# for d in sd:
# self.plainTextEdit.insertPlainText('%s, ' % d)
#
# self.plainTextEdit.insertPlainText('\n\n')
#
# sd = x
# for d in sd:
# self.plainTextEdit.insertPlainText('%s, ' % d)
#
# # xi = arange(0,9)
# # A = array([ xi, ones(9)])
# # # linearly generated sequence
# # y = [19, 20, 20.5, 21.5, 22, 23, 23, 25.5, 24]
# # w = linalg.lstsq(A.T,y)[0] # obtaining the parameters
# #
# # self.plainTextEdit.insertPlainText('\n\n%s' % w)
# # http://docs.scipy.org/doc/numpy-1.10.0/reference/routines.linalg.html
#
# z = x-mean(x)
# sz2 = sum(power(z,2))
# s0 = len(nblst[0])
# n = len(x)
# zi = x[nblst[0]]
# zj = x[nblst[1]]
# I = (n/(2*s0*sz2))*sum(array(zi)*array(zj))
#
# self.plainTextEdit.insertPlainText('\n\nMoran I: %s' % I)
#
# QApplication.restoreOverrideCursor()
def poly2nb(self):
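        # Contiguity neighbour list: for every feature, collect the ids of the features whose geometry touches it.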
lst = []
index = QgsSpatialIndex()
featsA = self.lyr.getFeatures()
featsB = self.lyr.getFeatures()
for ft in featsA:
index.insertFeature(ft)
featB = QgsFeature()
prv = self.lyr.dataProvider()
while featsB.nextFeature(featB):
geomB = featB.constGeometry()
idb = featB.id()
idxs = index.intersects(geomB.boundingBox())
sor = []
for idx in idxs:
rqst = QgsFeatureRequest().setFilterFid(idx)
featA = prv.getFeatures(rqst).next()
ida = featA.id()
geomA = QgsGeometry(featA.geometry())
if idb!=ida:
if geomB.touches(geomA)==True:
sor.append(ida)
lst.append(sor)
# self.plainTextEdit.insertPlainText("%s\n" % lst)
return lst
def card(self, nb):
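        # Number of neighbours per feature, e.g. card([[1, 2], [0], []]) -> [2, 1, 0].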
cardnb = []
for n in nb:
cardnb.append(len(n))
return cardnb
def nbCalc(self):
lsta = []
lstb = []
index = QgsSpatialIndex()
featsA = self.lyr.getFeatures()
featsB = self.lyr.getFeatures()
for ft in featsA:
index.insertFeature(ft)
featB = QgsFeature()
prv = self.lyr.dataProvider()
while featsB.nextFeature(featB):
geomB = featB.constGeometry()
idb = featB.id()
idxs = index.intersects(geomB.boundingBox())
for idx in idxs:
rqst = QgsFeatureRequest().setFilterFid(idx)
featA = prv.getFeatures(rqst).next()
ida = featA.id()
geomA = QgsGeometry(featA.geometry())
if idb>ida:
if geomB.touches(geomA)==True:
lsta.append(idb)
lstb.append(ida)
# self.plainTextEdit.insertPlainText('%s - %s\n' % (idb, ida))
lstc = [lsta, lstb]
return lstc
def datRead(self):
lstdat = []
fld = self.comboBox.currentText()
feats = self.lyr.getFeatures()
feat = QgsFeature()
while feats.nextFeature(feat):
lstdat.append(float(feat[fld]))
return lstdat
# from numpy import *
# property_a = array([545., 656., 5.4, 33.])
# property_b = array([ 1.2, 1.3, 2.3, 0.3])
# good_objects = [True, False, False, True]
# good_indices = [0, 3]
# property_asel = property_a[good_objects]
# property_bsel = property_b[good_indices]
# def nbTouches(self):
# feat = QgsFeature()
# provider = self.ml.dataProvider()
# e = provider.featureCount()
#
# for ne in range(self.mod, e + self.mod):
# feat = QgsFeature()
# geom = QgsGeometry()
# fiter = self.ml.getFeatures(QgsFeatureRequest(ne))
# if fiter.nextFeature(feat):
# geom = QgsGeometry(feat.geometry())
#
# neighbours = self.htouch(feat)
# row = feat.id()-self.mod
# self.model.setData(self.model.index(row, 0, QModelIndex()), neighbours)
# self.progressBar.setValue(100*ne/e)
#
#
# def htouch(self, feata):
# geoma = QgsGeometry(feata.geometry())
# feat = QgsFeature()
# provider = self.ml.dataProvider()
# feats = provider.getFeatures()
# self.emit(SIGNAL("runStatus(PyQt_PyObject)"), 0)
# self.emit(SIGNAL("runRange(PyQt_PyObject)"), (0, provider.featureCount()))
# ne = 0
# neighbours = ""
# while feats.nextFeature(feat):
# ne += 1
# self.emit(SIGNAL("runStatus(PyQt_PyObject)"), ne)
# geomb = QgsGeometry(feat.geometry())
# if feata.id()!=feat.id():
# if geoma.touches(geomb)==True:
# neighbours = neighbours + '%s,' % (feat.id()+self.p)
# return neighbours[:-1]
|
IZSVenezie/VetEpiGIS-Stat
|
plugin/globalt.py
|
Python
|
gpl-3.0
| 21,379 | 0.005052 |
def mm_loops(X,Y,Z):
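    # Naive triple loop; the inner product uses Y[j][k], so Z ends up as X times the transpose of Y.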
m = len(X)
n = len(Y)
for i in xrange(len(X)):
xi = X[i]
for j in xrange(len(Y)):
yj = Y[j]
total = 0
for k in xrange(len(yj)):
total += xi[k] * yj[k]
Z[i][j] = total
return Z
def make_matrix(m,n):
mat = []
for i in xrange(m):
mat.append(range(n))
return mat
if __name__ == '__main__':
n = 200
x = make_matrix(n,n)
z = make_matrix(n,n)
mm_loops(x, x, z)
|
rjpower/falcon
|
benchmarks/old/matmult_int.py
|
Python
|
apache-2.0
| 511 | 0.029354 |
from __future__ import with_statement
from collections import defaultdict, namedtuple
from functools import partial
from operator import methodcaller
import os
import re
import sys
import copy
import platform
from pytest import raises, mark
from schema import (Schema, Use, And, Or, Regex, Optional, Const,
SchemaError, SchemaWrongKeyError,
SchemaMissingKeyError, SchemaUnexpectedTypeError,
SchemaForbiddenKeyError, Forbidden)
if sys.version_info[0] == 3:
basestring = str # Python 3 does not have basestring
unicode = str # Python 3 does not have unicode
SE = raises(SchemaError)
def ve(_):
raise ValueError()
def se(_):
raise SchemaError('first auto', 'first error')
def test_schema():
assert Schema(1).validate(1) == 1
with SE: Schema(1).validate(9)
assert Schema(int).validate(1) == 1
with SE: Schema(int).validate('1')
assert Schema(Use(int)).validate('1') == 1
with SE: Schema(int).validate(int)
assert Schema(str).validate('hai') == 'hai'
with SE: Schema(str).validate(1)
assert Schema(Use(str)).validate(1) == '1'
assert Schema(list).validate(['a', 1]) == ['a', 1]
assert Schema(dict).validate({'a': 1}) == {'a': 1}
with SE: Schema(dict).validate(['a', 1])
assert Schema(lambda n: 0 < n < 5).validate(3) == 3
with SE: Schema(lambda n: 0 < n < 5).validate(-1)
def test_validate_file():
assert Schema(
Use(open)).validate('LICENSE-MIT').read().startswith('Copyright')
with SE: Schema(Use(open)).validate('NON-EXISTENT')
assert Schema(os.path.exists).validate('.') == '.'
with SE: Schema(os.path.exists).validate('./non-existent/')
assert Schema(os.path.isfile).validate('LICENSE-MIT') == 'LICENSE-MIT'
with SE: Schema(os.path.isfile).validate('NON-EXISTENT')
def test_and():
assert And(int, lambda n: 0 < n < 5).validate(3) == 3
with SE: And(int, lambda n: 0 < n < 5).validate(3.33)
assert And(Use(int), lambda n: 0 < n < 5).validate(3.33) == 3
with SE: And(Use(int), lambda n: 0 < n < 5).validate('3.33')
def test_or():
assert Or(int, dict).validate(5) == 5
assert Or(int, dict).validate({}) == {}
with SE: Or(int, dict).validate('hai')
assert Or(int).validate(4)
with SE: Or().validate(2)
def test_test():
def unique_list(_list):
return len(_list) == len(set(_list))
def dict_keys(key, _list):
return list(map(lambda d: d[key], _list))
schema = (
Schema(
Const(
And(Use(partial(dict_keys, "index")), unique_list))))
data = [
{"index": 1, "value": "foo"},
{"index": 2, "value": "bar"}]
assert schema.validate(data) == data
bad_data = [
{"index": 1, "value": "foo"},
{"index": 1, "value": "bar"}]
with SE: schema.validate(bad_data)
def test_regex():
# Simple case: validate string
assert Regex(r'foo').validate('afoot') == 'afoot'
with SE: Regex(r'bar').validate('afoot')
# More complex case: validate string
assert Regex(r'^[a-z]+$').validate('letters') == 'letters'
with SE:
Regex(r'^[a-z]+$').validate('letters + spaces') == 'letters + spaces'
# Validate dict key
assert (Schema({Regex(r'^foo'): str})
.validate({'fookey': 'value'}) == {'fookey': 'value'})
with SE: Schema({Regex(r'^foo'): str}).validate({'barkey': 'value'})
# Validate dict value
assert (Schema({str: Regex(r'^foo')}).validate({'key': 'foovalue'}) ==
{'key': 'foovalue'})
with SE: Schema({str: Regex(r'^foo')}).validate({'key': 'barvalue'})
# Error if the value does not have a buffer interface
with SE: Regex(r'bar').validate(1)
with SE: Regex(r'bar').validate({})
with SE: Regex(r'bar').validate([])
with SE: Regex(r'bar').validate(None)
# Validate that the pattern has a buffer interface
assert Regex(re.compile(r'foo')).validate('foo') == 'foo'
assert Regex(unicode('foo')).validate('foo') == 'foo'
with raises(TypeError): Regex(1).validate('bar')
with raises(TypeError): Regex({}).validate('bar')
with raises(TypeError): Regex([]).validate('bar')
with raises(TypeError): Regex(None).validate('bar')
def test_validate_list():
assert Schema([1, 0]).validate([1, 0, 1, 1]) == [1, 0, 1, 1]
assert Schema([1, 0]).validate([]) == []
with SE: Schema([1, 0]).validate(0)
with SE: Schema([1, 0]).validate([2])
assert And([1, 0], lambda l: len(l) > 2).validate([0, 1, 0]) == [0, 1, 0]
with SE: And([1, 0], lambda l: len(l) > 2).validate([0, 1])
def test_list_tuple_set_frozenset():
assert Schema([int]).validate([1, 2])
with SE: Schema([int]).validate(['1', 2])
assert Schema(set([int])).validate(set([1, 2])) == set([1, 2])
with SE: Schema(set([int])).validate([1, 2]) # not a set
with SE: Schema(set([int])).validate(['1', 2])
assert Schema(tuple([int])).validate(tuple([1, 2])) == tuple([1, 2])
with SE: Schema(tuple([int])).validate([1, 2]) # not a set
def test_strictly():
assert Schema(int).validate(1) == 1
with SE: Schema(int).validate('1')
def test_dict():
assert Schema({'key': 5}).validate({'key': 5}) == {'key': 5}
with SE: Schema({'key': 5}).validate({'key': 'x'})
with SE: Schema({'key': 5}).validate(['key', 5])
assert Schema({'key': int}).validate({'key': 5}) == {'key': 5}
assert Schema({'n': int, 'f': float}).validate(
{'n': 5, 'f': 3.14}) == {'n': 5, 'f': 3.14}
with SE: Schema({'n': int, 'f': float}).validate(
{'n': 3.14, 'f': 5})
with SE:
try:
Schema({}).validate({'abc': None, 1: None})
except SchemaWrongKeyError as e:
assert e.args[0].startswith("Wrong keys 'abc', 1 in")
raise
with SE:
try:
Schema({'key': 5}).validate({})
except SchemaMissingKeyError as e:
assert e.args[0] == "Missing keys: 'key'"
raise
with SE:
try:
Schema({'key': 5}).validate({'n': 5})
except SchemaMissingKeyError as e:
assert e.args[0] == "Missing keys: 'key'"
raise
with SE:
try:
Schema({}).validate({'n': 5})
except SchemaWrongKeyError as e:
assert e.args[0] == "Wrong keys 'n' in {'n': 5}"
raise
with SE:
try:
Schema({'key': 5}).validate({'key': 5, 'bad': 5})
except SchemaWrongKeyError as e:
assert e.args[0] in ["Wrong keys 'bad' in {'key': 5, 'bad': 5}",
"Wrong keys 'bad' in {'bad': 5, 'key': 5}"]
raise
with SE:
try:
Schema({}).validate({'a': 5, 'b': 5})
except SchemaError as e:
assert e.args[0] in ["Wrong keys 'a', 'b' in {'a': 5, 'b': 5}",
"Wrong keys 'a', 'b' in {'b': 5, 'a': 5}"]
raise
with SE:
try:
Schema({int: int}).validate({'': ''})
except SchemaUnexpectedTypeError as e:
assert e.args[0] in ["'' should be instance of 'int'"]
def test_dict_keys():
assert Schema({str: int}).validate(
{'a': 1, 'b': 2}) == {'a': 1, 'b': 2}
with SE: Schema({str: int}).validate({1: 1, 'b': 2})
assert Schema({Use(str): Use(int)}).validate(
{1: 3.14, 3.14: 1}) == {'1': 3, '3.14': 1}
def test_ignore_extra_keys():
assert Schema({'key': 5}, ignore_extra_keys=True).validate(
{'key': 5, 'bad': 4}) == {'key': 5}
assert Schema({'key': 5, 'dk': {'a': 'a'}}, ignore_extra_keys=True).validate(
{'key': 5, 'bad': 'b', 'dk': {'a': 'a', 'bad': 'b'}}) == \
{'key': 5, 'dk': {'a': 'a'}}
assert Schema([{'key': 'v'}], ignore_extra_keys=True).validate(
[{'key': 'v', 'bad': 'bad'}]) == [{'key': 'v'}]
assert Schema([{'key': 'v'}], ignore_extra_keys=True).validate(
[{'key': 'v', 'bad': 'bad'}]) == [{'key': 'v'}]
def test_ignore_extra_keys_validation_and_return_keys():
assert Schema({'key': 5, object: object}, ignore_extra_keys=True).validate(
{'key': 5, 'bad': 4}) == {'key': 5, 'bad': 4}
assert Schema({'key': 5, 'dk': {'a': 'a', object: object}},
ignore_extra_keys=True).validate(
{'key': 5, 'dk': {'a': 'a', 'bad': 'b'}}) == \
{'key': 5, 'dk': {'a': 'a', 'bad': 'b'}}
def test_dict_forbidden_keys():
with raises(SchemaForbiddenKeyError):
Schema({Forbidden('b'): object}).validate({'b': 'bye'})
with raises(SchemaWrongKeyError):
Schema({Forbidden('b'): int}).validate({'b': 'bye'})
assert (Schema({Forbidden('b'): int,
Optional('b'): object}).validate({'b': 'bye'}) ==
{'b': 'bye'})
with raises(SchemaForbiddenKeyError):
Schema({Forbidden('b'): object, Optional('b'): object}).validate({'b': 'bye'})
def test_dict_optional_keys():
with SE: Schema({'a': 1, 'b': 2}).validate({'a': 1})
assert Schema({'a': 1, Optional('b'): 2}).validate({'a': 1}) == {'a': 1}
assert Schema({'a': 1, Optional('b'): 2}).validate(
{'a': 1, 'b': 2}) == {'a': 1, 'b': 2}
# Make sure Optionals are favored over types:
assert Schema({basestring: 1,
Optional('b'): 2}).validate({'a': 1, 'b': 2}) == {'a': 1, 'b': 2}
# Make sure Optionals hash based on their key:
assert len({Optional('a'): 1, Optional('a'): 1, Optional('b'): 2}) == 2
def test_dict_optional_defaults():
# Optionals fill out their defaults:
assert Schema({Optional('a', default=1): 11,
Optional('b', default=2): 22}).validate({'a': 11}) == {'a': 11, 'b': 2}
# Optionals take precedence over types. Here, the "a" is served by the
# Optional:
assert Schema({Optional('a', default=1): 11,
basestring: 22}).validate({'b': 22}) == {'a': 1, 'b': 22}
with raises(TypeError):
Optional(And(str, Use(int)), default=7)
def test_dict_subtypes():
d = defaultdict(int, key=1)
v = Schema({'key': 1}).validate(d)
assert v == d
assert isinstance(v, defaultdict)
# Please add tests for Counter and OrderedDict once support for Python2.6
# is dropped!
def test_dict_key_error():
try:
Schema({'k': int}).validate({'k': 'x'})
except SchemaError as e:
assert e.code == "Key 'k' error:\n'x' should be instance of 'int'"
try:
Schema({'k': {'k2': int}}).validate({'k': {'k2': 'x'}})
except SchemaError as e:
code = "Key 'k' error:\nKey 'k2' error:\n'x' should be instance of 'int'"
assert e.code == code
try:
Schema({'k': {'k2': int}}, error='k2 should be int').validate({'k': {'k2': 'x'}})
except SchemaError as e:
assert e.code == 'k2 should be int'
def test_complex():
s = Schema({'<file>': And([Use(open)], lambda l: len(l)),
'<path>': os.path.exists,
Optional('--count'): And(int, lambda n: 0 <= n <= 5)})
data = s.validate({'<file>': ['./LICENSE-MIT'], '<path>': './'})
assert len(data) == 2
assert len(data['<file>']) == 1
assert data['<file>'][0].read().startswith('Copyright')
assert data['<path>'] == './'
def test_nice_errors():
try:
Schema(int, error='should be integer').validate('x')
except SchemaError as e:
assert e.errors == ['should be integer']
try:
Schema(Use(float), error='should be a number').validate('x')
except SchemaError as e:
assert e.code == 'should be a number'
try:
Schema({Optional('i'): Use(int, error='should be a number')}).validate({'i': 'x'})
except SchemaError as e:
assert e.code == 'should be a number'
def test_use_error_handling():
try:
Use(ve).validate('x')
except SchemaError as e:
assert e.autos == ["ve('x') raised ValueError()"]
assert e.errors == [None]
try:
Use(ve, error='should not raise').validate('x')
except SchemaError as e:
assert e.autos == ["ve('x') raised ValueError()"]
assert e.errors == ['should not raise']
try:
Use(se).validate('x')
except SchemaError as e:
assert e.autos == [None, 'first auto']
assert e.errors == [None, 'first error']
try:
Use(se, error='second error').validate('x')
except SchemaError as e:
assert e.autos == [None, 'first auto']
assert e.errors == ['second error', 'first error']
def test_or_error_handling():
try:
Or(ve).validate('x')
except SchemaError as e:
assert e.autos[0].startswith('Or(')
assert e.autos[0].endswith(") did not validate 'x'")
assert e.autos[1] == "ve('x') raised ValueError()"
assert len(e.autos) == 2
assert e.errors == [None, None]
try:
Or(ve, error='should not raise').validate('x')
except SchemaError as e:
assert e.autos[0].startswith('Or(')
assert e.autos[0].endswith(") did not validate 'x'")
assert e.autos[1] == "ve('x') raised ValueError()"
assert len(e.autos) == 2
assert e.errors == ['should not raise', 'should not raise']
try:
Or('o').validate('x')
except SchemaError as e:
assert e.autos == ["Or('o') did not validate 'x'",
"'o' does not match 'x'"]
assert e.errors == [None, None]
try:
Or('o', error='second error').validate('x')
except SchemaError as e:
assert e.autos == ["Or('o') did not validate 'x'",
"'o' does not match 'x'"]
assert e.errors == ['second error', 'second error']
def test_and_error_handling():
try:
And(ve).validate('x')
except SchemaError as e:
assert e.autos == ["ve('x') raised ValueError()"]
assert e.errors == [None]
try:
And(ve, error='should not raise').validate('x')
except SchemaError as e:
assert e.autos == ["ve('x') raised ValueError()"]
assert e.errors == ['should not raise']
try:
And(str, se).validate('x')
except SchemaError as e:
assert e.autos == [None, 'first auto']
assert e.errors == [None, 'first error']
try:
And(str, se, error='second error').validate('x')
except SchemaError as e:
assert e.autos == [None, 'first auto']
assert e.errors == ['second error', 'first error']
def test_schema_error_handling():
try:
Schema(Use(ve)).validate('x')
except SchemaError as e:
assert e.autos == [None, "ve('x') raised ValueError()"]
assert e.errors == [None, None]
try:
Schema(Use(ve), error='should not raise').validate('x')
except SchemaError as e:
assert e.autos == [None, "ve('x') raised ValueError()"]
assert e.errors == ['should not raise', None]
try:
Schema(Use(se)).validate('x')
except SchemaError as e:
assert e.autos == [None, None, 'first auto']
assert e.errors == [None, None, 'first error']
try:
Schema(Use(se), error='second error').validate('x')
except SchemaError as e:
assert e.autos == [None, None, 'first auto']
assert e.errors == ['second error', None, 'first error']
def test_use_json():
import json
gist_schema = Schema(And(Use(json.loads), # first convert from JSON
{Optional('description'): basestring,
'public': bool,
'files': {basestring: {'content': basestring}}}))
gist = '''{"description": "the description for this gist",
"public": true,
"files": {
"file1.txt": {"content": "String file contents"},
"other.txt": {"content": "Another file contents"}}}'''
assert gist_schema.validate(gist)
def test_error_reporting():
s = Schema({'<files>': [Use(open, error='<files> should be readable')],
'<path>': And(os.path.exists, error='<path> should exist'),
'--count': Or(None, And(Use(int), lambda n: 0 < n < 5),
error='--count should be integer 0 < n < 5')},
error='Error:')
s.validate({'<files>': [], '<path>': './', '--count': 3})
try:
s.validate({'<files>': [], '<path>': './', '--count': '10'})
except SchemaError as e:
assert e.code == 'Error:\n--count should be integer 0 < n < 5'
try:
s.validate({'<files>': [], '<path>': './hai', '--count': '2'})
except SchemaError as e:
assert e.code == 'Error:\n<path> should exist'
try:
s.validate({'<files>': ['hai'], '<path>': './', '--count': '2'})
except SchemaError as e:
assert e.code == 'Error:\n<files> should be readable'
def test_schema_repr(): # what about repr with `error`s?
schema = Schema([Or(None, And(str, Use(float)))])
repr_ = "Schema([Or(None, And(<type 'str'>, Use(<type 'float'>)))])"
# in Python 3 repr contains <class 'str'>, not <type 'str'>
assert repr(schema).replace('class', 'type') == repr_
def test_validate_object():
schema = Schema({object: str})
assert schema.validate({42: 'str'}) == {42: 'str'}
with SE: schema.validate({42: 777})
def test_issue_9_prioritized_key_comparison():
validate = Schema({'key': 42, object: 42}).validate
assert validate({'key': 42, 777: 42}) == {'key': 42, 777: 42}
def test_issue_9_prioritized_key_comparison_in_dicts():
# http://stackoverflow.com/questions/14588098/docopt-schema-validation
s = Schema({'ID': Use(int, error='ID should be an int'),
'FILE': Or(None, Use(open, error='FILE should be readable')),
Optional(str): object})
data = {'ID': 10, 'FILE': None, 'other': 'other', 'other2': 'other2'}
assert s.validate(data) == data
data = {'ID': 10, 'FILE': None}
assert s.validate(data) == data
def test_missing_keys_exception_with_non_str_dict_keys():
s = Schema({And(str, Use(str.lower), 'name'): And(str, len)})
with SE: s.validate(dict())
with SE:
try:
Schema({1: 'x'}).validate(dict())
except SchemaMissingKeyError as e:
assert e.args[0] == "Missing keys: 1"
raise
# PyPy does have a __name__ attribute for its callables.
@mark.skipif(platform.python_implementation() == 'PyPy',
reason='Running on PyPy')
def test_issue_56_cant_rely_on_callables_to_have_name():
s = Schema(methodcaller('endswith', '.csv'))
assert s.validate('test.csv') == 'test.csv'
with SE:
try:
s.validate('test.py')
except SchemaError as e:
assert "operator.methodcaller" in e.args[0]
raise
def test_exception_handling_with_bad_validators():
BadValidator = namedtuple("BadValidator", ["validate"])
s = Schema(BadValidator("haha"))
with SE:
try:
s.validate("test")
except SchemaError as e:
assert "TypeError" in e.args[0]
raise
def test_issue_83_iterable_validation_return_type():
TestSetType = type("TestSetType", (set,), dict())
data = TestSetType(["test", "strings"])
s = Schema(set([str]))
assert isinstance(s.validate(data), TestSetType)
def test_optional_key_convert_failed_randomly_while_with_another_optional_object():
"""
In this test, created_at string "2015-10-10 00:00:00" is expected to be converted
to a datetime instance.
- it works when the schema is
s = Schema({
'created_at': _datetime_validator,
Optional(basestring): object,
})
- but when wrapping the key 'created_at' with Optional, it fails randomly
:return:
"""
import datetime
fmt = '%Y-%m-%d %H:%M:%S'
_datetime_validator = Or(None, Use(lambda i: datetime.datetime.strptime(i, fmt)))
# FIXME given tests enough
for i in range(1024):
s = Schema({
Optional('created_at'): _datetime_validator,
Optional('updated_at'): _datetime_validator,
Optional('birth'): _datetime_validator,
Optional(basestring): object,
})
data = {
'created_at': '2015-10-10 00:00:00'
}
validated_data = s.validate(data)
# is expected to be converted to a datetime instance, but fails randomly
# (most of the time)
assert isinstance(validated_data['created_at'], datetime.datetime)
# assert isinstance(validated_data['created_at'], basestring)
def test_copy():
s1 = SchemaError('a', None)
s2 = copy.deepcopy(s1)
assert s1 is not s2
assert type(s1) is type(s2)
def test_inheritance():
def convert(data):
if isinstance(data, int):
return data + 1
return data
class MySchema(Schema):
def validate(self, data):
return super(MySchema, self).validate(convert(data))
s = {'k': int, 'd': {'k': int, 'l': [{'l': [int]}]}}
v = {'k': 1, 'd': {'k': 2, 'l': [{'l': [3, 4, 5]}]}}
d = MySchema(s).validate(v)
assert d['k'] == 2 and d['d']['k'] == 3 and d['d']['l'][0]['l'] == [4, 5, 6]
|
bcaudell95/schema
|
test_schema.py
|
Python
|
mit
| 21,259 | 0.002493 |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Module to build pipeline fragment that produces given PCollections.
For internal use only; no backwards-compatibility guarantees.
"""
from __future__ import absolute_import
import apache_beam as beam
from apache_beam.pipeline import PipelineVisitor
from apache_beam.testing.test_stream import TestStream
class PipelineFragment(object):
"""A fragment of a pipeline definition.
A pipeline fragment is built from the original pipeline definition to include
only PTransforms that are necessary to produce the given PCollections.
"""
def __init__(self, pcolls, options=None):
"""Constructor of PipelineFragment.
Args:
pcolls: (List[PCollection]) a list of PCollections to build pipeline
fragment for.
options: (PipelineOptions) the pipeline options for the implicit
pipeline run.
"""
assert len(pcolls) > 0, (
'Need at least 1 PCollection as the target data to build a pipeline '
'fragment that produces it.')
for pcoll in pcolls:
assert isinstance(pcoll, beam.pvalue.PCollection), (
'{} is not an apache_beam.pvalue.PCollection.'.format(pcoll))
# No modification to self._user_pipeline is allowed.
self._user_pipeline = pcolls[0].pipeline
# These are user PCollections. Do not use them to deduce anything that
# will be executed by any runner. Instead, use
# `self._runner_pcolls_to_user_pcolls.keys()` to get copied PCollections.
self._pcolls = set(pcolls)
for pcoll in self._pcolls:
assert pcoll.pipeline is self._user_pipeline, (
'{} belongs to a different user pipeline than other PCollections '
'given and cannot be used to build a pipeline fragment that produces '
'the given PCollections.'.format(pcoll))
self._options = options
# A copied pipeline instance for modification without changing the user
# pipeline instance held by the end user. This instance can be processed
# into a pipeline fragment that later run by the underlying runner.
self._runner_pipeline = self._build_runner_pipeline()
_, self._context = self._runner_pipeline.to_runner_api(
return_context=True, use_fake_coders=True)
from apache_beam.runners.interactive import pipeline_instrument as instr
self._runner_pcoll_to_id = instr.pcolls_to_pcoll_id(
self._runner_pipeline, self._context)
# Correlate components in the runner pipeline to components in the user
# pipeline. The target pcolls are the pcolls given and defined in the user
# pipeline.
self._id_to_target_pcoll = self._calculate_target_pcoll_ids()
self._label_to_user_transform = self._calculate_user_transform_labels()
# Below will give us the 1:1 correlation between
# PCollections/AppliedPTransforms from the copied runner pipeline and
# PCollections/AppliedPTransforms from the user pipeline.
# (Dict[PCollection, PCollection])
(
self._runner_pcolls_to_user_pcolls,
# (Dict[AppliedPTransform, AppliedPTransform])
self._runner_transforms_to_user_transforms
) = self._build_correlation_between_pipelines(
self._runner_pcoll_to_id,
self._id_to_target_pcoll,
self._label_to_user_transform)
# Below are operated on the runner pipeline.
(self._necessary_transforms,
self._necessary_pcollections) = self._mark_necessary_transforms_and_pcolls(
self._runner_pcolls_to_user_pcolls)
self._runner_pipeline = self._prune_runner_pipeline_to_fragment(
self._runner_pipeline, self._necessary_transforms)
def deduce_fragment(self):
"""Deduce the pipeline fragment as an apache_beam.Pipeline instance."""
return beam.pipeline.Pipeline.from_runner_api(
self._runner_pipeline.to_runner_api(use_fake_coders=True),
self._runner_pipeline.runner,
self._options)
def run(self, display_pipeline_graph=False, use_cache=True, blocking=False):
"""Shorthand to run the pipeline fragment."""
try:
preserved_skip_display = self._runner_pipeline.runner._skip_display
preserved_force_compute = self._runner_pipeline.runner._force_compute
preserved_blocking = self._runner_pipeline.runner._blocking
self._runner_pipeline.runner._skip_display = not display_pipeline_graph
self._runner_pipeline.runner._force_compute = not use_cache
self._runner_pipeline.runner._blocking = blocking
return self.deduce_fragment().run()
finally:
self._runner_pipeline.runner._skip_display = preserved_skip_display
self._runner_pipeline.runner._force_compute = preserved_force_compute
self._runner_pipeline.runner._blocking = preserved_blocking
def _build_runner_pipeline(self):
return beam.pipeline.Pipeline.from_runner_api(
self._user_pipeline.to_runner_api(use_fake_coders=True),
self._user_pipeline.runner,
self._options)
def _calculate_target_pcoll_ids(self):
pcoll_id_to_target_pcoll = {}
for pcoll in self._pcolls:
pcoll_id_to_target_pcoll[self._runner_pcoll_to_id.get(str(pcoll),
'')] = pcoll
return pcoll_id_to_target_pcoll
def _calculate_user_transform_labels(self):
label_to_user_transform = {}
class UserTransformVisitor(PipelineVisitor):
def enter_composite_transform(self, transform_node):
self.visit_transform(transform_node)
def visit_transform(self, transform_node):
if transform_node is not None:
label_to_user_transform[transform_node.full_label] = transform_node
v = UserTransformVisitor()
self._runner_pipeline.visit(v)
return label_to_user_transform
def _build_correlation_between_pipelines(
self, runner_pcoll_to_id, id_to_target_pcoll, label_to_user_transform):
runner_pcolls_to_user_pcolls = {}
runner_transforms_to_user_transforms = {}
class CorrelationVisitor(PipelineVisitor):
def enter_composite_transform(self, transform_node):
self.visit_transform(transform_node)
def visit_transform(self, transform_node):
self._process_transform(transform_node)
for in_pcoll in transform_node.inputs:
self._process_pcoll(in_pcoll)
for out_pcoll in transform_node.outputs.values():
self._process_pcoll(out_pcoll)
def _process_pcoll(self, pcoll):
pcoll_id = runner_pcoll_to_id.get(str(pcoll), '')
if pcoll_id in id_to_target_pcoll:
runner_pcolls_to_user_pcolls[pcoll] = (id_to_target_pcoll[pcoll_id])
def _process_transform(self, transform_node):
if transform_node.full_label in label_to_user_transform:
runner_transforms_to_user_transforms[transform_node] = (
label_to_user_transform[transform_node.full_label])
v = CorrelationVisitor()
self._runner_pipeline.visit(v)
return runner_pcolls_to_user_pcolls, runner_transforms_to_user_transforms
def _mark_necessary_transforms_and_pcolls(self, runner_pcolls_to_user_pcolls):
necessary_transforms = set()
all_inputs = set()
updated_all_inputs = set(runner_pcolls_to_user_pcolls.keys())
# Do this until no more new PCollection is recorded.
while len(updated_all_inputs) != len(all_inputs):
all_inputs = set(updated_all_inputs)
for pcoll in all_inputs:
producer = pcoll.producer
while producer:
if producer in necessary_transforms:
break
# Mark the AppliedPTransform as necessary.
necessary_transforms.add(producer)
# Record all necessary input and side input PCollections.
updated_all_inputs.update(producer.inputs)
# pylint: disable=map-builtin-not-iterating
side_input_pvalues = set(
map(lambda side_input: side_input.pvalue, producer.side_inputs))
updated_all_inputs.update(side_input_pvalues)
# Go to its parent AppliedPTransform.
producer = producer.parent
return necessary_transforms, all_inputs
def _prune_runner_pipeline_to_fragment(
self, runner_pipeline, necessary_transforms):
class PruneVisitor(PipelineVisitor):
def enter_composite_transform(self, transform_node):
if isinstance(transform_node.transform, TestStream):
return
pruned_parts = list(transform_node.parts)
for part in transform_node.parts:
if part not in necessary_transforms:
pruned_parts.remove(part)
transform_node.parts = tuple(pruned_parts)
self.visit_transform(transform_node)
def visit_transform(self, transform_node):
if transform_node not in necessary_transforms:
transform_node.parent = None
v = PruneVisitor()
runner_pipeline.visit(v)
return runner_pipeline
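# --- Illustrative usage sketch (not part of the original module) ---
# A minimal sketch of how a fragment might be deduced from an interactive
# pipeline; the pipeline `p` and PCollection `squares` below are hypothetical.
#
# import apache_beam as beam
# from apache_beam.runners.interactive.interactive_runner import InteractiveRunner
# from apache_beam.runners.interactive.pipeline_fragment import PipelineFragment
#
# p = beam.Pipeline(InteractiveRunner())
# squares = p | beam.Create([1, 2, 3]) | beam.Map(lambda x: x * x)
# # Build a pipeline containing only the transforms needed to produce `squares`
# # and run it.
# result = PipelineFragment([squares]).run()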
|
iemejia/incubator-beam
|
sdks/python/apache_beam/runners/interactive/pipeline_fragment.py
|
Python
|
apache-2.0
| 9,582 | 0.005844 |
"""This test checks for correct wait4() behavior.
"""
import os
import time
from test.fork_wait import ForkWait
from test.test_support import run_unittest, reap_children, get_attribute
# If either of these do not exist, skip this test.
get_attribute(os, 'fork')
get_attribute(os, 'wait4')
class Wait4Test(ForkWait):
def wait_impl(self, cpid):
for i in range(10):
# wait4() shouldn't hang, but some of the buildbots seem to hang
# in the forking tests. This is an attempt to fix the problem.
spid, status, rusage = os.wait4(cpid, os.WNOHANG)
if spid == cpid:
break
time.sleep(1.0)
self.assertEqual(spid, cpid)
self.assertEqual(status, 0, "cause = %d, exit = %d" % (status&0xff, status>>8))
self.assertTrue(rusage)
def test_main():
run_unittest(Wait4Test)
reap_children()
if __name__ == "__main__":
test_main()
|
teeple/pns_server
|
work/install/Python-2.7.4/Lib/test/test_wait4.py
|
Python
|
gpl-2.0
| 940 | 0.005319 |
from flask import render_template
from app import app, db, models
import json
@app.route('/')
@app.route('/index')
def index():
# obtain today's words
# words = models.Words.query.all()
# words = list((str(word[0]), word[1]) for word in db.session.query(models.Words, db.func.count(models.Words.id).label("total")).group_by(models.Words.word).order_by("total DESC"))
data = db.session.query(models.Words, db.func.count(models.Words.id).label("total")).group_by(models.Words.word).order_by("total DESC").all()[:50]
words = [_[0].word for _ in data]
count = [_[1] for _ in data]
return render_template('index.html', words=words, count = count)
|
matbra/radio_fearit
|
app/views.py
|
Python
|
gpl-3.0
| 670 | 0.008955 |
import galaxyxml.tool.parameters as gxtp
from collections import Counter
from pydoc import locate
class ArgparseGalaxyTranslation(object):
def __gxtp_param_from_type(self, param, flag, label, num_dashes, gxparam_extra_kwargs, default=None):
from argparse import FileType
"""Based on a type, convert to appropriate gxtp class
"""
if default is None and (param.type in (int, float)):
default = 0
if param.type == int:
mn = None
mx = None
if param.choices is not None:
mn = min(param.choices)
mx = max(param.choices)
gxparam = gxtp.IntegerParam(flag, default, label=label, min=mn, max=mx, num_dashes=num_dashes, **gxparam_extra_kwargs)
elif param.choices is not None:
choices = {k: k for k in param.choices}
gxparam = gxtp.SelectParam(flag, default=default, label=label, num_dashes=num_dashes, options=choices, **gxparam_extra_kwargs)
elif param.type == float:
gxparam = gxtp.FloatParam(flag, default, label=label, num_dashes=num_dashes, **gxparam_extra_kwargs)
elif param.type is None or param.type == str:
gxparam = gxtp.TextParam(flag, value=default, label=label, num_dashes=num_dashes, **gxparam_extra_kwargs)
elif param.type == locate('file'):
gxparam = gxtp.DataParam(flag, label=label, num_dashes=num_dashes, **gxparam_extra_kwargs)
elif isinstance(param.type, FileType):
if 'w' in param.type._mode:
gxparam = gxtp.OutputParameter(
flag, format='data', default=default, label=label,
num_dashes=num_dashes, **gxparam_extra_kwargs
)
else:
gxparam = gxtp.DataParam(
flag, default=default, label=label, num_dashes=num_dashes,
**gxparam_extra_kwargs
)
else:
gxparam = None
return gxparam
def __args_from_nargs(self, param, repeat_name, repeat_var_name, positional, flag):
"""Based on param.nargs, return the appropriate overrides
"""
gxrepeat_args = []
gxrepeat_kwargs = {}
gxrepeat_cli_after = None
gxrepeat_cli_before = None
gxrepeat_cli_actual = None
gxparam_cli_before = None
gxparam_cli_after = None
if positional:
gxrepeat_cli_actual = '"$%s"' % (repeat_var_name)
else:
gxrepeat_cli_actual = '%s "$%s"' % (param.option_strings[0], repeat_var_name)
if isinstance(param.nargs, int):
# N (an integer). N arguments from the command line will be
# gathered together into a list. For example:
if param.nargs > 1:
gxrepeat_args = [repeat_name, 'repeat_title']
gxrepeat_kwargs = {
'min': param.nargs,
'max': param.nargs,
}
else:
# If we have only one, we don't want a gxrepeat, so we leave well
# enough alone
gxrepeat_args = None
elif param.nargs == '?':
# '?'. One argument will be consumed from the command line if
# possible, and produced as a single item. If no command-line
# argument is present, the value from default will be produced.
# Note that for optional arguments, there is an additional case -
# the option string is present but not followed by a command-line
# argument. In this case the value from const will be produced
# This does NOT provide a way to access the value in const, but
# that seems like a HORRIBLE idea anyway. Seriously, who does that.
gxparam_cli_before = """\n#if $%s and $%s is not None:""" % (flag, flag)
gxparam_cli_after = '#end if'
gxrepeat_args = None
elif param.nargs is None:
# Very similar to '?' but without the case of "optional + specified
# withouth an argument" being OK
#
# This has changed over time, we're (probably) going overboard here.
gxparam_cli_before = """\n#if $%s and $%s is not None:""" % (flag, flag)
gxparam_cli_after = '#end if'
gxrepeat_args = None
elif param.nargs == '*':
# '*'. All command-line arguments present are gathered into a list.
# Note that it generally doesn't make much sense to have more than
# one positional argument with nargs='*', but multiple optional
# arguments with nargs='*' is possible. For example:
# This needs to be handled with a
# set files = '" "'.join( [ str( $file ) for $file in $inputB ] )
gxrepeat_args = [repeat_name, 'repeat_title']
# gxrepeat_cli_after = '#end if\n'
gxrepeat_cli_after = ''
gxrepeat_cli_before = """\n#set %s = '" "'.join([ str($var.%s) for $var in $%s ])""" % (repeat_var_name, flag, repeat_name)
elif param.nargs == '+':
# '+'. Just like '*', all command-line args present are gathered
# into a list. Additionally, an error message will be generated if
# there wasn't at least one command-line argument present. For
# example:
gxrepeat_args = [repeat_name, 'repeat_title']
gxrepeat_kwargs = {'min': 1}
gxrepeat_cli_after = ''
gxrepeat_cli_before = """\n#set %s = '" "'.join([ str($var.%s) for $var in $%s ])""" % (repeat_var_name, flag, repeat_name)
else:
raise Exception("TODO: Handle argparse.REMAINDER")
return (gxrepeat_args, gxrepeat_kwargs, gxrepeat_cli_after,
gxrepeat_cli_before, gxrepeat_cli_actual, gxparam_cli_before, gxparam_cli_after)
def __init__(self):
self.repeat_count = 0
self.positional_count = Counter()
def _VersionAction(self, param, tool=None):
# passing tool is TERRIBLE, I know.
# TODO handle their templating of version
# This is kinda ugly but meh.
tool.root.attrib['version'] = param.version
# Count the repeats for unique names
# TODO improve
def _StoreAction(self, param, tool=None):
"""
Parse argparse arguments action type of "store", the default.
param: argparse.Action
"""
gxparam = None
gxrepeat = None
self.repeat_count += 1
gxparam_extra_kwargs = {}
if not param.required:
gxparam_extra_kwargs['optional'] = True
# Positional arguments don't have an option strings
positional = len(param.option_strings) == 0
if not positional:
flag = max(param.option_strings, key=len) # Pick the longest of the options strings
else:
flag = ''
            self.positional_count[param.dest] += 1
repeat_name = 'repeat_%s' % self.repeat_count
repeat_var_name = 'repeat_var_%s' % self.repeat_count
# TODO: Replace with logic supporting characters other than -
flag_wo_dashes = flag.lstrip('-')
num_dashes = len(flag) - len(flag_wo_dashes)
# Moved because needed in developing repeat CLI
if positional:
v = self.positional_count[param.dest]
flag_wo_dashes = '%s%s' % (param.dest, '_' + str(v) if v > 1 else '')
# SO unclean
gxparam_extra_kwargs['positional'] = True
# Figure out parameters and overrides from param.nargs, mainly.
# This is really unpleasant.
(gxrepeat_args, gxrepeat_kwargs, gxrepeat_cli_after,
gxrepeat_cli_before, gxrepeat_cli_actual, gxparam_cli_before,
gxparam_cli_after) = \
self.__args_from_nargs(param, repeat_name, repeat_var_name, positional, flag_wo_dashes)
# Build the gxrepeat if it's needed
if gxrepeat_args is not None:
gxrepeat = gxtp.Repeat(*gxrepeat_args, **gxrepeat_kwargs)
if gxrepeat_cli_before is not None:
gxrepeat.command_line_before_override = gxrepeat_cli_before
if gxrepeat_cli_after is not None:
gxrepeat.command_line_after_override = gxrepeat_cli_after
if gxrepeat_cli_actual is not None:
gxrepeat.command_line_override = gxrepeat_cli_actual
else:
gxrepeat = None
gxparam = self.__gxtp_param_from_type(
param, flag_wo_dashes, param.help, num_dashes,
gxparam_extra_kwargs, default=param.default
)
# Not really happy with this way of doing this
if gxparam_cli_before is not None:
gxparam.command_line_before_override = gxparam_cli_before
if gxparam_cli_after is not None:
gxparam.command_line_after_override = gxparam_cli_after
# if positional argument, wipe out the CLI flag that's usually present
if positional:
gxparam.command_line_override = '$%s' % flag_wo_dashes
if gxrepeat is not None and gxparam is not None:
gxrepeat.append(gxparam)
return gxrepeat
elif gxrepeat is None and gxparam is not None:
return gxparam
else:
raise Exception("huh")
return None
def _StoreTrueAction(self, param, **kwargs):
return self._StoreConstAction(param, **kwargs)
def _StoreFalseAction(self, param, **kwargs):
return self._StoreConstAction(param, **kwargs)
def _AppendAction(self, param, **kwargs):
self.repeat_count += 1
repeat_name = 'repeat_%s' % self.repeat_count
# TODO: Replace with logic supporting characters other than -
flag = max(param.option_strings, key=len) # Pick one of the options strings
flag_wo_dashes = flag.lstrip('-')
num_dashes = len(flag) - len(flag_wo_dashes)
gxparam = self.__gxtp_param_from_type(param, flag_wo_dashes, param.help, num_dashes, {})
gxrepeat = gxtp.Repeat(repeat_name, 'Repeated Variable')
gxrepeat.command_line_override = '%s $%s.%s' % (param.option_strings[0], 'i', flag_wo_dashes)
gxrepeat.append(gxparam)
return gxrepeat
def _StoreConstAction(self, param, **kwargs):
flag = max(param.option_strings, key=len) # Pick one of the options strings
flag_wo_dashes = flag.lstrip('-')
num_dashes = len(flag) - len(flag_wo_dashes)
gxparam = gxtp.BooleanParam(flag_wo_dashes, label=param.help, num_dashes=num_dashes)
return gxparam
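# --- Illustrative usage sketch (not part of the original module) ---
# A rough sketch of feeding a plain argparse action to the translator; the
# parser, argument name and the direct call to _StoreAction are hypothetical
# and only meant to show the shape of the API.
#
# import argparse
# parser = argparse.ArgumentParser()
# action = parser.add_argument('--threshold', type=float, default=0.5,
#                              help='Score threshold')
# translation = ArgparseGalaxyTranslation()
# gxparam = translation._StoreAction(action)  # a galaxyxml FloatParam here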
|
erasche/argparse2tool
|
argparse2tool/dropins/argparse/argparse_galaxy_translation.py
|
Python
|
apache-2.0
| 10,696 | 0.00215 |
from model import Event
from geo.geomodel import geotypes
def get(handler, response):
lat = handler.request.get('lat')
lon = handler.request.get('lng')
response.events = Event.proximity_fetch(
Event.all(),
geotypes.Point(float(lat),float(lon)),
)
|
globalspin/haemapod
|
haemapod/handlers/events/proximity.py
|
Python
|
mit
| 264 | 0.018939 |
# Copyright (c) 2015-2016 Cisco Systems, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
import click
from molecule import util
from molecule.command import base
from molecule.dependency import ansible_galaxy
from molecule.dependency import shell
class Dependency(base.Base):
def execute(self, exit=True):
"""
Execute the actions that should run prior to a converge and return a
tuple.
:param exit: (Unused) Provided to complete method signature.
:return: Return a tuple provided by :meth:`.AnsiblePlaybook.execute`.
"""
debug = self.args.get('debug')
if self.molecule.state.installed_deps:
return (None, None)
dependency_name = self.molecule.dependency
if dependency_name == 'galaxy':
dd = self.molecule.config.config.get('dependency')
if dd.get('requirements_file'):
msg = "Downloading dependencies with '{}'...".format(
dependency_name)
util.print_info(msg)
g = ansible_galaxy.AnsibleGalaxy(
self.molecule.config.config, debug=debug)
g.execute()
self.molecule.state.change_state('installed_deps', True)
elif dependency_name == 'shell':
dd = self.molecule.config.config.get('dependency')
if dd.get('command'):
msg = "Downloading dependencies with '{}'...".format(
dependency_name)
util.print_info(msg)
s = shell.Shell(self.molecule.config.config, debug=debug)
s.execute()
self.molecule.state.change_state('installed_deps', True)
return (None, None)
@click.command()
@click.pass_context
def dependency(ctx): # pragma: no cover
""" Perform dependent actions on the current role. """
d = Dependency(ctx.obj.get('args'), {})
    util.sysexit(d.execute()[0])
|
rgreinho/molecule
|
molecule/command/dependency.py
|
Python
|
mit
| 2,997 | 0 |
import save
import client
def start():
def callback():
client.client.chat('/novice')
found_nations = [ (name, style, id) for name, style, id in client.get_nations() if name == 'Poles' ]
if found_nations:
name, style, id = found_nations[0]
print 'change nation to', name, style, id
client.freeciv.func.set_nation_settings(id, 'Player', style, 2)
return True
save.load_game('data/tutorial.sav', before_callback=callback)
|
eric-stanley/freeciv-android
|
lib/freeciv/tutorial.py
|
Python
|
gpl-2.0
| 497 | 0.008048 |
# -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
import os
import pytest
from translate.filters import checks
from django.db import IntegrityError
from pytest_pootle.factories import LanguageDBFactory
from pootle.core.delegate import revision
from pootle_app.models import Directory
from pootle_language.models import Language
from pootle_project.models import Project
from pootle_store.models import Store
from pootle_translationproject.models import TranslationProject
@pytest.mark.django_db
def test_tp_create_fail(po_directory, tutorial, english):
# Trying to create a TP with no Language raises a RelatedObjectDoesNotExist
# which can be caught with Language.DoesNotExist
with pytest.raises(Language.DoesNotExist):
TranslationProject.objects.create()
# TP needs a project set too...
with pytest.raises(Project.DoesNotExist):
TranslationProject.objects.create(language=english)
# There is already an english tutorial was automagically set up
with pytest.raises(IntegrityError):
TranslationProject.objects.create(project=tutorial, language=english)
@pytest.mark.django_db
def test_tp_create_parent_dirs(tp0):
parent = tp0.create_parent_dirs("%sfoo/bar/baz.po" % tp0.pootle_path)
assert (
parent
== Directory.objects.get(
pootle_path="%sfoo/bar/" % tp0.pootle_path))
@pytest.mark.django_db
def test_tp_create_templates(project0_nongnu, project0,
templates, no_templates_tps, complex_ttk):
# As there is a tutorial template it will automatically create stores for
# our new TP
template_tp = TranslationProject.objects.create(
language=templates, project=project0)
template = Store.objects.create(
name="foo.pot",
translation_project=template_tp,
parent=template_tp.directory)
template.update(complex_ttk)
tp = TranslationProject.objects.create(
project=project0, language=LanguageDBFactory())
tp.init_from_templates()
assert tp.stores.count() == template_tp.stores.count()
assert (
[(s, t)
for s, t
in template_tp.stores.first().units.values_list("source_f",
"target_f")]
== [(s, t)
for s, t
in tp.stores.first().units.values_list("source_f",
"target_f")])
@pytest.mark.django_db
def test_tp_init_from_template_po(project0, templates,
no_templates_tps, complex_ttk):
# When initing a tp from a file called `template.pot` the resulting
# store should be called `langcode.po` if the project is gnuish
project0.config["pootle_fs.translation_mappings"] = dict(
default="/<dir_path>/<language_code>.<ext>")
template_tp = TranslationProject.objects.create(
language=templates, project=project0)
template = Store.objects.create(
name="template.pot",
translation_project=template_tp,
parent=template_tp.directory)
template.update(complex_ttk)
tp = TranslationProject.objects.create(
project=project0, language=LanguageDBFactory())
tp.init_from_templates()
store = tp.stores.get()
assert store.name == "%s.po" % tp.language.code
@pytest.mark.django_db
def test_tp_create_with_files(project0_directory, project0, store0, settings):
# lets add some files by hand
trans_dir = settings.POOTLE_TRANSLATION_DIRECTORY
language = LanguageDBFactory()
tp_dir = os.path.join(trans_dir, "%s/project0" % language.code)
os.makedirs(tp_dir)
with open(os.path.join(tp_dir, "store0.po"), "w") as f:
f.write(store0.serialize())
TranslationProject.objects.create(project=project0, language=language)
@pytest.mark.django_db
def test_tp_stats_created_from_template(po_directory, templates, tutorial):
language = LanguageDBFactory(code="foolang")
tp = TranslationProject.objects.create(language=language, project=tutorial)
tp.init_from_templates()
assert tp.stores.all().count() == 1
stats = tp.data_tool.get_stats()
assert stats['total'] == 2 # there are 2 words in test template
assert stats['translated'] == 0
assert stats['fuzzy'] == 0
assert stats['suggestions'] == 0
assert stats['critical'] == 0
@pytest.mark.django_db
def test_can_be_inited_from_templates(po_directory, tutorial, templates):
language = LanguageDBFactory()
tp = TranslationProject(project=tutorial, language=language)
assert tp.can_be_inited_from_templates()
@pytest.mark.django_db
def test_cannot_be_inited_from_templates(project0, no_templates_tps):
language = LanguageDBFactory()
tp = TranslationProject(project=project0, language=language)
assert not tp.can_be_inited_from_templates()
@pytest.mark.django_db
def test_tp_checker(po_directory, tp_checker_tests):
language = Language.objects.get(code="language0")
checker_name_, project = tp_checker_tests
tp = TranslationProject.objects.create(project=project, language=language)
checkerclasses = [
checks.projectcheckers.get(tp.project.checkstyle,
checks.StandardChecker)
]
assert [x.__class__ for x in tp.checker.checkers] == checkerclasses
@pytest.mark.django_db
def test_tp_cache_on_delete(tp0):
proj_revision = revision.get(
tp0.project.directory.__class__)(
tp0.project.directory)
orig_revision = proj_revision.get("stats")
tp0.delete()
assert (
proj_revision.get("stats")
!= orig_revision)
|
unho/pootle
|
tests/models/translationproject.py
|
Python
|
gpl-3.0
| 5,854 | 0 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import pytest
import numpy as np
import torch.nn as nn
import torch as T
from torch.autograd import Variable as var
import torch.nn.functional as F
from torch.nn.utils import clip_grad_norm_
import torch.optim as optim
import sys
import os
import math
import time
sys.path.insert(0, '.')
import functools
from dnc import DNC
from test_utils import generate_data, criterion
def test_rnn_1():
T.manual_seed(1111)
input_size = 100
hidden_size = 100
rnn_type = 'rnn'
num_layers = 1
num_hidden_layers = 1
dropout = 0
nr_cells = 1
cell_size = 1
read_heads = 1
gpu_id = -1
debug = True
lr = 0.001
sequence_max_length = 10
batch_size = 10
cuda = gpu_id
clip = 10
length = 10
rnn = DNC(
input_size=input_size,
hidden_size=hidden_size,
rnn_type=rnn_type,
num_layers=num_layers,
num_hidden_layers=num_hidden_layers,
dropout=dropout,
nr_cells=nr_cells,
cell_size=cell_size,
read_heads=read_heads,
gpu_id=gpu_id,
debug=debug
)
optimizer = optim.Adam(rnn.parameters(), lr=lr)
optimizer.zero_grad()
input_data, target_output = generate_data(batch_size, length, input_size, cuda)
target_output = target_output.transpose(0, 1).contiguous()
output, (chx, mhx, rv), v = rnn(input_data, None)
output = output.transpose(0, 1)
loss = criterion((output), target_output)
loss.backward()
T.nn.utils.clip_grad_norm_(rnn.parameters(), clip)
optimizer.step()
assert target_output.size() == T.Size([21, 10, 100])
assert chx[0][0].size() == T.Size([10,100])
assert mhx['memory'].size() == T.Size([10,1,1])
assert rv.size() == T.Size([10, 1])
def test_rnn_n():
T.manual_seed(1111)
input_size = 100
hidden_size = 100
rnn_type = 'rnn'
num_layers = 3
num_hidden_layers = 5
dropout = 0.2
nr_cells = 12
cell_size = 17
read_heads = 3
gpu_id = -1
debug = True
lr = 0.001
sequence_max_length = 10
batch_size = 10
cuda = gpu_id
clip = 20
length = 13
rnn = DNC(
input_size=input_size,
hidden_size=hidden_size,
rnn_type=rnn_type,
num_layers=num_layers,
num_hidden_layers=num_hidden_layers,
dropout=dropout,
nr_cells=nr_cells,
cell_size=cell_size,
read_heads=read_heads,
gpu_id=gpu_id,
debug=debug
)
optimizer = optim.Adam(rnn.parameters(), lr=lr)
optimizer.zero_grad()
input_data, target_output = generate_data(batch_size, length, input_size, cuda)
target_output = target_output.transpose(0, 1).contiguous()
output, (chx, mhx, rv), v = rnn(input_data, None)
output = output.transpose(0, 1)
loss = criterion((output), target_output)
loss.backward()
T.nn.utils.clip_grad_norm_(rnn.parameters(), clip)
optimizer.step()
assert target_output.size() == T.Size([27, 10, 100])
assert chx[1].size() == T.Size([num_hidden_layers,10,100])
assert mhx['memory'].size() == T.Size([10,12,17])
assert rv.size() == T.Size([10, 51])
def test_rnn_no_memory_pass():
T.manual_seed(1111)
input_size = 100
hidden_size = 100
rnn_type = 'rnn'
num_layers = 3
num_hidden_layers = 5
dropout = 0.2
nr_cells = 12
cell_size = 17
read_heads = 3
gpu_id = -1
debug = True
lr = 0.001
sequence_max_length = 10
batch_size = 10
cuda = gpu_id
clip = 20
length = 13
rnn = DNC(
input_size=input_size,
hidden_size=hidden_size,
rnn_type=rnn_type,
num_layers=num_layers,
num_hidden_layers=num_hidden_layers,
dropout=dropout,
nr_cells=nr_cells,
cell_size=cell_size,
read_heads=read_heads,
gpu_id=gpu_id,
debug=debug
)
optimizer = optim.Adam(rnn.parameters(), lr=lr)
optimizer.zero_grad()
input_data, target_output = generate_data(batch_size, length, input_size, cuda)
target_output = target_output.transpose(0, 1).contiguous()
(chx, mhx, rv) = (None, None, None)
outputs = []
for x in range(6):
output, (chx, mhx, rv), v = rnn(input_data, (chx, mhx, rv), pass_through_memory=False)
output = output.transpose(0, 1)
outputs.append(output)
output = functools.reduce(lambda x,y: x + y, outputs)
loss = criterion((output), target_output)
loss.backward()
T.nn.utils.clip_grad_norm_(rnn.parameters(), clip)
optimizer.step()
assert target_output.size() == T.Size([27, 10, 100])
assert chx[1].size() == T.Size([num_hidden_layers,10,100])
assert mhx['memory'].size() == T.Size([10,12,17])
assert rv == None
|
ixaxaar/pytorch-dnc
|
test/test_rnn.py
|
Python
|
mit
| 4,532 | 0.02714 |
from collections import defaultdict
import fileinput
mem = defaultdict(int)
s1 = -100000
s2 = -100000
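# Each input line is expected to look like
# "<register> inc|dec <amount> if <register> <op> <value>",
# e.g. "b inc 5 if a > 1" (Advent of Code 2017, day 8). s1 ends up as the
# largest register value after processing, s2 as the largest value ever held.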
def condition(line):
global mem
l = line[-3:]
if l[1] == "==":
if mem[l[0]] == int(l[2]): return True
else: return False
elif l[1] == "<":
if mem[l[0]] < int(l[2]): return True
else: return False
elif l[1] == ">":
if mem[l[0]] > int(l[2]): return True
else: return False
elif l[1] == "<=":
if mem[l[0]] <= int(l[2]): return True
else: return False
elif l[1] == ">=":
if mem[l[0]] >= int(l[2]): return True
else: return False
elif l[1] == "!=":
if mem[l[0]] != int(l[2]): return True
else: return False
for line in fileinput.input():
line = line.split()
if condition(line):
if line[1] == "inc":
mem[line[0]] += int(line[2])
elif line[1] == "dec":
mem[line[0]] -= int(line[2])
if mem[line[0]] > s2: s2 = mem[line[0]]
for k in mem.keys():
if mem[k] > s1: s1 = mem[k]
print(s1)
print(s2)
|
zigapk/adventofcode
|
2017/8/main.py
|
Python
|
mit
| 1,081 | 0.014801 |
import numpy, sys, warnings
import scipy.linalg, scipy.special
'''
VBLinRegARD: Linear basis regression with automatic relevance priors
using Variational Bayes.
For more details on the algorithm see Apprendix of
Roberts, McQuillan, Reece & Aigrain, 2013, MNRAS, 354, 3639.
History:
2011: Translated by Thomas Evans from original Matlab code by Stephen J Roberts
2013: Documentation added by Suzanne Aigrain
'''
def logdet(a):
'''
Compute log of determinant of matrix a using Cholesky decomposition
'''
# First make sure that matrix is symmetric:
if numpy.allclose(a.T,a) == False:
print 'MATRIX NOT SYMMETRIC'
# Second make sure that matrix is positive definite:
eigenvalues = scipy.linalg.eigvalsh(a)
if min(eigenvalues) <=0:
print 'Matrix is NOT positive-definite'
print ' min eigv = %.16f' % min(eigenvalues)
step1 = scipy.linalg.cholesky(a)
step2 = numpy.diag(step1.T)
out = 2. * numpy.sum(numpy.log(step2), axis=0)
return out
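# --- Illustrative check (not part of the original module) ---
# For a symmetric positive-definite matrix the Cholesky-based value above
# should agree with numpy's slogdet, e.g.:
#
# a = numpy.array([[4., 1.], [1., 3.]])
# assert numpy.allclose(logdet(a), numpy.linalg.slogdet(a)[1])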
def bayes_linear_fit_ard(X, y):
'''
Fit linear basis model with design matrix X to data y.
Calling sequence:
w, V, invV, logdetV, an, bn, E_a, L = bayes_linear_fit_ard(X, y)
Inputs:
X: design matrix
y: target data
Outputs
w: basis function weights
***need to document the others!***
'''
# uninformative priors
a0 = 1e-2
b0 = 1e-4
c0 = 1e-2
d0 = 1e-4
# pre-process data
[N, D] = X.shape
X_corr = X.T * X
Xy_corr = X.T * y
an = a0 + N / 2.
gammaln_an = scipy.special.gammaln(an)
cn = c0 + 1 / 2.
D_gammaln_cn = D * scipy.special.gammaln(cn)
# iterate to find hyperparameters
L_last = -sys.float_info.max
max_iter = 500
E_a = numpy.matrix(numpy.ones(D) * c0 / d0).T
for iter in range(max_iter):
# covariance and weight of linear model
invV = numpy.matrix(numpy.diag(numpy.array(E_a)[:,0])) + X_corr
V = numpy.matrix(scipy.linalg.inv(invV))
logdetV = -logdet(invV)
w = numpy.dot(V, Xy_corr)[:,0]
# parameters of noise model (an remains constant)
sse = numpy.sum(numpy.power(X*w-y, 2), axis=0)
if numpy.imag(sse)==0:
sse = numpy.real(sse)[0]
else:
print 'Something went wrong'
bn = b0 + 0.5 * (sse + numpy.sum((numpy.array(w)[:,0]**2) * numpy.array(E_a)[:,0], axis=0))
E_t = an / bn
# hyperparameters of covariance prior (cn remains constant)
dn = d0 + 0.5 * (E_t * (numpy.array(w)[:,0]**2) + numpy.diag(V))
E_a = numpy.matrix(cn / dn).T
# variational bound, ignoring constant terms for now
L = -0.5 * (E_t*sse + numpy.sum(scipy.multiply(X,X*V))) + \
0.5 * logdetV - b0 * E_t + gammaln_an - an * scipy.log(bn) + an + \
D_gammaln_cn - cn * numpy.sum(scipy.log(dn))
# variational bound must grow!
if L_last > L:
# if this happens, then something has gone wrong....
file = open('ERROR_LOG','w')
file.write('Last bound %6.6f, current bound %6.6f' % (L, L_last))
file.close()
raise Exception('Variational bound should not reduce - see ERROR_LOG')
return
# stop if change in variation bound is < 0.001%
if abs(L_last - L) < abs(0.00001 * L):
break
# print L, L_last
L_last = L
    if iter == max_iter - 1:
warnings.warn('Bayes:maxIter ... Bayesian linear regression reached maximum number of iterations.')
# augment variational bound with constant terms
L = L - 0.5 * (N * numpy.log(2 * numpy.pi) - D) - scipy.special.gammaln(a0) + \
a0 * numpy.log(b0) + D * (-scipy.special.gammaln(c0) + c0 * numpy.log(d0))
return w, V, invV, logdetV, an, bn, E_a, L
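# --- Illustrative usage sketch (not part of the original module) ---
# Fits the ARD model to synthetic data; the design matrix, weights and noise
# level below are made up for demonstration. Inputs are numpy matrices, as
# assumed by the matrix-style products (X.T * X) used above.
#
# numpy.random.seed(0)
# N, D = 200, 5
# X = numpy.matrix(numpy.random.randn(N, D))
# true_w = numpy.matrix([[1.5], [0.], [-2.], [0.], [0.5]])
# y = X * true_w + 0.1 * numpy.matrix(numpy.random.randn(N, 1))
# w, V, invV, logdetV, an, bn, E_a, L = bayes_linear_fit_ard(X, y)
# print w.T  # weights of irrelevant columns are driven towards zero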
|
saigrain/CBVshrink
|
src/VBLinRegARD.py
|
Python
|
gpl-2.0
| 3,845 | 0.008583 |
import matplotlib, numpy
import CoolProp
Props = CoolProp.CoolProp.Props
from scipy.optimize import newton
def SimpleCycle(Ref,Te,Tc,DTsh,DTsc,eta_a,Ts_Ph='Ph',skipPlot=False,axis=None):
"""
This function plots a simple four-component cycle, on the current axis, or that given by the optional parameter *axis*
Required parameters:
* Ref : A string for the refrigerant
* Te : Evap Temperature in K
* Tc : Condensing Temperature in K
* DTsh : Evaporator outlet superheat in K
* DTsc : Condenser outlet subcooling in K
* eta_a : Adiabatic efficiency of compressor (no units) in range [0,1]
Optional parameters:
* Ts_Ph : 'Ts' for a Temperature-Entropy plot, 'Ph' for a Pressure-Enthalpy
* axis : An axis to use instead of the active axis
* skipPlot : If True, won't actually plot anything, just print COP
"""
T=numpy.zeros((6))
h=numpy.zeros_like(T)
p=numpy.zeros_like(T)
s=numpy.zeros_like(T)
T[1]=Te+DTsh
pe=Props('P','T',Te,'Q',1.0,Ref)
pc=Props('P','T',Tc,'Q',1.0,Ref)
h[1]=Props('H','T',T[1],'P',pe,Ref)
s[1]=Props('S','T',T[1],'P',pe,Ref)
T2s=newton(lambda T: Props('S','T',T,'P',pc,Ref)-s[1],T[1]+30)
h2s=Props('H','T',T2s,'P',pc,Ref)
h[2]=(h2s-h[1])/eta_a+h[1]
T[2]=Props('T','H',h[2],'P',pc,Ref)
s[2]=Props('S','T',T[2],'P',pc,Ref)
sbubble_c=Props('S','P',pc,'Q',0,Ref)
sdew_c=Props('S','P',pc,'Q',1,Ref)
sbubble_e=Props('S','P',pe,'Q',0,Ref)
sdew_e=Props('S','P',pe,'Q',1,Ref)
T[3]=Tc-DTsc
h[3]=Props('H','T',T[3],'P',pc,Ref)
s[3]=Props('S','T',T[3],'P',pc,Ref)
h[4]=h[3]
h[5]=h[1]
s[5]=s[1]
T[5]=T[1]
p=[numpy.nan,pe,pc,pc,pe,pe]
COP=(h[1]-h[4])/(h[2]-h[1])
COPH=(h[2]-h[3])/(h[2]-h[1])
hsatL=Props('H','T',Te,'Q',0,Ref)
hsatV=Props('H','T',Te,'Q',1,Ref)
ssatL=Props('S','T',Te,'Q',0,Ref)
ssatV=Props('S','T',Te,'Q',1,Ref)
vsatL=1/Props('D','T',Te,'Q',0,Ref)
vsatV=1/Props('D','T',Te,'Q',1,Ref)
x=(h[4]-hsatL)/(hsatV-hsatL)
s[4]=x*ssatV+(1-x)*ssatL
T[4]=x*Te+(1-x)*Te
print(COP,COPH)
if skipPlot==False:
if axis==None:
ax=matplotlib.pyplot.gca()
if Ts_Ph in ['ph','Ph']:
ax.plot(h,p)
elif Ts_Ph in ['Ts','ts']:
s=list(s)
T=list(T)
s.insert(5,sdew_e)
T.insert(5,Te)
s.insert(3,sbubble_c)
T.insert(3,Tc)
s.insert(3,sdew_c)
T.insert(3,Tc)
ax.plot(s[1::],T[1::],'b')
else:
raise TypeError('Type of Ts_Ph invalid')
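# --- Illustrative usage sketch (not part of the original module) ---
# Plots a basic R134a cycle on a pressure-enthalpy diagram; the operating
# conditions below are arbitrary example values.
#
# import matplotlib.pyplot as plt
# SimpleCycle('R134a', Te=273.15, Tc=313.15, DTsh=5.0, DTsc=5.0, eta_a=0.7,
#             Ts_Ph='Ph')
# plt.show()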
def TwoStage(Ref,Q,Te,Tc,DTsh,DTsc,eta_oi,f_p,Tsat_ic,DTsh_ic,Ts_Ph='Ph',prints=False,skipPlot=False,axis=None,**kwargs):
"""
This function plots a two-stage cycle, on the current axis, or that given by the optional parameter *axis*
Required parameters:
* Ref : Refrigerant [string]
* Q : Cooling capacity [W]
* Te : Evap Temperature [K]
* Tc : Condensing Temperature [K]
* DTsh : Evaporator outlet superheat [K]
* DTsc : Condenser outlet subcooling [K]
* eta_oi : Adiabatic efficiency of compressor (no units) in range [0,1]
* f_p : fraction of compressor power lost as ambient heat transfer in range [0,1]
* Tsat_ic : Saturation temperature corresponding to intermediate pressure [K]
* DTsh_ic : Superheating at outlet of intermediate stage [K]
Optional parameters:
* Ts_Ph : 'Ts' for a Temperature-Entropy plot, 'Ph' for a Pressure-Enthalpy
* prints : True to print out some values
* axis : An axis to use instead of the active axis
* skipPlot : If True, won't actually plot anything, just print COP
"""
T=numpy.zeros((8))
h=numpy.zeros_like(T)
p=numpy.zeros_like(T)
s=numpy.zeros_like(T)
rho=numpy.zeros_like(T)
T[0]=numpy.NAN
s[0]=numpy.NAN
T[1]=Te+DTsh
pe=Props('P','T',Te,'Q',1.0,Ref)
pc=Props('P','T',Tc,'Q',1.0,Ref)
pic=Props('P','T',Tsat_ic,'Q',1.0,Ref)
Tbubble_c=Props('T','P',pc,'Q',0,Ref)
Tbubble_e=Props('T','P',pe,'Q',0,Ref)
h[1]=Props('H','T',T[1],'P',pe,Ref)
s[1]=Props('S','T',T[1],'P',pe,Ref)
rho[1]=Props('D','T',T[1],'P',pe,Ref)
T[5]=Tbubble_c-DTsc
h[5]=Props('H','T',T[5],'P',pc,Ref)
s[5]=Props('S','T',T[5],'P',pc,Ref)
rho[5]=Props('D','T',T[5],'P',pc,Ref)
mdot=Q/(h[1]-h[5])
rho1=Props('D','T',T[1],'P',pe,Ref)
h2s=Props('H','S',s[1],'P',pic,Ref)
Wdot1=mdot*(h2s-h[1])/eta_oi
h[2]=h[1]+(1-f_p)*Wdot1/mdot
T[2]=Props('T','H',h[2],'P',pic,Ref)
s[2]=Props('S','T',T[2],'P',pic,Ref)
rho[2]=Props('D','T',T[2],'P',pic,Ref)
T[3]=288
p[3]=pic
h[3]=Props('H','T',T[3],'P',pic,Ref)
s[3]=Props('S','T',T[3],'P',pic,Ref)
rho[3]=Props('D','T',T[3],'P',pic,Ref)
rho3=Props('D','T',T[3],'P',pic,Ref)
    h4s=Props('H','S',s[3],'P',pc,Ref)   # isentropic state at condenser pressure (entropy input, not temperature)
Wdot2=mdot*(h4s-h[3])/eta_oi
h[4]=h[3]+(1-f_p)*Wdot2/mdot
T[4]=Props('T','H',h[4],'P',pc,Ref)
s[4]=Props('S','T',T[4],'P',pc,Ref)
rho[4]=Props('D','T',T[4],'P',pc,Ref)
sbubble_e=Props('S','T',Tbubble_e,'Q',0,Ref)
sbubble_c=Props('S','T',Tbubble_c,'Q',0,Ref)
sdew_e=Props('S','T',Te,'Q',1,Ref)
sdew_c=Props('S','T',Tc,'Q',1,Ref)
hsatL=Props('H','T',Tbubble_e,'Q',0,Ref)
hsatV=Props('H','T',Te,'Q',1,Ref)
ssatL=Props('S','T',Tbubble_e,'Q',0,Ref)
ssatV=Props('S','T',Te,'Q',1,Ref)
vsatL=1/Props('D','T',Tbubble_e,'Q',0,Ref)
vsatV=1/Props('D','T',Te,'Q',1,Ref)
x=(h[5]-hsatL)/(hsatV-hsatL)
s[6]=x*ssatV+(1-x)*ssatL
T[6]=x*Te+(1-x)*Tbubble_e
rho[6]=1.0/(x*vsatV+(1-x)*vsatL)
h[6]=h[5]
h[7]=h[1]
s[7]=s[1]
T[7]=T[1]
p=[numpy.nan,pe,pic,pic,pc,pc,pe,pe]
COP=Q/(Wdot1+Wdot2)
RE=h[1]-h[6]
if prints==True:
print('x5:',x)
print('COP:', COP)
print('COPH', (Q+Wdot1+Wdot2)/(Wdot1+Wdot2))
print(T[2]-273.15,T[4]-273.15,p[2]/p[1],p[4]/p[3])
print(mdot,mdot*(h[4]-h[5]),pic)
print('Vdot1',mdot/rho1,'Vdisp',mdot/rho1/(3500/60.)*1e6/0.7)
print('Vdot2',mdot/rho3,'Vdisp',mdot/rho3/(3500/60.)*1e6/0.7)
print(mdot*(h[4]-h[5]),Tc-273.15)
for i in range(1,len(T)-1):
print('%d & %g & %g & %g & %g & %g \\\\' %(i,T[i]-273.15,p[i],h[i],s[i],rho[i]))
else:
print(Tsat_ic,COP)
if skipPlot==False:
if axis==None:
ax=matplotlib.pyplot.gca()
else:
ax=axis
if Ts_Ph in ['ph','Ph']:
ax.plot(h,p)
elif Ts_Ph in ['Ts','ts']:
s_copy=s.copy()
T_copy=T.copy()
for i in range(1,len(s)-1):
ax.plot(s[i],T[i],'bo',mfc='b',mec='b')
dT=[0,-5,5,-20,5,5,5]
ds=[0,0.05,0,0,0,0,0]
ax.text(s[i]+ds[i],T[i]+dT[i],str(i))
s=list(s)
T=list(T)
s.insert(7,sdew_e)
T.insert(7,Te)
s.insert(5,sbubble_c)
T.insert(5,Tbubble_c)
s.insert(5,sdew_c)
T.insert(5,Tc)
ax.plot(s,T)
s=s_copy
T=T_copy
else:
raise TypeError('Type of Ts_Ph invalid')
return COP
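# Note: EconomizedCycle() below calls T_hp(Ref, h, p, T_guess), which is not defined in
# this file. A minimal sketch of what such a helper would look like (an assumption: it
# inverts h(T, p) for T with a Newton iteration, mirroring the newton() calls used above):
def T_hp(Ref, h, p, T_guess):
    # Solve Props('H','T',T,'P',p,Ref) == h for T, starting from T_guess
    return newton(lambda T: Props('H', 'T', T, 'P', p, Ref) - h, T_guess)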
def EconomizedCycle(Ref,Qin,Te,Tc,DTsh,DTsc,eta_oi,f_p,Ti,Ts_Ph='Ts',skipPlot=False,axis=None,**kwargs):
"""
This function plots an economized cycle, on the current axis, or that given by the optional parameter *axis*
Required parameters:
* Ref : Refrigerant [string]
* Qin : Cooling capacity [W]
* Te : Evap Temperature [K]
* Tc : Condensing Temperature [K]
* DTsh : Evaporator outlet superheat [K]
* DTsc : Condenser outlet subcooling [K]
* eta_oi : Adiabatic efficiency of compressor (no units) in range [0,1]
    * f_p : Two-element sequence with the fraction of compressor power lost as ambient heat transfer for each stage, each in range [0,1]
* Ti : Saturation temperature corresponding to intermediate pressure [K]
Optional parameters:
* Ts_Ph : 'Ts' for a Temperature-Entropy plot, 'Ph' for a Pressure-Enthalpy
* axis : An axis to use instead of the active axis
* skipPlot : If True, won't actually plot anything, just print COP
"""
m=1
T=numpy.zeros((11))
h=numpy.zeros_like(T)
p=numpy.zeros_like(T)
s=numpy.zeros_like(T)
rho=numpy.zeros_like(T)
T[0]=numpy.NAN
s[0]=numpy.NAN
T[1]=Te+DTsh
pe=Props('P','T',Te,'Q',1.0,Ref)
pc=Props('P','T',Tc,'Q',1.0,Ref)
pi=Props('P','T',Ti,'Q',1.0,Ref)
p[1]=pe
h[1]=Props('H','T',T[1],'P',pe,Ref)
s[1]=Props('S','T',T[1],'P',pe,Ref)
rho[1]=Props('D','T',T[1],'P',pe,Ref)
h2s=Props('H','S',s[1],'P',pi,Ref)
wdot1=(h2s-h[1])/eta_oi
h[2]=h[1]+(1-f_p[0])*wdot1
p[2]=pi
    T2s=Props('T','H',h2s,'P',pi,Ref)   # isentropic discharge temperature (T2s was otherwise undefined here)
    T[2]=T_hp(Ref,h[2],pi,T2s)
s[2]=Props('S','T',T[2],'P',pi,Ref)
rho[2]=Props('D','T',T[2],'P',pi,Ref)
T[5]=Tc-DTsc
h[5]=Props('H','T',T[5],'P',pc,Ref)
s[5]=Props('S','T',T[5],'P',pc,Ref)
rho[5]=Props('D','T',T[5],'P',pc,Ref)
p[5]=pc
p[6]=pi
h[6]=h[5]
p[7]=pi
p[8]=pi
p[6]=pi
T[7]=Ti
h[7]=Props('H','T',Ti,'Q',1,Ref)
s[7]=Props('S','T',Ti,'Q',1,Ref)
rho[7]=Props('D','T',Ti,'Q',1,Ref)
T[8]=Ti
h[8]=Props('H','T',Ti,'Q',0,Ref)
s[8]=Props('S','T',Ti,'Q',0,Ref)
rho[8]=Props('D','T',Ti,'Q',0,Ref)
x6=(h[6]-h[8])/(h[7]-h[8]) #Vapor Quality
s[6]=s[7]*x6+s[8]*(1-x6)
rho[6]=1.0/(x6/rho[7]+(1-x6)/rho[8])
T[6]=Ti
#Injection mass flow rate
x=m*(h[6]-h[8])/(h[7]-h[6])
p[3]=pi
h[3]=(m*h[2]+x*h[7])/(m+x)
T[3]=T_hp(Ref,h[3],pi,T[2])
s[3]=Props('S','T',T[3],'P',pi,Ref)
rho[3]=Props('D','T',T[3],'P',pi,Ref)
T4s=newton(lambda T: Props('S','T',T,'P',pc,Ref)-s[3],T[2]+30)
h4s=Props('H','T',T4s,'P',pc,Ref)
p[4]=pc
wdot2=(h4s-h[3])/eta_oi
h[4]=h[3]+(1-f_p[1])*wdot2
T[4]=T_hp(Ref,h[4],pc,T4s)
s[4]=Props('S','T',T[4],'P',pc,Ref)
rho[4]=Props('D','T',T[4],'P',pc,Ref)
p[9]=pe
h[9]=h[8]
T[9]=Te
hsatL_e=Props('H','T',Te,'Q',0,Ref)
hsatV_e=Props('H','T',Te,'Q',1,Ref)
ssatL_e=Props('S','T',Te,'Q',0,Ref)
ssatV_e=Props('S','T',Te,'Q',1,Ref)
vsatL_e=1/Props('D','T',Te,'Q',0,Ref)
vsatV_e=1/Props('D','T',Te,'Q',1,Ref)
x9=(h[9]-hsatL_e)/(hsatV_e-hsatL_e) #Vapor Quality
s[9]=ssatV_e*x9+ssatL_e*(1-x9)
rho[9]=1.0/(x9*vsatV_e+(1-x9)*vsatL_e)
s[10]=s[1]
T[10]=T[1]
h[10]=h[1]
p[10]=p[1]
Tbubble_e=Te
Tbubble_c=Tc
sbubble_e=Props('S','T',Tbubble_e,'Q',0,Ref)
sbubble_c=Props('S','T',Tbubble_c,'Q',0,Ref)
sdew_e=Props('S','T',Te,'Q',1,Ref)
sdew_c=Props('S','T',Tc,'Q',1,Ref)
Wdot1=m*wdot1
Wdot2=(m+x)*wdot2
if skipPlot==False:
if axis==None:
ax=matplotlib.pyplot.gca()
else:
ax=axis
if Ts_Ph in ['ph','Ph']:
ax.plot(h,p)
ax.set_yscale('log')
elif Ts_Ph in ['Ts','ts']:
ax.plot(numpy.r_[s[7],s[3]],numpy.r_[T[7],T[3]],'b')
s_copy=s.copy()
T_copy=T.copy()
dT=[0,-5,5,-12,5,12,-12,0,0,0]
ds=[0,0.05,0.05,0,0.05,0,0.0,0.05,-0.05,-0.05]
for i in range(1,len(s)-1):
ax.plot(s[i],T[i],'bo',mfc='b',mec='b')
ax.text(s[i]+ds[i],T[i]+dT[i],str(i),ha='center',va='center')
s=list(s)
T=list(T)
s.insert(10,sdew_e)
T.insert(10,Te)
s.insert(5,sbubble_c)
T.insert(5,Tbubble_c)
s.insert(5,sdew_c)
T.insert(5,Tc)
ax.plot(s,T,'b')
s=s_copy
T=T_copy
else:
raise TypeError('Type of Ts_Ph invalid')
COP=m*(h[1]-h[9])/(m*(h[2]-h[1])+(m+x)*(h[4]-h[3]))
for i in range(1,len(T)-1):
print('%d & %g & %g & %g & %g & %g \\\\' %(i,T[i]-273.15,p[i],h[i],s[i],rho[i]))
print(x,m*(h[1]-h[9]),(m*(h[2]-h[1])+(m+x)*(h[4]-h[3])),COP)
mdot=Qin/(h[1]-h[9])
mdot_inj=x*mdot
print('x9',x9,)
print('Qcond',(mdot+mdot_inj)*(h[4]-h[5]),'T4',T[4]-273.15)
print(mdot,mdot+mdot_inj)
f=3500/60.
eta_v=0.7
print('Vdisp1: ',mdot/(rho[1]*f*eta_v)*1e6,'cm^3')
print('Vdisp2: ',(mdot+mdot_inj)/(rho[1]*f*eta_v)*1e6,'cm^3')
return COP
if __name__=='__main__':
    import CoolProp.Plots.Plots   # make sure the Plots submodule is imported (plain `import CoolProp` may not pull it in)
    Ph = CoolProp.Plots.Plots.Ph
Ts = CoolProp.Plots.Plots.Ts
Ref='R290'
fig=matplotlib.pyplot.figure(figsize=(4,3))
ax=fig.add_axes((0.15,0.15,0.8,0.8))
Ph(Ref,Tmin=273.15-30,hbounds=[0,600],axis=ax)
COP=TwoStage('Propane',10000,273.15-5,273.15+43.3,5,7,0.7,0.3,15+273.15,3,prints = True)
matplotlib.pyplot.show()
Ref='R290'
fig=matplotlib.pyplot.figure(figsize=(4,3))
ax=fig.add_axes((0.15,0.15,0.8,0.8))
Ph(Ref,Tmin=273.15-30,hbounds=[0,600],axis=ax)
COP=SimpleCycle(Ref,273.15-5,273.15+45,5,7,0.7,Ts_Ph='Ph')
matplotlib.pyplot.show()
Ref='R410A'
fig=matplotlib.pyplot.figure(figsize=(4,3))
ax=fig.add_axes((0.15,0.15,0.8,0.8))
Ts(Ref,Tmin=273.15-100,sbounds=[0,600],axis=ax)
COP=SimpleCycle(Ref,273.15-5,273.15+45,5,7,0.7,Ts_Ph='Ts')
matplotlib.pyplot.show()
## for x in numpy.linspace(0,1):
## Ref='REFPROP-MIX:R152A[%g]&R32[%g]' %(x,1-x)
## COP=SimpleCycle(273.15+8,273.15+44,5,7,0.7,skipPlot=True,Ts_Ph='Ph')
## matplotlib.pyplot.show()
|
ibell/coolprop
|
wrappers/Python/CoolProp/Plots/SimpleCycles.py
|
Python
|
mit
| 13,341 | 0.069935 |
# -*- coding: utf-8 -*-
import datetime
from django.db import models
from south.db import db
from south.v2 import SchemaMigration
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'NodeGroup.maas_url'
db.add_column(u'maasserver_nodegroup', 'maas_url',
self.gf('django.db.models.fields.CharField')(default=u'', max_length=255, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'NodeGroup.maas_url'
db.delete_column(u'maasserver_nodegroup', 'maas_url')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'maasserver.bootimage': {
'Meta': {'unique_together': "((u'nodegroup', u'architecture', u'subarchitecture', u'release', u'purpose'),)", 'object_name': 'BootImage'},
'architecture': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nodegroup': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['maasserver.NodeGroup']"}),
'purpose': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'release': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'subarchitecture': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'maasserver.componenterror': {
'Meta': {'object_name': 'ComponentError'},
'component': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'}),
'created': ('django.db.models.fields.DateTimeField', [], {}),
'error': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {})
},
u'maasserver.config': {
'Meta': {'object_name': 'Config'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'value': ('maasserver.fields.JSONObjectField', [], {'null': 'True'})
},
u'maasserver.dhcplease': {
'Meta': {'object_name': 'DHCPLease'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'unique': 'True', 'max_length': '15'}),
'mac': ('maasserver.fields.MACAddressField', [], {}),
'nodegroup': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['maasserver.NodeGroup']"})
},
u'maasserver.filestorage': {
'Meta': {'object_name': 'FileStorage'},
'content': ('metadataserver.fields.BinaryField', [], {}),
'filename': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'maasserver.macaddress': {
'Meta': {'object_name': 'MACAddress'},
'created': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mac_address': ('maasserver.fields.MACAddressField', [], {'unique': 'True'}),
'node': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['maasserver.Node']"}),
'updated': ('django.db.models.fields.DateTimeField', [], {})
},
u'maasserver.node': {
'Meta': {'object_name': 'Node'},
'after_commissioning_action': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'architecture': ('django.db.models.fields.CharField', [], {'default': "u'i386/generic'", 'max_length': '31'}),
'cpu_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {}),
'distro_series': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '10', 'null': 'True', 'blank': 'True'}),
'error': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255', 'blank': 'True'}),
'hardware_details': ('maasserver.fields.XMLField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'hostname': ('django.db.models.fields.CharField', [], {'default': "u''", 'unique': 'True', 'max_length': '255', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'memory': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'netboot': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'nodegroup': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['maasserver.NodeGroup']", 'null': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'power_parameters': ('maasserver.fields.JSONObjectField', [], {'default': "u''", 'blank': 'True'}),
'power_type': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '10', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '10'}),
'system_id': ('django.db.models.fields.CharField', [], {'default': "u'node-2cd56f00-3548-11e2-b1cb-9c4e363b1c94'", 'unique': 'True', 'max_length': '41'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['maasserver.Tag']", 'symmetrical': 'False'}),
'token': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['piston.Token']", 'null': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {})
},
u'maasserver.nodegroup': {
'Meta': {'object_name': 'NodeGroup'},
'api_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '18'}),
'api_token': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['piston.Token']", 'unique': 'True'}),
'cluster_name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {}),
'dhcp_key': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'maas_url': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'updated': ('django.db.models.fields.DateTimeField', [], {}),
'uuid': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '36'})
},
u'maasserver.nodegroupinterface': {
'Meta': {'unique_together': "((u'nodegroup', u'interface'),)", 'object_name': 'NodeGroupInterface'},
'broadcast_ip': ('django.db.models.fields.GenericIPAddressField', [], {'default': 'None', 'max_length': '39', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'interface': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255', 'blank': 'True'}),
'ip': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39'}),
'ip_range_high': ('django.db.models.fields.GenericIPAddressField', [], {'default': 'None', 'max_length': '39', 'null': 'True', 'blank': 'True'}),
'ip_range_low': ('django.db.models.fields.GenericIPAddressField', [], {'default': 'None', 'max_length': '39', 'null': 'True', 'blank': 'True'}),
'management': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'nodegroup': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['maasserver.NodeGroup']"}),
'router_ip': ('django.db.models.fields.GenericIPAddressField', [], {'default': 'None', 'max_length': '39', 'null': 'True', 'blank': 'True'}),
'subnet_mask': ('django.db.models.fields.GenericIPAddressField', [], {'default': 'None', 'max_length': '39', 'null': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {})
},
u'maasserver.sshkey': {
'Meta': {'unique_together': "((u'user', u'key'),)", 'object_name': 'SSHKey'},
'created': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.TextField', [], {}),
'updated': ('django.db.models.fields.DateTimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
u'maasserver.tag': {
'Meta': {'object_name': 'Tag'},
'comment': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {}),
'definition': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'kernel_opts': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '256'}),
'updated': ('django.db.models.fields.DateTimeField', [], {})
},
u'maasserver.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'piston.consumer': {
'Meta': {'object_name': 'Consumer'},
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '18'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'secret': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'pending'", 'max_length': '16'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'consumers'", 'null': 'True', 'to': "orm['auth.User']"})
},
'piston.token': {
'Meta': {'object_name': 'Token'},
'callback': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'callback_confirmed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'consumer': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['piston.Consumer']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_approved': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '18'}),
'secret': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'timestamp': ('django.db.models.fields.IntegerField', [], {'default': '1353659487L'}),
'token_type': ('django.db.models.fields.IntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'tokens'", 'null': 'True', 'to': "orm['auth.User']"}),
'verifier': ('django.db.models.fields.CharField', [], {'max_length': '10'})
}
}
complete_apps = ['maasserver']
|
cloudbase/maas
|
src/maasserver/migrations/0046_add_nodegroup_maas_url.py
|
Python
|
agpl-3.0
| 15,514 | 0.007413 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('freebasics', '0005_remove_selected_template_field'),
]
operations = [
migrations.AlterField(
model_name='freebasicstemplatedata',
name='site_name_url',
field=models.CharField(max_length=255, unique=True, null=True, blank=True),
),
]
|
praekeltfoundation/mc2-freebasics
|
freebasics/migrations/0006_change_site_url_field_type.py
|
Python
|
bsd-2-clause
| 477 | 0.002096 |
#!/usr/bin/env python
import os
import sys
sys.path.insert(
0,
os.path.join(
os.path.dirname(os.path.abspath(__file__)), '..', '..', '..', 'common',
'security-features', 'tools'))
import generate
class ReferrerPolicyConfig(object):
def __init__(self):
self.selection_pattern = \
'%(source_context_list)s.%(delivery_type)s/' + \
'%(delivery_value)s/' + \
'%(subresource)s/' + \
'%(origin)s.%(redirection)s.%(source_scheme)s'
self.test_file_path_pattern = 'gen/' + self.selection_pattern + '.html'
self.test_description_template = 'Referrer Policy: Expects %(expectation)s for %(subresource)s to %(origin)s origin and %(redirection)s redirection from %(source_scheme)s context.'
self.test_page_title_template = 'Referrer-Policy: %s'
self.helper_js = '/referrer-policy/generic/test-case.sub.js'
# For debug target only.
self.sanity_checker_js = '/referrer-policy/generic/sanity-checker.js'
self.spec_json_js = '/referrer-policy/spec_json.js'
self.test_case_name = 'TestCase'
script_directory = os.path.dirname(os.path.abspath(__file__))
self.spec_directory = os.path.abspath(
os.path.join(script_directory, '..', '..'))
if __name__ == '__main__':
generate.main(ReferrerPolicyConfig())
|
notriddle/servo
|
tests/wpt/web-platform-tests/referrer-policy/generic/tools/generate.py
|
Python
|
mpl-2.0
| 1,383 | 0.001446 |
import os.path
import shutil
import zipfile
import click
from pros.config import ConfigNotFoundException
from .depot import Depot
from ..templates import BaseTemplate, Template, ExternalTemplate
from pros.common.utils import logger
class LocalDepot(Depot):
def fetch_template(self, template: BaseTemplate, destination: str, **kwargs) -> Template:
if 'location' not in kwargs:
logger(__name__).debug(f"Template not specified. Provided arguments: {kwargs}")
raise KeyError('Location of local template must be specified.')
location = kwargs['location']
if os.path.isdir(location):
location_dir = location
if not os.path.isfile(os.path.join(location_dir, 'template.pros')):
raise ConfigNotFoundException(f'A template.pros file was not found in {location_dir}.')
template_file = os.path.join(location_dir, 'template.pros')
elif zipfile.is_zipfile(location):
with zipfile.ZipFile(location) as zf:
with click.progressbar(length=len(zf.namelist()),
label=f"Extracting {location}") as progress_bar:
for file in zf.namelist():
zf.extract(file, path=destination)
progress_bar.update(1)
template_file = os.path.join(destination, 'template.pros')
location_dir = destination
elif os.path.isfile(location):
location_dir = os.path.dirname(location)
template_file = location
elif isinstance(template, ExternalTemplate):
location_dir = template.directory
template_file = template.save_file
else:
raise ValueError(f"The specified location was not a file or directory ({location}).")
if location_dir != destination:
n_files = len([os.path.join(dp, f) for dp, dn, fn in os.walk(location_dir) for f in fn])
with click.progressbar(length=n_files, label='Copying to local cache') as pb:
def my_copy(*args):
pb.update(1)
shutil.copy2(*args)
shutil.copytree(location_dir, destination, copy_function=my_copy)
return ExternalTemplate(file=template_file)
def __init__(self):
super().__init__('local', 'local')
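
# Hypothetical usage sketch (illustrative only; the template object and paths are
# assumptions, not taken from this file):
#
#   depot = LocalDepot()
#   template = depot.fetch_template(some_base_template, '/tmp/pros-cache',
#                                   location='/path/to/kernel-template.zip')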
|
purduesigbots/pros-cli
|
pros/conductor/depots/local_depot.py
|
Python
|
mpl-2.0
| 2,366 | 0.003381 |
import numpy as np
def array_generator():
array = np.array([(1, 2, 3, 4, 5), (10, 20, 30, 40, 50)])
return array
def multiply_by_number(array, number):
print(array)
multiplied = array * number
print(multiplied)
return multiplied
def divide_by_number(array, number):
    # Either the divisor or the elements of the array need to be floats
    # to get a float result (an integer array divided by an integer truncates here)
print(array)
multiplied = array / number
print(multiplied)
return multiplied
def addition(array_1, array_2):
return array_1 + array_2
def elemtwise_mul(array_1, array_2):
return array_1 * array_2
if __name__ == "__main__":
# -----------------------------------------------------
x = array_generator()
two_x = multiply_by_number(x, 2)
half_x = divide_by_number(x, 2)
added = addition(two_x, half_x)
element_multiplied = elemtwise_mul(x, two_x)
# -----------------------------------------------------
print('Y')
y = np.array([(1, 2 ,3), (4, 5, 6)]) # !
print(y)
print('Z')
z = np.array([(1, 2), (3, 4), (5, 6)]) # !
print(z)
print('D')
d = np.dot(y, z)
print(d)
|
arcyfelix/Courses
|
17-06-05-Machine-Learning-For-Trading/25_arithmetic operations.py
|
Python
|
apache-2.0
| 1,058 | 0.040643 |
"""
calc.py
>>> import calc
>>> s='2+4+8+7-5+3-1'
>>> calc.calc(s)
18
>>> calc.calc('2*3+4-5*4')
-10
"""
import re
from operator import concat
operator_function_table = { '+' : lambda x, y: x + y,
'-' : lambda x, y: x - y,
'*' : lambda x, y: x * y,
'/' : lambda x, y: x / y }
op_re_add_sub = '\+|\-'
op_re_mult_div = '\*|\/'
op_re = op_re_add_sub + '|' + op_re_mult_div
def calc(s):
add_sub_operands = re.split(op_re_add_sub, s)
add_sub_operators = re.findall(op_re_add_sub, s)
post_mult_div = [str(calc_helper(operand)) for operand in add_sub_operands]
new_calc_l = [reduce(concat, list(x)) for x in zip(post_mult_div[:-1], add_sub_operators)]
new_calc_l.extend(post_mult_div[-1])
new_calc_s = reduce(concat, new_calc_l)
result = calc_helper(new_calc_s)
return result
def calc_helper(s):
operands = [int(k) for k in re.split(op_re, s)]
operators = filter(lambda x: x, re.split('\d', s))
operator_functions = [operator_function_table[x] for x in operators]
for f in operator_functions:
result = apply(f, [operands[0], operands[1]])
operands = [result] + operands[2:]
final_result = operands[0]
return final_result
|
clemfeelsgood/hackathontools
|
code_challenges/mopub/calc.py
|
Python
|
mit
| 1,320 | 0.012121 |
#!/usr/bin/env python
def main():
import sys
raw_data = load_csv(sys.argv[1])
create_table(raw_data)
def get_stencil_num(k):
# add the stencil operator
if k['Stencil Kernel coefficients'] in 'constant':
if int(k['Stencil Kernel semi-bandwidth'])==4:
stencil = 0
else:
stencil = 1
elif 'no-symmetry' in k['Stencil Kernel coefficients']:
stencil = 5
elif 'sym' in k['Stencil Kernel coefficients']:
if int(k['Stencil Kernel semi-bandwidth'])==1:
stencil = 3
else:
stencil = 4
else:
stencil = 2
return stencil
def create_table(raw_data):
from operator import itemgetter
import matplotlib.pyplot as plt
import pylab
from csv import DictWriter
ts_l = set()
for k in raw_data:
ts_l.add(k['Time stepper orig name'])
ts_l = list(ts_l)
#tb_l = [3, 7]
tb_l = set()
for k in raw_data:
tb_l.add(k['Time unroll'])
tb_l = list(tb_l)
tb_l = map(int,tb_l)
tb_l.sort()
#print tb_l
req_fields = [('WD main-loop RANK0 MStencil/s MAX', 2), ('Time stepper orig name', 0), ('Stencil Kernel semi-bandwidth', 1), ('Stencil Kernel coefficients', 0), ('Precision', 0), ('Time unroll',1), ('Number of time steps',1), ('Number of tests',1), ('Local NX',1), ('Local NY',1), ('Local NZ',1), ('Total Memory Transfer', 2), ('Thread group size' ,1), ('Intra-diamond prologue/epilogue MStencils',1), ('Total cache block size (kB):',1)]
data = []
for k in raw_data:
tup = dict()
# defaults
if k['Intra-diamond prologue/epilogue MStencils'] == '':
k['Intra-diamond prologue/epilogue MStencils'] = 0
if k['Total cache block size (kB):'] == '':
k['Total cache block size (kB):'] = 0
        # add the general fields
for f in req_fields:
try:
v = k[f[0]]
if f[1]==1: v = int(k[f[0]])
if f[1]==2: v = float(k[f[0]])
except:
print f[0]
tup[f[0]] = v
# add the stencil operator
tup['Kernel'] = get_stencil_num(k)
data.append(tup)
# data = sorted(data, key=itemgetter(0, 1, 2, 3,4))
# for i in data: print i
data2 = []
for tup in data:
if tup['Local NX'] > 96:
tup['Actual Bytes/LUP'] = actual_BpU(tup)
tup['Model'] = models(tup)
# model error
tup['Err %'] = 100 * (tup['Model'] - tup['Actual Bytes/LUP'])/tup['Actual Bytes/LUP']
tup['D_width'] = (tup['Time unroll']+1)*2*tup['Stencil Kernel semi-bandwidth']
tup['Performance'] = tup['WD main-loop RANK0 MStencil/s MAX']
data2.append(tup)
#for i in data2: print i
from operator import itemgetter
data2 = sorted(data2, key=itemgetter('Time stepper orig name', 'Kernel', 'Thread group size', 'Local NX', 'D_width'))
fields = ['Time stepper orig name', 'Kernel', 'Thread group size', 'Local NX', 'Precision', 'D_width', 'Total cache block size (kB):', 'Actual Bytes/LUP', 'Model', 'Err %', 'Performance']
with open('Arithmetic_intensity_model.csv', 'w') as output_file:
r = DictWriter(output_file,fieldnames=fields)
r.writeheader()
for k in data2:
k2 = dict()
for f in k.keys():
for f2 in fields:
if f == f2:
k2[f] = k[f]
r.writerow(k2)
def actual_BpU(tup):
total_mem = tup['Total Memory Transfer']
R = tup['Stencil Kernel semi-bandwidth']
nt = tup['Number of time steps'] * tup['Number of tests']
nx = tup['Local NX']
ny = tup['Local NY']
nz = tup['Local NZ']
oh = tup['Intra-diamond prologue/epilogue MStencils']
stencil_size = 2*ny*nz + ny*nz*(nx+2*R)
BpU = (total_mem * 10**9) / ( stencil_size * nt - oh*10**6*tup['Number of tests'])
return BpU
def models(tup):
if tup['Precision'] == 'DP': word_size = 8
elif tup['Precision'] == 'SP': word_size = 4
R = tup['Stencil Kernel semi-bandwidth']
TB = tup['Time unroll']
ny = tup['Local NY']
# number of streamed copies of the domain (buffers)
if tup['Kernel'] == 0: nb = 3
elif tup['Kernel'] == 1: nb = 2
elif tup['Kernel'] == 4: nb = 2+13
elif tup['Kernel'] == 5: nb = 2+7
width = (TB+1)*2*R
YT_section = float((TB+1)**2 * 2 * R)
# no temporal blocking model
if tup['Time stepper orig name'] == 'Naive':
bpu = (1 + nb) * word_size
else: # temporal blocking model
bpu = ( ((width - 2*R) + width) + (nb*width + 2*R) ) * word_size / YT_section
return bpu
def load_csv(data_file):
from csv import DictReader
with open(data_file, 'rb') as output_file:
data = DictReader(output_file)
data = [k for k in data]
return data
if __name__ == "__main__":
main()
# if 'constant' in tup['Stencil Kernel coefficients']:
# BpU1 = (YT_section + width + 2*R + (t_order-1)*width) * word_size / YT_section
# tup['No TS'] = BpU1
#
# BpU2 = (width + 2*R + (t_order-1)*width) * word_size / YT_section
# tup['All TS'] = BpU2
#
# BpU3 = ((width - 2*R) + 2*width + 2*R + (t_order-1)*width) * word_size / YT_section
# tup['Interior TS'] = BpU3
#
# BpU4 = ( ((width - 2*R) + width) + ((t_order+1)*width + 2*R) ) * word_size / YT_section
# tup['Interior TS 2'] = BpU4
#
# elif 'variable' in tup['Stencil Kernel coefficients']:
# BpU1 = (YT_section + width + 2*R + width*(6*R+1) + (t_order-1)*width) * word_size / YT_section
# tup['No TS'] = BpU1
#
# BpU2 = (width + 2*R + width*(6*R+1) + (t_order-1)*width) * word_size / YT_section
# tup['All TS'] = BpU2
#
# BpU3 = ((width - 2*R) + 2*width + 2*R + width*(6*R+1) + (t_order-1)*width) * word_size / YT_section
# tup['Interior TS'] = BpU3
#
# BpU4 = ( ((width - 2*R) + width) + ((t_order+1)*width + 2*R) + ((R*6+1)*width) ) * word_size / YT_section
# tup['Interior TS 2'] = BpU4
|
tareqmalas/girih
|
scripts/sisc/paper_bytes_requirement_analysis.py
|
Python
|
bsd-3-clause
| 6,160 | 0.010714 |
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from ..lib import elasticbeanstalk
from ..core import io
from ..resources.strings import prompts, strings
from ..objects.exceptions import TimeoutError
from . import commonops
def scale(app_name, env_name, number, confirm, timeout=None):
options = []
# get environment
env = elasticbeanstalk.describe_configuration_settings(
app_name, env_name
)['OptionSettings']
# if single instance, offer to switch to load-balanced
namespace = 'aws:elasticbeanstalk:environment'
setting = next((n for n in env if n["Namespace"] == namespace), None)
value = setting['Value']
if value == 'SingleInstance':
if not confirm:
## prompt to switch to LoadBalanced environment type
io.echo(prompts['scale.switchtoloadbalance'])
io.log_warning(prompts['scale.switchtoloadbalancewarn'])
switch = io.get_boolean_response()
if not switch:
return
options.append({'Namespace': namespace,
'OptionName': 'EnvironmentType',
'Value': 'LoadBalanced'})
# change autoscaling min AND max to number
namespace = 'aws:autoscaling:asg'
max = 'MaxSize'
min = 'MinSize'
for name in [max, min]:
options.append(
{'Namespace': namespace,
'OptionName': name,
'Value': str(number)
}
)
request_id = elasticbeanstalk.update_environment(env_name, options)
try:
commonops.wait_for_success_events(request_id,
timeout_in_minutes=timeout or 5,
can_abort=True)
except TimeoutError:
io.log_error(strings['timeout.error'])
|
AccelAI/accel.ai
|
flask-aws/lib/python2.7/site-packages/ebcli/operations/scaleops.py
|
Python
|
mit
| 2,296 | 0.001742 |
#!/usr/bin/python
# Copyright 2014 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Script for generating the Android framework's version of Skia from gyp
files.
"""
import android_framework_gyp
import os
import shutil
import sys
import tempfile
# Find the top of trunk
SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__))
SKIA_DIR = os.path.normpath(os.path.join(SCRIPT_DIR, os.pardir, os.pardir,
os.pardir))
# Find the directory with our helper files, and add it to the path.
GYP_GEN_DIR = os.path.join(SKIA_DIR, 'platform_tools', 'android', 'gyp_gen')
sys.path.append(GYP_GEN_DIR)
import gypd_parser
import generate_user_config
import makefile_writer
import vars_dict_lib
# Folder containing all gyp files and generated gypd files.
GYP_FOLDER = 'gyp'
# TODO(scroggo): Update the docstrings to match the style guide:
# http://google-styleguide.googlecode.com/svn/trunk/pyguide.html#Comments
def clean_gypd_files(folder):
"""
Remove the gypd files generated by android_framework_gyp.main().
@param folder Folder in which to delete all files ending with 'gypd'.
"""
assert os.path.isdir(folder)
files = os.listdir(folder)
for f in files:
if f.endswith('gypd'):
os.remove(os.path.join(folder, f))
def generate_var_dict(target_dir, target_file, skia_arch_type, have_neon):
"""
Create a VarsDict for a particular arch type. Each paramater is passed
directly to android_framework_gyp.main().
@param target_dir Directory containing gyp files.
@param target_file Target gyp file.
@param skia_arch_type Target architecture.
@param have_neon Whether the target should build for neon.
@return a VarsDict containing the variable definitions determined by gyp.
"""
result_file = android_framework_gyp.main(target_dir, target_file,
skia_arch_type, have_neon)
var_dict = vars_dict_lib.VarsDict()
gypd_parser.parse_gypd(var_dict, result_file)
clean_gypd_files(target_dir)
print '.',
return var_dict
def main(target_dir=None):
"""
Read gyp files and create Android.mk for the Android framework's
external/skia.
@param target_dir Directory in which to place 'Android.mk'. If None, the file
will be placed in skia's root directory.
"""
# Create a temporary folder to hold gyp and gypd files. Create it in SKIA_DIR
# so that it is a sibling of gyp/, so the relationships between gyp files and
# other files (e.g. platform_tools/android/gyp/dependencies.gypi, referenced
# by android_deps.gyp as a relative path) is unchanged.
# Use mkdtemp to find an unused folder name, but then delete it so copytree
# can be called with a non-existent directory.
tmp_folder = tempfile.mkdtemp(dir=SKIA_DIR)
os.rmdir(tmp_folder)
shutil.copytree(os.path.join(SKIA_DIR, GYP_FOLDER), tmp_folder)
try:
main_gyp_file = 'android_framework_lib.gyp'
print 'Creating Android.mk',
# Generate a separate VarsDict for each architecture type. For each
# archtype:
# 1. call android_framework_gyp.main() to generate gypd files
# 2. call parse_gypd to read those gypd files into the VarsDict
# 3. delete the gypd files
#
# Once we have the VarsDict for each architecture type, we combine them all
# into a single Android.mk file, which can build targets of any
# architecture type.
    # The default uses a non-existent archtype, to find all the general
# variable definitions.
default_var_dict = generate_var_dict(tmp_folder, main_gyp_file, 'other',
False)
arm_var_dict = generate_var_dict(tmp_folder, main_gyp_file, 'arm', False)
arm_neon_var_dict = generate_var_dict(tmp_folder, main_gyp_file, 'arm',
True)
x86_var_dict = generate_var_dict(tmp_folder, main_gyp_file, 'x86', False)
mips_var_dict = generate_var_dict(tmp_folder, main_gyp_file, 'mips', False)
arm64_var_dict = generate_var_dict(tmp_folder, main_gyp_file, 'arm64',
False)
# Compute the intersection of all targets. All the files in the intersection
# should be part of the makefile always. Each dict will now contain trimmed
# lists containing only variable definitions specific to that configuration.
var_dict_list = [default_var_dict, arm_var_dict, arm_neon_var_dict,
x86_var_dict, mips_var_dict, arm64_var_dict]
common = vars_dict_lib.intersect(var_dict_list)
# Create SkUserConfig
user_config = os.path.join(SKIA_DIR, 'include', 'config', 'SkUserConfig.h')
if target_dir:
dst_dir = target_dir
else:
dst_dir = os.path.join(SKIA_DIR, 'include', 'core')
generate_user_config.generate_user_config(
original_sk_user_config=user_config, target_dir=dst_dir,
ordered_set=common.DEFINES)
# Now that the defines have been written to SkUserConfig, they are not
# needed in Android.mk.
common.DEFINES.reset()
# Further trim arm_neon_var_dict with arm_var_dict. After this call,
# arm_var_dict (which will now be the intersection) includes all definitions
# used by both arm and arm + neon, and arm_neon_var_dict will only contain
# those specific to arm + neon.
arm_var_dict = vars_dict_lib.intersect([arm_var_dict, arm_neon_var_dict])
# Now create a list of VarsDictData holding everything but common.
deviations_from_common = []
deviations_from_common.append(makefile_writer.VarsDictData(
arm_var_dict, 'arm'))
deviations_from_common.append(makefile_writer.VarsDictData(
arm_neon_var_dict, 'arm', 'ARCH_ARM_HAVE_NEON'))
deviations_from_common.append(makefile_writer.VarsDictData(x86_var_dict,
'x86'))
# Currently, x86_64 is identical to x86
deviations_from_common.append(makefile_writer.VarsDictData(x86_var_dict,
'x86_64'))
deviations_from_common.append(makefile_writer.VarsDictData(mips_var_dict,
'mips'))
deviations_from_common.append(makefile_writer.VarsDictData(arm64_var_dict,
'arm64'))
makefile_writer.write_android_mk(target_dir=target_dir,
common=common, deviations_from_common=deviations_from_common)
finally:
shutil.rmtree(tmp_folder)
if __name__ == '__main__':
main()
|
llluiop/skia
|
platform_tools/android/bin/gyp_to_android.py
|
Python
|
bsd-3-clause
| 6,621 | 0.00589 |
"""
Code : Removes the dependency on the Kodak Bank default Excel parser macros, using just GNU/Linux to achieve it.
Authors : Ramaseshan, Anandhamoorthy , Engineers, Fractalio Data Pvt Ltd, Magadi, Karnataka.
Licence : GNU GPL v3.
Code Repo URL : https://github.com/ramaseshan/kodak_bank_excel_parser
"""
import pyexcel as pe
import pyexcel.ext.xls
import unicodedata
import sys
import time
def delete_content(pfile):
pfile.seek(0)
pfile.truncate()
filename = sys.argv[1]
fileout = filename.split('.')[0]+".txt"
print "Reading file ",filename
records = pe.get_array(file_name=filename)
f = open(fileout,'w')
print "Starting to process data. Hold your breath"
for count,rec in enumerate(records[1:]):
rec[0] = "DATALIFE"
rec[1] = "RPAY"
rec[5] = "04182010000104"
rec[4] = time.strftime("%d/%m/%Y")
line = ""
for value in rec:
if value and type(value) is unicode:
value = unicodedata.normalize('NFKD', value).encode('ascii','ignore')
if rec[6] % 2 == 0:
rec[6] = int(rec[6])
# Cross check payment types with mahesh
if rec[2] == "NEFT" or rec[2] == "IFT":
line = line + str(value)+"~"
else:
print "Your Payment Type is Wrong in column %d. Please correct it and run the script again."%(count+2)
print "Exiting Script"
delete_content(f)
f.close()
sys.exit()
f.write(line[:-1])
f.write("\n")
f.close()
print "Finished writing ",fileout
|
AnandMoorthy/kodak_bank_excel_parser
|
kodak_excel_parser.py
|
Python
|
gpl-3.0
| 1,376 | 0.025436 |
from py4j.java_gateway import JavaGateway, GatewayParameters
gateway = JavaGateway(gateway_parameters=GatewayParameters(port=25333))
doc1 = gateway.jvm.gate.Factory.newDocument("initial text")
print(doc1.getContent().toString())
doc2 = gateway.jvm.gate.plugin.python.PythonSlave.loadDocument("docs/doc1.xml")
print(doc2.getContent().toString())
js1 = gateway.jvm.gate.plugin.python.PythonSlave.getBdocDocumentJson(doc2)
print(js1)
gateway.close()
|
GateNLP/gateplugin-python
|
examples/pythonSlaveMaster.py
|
Python
|
lgpl-3.0
| 469 | 0.002132 |
from django.http import HttpResponse,HttpResponseRedirect
from django.shortcuts import render_to_response
from django import forms
from django.forms import ModelForm
from django.db.models import F
from django.db import connection
from django.utils import simplejson
from django.contrib import messages
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
from django.template import RequestContext
from django.core.exceptions import ObjectDoesNotExist
from django.core.mail import send_mass_mail
from django.utils.translation import ugettext as _
from notebook.notes.models import create_model, create_model_form
from notebook.scraps.models import Scrap, Folder
from notebook.notes.views import getT, getW, getNote, get_public_notes, get_public_tags, remove_private_tag_notes, __get_ws_tags
from notebook.notes.views import folders_index, settings_tag_add, settings_tag_update, settings_tag, settings_tags
from notebook.notes.views import getSearchResults, getlogger, __getQStr, __get_notes_context
from notebook.notes.views import __get_folder_context, __get_pre_url
import notebook
import datetime
from datetime import date
log = getlogger('scraps.views')
#this method is used for processing the request users send via the browser button
@login_required
def add_scrap(request):
username = request.user.username
N = getNote(username, 'scrapbook')
T = getT(username)
#W = getW(username)
#w = W.objects.get(name='scrapbook')
if request.method == 'POST':
tags = T.objects.all()
        #the form makes tags a required field, so skip the form here and reuse the add_note code from notebook.notes.views for adding a snippet
#AddNForm = create_model_form("AddNForm_add_scrap_post_"+str(username), N, fields={'tags':forms.ModelMultipleChoiceField(queryset=tags)})
n = N()
post = request.POST.copy()
tag_names = post.getlist('item[tags][]')
tags = []
for tag_name in tag_names:
t, created = T.objects.get_or_create(name=tag_name)
#==============Don't need below any more since add_tags will do this logic=================================================================
# if created or not w.tags.filter(name=t.name).exists():
# w.tags.add(t)
#===============================================================================
#tags.append(t.id)
tags.append(t.name)
#if not tag_names:
# tags = [T.objects.get(name='untagged').id]
if not tags or (len(tags) == 1 and tags[0] == u''):
tags = None
#f = AddNForm(post, instance=n)
#log.debug("f.errors:"+str(f.errors))
#TODO:handle errors such as url broken
#n = f.save(commit=False)
n.title = post.get('title')
n.desc = post.get('desc')
n.url = post.get('url')
private = post.get('private', False)
if private in ['true', 'on']:
n.private = True
else:
n.private = False
n.vote = post.get('vote')
n.save()
n.add_tags(tags, 'scrapbook')
n.save() #called this specifically to save the url to the social db as well
return render_to_response("include/notes/addNote_result.html",\
{'message':_('Scrap is successfully added! You can close this window, or it will be closed for you in 1 second.')})
else:
tags = __get_ws_tags(request, username, 'scrapbook')
from django.forms import TextInput
        #by adding the tags field explicitly here, we avoid it picking up another user's tags (a strange error which repeats even after changing class names and variable names)
AddNForm_scrap = create_model_form("AddNForm_add_scrap_get_"+str(username), N, fields={#'tags':forms.ModelMultipleChoiceField(queryset=tags)
}, options={'exclude':['deleted'],
'fields':['url','title','tags','desc','vote','private'],
'widgets':{'title': TextInput(attrs={'size': 80}),
}})
url = request.GET.get('url')
title = request.GET.get('title')
desc = request.GET.get('desc')
#default_tag_id = T.objects.get(name='untagged').id
addNoteForm = AddNForm_scrap(initial={'url': url, 'title':title, 'desc':desc#, 'tags': [default_tag_id]
})
        #no need for the customized form in the scrapbook template
return render_to_response('scrapbook/notes/addNote.html', {'addNoteForm': addNoteForm, 'desc':desc, 'url':url, 'tags':tags})
@login_required
def share(request, username):
print 'share in note called'
note_ids = request.POST.getlist('note_ids')
N = getNote(request.user.username)
msgs = []
for note_id in note_ids:
note = N.objects.get(id=note_id)
message = 'From osl scraps:'+' '+note.title+' '+note.url+' '
desc = note.desc
desc = desc.replace('\r','')
desc = desc.replace('\n','')#TODO:
if len(desc) > 100:
desc = desc[:300] + '...... view more from http://new.notebook.opensourcelearning.org/'+\
username+'/scrapbook/scraps/note/'+unicode(note.id)+'/'
message = message+desc
msg = (message.encode('utf8'), '', 'yuanliangliu@gmail.com', ['buzz@gmail.com'])
msgs.append(msg)
#share_note(note_id, username)
send_mass_mail(tuple(msgs), fail_silently=False)
return HttpResponse('success', mimetype="text/plain")
|
yejia/osl_notebook
|
scraps/views.py
|
Python
|
mit
| 5,990 | 0.018364 |