text (string, 6-947k) | repo_name (string, 5-100) | path (string, 4-231) | language (1 class: Python) | license (15 classes) | size (int64, 6-947k) | score (float64, 0-0.34)
---|---|---|---|---|---|---|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-10-27 09:40
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('images', '0002_alter_fields'),
]
operations = [
migrations.AlterField(
model_name='image',
name='description',
field=models.TextField(blank=True, default=''),
),
]
| fidals/refarm-site | images/migrations/0003_auto_20161027_0940.py | Python | mit | 456 | 0 |
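For reference, a migration like the one above is what `makemigrations` emits after a model field changes; a minimal sketch of the model state it corresponds to (class name and app layout assumed from the migration):

```python
# Hypothetical images/models.py state matching the AlterField above.
from django.db import models

class Image(models.Model):
    # blank=True permits empty form input; default='' keeps the column non-NULL.
    description = models.TextField(blank=True, default='')
```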
# -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsLayoutItemLegend.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = '(C) 2017 by Nyall Dawson'
__date__ = '24/10/2017'
__copyright__ = 'Copyright 2017, The QGIS Project'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from qgis.PyQt.QtCore import QRectF
from qgis.PyQt.QtGui import QColor
from qgis.core import (QgsLayoutItemLegend,
QgsLayoutItemMap,
QgsLayout,
QgsMapSettings,
QgsVectorLayer,
QgsMarkerSymbol,
QgsSingleSymbolRenderer,
QgsRectangle,
QgsProject,
QgsLayoutObject,
QgsProperty,
QgsLayoutMeasurement,
QgsLayoutItem,
QgsLayoutPoint,
QgsLayoutSize)
from qgis.testing import (start_app,
unittest
)
from utilities import unitTestDataPath
from qgslayoutchecker import QgsLayoutChecker
import os
from test_qgslayoutitem import LayoutItemTestCase
start_app()
TEST_DATA_DIR = unitTestDataPath()
class TestQgsLayoutItemLegend(unittest.TestCase, LayoutItemTestCase):
@classmethod
def setUpClass(cls):
cls.item_class = QgsLayoutItemLegend
def testInitialSizeSymbolMapUnits(self):
"""Test initial size of legend with a symbol size in map units"""
point_path = os.path.join(TEST_DATA_DIR, 'points.shp')
point_layer = QgsVectorLayer(point_path, 'points', 'ogr')
QgsProject.instance().addMapLayers([point_layer])
marker_symbol = QgsMarkerSymbol.createSimple({'color': '#ff0000', 'outline_style': 'no', 'size': '5', 'size_unit': 'MapUnit'})
point_layer.setRenderer(QgsSingleSymbolRenderer(marker_symbol))
s = QgsMapSettings()
s.setLayers([point_layer])
layout = QgsLayout(QgsProject.instance())
layout.initializeDefaults()
map = QgsLayoutItemMap(layout)
map.attemptSetSceneRect(QRectF(20, 20, 80, 80))
map.setFrameEnabled(True)
map.setLayers([point_layer])
layout.addLayoutItem(map)
map.setExtent(point_layer.extent())
legend = QgsLayoutItemLegend(layout)
legend.attemptSetSceneRect(QRectF(120, 20, 80, 80))
legend.setFrameEnabled(True)
legend.setFrameStrokeWidth(QgsLayoutMeasurement(2))
legend.setBackgroundColor(QColor(200, 200, 200))
legend.setTitle('')
layout.addLayoutItem(legend)
legend.setLinkedMap(map)
checker = QgsLayoutChecker(
'composer_legend_mapunits', layout)
checker.setControlPathPrefix("composer_legend")
result, message = checker.testLayout()
self.assertTrue(result, message)
# resize with non-top-left reference point
legend.setResizeToContents(False)
legend.setReferencePoint(QgsLayoutItem.LowerRight)
legend.attemptMove(QgsLayoutPoint(120, 90))
legend.attemptResize(QgsLayoutSize(50, 60))
self.assertEqual(legend.positionWithUnits().x(), 120.0)
self.assertEqual(legend.positionWithUnits().y(), 90.0)
self.assertAlmostEqual(legend.pos().x(), 70, -1)
self.assertAlmostEqual(legend.pos().y(), 30, -1)
legend.setResizeToContents(True)
legend.updateLegend()
self.assertEqual(legend.positionWithUnits().x(), 120.0)
self.assertEqual(legend.positionWithUnits().y(), 90.0)
self.assertAlmostEqual(legend.pos().x(), 91, -1)
self.assertAlmostEqual(legend.pos().y(), 71, -1)
QgsProject.instance().removeMapLayers([point_layer.id()])
def testResizeWithMapContent(self):
"""Test test legend resizes to match map content"""
point_path = os.path.join(TEST_DATA_DIR, 'points.shp')
point_layer = QgsVectorLayer(point_path, 'points', 'ogr')
QgsProject.instance().addMapLayers([point_layer])
s = QgsMapSettings()
s.setLayers([point_layer])
layout = QgsLayout(QgsProject.instance())
layout.initializeDefaults()
map = QgsLayoutItemMap(layout)
map.attemptSetSceneRect(QRectF(20, 20, 80, 80))
map.setFrameEnabled(True)
map.setLayers([point_layer])
layout.addLayoutItem(map)
map.setExtent(point_layer.extent())
legend = QgsLayoutItemLegend(layout)
legend.attemptSetSceneRect(QRectF(120, 20, 80, 80))
legend.setFrameEnabled(True)
legend.setFrameStrokeWidth(QgsLayoutMeasurement(2))
legend.setBackgroundColor(QColor(200, 200, 200))
legend.setTitle('')
legend.setLegendFilterByMapEnabled(True)
layout.addLayoutItem(legend)
legend.setLinkedMap(map)
map.setExtent(QgsRectangle(-102.51, 41.16, -102.36, 41.30))
checker = QgsLayoutChecker(
'composer_legend_size_content', layout)
checker.setControlPathPrefix("composer_legend")
result, message = checker.testLayout()
self.assertTrue(result, message)
QgsProject.instance().removeMapLayers([point_layer.id()])
def testResizeDisabled(self):
"""Test that test legend does not resize if auto size is disabled"""
point_path = os.path.join(TEST_DATA_DIR, 'points.shp')
point_layer = QgsVectorLayer(point_path, 'points', 'ogr')
QgsProject.instance().addMapLayers([point_layer])
s = QgsMapSettings()
s.setLayers([point_layer])
layout = QgsLayout(QgsProject.instance())
layout.initializeDefaults()
map = QgsLayoutItemMap(layout)
map.attemptSetSceneRect(QRectF(20, 20, 80, 80))
map.setFrameEnabled(True)
map.setLayers([point_layer])
layout.addLayoutItem(map)
map.setExtent(point_layer.extent())
legend = QgsLayoutItemLegend(layout)
legend.attemptSetSceneRect(QRectF(120, 20, 80, 80))
legend.setFrameEnabled(True)
legend.setFrameStrokeWidth(QgsLayoutMeasurement(2))
legend.setBackgroundColor(QColor(200, 200, 200))
legend.setTitle('')
legend.setLegendFilterByMapEnabled(True)
# disable auto resizing
legend.setResizeToContents(False)
layout.addLayoutItem(legend)
legend.setLinkedMap(map)
map.setExtent(QgsRectangle(-102.51, 41.16, -102.36, 41.30))
checker = QgsLayoutChecker(
'composer_legend_noresize', layout)
checker.setControlPathPrefix("composer_legend")
result, message = checker.testLayout()
self.assertTrue(result, message)
QgsProject.instance().removeMapLayers([point_layer.id()])
def testResizeDisabledCrop(self):
"""Test that if legend resizing is disabled, and legend is too small, then content is cropped"""
point_path = os.path.join(TEST_DATA_DIR, 'points.shp')
point_layer = QgsVectorLayer(point_path, 'points', 'ogr')
QgsProject.instance().addMapLayers([point_layer])
s = QgsMapSettings()
s.setLayers([point_layer])
layout = QgsLayout(QgsProject.instance())
layout.initializeDefaults()
map = QgsLayoutItemMap(layout)
map.attemptSetSceneRect(QRectF(20, 20, 80, 80))
map.setFrameEnabled(True)
map.setLayers([point_layer])
layout.addLayoutItem(map)
map.setExtent(point_layer.extent())
legend = QgsLayoutItemLegend(layout)
legend.attemptSetSceneRect(QRectF(120, 20, 20, 20))
legend.setFrameEnabled(True)
legend.setFrameStrokeWidth(QgsLayoutMeasurement(2))
legend.setBackgroundColor(QColor(200, 200, 200))
legend.setTitle('')
legend.setLegendFilterByMapEnabled(True)
# disable auto resizing
legend.setResizeToContents(False)
layout.addLayoutItem(legend)
legend.setLinkedMap(map)
map.setExtent(QgsRectangle(-102.51, 41.16, -102.36, 41.30))
checker = QgsLayoutChecker(
'composer_legend_noresize_crop', layout)
checker.setControlPathPrefix("composer_legend")
result, message = checker.testLayout()
self.assertTrue(result, message)
QgsProject.instance().removeMapLayers([point_layer.id()])
def testDataDefinedTitle(self):
layout = QgsLayout(QgsProject.instance())
layout.initializeDefaults()
legend = QgsLayoutItemLegend(layout)
layout.addLayoutItem(legend)
legend.setTitle('original')
self.assertEqual(legend.title(), 'original')
self.assertEqual(legend.legendSettings().title(), 'original')
legend.dataDefinedProperties().setProperty(QgsLayoutObject.LegendTitle, QgsProperty.fromExpression("'new'"))
legend.refreshDataDefinedProperty()
self.assertEqual(legend.title(), 'original')
self.assertEqual(legend.legendSettings().title(), 'new')
def testDataDefinedColumnCount(self):
layout = QgsLayout(QgsProject.instance())
layout.initializeDefaults()
legend = QgsLayoutItemLegend(layout)
layout.addLayoutItem(legend)
legend.setColumnCount(2)
self.assertEqual(legend.columnCount(), 2)
self.assertEqual(legend.legendSettings().columnCount(), 2)
legend.dataDefinedProperties().setProperty(QgsLayoutObject.LegendColumnCount, QgsProperty.fromExpression("5"))
legend.refreshDataDefinedProperty()
self.assertEqual(legend.columnCount(), 2)
self.assertEqual(legend.legendSettings().columnCount(), 5)
if __name__ == '__main__':
unittest.main()
| CS-SI/QGIS | tests/src/python/test_qgslayoutlegend.py | Python | gpl-2.0 | 9,958 | 0.000402 |
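The non-top-left resize assertions in the test above reduce to simple reference-point arithmetic: with a LowerRight reference, the stored position is the item's lower-right corner, so the top-left is position minus size. A dependency-free sketch of that check:

```python
# Minimal sketch of the reference-point math asserted in the QGIS test above.
def top_left_from_lower_right(ref_pos, size):
    rx, ry = ref_pos
    w, h = size
    return (rx - w, ry - h)

# Matches assertAlmostEqual(legend.pos().x(), 70) and (legend.pos().y(), 30)
# after attemptMove((120, 90)) and attemptResize((50, 60)).
assert top_left_from_lower_right((120, 90), (50, 60)) == (70, 30)
```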
import unittest
from coval.es import *
class CodeValidatorEsTestCase(unittest.TestCase):
def setUp(self):
pass
def test_cif(self):
self.assertTrue(cif('A58818501'))
self.assertTrue(cif('B00000000'))
self.assertTrue(cif('C0000000J'))
self.assertTrue(cif('D00000000'))
self.assertTrue(cif('E00000000'))
self.assertTrue(cif('F00000000'))
self.assertTrue(cif('G00000000'))
self.assertTrue(cif('H00000000'))
self.assertFalse(cif('I00000000'))
self.assertFalse(cif('I0000000J'))
self.assertTrue(cif('J00000000'))
self.assertTrue(cif('K0000000J'))
self.assertTrue(cif('L0000000J'))
self.assertTrue(cif('M0000000J'))
self.assertTrue(cif('N0000000J'))
self.assertFalse(cif('O00000000'))
self.assertFalse(cif('O0000000J'))
self.assertTrue(cif('P0000000J'))
self.assertTrue(cif('Q0000000J'))
self.assertTrue(cif('R0000000J'))
self.assertTrue(cif('S0000000J'))
self.assertFalse(cif('T00000000'))
self.assertFalse(cif('T0000000J'))
self.assertTrue(cif('U00000000'))
self.assertTrue(cif('V00000000'))
self.assertTrue(cif('W0000000J'))
self.assertFalse(cif('X00000000'))
self.assertFalse(cif('X0000000J'))
self.assertFalse(cif('Y00000000'))
self.assertFalse(cif('Y0000000J'))
self.assertFalse(cif('Z00000000'))
self.assertFalse(cif('Z0000000J'))
self.assertFalse(cif('B0000000J'))
self.assertFalse(cif('BC0000000'))
self.assertFalse(cif('123456678'))
self.assertTrue(cif('B-00000000', strict=False))
self.assertFalse(cif('B-00000000', strict=True))
self.assertTrue(cif('K-0000000-J', strict=False))
self.assertFalse(cif('K-0000000-J', strict=True))
def test_ccc(self):
self.assertTrue(ccc('2077-0024-00-3102575766',strict=False))
self.assertFalse(ccc('2034 4505 73 1000034682',strict=False))
self.assertTrue(ccc('0000 0000 00 0000000000',strict=False))
self.assertFalse(ccc('0',strict=False))
self.assertFalse(ccc('1111 1111 11 1111111111',strict=False))
self.assertTrue(ccc('0001 0001 65 0000000001',strict=False))
self.assertFalse(ccc('',strict=False))
self.assertFalse(ccc('2077 0024 00 3102575766',strict=True))
self.assertFalse(ccc('0000 0000 00 0000000000',strict=True))
self.assertFalse(ccc('0001 0001 65 0000000001',strict=True))
self.assertTrue(ccc('20770024003102575766',strict=True))
self.assertFalse(ccc('20344505731000034682',strict=True))
self.assertTrue(ccc('00000000000000000000',strict=True))
self.assertFalse(ccc('0',strict=True))
self.assertFalse(ccc('11111111111111111111',strict=True))
self.assertTrue(ccc('00010001650000000001',strict=True))
self.assertFalse(ccc('',strict=True))
def test_ssn(self):
self.assertFalse(ssn('720111361735'))
self.assertTrue(ssn('281234567840'))
self.assertTrue(ssn('351234567825'))
self.assertFalse(ssn('35/12345678/25', strict=True))
self.assertTrue(ssn('35/12345678/25', strict=False))
self.assertFalse(ssn('35-12345678-25', strict=True))
self.assertTrue(ssn('35-12345678-25', strict=False))
self.assertFalse(ssn('35X1234567825'))
self.assertFalse(ssn('031322136383'))
self.assertFalse(ssn('72011a361732'))
self.assertFalse(ssn('73011a361731'))
self.assertFalse(ssn('03092a136383'))
self.assertFalse(ssn('03132a136385'))
self.assertFalse(ssn('201113617312'))
self.assertFalse(ssn('301113617334'))
self.assertFalse(ssn('309221363823'))
self.assertFalse(ssn('313221363822'))
def test_postcode(self):
self.assertTrue(postcode('28080'))
self.assertTrue(postcode('35500'))
self.assertFalse(postcode('59000'))
self.assertTrue(postcode('12012'))
self.assertTrue(postcode('25120'))
self.assertFalse(postcode('10'))
self.assertFalse(postcode('X123'))
if __name__ == '__main__':
unittest.main()
| jespino/coval | tests/es_tests.py | Python | bsd-3-clause | 4,205 | 0.004518 |
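The postcode cases above are consistent with Spanish postal codes: five digits whose leading pair is a province code between 01 and 52. A hedged re-implementation inferred from those tests (coval's actual rules may differ):

```python
import re

def postcode_sketch(value):
    # Five digits, first two a Spanish province code (01-52); this rule is
    # reverse-engineered from the test cases above, not from coval itself.
    if not re.fullmatch(r'\d{5}', value):
        return False
    return 1 <= int(value[:2]) <= 52

assert postcode_sketch('28080') and not postcode_sketch('59000')
```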
from django.shortcuts import render
from django.http import Http404
def index(request):
return render(request, 'map/index.html')
| BrendonKing32/Traffic-Assistant | map/views.py | Python | gpl-3.0 | 135 | 0 |
# You will need maestro.py from https://github.com/FRC4564/Maestro
#
# This is also just a placeholder; it has not been tested with an actual
# bot.
import sys
import time
try:
    from hardware import maestro
except ImportError:
    print("You are missing the maestro.py file from the hardware subdirectory.")
    print("Please download it from here https://github.com/FRC4564/Maestro")
sys.exit()
servo = None
# straightDelay and turnDelay are used by move() below but were never defined
# in this placeholder; the values here are assumptions to make it runnable.
straightDelay = 0.5
turnDelay = 0.3
def setup(robot_config):
global servo
servo = maestro.Controller()
servo.setAccel(0,4)
servo.setAccel(1,4)
servo.setTarget(0, 6000)
servo.setTarget(1, 6000)
def move(args):
direction = args['command']
if direction == 'F':
servo.setTarget(0, 12000)
servo.setTarget(1, 12000)
time.sleep(straightDelay)
servo.setTarget(0, 6000)
servo.setTarget(1, 6000)
elif direction == 'B':
servo.setTarget(0, 0)
servo.setTarget(1, 0)
time.sleep(straightDelay)
servo.setTarget(0, 6000)
servo.setTarget(1, 6000)
elif direction == 'L':
servo.setTarget(0, 0)
servo.setTarget(1, 12000)
time.sleep(turnDelay)
servo.setTarget(0, 6000)
servo.setTarget(1, 6000)
elif direction == 'R':
servo.setTarget(0, 12000)
servo.setTarget(1, 0)
time.sleep(turnDelay)
servo.setTarget(0, 6000)
servo.setTarget(1, 6000)
| Nocturnal42/runmyrobot | hardware/maestro-servo.py | Python | apache-2.0 | 1,417 | 0.007763 |
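A hedged usage sketch for the placeholder driver above (the import name is assumed; the actual file is hardware/maestro-servo.py, which is not directly importable because of the hyphen):

```python
import maestro_servo  # hypothetical import name for the module above

maestro_servo.setup(None)             # robot_config is unused by setup()
maestro_servo.move({'command': 'F'})  # forward for straightDelay, then stop
maestro_servo.move({'command': 'R'})  # right turn for turnDelay, then stop
```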
# -*- coding: utf-8 -*-
# Copyright 2012-2013 UNED
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.conf.urls import include, patterns, url
from django.views.generic import RedirectView
from moocng.courses.feeds import AnnouncementCourseFeed
urlpatterns = patterns(
'moocng.courses.views',
url(r'^$', 'home', name='home'),
url(r'^course/$', RedirectView.as_view(url='/'), name='course-index'),
# Flatpages
url(r'^faq/$', 'flatpage', {'page': 'faq'}, name='faq'),
url(r'^methodology/$', 'flatpage', {'page': 'methodology'},
name='methodology'),
url(r'^legal/$', 'flatpage', {'page': 'legal'}, name='legal'),
url(r'^tos/$', 'flatpage', {'page': 'tos'}, name='tos'),
url(r'^copyright/$', 'flatpage', {'page': 'copyright'}, name='copyright'),
url(r'^cert/$', 'flatpage', {'page': 'cert'}, name='cert'),
url(r'^oldscore-help/$', 'flatpage', {'page': 'oldscore'}, name='oldscore'),
url(r'^score-help/$', 'flatpage', {'page': 'score'}, name='score'),
url(r'^transcript/$', 'transcript', name='transcript'),
url(r'^transcript/(?P<course_slug>[-\w]+)/$', 'transcript', name='transcript'),
url(r'^course/add$', 'course_add', name='course_add'),
url(r'^course/(?P<course_slug>[-\w]+)/$', 'course_overview',
name='course_overview'),
url(r'^course/(?P<course_slug>[-\w]+)/classroom/$', 'course_classroom',
name='course_classroom'),
url(r'^course/(?P<course_slug>[-\w]+)/progress/$', 'course_progress',
name='course_progress'),
url(r'^course/(?P<course_slug>[-\w]+)/extra_info/$', 'course_extra_info',
name='course_extra_info'),
url(r'^course/(?P<course_slug>[-\w]+)/announcement/(?P<announcement_id>\d+)/(?P<announcement_slug>[-\w]+)$',
'announcement_detail', name='announcement_detail'),
url(r'^course/(?P<course_slug>[-\w]+)/announcements_feed/$',
AnnouncementCourseFeed(), name='announcements_feed'),
url(r'^course/(?P<course_slug>[-\w]+)/clone-activity/$', 'clone_activity', name='course_clone_activity'),
# Teacher's course administration
url(r'^course/(?P<course_slug>[-\w]+)/teacheradmin/',
include('moocng.teacheradmin.urls')),
)
| OpenMOOC/moocng | moocng/courses/urls.py | Python | apache-2.0 | 2,701 | 0.001481 |
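Because every pattern above is named, views and templates can resolve them with Django's `reverse`; a small sketch (the slug is a made-up example, and the import path is the Django 1.x-era one this urlconf targets):

```python
from django.core.urlresolvers import reverse  # Django <= 1.9 import path

url = reverse('course_overview', kwargs={'course_slug': 'intro-mooc'})
# -> '/course/intro-mooc/'
```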
zone_file = '/etc/bind/zones/db.circulospodemos.info'
| Podemos-TICS/Creaci-n-Wordpress | scripts/deploy/Constants.py | Python | gpl-2.0 | 54 | 0 |
# -*- coding: utf-8 -*-
"""
Display network speed and bandwidth usage.
Configuration parameters:
cache_timeout: refresh interval for this module (default 2)
format: display format for this module
*(default '{nic} [\?color=down LAN(Kb): {down}↓ {up}↑]
[\?color=total T(Mb): {download}↓ {upload}↑ {total}↕]')*
nic: network interface to use (default None)
thresholds: color thresholds to use
*(default {'down': [(0, 'bad'), (30, 'degraded'), (60, 'good')],
'total': [(0, 'good'), (400, 'degraded'), (700, 'bad')]})*
Format placeholders:
{nic} network interface
{down} download speed
{up} upload speed
{download} download usage
{upload} upload usage
{total} total usage
Color thresholds:
{down} color threshold of download speed
{total} color threshold of total usage
@author Shahin Azad <ishahinism at Gmail>
SAMPLE OUTPUT
[
{'full_text': 'eth0 '},
{'full_text': 'LAN(Kb): 77.8↓ 26.9↑ ', 'color': '#00FF00'},
{'full_text': 'T(Mb): 394↓ 45↑ 438↕', 'color': '#FFFF00'},
]
"""
class GetData:
"""
Get system status.
"""
def __init__(self, nic):
self.nic = nic
def netBytes(self):
"""
Get bytes directly from /proc.
"""
with open('/proc/net/dev') as fh:
net_data = fh.read().split()
interface_index = net_data.index(self.nic + ':')
received_bytes = int(net_data[interface_index + 1])
transmitted_bytes = int(net_data[interface_index + 9])
return received_bytes, transmitted_bytes
class Py3status:
"""
"""
# available configuration parameters
cache_timeout = 2
format = u'{nic} [\?color=down LAN(Kb): {down}↓ {up}↑] ' + \
u'[\?color=total T(Mb): {download}↓ {upload}↑ {total}↕]'
nic = None
thresholds = {
'down': [(0, 'bad'), (30, 'degraded'), (60, 'good')],
'total': [(0, 'good'), (400, 'degraded'), (700, 'bad')]
}
class Meta:
def deprecate_function(config):
return {
'thresholds': {
'down': [
(0, 'bad'),
(config.get('low_speed', 30), 'degraded'),
(config.get('med_speed', 60), 'good')
],
'total': [
(0, 'good'),
(config.get('low_traffic', 400), 'degraded'),
(config.get('med_traffic', 700), 'bad')
]
}
}
deprecated = {
'function': [
{'function': deprecate_function},
],
'remove': [
{
'param': 'low_speed',
'msg': 'obsolete, set using thresholds parameter',
},
{
'param': 'med_speed',
'msg': 'obsolete, set using thresholds parameter',
},
{
'param': 'low_traffic',
'msg': 'obsolete, set using thresholds parameter',
},
{
'param': 'med_traffic',
'msg': 'obsolete, set using thresholds parameter',
},
],
}
update_config = {
'update_placeholder_format': [
{
'placeholder_formats': {
'down': ':5.1f',
'up': ':5.1f',
'download': ':3.0f',
'upload': ':3.0f',
'total': ':3.0f',
},
'format_strings': ['format']
},
],
}
def post_config_hook(self):
"""
Get network interface.
"""
self.old_transmitted = 0
self.old_received = 0
if self.nic is None:
# Get default gateway directly from /proc.
with open('/proc/net/route') as fh:
for line in fh:
fields = line.strip().split()
if fields[1] == '00000000' and int(fields[3], 16) & 2:
self.nic = fields[0]
break
if self.nic is None:
self.nic = 'lo'
self.py3.log('selected nic: %s' % self.nic)
def netdata(self):
"""
Calculate network speed and network traffic.
"""
data = GetData(self.nic)
received_bytes, transmitted_bytes = data.netBytes()
# net_speed (statistic)
down = (received_bytes - self.old_received) / 1024.
up = (transmitted_bytes - self.old_transmitted) / 1024.
self.old_received = received_bytes
self.old_transmitted = transmitted_bytes
# net_traffic (statistic)
download = received_bytes / 1024 / 1024.
upload = transmitted_bytes / 1024 / 1024.
total = download + upload
# color threshold
self.py3.threshold_get_color(down, 'down')
self.py3.threshold_get_color(total, 'total')
netdata = self.py3.safe_format(self.format, {'down': down,
'up': up,
'download': download,
'upload': upload,
'total': total,
'nic': self.nic})
return {
'cached_until': self.py3.time_in(self.cache_timeout),
'full_text': netdata
}
if __name__ == "__main__":
"""
Run module in test mode.
"""
from py3status.module_test import module_test
module_test(Py3status)
| alexoneill/py3status | py3status/modules/netdata.py | Python | bsd-3-clause | 5,963 | 0.000674 |
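`GetData.netBytes` above depends on the `/proc/net/dev` layout: each interface line starts with `name:` followed by counters, received bytes first and transmitted bytes ninth. A standalone sketch of the same read; note the module then reports byte deltas per refresh interval (`cache_timeout`, default 2 s) divided by 1024, i.e. Kb per interval rather than per second:

```python
# Minimal re-read of the counters GetData.netBytes extracts.
def read_counters(nic):
    with open('/proc/net/dev') as fh:
        fields = fh.read().split()
    i = fields.index(nic + ':')
    return int(fields[i + 1]), int(fields[i + 9])  # (rx_bytes, tx_bytes)
```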
from django.core.management import BaseCommand
from newsfeed.models import Entry
from premises.models import Contention
class Command(BaseCommand):
def handle(self, *args, **options):
for contention in Contention.objects.all():
Entry.objects.create(
object_id=contention.id,
news_type=contention.get_newsfeed_type(),
sender=contention.get_actor(),
related_object=contention.get_newsfeed_bundle(),
date_creation=contention.date_creation
)
| beratdogan/arguman.org | web/newsfeed/management/commands/create_initial_newsfeed.py | Python | mit | 557 | 0 |
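The backfill command above is normally run once when the newsfeed app is first deployed; it can also be invoked programmatically, e.g. from a data migration:

```python
from django.core.management import call_command

# The command name is derived from the file name above.
call_command('create_initial_newsfeed')
```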
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import mmap
import re
import collections
import math
import detectlanguage
import codecs
detectlanguage.configuration.api_key = "d6ed8c76914a9809b58c2e11904fbaa3"
class Analyzer:
def __init__(self, passwd):
self.evaluation = {}
self._passwd = passwd
self.upperCount = 0
self.lowerCount = 0
self.specialCount = 0
self.entropy = 0
self.numberCount = 0
# Start the password analysis
self.checkAttributes()
self.checkIfSmall()
self.checkForCommonPasswd(self._passwd)
self.checkPattern()
self.entropyCalculation()
self.checkPermutation()
self.printEvaluationAndEntropy()
def checkForCommonPasswd(self,testPass):
"""
# Checks the given password in the bad password list
"""
with open('example.txt', 'rb', 0) as file, \
mmap.mmap(file.fileno(), 0, access=mmap.ACCESS_READ) as contents:
if contents.find(str.encode(testPass)) != -1:
self.evaluation['Common Password'] = True
def checkIfSmall(self):
"""
Checks length of given password
"""
if len(self._passwd) < 10:
self.evaluation['Small Password'] = True
def checkAttributes(self):
"""
Check upper-case, lower-case, digit and special character counts.
"""
self.upperCount = sum(1 for c in self._passwd if c.isupper())
self.lowerCount = sum(1 for c in self._passwd if c.islower())
self.specialCount = min(sum(not c.isalnum() for c in self._passwd), 3)
self.numberCount = sum(c.isdigit() for c in self._passwd)
def checkPattern(self):
"""
Checks if password is just the same letters,
also checks for bad repetitions or patterns
"""
diff = 0
#Check if the password has at least 5 different characters
letterFreq = collections.Counter(self._passwd)
if len(letterFreq.keys()) < 5:
self.evaluation['Small Variation'] = True
#Check if the password has a character that is repeated more than 10 times
for value in letterFreq.values():
if value > 10:
self.evaluation['Repeated character'] = True
#Check if the password is an alphabetical or keyboard sequence
lastChar = self._passwd[0].lower()
alphabeticalDiff = []
keyboardDiff = []
for value in self._passwd.lower():
alphabeticalDiff.append(abs(ord(lastChar) - ord(value)))
keyboardDiff.append(self.distanceInKeyboard(value, lastChar))
lastChar = value
alphabeticalDiffFreq = collections.Counter(alphabeticalDiff)
keyboardDiffFreq = collections.Counter(keyboardDiff)
if len(alphabeticalDiffFreq.keys()) < 5 or len(keyboardDiffFreq.keys()) < 5:
self.evaluation['Sequence pattern'] = True
#Check if a character is repeated more than 2 times consecutively
repCounter = 0
for value in keyboardDiffFreq.values():
if value == 0:
repCounter += 1
else:
repCounter = 0
if repCounter > 2:
self.evaluation['Same character sequence'] = True
def entropyCalculation(self):
"""
Calculation of entropy based on characters found
"""
hasUpper = 1 if self.upperCount != 0 else 0
hasLower = 1 if self.lowerCount != 0 else 0
hasSpecial = 1 if self.specialCount != 0 else 0
hasNumber = 1 if self.numberCount != 0 else 0
self.entropy = math.log(math.pow(((hasUpper*27) + (hasLower*27) + (hasSpecial*33)+(hasNumber*10)),len(self._passwd)),2)
print('Entropy: {0} bits'.format(self.entropy))
def distanceInKeyboard(self,char1, char2):
"""
Estimate the distance between two characters on the keyboard.
"""
firstKeyboardRow = [('1','!'),('2','@'),('3','#'),('4','$'),('5','%'),('6','¨'),('7','&'),('8','*'),('9','('),('0',')'),('-','_'),('=','+')]
secondKeyboardRow = [('q'), ('w'),('e'), ('r'), ('t'), ('y'), ('u'), ('i'), ('o'), ('p'), ('´', '`'), ('[', '{')]
thirdKeyboardRow = [('a'), ('s'), ('d'), ('f'), ('g'), ('h'), ('j'), ('k'), ('l'), ('ç'),('^','~'),( ']', '}')]
fourthKeyboarRow = [('|', '\\'),('z'),('x'),('c'),('v'),('b'),('n'),('m'), (',', '<'),('.', '>'), (';', ':'), ('?', '/')]
char1Pos = [0,0]
char2Pos = [0,0]
for value in firstKeyboardRow:
if char1 in value:
char1Pos = [1, firstKeyboardRow.index(value)+1]
elif char2 in value:
char2Pos = [1, firstKeyboardRow.index(value)+1]
for value in secondKeyboardRow:
if char1 in value:
char1Pos = [2, secondKeyboardRow.index(value)+1]
elif char2 in value:
char2Pos = [2, secondKeyboardRow.index(value)+1]
for value in thirdKeyboardRow:
if char1 in value:
char1Pos = [3, thirdKeyboardRow.index(value)+1]
elif char2 in value:
char2Pos = [3, thirdKeyboardRow.index(value)+1]
for value in fourthKeyboarRow:
if char1 in value:
char1Pos = [4, fourthKeyboarRow.index(value)+1]
elif char2 in value:
char2Pos = [4, fourthKeyboarRow.index(value)+1]
distance = abs(char1Pos[0]-char2Pos[0]) + abs(char1Pos[1]-char2Pos[1])
return distance
def isWord(self,testPass):
"""
Check if the given string is a word; supports 164 languages.
"""
detectlanguage.configuration.secure = True
return detectlanguage.detect(testPass)[0]['confidence'] == 10
def checkPermutation(self):
"""
Check if the password is a disguised word by applying reverse permutation methods to it.
"""
passwordPermutation = []
passwordPermutation.append(self.removeNumber(self._passwd))
passwordPermutation.append(self.removeSymbol(self._passwd))
passwordPermutation.append(self.removeSymbol(passwordPermutation[0]))
passwordPermutation.append(self.replaceSymbol(passwordPermutation[0]))
passwordPermutation.append(self.replaceNumber(passwordPermutation[1]))
passwordPermutation.append(self.removeSymbol(self.removeNumber(passwordPermutation[3])))
passwordPermutation.append(self.removeSymbol(self.removeNumber(passwordPermutation[4])))
foundWord = False
for _password in passwordPermutation:
if self.isWord(_password) and not foundWord:
self.evaluation['Common word as password'] = True
foundWord = True
self.checkForCommonPasswd(_password)
def replaceNumber(self,testPass):
"""
Replace numbers in the password for letters. (leet/1337)
"""
newPassword = testPass.lower().replace("1","l")
newPassword = newPassword.lower().replace("0","o")
newPassword = newPassword.lower().replace("3","e")
newPassword = newPassword.lower().replace("5","s")
newPassword = newPassword.lower().replace("7","t")
newPassword = newPassword.lower().replace("8","b")
return newPassword
def replaceSymbol(self,testPass):
"""
Replace symbols in the password for letters. (leet/1337)
"""
newPassword = testPass.lower().replace("@","a")
newPassword = newPassword.lower().replace("$","s")
newPassword = newPassword.lower().replace("&","e")
newPassword = newPassword.lower().replace("!","l")
return newPassword
def removeNumber(self,testPass):
"""
Remove numbers from the password.
"""
newPassword = ''.join([i for i in testPass if not i.isdigit()])
return newPassword
def removeSymbol(self,testPass):
"""
Remove symbols from the password.
"""
newPassword = ''.join(i for i in testPass if i.isalnum())
return newPassword
def printEvaluationAndEntropy(self):
"""
Print evaluation scores.
"""
for key in self.evaluation.keys():
print (key)
| haastt/analisador | src/Analyzer.py | Python | mit | 8,325 | 0.008892 |
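`entropyCalculation` above computes `log2(pool_size ** len(password))`, which equals `len(password) * log2(pool_size)`, with pools of 27 (upper), 27 (lower), 33 (special) and 10 (digits). A worked check of that formula:

```python
import math

# 10-character password drawn from lower-case letters and digits:
pool = 27 + 10                      # lower (27) + digits (10), per the class above
length = 10
entropy = length * math.log2(pool)  # equals log2(pool ** length)
print(round(entropy, 1))            # -> 52.1 bits
```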
"""Top-level module for releng-sop."""
| release-engineering/releng-sop | releng_sop/__init__.py | Python | mit | 39 | 0 |
"""
Created on 16 May 2017
@author: Bruno Beloff (bruno.beloff@southcoastscience.com)
Note: time shall always be stored as UTC, then localized on retrieval.
"""
from scs_core.data.rtc_datetime import RTCDatetime
from scs_host.bus.i2c import I2C
from scs_host.lock.lock import Lock
# --------------------------------------------------------------------------------------------------------------------
class DS1338(object):
"""
Maxim Integrated DS1338 serial real-time clock
"""
__ADDR = 0x68
__REG_SECONDS = 0x00
__REG_MINUTES = 0x01
__REG_HOURS = 0x02
__REG_DAY = 0x03
__REG_DATE = 0x04
__REG_MONTH = 0x05
__REG_YEAR = 0x06
__REG_CONTROL = 0x07
__RAM_START_ADDR = 0x08
__RAM_MAX_ADDR = 0xff # 247 bytes
__SECONDS_MASK_CLOCK_HALT = 0x80 # ---- 1000 0000
__HOURS_MASK_24_HOUR = 0x40 # ---- 0100 0000
__CONTROL_MASK_OSC_STOPPED = 0x20 # ---- 0010 0000
__CONTROL_MASK_SQW_EN = 0x10 # ---- 0001 0000
# ----------------------------------------------------------------------------------------------------------------
__LOCK_TIMEOUT = 2.0
# ----------------------------------------------------------------------------------------------------------------
# RTC...
@classmethod
def init(cls, enable_square_wave=False):
try:
cls.obtain_lock()
# use 24 hour...
hours = cls.__read_reg(cls.__REG_HOURS)
hours = hours & ~cls.__HOURS_MASK_24_HOUR
cls.__write_reg(cls.__REG_HOURS, hours)
# enable square wave output...
control = cls.__read_reg(cls.__REG_CONTROL)
control = control | cls.__CONTROL_MASK_SQW_EN if enable_square_wave \
else control & ~cls.__CONTROL_MASK_SQW_EN
cls.__write_reg(cls.__REG_CONTROL, control)
finally:
cls.release_lock()
@classmethod
def get_time(cls):
try:
cls.obtain_lock()
# read RTC...
second = cls.__read_reg_decimal(cls.__REG_SECONDS)
minute = cls.__read_reg_decimal(cls.__REG_MINUTES)
hour = cls.__read_reg_decimal(cls.__REG_HOURS)
weekday = cls.__read_reg_decimal(cls.__REG_DAY)
day = cls.__read_reg_decimal(cls.__REG_DATE)
month = cls.__read_reg_decimal(cls.__REG_MONTH)
year = cls.__read_reg_decimal(cls.__REG_YEAR)
finally:
cls.release_lock()
rtc_datetime = RTCDatetime(year, month, day, weekday, hour, minute, second)
return rtc_datetime
@classmethod
def set_time(cls, rtc_datetime):
try:
cls.obtain_lock()
# update RTC...
cls.__write_reg_decimal(cls.__REG_SECONDS, rtc_datetime.second)
cls.__write_reg_decimal(cls.__REG_MINUTES, rtc_datetime.minute)
cls.__write_reg_decimal(cls.__REG_HOURS, rtc_datetime.hour)
cls.__write_reg_decimal(cls.__REG_DAY, rtc_datetime.weekday)
cls.__write_reg_decimal(cls.__REG_DATE, rtc_datetime.day)
cls.__write_reg_decimal(cls.__REG_MONTH, rtc_datetime.month)
cls.__write_reg_decimal(cls.__REG_YEAR, rtc_datetime.year)
finally:
cls.release_lock()
@classmethod
def get_ctrl(cls):
return cls.__read_reg(cls.__REG_CONTROL)
# ----------------------------------------------------------------------------------------------------------------
# RAM...
@classmethod
def read(cls, addr):
if addr < 0 or addr > cls.__RAM_MAX_ADDR:
raise IndexError("RAM address out of range: %d" % addr)
return cls.__read_reg(cls.__RAM_START_ADDR + addr)
@classmethod
def write(cls, addr, val):
if addr < 0 or addr > cls.__RAM_MAX_ADDR:
raise IndexError("RAM address out of range: %d" % addr)
cls.__write_reg(cls.__RAM_START_ADDR + addr, val)
# ----------------------------------------------------------------------------------------------------------------
@classmethod
def __read_reg_decimal(cls, addr):
return cls.__as_decimal(cls.__read_reg(addr))
@classmethod
def __read_reg(cls, addr):
try:
I2C.Sensors.start_tx(cls.__ADDR)
value = I2C.Sensors.read_cmd(addr, 1)
finally:
I2C.Sensors.end_tx()
return value
@classmethod
def __write_reg_decimal(cls, addr, value):
return cls.__write_reg(addr, cls.__as_bcd(value))
@classmethod
def __write_reg(cls, addr, value):
try:
I2C.Sensors.start_tx(cls.__ADDR)
I2C.Sensors.write(addr, value)
finally:
I2C.Sensors.end_tx()
# ----------------------------------------------------------------------------------------------------------------
@classmethod
def obtain_lock(cls):
Lock.acquire(cls.__lock_name(), DS1338.__LOCK_TIMEOUT)
@classmethod
def release_lock(cls):
Lock.release(cls.__lock_name())
@classmethod
def __lock_name(cls):
return "%s-0x%02x" % (cls.__name__, cls.__ADDR)
# ----------------------------------------------------------------------------------------------------------------
@classmethod
def __as_decimal(cls, bcd):
msb = bcd >> 4
lsb = bcd & 0x0f
return msb * 10 + lsb
@classmethod
def __as_bcd(cls, decimal):
msb = decimal // 10
lsb = decimal % 10
return msb << 4 | lsb
| south-coast-science/scs_dfe_eng | src/scs_dfe/time/ds1338.py | Python | mit | 5,825 | 0.006695 |
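The RTC registers above hold binary-coded decimal: the high nibble is the tens digit, the low nibble the units. A standalone round-trip of the two private conversions:

```python
# Same arithmetic as DS1338.__as_bcd / DS1338.__as_decimal.
def as_bcd(decimal):
    return (decimal // 10) << 4 | (decimal % 10)

def as_decimal(bcd):
    return (bcd >> 4) * 10 + (bcd & 0x0f)

assert as_bcd(59) == 0x59      # 59 -> 0b0101_1001
assert as_decimal(0x59) == 59  # and back
```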
# -*- coding: utf-8 -*-
"""Module providing views for the site navigation root"""
from Acquisition import aq_inner
from Products.Five.browser import BrowserView
from Products.ZCatalog.interfaces import ICatalogBrain
from plone import api
from plone.app.contentlisting.interfaces import IContentListing
from plone.app.contentlisting.interfaces import IContentListingObject
from plone.app.contenttypes.interfaces import INewsItem
from zope.component import getMultiAdapter
from zope.component import getUtility
from dpf.sitecontent.interfaces import IResponsiveImagesTool
IMG = 'data:image/gif;base64,R0lGODlhAQABAIAAAP///wAAACwAAAAAAQABAAACAkQBADs='
class FrontPageView(BrowserView):
""" General purpose frontpage view """
def __call__(self):
self.has_newsitems = len(self.recent_news()) > 0
return self.render()
def render(self):
return self.index()
def can_edit(self):
show = False
if not api.user.is_anonymous():
show = True
return show
def portal_id(self):
portal = api.portal.get()
return portal.id
def recent_news(self):
catalog = api.portal.get_tool(name='portal_catalog')
items = catalog(object_provides=INewsItem.__identifier__,
review_state='published',
sort_on='Date',
sort_order='reverse',
sort_limit=3)[:3]
return IContentListing(items)
def rendered_news_card(self, uuid):
item = api.content.get(UID=uuid)
template = item.restrictedTraverse('@@card-news-item')()
return template
def section_preview(self, section):
info = {}
if section.startswith('/'):
target = section
else:
target = '/{0}'.format(section)
item = api.content.get(path=target)
if item:
info['title'] = item.Title()
info['teaser'] = item.Description()
info['url'] = item.absolute_url()
info['image'] = self.image_tag(item)
info['subitems'] = None
if target in ('/news',):
info['subitems'] = self.recent_news()
return info
def get_image_data(self, uuid):
tool = getUtility(IResponsiveImagesTool)
return tool.create(uuid)
def image_tag(self, item):
data = {}
sizes = ['small', 'medium', 'large']
for size in sizes:
img = self._get_scaled_img(item, size)
data[size] = '{0} {1}w'.format(img['url'], img['width'])
return data
def _get_scaled_img(self, item, size):
if (
ICatalogBrain.providedBy(item) or
IContentListingObject.providedBy(item)
):
obj = item.getObject()
else:
obj = item
info = {}
if hasattr(obj, 'image'):
scales = getMultiAdapter((obj, self.request), name='images')
# elif chain so 'small' does not fall through to the 900px branch
if size == 'small':
    scale = scales.scale('image', width=300, height=300)
elif size == 'medium':
    scale = scales.scale('image', width=600, height=600)
else:
    scale = scales.scale('image', width=900, height=900)
if scale is not None:
info['url'] = scale.url
info['width'] = scale.width
info['height'] = scale.height
else:
info['url'] = IMG
info['width'] = '1px'
info['height'] = '1px'
return info
| a25kk/dpf | src/dpf.sitecontent/dpf/sitecontent/browser/frontpage.py | Python | mit | 3,589 | 0 |
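`image_tag` above returns `'<url> <width>w'` strings keyed by size name, which is the shape an HTML `srcset` attribute expects. A sketch of consuming that dict (template-side assembly assumed):

```python
def srcset_from_data(data):
    # data is image_tag() output, e.g. {'small': 'http://... 300w', ...}
    return ', '.join(data[size] for size in ('small', 'medium', 'large'))

# Rendered as: <img srcset="... 300w, ... 600w, ... 900w" ...>
```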
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from PyQt5 import QtWidgets
from view.analysis_widget import AnalysisWidget
# noinspection PyPep8Naming
class TemporalAnalysisWidget(AnalysisWidget):
# noinspection PyArgumentList
def __init__(self, mplCanvas):
"""
Construct the Temporal Analysis page in the main window. |br|
A ``ScatterPlot.mplCanvas`` will be shown on this page.
:param mplCanvas: The ``ScatterPlot.mplCanvas`` widget.
"""
super().__init__()
upperLabel = QtWidgets.QLabel("Temporal Distribution &Graph:")
upperLabel.setMargin(1)
upperLabel.setBuddy(mplCanvas)
lowerLabel = QtWidgets.QLabel("Temporal Correlation &Quotient:")
lowerLabel.setMargin(1)
lowerLabel.setBuddy(self.tableWidget)
mainLayout = QtWidgets.QVBoxLayout()
mainLayout.addWidget(upperLabel)
mainLayout.addWidget(mplCanvas)
mainLayout.addWidget(lowerLabel)
mainLayout.addWidget(self.tableWidget)
self.setLayout(mainLayout)
| yuwen41200/biodiversity-analysis | src/view/temporal_analysis_widget.py | Python | gpl-3.0 | 1,068 | 0 |
from discord.ext import commands
import discord.utils
def is_owner_check(ctx):
author = str(ctx.message.author)
owner = ctx.bot.config['master']
return author == owner
def is_owner():
return commands.check(is_owner_check)
def check_permissions(ctx, perms):
#if is_owner_check(ctx):
# return True
if not perms:
return False
ch = ctx.message.channel
author = ctx.message.author
resolved = ch.permissions_for(author)
return all(getattr(resolved, name, None) == value for name, value in perms.items())
def role_or_permissions(ctx, check, **perms):
if check_permissions(ctx, perms):
return True
ch = ctx.message.channel
author = ctx.message.author
if ch.is_private:
return False # can't have roles in PMs
role = discord.utils.find(check, author.roles)
return role is not None
def serverowner_or_permissions(**perms):
def predicate(ctx):
owner = ctx.message.server.owner
if ctx.message.author.id == owner.id:
return True
return check_permissions(ctx,perms)
return commands.check(predicate)
def serverowner():
return serverowner_or_permissions()
def check_wantchannel(ctx):
if ctx.message.server is None:
return False
channel = ctx.message.channel
server = ctx.message.server
try:
want_channels = ctx.bot.server_dict[server]['want_channel_list']
except KeyError:
return False
if channel in want_channels:
return True
def check_citychannel(ctx):
if ctx.message.server is None:
return False
channel = ctx.message.channel.name
server = ctx.message.server
try:
city_channels = ctx.bot.server_dict[server]['city_channels'].keys()
except KeyError:
return False
if channel in city_channels:
return True
def check_raidchannel(ctx):
if ctx.message.server is None:
return False
channel = ctx.message.channel
server = ctx.message.server
try:
raid_channels = ctx.bot.server_dict[server]['raidchannel_dict'].keys()
except KeyError:
return False
if channel in raid_channels:
return True
def check_eggchannel(ctx):
if ctx.message.server is None:
return False
channel = ctx.message.channel
server = ctx.message.server
try:
type = ctx.bot.server_dict[server]['raidchannel_dict'][channel]['type']
except KeyError:
return False
if type == 'egg':
return True
def check_raidactive(ctx):
if ctx.message.server is None:
return False
channel = ctx.message.channel
server = ctx.message.server
try:
return ctx.bot.server_dict[server]['raidchannel_dict'][channel]['active']
except KeyError:
return False
def check_raidset(ctx):
if ctx.message.server is None:
return False
server = ctx.message.server
try:
return ctx.bot.server_dict[server]['raidset']
except KeyError:
return False
def check_wildset(ctx):
if ctx.message.server is None:
return False
server = ctx.message.server
try:
return ctx.bot.server_dict[server]['wildset']
except KeyError:
return False
def check_wantset(ctx):
if ctx.message.server is None:
return False
server = ctx.message.server
try:
return ctx.bot.server_dict[server]['wantset']
except KeyError:
return False
def check_teamset(ctx):
if ctx.message.server is None:
return False
server = ctx.message.server
try:
return ctx.bot.server_dict[server]['team']
except KeyError:
return False
def teamset():
def predicate(ctx):
return check_teamset(ctx)
return commands.check(predicate)
def wantset():
def predicate(ctx):
return check_wantset(ctx)
return commands.check(predicate)
def wildset():
def predicate(ctx):
return check_wildset(ctx)
return commands.check(predicate)
def raidset():
def predicate(ctx):
return check_raidset(ctx)
return commands.check(predicate)
def citychannel():
def predicate(ctx):
return check_citychannel(ctx)
return commands.check(predicate)
def wantchannel():
def predicate(ctx):
if check_wantset(ctx):
return check_wantchannel(ctx)
return commands.check(predicate)
def raidchannel():
def predicate(ctx):
return check_raidchannel(ctx)
return commands.check(predicate)
def notraidchannel():
def predicate(ctx):
return not check_raidchannel(ctx)
return commands.check(predicate)
def activeraidchannel():
def predicate(ctx):
if check_raidchannel(ctx):
return check_raidactive(ctx)
return commands.check(predicate)
def cityraidchannel():
def predicate(ctx):
if check_raidchannel(ctx) == True:
return True
elif check_citychannel(ctx) == True:
return True
return commands.check(predicate)
def cityeggchannel():
def predicate(ctx):
if check_raidchannel(ctx) == True:
if check_eggchannel(ctx) == True:
return True
elif check_citychannel(ctx) == True:
return True
return commands.check(predicate)
| Jonqora/whiskers | checks.py | Python | gpl-3.0 | 5,290 | 0.006994 |
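Each factory above wraps its predicate in `commands.check`, so the checks stack onto bot commands as decorators. A hedged usage sketch in the old discord.py style this module targets (command name and body hypothetical):

```python
from discord.ext import commands
import checks

@commands.command(pass_context=True)
@checks.activeraidchannel()  # only runs inside an active raid channel
async def timer(ctx):
    ...  # hypothetical command body
```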
# -*- coding: utf-8 -*-
# libavg - Media Playback Engine.
# Copyright (C) 2003-2011 Ulrich von Zadow
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Current versions can be found at www.libavg.de
#
from libavg import avg
from libavg.ui import slider, gesture
class ScrollPane(avg.DivNode):
def __init__(self, contentNode, parent=None, **kwargs):
super(ScrollPane, self).__init__(crop=True, **kwargs)
self.registerInstance(self, parent)
self.appendChild(contentNode)
self._contentNode = contentNode
def setContentPos(self, pos):
def constrain(pos, limit):
if limit > 0:
# Content larger than container
if pos > limit:
pos = limit
elif pos < 0:
pos = 0
else:
# Content smaller than container
if pos > 0:
pos = 0
elif pos < limit:
pos = limit
return pos
maxPos = self.getMaxContentPos()
pos = avg.Point2D(pos)
pos.x = constrain(pos.x, maxPos.x)
pos.y = constrain(pos.y, maxPos.y)
self._contentNode.pos = -pos
def getContentPos(self):
return -self._contentNode.pos
contentpos = property(getContentPos, setContentPos)
def getContentSize(self):
return self._contentNode.size
def setContentSize(self, size):
self._contentNode.size = size
self.setContentPos(-self._contentNode.pos) # Recheck constraints.
contentsize = property(getContentSize, setContentSize)
def getMaxContentPos(self):
maxPos = avg.Point2D(self._contentNode.size - self.size)
if maxPos.x < 0:
maxPos.x = 0
if maxPos.y < 0:
maxPos.y = 0
return maxPos
class ScrollArea(avg.DivNode):
PRESSED = avg.Publisher.genMessageID()
RELEASED = avg.Publisher.genMessageID()
CONTENT_POS_CHANGED = avg.Publisher.genMessageID()
def __init__(self, contentNode, size, hScrollBar=None, vScrollBar=None, parent=None,
friction=None, **kwargs):
super(ScrollArea, self).__init__(**kwargs)
self.registerInstance(self, parent)
self.publish(self.PRESSED)
self.publish(self.RELEASED)
self.publish(self.CONTENT_POS_CHANGED)
self._hScrollBar = hScrollBar
self._vScrollBar = vScrollBar
if hScrollBar:
self.appendChild(hScrollBar)
hScrollBar.subscribe(slider.Slider.THUMB_POS_CHANGED, self.__onHThumbMove)
if vScrollBar:
self.appendChild(vScrollBar)
vScrollBar.subscribe(slider.Slider.THUMB_POS_CHANGED, self.__onVThumbMove)
self.__scrollPane = ScrollPane(contentNode=contentNode, parent=self)
self.size = size
self.recognizer = gesture.DragRecognizer(
eventNode=self.__scrollPane,
detectedHandler=self.__onDragStart,
moveHandler=self.__onDragMove,
upHandler=self.__onDragUp,
friction=friction
)
def getContentSize(self):
return self.__scrollPane._contentNode.size
def setContentSize(self, size):
self.__scrollPane.contentsize = size
self.__positionNodes()
contentsize = property(getContentSize, setContentSize)
def getContentPos(self):
return self.__scrollPane.contentpos
def setContentPos(self, pos):
self.__scrollPane.contentpos = pos
self.__positionNodes()
self.__positionThumbs(avg.Point2D(pos))
self.notifySubscribers(self.CONTENT_POS_CHANGED, [self.__scrollPane.contentpos])
contentpos = property(getContentPos, setContentPos)
def getSize(self):
return self.__baseSize
def setSize(self, size):
self.__baseSize = size
self.__positionNodes()
__baseSize = avg.DivNode.size
size = property(getSize, setSize)
def __onHThumbMove(self, thumbPos):
self.__scrollPane.contentpos = (thumbPos, self.__scrollPane.contentpos.y)
self.notifySubscribers(self.CONTENT_POS_CHANGED, [self.__scrollPane.contentpos])
def __onVThumbMove(self, thumbPos):
self.__scrollPane.contentpos = (self.__scrollPane.contentpos.x, thumbPos)
self.notifySubscribers(self.CONTENT_POS_CHANGED, [self.__scrollPane.contentpos])
def __onDragStart(self):
self.__dragStartPos = self.__scrollPane.contentpos
self.notifySubscribers(self.PRESSED, [])
def __onDragMove(self, offset):
contentpos = self.__dragStartPos - offset
self.__scrollPane.contentpos = contentpos
self.__positionThumbs(contentpos)
self.notifySubscribers(self.CONTENT_POS_CHANGED, [self.__scrollPane.contentpos])
def __onDragUp(self, offset):
self.__onDragMove(offset)
self.notifySubscribers(self.RELEASED, [])
def __positionNodes(self):
paneSize = self.__baseSize
if self._hScrollBar:
paneSize -= (0, self._hScrollBar.height)
if self._vScrollBar:
paneSize -= (self._vScrollBar.width, 0)
self.__scrollPane.size = paneSize
if self._hScrollBar:
self._hScrollBar.pos = (0, self.__scrollPane.height)
self._hScrollBar.width = self.__scrollPane.width
if self.__scrollPane.contentsize.x <= self.__scrollPane.width:
self._hScrollBar.range = (0, self.__scrollPane.width)
self._hScrollBar.enabled = False
else:
self._hScrollBar.range = (0, self.__scrollPane.contentsize.x)
self._hScrollBar.enabled = True
self._hScrollBar.thumbextent = self.__scrollPane.width
if self._vScrollBar:
self._vScrollBar.pos = (self.__scrollPane.width, 0)
self._vScrollBar.height = self.__scrollPane.height
if self.__scrollPane.contentsize.y <= self.__scrollPane.height:
self._vScrollBar.range = (0, self.__scrollPane.height)
self._vScrollBar.enabled = False
else:
self._vScrollBar.range = (0, self.__scrollPane.contentsize.y)
self._vScrollBar.enabled = True
self._vScrollBar.thumbextent = self.__scrollPane.height
def __positionThumbs(self, contentPos):
if self._hScrollBar:
self._hScrollBar.thumbpos = contentPos.x
if self._vScrollBar:
self._vScrollBar.thumbpos = contentPos.y
| pararthshah/libavg-vaapi | src/python/ui/scrollarea.py | Python | lgpl-2.1 | 7,235 | 0.002626 |
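Consumers hook the published messages the same way `ScrollArea` itself subscribes to its sliders' `THUMB_POS_CHANGED`. A minimal sketch, assuming `scroll_area` is an already-constructed `ScrollArea`:

```python
def on_scroll(content_pos):
    print("content now at", content_pos)

scroll_area.subscribe(ScrollArea.CONTENT_POS_CHANGED, on_scroll)
scroll_area.contentpos = (0, 120)  # fires CONTENT_POS_CHANGED
```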
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
DJANGO_APPS = ['filebrowser']
NICE_NAME = "File Browser"
REQUIRES_HADOOP = False
ICON = "/filebrowser/static/art/icon_filebrowser_24.png"
MENU_INDEX = 20
| 2013Commons/HUE-SHARK | apps/filebrowser/src/filebrowser/settings.py | Python | apache-2.0 | 946 | 0 |
# Definition for singly-linked list.
# class ListNode(object):
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution(object):
def rotateRight(self, head, k):
"""
:type head: ListNode
:type k: int
:rtype: ListNode
"""
if not head: return None
p = head
listLen = 0 # calculate list length
while p:
p = p.next
listLen += 1
k = k % listLen # now k < listLen
if k == 0:
return head
p1 = head; p2 = head
for _ in xrange(k):
p2 = p2.next
assert p2
while p2.next:
p1 = p1.next
p2 = p2.next
newHead = p1.next
p1.next = None
p2.next = head
return newHead
from utils import *
printlist(Solution().rotateRight(makelist(1,2 ,3 ,4 ,5), 2))
| xiaonanln/myleetcode-python | src/61. Rotate List.py | Python | apache-2.0 | 732 | 0.080601 |
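The solution keeps two pointers k nodes apart, so when the lead pointer reaches the tail the lag pointer sits just before the new head. A trace of the test case above:

```python
# rotateRight([1,2,3,4,5], k=2): listLen = 5, k = 2 % 5 = 2.
# p2 advances 2 nodes (to 3); both pointers advance until p2 hits the
# tail (5), leaving p1 at 3. newHead = p1.next = 4; the tail relinks to 1.
# Result: 4 -> 5 -> 1 -> 2 -> 3
```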
#!/usr/bin/env python
#
# Copyright 2015-2021 Flavio Garcia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from firenado.util.sqlalchemy_util import Base, base_to_dict
from sqlalchemy import Column, String
from sqlalchemy.types import Integer, DateTime
from sqlalchemy.sql import text
import unittest
class TestBase(Base):
__tablename__ = "test"
id = Column("id", Integer, primary_key=True)
username = Column("username", String(150), nullable=False)
first_name = Column("first_name", String(150), nullable=False)
last_name = Column("last_name", String(150), nullable=False)
password = Column("password", String(150), nullable=False)
email = Column("email", String(150), nullable=False)
created = Column("created", DateTime, nullable=False,
server_default=text("now()"))
modified = Column("modified", DateTime, nullable=False,
server_default=text("now()"))
class BaseToDictTestCase(unittest.TestCase):
def setUp(self):
self.test_object = TestBase()
self.test_object.id = 1
self.test_object.username = "anusername"
self.test_object.password = "apassword"
self.test_object.first_name = "Test"
self.test_object.last_name = "Object"
self.test_object.email = "test@example.com"
def test_base_to_dict(self):
dict_from_base = base_to_dict(self.test_object)
self.assertEqual(dict_from_base['id'], self.test_object.id)
self.assertEqual(dict_from_base['username'], self.test_object.username)
self.assertEqual(dict_from_base['password'], self.test_object.password)
self.assertEqual(dict_from_base['first_name'],
self.test_object.first_name)
self.assertEqual(dict_from_base['last_name'],
self.test_object.last_name)
self.assertEqual(dict_from_base['email'], self.test_object.email)
self.assertEqual(dict_from_base['created'], self.test_object.created)
self.assertEqual(dict_from_base['modified'], self.test_object.modified)
def test_base_to_dict_with_fields(self):
dict_from_base = base_to_dict(self.test_object,
["id", "username", "first_name"])
self.assertEqual(dict_from_base['id'], self.test_object.id)
self.assertEqual(dict_from_base['username'], self.test_object.username)
self.assertEqual(dict_from_base['first_name'],
self.test_object.first_name)
self.assertTrue("password" not in dict_from_base)
self.assertTrue("last_name" not in dict_from_base)
self.assertTrue("email" not in dict_from_base)
self.assertTrue("created" not in dict_from_base)
self.assertTrue("modified" not in dict_from_base)
| piraz/firenado | tests/util/sqlalchemy_util_test.py | Python | apache-2.0 | 3,289 | 0 |
# Lint as: python3
# Copyright 2020 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The core data types ctexplain manipulates."""
from typing import Mapping
from typing import Optional
from typing import Tuple
# Do not edit this line. Copybara replaces it with PY2 migration helper.
from dataclasses import dataclass
from dataclasses import field
from frozendict import frozendict
@dataclass(frozen=True)
class Configuration():
"""Stores a build configuration as a collection of fragments and options."""
# Mapping of each BuildConfiguration.Fragment in this configuration to the
# FragmentOptions it requires.
#
# All names are qualified up to the base file name, without package prefixes.
# For example, foo.bar.BazConfiguration appears as "BazConfiguration".
# foo.bar.BazConfiguration$Options appears as "BazConfiguration$Options".
fragments: Mapping[str, Tuple[str, ...]]
# Mapping of FragmentOptions to option key/value pairs. For example:
# {"CoreOptions": {"action_env": "[]", "cpu": "x86", ...}, ...}.
#
# Option values are stored as strings of whatever "bazel config" outputs.
#
# Note that Fragment and FragmentOptions aren't the same thing.
options: Mapping[str, Mapping[str, str]]
@dataclass(frozen=True)
class ConfiguredTarget():
"""Encapsulates a target + configuration + required fragments."""
# Label of the target this represents.
label: str
# Configuration this target is applied to. May be None.
config: Optional[Configuration]
# The hash of this configuration as reported by Bazel.
config_hash: str
# Fragments required by this configured target and its transitive
# dependencies. Stored as base names without packages. For example:
# "PlatformOptions" or "FooConfiguration$Options".
transitive_fragments: Tuple[str, ...]
@dataclass(frozen=True)
class HostConfiguration(Configuration):
"""Special marker for the host configuration.
There's exactly one host configuration per build, so we shouldn't suggest
merging it with other configurations.
TODO(gregce): suggest host configuration trimming once we figure out the right
criteria. Even if Bazel's not technically equipped to do the trimming, it's
still theoretically valuable information. Note that moving from host to exec
configurations make this all a little less relevant, since exec configurations
aren't "special" compared to normal configurations.
"""
# We don't currently read the host config's fragments or option values.
fragments: Tuple[str, ...] = ()
options: Mapping[str,
Mapping[str,
str]] = field(default_factory=lambda: frozendict({}))
@dataclass(frozen=True)
class NullConfiguration(Configuration):
"""Special marker for the null configuration.
By definition this has no fragments or options.
"""
fragments: Tuple[str, ...] = ()
options: Mapping[str,
Mapping[str,
str]] = field(default_factory=lambda: frozendict({}))
| twitter-forks/bazel | tools/ctexplain/types.py | Python | apache-2.0 | 3,539 | 0.010455 |
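Both dataclasses above are frozen, so instances are hashable and can key sets and dicts, which is what configuration-merging analyses need. A construction sketch (fragment, option, label, and hash values are illustrative only):

```python
from frozendict import frozendict

config = Configuration(
    fragments=frozendict({'BazConfiguration': ('BazConfiguration$Options',)}),
    options=frozendict({'CoreOptions': frozendict({'cpu': 'x86'})}),
)
ct = ConfiguredTarget(
    label='//foo:bar',            # illustrative label
    config=config,
    config_hash='abc123',         # illustrative hash
    transitive_fragments=('CoreOptions',),
)
```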
import logging
from .base import SdoBase
from .constants import *
from .exceptions import *
logger = logging.getLogger(__name__)
class SdoServer(SdoBase):
"""Creates an SDO server."""
def __init__(self, rx_cobid, tx_cobid, node):
"""
:param int rx_cobid:
COB-ID that the server receives on (usually 0x600 + node ID)
:param int tx_cobid:
COB-ID that the server responds with (usually 0x580 + node ID)
:param canopen.LocalNode od:
Node object owning the server
"""
SdoBase.__init__(self, rx_cobid, tx_cobid, node.object_dictionary)
self._node = node
self._buffer = None
self._toggle = 0
self._index = None
self._subindex = None
self.last_received_error = 0x00000000
def on_request(self, can_id, data, timestamp):
command, = struct.unpack_from("B", data, 0)
ccs = command & 0xE0
try:
if ccs == REQUEST_UPLOAD:
self.init_upload(data)
elif ccs == REQUEST_SEGMENT_UPLOAD:
self.segmented_upload(command)
elif ccs == REQUEST_DOWNLOAD:
self.init_download(data)
elif ccs == REQUEST_SEGMENT_DOWNLOAD:
self.segmented_download(command, data)
elif ccs == REQUEST_BLOCK_UPLOAD:
self.block_upload(data)
elif ccs == REQUEST_BLOCK_DOWNLOAD:
self.block_download(data)
elif ccs == REQUEST_ABORTED:
self.request_aborted(data)
else:
self.abort(0x05040001)
except SdoAbortedError as exc:
self.abort(exc.code)
except KeyError as exc:
self.abort(0x06020000)
except Exception as exc:
self.abort()
logger.exception(exc)
def init_upload(self, request):
_, index, subindex = SDO_STRUCT.unpack_from(request)
self._index = index
self._subindex = subindex
res_command = RESPONSE_UPLOAD | SIZE_SPECIFIED
response = bytearray(8)
data = self._node.get_data(index, subindex, check_readable=True)
size = len(data)
if size <= 4:
logger.info("Expedited upload for 0x%X:%d", index, subindex)
res_command |= EXPEDITED
res_command |= (4 - size) << 2
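            # Worked example, assuming the usual constant values: a 2-byte
            # value gives 0x40 | SIZE_SPECIFIED | EXPEDITED | ((4 - 2) << 2)
            # = 0x4B, the standard CiA 301 expedited-upload response byte.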
response[4:4 + size] = data
else:
logger.info("Initiating segmented upload for 0x%X:%d", index, subindex)
struct.pack_into("<L", response, 4, size)
self._buffer = bytearray(data)
self._toggle = 0
SDO_STRUCT.pack_into(response, 0, res_command, index, subindex)
self.send_response(response)
def segmented_upload(self, command):
if command & TOGGLE_BIT != self._toggle:
# Toggle bit mismatch
raise SdoAbortedError(0x05030000)
data = self._buffer[:7]
size = len(data)
# Remove sent data from buffer
del self._buffer[:7]
res_command = RESPONSE_SEGMENT_UPLOAD
# Add toggle bit
res_command |= self._toggle
        # Add number of unused bytes
res_command |= (7 - size) << 1
if not self._buffer:
# Nothing left in buffer
res_command |= NO_MORE_DATA
# Toggle bit for next message
self._toggle ^= TOGGLE_BIT
response = bytearray(8)
response[0] = res_command
response[1:1 + size] = data
self.send_response(response)
def block_upload(self, data):
        # We currently don't support BLOCK UPLOAD;
        # according to CiA 301 the server is allowed
        # to switch to a regular SDO upload instead
logger.info("Received block upload, switch to regular SDO upload")
self.init_upload(data)
def request_aborted(self, data):
_, index, subindex, code = struct.unpack_from("<BHBL", data)
self.last_received_error = code
logger.info("Received request aborted for 0x%X:%d with code 0x%X", index, subindex, code)
def block_download(self, data):
# We currently don't support BLOCK DOWNLOAD
logger.error("Block download is not supported")
self.abort(0x05040001)
def init_download(self, request):
# TODO: Check if writable (now would fail on end of segmented downloads)
command, index, subindex = SDO_STRUCT.unpack_from(request)
self._index = index
self._subindex = subindex
res_command = RESPONSE_DOWNLOAD
response = bytearray(8)
if command & EXPEDITED:
logger.info("Expedited download for 0x%X:%d", index, subindex)
if command & SIZE_SPECIFIED:
size = 4 - ((command >> 2) & 0x3)
else:
size = 4
self._node.set_data(index, subindex, request[4:4 + size], check_writable=True)
else:
logger.info("Initiating segmented download for 0x%X:%d", index, subindex)
if command & SIZE_SPECIFIED:
size, = struct.unpack_from("<L", request, 4)
logger.info("Size is %d bytes", size)
self._buffer = bytearray()
self._toggle = 0
SDO_STRUCT.pack_into(response, 0, res_command, index, subindex)
self.send_response(response)
def segmented_download(self, command, request):
if command & TOGGLE_BIT != self._toggle:
# Toggle bit mismatch
raise SdoAbortedError(0x05030000)
last_byte = 8 - ((command >> 1) & 0x7)
self._buffer.extend(request[1:last_byte])
if command & NO_MORE_DATA:
self._node.set_data(self._index,
self._subindex,
self._buffer,
check_writable=True)
res_command = RESPONSE_SEGMENT_DOWNLOAD
# Add toggle bit
res_command |= self._toggle
# Toggle bit for next message
self._toggle ^= TOGGLE_BIT
response = bytearray(8)
response[0] = res_command
self.send_response(response)
def send_response(self, response):
self.network.send_message(self.tx_cobid, response)
def abort(self, abort_code=0x08000000):
"""Abort current transfer."""
data = struct.pack("<BHBL", RESPONSE_ABORTED,
self._index, self._subindex, abort_code)
self.send_response(data)
# logger.error("Transfer aborted with code 0x{:08X}".format(abort_code))
def upload(self, index: int, subindex: int) -> bytes:
"""May be called to make a read operation without an Object Dictionary.
:param index:
Index of object to read.
:param subindex:
Sub-index of object to read.
:return: A data object.
:raises canopen.SdoAbortedError:
When node responds with an error.
"""
return self._node.get_data(index, subindex)
def download(
self,
index: int,
subindex: int,
data: bytes,
force_segment: bool = False,
):
"""May be called to make a write operation without an Object Dictionary.
:param index:
Index of object to write.
:param subindex:
Sub-index of object to write.
:param data:
Data to be written.
:raises canopen.SdoAbortedError:
When node responds with an error.
"""
return self._node.set_data(index, subindex, data)
| christiansandberg/canopen | canopen/sdo/server.py | Python | mit | 7,569 | 0.001057 |
import wx
import eos.db
import gui.mainFrame
from gui import globalEvents as GE
from gui.fitCommands.calc.module.projectedAdd import CalcAddProjectedModuleCommand
from gui.fitCommands.helpers import InternalCommandHistory, ModuleInfo
from service.fit import Fit
class GuiAddProjectedModuleCommand(wx.Command):
def __init__(self, fitID, itemID):
wx.Command.__init__(self, True, 'Add Projected Module')
self.internalHistory = InternalCommandHistory()
self.fitID = fitID
self.itemID = itemID
def Do(self):
cmd = CalcAddProjectedModuleCommand(fitID=self.fitID, modInfo=ModuleInfo(itemID=self.itemID))
success = self.internalHistory.submit(cmd)
sFit = Fit.getInstance()
if cmd.needsGuiRecalc:
eos.db.flush()
sFit.recalc(self.fitID)
sFit.fill(self.fitID)
eos.db.commit()
wx.PostEvent(gui.mainFrame.MainFrame.getInstance(), GE.FitChanged(fitIDs=(self.fitID,)))
return success
def Undo(self):
success = self.internalHistory.undoAll()
eos.db.flush()
sFit = Fit.getInstance()
sFit.recalc(self.fitID)
sFit.fill(self.fitID)
eos.db.commit()
wx.PostEvent(gui.mainFrame.MainFrame.getInstance(), GE.FitChanged(fitIDs=(self.fitID,)))
return success
| pyfa-org/Pyfa | gui/fitCommands/gui/projectedModule/add.py | Python | gpl-3.0 | 1,334 | 0.002999 |
# Copyright (c) 2015, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/scri/blob/master/LICENSE>
import pytest
import numpy as np
from numpy import *
import quaternion
import spherical_functions as sf
import scri
from conftest import linear_waveform, constant_waveform, random_waveform, delta_waveform
@pytest.mark.parametrize("w", [linear_waveform, constant_waveform, random_waveform])
def test_identity_rotation(w):
# Rotation by 1 should be identity operation
W_in = w()
W_out = w()
assert W_in.ensure_validity(alter=False)
assert W_out.ensure_validity(alter=False)
W_out.rotate_decomposition_basis(quaternion.one)
assert W_out.ensure_validity(alter=False)
assert np.array_equal(W_out.t, W_in.t)
assert np.array_equal(W_out.frame, W_in.frame)
assert np.array_equal(W_out.data, W_in.data)
assert np.array_equal(W_out.LM, W_in.LM)
assert W_out.ell_min == W_in.ell_min
assert W_out.ell_max == W_in.ell_max
for h_in, h_out in zip(W_in.history, W_out.history[:-1]):
assert h_in == h_out.replace(
f"{type(W_out).__name__}_{str(W_out.num)}", f"{type(W_in).__name__}_{str(W_in.num)}"
) or (h_in.startswith("# ") and h_out.startswith("# "))
assert W_out.frameType == W_in.frameType
assert W_out.dataType == W_in.dataType
assert W_out.r_is_scaled_out == W_in.r_is_scaled_out
assert W_out.m_is_scaled_out == W_in.m_is_scaled_out
assert isinstance(W_out.num, int)
assert W_out.num != W_in.num
@pytest.mark.parametrize("w", [linear_waveform, constant_waveform, random_waveform])
def test_rotation_invariants(w):
# A random rotation should leave everything but data and frame the
# same (except num, of course)
W_in = w()
W_out = w()
np.random.seed(hash("test_rotation_invariants") % 4294967294) # Use mod to get in an acceptable range
W_out.rotate_decomposition_basis(np.quaternion(*np.random.uniform(-1, 1, 4)).normalized())
assert W_in.ensure_validity(alter=False)
assert W_out.ensure_validity(alter=False)
assert np.array_equal(W_out.t, W_in.t)
assert not np.array_equal(W_out.frame, W_in.frame) # This SHOULD change
assert not np.array_equal(W_out.data, W_in.data) # This SHOULD change
assert W_out.ell_min == W_in.ell_min
assert W_out.ell_max == W_in.ell_max
assert np.array_equal(W_out.LM, W_in.LM)
for h_in, h_out in zip(W_in.history[:-3], W_out.history[:-5]):
assert h_in == h_out.replace(
f"{type(W_out).__name__}_{str(W_out.num)}", f"{type(W_in).__name__}_{str(W_in.num)}"
) or (h_in.startswith("# ") and h_out.startswith("# "))
assert W_out.frameType == W_in.frameType
assert W_out.dataType == W_in.dataType
assert W_out.r_is_scaled_out == W_in.r_is_scaled_out
assert W_out.m_is_scaled_out == W_in.m_is_scaled_out
assert W_out.num != W_in.num
@pytest.mark.parametrize("w", [linear_waveform, constant_waveform, random_waveform])
def test_constant_versus_series(w):
# A random rotation should leave everything but data and frame the
# same (except num, of course)
W_const = w()
W_series = w()
np.random.seed(hash("test_constant_versus_series") % 4294967294) # Use mod to get in an acceptable range
W_const.rotate_decomposition_basis(np.quaternion(*np.random.uniform(-1, 1, 4)).normalized())
W_series.rotate_decomposition_basis(
np.array([np.quaternion(*np.random.uniform(-1, 1, 4)).normalized()] * W_series.n_times)
)
assert W_const.ensure_validity(alter=False)
assert W_series.ensure_validity(alter=False)
assert np.array_equal(W_series.t, W_const.t)
assert not np.array_equal(W_series.frame, W_const.frame) # This SHOULD change
assert not np.array_equal(W_series.data, W_const.data) # This SHOULD change
assert W_series.ell_min == W_const.ell_min
assert W_series.ell_max == W_const.ell_max
assert np.array_equal(W_series.LM, W_const.LM)
for h_const, h_series in zip(W_const.history[:-5], W_series.history[:-11]):
assert h_const == h_series.replace(
f"{type(W_series).__name__}_{str(W_series.num)}", f"{type(W_const).__name__}_{str(W_const.num)}"
) or (h_const.startswith("# ") and h_series.startswith("# "))
assert W_series.frameType == W_const.frameType
assert W_series.dataType == W_const.dataType
assert W_series.r_is_scaled_out == W_const.r_is_scaled_out
assert W_series.m_is_scaled_out == W_const.m_is_scaled_out
assert W_series.num != W_const.num
@pytest.mark.parametrize("w", [linear_waveform, constant_waveform, random_waveform])
def test_rotation_inversion(w):
# Rotation followed by the inverse rotation should leave
# everything the same (except that the frame data will be either a
# 1 or a series of 1s)
np.random.seed(hash("test_rotation_inversion") % 4294967294) # Use mod to get in an acceptable range
W_in = w()
assert W_in.ensure_validity(alter=False)
# We loop over (1) a single constant rotation, and (2) an array of random rotations
for R_basis in [
np.quaternion(*np.random.uniform(-1, 1, 4)).normalized(),
np.array([np.quaternion(*np.random.uniform(-1, 1, 4)).normalized()] * W_in.n_times),
]:
W_out = w()
W_out.rotate_decomposition_basis(R_basis)
W_out.rotate_decomposition_basis(~R_basis)
assert W_out.ensure_validity(alter=False)
assert np.array_equal(W_out.t, W_in.t)
assert np.max(np.abs(W_out.frame - W_in.frame)) < 1e-15
        assert np.allclose(W_out.data, W_in.data, atol=W_in.ell_max ** 4 * 4e-14, rtol=W_in.ell_max ** 4 * 4e-14)
assert W_out.ell_min == W_in.ell_min
assert W_out.ell_max == W_in.ell_max
assert np.array_equal(W_out.LM, W_in.LM)
for h_in, h_out in zip(W_in.history[:-3], W_out.history[:-5]):
assert h_in == h_out.replace(
f"{type(W_out).__name__}_{str(W_out.num)}", f"{type(W_in).__name__}_{str(W_in.num)}"
) or (h_in.startswith("# datetime") and h_out.startswith("# datetime"))
assert W_out.frameType == W_in.frameType
assert W_out.dataType == W_in.dataType
assert W_out.r_is_scaled_out == W_in.r_is_scaled_out
assert W_out.m_is_scaled_out == W_in.m_is_scaled_out
assert W_out.num != W_in.num
def test_rotations_of_0_0_mode(Rs):
# The (ell,m)=(0,0) mode should be rotationally invariant
n_copies = 10
W_in = delta_waveform(0, 0, begin=-10.0, end=100.0, n_times=n_copies * len(Rs), ell_min=0, ell_max=8)
assert W_in.ensure_validity(alter=False)
W_out = scri.WaveformModes(W_in)
R_basis = np.array([R for R in Rs for i in range(n_copies)])
W_out.rotate_decomposition_basis(R_basis)
assert W_out.ensure_validity(alter=False)
assert np.array_equal(W_out.t, W_in.t)
assert np.max(np.abs(W_out.frame - R_basis)) == 0.0
assert np.array_equal(W_out.data, W_in.data)
assert W_out.ell_min == W_in.ell_min
assert W_out.ell_max == W_in.ell_max
assert np.array_equal(W_out.LM, W_in.LM)
for h_in, h_out in zip(W_in.history, W_out.history[:-1]):
assert h_in == h_out.replace(
f"{type(W_out).__name__}_{str(W_out.num)}", f"{type(W_in).__name__}_{str(W_in.num)}"
) or (h_in.startswith("# ") and h_out.startswith("# "))
assert W_out.frameType == W_in.frameType
assert W_out.dataType == W_in.dataType
assert W_out.r_is_scaled_out == W_in.r_is_scaled_out
assert W_out.m_is_scaled_out == W_in.m_is_scaled_out
assert W_out.num != W_in.num
def test_rotations_of_each_mode_individually(Rs):
ell_min = 0
ell_max = 8 # sf.ell_max is just too much; this test is too slow, and ell=8 should be fine
R_basis = Rs
Ds = np.empty((len(Rs), sf.LMpM_total_size(ell_min, ell_max)), dtype=complex)
for i, R in enumerate(Rs):
Ds[i, :] = sf.Wigner_D_matrices(R, ell_min, ell_max)
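    # The assertions below rely on the fact that rotating a single (ell, Mp)
    # delta mode reproduces one row of the Wigner D matrix:
    # sf.LMpM_index(ell, Mp, -ell, ell_min) locates D^{ell}_{Mp, m=-ell} in the
    # flattened array, and the next (2*ell + 1) entries run over m = -ell..ell.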
for ell in range(ell_max + 1):
first_zeros = np.zeros((len(Rs), sf.LM_total_size(ell_min, ell - 1)), dtype=complex)
later_zeros = np.zeros((len(Rs), sf.LM_total_size(ell + 1, ell_max)), dtype=complex)
        for Mp in range(-ell, ell + 1):
W_in = delta_waveform(ell, Mp, begin=-10.0, end=100.0, n_times=len(Rs), ell_min=ell_min, ell_max=ell_max)
            # Now, the modes are f^{\ell,m} = \delta^{\ell,m}_{L,Mp}
assert W_in.ensure_validity(alter=False)
W_out = scri.WaveformModes(W_in)
W_out.rotate_decomposition_basis(Rs)
assert W_out.ensure_validity(alter=False)
assert np.array_equal(W_out.t, W_in.t)
assert np.max(np.abs(W_out.frame - R_basis)) == 0.0
i_D0 = sf.LMpM_index(ell, Mp, -ell, ell_min)
assert np.array_equal(W_out.data[:, : sf.LM_total_size(ell_min, ell - 1)], first_zeros)
if ell < ell_max:
assert np.array_equal(
W_out.data[:, sf.LM_total_size(ell_min, ell - 1) : -sf.LM_total_size(ell + 1, ell_max)],
Ds[:, i_D0 : i_D0 + (2 * ell + 1)],
)
assert np.array_equal(W_out.data[:, -sf.LM_total_size(ell + 1, ell_max) :], later_zeros)
else:
assert np.array_equal(
W_out.data[:, sf.LM_total_size(ell_min, ell - 1) :], Ds[:, i_D0 : i_D0 + (2 * ell + 1)]
)
assert W_out.ell_min == W_in.ell_min
assert W_out.ell_max == W_in.ell_max
assert np.array_equal(W_out.LM, W_in.LM)
for h_in, h_out in zip(W_in.history, W_out.history[:-1]):
assert h_in == h_out.replace(type(W_out).__name__ + str(W_out.num), type(W_in).__name__ + str(W_in.num))
assert W_out.frameType == W_in.frameType
assert W_out.dataType == W_in.dataType
assert W_out.r_is_scaled_out == W_in.r_is_scaled_out
assert W_out.m_is_scaled_out == W_in.m_is_scaled_out
assert W_out.num != W_in.num
| moble/scri | tests/test_rotations.py | Python | mit | 10,004 | 0.003898 |
# -*- coding: utf-8 -*-
# Copyright 2017 IBM RESEARCH. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""
Exception for errors raised while interpreting nodes.
"""
class NodeException(Exception):
"""Base class for errors raised while interpreting nodes."""
def __init__(self, *msg):
"""Set the error message."""
self.msg = ' '.join(msg)
def __str__(self):
"""Return the message."""
return repr(self.msg)
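# Illustrative usage (hypothetical message fragments):
#
#   raise NodeException("missing operand for", "binary operator")
#
# The constructor joins its arguments with spaces, so str(exc) yields the
# repr of the joined message: "'missing operand for binary operator'".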
| ChristopheVuillot/qiskit-sdk-py | qiskit/qasm/_node/_nodeexception.py | Python | apache-2.0 | 1,054 | 0 |
# -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
import logging
from superdesk import get_backend
import superdesk
from apps.publish.content import ArchivePublishResource, ArchivePublishService, \
KillPublishResource, KillPublishService, CorrectPublishResource, CorrectPublishService, \
ResendResource, ResendService
from apps.publish.enqueue import EnqueueContent
from apps.publish.published_item import PublishedItemResource, PublishedItemService
logger = logging.getLogger(__name__)
def init_app(app):
endpoint_name = 'archive_publish'
service = ArchivePublishService(endpoint_name, backend=get_backend())
ArchivePublishResource(endpoint_name, app=app, service=service)
endpoint_name = 'archive_kill'
service = KillPublishService(endpoint_name, backend=get_backend())
KillPublishResource(endpoint_name, app=app, service=service)
endpoint_name = 'archive_correct'
service = CorrectPublishService(endpoint_name, backend=get_backend())
CorrectPublishResource(endpoint_name, app=app, service=service)
endpoint_name = 'published'
service = PublishedItemService(endpoint_name, backend=get_backend())
PublishedItemResource(endpoint_name, app=app, service=service)
endpoint_name = 'archive_resend'
service = ResendService(endpoint_name, backend=get_backend())
ResendResource(endpoint_name, app=app, service=service)
superdesk.privilege(name='subscribers', label='Subscribers', description='User can manage subscribers')
superdesk.privilege(name='publish', label='Publish', description='Publish a content')
superdesk.privilege(name='kill', label='Kill', description='Kill a published content')
superdesk.privilege(name='correct', label='Correction', description='Correction to a published content')
superdesk.privilege(name='publish_queue', label='Publish Queue', description='User can update publish queue')
superdesk.privilege(name='resend', label='Resending Stories', description='User can resend published stories')
superdesk.privilege(name='embargo', label='Embargo', description='User can set embargo date')
def enqueue_content():
EnqueueContent().run()
| nistormihai/superdesk-core | apps/publish/__init__.py | Python | agpl-3.0 | 2,426 | 0.004122 |
# coding=utf-8
# Copyright 2020 The TF-Agents Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tf_agents.networks.categorical_q_network."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gin
import tensorflow as tf # pylint: disable=g-explicit-tensorflow-version-import
from tf_agents.networks import categorical_q_network
from tf_agents.specs import tensor_spec
from tf_agents.trajectories import time_step as ts
from tf_agents.utils import test_utils
class CategoricalQNetworkTest(test_utils.TestCase):
def tearDown(self):
gin.clear_config()
super(CategoricalQNetworkTest, self).tearDown()
def testBuild(self):
batch_size = 3
num_state_dims = 5
action_spec = tensor_spec.BoundedTensorSpec([1], tf.int32, 0, 1)
num_actions = action_spec.maximum - action_spec.minimum + 1
self.assertEqual(num_actions, 2)
observations_spec = tensor_spec.TensorSpec([num_state_dims], tf.float32)
observations = tf.random.uniform([batch_size, num_state_dims])
time_steps = ts.restart(observations, batch_size)
q_network = categorical_q_network.CategoricalQNetwork(
input_tensor_spec=observations_spec,
action_spec=action_spec,
fc_layer_params=[3])
logits, _ = q_network(time_steps.observation)
self.assertAllEqual(logits.shape.as_list(),
[batch_size, num_actions, q_network._num_atoms])
# There are two trainable layers here: the specified fc_layer and the final
# logits layer. Each layer has two trainable_variables (kernel and bias),
# for a total of 4.
self.assertLen(q_network.trainable_variables, 4)
def testChangeHiddenLayers(self):
batch_size = 3
num_state_dims = 5
action_spec = tensor_spec.BoundedTensorSpec([1], tf.int32, 0, 1)
num_actions = action_spec.maximum - action_spec.minimum + 1
self.assertEqual(num_actions, 2)
observations_spec = tensor_spec.TensorSpec([num_state_dims], tf.float32)
observations = tf.random.uniform([batch_size, num_state_dims])
time_steps = ts.restart(observations, batch_size)
q_network = categorical_q_network.CategoricalQNetwork(
input_tensor_spec=observations_spec,
action_spec=action_spec,
fc_layer_params=[3, 3])
logits, _ = q_network(time_steps.observation)
self.assertAllEqual(logits.shape.as_list(),
[batch_size, num_actions, q_network._num_atoms])
# This time there is an extra fc layer, for a total of 6
# trainable_variables.
self.assertLen(q_network.trainable_variables, 6)
def testAddConvLayers(self):
batch_size = 3
num_state_dims = 5
action_spec = tensor_spec.BoundedTensorSpec([1], tf.int32, 0, 1)
num_actions = action_spec.maximum - action_spec.minimum + 1
self.assertEqual(num_actions, 2)
observations_spec = tensor_spec.TensorSpec(
[3, 3, num_state_dims], tf.float32)
observations = tf.random.uniform([batch_size, 3, 3, num_state_dims])
time_steps = ts.restart(observations, batch_size)
q_network = categorical_q_network.CategoricalQNetwork(
input_tensor_spec=observations_spec,
action_spec=action_spec,
conv_layer_params=[(16, 2, 1), (15, 2, 1)])
logits, _ = q_network(time_steps.observation)
self.assertAllEqual(logits.shape.as_list(),
[batch_size, num_actions, q_network._num_atoms])
# This time there are two conv layers and one final logits layer, for a
# total of 6 trainable_variables.
self.assertLen(q_network.trainable_variables, 6)
def testCorrectOutputShape(self):
batch_size = 3
num_state_dims = 5
action_spec = tensor_spec.BoundedTensorSpec([1], tf.int32, 0, 1)
num_actions = action_spec.maximum - action_spec.minimum + 1
self.assertEqual(num_actions, 2)
observations_spec = tensor_spec.TensorSpec([num_state_dims], tf.float32)
observations = tf.random.uniform([batch_size, num_state_dims])
time_steps = ts.restart(observations, batch_size)
q_network = categorical_q_network.CategoricalQNetwork(
input_tensor_spec=observations_spec,
action_spec=action_spec,
fc_layer_params=[3])
logits, _ = q_network(time_steps.observation)
self.assertAllEqual(logits.shape.as_list(),
[batch_size, num_actions, q_network._num_atoms])
self.evaluate(tf.compat.v1.global_variables_initializer())
eval_logits = self.evaluate(logits)
self.assertAllEqual(
eval_logits.shape, [batch_size, num_actions, q_network._num_atoms])
def testGinConfig(self):
batch_size = 3
num_state_dims = 5
action_spec = tensor_spec.BoundedTensorSpec([1], tf.int32, 0, 1)
num_actions = action_spec.maximum - action_spec.minimum + 1
self.assertEqual(num_actions, 2)
observations_spec = tensor_spec.TensorSpec(
[3, 3, num_state_dims], tf.float32)
observations = tf.random.uniform([batch_size, 3, 3, num_state_dims])
next_observations = tf.random.uniform([batch_size, 3, 3, num_state_dims])
time_steps = ts.restart(observations, batch_size)
next_time_steps = ts.restart(next_observations, batch_size)
# Note: this is cleared in tearDown().
gin.parse_config("""
CategoricalQNetwork.conv_layer_params = [(16, 2, 1), (15, 2, 1)]
CategoricalQNetwork.fc_layer_params = [4, 3, 5]
""")
q_network = categorical_q_network.CategoricalQNetwork(
input_tensor_spec=observations_spec,
action_spec=action_spec)
logits, _ = q_network(time_steps.observation)
next_logits, _ = q_network(next_time_steps.observation)
self.assertAllEqual(logits.shape.as_list(),
[batch_size, num_actions, q_network.num_atoms])
self.assertAllEqual(next_logits.shape.as_list(),
[batch_size, num_actions, q_network.num_atoms])
# This time there are six layers: two conv layers, three fc layers, and one
# final logits layer, for 12 trainable_variables in total.
self.assertLen(q_network.trainable_variables, 12)
if __name__ == '__main__':
tf.test.main()
| tensorflow/agents | tf_agents/networks/categorical_q_network_test.py | Python | apache-2.0 | 6,673 | 0.001049 |
#coding:utf-8
'''
Timeouts (timeout settings)
requests.get('http://github.com', timeout=2)
'''
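# A minimal runnable sketch (not part of the original snippet): exceeding the
# timeout raises requests.exceptions.Timeout, which is worth catching explicitly.
import requests
try:
    requests.get('http://github.com', timeout=2)
except requests.exceptions.Timeout:
    print('request timed out after 2 seconds')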
| qiyeboy/SpiderBook | ch03/3.2.3.7.py | Python | mit | 91 | 0.012048 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: skip-file
import sys
import os
import mxnet as mx
import numpy as np
import unittest
from mxnet.test_utils import assert_almost_equal, default_context, EnvManager
curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__)))
sys.path.insert(0, os.path.join(curr_path, '../unittest'))
from common import setup_module, with_seed, teardown
from mxnet.cuda_utils import get_device_count
shape = (4, 4)
keys = [5, 7, 11]
str_keys = ['b', 'c', 'd']
def init_kv_with_str(stype='default', kv_type='local'):
"""init kv """
kv = mx.kv.create(kv_type)
# single
kv.init('a', mx.nd.zeros(shape, stype=stype))
# list
kv.init(str_keys, [mx.nd.zeros(shape=shape, stype=stype)] * len(keys))
return kv
# Test seed 89411477 (module seed 1829754103) resulted in a py3-gpu CI runner core dump.
# Not reproducible, so this test is back on random seeds.
@with_seed()
@unittest.skipIf(mx.context.num_gpus() < 2, "test_rsp_push_pull needs more than 1 GPU")
def test_rsp_push_pull():
num_gpus = get_device_count()
def check_rsp_push_pull(kv_type, sparse_pull, is_push_cpu=True):
kv = init_kv_with_str('row_sparse', kv_type)
kv.init('e', mx.nd.ones(shape).tostype('row_sparse'))
push_ctxs = [mx.cpu(i) if is_push_cpu else mx.gpu(i) for i in range(2)]
kv.push('e', [mx.nd.ones(shape, ctx=context).tostype('row_sparse') for context in push_ctxs])
def check_rsp_pull(kv, ctxs, sparse_pull, is_same_rowid=False, use_slice=False):
count = len(ctxs)
num_rows = shape[0]
row_ids = []
all_row_ids = np.arange(num_rows)
vals = [mx.nd.sparse.zeros(shape=shape, ctx=ctxs[i], stype='row_sparse') for i in range(count)]
if is_same_rowid:
row_id = np.random.randint(num_rows, size=num_rows)
row_ids = [mx.nd.array(row_id)] * count
elif use_slice:
total_row_ids = mx.nd.array(np.random.randint(num_rows, size=count*num_rows))
row_ids = [total_row_ids[i*num_rows : (i+1)*num_rows] for i in range(count)]
else:
for i in range(count):
row_id = np.random.randint(num_rows, size=num_rows)
row_ids.append(mx.nd.array(row_id))
row_ids_to_pull = row_ids[0] if (len(row_ids) == 1 or is_same_rowid) else row_ids
vals_to_pull = vals[0] if len(vals) == 1 else vals
kv.row_sparse_pull('e', out=vals_to_pull, row_ids=row_ids_to_pull)
for val, row_id in zip(vals, row_ids):
retained = val.asnumpy()
excluded_row_ids = np.setdiff1d(all_row_ids, row_id.asnumpy())
for row in range(num_rows):
expected_val = np.zeros_like(retained[row])
expected_val += 0 if row in excluded_row_ids else 2
assert_almost_equal(retained[row], expected_val)
if sparse_pull is True:
kv.pull('e', out=vals_to_pull, ignore_sparse=False)
for val in vals:
retained = val.asnumpy()
expected_val = np.zeros_like(retained)
expected_val[:] = 2
assert_almost_equal(retained, expected_val)
check_rsp_pull(kv, [mx.gpu(0)], sparse_pull)
check_rsp_pull(kv, [mx.cpu(0)], sparse_pull)
check_rsp_pull(kv, [mx.gpu(i//2) for i in range(4)], sparse_pull)
check_rsp_pull(kv, [mx.gpu(i//2) for i in range(4)], sparse_pull, is_same_rowid=True)
check_rsp_pull(kv, [mx.cpu(i) for i in range(4)], sparse_pull)
check_rsp_pull(kv, [mx.cpu(i) for i in range(4)], sparse_pull, is_same_rowid=True)
check_rsp_pull(kv, [mx.gpu(i//2) for i in range(4)], sparse_pull, use_slice=True)
check_rsp_pull(kv, [mx.cpu(i) for i in range(4)], sparse_pull, use_slice=True)
envs = ["","1"]
key = "MXNET_KVSTORE_USETREE"
for val in envs:
with EnvManager(key, val):
if val is "1":
sparse_pull = False
else:
sparse_pull = True
check_rsp_push_pull('local', sparse_pull)
check_rsp_push_pull('device', sparse_pull)
if num_gpus >= 2:
check_rsp_push_pull('device', sparse_pull, is_push_cpu=False)
else:
sys.stdout.write('Bypassing 2-GPU test, num gpus found = ' + str(num_gpus) + ' ... ')
sys.stdout.flush()
@with_seed()
def test_row_sparse_pull_single_device():
kvstore = mx.kv.create('device')
copy = mx.nd.random_normal(shape=(4,4), ctx=mx.gpu(0))
grad = copy.tostype("row_sparse")
key = 0
kvstore.init(key, grad)
idx = grad.indices
kvstore.push(key, grad)
kvstore.row_sparse_pull(key, out=grad, row_ids=idx)
assert_almost_equal(grad.asnumpy(), copy.asnumpy())
def test_rsp_push_pull_large_rowid():
num_rows = 793470
val = mx.nd.ones((num_rows, 1)).tostype('row_sparse').copyto(mx.gpu())
kv = mx.kv.create('device')
kv.init('a', val)
out = mx.nd.zeros((num_rows,1), stype='row_sparse').copyto(mx.gpu())
kv.push('a', val)
kv.row_sparse_pull('a', out=out, row_ids=mx.nd.arange(0, num_rows, dtype='int64'))
assert(out.indices.shape[0] == num_rows)
if __name__ == '__main__':
import nose
nose.runmodule()
| mlperf/training_results_v0.6 | Fujitsu/benchmarks/resnet/implementations/mxnet/tests/python/gpu/test_kvstore_gpu.py | Python | apache-2.0 | 6,181 | 0.003721 |
# -*- coding: utf-8 -*-
"""Unit and functional test suite for tg2express."""
from os import getcwd, path
from paste.deploy import loadapp
from webtest import TestApp
from gearbox.commands.setup_app import SetupAppCommand
from tg import config
from tg.util import Bunch
from tg2express import model
__all__ = ['setup_app', 'setup_db', 'teardown_db', 'TestController']
application_name = 'main_without_authn'
def load_app(name=application_name):
"""Load the test application."""
return TestApp(loadapp('config:test.ini#%s' % name, relative_to=getcwd()))
def setup_app():
"""Setup the application."""
cmd = SetupAppCommand(Bunch(options=Bunch(verbose_level=1)), Bunch())
cmd.run(Bunch(config_file='config:test.ini', section_name=None))
def setup_db():
"""Create the database schema (not needed when you run setup_app)."""
engine = config['tg.app_globals'].sa_engine
model.init_model(engine)
model.metadata.create_all(engine)
def teardown_db():
"""Destroy the database schema."""
engine = config['tg.app_globals'].sa_engine
model.metadata.drop_all(engine)
class TestController(object):
"""Base functional test case for the controllers.
The tg2express application instance (``self.app``) set up in this test
case (and descendants) has authentication disabled, so that developers can
test the protected areas independently of the :mod:`repoze.who` plugins
used initially. This way, authentication can be tested once and separately.
Check tg2express.tests.functional.test_authentication for the repoze.who
integration tests.
This is the officially supported way to test protected areas with
repoze.who-testutil (http://code.gustavonarea.net/repoze.who-testutil/).
"""
application_under_test = application_name
def setUp(self):
"""Setup test fixture for each functional test method."""
self.app = load_app(self.application_under_test)
setup_app()
def tearDown(self):
"""Tear down test fixture for each functional test method."""
model.DBSession.remove()
teardown_db()
| archsh/tg2ext.express | example/tg2express/tests/__init__.py | Python | mit | 2,131 | 0 |
# -*- coding: utf-8 -*-
from __future__ import print_function
"""
ScriptProcessor processes a script file (generally), loading data using the requested
loading routine and printing
This is part of Acq4
Paul B. Manis, Ph.D.
2011-2013.
Pep8 compliant (via pep8.py) 10/25/2013
Refactoring begun 3/21/2015
"""
import os
import os.path
import numpy as np
import re
import gc
from acq4.analysis.AnalysisModule import AnalysisModule
from acq4.util.metaarray import MetaArray
from acq4.util import DataManager
from acq4.pyqtgraph import configfile
from acq4.util import Qt
from acq4.pyqtgraph.widgets.ProgressDialog import ProgressDialog
class ScriptProcessor(AnalysisModule):
def __init__(self, host):
AnalysisModule.__init__(self, host)
def setAnalysis(self, analysis=None, fileloader=None, template=None, clamps=None, printer=None, dbupdate=None):
"""
Set the analysis and the file loader routines
that will be called by our script
"""
self.analysis = analysis
self.loadFile = fileloader
self.data_template = template
self.clamps = clamps
self.printAnalysis = printer
self.dbUpdate = dbupdate
def read_script(self):
"""
read a script file from disk, and use that information to drive the analysis
Parameters
----------
none
Returns
-------
script_name : str
The name of the script that was opened. If the script was not found, could not
be read, or the dialog was cancelled, the return result will be None
"""
self.script_name = Qt.QFileDialog.getOpenFileName(
None, 'Open Script File', '', 'Script (*.cfg)')
if self.script_name == '': # cancel returns empty string
return None
self.script = configfile.readConfigFile(self.script_name)
if self.script is None:
# print 'Failed to read script'
# self.ctrl.IVCurve_ScriptName.setText('None')
return None
# set the data manager to the script if we can
print(self.script['directory'])
if 'directory' in self.script.keys():
try:
self.dataManager.setBaseDir(self.script['directory'])
print('Set base dir to: {:s}'.format(self.script['directory']))
except:
print('ScriptProcessor:read_script: Cannot set base directory to %s\nLikely directory was not found' % self.script['directory'])
return self.script_name
    def rerun_script(self):
"""
revalidate and run the current script
:return:
"""
if self.validate_script():
self.run_script()
else:
raise Exception("Script failed validation - see terminal output")
def validate_script(self):
"""
validate the current script - by checking the existence of the files needed for the analysis
:return: False if cannot find files; True if all are found
"""
# if self.script['module'] != 'IVCurve':
# print 'Script is not for IVCurve (found %s)' % self.script['module']
# return False
if 'directory' in self.script.keys():
try:
#print dir(self.dataManager())
self.dataManager().setBaseDir(self.script['directory'])
print('Set base dir to: {:s}'.format(self.script['directory']))
except:
print('ScriptProcessor:read_script: \n Cannot set base directory to %s\n Likely directory was not found' % self.script['directory'])
return False
all_found = True
trailingchars = [c for c in map(chr, range(97, 123))] # trailing chars used to identify different parts of a cell's data
for c in self.script['Cells']:
if self.script['Cells'][c]['include'] is False:
continue
sortedkeys = sorted(self.script['Cells'][c]['choice'].keys()) # sort by order of recording
for p in sortedkeys:
pr = self.script['protocol'] + '_' + p # add the underscore here
if c[-1] in trailingchars:
cell = c[:-1]
else:
cell = c
fn = os.path.join(cell, pr)
#print fn
#print 'dm selected file: ', self.dataManager().selectedFile()
if 'directory' in self.script.keys():
dm_selected_file = self.script['directory']
else:
dm_selected_file = self.dataManager().selectedFile().name()
DataManager.cleanup()
gc.collect()
fullpath = os.path.join(dm_selected_file, fn)
file_ok = os.path.exists(fullpath)
if file_ok:
print('File found: {:s}'.format(fullpath))
else:
print(' current dataManager self.dm points to file: ', dm_selected_file)
print(' and file not found was: ', fullpath)
all_found = False
#else:
# print 'file found ok: %s' % fullpath
return all_found
def run_script(self):
"""
Run a script, doing all of the requested analysis
:return:
"""
if self.script['testfiles']:
return
# settext = self.scripts_form.PSPReversal_ScriptResults_text.setPlainText
# apptext = self.scripts_form.PSPReversal_ScriptResults_text.appendPlainText
self.textout = ('\nScript File: {:<32s}\n'.format(self.script_name))
# settext(self.textout)
script_header = True # reset the table to a print new header for each cell
trailingchars = [c for c in map(chr, range(97, 123))] # trailing chars used to identify different parts of a cell's data
self.dataManager().setBaseDir(self.script['directory'])
ordered = sorted(self.script['Cells'].keys()) # order the analysis by date/slice/cell
prog1 = ProgressDialog("Script Processing..", 0, len(ordered))
ncell = len(ordered)
for nc, cell in enumerate(ordered):
if prog1.wasCanceled():
break
presetDict = {}
thiscell = self.script['Cells'][cell]
#print 'processing cell: %s' % thiscell
if thiscell['include'] is False: # skip this cell
try:
print('Skipped: %s, reason:%s' % (cell, thiscell['reason']))
except:
raise ValueError('cell %s has no tag "reason" but "include" is False' % cell)
continue
sortedkeys = sorted(thiscell['choice'].keys()) # sort by order of recording (# on protocol)
prog1.setValue(nc/ncell)
# prog2 = ProgressDialog("Cell Processing..%s" , 0, len(sortedkeys)):
for p in sortedkeys:
if thiscell['choice'][p] not in self.script['datafilter']: # pick out steady-state conditions
print('p: %s not in data: ' % (thiscell['choice'][p]), self.script['datafilter'])
continue
# print 'working on %s' % thiscell['choice'][p]
pr = self.script['protocol'] + '_' + p # add the underscore here
if cell[-1] in trailingchars: # check last letter - if not a number clip it
cell_file = cell[:-1]
else:
cell_file = cell
fn = os.path.join(cell_file, pr)
#dm_selected_file = self.dataManager().selectedFile().name()
dm_selected_file = self.script['directory']
fullpath = os.path.join(dm_selected_file, fn)
file_ok = os.path.exists(fullpath)
if not file_ok: # get the directory handle and take it from there
print('File is not ok: %s' % fullpath)
continue
m = thiscell['choice'][p] # get the tag for the manipulation
presetDict['Choices'] = thiscell['choice'][p]
if 'genotype' in thiscell.keys():
presetDict['Genotype'] = thiscell['genotype']
else:
presetDict['Genotype'] = 'Unknown'
if 'Celltype' in thiscell.keys():
presetDict['Celltype'] = thiscell['Celltype']
else:
presetDict['Celltype'] = 'Unknown'
if 'spikethresh' in thiscell.keys():
presetDict['SpikeThreshold'] = thiscell['spikethresh']
if 'bridgeCorrection' in thiscell.keys():
presetDict['bridgeCorrection'] = thiscell['bridgeCorrection']
else:
presetDict['bridgeCorrection'] = None
dh = self.dataManager().manager.dirHandle(fullpath)
if not self.loadFile([dh], analyze=False, bridge=presetDict['bridgeCorrection']): # note: must pass a list of dh; don't let analyisis run at end
print('Failed to load requested file: ', fullpath)
continue # skip bad sets of records...
if 'datamode' in thiscell.keys():
self.clamps.data_mode = thiscell['datamode']
self.auto_updater = False
self.get_script_analysisPars(self.script, thiscell)
self.analysis(presets=presetDict) # call the caller's analysis routine
if 'addtoDB' in self.script.keys():
if self.script['addtoDB'] is True and self.dbUpdate is not None:
self.dbUpdate() # call routine in parent
ptxt = self.printAnalysis(printnow=False, script_header=script_header, copytoclipboard=False)
self.textout += ptxt + '\n'
script_header = False
DataManager.cleanup()
del dh
gc.collect()
print(self.textout)
self.auto_updater = True # restore function
# print '\nDone'
def get_script_analysisPars(self, script_globals, thiscell):
"""
set the analysis times and modes from the script. Also updates the qt windows
:return: Nothing.
"""
self.analysis_parameters = {}
self.analysis_parameters['baseline'] = False
self.analysis_parameters['lrwin1'] = {}
        self.analysis_parameters['lrwin2'] = {}
self.analysis_parameters['lrwin0'] = {}
self.analysis_parameters['lrrmp'] = {}
self.auto_updater = False # turn off the updates
scriptg = {'global_jp': ['junction'], 'global_win1_mode': ['lrwin1', 'mode'],
'global_win2_mode': ['lrwin2', 'mode'], 'Celltype': ['Celltype']}
for k in scriptg.keys(): # set globals first
if k in script_globals.keys():
if len(scriptg[k]) == 1:
self.analysis_parameters[scriptg[k][0]] = script_globals[k]
else:
self.analysis_parameters[scriptg[k][0]] = {scriptg[k][1]: script_globals[k]}
if 'junctionpotential' in thiscell:
self.analysis_parameters['junction'] = thiscell['junctionpotential']
if 'alternation' in thiscell:
self.analysis_parameters['alternation'] = thiscell['alternation']
else:
self.analysis_parameters['alternation'] = True
if 'include' in thiscell.keys():
self.analysis_parameters['UseData'] = thiscell['include']
else:
self.analysis_parameters['UseData'] = True
# print 'analysis params after get script \n', self.analysis_parameters
return
def print_script_output(self):
"""
        Print a clean version of the results to the terminal.
:return:
"""
print(self.remove_html_markup(self.textout))
def copy_script_output(self):
"""
Copy script output (results) to system clipboard
:return: Nothing
"""
self.scripts_form.PSPReversal_ScriptResults_text.copy()
def remove_html_markup(self, html_string):
"""
simple html stripper for our own generated text (output of analysis, above).
This is not generally useful but is better than requiring yet another library
for the present purpose.
Taken from a stackoverflow answer.
        :param html_string: input html-marked text
:return: cleaned text
"""
tag = False
quote = False
out = ""
        html_string = html_string.replace('<br>', '\n')  # first take care of line breaks
for char in html_string:
if char == '<' and not quote:
tag = True
elif char == '>' and not quote:
tag = False
elif (char == '"' or char == "'") and tag:
quote = not quote
elif not tag:
out = out + char
return out
| pbmanis/acq4 | acq4/analysis/tools/ScriptProcessor.py | Python | mit | 13,216 | 0.004994 |
import requests
headers = {
'foo': 'bar',
}
response = requests.get('http://example.com/', headers=headers)
| NickCarneiro/curlconverter | fixtures/python/get_with_single_header.py | Python | mit | 114 | 0 |
# Generated by Django 2.0 on 2018-02-08 11:45
from django.db import migrations
def forwards(apps, schema_editor):
"""
Change all DancePiece objects into Work objects, and their associated
data into WorkRole and WorkSelection models, then delete the DancePiece.
"""
DancePiece = apps.get_model("spectator_events", "DancePiece")
Work = apps.get_model("spectator_events", "Work")
WorkRole = apps.get_model("spectator_events", "WorkRole")
WorkSelection = apps.get_model("spectator_events", "WorkSelection")
for dp in DancePiece.objects.all():
work = Work.objects.create(
kind="dancepiece", title=dp.title, title_sort=dp.title_sort
)
for role in dp.roles.all():
WorkRole.objects.create(
creator=role.creator,
work=work,
role_name=role.role_name,
role_order=role.role_order,
)
for selection in dp.events.all():
WorkSelection.objects.create(
event=selection.event, work=work, order=selection.order
)
dp.delete()
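# Only a forwards function is supplied below, so this data migration is
# irreversible as written; passing migrations.RunPython.noop as the second
# argument would let `migrate` roll back past it (without restoring the
# deleted DancePiece rows).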
class Migration(migrations.Migration):
dependencies = [
("spectator_events", "0027_classicalworks_to_works"),
]
operations = [
migrations.RunPython(forwards),
]
| philgyford/django-spectator | spectator/events/migrations/0028_dancepieces_to_works.py | Python | mit | 1,326 | 0 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "social_news_site.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| lewfish/django-social-news | manage.py | Python | mit | 259 | 0.003861 |
# -*- coding: utf-8 -*-
# Copyright 2019 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""An implementation of the ReplicationConfig proto interface."""
from __future__ import print_function
import json
import os
import shutil
import sys
from chromite.api.gen.config import replication_config_pb2
from chromite.lib import constants
from chromite.lib import cros_logging as logging
from chromite.lib import osutils
from chromite.utils import field_mask_util
assert sys.version_info >= (3, 6), 'This module requires Python 3.6+'
def _ValidateFileReplicationRule(rule):
"""Raises an error if a FileReplicationRule is invalid.
For example, checks that if REPLICATION_TYPE_FILTER, destination_fields
are specified.
Args:
rule: (FileReplicationRule) The rule to validate.
"""
if rule.file_type == replication_config_pb2.FILE_TYPE_JSON:
if rule.replication_type != replication_config_pb2.REPLICATION_TYPE_FILTER:
raise ValueError(
'Rule for JSON source %s must use REPLICATION_TYPE_FILTER.' %
rule.source_path)
elif rule.file_type == replication_config_pb2.FILE_TYPE_OTHER:
if rule.replication_type != replication_config_pb2.REPLICATION_TYPE_COPY:
raise ValueError('Rule for source %s must use REPLICATION_TYPE_COPY.' %
rule.source_path)
else:
raise NotImplementedError('Replicate not implemented for file type %s' %
rule.file_type)
if rule.replication_type == replication_config_pb2.REPLICATION_TYPE_COPY:
if rule.destination_fields.paths:
raise ValueError(
'Rule with REPLICATION_TYPE_COPY cannot use destination_fields.')
elif rule.replication_type == replication_config_pb2.REPLICATION_TYPE_FILTER:
if not rule.destination_fields.paths:
raise ValueError(
'Rule with REPLICATION_TYPE_FILTER must use destination_fields.')
else:
raise NotImplementedError(
'Replicate not implemented for replication type %s' %
rule.replication_type)
if os.path.isabs(rule.source_path) or os.path.isabs(rule.destination_path):
raise ValueError(
'Only paths relative to the source root are allowed. In rule: %s' %
rule)
def _ApplyStringReplacementRules(destination_path, rules):
"""Read the file at destination path, apply rules, and write a new file.
Args:
destination_path: (str) Path to the destination file to read. The new file
will also be written at this path.
rules: (list[StringReplacementRule]) Rules to apply. Must not be empty.
"""
assert rules
with open(destination_path, 'r') as f:
dst_data = f.read()
for string_replacement_rule in rules:
dst_data = dst_data.replace(string_replacement_rule.before,
string_replacement_rule.after)
with open(destination_path, 'w') as f:
f.write(dst_data)
def Replicate(replication_config):
"""Run the replication described in replication_config.
Args:
replication_config: (ReplicationConfig) Describes the replication to run.
"""
# Validate all rules before any of them are run, to decrease chance of ending
# with a partial replication.
for rule in replication_config.file_replication_rules:
_ValidateFileReplicationRule(rule)
for rule in replication_config.file_replication_rules:
logging.info('Processing FileReplicationRule: %s', rule)
src = os.path.join(constants.SOURCE_ROOT, rule.source_path)
dst = os.path.join(constants.SOURCE_ROOT, rule.destination_path)
osutils.SafeMakedirs(os.path.dirname(dst))
if rule.file_type == replication_config_pb2.FILE_TYPE_JSON:
assert (rule.replication_type ==
replication_config_pb2.REPLICATION_TYPE_FILTER)
assert rule.destination_fields.paths
with open(src, 'r') as f:
source_json = json.load(f)
try:
source_device_configs = source_json['chromeos']['configs']
except KeyError:
raise NotImplementedError(
('Currently only ChromeOS Configs are supported (expected file %s '
'to have a list at "$.chromeos.configs")') % src)
destination_device_configs = []
for source_device_config in source_device_configs:
destination_device_configs.append(
field_mask_util.CreateFilteredDict(rule.destination_fields,
source_device_config))
destination_json = {'chromeos': {'configs': destination_device_configs}}
logging.info('Writing filtered JSON source to %s', dst)
with open(dst, 'w') as f:
# Use the print function, so the file ends in a newline.
print(
json.dumps(
destination_json,
sort_keys=True,
indent=2,
separators=(',', ': ')),
file=f)
else:
assert rule.file_type == replication_config_pb2.FILE_TYPE_OTHER
assert (
rule.replication_type == replication_config_pb2.REPLICATION_TYPE_COPY)
assert not rule.destination_fields.paths
logging.info('Copying full file from %s to %s', src, dst)
shutil.copy2(src, dst)
if rule.string_replacement_rules:
_ApplyStringReplacementRules(dst, rule.string_replacement_rules)
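# A minimal usage sketch (illustrative only; the paths and field values are
# invented, while the message and field names are the ones used above):
#
#   config = replication_config_pb2.ReplicationConfig()
#   rule = config.file_replication_rules.add()
#   rule.source_path = 'private/config.json'
#   rule.destination_path = 'public/config.json'
#   rule.file_type = replication_config_pb2.FILE_TYPE_JSON
#   rule.replication_type = replication_config_pb2.REPLICATION_TYPE_FILTER
#   rule.destination_fields.paths.append('name')
#   Replicate(config)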
| endlessm/chromium-browser | third_party/chromite/lib/replication_lib.py | Python | bsd-3-clause | 5,370 | 0.007263 |
import mock
from nose.tools import eq_, ok_, assert_raises
from funfactory.urlresolvers import reverse
from .base import ManageTestCase
class TestErrorTrigger(ManageTestCase):
def test_trigger_error(self):
url = reverse('manage:error_trigger')
response = self.client.get(url)
assert self.user.is_superuser
eq_(response.status_code, 200)
# sans a message
response = self.client.post(url, {'message': ''})
eq_(response.status_code, 200)
ok_('This field is required' in response.content)
assert_raises(
NameError,
self.client.post,
url,
{'message': 'Some Message'}
)
@mock.patch('airmozilla.manage.views.errors.Client')
def test_trigger_error_with_raven(self, mocked_client):
url = reverse('manage:error_trigger')
assert self.user.is_superuser
raven_config = {
'dsn': 'fake123'
}
with self.settings(RAVEN_CONFIG=raven_config):
response = self.client.post(url, {
'message': 'Some Message',
'capture_with_raven': True
})
eq_(response.status_code, 302)
mocked_client().captureException.assert_called_with()
| zofuthan/airmozilla | airmozilla/manage/tests/views/test_errors.py | Python | bsd-3-clause | 1,278 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-03-18 09:59
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('order_reminder', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='orders',
name='creater',
field=models.CharField(max_length=200, null=True, unique=True),
),
migrations.AddField(
model_name='orders',
name='currency',
field=models.CharField(max_length=200, null=True, unique=True),
),
migrations.AlterField(
model_name='orders',
name='order_id',
field=models.CharField(max_length=200, null=True, unique=True, verbose_name=b'Company Name'),
),
]
| forance/django-q | djangoq_demo/order_reminder/migrations/0002_auto_20160318_1759.py | Python | mit | 858 | 0.001166 |
import sys
import os
brief = "create a test for controller"
def usage(argv0):
print("Usage: {} generate test controller CONTROLLER_NAME METHOD [METHOD] [...]".format(argv0))
sys.exit(1)
aliases = ['c']
def execute(argv, argv0, engine):
import lib, inflection
os.environ.setdefault("AIOWEB_SETTINGS_MODULE", "settings")
from aioweb import settings
sys.path.append(os.getcwd())
if len(argv) < 2:
usage(argv0)
controller_name = inflection.camelize(argv[0]) + "Controller"
controller_file_name = inflection.underscore(argv[0]) + ".py"
methods = argv[1:]
dest_file = os.path.join(lib.dirs(settings, format=["tests_controllers"]), controller_file_name)
os.makedirs(os.path.dirname(dest_file), exist_ok=True)
template = lib.get_template("tests/controller.py", settings)
if os.path.exists(dest_file):
if lib.ask("{} already exists!\nDo you wanna replace it?".format(dest_file)) == 'n':
print("Generation was aborted!")
return
print("creating {}".format(dest_file))
with open(template, "r") as f:
controller_code = f.read().replace("CLASS", controller_name).replace("CONTROLLER_NAME", controller_file_name[:-3])
with open(dest_file, "w") as df:
df.write(controller_code)
for method in methods:
engine["commands"]["generate"]["test"]["controller_method"]([argv[0], method], argv0, engine)
| kreopt/aioweb | wyrm/modules/generate/test/controller.py | Python | mit | 1,437 | 0.004175 |
'''build RoboFont Extension'''
import os
from AppKit import NSCommandKeyMask, NSAlternateKeyMask, NSShiftKeyMask
from mojo.extensions import ExtensionBundle
# get current folder
basePath = os.path.dirname(__file__)
# source folder for all extension files
sourcePath = os.path.join(basePath, 'source')
# folder with python files
libPath = os.path.join(sourcePath, 'code')
# folder with html files
htmlPath = os.path.join(sourcePath, 'documentation')
# folder with resources (icons etc)
resourcesPath = os.path.join(sourcePath, 'resources')
# load license text from file
# see choosealicense.com for more open-source licenses
licensePath = os.path.join(basePath, 'license.txt')
# required extensions
requirementsPath = os.path.join(basePath, 'requirements.txt')
# name of the compiled extension file
extensionFile = 'myExtension.roboFontExt'
# path of the compiled extension
buildPath = os.path.join(basePath, 'build')
extensionPath = os.path.join(buildPath, extensionFile)
# initiate the extension builder
B = ExtensionBundle()
# name of the extension
B.name = "myExtension"
# name of the developer
B.developer = 'RoboDocs'
# URL of the developer
B.developerURL = 'http://github.com/roboDocs'
# extension icon (file path or NSImage)
imagePath = os.path.join(resourcesPath, 'icon.png')
B.icon = imagePath
# version of the extension
B.version = '0.2.6'
# should the extension be launched at start-up?
B.launchAtStartUp = True
# script to be executed when RF starts
B.mainScript = 'hello.py'
# does the extension contain html help files?
B.html = True
# minimum RoboFont version required for this extension
B.requiresVersionMajor = '4'
B.requiresVersionMinor = '0'
# scripts which should appear in Extensions menu
B.addToMenu = [
{
'path': 'doSomething.py',
'preferredName': 'do something',
'shortKey': (NSCommandKeyMask | NSShiftKeyMask, 'b'),
},
{
'path': 'doSomethingElse.py',
'preferredName': 'do something else',
'shortKey': (NSAlternateKeyMask, 'o'),
}
]
# license for the extension
with open(licensePath) as license:
B.license = license.read()
# required extensions
with open(requirementsPath) as requirements:
B.requirements = requirements.read()
# expiration date for trial extensions
B.expireDate = '2020-12-31'
# compile and save the extension bundle
print('building extension...', end=' ')
B.save(extensionPath, libPath=libPath, htmlPath=htmlPath, resourcesPath=resourcesPath)
print('done!')
# check for problems in the compiled extension
print()
print(B.validationErrors())
| roboDocs/rf-extension-boilerplate | build.py | Python | mit | 2,609 | 0.000383 |
#!/usr/bin/python
#coding: utf-8 -*-
# (c) 2017, Wayne Witzel III <wayne@riotousliving.com>
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: tower_host
version_added: "2.3"
short_description: create, update, or destroy Ansible Tower host.
description:
- Create, update, or destroy Ansible Tower hosts. See
U(https://www.ansible.com/tower) for an overview.
options:
name:
description:
- The name to use for the host.
required: True
description:
description:
- The description to use for the host.
required: False
default: null
inventory:
description:
- Inventory the host should be made a member of.
required: True
enabled:
description:
- If the host should be enabled.
required: False
default: True
variables:
description:
- Variables to use for the host. Use '@' for a file.
state:
description:
- Desired state of the resource.
required: False
default: "present"
choices: ["present", "absent"]
tower_host:
description:
- URL to your Tower instance.
required: False
default: null
tower_username:
description:
- Username for your Tower instance.
required: False
default: null
tower_password:
description:
- Password for your Tower instance.
required: False
default: null
tower_verify_ssl:
description:
- Dis/allow insecure connections to Tower. If C(no), SSL certificates will not be validated.
This should only be used on personally controlled sites using self-signed certificates.
required: False
default: True
tower_config_file:
description:
- Path to the Tower config file. See notes.
required: False
default: null
requirements:
- "python >= 2.6"
- "ansible-tower-cli >= 3.0.3"
notes:
- If no I(config_file) is provided we will attempt to use the tower-cli library
defaults to find your Tower host information.
- I(config_file) should contain Tower configuration in the following format
host=hostname
username=username
password=password
'''
EXAMPLES = '''
- name: Add tower host
tower_host:
name: localhost
description: "Local Host Group"
inventory: "Local Inventory"
state: present
tower_config_file: "~/tower_cli.cfg"
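# A second, hypothetical sketch (not part of the original examples):
# removing the same host again with state=absent.
- name: Remove tower host
  tower_host:
    name: localhost
    inventory: "Local Inventory"
    state: absent
    tower_config_file: "~/tower_cli.cfg"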
'''
try:
import os
import tower_cli
import tower_cli.utils.exceptions as exc
from tower_cli.conf import settings
from ansible.module_utils.ansible_tower import tower_auth_config, tower_check_mode
HAS_TOWER_CLI = True
except ImportError:
HAS_TOWER_CLI = False
def main():
module = AnsibleModule(
argument_spec = dict(
name = dict(required=True),
description = dict(),
inventory = dict(required=True),
enabled = dict(type='bool', default=True),
variables = dict(),
tower_host = dict(),
tower_username = dict(),
tower_password = dict(no_log=True),
tower_verify_ssl = dict(type='bool', default=True),
tower_config_file = dict(type='path'),
state = dict(choices=['present', 'absent'], default='present'),
),
supports_check_mode=True
)
if not HAS_TOWER_CLI:
module.fail_json(msg='ansible-tower-cli required for this module')
name = module.params.get('name')
description = module.params.get('description')
inventory = module.params.get('inventory')
enabled = module.params.get('enabled')
state = module.params.get('state')
variables = module.params.get('variables')
if variables:
if variables.startswith('@'):
filename = os.path.expanduser(variables[1:])
            # read the variables from the file; AnsibleModule provides no
            # contents_from_file() helper, so read it directly
            with open(filename) as variables_file:
                variables = variables_file.read()
json_output = {'host': name, 'state': state}
tower_auth = tower_auth_config(module)
with settings.runtime_values(**tower_auth):
tower_check_mode(module)
host = tower_cli.get_resource('host')
try:
inv_res = tower_cli.get_resource('inventory')
inv = inv_res.get(name=inventory)
if state == 'present':
result = host.modify(name=name, inventory=inv['id'], enabled=enabled,
variables=variables, description=description, create_on_missing=True)
json_output['id'] = result['id']
elif state == 'absent':
result = host.delete(name=name, inventory=inv['id'])
except (exc.NotFound) as excinfo:
module.fail_json(msg='Failed to update host, inventory not found: {0}'.format(excinfo), changed=False)
except (exc.ConnectionError, exc.BadRequest) as excinfo:
module.fail_json(msg='Failed to update host: {0}'.format(excinfo), changed=False)
json_output['changed'] = result['changed']
module.exit_json(**json_output)
from ansible.module_utils.basic import AnsibleModule
if __name__ == '__main__':
main()
|
adityacs/ansible
|
lib/ansible/modules/web_infrastructure/ansible_tower/tower_host.py
|
Python
|
gpl-3.0
| 5,874 | 0.005958 |
"""Tests for account activation"""
import unittest
from uuid import uuid4
from django.conf import settings
from django.core.urlresolvers import reverse
from django.test import TestCase, override_settings
from mock import patch
from edxmako.shortcuts import render_to_string
from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers
from openedx.core.djangoapps.user_api.config.waffle import PREVENT_AUTH_USER_WRITES, SYSTEM_MAINTENANCE_MSG, waffle
from student.models import Registration
from student.tests.factories import UserFactory
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
class TestActivateAccount(TestCase):
"""Tests for account creation"""
def setUp(self):
super(TestActivateAccount, self).setUp()
self.username = "jack"
self.email = "jack@fake.edx.org"
self.password = "test-password"
self.user = UserFactory.create(
username=self.username, email=self.email, password=self.password, is_active=False,
)
# Set Up Registration
self.registration = Registration()
self.registration.register(self.user)
self.registration.save()
self.platform_name = configuration_helpers.get_value('PLATFORM_NAME', settings.PLATFORM_NAME)
self.activation_email_support_link = configuration_helpers.get_value(
'ACTIVATION_EMAIL_SUPPORT_LINK', settings.ACTIVATION_EMAIL_SUPPORT_LINK
) or settings.SUPPORT_SITE_LINK
def login(self):
"""
        Log in with the test user.
        Since only active users can log in, we must activate the user before logging in.
        This method performs the following tasks in order:
        1. Stores the user's active/inactive status in a variable.
        2. Makes sure the user account is active.
        3. Authenticates the user with the client.
        4. Reverts the user's original active/inactive status.
"""
is_active = self.user.is_active
# Make sure user is active before login
self.user.is_active = True
self.user.save()
self.client.login(username=self.username, password=self.password)
# Revert user activation status
self.user.is_active = is_active
self.user.save()
def assert_no_tracking(self, mock_segment_identify):
""" Assert that activate sets the flag but does not call segment. """
# Ensure that the user starts inactive
self.assertFalse(self.user.is_active)
# Until you explicitly activate it
self.registration.activate()
self.assertTrue(self.user.is_active)
self.assertFalse(mock_segment_identify.called)
@override_settings(
LMS_SEGMENT_KEY="testkey",
MAILCHIMP_NEW_USER_LIST_ID="listid"
)
@patch('student.models.analytics.identify')
def test_activation_with_keys(self, mock_segment_identify):
expected_segment_payload = {
'email': self.email,
'username': self.username,
'activated': 1,
}
expected_segment_mailchimp_list = {
"MailChimp": {
"listId": settings.MAILCHIMP_NEW_USER_LIST_ID
}
}
# Ensure that the user starts inactive
self.assertFalse(self.user.is_active)
# Until you explicitly activate it
self.registration.activate()
self.assertTrue(self.user.is_active)
mock_segment_identify.assert_called_with(
self.user.id,
expected_segment_payload,
expected_segment_mailchimp_list
)
@override_settings(LMS_SEGMENT_KEY="testkey")
@patch('student.models.analytics.identify')
def test_activation_without_mailchimp_key(self, mock_segment_identify):
self.assert_no_tracking(mock_segment_identify)
@override_settings(MAILCHIMP_NEW_USER_LIST_ID="listid")
@patch('student.models.analytics.identify')
def test_activation_without_segment_key(self, mock_segment_identify):
self.assert_no_tracking(mock_segment_identify)
@patch('student.models.analytics.identify')
def test_activation_without_keys(self, mock_segment_identify):
self.assert_no_tracking(mock_segment_identify)
@override_settings(FEATURES=dict(settings.FEATURES, DISPLAY_ACCOUNT_ACTIVATION_MESSAGE_ON_SIDEBAR=True))
def test_account_activation_message(self):
"""
        Verify that the correct account activation message is displayed.
        If the logged-in user has not activated their account, make sure that an
        account activation message is displayed on the dashboard sidebar.
"""
# Log in with test user.
self.login()
expected_message = render_to_string(
'registration/account_activation_sidebar_notice.html',
{
'email': self.user.email,
'platform_name': self.platform_name,
'activation_email_support_link': self.activation_email_support_link
}
)
response = self.client.get(reverse('dashboard'))
self.assertContains(response, expected_message, html=True)
# Now make sure account activation message goes away when user activated the account
self.user.is_active = True
self.user.save()
self.login()
expected_message = render_to_string(
'registration/account_activation_sidebar_notice.html',
{
'email': self.user.email,
'platform_name': self.platform_name,
'activation_email_support_link': self.activation_email_support_link
}
)
response = self.client.get(reverse('dashboard'))
self.assertNotContains(response, expected_message, html=True)
@override_settings(FEATURES=dict(settings.FEATURES, DISPLAY_ACCOUNT_ACTIVATION_MESSAGE_ON_SIDEBAR=False))
def test_account_activation_message_disabled(self):
"""
        Verify that the old account activation message is displayed when
DISPLAY_ACCOUNT_ACTIVATION_MESSAGE_ON_SIDEBAR is disabled.
"""
# Log in with test user.
self.login()
expected_message = render_to_string(
'registration/activate_account_notice.html',
{'email': self.user.email}
)
response = self.client.get(reverse('dashboard'))
self.assertContains(response, expected_message, html=True)
# Now make sure account activation message goes away when user activated the account
self.user.is_active = True
self.user.save()
self.login()
expected_message = render_to_string(
'registration/activate_account_notice.html',
{'email': self.user.email}
)
response = self.client.get(reverse('dashboard'))
self.assertNotContains(response, expected_message, html=True)
def test_account_activation_notification_on_logistration(self):
"""
Verify that logistration page displays success/error/info messages
about account activation.
"""
login_page_url = "{login_url}?next={redirect_url}".format(
login_url=reverse('signin_user'),
redirect_url=reverse('dashboard'),
)
# Access activation link, message should say that account has been activated.
response = self.client.get(reverse('activate', args=[self.registration.activation_key]), follow=True)
self.assertRedirects(response, login_page_url)
self.assertContains(response, 'Success! You have activated your account.')
# Access activation link again, message should say that account is already active.
response = self.client.get(reverse('activate', args=[self.registration.activation_key]), follow=True)
self.assertRedirects(response, login_page_url)
self.assertContains(response, 'This account has already been activated.')
# Open account activation page with an invalid activation link,
# there should be an error message displayed.
response = self.client.get(reverse('activate', args=[uuid4().hex]), follow=True)
self.assertRedirects(response, login_page_url)
self.assertContains(response, 'Your account could not be activated')
def test_account_activation_prevent_auth_user_writes(self):
login_page_url = "{login_url}?next={redirect_url}".format(
login_url=reverse('signin_user'),
redirect_url=reverse('dashboard'),
)
with waffle().override(PREVENT_AUTH_USER_WRITES, True):
response = self.client.get(reverse('activate', args=[self.registration.activation_key]), follow=True)
self.assertRedirects(response, login_page_url)
self.assertContains(response, SYSTEM_MAINTENANCE_MSG)
assert not self.user.is_active
|
procangroup/edx-platform
|
common/djangoapps/student/tests/test_activate_account.py
|
Python
|
agpl-3.0
| 8,881 | 0.002365 |
#!/usr/bin/env python
# This file provided by Facebook is for non-commercial testing and evaluation
# purposes only. Facebook reserves all rights not expressly granted.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# FACEBOOK BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
This script runs a comparative test with the sample app.
It builds and runs the sample app, switching from one library to the next,
taking measurements as it goes.
To select a subset of the libraries, use the -s option with a
space-separated list.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import re
import tempfile
from collections import namedtuple
from subprocess import check_call, PIPE, Popen
""" List of tested libraries """
TESTS = (
'fresco',
'fresco-okhttp',
'glide',
'picasso',
'uil',
'volley',
'drawee-volley'
)
TEST_SOURCES = (
'network',
'local'
)
ABIS = (
'arm64-v8a',
'armeabi',
'armeabi-v7a',
'x86',
'x86_64'
)
""" Appends test class name to method name """
TEST_PATTERN = 'test{}{}'
""" Named tuple containing relevant numbers reported by a test """
Stats = namedtuple('Stats', [
'success_wait_times',
'failure_wait_times',
'cancellation_wait_times',
'java_heap_sizes',
'native_heap_sizes',
'skipped_frames'])
def parse_args():
parser = argparse.ArgumentParser(
description='Runs comparison test and processes results')
parser.add_argument('-s', '--scenarios', choices=TESTS, nargs='+')
parser.add_argument('-d', '--sources', choices=TEST_SOURCES, nargs='+')
parser.add_argument('-c', '--cpu', choices=ABIS, required=True)
return parser.parse_args()
def start_subprocess(command, **kwargs):
""" Starts subprocess after printing command to stdout. """
return Popen(command.split(), **kwargs)
def run_command(command):
""" Runs given command and waits for it to terminate.
Prints the command to stdout and redirects its output to /dev/null. """
with open('/dev/null', 'w') as devnull:
check_call(command.split(), stdout=devnull, stderr=devnull)
def gradle(*tasks):
""" Runs given gradle tasks """
if tasks:
run_command('./gradlew {}'.format(" ".join(tasks)))
def adb(command):
""" Runs adb command - arguments are given as single string"""
run_command('adb {}'.format(command))
def install_apks(abi):
""" Installs comparison app and test apks """
print("Installing comparison app...")
gradle(':samples:comparison:assembleDebug',
':samples:comparison:assembleDebugAndroidTest')
cmd = ('install -r samples/comparison/build/outputs/apk/comparison-'
'{}-debug.apk'.format(abi))
adb(cmd)
adb('install -r samples/comparison/build/outputs/apk/'
'comparison-debug-androidTest-unaligned.apk')
class ComparisonTest(object):
""" Comparison test case """
def __init__(
self,
method_name,
class_name='com.facebook.samples.comparison.test.ScrollTest',
test_package='com.facebook.samples.comparison.test',
test_runner='android.test.InstrumentationTestRunner'):
self.method_name = method_name
self.class_name = class_name
self.test_package = test_package
self.test_runner = test_runner
def __call__(self):
""" Executes test case and captures logcat output """
adb('logcat -c')
with tempfile.TemporaryFile() as logcat_file:
logcat_reader = start_subprocess(
'adb logcat',
stdout=logcat_file)
adb('shell am instrument -w -e class {}#{} {}/{}'.format(
self.class_name,
self.method_name,
self.test_package,
self.test_runner))
logcat_reader.terminate()
logcat_reader.wait()
logcat_file.seek(0)
self.logcat = logcat_file.readlines()
def get_stats(logs):
pattern = re.compile("""]: loaded after (\d+) ms""")
success_wait_times = [
int(match.group(1)) for match in map(pattern.search, logs) if match]
pattern = re.compile("""]: failed after (\d+) ms""")
failure_wait_times = [
int(match.group(1)) for match in map(pattern.search, logs) if match]
pattern = re.compile("""]: cancelled after (\d+) ms""")
cancellation_wait_times = [
int(match.group(1)) for match in map(pattern.search, logs) if match]
pattern = re.compile("""\s+(\d+.\d+) MB Java""")
java_heap_sizes = [
float(match.group(1)) for match in map(pattern.search, logs) if match]
pattern = re.compile("""\s+(\d+.\d+) MB native""")
native_heap_sizes = [
float(match.group(1)) for match in map(pattern.search, logs) if match]
pattern = re.compile("""Skipped (\d+) frames! The application may be""")
skipped_frames = [
int(match.group(1)) for match in map(pattern.search, logs) if match]
return Stats(
success_wait_times,
failure_wait_times,
cancellation_wait_times,
java_heap_sizes,
native_heap_sizes,
skipped_frames)
def print_stats(stats):
successes = len(stats.success_wait_times)
cancellations = len(stats.cancellation_wait_times)
failures = len(stats.failure_wait_times)
total_count = successes + cancellations + failures
total_wait_time = (
sum(stats.success_wait_times) +
sum(stats.cancellation_wait_times) +
sum(stats.failure_wait_times))
avg_wait_time = float(total_wait_time) / total_count
max_java_heap = max(stats.java_heap_sizes)
max_native_heap = max(stats.native_heap_sizes)
total_skipped_frames = sum(stats.skipped_frames)
print("Average wait time = {0:.1f}".format(avg_wait_time))
print("Successful requests = {}".format(successes))
print("Failures = {}".format(failures))
print("Cancellations = {}".format(cancellations))
print("Max java heap = {0:.1f}".format(max_java_heap))
print("Max native heap = {0:.1f}".format(max_native_heap))
print("Total skipped frames = {}".format(total_skipped_frames))
def get_test_name(option_name, source_name):
return TEST_PATTERN.format(
''.join(word.capitalize() for word in option_name.split('-')), source_name.capitalize())
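# For example (derived from TEST_PATTERN above):
#   get_test_name('drawee-volley', 'network') -> 'testDraweeVolleyNetwork'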
def valid_scenario(scenario_name, source_name):
return source_name != 'local' or (scenario_name != 'volley' and scenario_name != 'drawee-volley')
def main():
args = parse_args()
scenarios = []
sources = []
if args.scenarios:
scenarios = args.scenarios
else:
scenarios = TESTS
if args.sources:
sources = args.sources
else:
sources = TEST_SOURCES
install_apks(args.cpu)
for scenario_name in scenarios:
for source_name in sources:
if valid_scenario(scenario_name, source_name):
print()
print('Testing {} {}'.format(scenario_name, source_name))
print(get_test_name(scenario_name, source_name))
test = ComparisonTest(get_test_name(scenario_name, source_name))
test()
stats = get_stats(test.logcat)
print_stats(stats)
if __name__ == "__main__":
main()
|
kaoree/fresco
|
run_comparison.py
|
Python
|
bsd-3-clause
| 7,775 | 0.002058 |
#!/usr/bin/env python
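# Each tuple below pairs a key identifier with a size string; the size
# appears to be "width x height" in key units (an assumption -- the file
# ships no documentation for the format).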
ergodoxian = (
("KEY_DeleteBackspace","1x2"),
("KEY_DeleteForward","1x2"),
('KEY_ReturnEnter', '1x2'),
('KEY_Spacebar', '1x2'),
('SPECIAL_Fn', '1x2'),
('KEY_Shift', '1.5x1'),
('KEY_Shift', '1.5x1'),
("KEY_Dash_Underscore", "1.5x1"),
("KEY_Equal_Plus", "1.5x1"),
('KEY_ReturnEnter', '1.5x1'),
("KEY_Escape", "1.5x1"),
("KEY_DeleteForward","1.5x1"),
('SPECIAL_Fn', '1x1.5'),
("KEY_LeftBracket_LeftBrace", "1x1.5"),
("KEY_RightBracket_RightBrace", "1x1.5"),
("KEY_SingleQuote_DoubleQuote", "1.5x1"),
("KEY_GraveAccent_Tilde", "1.5x1"),
("KEY_Slash_Question", "1.5x1"),
("KEY_Tab","1x1.5"))
new1 = (
("KEY_DeleteBackspace","1x2"),
("KEY_DeleteForward","1x2"),
('KEY_ReturnEnter', '1x2'),
('KEY_Spacebar', '1x2'),
('SPECIAL_Fn', '1x2'),
('KEY_Shift', '1.5x1'),
('KEY_Shift', '1.5x1'),
("KEY_Dash_Underscore", "1.5x1"),
("KEY_Equal_Plus", "1.5x1"),
('KEY_ReturnEnter', '1.5x1'),
("KEY_Escape", "1.5x1"),
("KEY_DeleteForward","1.5x1"),
('SPECIAL_Fn', '1x1.5'),
("KEY_LeftBracket_LeftBrace", "1x1.5"),
("KEY_RightBracket_RightBrace", "1x1.5"),
("KEY_SingleQuote_DoubleQuote", "1.5x1"),
("KEY_GraveAccent_Tilde", "1.5x1"),
("KEY_Slash_Question", "1.5x1"),
('SPECIAL_Fn', '1x1.5'))
new2 = (
('KEY_Shift', '1x2'),
("KEY_DeleteForward","1x2"),
('KEY_ReturnEnter', '1x2'),
('KEY_Spacebar', '1x2'),
('SPECIAL_Fn', '1x2'),
('KEY_Shift', '1.5x1'),
('KEY_Shift', '1.5x1'),
("KEY_Dash_Underscore", "1.5x1"),
("KEY_Equal_Plus", "1.5x1"),
('KEY_ReturnEnter', '1.5x1'),
("KEY_Escape", "1.5x1"),
("KEY_DeleteForward","1.5x1"),
('SPECIAL_Fn', '1x1.5'),
("KEY_LeftBracket_LeftBrace", "1x1.5"),
("KEY_RightBracket_RightBrace", "1x1.5"),
("KEY_SingleQuote_DoubleQuote", "1.5x1"),
("KEY_GraveAccent_Tilde", "1.5x1"),
("KEY_Slash_Question", "1.5x1"),
('KEY_Tab', '1x1.5'))
new3 = (
('KEY_Shift', '1x2'),
("KEY_DeleteForward","1x2"),
('KEY_ReturnEnter', '1x2'),
('KEY_Spacebar', '1x2'),
('SPECIAL_Fn', '1x2'),
('KEY_Shift', '1.5x1'),
('KEY_Shift', '1.5x1'),
("KEY_Dash_Underscore", "1.5x1"),
("KEY_Equal_Plus", "1.5x1"),
('KEY_ReturnEnter', '1.5x1'),
("KEY_Escape", "1.5x1"),
("KEY_DeleteForward","1.5x1"),
('SPECIAL_Fn', '1x1.5'),
("KEY_LeftBracket_LeftBrace", "1x1.5"),
("KEY_RightBracket_RightBrace", "1x1.5"),
("KEY_SingleQuote_DoubleQuote", "1.5x1"),
("KEY_GraveAccent_Tilde", "1.5x1"),
("KEY_Slash_Question", "1.5x1"),
('SPECIAL_Fn', '1x1.5'))
new4 = (
('KEY_Shift', '1x2'),
("KEY_DeleteForward","1x2"),
('KEY_ReturnEnter', '1x2'),
('KEY_Spacebar', '1x2'),
('SPECIAL_Fn', '1x2'),
('KEY_Shift', '1.5x1'),
('KEY_Shift', '1.5x1'),
("KEY_Dash_Underscore", "1.5x1"),
("KEY_Equal_Plus", "1.5x1"),
('KEY_ReturnEnter', '1.5x1'),
("KEY_Escape", "1.5x1"),
("KEY_DeleteForward","1.5x1"),
('SPECIAL_Fn', '1x1.5'),
("KEY_LeftBracket_LeftBrace", "1x1.5"),
("KEY_RightBracket_RightBrace", "1x1.5"),
("KEY_SingleQuote_DoubleQuote", "1.5x1"),
("KEY_GraveAccent_Tilde", "1.5x1"),
("KEY_Slash_Question", "1.5x1"),
('SPECIAL_Fn', '1x2'))
new5 = (
#('KEY_Shift', '1x2'),
("KEY_DeleteForward","1x2"),
('KEY_ReturnEnter', '1x2'),
('KEY_Spacebar', '1x2'),
('SPECIAL_Fn', '1x2'),
('KEY_Shift', '1.5x1'),
#('KEY_Shift', '1.5x1'),
("KEY_Dash_Underscore", "1.5x1"),
("KEY_Equal_Plus", "1.5x1"),
('KEY_ReturnEnter', '1.5x1'),
("KEY_Escape", "1.5x1"),
("KEY_DeleteForward","1.5x1"),
('SPECIAL_Fn', '1x1.5'),
("KEY_LeftBracket_LeftBrace", "1x1.5"),
("KEY_RightBracket_RightBrace", "1x1.5"),
("KEY_SingleQuote_DoubleQuote", "1.5x1"),
("KEY_GraveAccent_Tilde", "1.5x1"),
("KEY_Slash_Question", "1.5x1"),
#('SPECIAL_Fn', '1x1.5'),
)
hof = (ergodoxian, new1, new2, new3, new4, new5)
|
jdeblese/ergovolve
|
proposals.py
|
Python
|
mit
| 3,554 | 0.037141 |
# -*- coding: utf-8 -*-
"""
@created: Thu Jul 02 10:56:57 2015
Usage:
main.py
Options:
-h --help # Show this screen.
--version # Show version.
"""
### Imports
# Standard Library
from __future__ import print_function, division
from __future__ import absolute_import
import logging
import os.path
import functools
import abc
import inspect
import datetime
import time
# Third Party
import wx
import wx.gizmos as wxdv
from docopt import docopt
import bs4
from bs4 import BeautifulSoup
# Package / Application
try:
# Imports used for unittests
from . import (__project_name__,
__version__,
__released__,
)
logging.debug("Imports for UnitTests")
except (SystemError, ValueError):
try:
# Imports used by Spyder
# import blah
from __init__ import (__project_name__,
__version__,
__released__,
)
logging.debug("Imports for Spyder IDE")
except ImportError:
# Imports used by cx_freeze
# from tpedit import blah
from tpedit import (__project_name__,
__version__,
__released__,
)
logging.debug("imports for Executable")
### Module Constants
HIGHLIGHT = wx.Colour(255, 255, 0)
HIGHLIGHT2 = wx.Colour(255, 128, 30)
DEFAULT_LOG_LEVEL = logging.INFO
ROOT_PATH = os.path.join(os.getcwd(), "tests", "data")
TITLE_TEXT = "{} v{} Released {}".format(__project_name__,
__version__,
__released__,
)
def logged(func):
"""
Decorator that logs entry and exit points of a function.
"""
# Customize these messages
entry_msg = '+Entering {}'
exit_msg = '-Exiting {}. Exec took {:.6}ms'
logger = logging.getLogger()
@functools.wraps(func)
def wrapper(*args, **kwds):
logger.debug(entry_msg.format(func.__name__))
start = time.time() # TODO PY3: change to time.monotonic()
# or time.perf_counter()
# or time.process_time()
f_result = func(*args, **kwds)
end = time.time()
elapsed = (end - start) * 1000
logger.debug(exit_msg.format(func.__name__, elapsed))
return f_result
return wrapper
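# Usage sketch: any function decorated with @logged emits DEBUG-level
# entry/exit lines such as "+Entering foo" and "-Exiting foo. Exec took
# 1.23ms" around each call.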
class LocalLogHandler(logging.StreamHandler):
"""
A logging handler that directs logs to a ``target`` wx.TextCtrl.
"""
def __init__(self, target):
logging.StreamHandler.__init__(self)
self.target = target
def emit(self, record):
msg = self.format(record)
self.target.WriteText(msg + "\n")
self.target.ShowPosition(self.target.GetLastPosition())
self.flush()
def _init_logging(target, level=DEFAULT_LOG_LEVEL):
"""
Initialize logging to the on-screen log
"""
logfmt = ("%(asctime)s.%(msecs)03d"
" [%(levelname)-8.8s]" # Note implicit string concatenation.
" %(message)s"
)
datefmt = "%Y-%m-%d %H:%M:%S"
# datefmt = "%H:%M:%S"
logger = logging.getLogger()
handler = LocalLogHandler(target)
handler.setLevel(level)
formatter = logging.Formatter(logfmt, datefmt)
handler.setFormatter(formatter)
handler.set_name("GUI Handler")
logger.addHandler(handler)
logging.info("GUI Logging Initialized, level = {}".format(level))
class MainApp(object):
"""
"""
def __init__(self):
self.app = wx.App()
self.frame = MainFrame(TITLE_TEXT, (1200, 650))
self.frame.Show()
logging.info("App init complete")
self.app.MainLoop()
class MainFrame(wx.Frame):
"""
"""
def __init__(self, title, size):
wx.Frame.__init__(self,
None,
wx.ID_ANY,
title=title,
size=size,
)
self._init_ui()
log_str = "{} init complete"
logging.info(log_str.format(type(self).__name__))
@logged
def _init_ui(self):
""" Initi UI Components """
# normally I'd make the panel later, but I want to be able to log
# things to it.
self.panel = MainPanel(self)
# Start logging.
_init_logging(self.panel.log_panel.log)
# Create the menu bar and bind events
self.menu_bar = wx.MenuBar()
self._create_menus()
self._bind_events()
# Initialize default states
self._set_defaults()
# Set the MenuBar and create a status bar
self.SetMenuBar(self.menu_bar)
self.CreateStatusBar()
_fns = ("1.xml", "2.xml", "3.xml")
# Uncomment this to auto-load some temp files
# self.open_files((os.path.join(ROOT_PATH, _fn) for _fn in _fns))
@logged
def _create_menus(self):
""" Create each menu for the menu bar """
self._create_file_menu()
self._create_edit_menu()
self._create_view_menu()
# self._create_tools_menu()
# self._create_options_menu()
# self._create_help_menu()
@logged
def _set_defaults(self, default_log_level=DEFAULT_LOG_LEVEL):
"""
"""
# TODO: refactor this hack
try:
if default_log_level == logging.DEBUG:
logging.info("Setting log level to DEBUG.")
self.sm_ll_debug.Check()
elif default_log_level == logging.INFO:
logging.info("Setting log level to INFO.")
self.sm_ll_info_.Check()
elif default_log_level == logging.WARNING:
logging.info("Setting log level to WARNING.")
self.sm_ll_warn_.Check()
elif default_log_level == logging.ERROR:
logging.info("Setting log level to ERROR.")
self.sm_ll_error.Check()
elif default_log_level == logging.CRITICAL:
logging.info("Setting log level to CRITICAL.")
self.sm_ll_crit_.Check()
else:
err_txt = "Invalid default log level `{}`."
raise ValueError(err_txt.format(DEFAULT_LOG_LEVEL))
except NameError:
logging.warning("Default log level not found, setting to INFO.")
default_log_level = logging.INFO
self.sm_ll_info_.Check()
except ValueError:
logging.warning("Invalid default log level, setting to INFO.")
default_log_level = logging.INFO
self.sm_ll_info_.Check()
except Exception:
raise
@logged
def _create_file_menu(self):
"""
Creates the File menu.
"""
# Create the menu and items
self.mfile = wx.Menu()
self.mf_new = wx.MenuItem(self.mfile, wx.ID_NEW, "&New\tCtrl+N",
"Create a new FTI Test Program file")
self.mf_open = wx.MenuItem(self.mfile, wx.ID_OPEN, "&Open\tCtrl+O",
"Open a Test Program file")
self.mf_close = wx.MenuItem(self.mfile, wx.ID_CLOSE, "&Close",
"Closes all open files")
self.mf_exit = wx.MenuItem(self.mfile, wx.ID_EXIT, "&Exit\tCtrl+Q",
"Exit the application")
# Add menu items to the menu
self.mfile.AppendItem(self.mf_new)
self.mfile.AppendItem(self.mf_open)
self.mfile.AppendItem(self.mf_close)
self.mfile.AppendSeparator()
self.mfile.AppendItem(self.mf_exit)
self.menu_bar.Append(self.mfile, "&File")
@logged
def _create_edit_menu(self):
"""
Creates the Edit menu
"""
# Create the menu and items
self.medit = wx.Menu()
self.me_temp = wx.MenuItem(self.medit,
wx.ID_EDIT,
"&Temp",
"TempItem")
self.sm_loglevel = wx.Menu()
self.sm_ll_debug = wx.MenuItem(self.sm_loglevel,
wx.ID_ANY,
"&Debug",
"Sets the log level to DEBUG",
wx.ITEM_RADIO)
self.sm_ll_info_ = wx.MenuItem(self.sm_loglevel,
wx.ID_ANY,
"&Info",
"Sets the log level to INFO",
wx.ITEM_RADIO)
self.sm_ll_warn_ = wx.MenuItem(self.sm_loglevel,
wx.ID_ANY,
"&Warning",
"Sets the log level to WARNING",
wx.ITEM_RADIO)
self.sm_ll_error = wx.MenuItem(self.sm_loglevel,
wx.ID_ANY,
"&Error",
"Sets the log level to ERROR",
wx.ITEM_RADIO)
self.sm_ll_crit_ = wx.MenuItem(self.sm_loglevel,
wx.ID_ANY,
"&Critical",
"Sets the log level to CRITICAL",
wx.ITEM_RADIO)
self.sm_loglevel.AppendItem(self.sm_ll_debug)
self.sm_loglevel.AppendItem(self.sm_ll_info_)
self.sm_loglevel.AppendItem(self.sm_ll_warn_)
self.sm_loglevel.AppendItem(self.sm_ll_error)
self.sm_loglevel.AppendItem(self.sm_ll_crit_)
# Add menu items to the menu
self.medit.AppendItem(self.me_temp)
self.medit.AppendMenu(wx.ID_ANY,
"Logging Level",
self.sm_loglevel,
"Change the logging level.")
self.menu_bar.Append(self.medit, "&Edit")
@logged
def _create_view_menu(self):
"""
Creates the View menu.
"""
# Create the menu and items
self.mview = wx.Menu()
self.mv_expand_all = wx.MenuItem(self.mview,
wx.ID_ANY,
"&Expand All",
"Expand All")
self.mv_collapse_all = wx.MenuItem(self.mview,
wx.ID_ANY,
"&Collapse All",
"Collapse All")
self.mv_expand_diffs = wx.MenuItem(self.mview,
wx.ID_ANY,
"Expand &Diffs",
"Expand diffs")
# Add menu items to the menu
self.mview.AppendItem(self.mv_expand_all)
self.mview.AppendItem(self.mv_collapse_all)
self.mview.AppendItem(self.mv_expand_diffs)
self.menu_bar.Append(self.mview, "&View")
@logged
def _bind_events(self):
""" Bind all initial events """
# File Menu
self.Bind(wx.EVT_MENU, self._on_new, id=wx.ID_NEW)
self.Bind(wx.EVT_MENU, self._on_open, id=wx.ID_OPEN)
self.Bind(wx.EVT_MENU, self._on_close, id=wx.ID_CLOSE)
self.Bind(wx.EVT_MENU, self._on_exit, id=wx.ID_EXIT)
# Edit Menu
self.Bind(wx.EVT_MENU, self._on_loglevel_change, self.sm_ll_debug)
self.Bind(wx.EVT_MENU, self._on_loglevel_change, self.sm_ll_info_)
self.Bind(wx.EVT_MENU, self._on_loglevel_change, self.sm_ll_warn_)
self.Bind(wx.EVT_MENU, self._on_loglevel_change, self.sm_ll_error)
self.Bind(wx.EVT_MENU, self._on_loglevel_change, self.sm_ll_crit_)
# View Menu
# self.Bind(wx.EVT_MENU, self._nothing)
self.Bind(wx.EVT_MENU, self._on_expand_all, self.mv_expand_all)
self.Bind(wx.EVT_MENU, self._on_collapse_all, self.mv_collapse_all)
self.Bind(wx.EVT_MENU, self._on_expand_diffs, self.mv_expand_diffs)
# Tools Menu
# Options Menu
# Help Menu
@logged
def _on_loglevel_change(self, event):
""" Process the log level change event """
new_level = event.GetEventObject().GetLabelText(event.GetId()).upper()
logging.info("Log Level Changed to {}".format(new_level))
_set_log_level(new_level)
@logged
def _on_new(self, event):
logging.warn("Command 'New' not yet implemented.")
@logged
def _on_open(self, event):
self.close_files()
self._open_file_dialog()
@logged
def _on_open_multiple(self, event):
logging.warn("'Open Multiple' command not yet implemented.")
@logged
def _on_close(self, event):
""" Delete all items in the tree and remove all file columns. """
self.close_files()
@logged
def _on_expand_all(self, event):
logging.info("Expanding all tree items.")
self.panel.edit_panel.tree.ExpandAll(self.panel.edit_panel.root)
@logged
def _on_collapse_all(self, event):
logging.info("Collapsing all tree items.")
collapse_all(self.panel.edit_panel.tree)
@logged
def _on_expand_diffs(self, event):
logging.info("Expanding differences.")
expand_diffs(self.panel.edit_panel.tree)
def _on_exit(self, event):
""" Execute Exit actions """
logging.info("Exiting app")
self.Close(True)
@logged
def _open_file_dialog(self):
""" Displayes the open file dialog """
file_dialog_style = (wx.FD_OPEN
| wx.FD_FILE_MUST_EXIST
| wx.FD_MULTIPLE
)
open_file_dialog = wx.FileDialog(self,
"prompt",
defaultDir=ROOT_PATH,
defaultFile="",
wildcard="XML Files (*.xml)|*.xml",
style=file_dialog_style
)
if open_file_dialog.ShowModal() == wx.ID_CANCEL:
# don't load
logging.info("User canceled open dialog")
return
paths = open_file_dialog.GetPaths()
for fp in paths:
logging.info(" Chosen file: `{}`".format(fp))
self.open_files(paths)
@logged
def open_files(self, paths):
""" """
# set some shorter names...
edit_panel = self.panel.edit_panel
# Reset the diff counter - don't want to double-count
edit_panel.diff_count = 0
# make sure a root exists:
try:
edit_panel.root = edit_panel.tree.AddRoot("root")
except AssertionError:
# root already exists
pass
# process each file into soup.
soups = []
for _n, fp in enumerate(paths):
with open(fp) as openf:
_, fn = os.path.split(fp)
logging.info("Processing `{}`".format(fn))
soups.append(BeautifulSoup(openf, 'xml'))
edit_panel.tree.AddColumn(fn)
edit_panel.tree.SetColumnWidth(_n + 2, 160)
edit_panel.tree.SetColumnEditable(_n + 2)
edit_panel._build_element_tree_recursively(edit_panel.root, soups)
edit_panel.tree.ExpandAll(edit_panel.root)
log_str = "Total {} differences found."
logging.info(log_str.format(edit_panel.diff_count))
self.panel.status_panel.update_diff_count(edit_panel.diff_count)
@logged
def close_files(self):
""" """
logging.info("Closing all files.")
tree = self.panel.edit_panel.tree
tree.DeleteAllItems()
for col in reversed(range(2, tree.GetColumnCount())):
tree.RemoveColumn(col)
class MainPanel(wx.Panel):
"""
Root Panel of the UI.
Contains the EditPanel, where files are compared and edited, and the
LogPanel.
"""
def __init__(self, parent):
wx.Panel.__init__(self, parent)
self.parent = parent
self._init_ui()
log_str = "{} init complete"
logging.info(log_str.format(type(self).__name__))
def _init_ui(self):
self.edit_panel = EditPanel(self)
self.log_panel = LogPanel(self)
self.status_panel = StatusPanel(self)
self.hbox = wx.BoxSizer(wx.HORIZONTAL)
self.hbox.Add(self.status_panel, 0, wx.EXPAND)
self.hbox.Add(self.log_panel, 1, wx.EXPAND)
self.vbox = wx.BoxSizer(wx.VERTICAL)
self.vbox.Add(self.edit_panel, 4, wx.EXPAND)
self.vbox.Add(self.hbox, 1, wx.EXPAND)
self.SetSizer(self.vbox)
class StatusPanel(wx.Panel):
"""
"""
@logged
def __init__(self, parent):
wx.Panel.__init__(self, parent)
self.parent = parent
self.diff_count = 0
self.count_str = "{} differences found."
self._init_ui()
log_str = "{} init complete"
logging.info(log_str.format(type(self).__name__))
@logged
def _init_ui(self):
self.status_box = wx.StaticBox(self, wx.ID_ANY, "Status",
size=(200, -1),
)
initial_text = "No files open."
self.diff_count_display = wx.StaticText(self, wx.ID_ANY,
initial_text,
)
vbox = wx.StaticBoxSizer(self.status_box, wx.VERTICAL)
vbox.Add(self.diff_count_display, 1, wx.EXPAND)
self.SetSizer(vbox)
@logged
def update_diff_count(self, value):
""" """
self.diff_count = value
self.diff_count_display.SetLabel(self.count_str.format(value))
class LogPanel(wx.Panel):
"""
Logging window.
Contains a read-only TextCtrl that displays logging messages.
"""
def __init__(self, parent):
""" Init the parent class and instance variables """
wx.Panel.__init__(self, parent)
self.parent = parent
self._init_ui()
def _init_ui(self):
""" Init the UI elements """
log_style = (wx.TE_MULTILINE
| wx.TE_READONLY
| wx.HSCROLL
)
self.log = wx.TextCtrl(self, wx.ID_ANY, style=log_style)
monospace_font = wx.Font(10,
family=wx.MODERN,
style=wx.NORMAL,
weight=wx.NORMAL,
underline=False,
face='Consolas',
)
self.log.SetFont(monospace_font)
self.hbox = wx.BoxSizer(wx.HORIZONTAL)
self.hbox.Add(self.log, 1, wx.EXPAND)
self.SetSizer(self.hbox)
class EditPanel(wx.Panel):
"""
Primary Edit panel.
Contains all of the logic for displaying and editing the XML files.
"""
def __init__(self, parent):
""" Init the parent class and instance variables """
wx.Panel.__init__(self, parent)
self.parent = parent
self.diff_count = 0
self.edit_col = -1
self._init_ui()
# must bind events *after* init because they rely on those ui elements
self._bind_events()
log_str = "{} init complete"
logging.info(log_str.format(type(self).__name__))
def _init_ui(self):
"""
Init the UI elements
"""
# A TreeListCtrl contains all of the XML
tree_style = (wx.TR_DEFAULT_STYLE
| wx.TR_ROW_LINES
| wx.TR_COLUMN_LINES
| wx.TR_FULL_ROW_HIGHLIGHT
)
self.tree = wxdv.TreeListCtrl(self,
wx.ID_ANY,
style=tree_style,
)
# Add the columns that always exist.
self.tree.AddColumn("Item")
self.tree.AddColumn("DataType")
self.tree.SetMainColumn(0) # contains the tree
self.tree.SetColumnWidth(0, 325)
self.tree.SetColumnWidth(1, 140)
self.root = self.tree.AddRoot("root")
# Expand some items by default
self.tree.ExpandAll(self.root)
self.vbox = wx.BoxSizer(wx.VERTICAL)
self.vbox.Add(self.tree, 1, wx.EXPAND)
self.SetSizer(self.vbox)
@logged
def _bind_events(self):
"""
Bind various events for the Edit Panel
"""
main_win = self.tree.GetMainWindow()
main_win.Bind(wx.EVT_RIGHT_DCLICK, self._on_right_dclick)
self.tree.Bind(wx.EVT_TREE_ITEM_ACTIVATED, self._on_activate)
self.tree.Bind(wx.EVT_TREE_BEGIN_LABEL_EDIT, self._on_item_edit_start)
self.tree.Bind(wx.EVT_TREE_END_LABEL_EDIT, self._on_item_edit_end)
@logged
def _on_right_dclick(self, event):
"""
Placeholder for value propagation.
"""
logging.info("Double-right click detected")
pos = event.GetPosition()
logging.info(" Pos: {}".format(pos))
item, flags, col = self.tree.HitTest(pos)
logging.info(" {} .. {} .. {}".format(item, flags, col))
if item:
item_text = self.tree.GetItemText(item)
col_text = self.tree.GetItemText(item, col)
log_str = "EXAMPLE: Item `{}: {}` propagated to all open files."
log_str = log_str.format(item_text, col_text)
logging.info(log_str)
@logged
def _on_activate(self, event):
"""
Placeholder - logging only.
"""
item_text = self.tree.GetItemText(event.GetItem())
logging.info("item activated: {}".format(item_text))
@logged
def _on_item_edit_start(self, event):
"""
Primary purpose: record which column is being edited (self.edit_col)
"""
self.edit_col = event.GetInt()
item_text = self.tree.GetItemText(event.GetItem())
item_value = self.tree.GetItemText(event.GetItem(), self.edit_col)
log_str = "Editing column {} for item `{}`"
logging.info(log_str.format(self.edit_col, item_text))
logging.info(" old value: `{}`".format(item_value))
@logged
def _on_item_edit_end(self, event):
"""
http://docs.wxwidgets.org/trunk/classwx_tree_event.html
"""
string = event.GetLabel()
log_str = "Column {} changed to: `{}`"
logging.info(log_str.format(self.edit_col, string))
if event.IsEditCancelled():
# I'm not sure when this would actually happen...
# It's not happening upon pressing ESC, so perhaps it only
# happens if EVT_TREE_BEGIN_LABEL_EDIT is vetoed?
logging.info("Column edit canceled.")
# TODO: move outside of the class?
@logged
def _build_element_tree_recursively(self, parent, soups):
"""
"""
skipped_items = ("FTI.Subsystems.Variables.Variables",
"FTI.TesterInstruments6.TesterInstruments",
"FTI.Subsystems.Coordinators.Coordinators",
)
all_children = ((x for x in soup.children if x != '\n')
for soup in soups)
for childs in zip(*all_children):
# assume that the 1st file is the "master file" that everything
# compares to.
child = childs[0]
# Ignore some stuff that I don't care about.
if child.name in skipped_items:
continue
# if the child is "Properties" then the next two items are
# going to be Name and Value
if child.name == "Properties":
# find the grandchildren
grandchildren = ([x for x in _child.children if x != '\n']
for _child in childs)
# collect the names and values of the grandchildren
names = []
values = []
for grandchild in grandchildren:
names.append(grandchild[0].string)
values.append(grandchild[1].string)
# set the item name as the 1st item
key = self.tree.AppendItem(parent, names[0])
# add the units to the units column
dtype = None
try:
value = unicode(values[0])
dtype, _ = parse_dtype(value)
except IndexError:
pass
if dtype is None:
dtype = ""
self.tree.SetItemText(key, dtype, 1)
# add values to each column
for _n, value in enumerate(values):
try:
value = unicode(value)
_, value = parse_dtype(value)
except IndexError:
pass
if value is None:
value = ""
self.tree.SetItemText(key, value, _n + 2)
# If any values are different, highlight the row and parents
if any(values[0] != x for x in values):
self._highlight_item_and_parents(key)
continue
# if we're at a NavigableString, then we need to add it
if isinstance(child, bs4.element.NavigableString):
# check for duplicates, highlight if true
if any(childs[0].string != x.string for x in childs):
self._highlight_item_and_parents(parent)
for _n, item in enumerate(childs):
self.tree.SetItemText(parent, item.string, _n + 2)
# if the child is a tag, then we set it as the new parent
# and recurse
if isinstance(child, bs4.element.Tag):
new_parent = self.tree.AppendItem(parent, child.name)
self._build_element_tree_recursively(new_parent, childs)
@logged
def _highlight_item_and_parents(self, item):
""" highlights an item row and parents """
self.diff_count += 1
self.tree.SetItemBackgroundColour(item, HIGHLIGHT)
for parent in get_parents(self.tree, item):
self.tree.SetItemBackgroundColour(parent, HIGHLIGHT2)
@logged
def _set_log_level(level_str):
"""
Sets the global logging level
    Parameters:
    ----------
    level_str : string
        String representation of a logging level. Accepted values are::
            DEBUG, INFO, WARNING, ERROR, CRITICAL
Returns:
--------
None
"""
# TODO: figure out a stdlib way to do this:
levels = {50: "CRITICAL",
40: "ERROR",
30: "WARNING",
20: "INFO",
10: "DEBUG",
}
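    # (One stdlib option for the TODO above: logging.getLevelName() maps in
    # both directions for the standard levels, e.g.
    # logging.getLevelName(30) == "WARNING".)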
if level_str not in levels.values():
raise ValueError("Invalid log level `{}`".format(level_str))
# Get the Logger and the previous logging level
logger = logging.getLogger()
prev_level = logger.level
new_level = getattr(logging, level_str) # Get numeric value
# Always record log level changes
log_str = "Changing logging level from {} to {}."
logging.log(99, log_str.format(levels[prev_level], levels[new_level]))
# Set the logger and handler levels
logger.setLevel(new_level)
log_str = "Logging Handler `{}` set to {}."
for handler in logger.handlers:
handler.setLevel(new_level)
logging.debug(log_str.format(handler.get_name(), level_str))
# logging.info("Global Log level set to {}".format(level_str))
@logged
def get_parents(tree, item, retval=None):
"""
Gets all the parents of a tree item, recursively.
Parameters:
-----------
tree : wx.gizmos.TreeListCtrl object
The tree to act on.
item : wx._controls.TreeItemId
The item to get the parent of.
retval : list of wx._controls.TreeItemId
Only used during recursion. A list containing all of the parents.
Returns:
--------
retval : list of wx._controls.TreeItemId
A list of all ancestors of `item`.
"""
if retval is None:
retval = []
try:
logging.debug("Getting parent of `{}`".format(tree.GetItemText(item)))
parent = tree.GetItemParent(item)
retval.append(parent)
logging.debug(" Parent is: `{}`".format(tree.GetItemText(parent)))
get_parents(tree, parent, retval)
except AssertionError:
# we're at the top, ignore the error and return.
pass
return retval[:-1]
@logged
def collapse_all(tree):
"""
Collapse all items in a tree, recursively.
Parameters:
-----------
tree : wx.gizmos.TreeListCtrl object
The tree to act on.
Returns:
--------
None
"""
item = tree.GetRootItem()
# get the first child, returning if no children exist.
try:
child = tree.GetFirstExpandedItem()
except AssertionError:
# raise AssertionError("Root item has no children")
return
expanded_items = [item, child]
while True:
try:
child = tree.GetNextExpanded(child)
        except Exception:
break
expanded_items.append(child)
for item in reversed(expanded_items):
try:
logging.debug("Collapsing `{}`".format(tree.GetItemText(item)))
tree.Collapse(item)
        except Exception:
pass
@logged
def expand_diffs(tree, item=None):
"""
Expand only the items that are different and their parents
"""
if item is None:
collapse_all(tree)
item = tree.GetRootItem()
bg = tree.GetItemBackgroundColour(item)
if bg == HIGHLIGHT2:
text = tree.GetItemText(item)
logging.debug("Expanding `{}`".format(text))
tree.Expand(item)
else:
return
# get the first child, returning if no children exist.
try:
child = tree.GetFirstChild(item)[0]
text = tree.GetItemText(child)
except AssertionError:
# raise AssertionError("Root item has no children")
return
children = [child, ]
while True:
try:
child = tree.GetNextSibling(child)
text = tree.GetItemText(child)
children.append(child)
        except Exception:
break
for i in children:
# logging.info("checking `{}`".format(tree.GetItemText(i)))
try:
bg = tree.GetItemBackgroundColour(i)
except TypeError:
continue
if bg == HIGHLIGHT2:
text = tree.GetItemText(i)
logging.debug("Expanding `{}`".format(text))
tree.Expand(i)
expand_diffs(tree, i)
@logged
def parse_dtype(string):
"""
Parses a data type from an FTI value string.
FTI value strings sometimes are of the form::
        &lt;Double&gt;6&lt;/Double&gt;
which, after translating the HTML codes, becomes valid XML::
<Double>6</Double>
The tag name ``Double`` is the data type and the tag's value ``6`` is
the value to return.
    Parameters:
----------
string : string
The string to parse
Returns:
--------
dtype : string
The parsed data type
value : string
The parsed value
"""
soup = BeautifulSoup(string, 'xml')
dtypes = [x.name for x in soup.find_all(True, recursive=True)]
dtype = ".".join(dtypes)
value = soup.find(dtypes[-1]).string
return dtype, value
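# A quick illustration (assuming BeautifulSoup's 'xml' backend, which
# preserves tag case):
#   parse_dtype("<A><B>6</B></A>") -> ("A.B", "6")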
def main():
""" Main Code """
docopt(__doc__, version=__version__)
MainApp()
if __name__ == "__main__":
main()
# string = "<Double>-30</Double>"
# string = "<A><B><C>value</C></B></A>"
# parse_dtype(string)
|
dougthor42/TPEdit
|
tpedit/main.py
|
Python
|
gpl-3.0
| 33,403 | 0.000419 |
"""
Author : tharindra galahena (inf0_warri0r)
Project: l_viewer
Blog : http://www.inf0warri0r.blogspot.com
Date : 30/04/2013
License:
Copyright 2013 Tharindra Galahena
l_viewer is free software: you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation, either version 3 of the License, or (at your option) any later
version. l_viewer is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
details.
* You should have received a copy of the GNU General Public License along with
this. If not, see http://www.gnu.org/licenses/.
"""
import Tkinter as tk
import l_system
import thread
from threading import Lock
import time
import tkFileDialog
import tkMessageBox as dialog
class Application(tk.Frame):
def __init__(self, master=None):
tk.Frame.__init__(self, master)
self.grid()
self.run = False
self.l_sys = None
self.pause = False
self.file = ''
self.mutex = Lock()
self.createWidgets()
def createWidgets(self):
self.load_button = tk.Button(self, text='load', command=self.load)
self.start_button = tk.Button(self, text='start', command=self.start)
self.stop_button = tk.Button(self, text='stop', command=self.stop)
self.zoom_in_button = tk.Button(self, text='+', command=self.zoom_in)
self.zoom_out_button = tk.Button(self, text='-', command=self.zoom_out)
self.up_button = tk.Button(self, text='^', command=self.go_up)
self.down_button = tk.Button(self, text='v', command=self.go_down)
self.left_button = tk.Button(self, text='<', command=self.go_left)
self.right_button = tk.Button(self, text='>', command=self.go_right)
self.pause_button = tk.Button(self, text='pause/restart',
command=self.toggle_pause)
self.load_button.grid(column=0, row=0,
columnspan=4, sticky=tk.W + tk.E)
self.start_button.grid(column=0, row=1,
columnspan=2, sticky=tk.W + tk.E)
self.stop_button.grid(column=2, row=1,
columnspan=2, sticky=tk.W + tk.E)
self.zoom_in_button.grid(column=0, row=2,
columnspan=2, sticky=tk.W + tk.E)
self.zoom_out_button.grid(column=2, row=2,
columnspan=2, sticky=tk.W + tk.E)
self.up_button.grid(column=1, row=3,
columnspan=2, sticky=tk.W + tk.E)
self.left_button.grid(column=0, row=4,
columnspan=2, sticky=tk.W + tk.E)
self.right_button.grid(column=2, row=4,
columnspan=2, sticky=tk.W + tk.E)
self.down_button.grid(column=1, row=5,
columnspan=2, sticky=tk.W + tk.E)
self.pause_button.grid(column=1, row=6,
columnspan=2, sticky=tk.W + tk.E)
self.file_name = tk.StringVar()
self.file_entry = tk.Entry(textvariable=self.file_name)
self.content = tk.StringVar()
self.max_entry = tk.Entry(textvariable=self.content)
self.max_entry.insert(0, '10')
self.gen_entry = tk.Entry()
tk.Label(text="file :").grid(column=0, row=7, sticky=tk.W)
self.file_entry.grid(column=0, row=8)
tk.Label(text="maximum generations :").grid(column=0,
row=9, sticky=tk.W)
self.max_entry.grid(column=0, row=10)
tk.Label(text="current generation :").grid(column=0,
row=11, sticky=tk.W)
self.gen_entry.grid(column=0, row=12)
tk.Label(text="").grid(column=0, row=13, rowspan=10, sticky=tk.W)
self.canvas = tk.Canvas(width=640, height=640, background="black")
self.canvas.grid(row=0, rowspan=23,
column=4, sticky=tk.W + tk.E + tk.N + tk.S)
self.x_scale = 1
self.y_scale = 1
def start(self):
if self.run is False:
self.file = self.file_name.get()
if self.file != '':
if self.read_file(self.file):
self.run = True
try:
thread.start_new_thread(self.thread_func, ())
except Exception, e:
dialog.showerror(title='ERROR !!', message='Thread error')
def read_file(self, name):
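        # Expected file layout, inferred from the parsing below (the project
        # ships no format docs):
        #   line 0: axiom, e.g. "F"
        #   line 1: starting angle
        #   line 2: turn angle
        #   line 3: rule count N, followed by N lines of "symbol=replacement"
        #   next:   symbol count M, followed by M lines of "symbol=cmd1,cmd2,..."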
        try:
            with open(name, 'r') as f:
                cat = f.read()
self.lst_rules = list()
self.lst_symbols = list()
lines = cat.splitlines()
self.axiom = lines[0]
self.angle = float(lines[1])
self.ang = float(lines[2])
num_rules = int(lines[3])
for i in range(4, num_rules + 4):
rule = lines[i].split('=')
self.lst_rules.append((rule[0], rule[1]))
num_symbols = int(lines[num_rules + 4])
for i in range(num_rules + 5, num_rules + 5 + num_symbols):
symbol = lines[i].split('=')
commands = symbol[1].split(',')
self.lst_symbols.append((symbol[0], commands))
return True
except Exception:
            dialog.showerror(title='ERROR !!', message='Invalid File')
            return False
def stop(self):
self.run = False
def load(self):
self.file = tkFileDialog.askopenfilename()
if self.file != '':
self.file_entry.delete(0, tk.END)
self.file_entry.insert(0, self.file)
def redraw(self):
self.mutex.acquire()
self.canvas.delete(tk.ALL)
self.l_sys.reset()
lst = self.l_sys.draw()
for li in lst:
if not self.run:
break
self.canvas.create_line(li[0][0],
li[0][1],
li[1][0],
li[1][1],
fill='yellow')
self.canvas.update()
self.mutex.release()
def zoom_in(self):
if self.l_sys is not None:
self.l_sys.length = self.l_sys.length + 0.5
if self.pause:
thread.start_new_thread(self.redraw, ())
def zoom_out(self):
if self.l_sys is not None and self.l_sys.length >= 1.0:
self.l_sys.length = self.l_sys.length - 0.5
if self.pause:
thread.start_new_thread(self.redraw, ())
def go_left(self):
if self.l_sys is not None:
self.l_sys.st_x = self.l_sys.st_x - 10
if self.pause:
thread.start_new_thread(self.redraw, ())
def go_right(self):
if self.l_sys is not None:
self.l_sys.st_x = self.l_sys.st_x + 10
if self.pause:
thread.start_new_thread(self.redraw, ())
def go_up(self):
if self.l_sys is not None:
self.l_sys.st_y = self.l_sys.st_y - 10
if self.pause:
thread.start_new_thread(self.redraw, ())
def go_down(self):
if self.l_sys is not None:
self.l_sys.st_y = self.l_sys.st_y + 10
if self.pause:
thread.start_new_thread(self.redraw, ())
def toggle_pause(self):
if self.pause:
self.pause = False
else:
self.pause = True
def thread_func(self):
self.l_sys = l_system.l_system(self.axiom,
300, 300,
600, 600,
5,
self.angle, self.ang)
self.l_sys.set_symbols(self.lst_symbols)
self.l_sys.set_rules(self.lst_rules)
self.run = True
self.pause = False
text = self.content.get()
self.max = int(text)
self.gen_count = 0
while self.run:
self.mutex.acquire()
self.l_sys.reset()
self.gen_entry.delete(0, tk.END)
self.gen_entry.insert(0, str(self.gen_count + 1))
if not self.pause and self.gen_count < self.max:
self.gen_count = self.gen_count + 1
self.l_sys.next_gen()
lst = self.l_sys.draw()
self.canvas.delete(tk.ALL)
for li in lst:
if not self.run:
break
self.canvas.create_line(li[0][0],
li[0][1],
li[1][0],
li[1][1],
fill='yellow')
self.canvas.update()
self.mutex.release()
while self.gen_count >= self.max or self.pause:
time.sleep(0.01)
if not self.run:
break
time.sleep(1)
self.canvas.delete(tk.ALL)
|
inf0-warri0r/l_viewer
|
display.py
|
Python
|
agpl-3.0
| 9,288 | 0.002046 |
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2008 Zsolt Foldvari
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# $Id$
"Provide formatting tag definition for StyledText."
#-------------------------------------------------------------------------
#
# GRAMPS modules
#
#-------------------------------------------------------------------------
from styledtexttagtype import StyledTextTagType
#-------------------------------------------------------------------------
#
# StyledTextTag class
#
#-------------------------------------------------------------------------
class StyledTextTag(object):
"""Hold formatting information for StyledText.
StyledTextTag is a container class, it's attributes are directly accessed.
@ivar name: Type (or name) of the tag instance. E.g. 'bold', etc.
:type name: :class:`~gen.lib.styledtexttagtype.StyledTextTagType` instace
@ivar value: Value of the tag. E.g. color hex string for font color, etc.
:type value: str or None
@ivar ranges: Pointer pairs into the string, where the tag applies.
:type ranges: list of (int(start), int(end)) tuples.
"""
def __init__(self, name=None, value=None, ranges=None):
"""Setup initial instance variable values.
@note: Since :class:`~gen.lib.grampstype.GrampsType` supports the instance initialization
        with several different base types, please note that the C{name} parameter
can be int, str, unicode, tuple, or even another L{StyledTextTagType}
instance.
"""
self.name = StyledTextTagType(name)
self.value = value
if ranges is None:
self.ranges = []
else:
# Current use of StyledTextTag is such that a shallow copy suffices.
self.ranges = ranges
def serialize(self):
"""Convert the object to a serialized tuple of data.
:returns: Serialized format of the instance.
        :rtype: tuple
"""
return (self.name.serialize(), self.value, self.ranges)
def to_struct(self):
"""
Convert the data held in this object to a structure (eg,
struct) that represents all the data elements.
This method is used to recursively convert the object into a
self-documenting form that can easily be used for various
purposes, including diffs and queries.
These structures may be primitive Python types (string,
integer, boolean, etc.) or complex Python types (lists,
tuples, or dicts). If the return type is a dict, then the keys
of the dict match the fieldname of the object. If the return
struct (or value of a dict key) is a list, then it is a list
of structs. Otherwise, the struct is just the value of the
attribute.
:returns: Returns a struct containing the data of the object.
:rtype: dict
"""
return {"name": self.name.to_struct(),
"value": self.value,
"ranges": self.ranges}
def unserialize(self, data):
"""Convert a serialized tuple of data to an object.
:param data: Serialized format of instance variables.
:type data: tuple
"""
(the_name, self.value, self.ranges) = data
self.name = StyledTextTagType()
self.name.unserialize(the_name)
return self
|
arunkgupta/gramps
|
gramps/gen/lib/styledtexttag.py
|
Python
|
gpl-2.0
| 4,127 | 0.005331 |
# Copyright (c) 2014-2015, NVIDIA CORPORATION. All rights reserved.
import time
import os.path
from collections import OrderedDict, namedtuple
import gevent
import flask
from digits import device_query
from digits.task import Task
from digits.utils import subclass, override
# NOTE: Increment this every time the pickled object changes
PICKLE_VERSION = 2
# Used to store network outputs
NetworkOutput = namedtuple('NetworkOutput', ['kind', 'data'])
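# e.g. NetworkOutput('Accuracy', [0.41, 0.73, 0.85]) -- one entry per epoch
# (an assumption based on how __setstate__ below rebuilds these lists).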
@subclass
class TrainTask(Task):
"""
Defines required methods for child classes
"""
def __init__(self, dataset, train_epochs, snapshot_interval, learning_rate, lr_policy, **kwargs):
"""
Arguments:
dataset -- a DatasetJob containing the dataset for this model
train_epochs -- how many epochs of training data to train on
snapshot_interval -- how many epochs between taking a snapshot
learning_rate -- the base learning rate
lr_policy -- a hash of options to be used for the learning rate policy
Keyword arguments:
gpu_count -- how many GPUs to use for training (integer)
selected_gpus -- a list of GPU indexes to be used for training
batch_size -- if set, override any network specific batch_size with this value
val_interval -- how many epochs between validating the model with an epoch of validation data
pretrained_model -- filename for a model to use for fine-tuning
crop_size -- crop each image down to a square of this size
use_mean -- subtract the dataset's mean file or mean pixel
random_seed -- optional random seed
"""
self.gpu_count = kwargs.pop('gpu_count', None)
self.selected_gpus = kwargs.pop('selected_gpus', None)
self.batch_size = kwargs.pop('batch_size', None)
self.val_interval = kwargs.pop('val_interval', None)
self.pretrained_model = kwargs.pop('pretrained_model', None)
self.crop_size = kwargs.pop('crop_size', None)
self.use_mean = kwargs.pop('use_mean', None)
self.random_seed = kwargs.pop('random_seed', None)
self.solver_type = kwargs.pop('solver_type', None)
self.shuffle = kwargs.pop('shuffle', None)
self.network = kwargs.pop('network', None)
self.framework_id = kwargs.pop('framework_id', None)
super(TrainTask, self).__init__(**kwargs)
self.pickver_task_train = PICKLE_VERSION
self.dataset = dataset
self.train_epochs = train_epochs
self.snapshot_interval = snapshot_interval
self.learning_rate = learning_rate
self.lr_policy = lr_policy
self.current_epoch = 0
self.snapshots = []
# data gets stored as dicts of lists (for graphing)
self.train_outputs = OrderedDict()
self.val_outputs = OrderedDict()
def __getstate__(self):
state = super(TrainTask, self).__getstate__()
if 'dataset' in state:
del state['dataset']
if 'snapshots' in state:
del state['snapshots']
if '_labels' in state:
del state['_labels']
if '_gpu_socketio_thread' in state:
del state['_gpu_socketio_thread']
return state
def __setstate__(self, state):
if state['pickver_task_train'] < 2:
state['train_outputs'] = OrderedDict()
state['val_outputs'] = OrderedDict()
tl = state.pop('train_loss_updates', None)
vl = state.pop('val_loss_updates', None)
va = state.pop('val_accuracy_updates', None)
lr = state.pop('lr_updates', None)
if tl:
state['train_outputs']['epoch'] = NetworkOutput('Epoch', [x[0] for x in tl])
state['train_outputs']['loss'] = NetworkOutput('SoftmaxWithLoss', [x[1] for x in tl])
state['train_outputs']['learning_rate'] = NetworkOutput('LearningRate', [x[1] for x in lr])
if vl:
state['val_outputs']['epoch'] = NetworkOutput('Epoch', [x[0] for x in vl])
if va:
state['val_outputs']['accuracy'] = NetworkOutput('Accuracy', [x[1]/100 for x in va])
state['val_outputs']['loss'] = NetworkOutput('SoftmaxWithLoss', [x[1] for x in vl])
if state['use_mean'] == True:
state['use_mean'] = 'pixel'
elif state['use_mean'] == False:
state['use_mean'] = 'none'
state['pickver_task_train'] = PICKLE_VERSION
super(TrainTask, self).__setstate__(state)
self.snapshots = []
self.dataset = None
@override
def offer_resources(self, resources):
if 'gpus' not in resources:
return None
if not resources['gpus']:
return {} # don't use a GPU at all
if self.gpu_count is not None:
identifiers = []
for resource in resources['gpus']:
if resource.remaining() >= 1:
identifiers.append(resource.identifier)
if len(identifiers) == self.gpu_count:
break
if len(identifiers) == self.gpu_count:
return {'gpus': [(i, 1) for i in identifiers]}
else:
return None
elif self.selected_gpus is not None:
all_available = True
for i in self.selected_gpus:
available = False
for gpu in resources['gpus']:
if i == gpu.identifier:
if gpu.remaining() >= 1:
available = True
break
if not available:
all_available = False
break
if all_available:
return {'gpus': [(i, 1) for i in self.selected_gpus]}
else:
return None
return None
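    # Illustrative outcomes (assumed resource states, not from the original
    # code): with gpu_count=2 and GPUs 0-2 all idle this returns
    # {'gpus': [(0, 1), (1, 1)]}; with selected_gpus=[1, 3] it returns
    # {'gpus': [(1, 1), (3, 1)]} only if both are free, otherwise None.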
@override
def before_run(self):
if 'gpus' in self.current_resources:
# start a thread which sends SocketIO updates about GPU utilization
self._gpu_socketio_thread = gevent.spawn(
self.gpu_socketio_updater,
[identifier for (identifier, value)
in self.current_resources['gpus']]
)
def gpu_socketio_updater(self, gpus):
"""
This thread sends SocketIO messages about GPU utilization
to connected clients
Arguments:
gpus -- a list of identifiers for the GPUs currently being used
"""
from digits.webapp import app, socketio
devices = []
for index in gpus:
device = device_query.get_device(index)
if device:
devices.append((index, device))
if not devices:
raise RuntimeError('Failed to load gpu information for "%s"' % gpus)
# this thread continues until killed in after_run()
while True:
data = []
for index, device in devices:
update = {'name': device.name, 'index': index}
nvml_info = device_query.get_nvml_info(index)
if nvml_info is not None:
update.update(nvml_info)
data.append(update)
with app.app_context():
html = flask.render_template('models/gpu_utilization.html',
data = data)
socketio.emit('task update',
{
'task': self.html_id(),
'update': 'gpu_utilization',
'html': html,
},
namespace='/jobs',
room=self.job_id,
)
gevent.sleep(1)
def send_progress_update(self, epoch):
"""
Sends socketio message about the current progress
"""
if self.current_epoch == epoch:
return
self.current_epoch = epoch
self.progress = epoch/self.train_epochs
self.emit_progress_update()
def save_train_output(self, *args):
"""
Save output to self.train_outputs
"""
from digits.webapp import socketio
if not self.save_output(self.train_outputs, *args):
return
if self.last_train_update and (time.time() - self.last_train_update) < 5:
return
self.last_train_update = time.time()
self.logger.debug('Training %s%% complete.' % round(100 * self.current_epoch/self.train_epochs,2))
# loss graph data
data = self.combined_graph_data()
if data:
socketio.emit('task update',
{
'task': self.html_id(),
'update': 'combined_graph',
'data': data,
},
namespace='/jobs',
room=self.job_id,
)
if data['columns']:
# isolate the Loss column data for the sparkline
graph_data = data['columns'][0][1:]
socketio.emit('task update',
{
'task': self.html_id(),
'job_id': self.job_id,
'update': 'combined_graph',
'data': graph_data,
},
namespace='/jobs',
room='job_management',
)
# lr graph data
data = self.lr_graph_data()
if data:
socketio.emit('task update',
{
'task': self.html_id(),
'update': 'lr_graph',
'data': data,
},
namespace='/jobs',
room=self.job_id,
)
def save_val_output(self, *args):
"""
Save output to self.val_outputs
"""
from digits.webapp import socketio
if not self.save_output(self.val_outputs, *args):
return
# loss graph data
data = self.combined_graph_data()
if data:
socketio.emit('task update',
{
'task': self.html_id(),
'update': 'combined_graph',
'data': data,
},
namespace='/jobs',
room=self.job_id,
)
def save_output(self, d, name, kind, value):
"""
Save output to self.train_outputs or self.val_outputs
Returns true if all outputs for this epoch have been added
Arguments:
d -- the dictionary where the output should be stored
name -- name of the output (e.g. "accuracy")
kind -- the type of outputs (e.g. "Accuracy")
value -- value for this output (e.g. 0.95)
"""
# don't let them be unicode
name = str(name)
kind = str(kind)
# update d['epoch']
if 'epoch' not in d:
d['epoch'] = NetworkOutput('Epoch', [self.current_epoch])
elif d['epoch'].data[-1] != self.current_epoch:
d['epoch'].data.append(self.current_epoch)
if name not in d:
d[name] = NetworkOutput(kind, [])
epoch_len = len(d['epoch'].data)
name_len = len(d[name].data)
# save to back of d[name]
if name_len > epoch_len:
raise Exception('Received a new output without being told the new epoch')
elif name_len == epoch_len:
# already exists
if isinstance(d[name].data[-1], list):
d[name].data[-1].append(value)
else:
d[name].data[-1] = [d[name].data[-1], value]
elif name_len == epoch_len - 1:
# expected case
d[name].data.append(value)
else:
# we might have missed one
for _ in xrange(epoch_len - name_len - 1):
d[name].data.append(None)
d[name].data.append(value)
for key in d:
if key not in ['epoch', 'learning_rate']:
if len(d[key].data) != epoch_len:
return False
return True
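    # Worked example (assumed values): with current_epoch == 1, calling
    #   self.save_output(self.train_outputs, 'loss', 'SoftmaxWithLoss', 0.9)
    # appends 1 to d['epoch'].data and 0.9 to d['loss'].data, and returns
    # True only once every output other than 'epoch' and 'learning_rate'
    # has a value for the current epoch.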
@override
def after_run(self):
if hasattr(self, '_gpu_socketio_thread'):
self._gpu_socketio_thread.kill()
def detect_snapshots(self):
"""
Populate self.snapshots with snapshots that exist on disk
Returns True if at least one usable snapshot is found
"""
return False
def snapshot_list(self):
"""
Returns an array of arrays for creating an HTML select field
"""
return [[s[1], 'Epoch #%s' % s[1]] for s in reversed(self.snapshots)]
def est_next_snapshot(self):
"""
Returns the estimated time in seconds until the next snapshot is taken
"""
return None
def can_view_weights(self):
"""
Returns True if this Task can visualize the weights of each layer for a given model
"""
raise NotImplementedError()
def view_weights(self, model_epoch=None, layers=None):
"""
View the weights for a specific model and layer[s]
"""
return None
def can_infer_one(self):
"""
Returns True if this Task can run inference on one input
"""
raise NotImplementedError()
def can_view_activations(self):
"""
Returns True if this Task can visualize the activations of a model after inference
"""
raise NotImplementedError()
def infer_one(self, data, model_epoch=None, layers=None):
"""
Run inference on one input
"""
return None
def can_infer_many(self):
"""
Returns True if this Task can run inference on many inputs
"""
raise NotImplementedError()
def infer_many(self, data, model_epoch=None):
"""
Run inference on many inputs
"""
return None
def get_labels(self):
"""
Read labels from labels_file and return them in a list
"""
# The labels might be set already
if hasattr(self, '_labels') and self._labels and len(self._labels) > 0:
return self._labels
assert hasattr(self.dataset, 'labels_file'), 'labels_file not set'
assert self.dataset.labels_file, 'labels_file not set'
assert os.path.exists(self.dataset.path(self.dataset.labels_file)), 'labels_file does not exist'
labels = []
with open(self.dataset.path(self.dataset.labels_file)) as infile:
for line in infile:
label = line.strip()
if label:
labels.append(label)
assert len(labels) > 0, 'no labels in labels_file'
self._labels = labels
return self._labels
def lr_graph_data(self):
"""
Returns learning rate data formatted for a C3.js graph
"""
if not self.train_outputs or 'epoch' not in self.train_outputs or 'learning_rate' not in self.train_outputs:
return None
# return 100-200 values or fewer
stride = max(len(self.train_outputs['epoch'].data)/100,1)
e = ['epoch'] + self.train_outputs['epoch'].data[::stride]
lr = ['lr'] + self.train_outputs['learning_rate'].data[::stride]
return {
'columns': [e, lr],
'xs': {
'lr': 'epoch'
},
'names': {
'lr': 'Learning Rate'
},
}
def combined_graph_data(self, cull=True):
"""
Returns all train/val outputs in data for one C3.js graph
Keyword arguments:
cull -- if True, cut down the number of data points returned to a reasonable size
"""
data = {
'columns': [],
'xs': {},
'axes': {},
'names': {},
}
added_train_data = False
added_val_data = False
if self.train_outputs and 'epoch' in self.train_outputs:
if cull:
# max 200 data points
stride = max(len(self.train_outputs['epoch'].data)/100,1)
else:
# return all data
stride = 1
for name, output in self.train_outputs.iteritems():
if name not in ['epoch', 'learning_rate']:
col_id = '%s-train' % name
data['xs'][col_id] = 'train_epochs'
data['names'][col_id] = '%s (train)' % name
if 'accuracy' in output.kind.lower():
data['columns'].append([col_id] + [100*x for x in output.data[::stride]])
data['axes'][col_id] = 'y2'
else:
data['columns'].append([col_id] + output.data[::stride])
added_train_data = True
if added_train_data:
data['columns'].append(['train_epochs'] + self.train_outputs['epoch'].data[::stride])
if self.val_outputs and 'epoch' in self.val_outputs:
if cull:
# max 200 data points
stride = max(len(self.val_outputs['epoch'].data)/100,1)
else:
# return all data
stride = 1
for name, output in self.val_outputs.iteritems():
if name not in ['epoch']:
col_id = '%s-val' % name
data['xs'][col_id] = 'val_epochs'
data['names'][col_id] = '%s (val)' % name
if 'accuracy' in output.kind.lower():
data['columns'].append([col_id] + [100*x for x in output.data[::stride]])
data['axes'][col_id] = 'y2'
else:
data['columns'].append([col_id] + output.data[::stride])
added_val_data = True
if added_val_data:
data['columns'].append(['val_epochs'] + self.val_outputs['epoch'].data[::stride])
if added_train_data:
return data
else:
# return None if only validation data exists
# helps with ordering of columns in graph
return None
# return id of framework used for training
@override
def get_framework_id(self):
return self.framework_id
def get_model_files(self):
"""
return path to model file
"""
raise NotImplementedError()
def get_network_desc(self):
"""
return text description of model
"""
raise NotImplementedError()
|
batra-mlp-lab/DIGITS
|
digits/model/tasks/train.py
|
Python
|
bsd-3-clause
| 19,069 | 0.002203 |
#!/usr/bin/env python
import unittest
import importlib
import random
from mock import patch
vis_map = importlib.import_module('ciftify.bin.cifti_vis_map')
class TestUserSettings(unittest.TestCase):
temp = '/tmp/fake_temp_dir'
palette = 'PALETTE-NAME'
def test_snap_set_to_none_when_in_index_mode(self):
args = self.get_default_arguments()
args['index'] = True
args['<map-name>'] = 'some-map-name'
cifti = vis_map.UserSettings(args, self.temp).snap
assert cifti is None
@patch('ciftify.utilities.docmd')
def test_snap_set_to_original_cifti_when_in_cifti_snaps_mode(self,
mock_docmd):
cifti_path = '/some/path/my_map.dscalar.nii'
args = self.get_default_arguments()
args['cifti-snaps'] = True
args['<map.dscalar.nii>'] = cifti_path
cifti = vis_map.UserSettings(args, self.temp).snap
assert cifti == cifti_path
assert mock_docmd.call_count == 0
@patch('ciftify.utilities.docmd')
def test_snap_is_nifti_converted_to_cifti_in_nifti_snaps_mode(self,
mock_docmd):
args = self.get_default_arguments()
args['nifti-snaps'] = True
args['<map.nii>'] = '/some/path/my_map.nii'
cifti = vis_map.UserSettings(args, self.temp).snap
# Expect only ciftify-a-nifti needs to be run
assert mock_docmd.call_count == 1
# Extract first (only) call, then arguments to call, then command list.
assert 'ciftify-a-nifti' in mock_docmd.call_args_list[0][0][0]
@patch('ciftify.utilities.docmd')
def test_palette_changed_when_option_set_in_nifti_snaps_mode(self,
mock_docmd):
args = self.get_default_arguments()
args['nifti-snaps'] = True
args['<map.nii>'] = '/some/path/my_map.nii'
args['--colour-palette'] = self.palette
vis_map.UserSettings(args, self.temp)
assert self.palette_changed(mock_docmd.call_args_list)
@patch('ciftify.utilities.docmd')
def test_palette_not_changed_when_option_unset_in_nifti_snaps_mode(self,
mock_docmd):
args = self.get_default_arguments()
args['nifti-snaps'] = True
args['<map.nii>'] = '/some/path/my_map.nii'
vis_map.UserSettings(args, self.temp)
assert not self.palette_changed(mock_docmd.call_args_list,
strict_check=True)
@patch('ciftify.utilities.docmd')
def test_palette_changed_when_option_set_in_cifti_snaps_mode(self,
mock_docmd):
args = self.get_default_arguments()
args['cifti-snaps'] = True
args['<map.dscalar.nii>'] = '/some/path/my_map.dscalar.nii'
args['--colour-palette'] = self.palette
vis_map.UserSettings(args, self.temp)
assert self.palette_changed(mock_docmd.call_args_list)
@patch('ciftify.utilities.docmd')
def test_palette_not_changed_when_option_unset_in_cifti_snaps_mode(self,
mock_docmd):
args = self.get_default_arguments()
args['cifti-snaps'] = True
args['<map.dscalar.nii>'] = '/some/path/my_map.dscalar.nii'
vis_map.UserSettings(args, self.temp)
assert not self.palette_changed(mock_docmd.call_args_list,
strict_check=True)
@patch('ciftify.utilities.docmd')
def test_nifti_resampled_during_conversion_to_cifti_when_resample_nifti_set(
self, mock_docmd):
args = self.get_default_arguments()
args['nifti-snaps'] = True
nifti = '/some/path/my_map.nii'
args['<map.nii>'] = nifti
args['--resample-nifti'] = True
settings = vis_map.UserSettings(args, self.temp)
# Reset mock_docmd to clear call_args_list
mock_docmd.reset_mock()
settings._UserSettings__convert_nifti(nifti)
args_list = mock_docmd.call_args_list[0][0][0]
assert '--resample-voxels' in args_list
@patch('ciftify.utilities.docmd')
def test_nifti_not_resampled_when_resample_nifti_unset(self, mock_docmd):
args = self.get_default_arguments()
args['nifti-snaps'] = True
nifti = '/some/path/my_map.nii'
args['<map.nii>'] = nifti
settings = vis_map.UserSettings(args, self.temp)
# Reset mock_docmd to clear call_args_list
mock_docmd.reset_mock()
settings._UserSettings__convert_nifti(nifti)
args_list = mock_docmd.call_args_list[0][0][0]
assert '--resample-voxels' not in args_list
def get_default_arguments(self):
# arguments 'stub' - acts as a template to be modified by tests
arguments = {'cifti-snaps': False,
'<map.dscalar.nii>': None,
'nifti-snaps': False,
'<map.nii>': None,
'index': False,
'<map-name>': None,
'<subject>': 'subject_id',
'--qcdir': '/some/path/qc',
'--roi-overlay': None,
'--hcp-data-dir': '/some/path/hcp',
'--subjects-filter': None,
'--colour-palette': None,
'--output-dscalar': None,
'--resample-nifti': False}
return arguments
def palette_changed(self, call_args_list, strict_check=False):
changed = False
for call in call_args_list:
arg_list = call[0][0]
if '-palette-name' in arg_list:
# If strict_check any changes to the palette will be caught
if strict_check or self.palette in arg_list:
changed = True
return changed
class TestModifyTemplateContents(unittest.TestCase):
variables = ['HCP_DATA_PATH', 'HCP_DATA_RELPATH', 'SUBJID', 'SEEDCORRDIR',
'SEEDCORRRELDIR', 'SEEDCORRCIFTI']
def test_expected_strings_are_replaced(self):
scene_path = '/some/path/ciftify/data/qc_mode.scene'
settings = self.get_settings()
template_contents = get_template_contents(self.variables)
new_contents = vis_map.modify_template_contents(template_contents,
scene_path, settings)
for key in self.variables:
assert key not in new_contents
def get_settings(self):
class SettingsStub(object):
def __init__(self):
self.hcp_dir = '/some/path/hcp'
self.subject = 'subject1234'
self.snap = '/some/path/data/{}_map.dscalar.nii'.format(
self.subject)
return SettingsStub()
def get_template_contents(keys):
# Not a stroke, just randomly generated text
mock_contents = ['Behind sooner dining so window excuse he summer.',
' Breakfast met certainty and fulfilled propriety led. ',
' Waited get either are wooded little her. Contrasted ',
'unreserved as mr particular collecting it everything as ',
'indulgence. Seems ask meant merry could put. Age old begin ',
'had boy noisy table front whole given.']
mock_contents.extend(keys)
random.shuffle(mock_contents)
template_contents = ' '.join(mock_contents)
return template_contents
|
BrainIntensive/OnlineBrainIntensive
|
resources/HCP/ciftify/tests/test_cifti_vis_map.py
|
Python
|
mit
| 7,233 | 0.002627 |
# -*- coding: utf-8 -*-
from odoo import http
from odoo.addons.website_sale_delivery.controllers.main import WebsiteSaleDelivery
from odoo.http import request
class WebsiteSaleCouponDelivery(WebsiteSaleDelivery):
@http.route()
def update_eshop_carrier(self, **post):
Monetary = request.env['ir.qweb.field.monetary']
result = super(WebsiteSaleCouponDelivery, self).update_eshop_carrier(**post)
order = request.website.sale_get_order()
free_shipping_lines = None
if order:
order.recompute_coupon_lines()
order.validate_taxes_on_sales_order()
free_shipping_lines = order._get_free_shipping_lines()
if free_shipping_lines:
currency = order.currency_id
amount_free_shipping = sum(free_shipping_lines.mapped('price_subtotal'))
result.update({
'new_amount_delivery': Monetary.value_to_html(0.0, {'display_currency': currency}),
'new_amount_untaxed': Monetary.value_to_html(order.amount_untaxed, {'display_currency': currency}),
'new_amount_tax': Monetary.value_to_html(order.amount_tax, {'display_currency': currency}),
'new_amount_total': Monetary.value_to_html(order.amount_total, {'display_currency': currency}),
'new_amount_order_discounted': Monetary.value_to_html(order.reward_amount - amount_free_shipping, {'display_currency': currency}),
})
return result
@http.route()
def cart_carrier_rate_shipment(self, carrier_id, **kw):
Monetary = request.env['ir.qweb.field.monetary']
order = request.website.sale_get_order(force_create=True)
free_shipping_lines = order._get_free_shipping_lines()
        # Avoid computing the carrier price when delivery is free (coupon).
        # This means that if the carrier has an error (e.g. 'delivery only
        # for Belgium') it will show Free until the user clicks on it.
if free_shipping_lines:
return {
'carrier_id': carrier_id,
'status': True,
'is_free_delivery': True,
'new_amount_delivery': Monetary.value_to_html(0.0, {'display_currency': order.currency_id}),
'error_message': None,
}
return super(WebsiteSaleCouponDelivery, self).cart_carrier_rate_shipment(carrier_id, **kw)
|
rven/odoo
|
addons/website_sale_coupon_delivery/controllers/main.py
|
Python
|
agpl-3.0
| 2,386 | 0.004191 |
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
class VersionTestDependencyPreferred(AutotoolsPackage):
"""Dependency of version-test-pkg, which has a multi-valued
variant with two default values (a very low priority optimization
criterion for clingo is to maximize their number)
"""
homepage = "http://www.spack.org"
url = "http://www.spack.org/downloads/xz-1.0.tar.gz"
version('5.2.5', sha256='5117f930900b341493827d63aa910ff5e011e0b994197c3b71c08a20228a42df')
variant('libs', default='shared,static', values=('shared', 'static'),
multi=True, description='Build shared libs, static libs or both')
|
LLNL/spack
|
var/spack/repos/builtin.mock/packages/version-test-dependency-preferred/package.py
|
Python
|
lgpl-2.1
| 794 | 0.001259 |
import deep_architect.searchers.common as se
import numpy as np
# NOTE: this searcher does not do any budget adjustment and needs to be
# combined with an evaluator that does.
class SuccessiveNarrowing(se.Searcher):
def __init__(self, search_space_fn, num_initial_samples, reduction_factor,
reset_default_scope_upon_sample):
se.Searcher.__init__(self, search_space_fn,
reset_default_scope_upon_sample)
self.num_initial_samples = num_initial_samples
self.reduction_factor = reduction_factor
self.vals = [None for _ in range(num_initial_samples)]
self.num_remaining = num_initial_samples
self.idx = 0
self.queue = []
for _ in range(num_initial_samples):
inputs, outputs = search_space_fn()
hyperp_value_lst = se.random_specify(outputs)
self.queue.append(hyperp_value_lst)
def sample(self):
assert self.idx < len(self.queue)
hyperp_value_lst = self.queue[self.idx]
(inputs, outputs) = self.search_space_fn()
se.specify(outputs, hyperp_value_lst)
idx = self.idx
self.idx += 1
return inputs, outputs, hyperp_value_lst, {"idx": idx}
def update(self, val, searcher_eval_token):
assert self.num_remaining > 0
idx = searcher_eval_token["idx"]
assert self.vals[idx] is None
self.vals[idx] = val
self.num_remaining -= 1
# generate the next round of architectures by keeping the best ones.
if self.num_remaining == 0:
num_samples = int(self.reduction_factor * len(self.queue))
assert num_samples > 0
top_idxs = np.argsort(self.vals)[::-1][:num_samples]
self.queue = [self.queue[idx] for idx in top_idxs]
self.vals = [None for _ in range(num_samples)]
self.num_remaining = num_samples
self.idx = 0
# run simple successive narrowing on a single machine.
def run_successive_narrowing(search_space_fn, num_initial_samples,
initial_budget, get_evaluator, extract_val_fn,
num_samples_reduction_factor,
budget_increase_factor, num_rounds,
get_evaluation_logger):
num_samples = num_initial_samples
    # SuccessiveNarrowing also requires reset_default_scope_upon_sample;
    # True is assumed here as a typical choice.
    searcher = SuccessiveNarrowing(search_space_fn, num_initial_samples,
                                   num_samples_reduction_factor, True)
evaluation_id = 0
for round_idx in range(num_rounds):
budget = initial_budget * (budget_increase_factor**round_idx)
evaluator = get_evaluator(budget)
for idx in range(num_samples):
(inputs, outputs, hyperp_value_lst,
searcher_eval_token) = searcher.sample()
results = evaluator.eval(inputs, outputs)
val = extract_val_fn(results)
searcher.update(val, searcher_eval_token)
logger = get_evaluation_logger(evaluation_id)
logger.log_config(hyperp_value_lst, searcher_eval_token)
logger.log_results(results)
evaluation_id += 1
num_samples = int(num_samples_reduction_factor * num_samples)
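# Minimal sketch (not part of the original API): the (num_samples, budget)
# schedule implied by the loop above, handy for sanity-checking arguments.
def _narrowing_schedule(num_initial_samples, num_samples_reduction_factor,
                        initial_budget, budget_increase_factor, num_rounds):
    schedule = []
    num_samples = num_initial_samples
    for round_idx in range(num_rounds):
        budget = initial_budget * (budget_increase_factor**round_idx)
        schedule.append((num_samples, budget))
        num_samples = int(num_samples_reduction_factor * num_samples)
    return schedule
# e.g. _narrowing_schedule(27, 1.0 / 3, 1, 3, 3) -> [(27, 1), (9, 3), (3, 9)]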
|
negrinho/deep_architect
|
deep_architect/searchers/successive_narrowing.py
|
Python
|
mit
| 3,234 | 0 |
import logging
import os
import ctypes
import ctypes.util
log = logging.getLogger("lrrbot.systemd")
try:
libsystemd = ctypes.CDLL(ctypes.util.find_library("systemd"))
libsystemd.sd_notify.argtypes = [ctypes.c_int, ctypes.c_char_p]
def notify(status):
libsystemd.sd_notify(0, status.encode('utf-8'))
except OSError as e:
	log.warning("failed to load libsystemd: %s", e)
def notify(status):
pass
class Service:
def __init__(self, loop):
self.loop = loop
timeout_usec = os.environ.get("WATCHDOG_USEC")
if timeout_usec is not None:
self.timeout = (int(timeout_usec) * 1e-6) / 2
self.watchdog_handle = self.loop.call_later(self.timeout, self.watchdog)
self.subsystems = {"irc"}
def watchdog(self):
notify("WATCHDOG=1")
self.watchdog_handle = self.loop.call_later(self.timeout, self.watchdog)
def subsystem_started(self, subsystem):
if subsystem in self.subsystems:
self.subsystems.remove(subsystem)
if self.subsystems == set():
notify("READY=1")
|
andreasots/lrrbot
|
lrrbot/systemd.py
|
Python
|
apache-2.0
| 992 | 0.025202 |
"""
Small event module
=======================
"""
import numpy as np
import logging
logger = logging.getLogger(__name__)
from ...utils.decorators import face_lookup
from ...geometry.sheet_geometry import SheetGeometry
from ...topology.sheet_topology import cell_division
from .actions import (
exchange,
remove,
merge_vertices,
detach_vertices,
increase,
decrease,
increase_linear_tension,
)
def reconnect(sheet, manager, **kwargs):
"""Performs reconnections (vertex merging / splitting) following Finegan et al. 2019
kwargs overwrite their corresponding `sheet.settings` entries
Keyword Arguments
-----------------
threshold_length : the threshold length at which vertex merging is performed
    p_4 : the probability per unit time to perform a detachment from a rank 4 vertex
    p_5p : the probability per unit time to perform a detachment from a rank 5 or more vertex
See Also
--------
**The tricellular vertex-specific adhesion molecule Sidekick
facilitates polarised cell intercalation during Drosophila axis
extension** _Tara M Finegan, Nathan Hervieux, Alexander
Nestor-Bergmann, Alexander G. Fletcher, Guy B Blanchard, Benedicte
Sanson_ bioRxiv 704932; doi: https://doi.org/10.1101/704932
"""
sheet.settings.update(kwargs)
nv = sheet.Nv
merge_vertices(sheet)
if nv != sheet.Nv:
logger.info(f"Merged {nv - sheet.Nv+1} vertices")
nv = sheet.Nv
retval = detach_vertices(sheet)
if retval:
logger.info("Failed to detach, skipping")
if nv != sheet.Nv:
logger.info(f"Detached {sheet.Nv - nv} vertices")
manager.append(reconnect, **kwargs)
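# Illustrative scheduling (a sketch with an assumed EventManager instance):
# `reconnect` re-appends itself, so registering it once keeps reconnections
# active at every time step, e.g.
#     manager.append(reconnect, threshold_length=0.1, p_4=0.1, p_5p=1e-2)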
default_division_spec = {
"face_id": -1,
"face": -1,
"growth_rate": 0.1,
"critical_vol": 2.0,
"geom": SheetGeometry,
}
@face_lookup
def division(sheet, manager, **kwargs):
"""Cell division happens through cell growth up to a critical volume,
followed by actual division of the face.
Parameters
----------
sheet : a `Sheet` object
manager : an `EventManager` instance
face_id : int,
index of the mother face
growth_rate : float, default 0.1
rate of increase of the prefered volume
critical_vol : float, default 2.
        volume at which the cell stops growing and divides
"""
division_spec = default_division_spec
division_spec.update(**kwargs)
face = division_spec["face"]
division_spec["critical_vol"] *= sheet.specs["face"]["prefered_vol"]
print(sheet.face_df.loc[face, "vol"], division_spec["critical_vol"])
if sheet.face_df.loc[face, "vol"] < division_spec["critical_vol"]:
increase(
sheet, "face", face, division_spec["growth_rate"], "prefered_vol", True
)
manager.append(division, **division_spec)
else:
daughter = cell_division(sheet, face, division_spec["geom"])
sheet.face_df.loc[daughter, "id"] = sheet.face_df.id.max() + 1
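# Illustrative registration (assumed face id and rates): grow face 3 by 10%
# per step until it reaches twice its prefered volume, then divide it.
#     manager.append(division, face_id=3, growth_rate=0.1, critical_vol=2.)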
default_contraction_spec = {
"face_id": -1,
"face": -1,
"contractile_increase": 1.0,
"critical_area": 1e-2,
"max_contractility": 10,
"multiply": False,
"contraction_column": "contractility",
"unique": True,
}
@face_lookup
def contraction(sheet, manager, **kwargs):
"""Single step contraction event."""
contraction_spec = default_contraction_spec
contraction_spec.update(**kwargs)
face = contraction_spec["face"]
if (sheet.face_df.loc[face, "area"] < contraction_spec["critical_area"]) or (
sheet.face_df.loc[face, contraction_spec["contraction_column"]]
> contraction_spec["max_contractility"]
):
return
increase(
sheet,
"face",
face,
contraction_spec["contractile_increase"],
contraction_spec["contraction_column"],
contraction_spec["multiply"],
)
default_type1_transition_spec = {
"face_id": -1,
"face": -1,
"critical_length": 0.1,
"geom": SheetGeometry,
}
@face_lookup
def type1_transition(sheet, manager, **kwargs):
"""Custom type 1 transition event that tests if
the the shorter edge of the face is smaller than
the critical length.
"""
type1_transition_spec = default_type1_transition_spec
type1_transition_spec.update(**kwargs)
face = type1_transition_spec["face"]
edges = sheet.edge_df[sheet.edge_df["face"] == face]
if min(edges["length"]) < type1_transition_spec["critical_length"]:
exchange(sheet, face, type1_transition_spec["geom"])
default_face_elimination_spec = {"face_id": -1, "face": -1, "geom": SheetGeometry}
@face_lookup
def face_elimination(sheet, manager, **kwargs):
"""Removes the face with if face_id from the sheet."""
face_elimination_spec = default_face_elimination_spec
face_elimination_spec.update(**kwargs)
remove(sheet, face_elimination_spec["face"], face_elimination_spec["geom"])
default_check_tri_face_spec = {"geom": SheetGeometry}
def check_tri_faces(sheet, manager, **kwargs):
"""Three neighbourghs cell elimination
Add all cells with three neighbourghs in the manager
to be eliminated at the next time step.
Parameters
----------
sheet : a :class:`tyssue.sheet` object
manager : a :class:`tyssue.events.EventManager` object
"""
check_tri_faces_spec = default_check_tri_face_spec
check_tri_faces_spec.update(**kwargs)
tri_faces = sheet.face_df[(sheet.face_df["num_sides"] < 4)].id
manager.extend(
[
(face_elimination, {"face_id": f, "geom": check_tri_faces_spec["geom"]})
for f in tri_faces
]
)
default_contraction_line_tension_spec = {
"face_id": -1,
"face": -1,
"shrink_rate": 1.05,
"contractile_increase": 1.0,
"critical_area": 1e-2,
"max_contractility": 10,
"multiply": True,
"contraction_column": "line_tension",
"unique": True,
}
@face_lookup
def contraction_line_tension(sheet, manager, **kwargs):
"""
Single step contraction event
"""
contraction_spec = default_contraction_line_tension_spec
contraction_spec.update(**kwargs)
face = contraction_spec["face"]
if sheet.face_df.loc[face, "area"] < contraction_spec["critical_area"]:
return
# reduce prefered_area
decrease(
sheet,
"face",
face,
contraction_spec["shrink_rate"],
col="prefered_area",
divide=True,
bound=contraction_spec["critical_area"] / 2,
)
increase_linear_tension(
sheet,
face,
contraction_spec["contractile_increase"],
multiply=contraction_spec["multiply"],
isotropic=True,
limit=100,
)
|
CellModels/tyssue
|
tyssue/behaviors/sheet/basic_events.py
|
Python
|
gpl-2.0
| 6,721 | 0.001785 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import pprint
import random
import wx
import armid
from Borg import Borg
import matplotlib
matplotlib.use('WXAgg')
from matplotlib.figure import Figure
from matplotlib.backends.backend_wxagg import \
FigureCanvasWxAgg as FigCanvas, \
NavigationToolbar2WxAgg as NavigationToolbar
def riskColourCode(riskScore):
if (riskScore <= 1):
return '#fef2ec'
elif (riskScore == 2):
return '#fcd9c8'
elif (riskScore == 3):
return '#f7ac91'
elif (riskScore == 4):
return '#f67e61'
elif (riskScore == 5):
return '#f2543d'
elif (riskScore == 6):
return '#e42626'
elif (riskScore == 7):
return '#b9051a'
elif (riskScore == 8):
return '#900014'
else:
return '#52000D'
class RiskScatterPanel(wx.Panel):
def __init__(self,parent):
wx.Panel.__init__(self,parent,armid.RISKSCATTER_ID)
b = Borg()
self.dbProxy = b.dbProxy
self.dpi = 100
self.fig = Figure((5.0, 4.0), dpi=self.dpi)
self.canvas = FigCanvas(self, -1, self.fig)
self.axes = self.fig.add_subplot(111,xlabel='Severity',ylabel='Likelihood',autoscale_on=False)
self.axes.set_xticklabels(['Marginal','Critical','Catastrophic'])
self.axes.set_yticks([0,1,2,3,4,5])
self.toolbar = NavigationToolbar(self.canvas)
envs = self.dbProxy.getDimensionNames('environment')
self.envCombo = wx.ComboBox(self,armid.RISKSCATTER_COMBOENVIRONMENT_ID,envs[0],choices=envs,size=(300,-1),style=wx.CB_DROPDOWN)
self.envCombo.Bind(wx.EVT_COMBOBOX,self.onEnvironmentChange)
self.vbox = wx.BoxSizer(wx.VERTICAL)
self.vbox.Add(self.toolbar, 0, wx.EXPAND)
self.vbox.Add(self.envCombo,0, wx.EXPAND)
self.vbox.Add(self.canvas, 1, wx.LEFT | wx.TOP | wx.GROW)
self.SetSizer(self.vbox)
self.vbox.Fit(self)
self.drawScatter(envs[0])
def drawScatter(self,envName):
self.axes.clear()
self.axes.grid(True)
self.axes.set_xlabel('Severity')
self.axes.set_ylabel('Likelihood')
self.axes.set_xbound(0,4)
self.axes.set_ybound(0,5)
xs,ys,cs = self.dbProxy.riskScatter(envName)
ccs = []
for c in cs:
ccs.append(riskColourCode(c))
if ((len(xs) > 0) and (len(ys) > 0)):
self.axes.scatter(xs,ys,c=ccs,marker='d')
self.canvas.draw()
def onEnvironmentChange(self,evt):
envName = self.envCombo.GetStringSelection()
self.drawScatter(envName)
def on_save_plot(self, event):
fileChoices = "PNG (*.png)|*.png"
dlg = wx.FileDialog(self,message="Save risk scatter",defaultDir=os.getcwd(),defaultFile="scatter.png",wildcard=fileChoices,style=wx.SAVE)
if dlg.ShowModal() == wx.ID_OK:
path = dlg.GetPath()
self.canvas.print_figure(path, dpi=self.dpi)
|
RobinQuetin/CAIRIS-web
|
cairis/cairis/RiskScatterPanel.py
|
Python
|
apache-2.0
| 3,503 | 0.017985 |
from course import Course
from course_offering import CourseOffering
from distributive_requirement import DistributiveRequirement
from instructor import Instructor
from course_median import CourseMedian
from review import Review
from vote import Vote
from student import Student
|
layuplist/layup-list
|
apps/web/models/__init__.py
|
Python
|
gpl-3.0
| 279 | 0 |
"""Elmax integration common classes and utilities."""
from __future__ import annotations
from datetime import timedelta
import logging
from logging import Logger
import async_timeout
from elmax_api.exceptions import (
ElmaxApiError,
ElmaxBadLoginError,
ElmaxBadPinError,
ElmaxNetworkError,
)
from elmax_api.http import Elmax
from elmax_api.model.actuator import Actuator
from elmax_api.model.endpoint import DeviceEndpoint
from elmax_api.model.panel import PanelEntry, PanelStatus
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, HomeAssistantError
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
UpdateFailed,
)
from .const import DEFAULT_TIMEOUT, DOMAIN
_LOGGER = logging.getLogger(__name__)
class ElmaxCoordinator(DataUpdateCoordinator[PanelStatus]):
"""Coordinator helper to handle Elmax API polling."""
def __init__(
self,
hass: HomeAssistant,
logger: Logger,
username: str,
password: str,
panel_id: str,
panel_pin: str,
name: str,
update_interval: timedelta,
) -> None:
"""Instantiate the object."""
self._client = Elmax(username=username, password=password)
self._panel_id = panel_id
self._panel_pin = panel_pin
self._panel_entry = None
self._state_by_endpoint = None
super().__init__(
hass=hass, logger=logger, name=name, update_interval=update_interval
)
@property
def panel_entry(self) -> PanelEntry | None:
"""Return the panel entry."""
return self._panel_entry
def get_actuator_state(self, actuator_id: str) -> Actuator:
"""Return state of a specific actuator."""
if self._state_by_endpoint is not None:
return self._state_by_endpoint.get(actuator_id)
raise HomeAssistantError("Unknown actuator")
@property
def http_client(self):
"""Return the current http client being used by this instance."""
return self._client
async def _async_update_data(self):
try:
async with async_timeout.timeout(DEFAULT_TIMEOUT):
# Retrieve the panel online status first
panels = await self._client.list_control_panels()
panel = next(
(panel for panel in panels if panel.hash == self._panel_id), None
)
                # If the panel is no longer available to the given user, raise
                # a config error, as the user must reconfigure it in order to
                # make it work again
if not panel:
raise ConfigEntryAuthFailed(
f"Panel ID {self._panel_id} is no more linked to this user account"
)
self._panel_entry = panel
# If the panel is online, proceed with fetching its state
# and return it right away
if panel.online:
status = await self._client.get_panel_status(
control_panel_id=panel.hash, pin=self._panel_pin
) # type: PanelStatus
# Store a dictionary for fast endpoint state access
self._state_by_endpoint = {
k.endpoint_id: k for k in status.all_endpoints
}
return status
# Otherwise, return None. Listeners will know that this means the device is offline
return None
except ElmaxBadPinError as err:
raise ConfigEntryAuthFailed("Control panel pin was refused") from err
except ElmaxBadLoginError as err:
raise ConfigEntryAuthFailed("Refused username/password") from err
except ElmaxApiError as err:
raise UpdateFailed(f"Error communicating with ELMAX API: {err}") from err
except ElmaxNetworkError as err:
raise UpdateFailed(
"A network error occurred while communicating with Elmax cloud."
) from err
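# Illustrative construction (assumed values; in the real integration this is
# driven by the config entry during setup):
#     coordinator = ElmaxCoordinator(
#         hass, _LOGGER, username="user@example.com", password="secret",
#         panel_id="panel-hash", panel_pin="000000", name="Elmax",
#         update_interval=timedelta(seconds=30),
#     )
#     await coordinator.async_config_entry_first_refresh()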
class ElmaxEntity(CoordinatorEntity):
"""Wrapper for Elmax entities."""
coordinator: ElmaxCoordinator
def __init__(
self,
panel: PanelEntry,
elmax_device: DeviceEndpoint,
panel_version: str,
coordinator: ElmaxCoordinator,
) -> None:
"""Construct the object."""
super().__init__(coordinator=coordinator)
self._panel = panel
self._device = elmax_device
self._panel_version = panel_version
self._client = coordinator.http_client
@property
def panel_id(self) -> str:
"""Retrieve the panel id."""
return self._panel.hash
@property
def unique_id(self) -> str | None:
"""Provide a unique id for this entity."""
return self._device.endpoint_id
@property
def name(self) -> str | None:
"""Return the entity name."""
return self._device.name
@property
def device_info(self):
"""Return device specific attributes."""
return {
"identifiers": {(DOMAIN, self._panel.hash)},
"name": self._panel.get_name_by_user(
self.coordinator.http_client.get_authenticated_username()
),
"manufacturer": "Elmax",
"model": self._panel_version,
"sw_version": self._panel_version,
}
@property
def available(self) -> bool:
"""Return if entity is available."""
return super().available and self._panel.online
|
rohitranjan1991/home-assistant
|
homeassistant/components/elmax/common.py
|
Python
|
mit
| 5,658 | 0.001414 |
"""
Transform 2019: Integrating Striplog and GemPy
==============================================
"""
# %%
# ! pip install welly striplog
# %%
# Authors: M. de la Varga, Evan Bianco, Brian Burnham and Dieter Werthmüller
# Importing GemPy
import gempy as gp
# Importing auxiliary libraries
import numpy as np
import pandas as pn
import matplotlib.pyplot as plt
import os
import welly
from welly import Location, Project
import glob
from striplog import Striplog, Legend, Decor
pn.set_option('precision', 2)
# %%
# Creating striplog object
# -----------------------------
#
# %%
# get well header coordinates
well_heads = {'alpha': {'kb_coords': (0, 0, 0)},
'beta': {'kb_coords': (10, 10, 0)},
'gamma': {'kb_coords': (12, 0, 0)},
'epsilon': {'kb_coords': (20, 0, 0)}}
# %%
# Reading tops file
cwd = os.getcwd()
if 'examples' not in cwd:
data_path = os.getcwd() + '/examples'
else:
data_path = cwd + '/..'
print(data_path+'/data/input_data/striplog_integration/*.tops')
topsfiles = glob.glob(data_path+'/data/input_data/striplog_integration/*.tops')
topsfiles
# %%
# Creating striplog object
my_striplogs = []
for file in topsfiles:
with open(file) as f:
text = f.read()
striplog = Striplog.from_csv(text=text)
my_striplogs.append(striplog)
striplog_dict = {'alpha': my_striplogs[1],
'beta': my_striplogs[2],
'gamma': my_striplogs[3],
'epsilon': my_striplogs[0]}
striplog_dict['alpha'][0]
# %%
# Plot striplog
f, a = plt.subplots(ncols=4, sharey=True)
for e, log in enumerate(striplog_dict.items()):
log[1].plot(ax=a[e], legend=None)
f.tight_layout()
plt.show()
# %%
# Striplog to pandas df of bottoms
rows = []
for wellname in striplog_dict.keys():
for i, interval in enumerate(striplog_dict[wellname]):
surface_name = interval.primary.lith
surface_base = interval.base.middle
x, y = well_heads[wellname]['kb_coords'][:-1]
series = 1
rows.append([x, y, surface_base, surface_name, series, wellname])
column_names = ['X', 'Y', 'Z', 'surface', 'series', 'wellname']
df = pn.DataFrame(rows, columns=column_names)
df
# %%
# GemPy model
# ----------------
#
# %%
# Create gempy model object
geo_model = gp.create_model('welly_integration')
extent = [-100, 300, -100, 200, -150, 0]
res = [60, 60, 60]
# Initializting model using the striplog df
gp.init_data(geo_model, extent, res, surface_points_df=df)
# %%
geo_model.surface_points.df.head()
# %%
geo_model.surfaces
# %%
# welly plot with gempy colors
# Create Decor list
dec_list = []
for e, i in enumerate(striplog_dict['alpha']):
dec_list.append(Decor({'_colour': geo_model.surfaces.df.loc[e, 'color'],
'width': None,
'component': i.primary,
'hatch': None}))
# Create legend
legend = Legend(dec_list)
legend
# %%
# Plot striplogs:
f, a = plt.subplots(ncols=4, sharey=True)
for e, log in enumerate(striplog_dict.items()):
log[1].plot(ax=a[e], legend=legend)
f.tight_layout()
plt.show()
# %%
# Modifying the coordinates to make more sense
geo_model.surface_points.df[['X', 'Y']] = geo_model.surface_points.df[['X', 'Y']] * 10
geo_model.surface_points.df['Z'] *= -1
# %%
# Delete points of the basement surface since we are interpolating bottoms (that surface won't exist).
geo_model.delete_surface_points_basement()
# %%
# Adding an arbitrary orientation. Remember gempy need an orientation per series
geo_model.set_default_orientation()
geo_model.modify_orientations(0, X=-500)
# %%
gp.plot_2d(geo_model)
# %%
gp.set_interpolator(geo_model)
# %%
gp.compute_model(geo_model)
# %%
p2d = gp.plot_2d(geo_model, cell_number=[30], show_data=True, show=True)
# %%
gp.plot_3d(geo_model)
# %%
# Pinch out model
# ------------------
#
# As we can see, the 3D model generated above does not honor the fourth well,
# so let's fix it. First let's add an unconformity between the yellow and
# green layers:
#
# %%
geo_model.add_features('Unconformity')
# %%
# Now we set the green layer in the second series
#
# %%
geo_model.map_stack_to_surfaces({'Uncomformity': ['brian', 'evan', 'dieter']})
geo_model.add_surfaces('basement')
# %%
# Lastly we need to add a dummy orientation to the new series:
#
# %%
geo_model.add_orientations(-500, 0, -100, 'dieter', [0, 0, 1])
# %%
# Now we can compute:
#
# %%
gp.compute_model(geo_model)
# %%
p = gp.plot_2d(geo_model, cell_number=[30], show_data=True)
f, a = plt.subplots(ncols=4, sharey=True)
for e, log in enumerate(striplog_dict.items()):
log[1].plot(ax=a[e], legend=legend)
f.tight_layout()
plt.show()
# %%
# Getting better, but not quite there yet. Since the yellow does not show
# up in the last well, the pinch out has to happen somewhere before it, so
# let's add an artificial point to get that shape:
#
# %%
geo_model.add_surface_points(200, 0, -75, 'evan');
# %%
gp.compute_model(geo_model)
p = gp.plot_2d(geo_model, cell_number=[30], show_data=True)
f, a = plt.subplots(ncols=4, sharey=True)
for e, log in enumerate(striplog_dict.items()):
log[1].plot(ax=a[e], legend=legend)
f.tight_layout()
plt.show()
# %%
# sphinx_gallery_thumbnail_number = 7
gp.plot_3d(geo_model)
# %%
# gp.save_model(geo_model)
|
cgre-aachen/gempy
|
examples/integrations/gempy_striplog.py
|
Python
|
lgpl-3.0
| 5,611 | 0.006595 |
import time
from goose import Goose
def load_jezebel():
with open('resources/additional_html/jezebel1.txt') as f:
data = f.read()
return data
def bench(iterations=100):
data = load_jezebel()
goose = Goose()
times = []
for _ in xrange(iterations):
t1 = time.time()
goose.extract(raw_html=data)
t2 = time.time()
iteration_time = t2 - t1
times.append(iteration_time)
return (sum(times) / float(len(times)))
if __name__ == '__main__':
start = time.time()
print bench()
end = time.time()
total_len = end - start
print "total test length: %f" % total_len
|
scivey/goosepp
|
scripts/benchmark_python_goose.py
|
Python
|
mit
| 648 | 0.00463 |
'''
Created on 2014-8-1
@author: xiajie
'''
import numpy as np
def fmax(a, b):
if a >= b:
return a
else:
return b
def fmin(a, b):
if a <= b:
return a
else:
return b
def radia_kernel(x1, x2):
    # NOTE: despite the name, this is a plain linear kernel (dot product);
    # the RBF kernel below is defined but unused in this module.
    return np.transpose(x1).dot(x2)
def kernel(x1, x2):
d = x1 - x2
res = np.sum(d**2)
return np.exp(-res)
def f(X, Y, alphas, x, b):
N = len(alphas)
ret = -b
for i in range(N):
if alphas[i] >= 0 and alphas[i] < 0.000001:
continue
if alphas[i] <= 0 and alphas[i] > -0.000001:
continue
ret += alphas[i]*Y[i]*radia_kernel(x,X[i])
return ret
def W(X, Y, alphas, i, v):
    # evaluate the dual objective with alphas[i] temporarily set to v
    tmp = alphas[i]
    alphas[i] = v
    N = len(Y)
    w = np.sum(alphas)
    s = 0.
    # loop over k, not i, so the parameter i is not clobbered before
    # alphas[i] is restored below
    for k in range(N):
        for j in range(N):
            s += Y[k]*Y[j]*radia_kernel(X[k],X[j])*alphas[k]*alphas[j]
    w = w - 0.5*s
    alphas[i] = tmp
    return w
def takestep(Y, X, alphas, i1, i2, b, E, C=10):
N = len(alphas)
if i1 == i2:
return 0
alpha1 = alphas[i1]
alpha2 = alphas[i2]
y1 = Y[i1]
y2 = Y[i2]
x1 = X[i1]
x2 = X[i2]
if alphas[i1] > 0 and alphas[i1] < C:
E1 = E[i1]
else:
E1 = f(X, Y, alphas, x1, b[0])-y1
if alphas[i2] > 0 and alphas[i2] < C:
E2 = E[i2]
else:
E2 = f(X, Y, alphas, x2, b[0])-y2
s = y1*y2
if y1 != y2:
L = fmax(0, alpha2-alpha1)
H = fmin(C, C+alpha2-alpha1)
else:
L = fmax(0, alpha1+alpha2-C)
H = fmin(C, alpha1+alpha2)
if L == H:
return 0
k11 = radia_kernel(x1, x1)
k12 = radia_kernel(x1, x2)
k22 = radia_kernel(x2, x2)
eta = 2*k12-k11-k22
eps = 0.001
if eta < 0:
a2 = alpha2-y2*(E1-E2)/eta
if a2 < L:
a2 = L
elif a2 > H:
a2 = H
else:
Lobj = W(X, Y, alphas, i2, L)
Hobj = W(X, Y, alphas, i2, H)
        # clip a2 to the bound that maximises the objective (Platt's SMO)
        if Lobj > Hobj + eps:
            a2 = L
        elif Lobj < Hobj - eps:
            a2 = H
else:
a2 = alpha2
if a2 < 1e-8:
a2 = 0
elif a2 > C-1e-8:
a2 = C
if abs(a2-alpha2) < eps*(a2+alpha2+eps):
return 0
a1 = alpha1 + s*(alpha2-a2)
if a1 < 1e-8:
a1 = 0
elif a1 > C-1e-8:
a1 = C
b1 = E1 + y1*(a1-alpha1)*radia_kernel(x1,x1) + y2*(a2-alpha2)*radia_kernel(x1,x2) + b[0]
b2 = E2 + y1*(a1-alpha1)*radia_kernel(x1,x2) + y2*(a2-alpha2)*radia_kernel(x2,x2) + b[0]
if a1 == 0 or a1 == C:
if a2 == 0 or a2 == C:
new_b = (b1+b2)/2.
else:
new_b = b2
else:
new_b = b1
for k in range(N):
if alphas[k] > 0 and alphas[k] < C:
if k == i1 or k == i2:
E[k] = 0.
else:
E[k] = E[k] + y1*(a1-alpha1)*radia_kernel(x1,X[k]) + y2*(a2-alpha2)*radia_kernel(x2,X[k]) + b[0] - new_b
alphas[i1] = a1
alphas[i2] = a2
b[0] = new_b
print 'new_b:', new_b
return 1
def secondheuristic(alphas, E, E1, i2, C):
N = len(E)
best_i = None
if E1 >= 0:
min_e = 999999999.
for i in range(N):
if i != i2 and alphas[i] > 0 and alphas[i] < C:
if E[i] < min_e:
min_e = E[i]
best_i = i
else:
max_e = -999999999.
for i in range(N):
if i != i2 and alphas[i] > 0 and alphas[i] < C:
if E[i] > max_e:
max_e = E[i]
best_i = i
return best_i
def examineExample(X, Y, alphas, b, E, i2, tol=0.001, C=10):
y2 = Y[i2]
alpha2 = alphas[i2]
if alphas[i2] > 0 and alphas[i2] < C:
E2 = E[i2]
else:
E2 = f(X, Y, alphas, X[i2], b[0])-y2
r2 = E2*y2
if (r2 < -tol and alpha2 < C) or (r2 > tol and alpha2 > 0):
i1 = secondheuristic(alphas, E, E2, i2, C)
if i1 != None:
if takestep(Y, X, alphas, i1, i2, b, E):
return 1
for i1 in range(len(alphas)):
if i1 != i2 and alphas[i1] > 0 and alphas[i1] < C:
if takestep(Y, X, alphas, i1, i2, b, E):
return 1
for i1 in range(len(alphas)):
if i1 != i2 and takestep(Y, X, alphas, i1, i2, b, E):
return 1
return 0
def run(X, Y, C=10):
N = len(Y)
alphas = np.zeros(N)
E = np.zeros(N)
b = [0.]
for i in range(N):
E[i] = f(X, Y, alphas, X[i], b[0]) - Y[i]
numChanged = 0
examineAll = 1
while numChanged > 0 or examineAll == 1:
numChanged = 0
if examineAll == 1:
for i in range(N):
numChanged += examineExample(X, Y, alphas, b, E, i)
else:
for i in range(N):
if alphas[i] > 0 and alphas[i] < C:
numChanged += examineExample(X, Y, alphas, b, E, i)
if examineAll == 1:
examineAll = 0
elif numChanged == 0:
examineAll = 1
return (alphas, b[0])
def predict(X, Y, alphas, b, x):
res = f(X, Y, alphas, x, b)
if res > 0:
return 1
else:
return 0
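# Minimal demo (a sketch with assumed toy data, not from the original
# module): train on a tiny linearly separable set and classify a point.
# Note that predict() maps the positive class to 1 and everything else to 0.
def _demo():
    X = np.array([[1., 1.], [2., 2.], [-1., -1.], [-2., -2.]])
    Y = np.array([1., 1., -1., -1.])
    alphas, b = run(X, Y)
    print predict(X, Y, alphas, b, np.array([1.5, 1.5]))  # expected: 1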
|
jayshonzs/ESL
|
SVM/SMO.py
|
Python
|
mit
| 5,230 | 0.005354 |
from ..models import models
class RasterModel(models.Model):
rast = models.RasterField('A Verbose Raster Name', null=True, srid=4326, spatial_index=True, blank=True)
class Meta:
required_db_features = ['supports_raster']
def __str__(self):
return str(self.id)
|
DONIKAN/django
|
tests/gis_tests/rasterapp/models.py
|
Python
|
bsd-3-clause
| 292 | 0.003425 |
"""
super simple utilities to display tabular data
columns is a list of tuples:
- name: header name for the column
- f: a function which takes one argument *row* and returns the value to
display for a cell. the function will be called for each of the rows
supplied
"""
import sys
import csv
def pcsv(columns, rows, key=lambda x: x):
writer = csv.writer(sys.stdout)
writer.writerow([x for x, _ in columns])
for row in sorted(rows, key=key):
writer.writerow([f(row) for _, f in columns])
def pprint(columns, rows, key=lambda x: x):
lengths = {}
for name, _ in columns:
lengths[name] = len(name) + 1
for row in rows:
for name, f in columns:
lengths[name] = max(lengths[name], len(str(f(row)))+1)
fmt = ' '.join(['{:<%s}' % lengths[x] for x, _ in columns])
print fmt.format(*[x for x, _ in columns])
for row in sorted(rows, key=key):
print fmt.format(*[f(row) for _, f in columns])
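# Example usage (illustrative data): both helpers share the columns/rows
# contract described in the module docstring.
if __name__ == '__main__':
    example_columns = [('name', lambda r: r['name']),
                       ('size', lambda r: r['size'])]
    example_rows = [{'name': 'beta', 'size': 12}, {'name': 'alpha', 'size': 3}]
    pprint(example_columns, example_rows, key=lambda r: r['name'])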
|
Livefyre/awscensus
|
ec2/tabular.py
|
Python
|
mit
| 990 | 0 |
#!/usr/bin/env python
#
# Copyright 2011 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
"""
Read samples from a UHD device and write them to a binary file;
outputs single precision complex float values or complex short values
(interleaved 16 bit signed short integers).
"""
from gnuradio import gr, eng_notation
from gnuradio import uhd
from gnuradio.eng_option import eng_option
from optparse import OptionParser
import sys
n2s = eng_notation.num_to_str
class rx_cfile_block(gr.top_block):
def __init__(self, options, filename):
gr.top_block.__init__(self)
# Create a UHD device source
if options.output_shorts:
self._u = uhd.usrp_source(device_addr=options.address,
io_type=uhd.io_type.COMPLEX_INT16,
num_channels=1)
self._sink = gr.file_sink(gr.sizeof_short*2, filename)
else:
self._u = uhd.usrp_source(device_addr=options.address,
io_type=uhd.io_type.COMPLEX_FLOAT32,
num_channels=1)
self._sink = gr.file_sink(gr.sizeof_gr_complex, filename)
# Set receiver sample rate
self._u.set_samp_rate(options.samp_rate)
# Set receive daughterboard gain
if options.gain is None:
g = self._u.get_gain_range()
options.gain = float(g.start()+g.stop())/2
print "Using mid-point gain of", options.gain, "(", g.start(), "-", g.stop(), ")"
self._u.set_gain(options.gain)
# Set the antenna
if(options.antenna):
self._u.set_antenna(options.antenna, 0)
# Set frequency (tune request takes lo_offset)
if(options.lo_offset is not None):
treq = uhd.tune_request(options.freq, options.lo_offset)
else:
treq = uhd.tune_request(options.freq)
tr = self._u.set_center_freq(treq)
if tr == None:
sys.stderr.write('Failed to set center frequency\n')
raise SystemExit, 1
# Create head block if needed and wire it up
if options.nsamples is None:
self.connect(self._u, self._sink)
else:
if options.output_shorts:
self._head = gr.head(gr.sizeof_short*2, int(options.nsamples))
else:
self._head = gr.head(gr.sizeof_gr_complex, int(options.nsamples))
self.connect(self._u, self._head, self._sink)
input_rate = self._u.get_samp_rate()
if options.verbose:
print "Address:", options.address
print "Rx gain:", options.gain
print "Rx baseband frequency:", n2s(tr.actual_rf_freq)
print "Rx DDC frequency:", n2s(tr.actual_dsp_freq)
print "Rx Sample Rate:", n2s(input_rate)
if options.nsamples is None:
print "Receiving samples until Ctrl-C"
else:
print "Receving", n2s(options.nsamples), "samples"
if options.output_shorts:
print "Writing 16-bit complex shorts"
else:
print "Writing 32-bit complex floats"
print "Output filename:", filename
def get_options():
usage="%prog: [options] output_filename"
parser = OptionParser(option_class=eng_option, usage=usage)
parser.add_option("-a", "--address", type="string", default="addr=192.168.10.2",
help="Address of UHD device, [default=%default]")
parser.add_option("-A", "--antenna", type="string", default=None,
help="select Rx Antenna where appropriate")
parser.add_option("", "--samp-rate", type="eng_float", default=1e6,
help="set sample rate (bandwidth) [default=%default]")
parser.add_option("-f", "--freq", type="eng_float", default=None,
help="set frequency to FREQ", metavar="FREQ")
parser.add_option("-g", "--gain", type="eng_float", default=None,
help="set gain in dB (default is midpoint)")
parser.add_option( "-s","--output-shorts", action="store_true", default=False,
help="output interleaved shorts instead of complex floats")
parser.add_option("-N", "--nsamples", type="eng_float", default=None,
help="number of samples to collect [default=+inf]")
parser.add_option("-v", "--verbose", action="store_true", default=False,
help="verbose output")
parser.add_option("", "--lo-offset", type="eng_float", default=None,
help="set daughterboard LO offset to OFFSET [default=hw default]")
(options, args) = parser.parse_args ()
if len(args) != 1:
parser.print_help()
raise SystemExit, 1
if options.freq is None:
parser.print_help()
sys.stderr.write('You must specify the frequency with -f FREQ\n');
raise SystemExit, 1
return (options, args[0])
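# Example invocation (illustrative values):
#   ./uhd_rx_cfile.py -a addr=192.168.10.2 -f 100e6 --samp-rate 2e6 \
#       -N 1e6 -v samples.dat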
if __name__ == '__main__':
(options, filename) = get_options()
tb = rx_cfile_block(options, filename)
try:
tb.run()
except KeyboardInterrupt:
pass
|
tta/gnuradio-tta
|
gr-uhd/apps/uhd_rx_cfile.py
|
Python
|
gpl-3.0
| 5,930 | 0.006071 |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from heat.common import exception
from heat.common.i18n import _
from heat.common import password_gen
from heat.engine import attributes
from heat.engine import constraints
from heat.engine import properties
from heat.engine import resource
from heat.engine import support
from heat.engine import translation
class RandomString(resource.Resource):
"""A resource which generates a random string.
    This is useful for configuring passwords and secrets on services. The
    random string can be generated from specified character sequences,
    meaning that all characters are chosen at random from those sequences,
    or from character classes, e.g. lettersdigits, meaning that all
    characters are chosen at random from the union of ASCII letters and
    digits. The output is a randomly generated string of the specified
    length (or of length 32 if the length property isn't specified).
"""
support_status = support.SupportStatus(version='2014.1')
PROPERTIES = (
LENGTH, SEQUENCE, CHARACTER_CLASSES, CHARACTER_SEQUENCES,
SALT,
) = (
'length', 'sequence', 'character_classes', 'character_sequences',
'salt',
)
_CHARACTER_CLASSES_KEYS = (
CHARACTER_CLASSES_CLASS, CHARACTER_CLASSES_MIN,
) = (
'class', 'min',
)
_CHARACTER_SEQUENCES = (
CHARACTER_SEQUENCES_SEQUENCE, CHARACTER_SEQUENCES_MIN,
) = (
'sequence', 'min',
)
ATTRIBUTES = (
VALUE,
) = (
'value',
)
properties_schema = {
LENGTH: properties.Schema(
properties.Schema.INTEGER,
_('Length of the string to generate.'),
default=32,
constraints=[
constraints.Range(1, 512),
]
),
SEQUENCE: properties.Schema(
properties.Schema.STRING,
_('Sequence of characters to build the random string from.'),
constraints=[
constraints.AllowedValues(password_gen.CHARACTER_CLASSES),
],
support_status=support.SupportStatus(
status=support.HIDDEN,
version='5.0.0',
previous_status=support.SupportStatus(
status=support.DEPRECATED,
message=_('Use property %s.') % CHARACTER_CLASSES,
version='2014.2'
)
)
),
CHARACTER_CLASSES: properties.Schema(
properties.Schema.LIST,
            _('A list of character classes and their constraints to generate '
'the random string from.'),
schema=properties.Schema(
properties.Schema.MAP,
schema={
CHARACTER_CLASSES_CLASS: properties.Schema(
properties.Schema.STRING,
(_('A character class and its corresponding %(min)s '
'constraint to generate the random string from.')
% {'min': CHARACTER_CLASSES_MIN}),
constraints=[
constraints.AllowedValues(
password_gen.CHARACTER_CLASSES),
],
default=password_gen.LETTERS_DIGITS),
CHARACTER_CLASSES_MIN: properties.Schema(
properties.Schema.INTEGER,
_('The minimum number of characters from this '
'character class that will be in the generated '
'string.'),
default=1,
constraints=[
constraints.Range(1, 512),
]
)
}
),
# add defaults for backward compatibility
default=[{CHARACTER_CLASSES_CLASS: password_gen.LETTERS_DIGITS,
CHARACTER_CLASSES_MIN: 1}]
),
CHARACTER_SEQUENCES: properties.Schema(
properties.Schema.LIST,
_('A list of character sequences and their constraints to '
'generate the random string from.'),
schema=properties.Schema(
properties.Schema.MAP,
schema={
CHARACTER_SEQUENCES_SEQUENCE: properties.Schema(
properties.Schema.STRING,
_('A character sequence and its corresponding %(min)s '
'constraint to generate the random string '
'from.') % {'min': CHARACTER_SEQUENCES_MIN},
required=True),
CHARACTER_SEQUENCES_MIN: properties.Schema(
properties.Schema.INTEGER,
_('The minimum number of characters from this '
'sequence that will be in the generated '
'string.'),
default=1,
constraints=[
constraints.Range(1, 512),
]
)
}
)
),
SALT: properties.Schema(
properties.Schema.STRING,
_('Value which can be set or changed on stack update to trigger '
'the resource for replacement with a new random string. The '
'salt value itself is ignored by the random generator.')
),
}
attributes_schema = {
VALUE: attributes.Schema(
_('The random string generated by this resource. This value is '
'also available by referencing the resource.'),
cache_mode=attributes.Schema.CACHE_NONE,
type=attributes.Schema.STRING
),
}
def translation_rules(self, props):
if props.get(self.SEQUENCE):
return [
translation.TranslationRule(
props,
translation.TranslationRule.ADD,
[self.CHARACTER_CLASSES],
[{self.CHARACTER_CLASSES_CLASS: props.get(
self.SEQUENCE),
self.CHARACTER_CLASSES_MIN: 1}]),
translation.TranslationRule(
props,
translation.TranslationRule.DELETE,
[self.SEQUENCE]
)
]
def _generate_random_string(self, char_sequences, char_classes, length):
seq_mins = [
password_gen.special_char_class(
char_seq[self.CHARACTER_SEQUENCES_SEQUENCE],
char_seq[self.CHARACTER_SEQUENCES_MIN])
for char_seq in char_sequences]
char_class_mins = [
password_gen.named_char_class(
char_class[self.CHARACTER_CLASSES_CLASS],
char_class[self.CHARACTER_CLASSES_MIN])
for char_class in char_classes]
return password_gen.generate_password(length,
seq_mins + char_class_mins)
def validate(self):
super(RandomString, self).validate()
char_sequences = self.properties[self.CHARACTER_SEQUENCES]
char_classes = self.properties[self.CHARACTER_CLASSES]
def char_min(char_dicts, min_prop):
if char_dicts:
return sum(char_dict[min_prop] for char_dict in char_dicts)
return 0
length = self.properties[self.LENGTH]
min_length = (char_min(char_sequences, self.CHARACTER_SEQUENCES_MIN) +
char_min(char_classes, self.CHARACTER_CLASSES_MIN))
if min_length > length:
msg = _("Length property cannot be smaller than combined "
"character class and character sequence minimums")
raise exception.StackValidationFailed(message=msg)
def handle_create(self):
char_sequences = self.properties[self.CHARACTER_SEQUENCES] or []
char_classes = self.properties[self.CHARACTER_CLASSES] or []
length = self.properties[self.LENGTH]
random_string = self._generate_random_string(char_sequences,
char_classes,
length)
self.data_set('value', random_string, redact=True)
self.resource_id_set(self.physical_resource_name())
def _resolve_attribute(self, name):
if name == self.VALUE:
return self.data().get(self.VALUE)
def get_reference_id(self):
if self.resource_id is not None:
return self.data().get('value')
else:
return six.text_type(self.name)
def resource_mapping():
return {
'OS::Heat::RandomString': RandomString,
}
|
noironetworks/heat
|
heat/engine/resources/openstack/heat/random_string.py
|
Python
|
apache-2.0
| 9,442 | 0 |
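# A minimal sketch of the generation scheme behind OS::Heat::RandomString
# above: each character class contributes at least `min` characters, the
# remainder is drawn from the union of all classes, and the result is
# shuffled. `secrets` stands in for Heat's password_gen helpers here, so
# this is an approximation of the behaviour, not Heat's implementation.
import secrets
import string
def generate_random_string(char_classes, length):
    # char_classes: list of (alphabet, minimum) pairs, e.g.
    # [(string.ascii_letters, 1), (string.digits, 2)]
    min_total = sum(minimum for _, minimum in char_classes)
    if min_total > length:
        # mirrors RandomString.validate(): combined minimums must fit
        raise ValueError("length cannot be smaller than combined minimums")
    chars = [secrets.choice(alphabet)
             for alphabet, minimum in char_classes
             for _ in range(minimum)]
    union = ''.join(alphabet for alphabet, _ in char_classes)
    chars.extend(secrets.choice(union) for _ in range(length - min_total))
    secrets.SystemRandom().shuffle(chars)
    return ''.join(chars)
# e.g. generate_random_string([(string.ascii_letters, 1), (string.digits, 2)], 32)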
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "vgid.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
Guest007/vgid
|
manage.py
|
Python
|
mit
| 247 | 0 |
# -*- coding: utf-8 -*-
"""
Test accounts module
"""
import os
from decimal import Decimal
from mock import patch
from django.core.urlresolvers import reverse
from django.http import HttpResponseForbidden
from django.test import TestCase
from .factories import UserFactory, UserWithAvatarFactory, AdminFactory
from .models import UserProfile, get_user_avatar_path, TeamAccessKey, Team
from .tasks import topup_accounts_task, update_portfolio_value, create_accounts_snapshot, \
update_users_classification
from .templatetags.user import user_home, user_rank
from .utils import process_username
from constance import config
from events.factories import EventFactory, BetFactory
from events.models import Event, Bet
from politikon.templatetags.format import formatted
from politikon.templatetags.path import startswith
class UserProfileModelTestCase(TestCase):
"""
Test methods for user object
"""
def test_user_creation(self):
"""
Create user and check his attributes
"""
user = UserFactory(username='johnsmith', name='John Smith')
self.assertEqual('johnsmith', user.__unicode__())
self.assertEqual('John Smith', user.name)
self.assertEqual('John Smith', user.get_short_name())
self.assertEqual(False, user.is_vip)
self.assertEqual('John Smith (johnsmith)', user.get_full_name())
self.assertEqual('John Smith (johnsmith)', user.full_name)
user.calculate_reputation()
user.save()
self.assertEqual(False, user.is_superuser)
self.assertEqual({
'user_id': 1,
'total_cash': formatted(0),
'portfolio_value': formatted(0),
'reputation': '100%',
}, user.statistics_dict)
self.assertEqual(0, user.current_portfolio_value)
def test_get_user_avatar_path(self):
"""
Get image path
"""
user = UserFactory(username='johnsmith')
path = get_user_avatar_path(user, 'my-avatar.png')
self.assertEqual('avatars/johnsmith.png', path)
def test_user_urls(self):
"""
Check is urls are valid
"""
user = UserFactory(
twitter_user='jsmith',
facebook_user='facesmith'
)
# TODO: FIXME
# url = user.get_absolute_url()
# self.assertEqual('/accounts/1/', url)
#
# url = user.get_avatar_url()
# self.assertEqual('/static/img/blank-avatar.jpg', url)
#
# url = user.get_twitter_url()
# self.assertEqual('https://twitter.com/jsmith', url)
#
# url = user.get_facebook_url()
# self.assertEqual('https://www.facebook.com/facesmith', url)
def test_twitter_user(self):
"""
Check method for account connected with twitter
"""
user = UserFactory()
url = user.get_facebook_url()
self.assertIsNone(url)
url = user.get_twitter_url()
self.assertIsNone(url)
user.twitter_user = 'jsmith'
user.save()
url = user.get_twitter_url()
self.assertEqual('https://twitter.com/jsmith', url)
def test_current_portfolio_value(self):
"""
Current portfolio value
"""
user = UserFactory()
self.assertEqual(0, user.current_portfolio_value)
event = EventFactory()
bet = BetFactory(user=user, event=event)
self.assertEqual(50, user.current_portfolio_value)
bet.outcome = Bet.NO
bet.has = 2
bet.save()
self.assertEqual(100, user.current_portfolio_value)
def test_get_avatar_url(self):
"""
Get avatar URL
"""
user = UserFactory()
self.assertEqual('/static/img/blank-avatar.jpg', user.get_avatar_url())
user2 = UserWithAvatarFactory(username='johnrambro')
self.assertEqual('avatars/johnrambro.jpg', user2.get_avatar_url())
os.remove('avatars/johnrambro.jpg')
def test_reset_account_without_bonus(self):
"""
Test reset account
"""
user = UserFactory()
user.reset_account()
self.assertEqual({
'user_id': 1,
'total_cash': formatted(1000),
'portfolio_value': formatted(0),
'reputation': "100%",
}, user.statistics_dict)
def test_reset_account_with_bonus(self):
"""
Test reset account
"""
user = UserFactory()
user.reset_account(0.1)
self.assertEqual({
'user_id': 1,
'total_cash': formatted(1100),
'portfolio_value': formatted(0),
'reputation': "110%",
}, user.statistics_dict)
def test_get_newest_results(self):
"""
Get newest results
"""
users = UserFactory.create_batch(2)
events = EventFactory.create_batch(5)
BetFactory(user=users[0], event=events[0])
bet2 = BetFactory(user=users[0], event=events[1])
bet3 = BetFactory(user=users[0], event=events[2])
bet4 = BetFactory(user=users[0], event=events[3])
bet5 = BetFactory(user=users[1], event=events[4])
events[1].outcome = Event.CANCELLED
events[1].save()
events[2].outcome = Event.FINISHED_YES
events[2].save()
events[3].outcome = Event.FINISHED_NO
events[3].save()
events[4].outcome = Event.FINISHED_YES
events[4].save()
bet2.is_new_resolved = True
bet2.save()
bet3.is_new_resolved = True
bet3.save()
bet4.is_new_resolved = True
bet4.save()
bet5.is_new_resolved = True
bet5.save()
self.assertEqual([bet2, bet3, bet4], list(users[0].get_newest_results()))
self.assertEqual([bet5], list(users[1].get_newest_results()))
class UserProfileManagerTestCase(TestCase):
"""
accounts/managers UserProfileManager
"""
def test_return_new_user_object(self):
"""
Return new user object
"""
user = UserProfile.objects.return_new_user_object(
username='j_smith',
password='password9',
)
self.assertIsInstance(user, UserProfile)
self.assertEqual('j_smith', user.username)
self.assertTrue(user.check_password('password9'))
with self.assertRaises(ValueError):
UserProfile.objects.return_new_user_object(
username=None,
)
def test_create_user(self):
"""
Create user
"""
user = UserProfile.objects.create_user(
username='j_smith',
email='j_smith@example.com',
password='password9',
)
self.assertIsInstance(user, UserProfile)
self.assertEqual('j_smith', user.username)
self.assertTrue(user.check_password('password9'))
self.assertTrue(user.is_active)
self.assertEqual({
'user_id': 1,
'total_cash': formatted(config.STARTING_CASH),
'portfolio_value': formatted(0),
'reputation': '100%',
}, user.statistics_dict)
user2 = UserProfile.objects.create_user(
username='j_smith',
email='j_smith@example.com',
)
self.assertIsInstance(user2, HttpResponseForbidden)
def test_create_superuser(self):
"""
Create superuser
"""
user = UserProfile.objects.create_superuser(
username='j_smith',
email='j_smith@example.com',
password='password9',
)
self.assertIsInstance(user, UserProfile)
self.assertEqual('j_smith', user.username)
self.assertTrue(user.check_password('password9'))
self.assertTrue(user.is_staff)
self.assertTrue(user.is_admin)
self.assertTrue(user.is_active)
self.assertEqual({
'user_id': 1,
'total_cash': formatted(0),
'portfolio_value': formatted(0),
'reputation': '100%',
}, user.statistics_dict)
user2 = UserProfile.objects.create_superuser(
username='j_smith',
email='j_smith@example.com',
)
self.assertIsInstance(user2, HttpResponseForbidden)
def test_create_user_with_random_password(self):
"""
Create user with random password
"""
user, password = UserProfile.objects.create_user_with_random_password(
username='j_smith',
)
self.assertTrue(user.check_password(password))
def test_get_users(self):
"""
Get users
"""
user1 = UserFactory()
UserFactory(is_deleted=True)
UserFactory(is_active=False)
users = UserProfile.objects.get_users()
self.assertIsInstance(users[0], UserProfile)
self.assertEqual(1, len(users))
self.assertEqual([user1], list(users))
def test_get_ranking_users(self):
"""
Get ranking users
"""
UserFactory()
UserFactory()
UserFactory(is_deleted=True)
UserFactory(is_active=False)
users = UserProfile.objects.get_ranking_users()
self.assertEqual(0, len(users))
self.assertEqual([], list(users))
# TODO mock transaction
def test_get_admins(self):
"""
Get admins
"""
UserFactory()
UserFactory(is_admin=True)
UserFactory(is_staff=True)
user4 = AdminFactory()
admins = UserProfile.objects.get_admins()
self.assertIsInstance(admins[0], UserProfile)
self.assertEqual(1, len(admins))
self.assertEqual([user4], list(admins))
def test_get_best_weekly(self):
"""
Get best weekly
"""
user1 = UserFactory(weekly_result=100)
user2 = UserFactory(weekly_result=300)
UserFactory()
AdminFactory()
users = UserProfile.objects.get_best_weekly()
self.assertEqual(0, len(users))
self.assertEqual([], list(users))
# TODO mock transaction
# self.assertIsInstance(users[0], UserProfile)
# self.assertEqual(2, len(users))
# self.assertEqual([user2, user1], list(users))
def test_get_best_monthly(self):
"""
Get best monthly
"""
UserFactory()
user2 = UserFactory(monthly_result=300)
AdminFactory()
user4 = UserFactory(monthly_result=100)
users = UserProfile.objects.get_best_monthly()
self.assertEqual(0, len(users))
self.assertEqual([], list(users))
# TODO mock transaction
# self.assertIsInstance(users[0], UserProfile)
# self.assertEqual(2, len(users))
# self.assertEqual([user2, user4], list(users))
def test_get_best_overall(self):
"""
Get best overall
"""
user1 = UserFactory()
user2 = UserFactory(reputation=Decimal(300))
AdminFactory()
user4 = UserFactory(reputation=Decimal(50))
users = UserProfile.objects.get_best_overall()
self.assertEqual(0, len(users))
self.assertEqual([], list(users))
# TODO mock transaction
# self.assertIsInstance(users[0], UserProfile)
# self.assertEqual(3, len(users))
# self.assertEqual([user2, user1, user4], list(users))
def test_get_user_positions(self):
"""
Get user positions
"""
user1 = UserFactory(weekly_result=100)
user2 = UserFactory(weekly_result=300, monthly_result=300, reputation=Decimal(300))
user3 = AdminFactory()
user4 = UserFactory(monthly_result=100, reputation=Decimal(50))
# TODO mock
self.assertEqual({
'week_rank': '-',
'month_rank': '-',
'overall_rank': '-'
}, UserProfile.objects.get_user_positions(user1))
self.assertEqual({
'week_rank': '-',
'month_rank': '-',
'overall_rank': '-'
}, UserProfile.objects.get_user_positions(user2))
self.assertEqual({
'week_rank': '-',
'month_rank': '-',
'overall_rank': '-'
}, UserProfile.objects.get_user_positions(user3))
self.assertEqual({
'week_rank': '-',
'month_rank': '-',
'overall_rank': '-'
}, UserProfile.objects.get_user_positions(user4))
# self.assertEqual({
# 'week_rank': 2,
# 'month_rank': '-',
# 'overall_rank': 2
# }, UserProfile.objects.get_user_positions(user1))
# self.assertEqual({
# 'week_rank': 1,
# 'month_rank': 1,
# 'overall_rank': 1
# }, UserProfile.objects.get_user_positions(user2))
# self.assertEqual({
# 'week_rank': '-',
# 'month_rank': '-',
# 'overall_rank': '-'
# }, UserProfile.objects.get_user_positions(user3))
# self.assertEqual({
# 'week_rank': '-',
# 'month_rank': 2,
# 'overall_rank': 3
# }, UserProfile.objects.get_user_positions(user4))
class UserPipelineTestCase(TestCase):
"""
accounts/pipeline
"""
def test_save_profile(self):
"""
Save profile
"""
user = UserFactory()
# save_profile(user,
class UserTasksTestCase(TestCase):
"""
accounts/tasks
"""
def test_topup_accounts_task(self):
"""
Topup
"""
user = UserFactory()
topup_accounts_task()
user.refresh_from_db()
self.assertEqual(config.DAILY_TOPUP, user.total_cash)
# TODO mock and test exception
@patch.object(UserProfile, 'topup_cash')
@patch('accounts.tasks.logger')
def test_topup_accounts_task_error(self, logger, topup_cash):
UserFactory()
topup_cash.side_effect = Exception()
topup_accounts_task()
logger.exception.assert_called_once()
def test_update_portfolio_value(self):
"""
Update portfolio_value
"""
price = 90
user = UserFactory()
event = EventFactory(current_sell_for_price=price)
BetFactory(user=user, event=event, has=1, outcome=True)
self.assertEqual(0, user.portfolio_value)
update_portfolio_value()
user.refresh_from_db()
# TODO FIXME
# self.assertEqual(price, user.portfolio_value)
def test_create_accounts_snapshot(self):
user = UserFactory()
create_accounts_snapshot()
# TODO mock logger and create_snapshot()
def test_update_users_classification(self):
users = UserFactory.create_batch(6)
update_users_classification()
# TODO: mock reputation changes
class UserTemplatetagsTestCase(TestCase):
"""
accounts/templatetags
"""
def test_user_home(self):
"""
User home
"""
user = UserFactory()
user_templatetag = user_home(user, 1000, True)
self.assertEqual({
'user': user,
'reputation_change': 1000,
'is_formatted': True
}, user_templatetag)
user_templatetag = user_home(user, -100)
self.assertEqual({
'user': user,
'reputation_change': -100,
'is_formatted': False
}, user_templatetag)
# TODO FIXME
# def test_user_rank(self):
# """
# User rank
# """
# user = UserFactory()
# user_templatetag = user_rank(user)
# self.assertEqual({
# 'profit': None,
# 'user': user,
# 'counter': 1,
# }, user_templatetag)
# user_templatetag_with_profit = user_rank(user, 10)
# self.assertEqual({
# 'profit': 10,
# 'user': user,
# 'counter': 1,
# }, user_templatetag_with_profit)
def test_get_reputation_history(self):
"""
Get reputation history
"""
# TODO
def test_get_reputation_change(self):
"""
Get reputation change
"""
# TODO
def test_last_week_reputation_change(self):
"""
Get last week reputation change
"""
# TODO
def test_last_month_reputation_change(self):
"""
Get last month reputation change
"""
# TODO
class PolitikonUserTemplatetagsTestCase(TestCase):
"""
politikon/templatetags
"""
def test_startswith(self):
"""
Startswith
"""
start_path = reverse('accounts:rank')
path = reverse('accounts:rank')
self.assertTrue(startswith(path, start_path))
class UserUtilsTestCase(TestCase):
"""
accounts/utils
"""
def test_process_username(self):
"""
Process username
"""
username = process_username(u"zażółćgęśląjaźń")
self.assertEqual('zazolcgeslajazn', username)
UserFactory(username='zazolcgeslajazn')
username2 = process_username(u"zażółćgęśląjaźń")
self.assertNotEqual('zazolcgeslajazn', username2)
class TeamAccessTokenModelTestCase(TestCase):
def test_distinction_of_tokens(self):
team = Team.objects.create(name='TestTeam')
key1 = TeamAccessKey.objects.create(team=team)
key2 = TeamAccessKey.objects.create(team=team)
self.assertEqual(TeamAccessKey.objects.count(), 2)
self.assertNotEqual(key1.value, key2.value)
self.assertIsNotNone(key1.team)
self.assertIsNotNone(key2.team)
self.assertIs(key1.team, team)
self.assertIs(key2.team, team)
key3 = TeamAccessKey(team=team)
key4 = TeamAccessKey(team=team)
key3.save()
key4.save()
self.assertEqual(TeamAccessKey.objects.count(), 4)
self.assertNotEqual(key3.value, key4.value)
self.assertIsNotNone(key3.team)
self.assertIsNotNone(key4.team)
self.assertIs(key3.team, team)
self.assertIs(key4.team, team)
|
KlubJagiellonski/Politikon
|
accounts/tests.py
|
Python
|
gpl-2.0
| 18,113 | 0.000276 |
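# UserUtilsTestCase above pins down two behaviours of process_username:
# Polish diacritics are transliterated to ASCII, and a clashing username
# must come back altered. A rough sketch under those assumptions (the
# mapping table and suffix scheme are illustrative, not the project's
# actual implementation):
POLISH_TO_ASCII = str.maketrans(
    u"ążśźęćńółĄŻŚŹĘĆŃÓŁ",
    u"azszecnolAZSZECNOL",
)
def process_username_sketch(name, existing=()):
    ascii_name = name.translate(POLISH_TO_ASCII)
    candidate, suffix = ascii_name, 1
    while candidate in existing:  # keep the username unique, as the test requires
        candidate = '%s%d' % (ascii_name, suffix)
        suffix += 1
    return candidate
assert process_username_sketch(u"zażółćgęśląjaźń") == 'zazolcgeslajazn'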
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import cmd
import functools
import os
import pprint
import sys
import threading
import time
from collections import deque
from multiprocessing import Lock
from jinja2.exceptions import UndefinedError
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable
from ansible.executor import action_write_locks
from ansible.executor.process.worker import WorkerProcess
from ansible.executor.task_result import TaskResult
from ansible.inventory.host import Host
from ansible.module_utils.six.moves import queue as Queue
from ansible.module_utils.six import iteritems, itervalues, string_types
from ansible.module_utils._text import to_text
from ansible.module_utils.connection import Connection, ConnectionError
from ansible.playbook.helpers import load_list_of_blocks
from ansible.playbook.included_file import IncludedFile
from ansible.playbook.task_include import TaskInclude
from ansible.playbook.role_include import IncludeRole
from ansible.plugins.loader import action_loader, connection_loader, filter_loader, lookup_loader, module_loader, test_loader
from ansible.template import Templar
from ansible.utils.vars import combine_vars
from ansible.vars.clean import strip_internal_keys
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
__all__ = ['StrategyBase']
class StrategySentinel:
pass
# TODO: this should probably be in the plugins/__init__.py, with
# a smarter mechanism to set all of the attributes based on
# the loaders created there
class SharedPluginLoaderObj:
'''
    A simple object to make passing the various plugin loaders to
    the forked processes over the queue easier
'''
def __init__(self):
self.action_loader = action_loader
self.connection_loader = connection_loader
self.filter_loader = filter_loader
self.test_loader = test_loader
self.lookup_loader = lookup_loader
self.module_loader = module_loader
_sentinel = StrategySentinel()
def results_thread_main(strategy):
while True:
try:
result = strategy._final_q.get()
if isinstance(result, StrategySentinel):
break
else:
strategy._results_lock.acquire()
strategy._results.append(result)
strategy._results_lock.release()
except (IOError, EOFError):
break
except Queue.Empty:
pass
def debug_closure(func):
"""Closure to wrap ``StrategyBase._process_pending_results`` and invoke the task debugger"""
@functools.wraps(func)
def inner(self, iterator, one_pass=False, max_passes=None):
status_to_stats_map = (
('is_failed', 'failures'),
('is_unreachable', 'dark'),
('is_changed', 'changed'),
('is_skipped', 'skipped'),
)
# We don't know the host yet, copy the previous states, for lookup after we process new results
prev_host_states = iterator._host_states.copy()
results = func(self, iterator, one_pass=one_pass, max_passes=max_passes)
_processed_results = []
for result in results:
task = result._task
host = result._host
_queued_task_args = self._queued_task_cache.pop((host.name, task._uuid), None)
task_vars = _queued_task_args['task_vars']
play_context = _queued_task_args['play_context']
# Try to grab the previous host state, if it doesn't exist use get_host_state to generate an empty state
try:
prev_host_state = prev_host_states[host.name]
except KeyError:
prev_host_state = iterator.get_host_state(host)
while result.needs_debugger(globally_enabled=self.debugger_active):
next_action = NextAction()
dbg = Debugger(task, host, task_vars, play_context, result, next_action)
dbg.cmdloop()
if next_action.result == NextAction.REDO:
# rollback host state
self._tqm.clear_failed_hosts()
iterator._host_states[host.name] = prev_host_state
for method, what in status_to_stats_map:
if getattr(result, method)():
self._tqm._stats.decrement(what, host.name)
self._tqm._stats.decrement('ok', host.name)
# redo
self._queue_task(host, task, task_vars, play_context)
_processed_results.extend(debug_closure(func)(self, iterator, one_pass))
break
elif next_action.result == NextAction.CONTINUE:
_processed_results.append(result)
break
elif next_action.result == NextAction.EXIT:
# Matches KeyboardInterrupt from bin/ansible
sys.exit(99)
else:
_processed_results.append(result)
return _processed_results
return inner
class StrategyBase:
'''
This is the base class for strategy plugins, which contains some common
code useful to all strategies like running handlers, cleanup actions, etc.
'''
def __init__(self, tqm):
self._tqm = tqm
self._inventory = tqm.get_inventory()
self._workers = tqm.get_workers()
self._notified_handlers = tqm._notified_handlers
self._listening_handlers = tqm._listening_handlers
self._variable_manager = tqm.get_variable_manager()
self._loader = tqm.get_loader()
self._final_q = tqm._final_q
self._step = getattr(tqm._options, 'step', False)
self._diff = getattr(tqm._options, 'diff', False)
self.flush_cache = getattr(tqm._options, 'flush_cache', False)
# the task cache is a dictionary of tuples of (host.name, task._uuid)
# used to find the original task object of in-flight tasks and to store
# the task args/vars and play context info used to queue the task.
self._queued_task_cache = {}
# Backwards compat: self._display isn't really needed, just import the global display and use that.
self._display = display
# internal counters
self._pending_results = 0
self._cur_worker = 0
# this dictionary is used to keep track of hosts that have
# outstanding tasks still in queue
self._blocked_hosts = dict()
self._results = deque()
self._results_lock = threading.Condition(threading.Lock())
# create the result processing thread for reading results in the background
self._results_thread = threading.Thread(target=results_thread_main, args=(self,))
self._results_thread.daemon = True
self._results_thread.start()
# holds the list of active (persistent) connections to be shutdown at
# play completion
self._active_connections = dict()
self.debugger_active = C.ENABLE_TASK_DEBUGGER
def cleanup(self):
# close active persistent connections
for sock in itervalues(self._active_connections):
try:
conn = Connection(sock)
conn.reset()
except ConnectionError as e:
# most likely socket is already closed
display.debug("got an error while closing persistent connection: %s" % e)
self._final_q.put(_sentinel)
self._results_thread.join()
def run(self, iterator, play_context, result=0):
# execute one more pass through the iterator without peeking, to
# make sure that all of the hosts are advanced to their final task.
# This should be safe, as everything should be ITERATING_COMPLETE by
# this point, though the strategy may not advance the hosts itself.
[iterator.get_next_task_for_host(host) for host in self._inventory.get_hosts(iterator._play.hosts) if host.name not in self._tqm._unreachable_hosts]
# save the failed/unreachable hosts, as the run_handlers()
# method will clear that information during its execution
failed_hosts = iterator.get_failed_hosts()
unreachable_hosts = self._tqm._unreachable_hosts.keys()
display.debug("running handlers")
handler_result = self.run_handlers(iterator, play_context)
if isinstance(handler_result, bool) and not handler_result:
result |= self._tqm.RUN_ERROR
elif not handler_result:
result |= handler_result
# now update with the hosts (if any) that failed or were
# unreachable during the handler execution phase
failed_hosts = set(failed_hosts).union(iterator.get_failed_hosts())
unreachable_hosts = set(unreachable_hosts).union(self._tqm._unreachable_hosts.keys())
# return the appropriate code, depending on the status hosts after the run
if not isinstance(result, bool) and result != self._tqm.RUN_OK:
return result
elif len(unreachable_hosts) > 0:
return self._tqm.RUN_UNREACHABLE_HOSTS
elif len(failed_hosts) > 0:
return self._tqm.RUN_FAILED_HOSTS
else:
return self._tqm.RUN_OK
def get_hosts_remaining(self, play):
return [host for host in self._inventory.get_hosts(play.hosts)
if host.name not in self._tqm._failed_hosts and host.name not in self._tqm._unreachable_hosts]
def get_failed_hosts(self, play):
return [host for host in self._inventory.get_hosts(play.hosts) if host.name in self._tqm._failed_hosts]
def add_tqm_variables(self, vars, play):
'''
Base class method to add extra variables/information to the list of task
vars sent through the executor engine regarding the task queue manager state.
'''
vars['ansible_current_hosts'] = [h.name for h in self.get_hosts_remaining(play)]
vars['ansible_failed_hosts'] = [h.name for h in self.get_failed_hosts(play)]
def _queue_task(self, host, task, task_vars, play_context):
''' handles queueing the task up to be sent to a worker '''
display.debug("entering _queue_task() for %s/%s" % (host.name, task.action))
# Add a write lock for tasks.
# Maybe this should be added somewhere further up the call stack but
# this is the earliest in the code where we have task (1) extracted
# into its own variable and (2) there's only a single code path
# leading to the module being run. This is called by three
# functions: __init__.py::_do_handler_run(), linear.py::run(), and
# free.py::run() so we'd have to add to all three to do it there.
# The next common higher level is __init__.py::run() and that has
# tasks inside of play_iterator so we'd have to extract them to do it
# there.
if task.action not in action_write_locks.action_write_locks:
display.debug('Creating lock for %s' % task.action)
action_write_locks.action_write_locks[task.action] = Lock()
# and then queue the new task
try:
# create a dummy object with plugin loaders set as an easier
# way to share them with the forked processes
shared_loader_obj = SharedPluginLoaderObj()
queued = False
starting_worker = self._cur_worker
while True:
(worker_prc, rslt_q) = self._workers[self._cur_worker]
if worker_prc is None or not worker_prc.is_alive():
self._queued_task_cache[(host.name, task._uuid)] = {
'host': host,
'task': task,
'task_vars': task_vars,
'play_context': play_context
}
worker_prc = WorkerProcess(self._final_q, task_vars, host, task, play_context, self._loader, self._variable_manager, shared_loader_obj)
self._workers[self._cur_worker][0] = worker_prc
worker_prc.start()
display.debug("worker is %d (out of %d available)" % (self._cur_worker + 1, len(self._workers)))
queued = True
self._cur_worker += 1
if self._cur_worker >= len(self._workers):
self._cur_worker = 0
if queued:
break
elif self._cur_worker == starting_worker:
time.sleep(0.0001)
self._pending_results += 1
except (EOFError, IOError, AssertionError) as e:
# most likely an abort
display.debug("got an error while queuing: %s" % e)
return
display.debug("exiting _queue_task() for %s/%s" % (host.name, task.action))
def get_task_hosts(self, iterator, task_host, task):
if task.run_once:
host_list = [host for host in self._inventory.get_hosts(iterator._play.hosts) if host.name not in self._tqm._unreachable_hosts]
else:
host_list = [task_host]
return host_list
def get_delegated_hosts(self, result, task):
host_name = result.get('_ansible_delegated_vars', {}).get('ansible_delegated_host', None)
if host_name is not None:
actual_host = self._inventory.get_host(host_name)
if actual_host is None:
actual_host = Host(name=host_name)
else:
actual_host = Host(name=task.delegate_to)
return [actual_host]
@debug_closure
def _process_pending_results(self, iterator, one_pass=False, max_passes=None):
'''
Reads results off the final queue and takes appropriate action
based on the result (executing callbacks, updating state, etc.).
'''
ret_results = []
def get_original_host(host_name):
# FIXME: this should not need x2 _inventory
host_name = to_text(host_name)
if host_name in self._inventory.hosts:
return self._inventory.hosts[host_name]
else:
return self._inventory.get_host(host_name)
def search_handler_blocks_by_name(handler_name, handler_blocks):
for handler_block in handler_blocks:
for handler_task in handler_block.block:
if handler_task.name:
handler_vars = self._variable_manager.get_vars(play=iterator._play, task=handler_task)
templar = Templar(loader=self._loader, variables=handler_vars)
try:
# first we check with the full result of get_name(), which may
# include the role name (if the handler is from a role). If that
# is not found, we resort to the simple name field, which doesn't
# have anything extra added to it.
target_handler_name = templar.template(handler_task.name)
if target_handler_name == handler_name:
return handler_task
else:
target_handler_name = templar.template(handler_task.get_name())
if target_handler_name == handler_name:
return handler_task
except (UndefinedError, AnsibleUndefinedVariable):
# We skip this handler due to the fact that it may be using
# a variable in the name that was conditionally included via
# set_fact or some other method, and we don't want to error
# out unnecessarily
continue
return None
def search_handler_blocks_by_uuid(handler_uuid, handler_blocks):
for handler_block in handler_blocks:
for handler_task in handler_block.block:
if handler_uuid == handler_task._uuid:
return handler_task
return None
def parent_handler_match(target_handler, handler_name):
if target_handler:
if isinstance(target_handler, (TaskInclude, IncludeRole)):
try:
handler_vars = self._variable_manager.get_vars(play=iterator._play, task=target_handler)
templar = Templar(loader=self._loader, variables=handler_vars)
target_handler_name = templar.template(target_handler.name)
if target_handler_name == handler_name:
return True
else:
target_handler_name = templar.template(target_handler.get_name())
if target_handler_name == handler_name:
return True
except (UndefinedError, AnsibleUndefinedVariable):
pass
return parent_handler_match(target_handler._parent, handler_name)
else:
return False
cur_pass = 0
while True:
try:
self._results_lock.acquire()
task_result = self._results.popleft()
except IndexError:
break
finally:
self._results_lock.release()
# get the original host and task. We then assign them to the TaskResult for use in callbacks/etc.
original_host = get_original_host(task_result._host)
queue_cache_entry = (original_host.name, task_result._task)
found_task = self._queued_task_cache.get(queue_cache_entry)['task']
original_task = found_task.copy(exclude_parent=True, exclude_tasks=True)
original_task._parent = found_task._parent
original_task.from_attrs(task_result._task_fields)
task_result._host = original_host
task_result._task = original_task
# get the correct loop var for use later
if original_task.loop_control:
loop_var = original_task.loop_control.loop_var
else:
loop_var = 'item'
# send callbacks for 'non final' results
if '_ansible_retry' in task_result._result:
self._tqm.send_callback('v2_runner_retry', task_result)
continue
elif '_ansible_item_result' in task_result._result:
if task_result.is_failed() or task_result.is_unreachable():
self._tqm.send_callback('v2_runner_item_on_failed', task_result)
elif task_result.is_skipped():
self._tqm.send_callback('v2_runner_item_on_skipped', task_result)
else:
if 'diff' in task_result._result:
if self._diff:
self._tqm.send_callback('v2_on_file_diff', task_result)
self._tqm.send_callback('v2_runner_item_on_ok', task_result)
continue
if original_task.register:
host_list = self.get_task_hosts(iterator, original_host, original_task)
clean_copy = strip_internal_keys(task_result._result)
if 'invocation' in clean_copy:
del clean_copy['invocation']
for target_host in host_list:
self._variable_manager.set_nonpersistent_facts(target_host, {original_task.register: clean_copy})
# all host status messages contain 2 entries: (msg, task_result)
role_ran = False
if task_result.is_failed():
role_ran = True
ignore_errors = original_task.ignore_errors
if not ignore_errors:
display.debug("marking %s as failed" % original_host.name)
if original_task.run_once:
# if we're using run_once, we have to fail every host here
for h in self._inventory.get_hosts(iterator._play.hosts):
if h.name not in self._tqm._unreachable_hosts:
state, _ = iterator.get_next_task_for_host(h, peek=True)
iterator.mark_host_failed(h)
state, new_task = iterator.get_next_task_for_host(h, peek=True)
else:
iterator.mark_host_failed(original_host)
# increment the failed count for this host
self._tqm._stats.increment('failures', original_host.name)
# grab the current state and if we're iterating on the rescue portion
# of a block then we save the failed task in a special var for use
# within the rescue/always
state, _ = iterator.get_next_task_for_host(original_host, peek=True)
if iterator.is_failed(original_host) and state and state.run_state == iterator.ITERATING_COMPLETE:
self._tqm._failed_hosts[original_host.name] = True
if state and state.run_state == iterator.ITERATING_RESCUE:
self._variable_manager.set_nonpersistent_facts(
original_host,
dict(
ansible_failed_task=original_task.serialize(),
ansible_failed_result=task_result._result,
),
)
else:
self._tqm._stats.increment('ok', original_host.name)
if 'changed' in task_result._result and task_result._result['changed']:
self._tqm._stats.increment('changed', original_host.name)
self._tqm.send_callback('v2_runner_on_failed', task_result, ignore_errors=ignore_errors)
elif task_result.is_unreachable():
self._tqm._unreachable_hosts[original_host.name] = True
iterator._play._removed_hosts.append(original_host.name)
self._tqm._stats.increment('dark', original_host.name)
self._tqm.send_callback('v2_runner_on_unreachable', task_result)
elif task_result.is_skipped():
self._tqm._stats.increment('skipped', original_host.name)
self._tqm.send_callback('v2_runner_on_skipped', task_result)
else:
role_ran = True
if original_task.loop:
# this task had a loop, and has more than one result, so
# loop over all of them instead of a single result
result_items = task_result._result.get('results', [])
else:
result_items = [task_result._result]
for result_item in result_items:
if '_ansible_notify' in result_item:
if task_result.is_changed():
# The shared dictionary for notified handlers is a proxy, which
# does not detect when sub-objects within the proxy are modified.
# So, per the docs, we reassign the list so the proxy picks up and
# notifies all other threads
for handler_name in result_item['_ansible_notify']:
found = False
# Find the handler using the above helper. First we look up the
# dependency chain of the current task (if it's from a role), otherwise
# we just look through the list of handlers in the current play/all
# roles and use the first one that matches the notify name
target_handler = search_handler_blocks_by_name(handler_name, iterator._play.handlers)
if target_handler is not None:
found = True
if target_handler._uuid not in self._notified_handlers:
self._notified_handlers[target_handler._uuid] = []
if original_host not in self._notified_handlers[target_handler._uuid]:
self._notified_handlers[target_handler._uuid].append(original_host)
self._tqm.send_callback('v2_playbook_on_notify', target_handler, original_host)
else:
                                # There may be more than one handler with the notified name as the
                                # parent, so we just keep track of whether or not we found one at all
for target_handler_uuid in self._notified_handlers:
target_handler = search_handler_blocks_by_uuid(target_handler_uuid, iterator._play.handlers)
if target_handler and parent_handler_match(target_handler, handler_name):
found = True
if original_host not in self._notified_handlers[target_handler._uuid]:
self._notified_handlers[target_handler._uuid].append(original_host)
self._tqm.send_callback('v2_playbook_on_notify', target_handler, original_host)
if handler_name in self._listening_handlers:
for listening_handler_uuid in self._listening_handlers[handler_name]:
listening_handler = search_handler_blocks_by_uuid(listening_handler_uuid, iterator._play.handlers)
if listening_handler is not None:
found = True
else:
continue
if original_host not in self._notified_handlers[listening_handler._uuid]:
self._notified_handlers[listening_handler._uuid].append(original_host)
self._tqm.send_callback('v2_playbook_on_notify', listening_handler, original_host)
# and if none were found, then we raise an error
if not found:
msg = ("The requested handler '%s' was not found in either the main handlers list nor in the listening "
"handlers list" % handler_name)
if C.ERROR_ON_MISSING_HANDLER:
raise AnsibleError(msg)
else:
display.warning(msg)
if 'add_host' in result_item:
# this task added a new host (add_host module)
new_host_info = result_item.get('add_host', dict())
self._add_host(new_host_info, iterator)
elif 'add_group' in result_item:
# this task added a new group (group_by module)
self._add_group(original_host, result_item)
if 'ansible_facts' in result_item:
# if delegated fact and we are delegating facts, we need to change target host for them
if original_task.delegate_to is not None and original_task.delegate_facts:
host_list = self.get_delegated_hosts(result_item, original_task)
else:
host_list = self.get_task_hosts(iterator, original_host, original_task)
if original_task.action == 'include_vars':
for (var_name, var_value) in iteritems(result_item['ansible_facts']):
# find the host we're actually referring too here, which may
# be a host that is not really in inventory at all
for target_host in host_list:
self._variable_manager.set_host_variable(target_host, var_name, var_value)
else:
cacheable = result_item.pop('_ansible_facts_cacheable', False)
for target_host in host_list:
if not original_task.action == 'set_fact' or cacheable:
self._variable_manager.set_host_facts(target_host, result_item['ansible_facts'].copy())
if original_task.action == 'set_fact':
self._variable_manager.set_nonpersistent_facts(target_host, result_item['ansible_facts'].copy())
if 'ansible_stats' in result_item and 'data' in result_item['ansible_stats'] and result_item['ansible_stats']['data']:
if 'per_host' not in result_item['ansible_stats'] or result_item['ansible_stats']['per_host']:
host_list = self.get_task_hosts(iterator, original_host, original_task)
else:
host_list = [None]
data = result_item['ansible_stats']['data']
aggregate = 'aggregate' in result_item['ansible_stats'] and result_item['ansible_stats']['aggregate']
for myhost in host_list:
for k in data.keys():
if aggregate:
self._tqm._stats.update_custom_stats(k, data[k], myhost)
else:
self._tqm._stats.set_custom_stats(k, data[k], myhost)
if 'diff' in task_result._result:
if self._diff:
self._tqm.send_callback('v2_on_file_diff', task_result)
if not isinstance(original_task, TaskInclude):
self._tqm._stats.increment('ok', original_host.name)
if 'changed' in task_result._result and task_result._result['changed']:
self._tqm._stats.increment('changed', original_host.name)
# finally, send the ok for this task
self._tqm.send_callback('v2_runner_on_ok', task_result)
self._pending_results -= 1
if original_host.name in self._blocked_hosts:
del self._blocked_hosts[original_host.name]
# If this is a role task, mark the parent role as being run (if
# the task was ok or failed, but not skipped or unreachable)
if original_task._role is not None and role_ran: # TODO: and original_task.action != 'include_role':?
# lookup the role in the ROLE_CACHE to make sure we're dealing
# with the correct object and mark it as executed
for (entry, role_obj) in iteritems(iterator._play.ROLE_CACHE[original_task._role._role_name]):
if role_obj._uuid == original_task._role._uuid:
role_obj._had_task_run[original_host.name] = True
ret_results.append(task_result)
if one_pass or max_passes is not None and (cur_pass + 1) >= max_passes:
break
cur_pass += 1
return ret_results
def _wait_on_pending_results(self, iterator):
'''
Wait for the shared counter to drop to zero, using a short sleep
between checks to ensure we don't spin lock
'''
ret_results = []
display.debug("waiting for pending results...")
while self._pending_results > 0 and not self._tqm._terminated:
if self._tqm.has_dead_workers():
raise AnsibleError("A worker was found in a dead state")
results = self._process_pending_results(iterator)
ret_results.extend(results)
if self._pending_results > 0:
time.sleep(C.DEFAULT_INTERNAL_POLL_INTERVAL)
display.debug("no more pending results, returning what we have")
return ret_results
def _add_host(self, host_info, iterator):
'''
Helper function to add a new host to inventory based on a task result.
'''
if host_info:
host_name = host_info.get('host_name')
# Check if host in inventory, add if not
if host_name not in self._inventory.hosts:
self._inventory.add_host(host_name, 'all')
new_host = self._inventory.hosts.get(host_name)
# Set/update the vars for this host
new_host.vars = combine_vars(new_host.get_vars(), host_info.get('host_vars', dict()))
new_groups = host_info.get('groups', [])
for group_name in new_groups:
if group_name not in self._inventory.groups:
self._inventory.add_group(group_name)
new_group = self._inventory.groups[group_name]
new_group.add_host(self._inventory.hosts[host_name])
# reconcile inventory, ensures inventory rules are followed
self._inventory.reconcile_inventory()
def _add_group(self, host, result_item):
'''
Helper function to add a group (if it does not exist), and to assign the
specified host to that group.
'''
changed = False
# the host here is from the executor side, which means it was a
# serialized/cloned copy and we'll need to look up the proper
# host object from the master inventory
real_host = self._inventory.hosts[host.name]
group_name = result_item.get('add_group')
parent_group_names = result_item.get('parent_groups', [])
for name in [group_name] + parent_group_names:
if name not in self._inventory.groups:
# create the new group and add it to inventory
self._inventory.add_group(name)
changed = True
group = self._inventory.groups[group_name]
for parent_group_name in parent_group_names:
parent_group = self._inventory.groups[parent_group_name]
parent_group.add_child_group(group)
if real_host.name not in group.get_hosts():
group.add_host(real_host)
changed = True
if group_name not in host.get_groups():
real_host.add_group(group)
changed = True
if changed:
self._inventory.reconcile_inventory()
return changed
def _copy_included_file(self, included_file):
'''
A proven safe and performant way to create a copy of an included file
'''
ti_copy = included_file._task.copy(exclude_parent=True)
ti_copy._parent = included_file._task._parent
temp_vars = ti_copy.vars.copy()
temp_vars.update(included_file._args)
ti_copy.vars = temp_vars
return ti_copy
def _load_included_file(self, included_file, iterator, is_handler=False):
'''
Loads an included YAML file of tasks, applying the optional set of variables.
'''
display.debug("loading included file: %s" % included_file._filename)
try:
data = self._loader.load_from_file(included_file._filename)
if data is None:
return []
elif not isinstance(data, list):
raise AnsibleError("included task files must contain a list of tasks")
ti_copy = self._copy_included_file(included_file)
# pop tags out of the include args, if they were specified there, and assign
# them to the include. If the include already had tags specified, we raise an
# error so that users know not to specify them both ways
tags = included_file._task.vars.pop('tags', [])
if isinstance(tags, string_types):
tags = tags.split(',')
if len(tags) > 0:
if len(included_file._task.tags) > 0:
raise AnsibleParserError("Include tasks should not specify tags in more than one way (both via args and directly on the task). "
"Mixing tag specify styles is prohibited for whole import hierarchy, not only for single import statement",
obj=included_file._task._ds)
display.deprecated("You should not specify tags in the include parameters. All tags should be specified using the task-level option")
included_file._task.tags = tags
block_list = load_list_of_blocks(
data,
play=iterator._play,
parent_block=None,
task_include=ti_copy,
role=included_file._task._role,
use_handlers=is_handler,
loader=self._loader,
variable_manager=self._variable_manager,
)
# since we skip incrementing the stats when the task result is
# first processed, we do so now for each host in the list
for host in included_file._hosts:
self._tqm._stats.increment('ok', host.name)
except AnsibleError as e:
# mark all of the hosts including this file as failed, send callbacks,
# and increment the stats for this host
for host in included_file._hosts:
tr = TaskResult(host=host, task=included_file._task, return_data=dict(failed=True, reason=to_text(e)))
iterator.mark_host_failed(host)
self._tqm._failed_hosts[host.name] = True
self._tqm._stats.increment('failures', host.name)
self._tqm.send_callback('v2_runner_on_failed', tr)
return []
# finally, send the callback and return the list of blocks loaded
self._tqm.send_callback('v2_playbook_on_include', included_file)
display.debug("done processing included file")
return block_list
def run_handlers(self, iterator, play_context):
'''
Runs handlers on those hosts which have been notified.
'''
result = self._tqm.RUN_OK
for handler_block in iterator._play.handlers:
# FIXME: handlers need to support the rescue/always portions of blocks too,
# but this may take some work in the iterator and gets tricky when
# we consider the ability of meta tasks to flush handlers
for handler in handler_block.block:
if handler._uuid in self._notified_handlers and len(self._notified_handlers[handler._uuid]):
result = self._do_handler_run(handler, handler.get_name(), iterator=iterator, play_context=play_context)
if not result:
break
return result
def _do_handler_run(self, handler, handler_name, iterator, play_context, notified_hosts=None):
# FIXME: need to use iterator.get_failed_hosts() instead?
# if not len(self.get_hosts_remaining(iterator._play)):
# self._tqm.send_callback('v2_playbook_on_no_hosts_remaining')
# result = False
# break
saved_name = handler.name
handler.name = handler_name
self._tqm.send_callback('v2_playbook_on_handler_task_start', handler)
handler.name = saved_name
if notified_hosts is None:
notified_hosts = self._notified_handlers[handler._uuid]
run_once = False
try:
action = action_loader.get(handler.action, class_only=True)
if handler.run_once or getattr(action, 'BYPASS_HOST_LOOP', False):
run_once = True
except KeyError:
# we don't care here, because the action may simply not have a
# corresponding action plugin
pass
host_results = []
for host in notified_hosts:
if not handler.has_triggered(host) and (not iterator.is_failed(host) or play_context.force_handlers):
task_vars = self._variable_manager.get_vars(play=iterator._play, host=host, task=handler)
self.add_tqm_variables(task_vars, play=iterator._play)
self._queue_task(host, handler, task_vars, play_context)
if run_once:
break
# collect the results from the handler run
host_results = self._wait_on_pending_results(iterator)
try:
included_files = IncludedFile.process_include_results(
host_results,
iterator=iterator,
loader=self._loader,
variable_manager=self._variable_manager
)
except AnsibleError as e:
return False
result = True
if len(included_files) > 0:
for included_file in included_files:
try:
new_blocks = self._load_included_file(included_file, iterator=iterator, is_handler=True)
# for every task in each block brought in by the include, add the list
# of hosts which included the file to the notified_handlers dict
for block in new_blocks:
iterator._play.handlers.append(block)
iterator.cache_block_tasks(block)
for task in block.block:
result = self._do_handler_run(
handler=task,
handler_name=task.get_name(),
iterator=iterator,
play_context=play_context,
notified_hosts=included_file._hosts[:],
)
if not result:
break
except AnsibleError as e:
for host in included_file._hosts:
iterator.mark_host_failed(host)
self._tqm._failed_hosts[host.name] = True
display.warning(str(e))
continue
# wipe the notification list
self._notified_handlers[handler._uuid] = []
display.debug("done running handlers, result is: %s" % result)
return result
def _take_step(self, task, host=None):
ret = False
msg = u'Perform task: %s ' % task
if host:
msg += u'on %s ' % host
msg += u'(N)o/(y)es/(c)ontinue: '
resp = display.prompt(msg)
if resp.lower() in ['y', 'yes']:
display.debug("User ran task")
ret = True
elif resp.lower() in ['c', 'continue']:
display.debug("User ran task and canceled step mode")
self._step = False
ret = True
else:
display.debug("User skipped task")
display.banner(msg)
return ret
def _execute_meta(self, task, play_context, iterator, target_host):
# meta tasks store their args in the _raw_params field of args,
# since they do not use k=v pairs, so get that
meta_action = task.args.get('_raw_params')
# FIXME(s):
# * raise an error or show a warning when a conditional is used
# on a meta task that doesn't support them
def _evaluate_conditional(h):
all_vars = self._variable_manager.get_vars(play=iterator._play, host=h, task=task)
templar = Templar(loader=self._loader, variables=all_vars)
return task.evaluate_conditional(templar, all_vars)
skipped = False
msg = ''
if meta_action == 'noop':
# FIXME: issue a callback for the noop here?
msg = "noop"
elif meta_action == 'flush_handlers':
self.run_handlers(iterator, play_context)
msg = "ran handlers"
elif meta_action == 'refresh_inventory' or self.flush_cache:
self._inventory.refresh_inventory()
msg = "inventory successfully refreshed"
elif meta_action == 'clear_facts':
if _evaluate_conditional(target_host):
for host in self._inventory.get_hosts(iterator._play.hosts):
hostname = host.get_name()
self._variable_manager.clear_facts(hostname)
msg = "facts cleared"
else:
skipped = True
elif meta_action == 'clear_host_errors':
if _evaluate_conditional(target_host):
for host in self._inventory.get_hosts(iterator._play.hosts):
self._tqm._failed_hosts.pop(host.name, False)
self._tqm._unreachable_hosts.pop(host.name, False)
iterator._host_states[host.name].fail_state = iterator.FAILED_NONE
msg = "cleared host errors"
else:
skipped = True
elif meta_action == 'end_play':
if _evaluate_conditional(target_host):
for host in self._inventory.get_hosts(iterator._play.hosts):
if host.name not in self._tqm._unreachable_hosts:
iterator._host_states[host.name].run_state = iterator.ITERATING_COMPLETE
msg = "ending play"
elif meta_action == 'reset_connection':
if target_host in self._active_connections:
connection = Connection(self._active_connections[target_host])
del self._active_connections[target_host]
else:
connection = connection_loader.get(play_context.connection, play_context, os.devnull)
play_context.set_options_from_plugin(connection)
if connection:
try:
connection.reset()
msg = 'reset connection'
except ConnectionError as e:
# most likely socket is already closed
display.debug("got an error while closing persistent connection: %s" % e)
else:
msg = 'no connection, nothing to reset'
else:
raise AnsibleError("invalid meta action requested: %s" % meta_action, obj=task._ds)
result = {'msg': msg}
if skipped:
result['skipped'] = True
else:
result['changed'] = False
display.vv("META: %s" % msg)
return [TaskResult(target_host, task, result)]
def get_hosts_left(self, iterator):
''' returns list of available hosts for this iterator by filtering out unreachables '''
hosts_left = []
for host in self._inventory.get_hosts(iterator._play.hosts, order=iterator._play.order):
if host.name not in self._tqm._unreachable_hosts:
hosts_left.append(host)
return hosts_left
def update_active_connections(self, results):
''' updates the current active persistent connections '''
for r in results:
if 'args' in r._task_fields:
socket_path = r._task_fields['args'].get('_ansible_socket')
if socket_path:
if r._host not in self._active_connections:
self._active_connections[r._host] = socket_path
class NextAction(object):
""" The next action after an interpreter's exit. """
REDO = 1
CONTINUE = 2
EXIT = 3
def __init__(self, result=EXIT):
self.result = result
class Debugger(cmd.Cmd):
prompt_continuous = '> ' # multiple lines
def __init__(self, task, host, task_vars, play_context, result, next_action):
# cmd.Cmd is old-style class
cmd.Cmd.__init__(self)
self.prompt = '[%s] %s (debug)> ' % (host, task)
self.intro = None
self.scope = {}
self.scope['task'] = task
self.scope['task_vars'] = task_vars
self.scope['host'] = host
self.scope['play_context'] = play_context
self.scope['result'] = result
self.next_action = next_action
def cmdloop(self):
try:
cmd.Cmd.cmdloop(self)
except KeyboardInterrupt:
pass
do_h = cmd.Cmd.do_help
def do_EOF(self, args):
"""Quit"""
return self.do_quit(args)
def do_quit(self, args):
"""Quit"""
display.display('User interrupted execution')
self.next_action.result = NextAction.EXIT
return True
do_q = do_quit
def do_continue(self, args):
"""Continue to next result"""
self.next_action.result = NextAction.CONTINUE
return True
do_c = do_continue
def do_redo(self, args):
"""Schedule task for re-execution. The re-execution may not be the next result"""
self.next_action.result = NextAction.REDO
return True
do_r = do_redo
def evaluate(self, args):
try:
return eval(args, globals(), self.scope)
except Exception:
t, v = sys.exc_info()[:2]
if isinstance(t, str):
exc_type_name = t
else:
exc_type_name = t.__name__
display.display('***%s:%s' % (exc_type_name, repr(v)))
raise
def do_pprint(self, args):
"""Pretty Print"""
try:
result = self.evaluate(args)
display.display(pprint.pformat(result))
except Exception:
pass
do_p = do_pprint
def execute(self, args):
try:
code = compile(args + '\n', '<stdin>', 'single')
exec(code, globals(), self.scope)
except Exception:
t, v = sys.exc_info()[:2]
if isinstance(t, str):
exc_type_name = t
else:
exc_type_name = t.__name__
display.display('***%s:%s' % (exc_type_name, repr(v)))
raise
def default(self, line):
try:
self.execute(line)
except Exception:
pass
|
ptisserand/ansible
|
lib/ansible/plugins/strategy/__init__.py
|
Python
|
gpl-3.0
| 51,787 | 0.002819 |
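# The debug_closure decorator above drives Ansible's task debugger; a
# compact sketch of that control flow, with `requeue` and `ask` as
# hypothetical callables standing in for task re-queueing and
# Debugger.cmdloop() (this is the shape of the pattern, not the real API):
import functools
REDO, CONTINUE, EXIT = range(3)
def with_debugger(process, requeue, ask):
    @functools.wraps(process)
    def inner(pending):
        kept = []
        for result in process(pending):
            action = ask(result)             # interactive decision per result
            if action == REDO:
                requeue(result)              # roll back state, run the task again
                kept.extend(inner(pending))  # then process the re-run's results
                break
            if action == EXIT:
                raise SystemExit(99)         # same exit code the module uses
            kept.append(result)              # CONTINUE: keep the result as-is
        return kept
    return inner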
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# namedtuple is needed for find_mod_objs so it can have a non-local module
from collections import namedtuple
from unittest import mock
import pytest
import yaml
from astropy.utils import introspection
from astropy.utils.introspection import (find_current_module, find_mod_objs,
minversion)
def test_pkg_finder():
"""
Tests that the `find_current_module` function works. Note that
this also implicitly tests compat.misc._patched_getmodule
"""
mod1 = 'astropy.utils.introspection'
mod2 = 'astropy.utils.tests.test_introspection'
mod3 = 'astropy.utils.tests.test_introspection'
assert find_current_module(0).__name__ == mod1
assert find_current_module(1).__name__ == mod2
assert find_current_module(0, True).__name__ == mod3
def test_find_current_mod():
from sys import getrecursionlimit
thismodnm = __name__
assert find_current_module(0) is introspection
assert find_current_module(1).__name__ == thismodnm
assert find_current_module(getrecursionlimit() + 1) is None
assert find_current_module(0, True).__name__ == thismodnm
assert find_current_module(0, [introspection]).__name__ == thismodnm
assert find_current_module(0, ['astropy.utils.introspection']).__name__ == thismodnm
with pytest.raises(ImportError):
find_current_module(0, ['faddfdsasewrweriopunjlfiurrhujnkflgwhu'])
def test_find_mod_objs():
lnms, fqns, objs = find_mod_objs('astropy')
# this import is after the above call intentionally to make sure
# find_mod_objs properly imports astropy on its own
import astropy
# just check for astropy.test ... other things might be added, so we
# shouldn't check that it's the only thing
assert 'test' in lnms
assert astropy.test in objs
lnms, fqns, objs = find_mod_objs(__name__, onlylocals=False)
assert 'namedtuple' in lnms
assert 'collections.namedtuple' in fqns
assert namedtuple in objs
lnms, fqns, objs = find_mod_objs(__name__, onlylocals=True)
assert 'namedtuple' not in lnms
assert 'collections.namedtuple' not in fqns
assert namedtuple not in objs
def test_minversion():
import numpy
good_versions = ['1.16', '1.16.1', '1.16.0.dev', '1.16dev']
bad_versions = ['100000', '100000.2rc1']
for version in good_versions:
assert minversion(numpy, version)
assert minversion("numpy", version)
for version in bad_versions:
assert not minversion(numpy, version)
assert not minversion("numpy", version)
assert minversion(yaml, '3.1')
assert minversion('yaml', '3.1')
def test_find_current_module_bundle():
"""
Tests that the `find_current_module` function would work if used inside
an application bundle. Since we can't test this directly, we test what
would happen if inspect.getmodule returned `None`, which is what happens
inside PyInstaller and py2app bundles.
"""
with mock.patch('inspect.getmodule', return_value=None):
mod1 = 'astropy.utils.introspection'
mod2 = 'astropy.utils.tests.test_introspection'
mod3 = 'astropy.utils.tests.test_introspection'
assert find_current_module(0).__name__ == mod1
assert find_current_module(1).__name__ == mod2
assert find_current_module(0, True).__name__ == mod3
|
pllim/astropy
|
astropy/utils/tests/test_introspection.py
|
Python
|
bsd-3-clause
| 3,422 | 0.000292 |
"""Class for storing shared keys."""
from utils.cryptomath import *
from utils.compat import *
from mathtls import *
from Session import Session
from BaseDB import BaseDB
class SharedKeyDB(BaseDB):
"""This class represent an in-memory or on-disk database of shared
keys.
A SharedKeyDB can be passed to a server handshake function to
authenticate a client based on one of the shared keys.
This class is thread-safe.
"""
def __init__(self, filename=None):
"""Create a new SharedKeyDB.
@type filename: str
@param filename: Filename for an on-disk database, or None for
an in-memory database. If the filename already exists, follow
this with a call to open(). To create a new on-disk database,
follow this with a call to create().
"""
BaseDB.__init__(self, filename, "shared key")
def _getItem(self, username, valueStr):
session = Session()
session._createSharedKey(username, valueStr)
return session
def __setitem__(self, username, sharedKey):
"""Add a shared key to the database.
@type username: str
@param username: The username to associate the shared key with.
Must be less than or equal to 16 characters in length, and must
not already be in the database.
@type sharedKey: str
@param sharedKey: The shared key to add. Must be less than 48
characters in length.
"""
BaseDB.__setitem__(self, username, sharedKey)
def _setItem(self, username, value):
if len(username)>16:
raise ValueError("username too long")
if len(value)>=48:
raise ValueError("shared key too long")
return value
def _checkItem(self, value, username, param):
newSession = self._getItem(username, param)
return value.masterSecret == newSession.masterSecret
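# Usage sketch (follows the BaseDB create()/open() workflow assumed above):
#     db = SharedKeyDB("sharedkeys.db")
#     db.create()                    # or db.open() for an existing file
#     db["alice"] = "my-shared-key"  # username <= 16 chars, key < 48 chars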
|
edisonlz/fruit
|
web_project/base/site-packages/gdata/tlslite/SharedKeyDB.py
|
Python
|
apache-2.0
| 1,914 | 0.00209 |
# -*- coding: utf-8 -*-
#
# Copyright © 2011 Pierre Raybaut
# Licensed under the terms of the MIT License
# (see spyderlib/__init__.py for details)
"""IPython v0.11+ Plugin"""
from spyderlib.qt.QtGui import QHBoxLayout
# Local imports
from spyderlib.widgets.ipython import create_widget
from spyderlib.plugins import SpyderPluginWidget
class IPythonPlugin(SpyderPluginWidget):
"""Find in files DockWidget"""
CONF_SECTION = 'ipython'
def __init__(self, parent, args, kernel_widget, kernel_name):
super(IPythonPlugin, self).__init__(parent)
self.kernel_widget = kernel_widget
self.kernel_name = kernel_name
self.ipython_widget = create_widget(argv=args.split())
layout = QHBoxLayout()
layout.addWidget(self.ipython_widget)
self.setLayout(layout)
# Initialize plugin
self.initialize_plugin()
def toggle(self, state):
"""Toggle widget visibility"""
if self.dockwidget:
self.dockwidget.setVisible(state)
#------ SpyderPluginWidget API ---------------------------------------------
def get_plugin_title(self):
"""Return widget title"""
return "IPython (%s) - Experimental!" % self.kernel_name
def get_focus_widget(self):
"""
Return the widget to give focus to when
this plugin's dockwidget is raised on top-level
"""
return self.ipython_widget._control
def get_plugin_actions(self):
"""Return a list of actions related to plugin"""
return []
def register_plugin(self):
"""Register plugin in Spyder's main window"""
self.main.add_dockwidget(self)
def refresh_plugin(self):
"""Refresh widget"""
pass
def closing_plugin(self, cancelable=False):
"""Perform actions before parent main window is closed"""
return True
|
jromang/retina-old
|
distinclude/spyderlib/plugins/ipython.py
|
Python
|
gpl-3.0
| 2,012 | 0.006464 |
# Orca
#
# Copyright 2006-2009 Sun Microsystems Inc.
# Copyright 2010 Joanmarie Diggs
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., Franklin Street, Fifth Floor,
# Boston MA 02110-1301 USA.
__id__ = "$Id$"
__version__ = "$Revision$"
__date__ = "$Date$"
__copyright__ = "Copyright (c) 2005-2009 Sun Microsystems Inc., " \
"Copyright (c) 2010 Joanmarie Diggs"
__license__ = "LGPL"
import pyatspi
import orca.scripts.default as default
import orca.input_event as input_event
import orca.orca as orca
import orca.orca_state as orca_state
from .script_utilities import Utilities
from .speech_generator import SpeechGenerator
from .formatting import Formatting
########################################################################
# #
# The Java script class. #
# #
########################################################################
class Script(default.Script):
def __init__(self, app):
"""Creates a new script for Java applications.
Arguments:
- app: the application to create a script for.
"""
default.Script.__init__(self, app)
# Some objects which issue descendant changed events lack
# STATE_MANAGES_DESCENDANTS. As a result, onSelectionChanged
# doesn't ignore these objects. That in turn causes Orca to
# double-speak some items and/or set the locusOfFocus to a
# parent it shouldn't. See bgo#616582. [[[TODO - JD: remove
# this hack if and when we get a fix for that bug]]]
#
self.lastDescendantChangedSource = None
def getSpeechGenerator(self):
"""Returns the speech generator for this script."""
return SpeechGenerator(self)
def getFormatting(self):
"""Returns the formatting strings for this script."""
return Formatting(self)
def getUtilities(self):
"""Returns the utilites for this script."""
return Utilities(self)
def checkKeyboardEventData(self, keyboardEvent):
"""Checks the data on the keyboard event.
Some toolkits don't fill all the key event fields, and/or fills
them out with unexpected data. This method tries to fill in the
missing fields and validate/standardize the data we've been given.
While any script can override this method, it is expected that
this will only be done at the toolkit script level.
Arguments:
- keyboardEvent: an instance of input_event.KeyboardEvent
"""
default.Script.checkKeyboardEventData(self, keyboardEvent)
if not keyboardEvent.keyval_name:
return
from gi.repository import Gdk
keymap = Gdk.Keymap.get_default()
keyval = Gdk.keyval_from_name(keyboardEvent.keyval_name)
success, entries = keymap.get_entries_for_keyval(keyval)
for entry in entries:
if entry.group == 0:
keyboardEvent.hw_code = entry.keycode
break
# Put the event_string back to what it was prior to the Java
# Atk Wrapper hack which gives us the keyname and not the
# expected and needed printable character for punctuation
# marks.
#
if keyboardEvent.event_string == keyboardEvent.keyval_name \
and len(keyboardEvent.event_string) > 1:
keyval = Gdk.keyval_from_name(keyboardEvent.keyval_name)
if 0 < keyval < 256:
keyboardEvent.event_string = chr(keyval)
def onCaretMoved(self, event):
# Java's SpinButtons are the most caret movement happy thing
# I've seen to date. If you Up or Down on the keyboard to
# change the value, they typically emit three caret movement
# events, first to the beginning, then to the end, and then
# back to the beginning. It's a very excitable little widget.
# Luckily, it only issues one value changed event. So, we'll
# ignore caret movement events caused by value changes and
# just process the single value changed event.
#
isSpinBox = self.utilities.hasMatchingHierarchy(
event.source, [pyatspi.ROLE_TEXT,
pyatspi.ROLE_PANEL,
pyatspi.ROLE_SPIN_BUTTON])
if isSpinBox:
eventStr, mods = self.utilities.lastKeyAndModifiers()
if eventStr in ["Up", "Down"] or isinstance(
orca_state.lastInputEvent, input_event.MouseButtonEvent):
return
default.Script.onCaretMoved(self, event)
def onSelectionChanged(self, event):
"""Called when an object's selection changes.
Arguments:
- event: the Event
"""
# Avoid doing this with objects that manage their descendants
# because they'll issue a descendant changed event. (Note: This
# equality check is intentional; utilities.isSameObject() is
# especially thorough with trees and tables, which is not
        # performant.)
#
if event.source == self.lastDescendantChangedSource:
return
# We treat selected children as the locus of focus. When the
# selection changes in a list we want to update the locus of
# focus. If there is no selection, we default the locus of
# focus to the containing object.
#
if (event.source.getRole() in [pyatspi.ROLE_LIST,
pyatspi.ROLE_PAGE_TAB_LIST,
pyatspi.ROLE_TREE]) \
and event.source.getState().contains(pyatspi.STATE_FOCUSED):
newFocus = event.source
if event.source.childCount:
selection = event.source.querySelection()
if selection.nSelectedChildren > 0:
newFocus = selection.getSelectedChild(0)
orca.setLocusOfFocus(event, newFocus)
else:
default.Script.onSelectionChanged(self, event)
def onFocusedChanged(self, event):
"""Callback for object:state-changed:focused accessibility events."""
if not event.detail1:
return
obj = event.source
role = obj.getRole()
# Accessibility support for menus in Java is badly broken: Missing
# events, missing states, bogus events from other objects, etc.
# Therefore if we get an event, however broken, for menus or their
# their items that suggests they are selected, we'll just cross our
# fingers and hope that's true.
menuRoles = [pyatspi.ROLE_MENU,
pyatspi.ROLE_MENU_BAR,
pyatspi.ROLE_MENU_ITEM,
pyatspi.ROLE_CHECK_MENU_ITEM,
pyatspi.ROLE_RADIO_MENU_ITEM,
pyatspi.ROLE_POPUP_MENU]
if role in menuRoles or obj.parent.getRole() in menuRoles:
orca.setLocusOfFocus(event, obj)
return
try:
focusRole = orca_state.locusOfFocus.getRole()
except:
focusRole = None
if focusRole in menuRoles and role == pyatspi.ROLE_ROOT_PANE:
return
default.Script.onFocusedChanged(self, event)
def onValueChanged(self, event):
"""Called whenever an object's value changes.
Arguments:
- event: the Event
"""
# We'll ignore value changed events for Java's toggle buttons since
# they also send a redundant object:state-changed:checked event.
#
ignoreRoles = [pyatspi.ROLE_TOGGLE_BUTTON,
pyatspi.ROLE_RADIO_BUTTON,
pyatspi.ROLE_CHECK_BOX]
if event.source.getRole() in ignoreRoles:
return
# Java's SpinButtons are the most caret movement happy thing
# I've seen to date. If you Up or Down on the keyboard to
# change the value, they typically emit three caret movement
# events, first to the beginning, then to the end, and then
# back to the beginning. It's a very excitable little widget.
# Luckily, it only issues one value changed event. So, we'll
# ignore caret movement events caused by value changes and
# just process the single value changed event.
#
if event.source.getRole() == pyatspi.ROLE_SPIN_BUTTON:
try:
thisBox = orca_state.locusOfFocus.parent.parent == event.source
except:
thisBox = False
if thisBox:
self._presentTextAtNewCaretPosition(event,
orca_state.locusOfFocus)
return
default.Script.onValueChanged(self, event)
def skipObjectEvent(self, event):
# Accessibility support for menus in Java is badly broken. One problem
# is bogus focus claims following menu-related focus claims. Therefore
# in this particular toolkit, we mustn't skip events for menus.
menuRoles = [pyatspi.ROLE_MENU,
pyatspi.ROLE_MENU_BAR,
pyatspi.ROLE_MENU_ITEM,
pyatspi.ROLE_CHECK_MENU_ITEM,
pyatspi.ROLE_RADIO_MENU_ITEM,
pyatspi.ROLE_POPUP_MENU]
if event.source.getRole() in menuRoles:
return False
return default.Script.skipObjectEvent(self, event)
|
ruibarreira/linuxtrail
|
usr/lib/python3/dist-packages/orca/scripts/toolkits/J2SE-access-bridge/script.py
|
Python
|
gpl-3.0
| 10,245 | 0.001659 |
default_app_config = 'mtr.sync.apps.MtrSyncConfig'
|
mtrgroup/django-mtr-sync
|
mtr/sync/__init__.py
|
Python
|
mit
| 51 | 0 |
#!/bin/python3
import argparse
import code
import readline
import signal
import sys
import capstone
from load import ELF
def SigHandler_SIGINT(signum, frame):
print()
sys.exit(0)
class Argparser(object):
def __init__(self):
parser = argparse.ArgumentParser()
parser.add_argument("--arglist", nargs="+", type=str, help="list of args")
parser.add_argument("--hex", action="store_true", help="generate hex(string) code, otherwise generate int", default=False)
self.args = parser.parse_args()
self.code = {}
class Call_Rewriter(object):
    def __init__(self, obj_code, arch, mode):
        self.obj_code = obj_code
        # e.g. capstone.Cs(capstone.CS_ARCH_X86, capstone.CS_MODE_64)
        self.md = capstone.Cs(arch, mode)
    def run(self):
        for i in self.md.disasm(self.obj_code, 0x0):
            print("0x%x:\t%s\t%s" % (i.address, i.mnemonic, i.op_str))
class Global_Rewriter(object):
def __init__(self):
pass
# Main is here
def premain():
signal.signal(signal.SIGINT, SigHandler_SIGINT)
argparser = Argparser()
# write code here
###############################################################################
def main():
try:
premain()
except:
variables = globals().copy()
variables.update(locals())
shell = code.InteractiveConsole(variables)
shell.interact(banner="CALL REWRITER DEBUG REPL")
if __name__ == "__main__":
main()
|
bloodstalker/mutator
|
bfd/codegen.py
|
Python
|
gpl-3.0
| 1,436 | 0.008357 |
import vk
import json
from sentiment_classifiers import SentimentClassifier, binary_dict, files
class VkFeatureProvider(object):
def __init__(self):
self._vk_api = vk.API(vk.Session())
self._vk_delay = 0.3
self._clf = SentimentClassifier(files['binary_goods'], binary_dict)
def _vk_grace(self):
import time
time.sleep(self._vk_delay)
def get_news(self, sources, amount=10):
        # entry point for Alex's analysis tool
result = []
for source in sources:
try:
data = self._vk_api.wall.get(domain=source, count=amount, extended=1, fields='name')
self._vk_grace()
except:
return {}
news = []
for node in data['wall'][1:]:
try:
if node['post_type'] != 'post':
continue
text = node['text']
#print('{}'.format(text.encode('utf-8')))
rate = self._clf.predict_text(text)[0]
news.append({'text' : '{}'.format(text.encode('utf-8')), 'rate' : rate})
except Exception as e:
print('Exception: {}'.format(e))
result.append({'source': data['groups'][0]['name'], 'news': news})
#return json.dumps(result)
return result
# NOTE: the completely other feature, very usefull personally for me
def friends_intersect(self, uid_list):
result = None
try:
result = set(self._vk_api.friends.get(user_id=uid_list[0]))
self._vk_grace()
except:
pass
for i, uid in enumerate(uid_list[1:]):
try:
tmp = set(self._vk_api.friends.get(user_id=uid))
self._vk_grace()
except:
continue
if result is not None:
result = result.intersection(tmp)
else:
result = tmp
return result
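    # Example with hypothetical uids: friends_intersect([111, 222]) returns
    # the set of user ids present in both users' friend lists.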
def get_user_info(self, entry_uid, fname=None, lname=None):
try:
friend_list = self._vk_api.friends.get(user_id=entry_uid, fields='personal', name_case='nom')
self._vk_grace()
except:
return []
return [x for x in friend_list
if (not fname or fname in x['first_name']) and (not lname or lname in x['last_name'])]
def get_uid_set_info(self, uid_set):
result = []
for friend_uid in uid_set:
try:
friend = self._vk_api.users.get(user_id=friend_uid, fields='sex,personal', name_case='nom')
self._vk_grace()
except:
continue
result.append(friend)
return result
if __name__ == '__main__':
provider = VkFeatureProvider()
res = provider.get_news(['scientific.american'], 5)
print(res)
|
ArtemMIPT/sentiment_analysis
|
vk_parser.py
|
Python
|
mit
| 2,902 | 0.005513 |
"""SCons.Tool.FortranCommon
Stuff for processing Fortran, common to all fortran dialects.
"""
#
# Copyright (c) 2001 - 2015 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/FortranCommon.py rel_2.4.0:3365:9259ea1c13d7 2015/09/21 14:03:43 bdbaddog"
import re
import os.path
import SCons.Action
import SCons.Defaults
import SCons.Scanner.Fortran
import SCons.Tool
import SCons.Util
def isfortran(env, source):
"""Return 1 if any of code in source has fortran files in it, 0
otherwise."""
try:
fsuffixes = env['FORTRANSUFFIXES']
except KeyError:
# If no FORTRANSUFFIXES, no fortran tool, so there is no need to look
# for fortran sources.
return 0
if not source:
# Source might be None for unusual cases like SConf.
return 0
for s in source:
if s.sources:
ext = os.path.splitext(str(s.sources[0]))[1]
if ext in fsuffixes:
return 1
return 0
def _fortranEmitter(target, source, env):
node = source[0].rfile()
if not node.exists() and not node.is_derived():
print "Could not locate " + str(node.name)
return ([], [])
mod_regex = """(?i)^\s*MODULE\s+(?!PROCEDURE)(\w+)"""
cre = re.compile(mod_regex,re.M)
# Retrieve all USE'd module names
modules = cre.findall(node.get_text_contents())
# Remove unique items from the list
modules = SCons.Util.unique(modules)
# Convert module name to a .mod filename
suffix = env.subst('$FORTRANMODSUFFIX', target=target, source=source)
moddir = env.subst('$FORTRANMODDIR', target=target, source=source)
modules = [x.lower() + suffix for x in modules]
for m in modules:
target.append(env.fs.File(m, moddir))
return (target, source)
def FortranEmitter(target, source, env):
target, source = _fortranEmitter(target, source, env)
return SCons.Defaults.StaticObjectEmitter(target, source, env)
def ShFortranEmitter(target, source, env):
target, source = _fortranEmitter(target, source, env)
return SCons.Defaults.SharedObjectEmitter(target, source, env)
def ComputeFortranSuffixes(suffixes, ppsuffixes):
"""suffixes are fortran source files, and ppsuffixes the ones to be
pre-processed. Both should be sequences, not strings."""
assert len(suffixes) > 0
s = suffixes[0]
sup = s.upper()
upper_suffixes = [_.upper() for _ in suffixes]
if SCons.Util.case_sensitive_suffixes(s, sup):
ppsuffixes.extend(upper_suffixes)
else:
suffixes.extend(upper_suffixes)
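# Sketch: ComputeFortranSuffixes(['.f90'], ['.fpp']) appends the upper-case
# variants in place -- to ppsuffixes on case-sensitive filesystems, otherwise
# to suffixes -- so '.F90' sources get routed to the pre-processing actions.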
def CreateDialectActions(dialect):
"""Create dialect specific actions."""
CompAction = SCons.Action.Action('$%sCOM ' % dialect, '$%sCOMSTR' % dialect)
CompPPAction = SCons.Action.Action('$%sPPCOM ' % dialect, '$%sPPCOMSTR' % dialect)
ShCompAction = SCons.Action.Action('$SH%sCOM ' % dialect, '$SH%sCOMSTR' % dialect)
ShCompPPAction = SCons.Action.Action('$SH%sPPCOM ' % dialect, '$SH%sPPCOMSTR' % dialect)
return CompAction, CompPPAction, ShCompAction, ShCompPPAction
def DialectAddToEnv(env, dialect, suffixes, ppsuffixes, support_module = 0):
"""Add dialect specific construction variables."""
ComputeFortranSuffixes(suffixes, ppsuffixes)
fscan = SCons.Scanner.Fortran.FortranScan("%sPATH" % dialect)
for suffix in suffixes + ppsuffixes:
SCons.Tool.SourceFileScanner.add_scanner(suffix, fscan)
env.AppendUnique(FORTRANSUFFIXES = suffixes + ppsuffixes)
compaction, compppaction, shcompaction, shcompppaction = \
CreateDialectActions(dialect)
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
for suffix in suffixes:
static_obj.add_action(suffix, compaction)
shared_obj.add_action(suffix, shcompaction)
static_obj.add_emitter(suffix, FortranEmitter)
shared_obj.add_emitter(suffix, ShFortranEmitter)
for suffix in ppsuffixes:
static_obj.add_action(suffix, compppaction)
shared_obj.add_action(suffix, shcompppaction)
static_obj.add_emitter(suffix, FortranEmitter)
shared_obj.add_emitter(suffix, ShFortranEmitter)
if '%sFLAGS' % dialect not in env:
env['%sFLAGS' % dialect] = SCons.Util.CLVar('')
if 'SH%sFLAGS' % dialect not in env:
env['SH%sFLAGS' % dialect] = SCons.Util.CLVar('$%sFLAGS' % dialect)
# If a tool does not define fortran prefix/suffix for include path, use C ones
if 'INC%sPREFIX' % dialect not in env:
env['INC%sPREFIX' % dialect] = '$INCPREFIX'
if 'INC%sSUFFIX' % dialect not in env:
env['INC%sSUFFIX' % dialect] = '$INCSUFFIX'
env['_%sINCFLAGS' % dialect] = '$( ${_concat(INC%sPREFIX, %sPATH, INC%sSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)' % (dialect, dialect, dialect)
if support_module == 1:
env['%sCOM' % dialect] = '$%s -o $TARGET -c $%sFLAGS $_%sINCFLAGS $_FORTRANMODFLAG $SOURCES' % (dialect, dialect, dialect)
env['%sPPCOM' % dialect] = '$%s -o $TARGET -c $%sFLAGS $CPPFLAGS $_CPPDEFFLAGS $_%sINCFLAGS $_FORTRANMODFLAG $SOURCES' % (dialect, dialect, dialect)
env['SH%sCOM' % dialect] = '$SH%s -o $TARGET -c $SH%sFLAGS $_%sINCFLAGS $_FORTRANMODFLAG $SOURCES' % (dialect, dialect, dialect)
env['SH%sPPCOM' % dialect] = '$SH%s -o $TARGET -c $SH%sFLAGS $CPPFLAGS $_CPPDEFFLAGS $_%sINCFLAGS $_FORTRANMODFLAG $SOURCES' % (dialect, dialect, dialect)
else:
env['%sCOM' % dialect] = '$%s -o $TARGET -c $%sFLAGS $_%sINCFLAGS $SOURCES' % (dialect, dialect, dialect)
env['%sPPCOM' % dialect] = '$%s -o $TARGET -c $%sFLAGS $CPPFLAGS $_CPPDEFFLAGS $_%sINCFLAGS $SOURCES' % (dialect, dialect, dialect)
env['SH%sCOM' % dialect] = '$SH%s -o $TARGET -c $SH%sFLAGS $_%sINCFLAGS $SOURCES' % (dialect, dialect, dialect)
env['SH%sPPCOM' % dialect] = '$SH%s -o $TARGET -c $SH%sFLAGS $CPPFLAGS $_CPPDEFFLAGS $_%sINCFLAGS $SOURCES' % (dialect, dialect, dialect)
def add_fortran_to_env(env):
"""Add Builders and construction variables for Fortran to an Environment."""
try:
FortranSuffixes = env['FORTRANFILESUFFIXES']
except KeyError:
FortranSuffixes = ['.f', '.for', '.ftn']
#print "Adding %s to fortran suffixes" % FortranSuffixes
try:
FortranPPSuffixes = env['FORTRANPPFILESUFFIXES']
except KeyError:
FortranPPSuffixes = ['.fpp', '.FPP']
DialectAddToEnv(env, "FORTRAN", FortranSuffixes,
FortranPPSuffixes, support_module = 1)
env['FORTRANMODPREFIX'] = '' # like $LIBPREFIX
env['FORTRANMODSUFFIX'] = '.mod' # like $LIBSUFFIX
env['FORTRANMODDIR'] = '' # where the compiler should place .mod files
env['FORTRANMODDIRPREFIX'] = '' # some prefix to $FORTRANMODDIR - similar to $INCPREFIX
env['FORTRANMODDIRSUFFIX'] = '' # some suffix to $FORTRANMODDIR - similar to $INCSUFFIX
env['_FORTRANMODFLAG'] = '$( ${_concat(FORTRANMODDIRPREFIX, FORTRANMODDIR, FORTRANMODDIRSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)'
def add_f77_to_env(env):
"""Add Builders and construction variables for f77 to an Environment."""
try:
F77Suffixes = env['F77FILESUFFIXES']
except KeyError:
F77Suffixes = ['.f77']
#print "Adding %s to f77 suffixes" % F77Suffixes
try:
F77PPSuffixes = env['F77PPFILESUFFIXES']
except KeyError:
F77PPSuffixes = []
DialectAddToEnv(env, "F77", F77Suffixes, F77PPSuffixes)
def add_f90_to_env(env):
"""Add Builders and construction variables for f90 to an Environment."""
try:
F90Suffixes = env['F90FILESUFFIXES']
except KeyError:
F90Suffixes = ['.f90']
#print "Adding %s to f90 suffixes" % F90Suffixes
try:
F90PPSuffixes = env['F90PPFILESUFFIXES']
except KeyError:
F90PPSuffixes = []
DialectAddToEnv(env, "F90", F90Suffixes, F90PPSuffixes,
support_module = 1)
def add_f95_to_env(env):
"""Add Builders and construction variables for f95 to an Environment."""
try:
F95Suffixes = env['F95FILESUFFIXES']
except KeyError:
F95Suffixes = ['.f95']
#print "Adding %s to f95 suffixes" % F95Suffixes
try:
F95PPSuffixes = env['F95PPFILESUFFIXES']
except KeyError:
F95PPSuffixes = []
DialectAddToEnv(env, "F95", F95Suffixes, F95PPSuffixes,
support_module = 1)
def add_f03_to_env(env):
"""Add Builders and construction variables for f03 to an Environment."""
try:
F03Suffixes = env['F03FILESUFFIXES']
except KeyError:
F03Suffixes = ['.f03']
#print "Adding %s to f95 suffixes" % F95Suffixes
try:
F03PPSuffixes = env['F03PPFILESUFFIXES']
except KeyError:
F03PPSuffixes = []
DialectAddToEnv(env, "F03", F03Suffixes, F03PPSuffixes,
support_module = 1)
def add_f08_to_env(env):
"""Add Builders and construction variables for f08 to an Environment."""
try:
F08Suffixes = env['F08FILESUFFIXES']
except KeyError:
F08Suffixes = ['.f08']
try:
F08PPSuffixes = env['F08PPFILESUFFIXES']
except KeyError:
F08PPSuffixes = []
DialectAddToEnv(env, "F08", F08Suffixes, F08PPSuffixes,
support_module = 1)
def add_all_to_env(env):
"""Add builders and construction variables for all supported fortran
dialects."""
add_fortran_to_env(env)
add_f77_to_env(env)
add_f90_to_env(env)
add_f95_to_env(env)
add_f03_to_env(env)
add_f08_to_env(env)
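# Sketch of how a compiler tool module would consume this (the compiler name
# 'gfortran' is illustrative):
#     def generate(env):
#         add_all_to_env(env)
#         env['FORTRAN'] = 'gfortran'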
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
Uli1/mapnik
|
scons/scons-local-2.4.0/SCons/Tool/FortranCommon.py
|
Python
|
lgpl-2.1
| 10,707 | 0.006538 |
import gc
import os
import argparse
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
from util import generate_features
def get_arguments():
parser = argparse.ArgumentParser(description='Generate features using a previously trained model')
parser.add_argument('data', type=str, help='File containing the input smiles matrices')
parser.add_argument('model', type=str, help='The model file')
parser.add_argument('features', type=str, help='Output file that will contain the generated features')
parser.add_argument('--batch_size', type=int, default=100, help='Size of the batches (default: 100)')
return parser.parse_args()
args = get_arguments()
generate_features.generate_features(args.data, args.model, args.features, args.batch_size)
gc.collect()
|
patrick-winter-knime/deep-learning-on-molecules
|
smiles-vhts/generate_features.py
|
Python
|
gpl-3.0
| 769 | 0.007802 |
# -*- coding: utf-8 -*-
"""
***************************************************************************
r_li_richness_ascii.py
----------------------
Date : February 2016
Copyright : (C) 2016 by Médéric Ribreux
Email : medspx at medspx dot fr
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Médéric Ribreux'
__date__ = 'February 2016'
__copyright__ = '(C) 2016, Médéric Ribreux'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from .r_li import checkMovingWindow, configFile, moveOutputTxtFile
def checkParameterValuesBeforeExecuting(alg, parameters, context):
return checkMovingWindow(alg, parameters, context, True)
def processCommand(alg, parameters, context):
configFile(alg, parameters, context, True)
def processOutputs(alg, parameters, context):
moveOutputTxtFile(alg, parameters, context)
|
stevenmizuno/QGIS
|
python/plugins/processing/algs/grass7/ext/r_li_richness_ascii.py
|
Python
|
gpl-2.0
| 1,514 | 0 |
import pyaudio
import wave
#CHUNK = 1024
CHUNK = 1
FORMAT = pyaudio.paInt16
#CHANNELS = 2
CHANNELS = 1
#RATE = 44100
RATE = 10025
RECORD_SECONDS = 5
WAVE_OUTPUT_FILENAME = "output.wav"
p = pyaudio.PyAudio()
stream = p.open(format=FORMAT,
channels=CHANNELS,
rate=RATE,
input=True,
frames_per_buffer=CHUNK)
print("* recording, CHUNK=%d" % CHUNK)
for i in range(0, int(RATE / CHUNK * RECORD_SECONDS)):
data = stream.read(CHUNK)
print('data=%s, len=%d' % (str(data), len(data)))
# print(str(data))
# print('%d' % ord(data))
print("* done recording")
stream.stop_stream()
stream.close()
p.terminate()
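# Note: with CHUNK = 1 each stream.read() returns a single frame (2 bytes for
# 16-bit mono audio), so per-call overhead dominates; larger chunks such as
# 1024 are the usual choice when latency is not critical.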
|
rzzzwilson/morse
|
morse/test.py
|
Python
|
mit
| 682 | 0.005865 |
# Copyright (C) 2014,2015 VA Linux Systems Japan K.K.
# Copyright (C) 2014,2015 YAMAMOTO Takashi <yamamoto at valinux co jp>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from neutron.tests.unit.plugins.ml2.drivers.openvswitch.agent.openflow.native \
import ovs_bridge_test_base
call = mock.call # short hand
class OVSIntegrationBridgeTest(ovs_bridge_test_base.OVSBridgeTestBase):
def setUp(self):
super(OVSIntegrationBridgeTest, self).setUp()
self.setup_bridge_mock('br-int', self.br_int_cls)
self.stamp = self.br.default_cookie
def test_setup_default_table(self):
self.br.setup_default_table()
(dp, ofp, ofpp) = self._get_dp()
expected = [
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[],
match=ofpp.OFPMatch(),
priority=0,
table_id=23)),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[
ofpp.OFPInstructionGotoTable(table_id=60),
],
match=ofpp.OFPMatch(),
priority=0,
table_id=0)),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[
ofpp.OFPInstructionActions(
ofp.OFPIT_APPLY_ACTIONS, [
ofpp.OFPActionOutput(ofp.OFPP_NORMAL, 0)
]),
],
match=ofpp.OFPMatch(),
priority=3,
table_id=60)),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[],
match=ofpp.OFPMatch(),
priority=0,
table_id=24)),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_provision_local_vlan(self):
port = 999
lvid = 888
segmentation_id = 777
self.br.provision_local_vlan(port=port, lvid=lvid,
segmentation_id=segmentation_id)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[
ofpp.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS, [
ofpp.OFPActionSetField(
vlan_vid=lvid | ofp.OFPVID_PRESENT),
]),
ofpp.OFPInstructionGotoTable(table_id=60),
],
match=ofpp.OFPMatch(
in_port=port,
vlan_vid=segmentation_id | ofp.OFPVID_PRESENT),
priority=3,
table_id=0)),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_provision_local_vlan_novlan(self):
port = 999
lvid = 888
segmentation_id = None
self.br.provision_local_vlan(port=port, lvid=lvid,
segmentation_id=segmentation_id)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[
ofpp.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS, [
ofpp.OFPActionPushVlan(),
ofpp.OFPActionSetField(
vlan_vid=lvid | ofp.OFPVID_PRESENT),
]),
ofpp.OFPInstructionGotoTable(table_id=60),
],
match=ofpp.OFPMatch(
in_port=port,
vlan_vid=ofp.OFPVID_NONE),
priority=3,
table_id=0)),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_reclaim_local_vlan(self):
port = 999
segmentation_id = 777
self.br.reclaim_local_vlan(port=port, segmentation_id=segmentation_id)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call.uninstall_flows(
match=ofpp.OFPMatch(
in_port=port,
vlan_vid=segmentation_id | ofp.OFPVID_PRESENT)),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_reclaim_local_vlan_novlan(self):
port = 999
segmentation_id = None
self.br.reclaim_local_vlan(port=port, segmentation_id=segmentation_id)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call.uninstall_flows(
match=ofpp.OFPMatch(
in_port=port,
vlan_vid=ofp.OFPVID_NONE)),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_install_dvr_to_src_mac(self):
network_type = 'vxlan'
vlan_tag = 1111
gateway_mac = '08:60:6e:7f:74:e7'
dst_mac = '00:02:b3:13:fe:3d'
dst_port = 6666
self.br.install_dvr_to_src_mac(network_type=network_type,
vlan_tag=vlan_tag,
gateway_mac=gateway_mac,
dst_mac=dst_mac,
dst_port=dst_port)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[
ofpp.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS, [
ofpp.OFPActionSetField(eth_src=gateway_mac),
]),
ofpp.OFPInstructionGotoTable(table_id=60),
],
match=ofpp.OFPMatch(
eth_dst=dst_mac,
vlan_vid=vlan_tag | ofp.OFPVID_PRESENT),
priority=4,
table_id=1)),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[
ofpp.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS, [
ofpp.OFPActionPopVlan(),
ofpp.OFPActionOutput(6666, 0),
]),
],
match=ofpp.OFPMatch(
eth_dst=dst_mac,
vlan_vid=vlan_tag | ofp.OFPVID_PRESENT),
priority=4,
table_id=60)),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_delete_dvr_to_src_mac(self):
network_type = 'vxlan'
vlan_tag = 1111
dst_mac = '00:02:b3:13:fe:3d'
self.br.delete_dvr_to_src_mac(network_type=network_type,
vlan_tag=vlan_tag,
dst_mac=dst_mac)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call.uninstall_flows(
strict=True,
priority=4,
table_id=1,
match=ofpp.OFPMatch(
eth_dst=dst_mac,
vlan_vid=vlan_tag | ofp.OFPVID_PRESENT)),
call.uninstall_flows(
strict=True,
priority=4,
table_id=60,
match=ofpp.OFPMatch(
eth_dst=dst_mac,
vlan_vid=vlan_tag | ofp.OFPVID_PRESENT)),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_install_dvr_to_src_mac_vlan(self):
network_type = 'vlan'
vlan_tag = 1111
gateway_mac = '08:60:6e:7f:74:e7'
dst_mac = '00:02:b3:13:fe:3d'
dst_port = 6666
self.br.install_dvr_to_src_mac(network_type=network_type,
vlan_tag=vlan_tag,
gateway_mac=gateway_mac,
dst_mac=dst_mac,
dst_port=dst_port)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[
ofpp.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS, [
ofpp.OFPActionSetField(eth_src=gateway_mac),
]),
ofpp.OFPInstructionGotoTable(table_id=60),
],
match=ofpp.OFPMatch(
eth_dst=dst_mac,
vlan_vid=vlan_tag | ofp.OFPVID_PRESENT),
priority=4,
table_id=2)),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[
ofpp.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS, [
ofpp.OFPActionPopVlan(),
ofpp.OFPActionOutput(dst_port, 0),
]),
],
match=ofpp.OFPMatch(
eth_dst=dst_mac,
vlan_vid=vlan_tag | ofp.OFPVID_PRESENT),
priority=4,
table_id=60)),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_delete_dvr_to_src_mac_vlan(self):
network_type = 'vlan'
vlan_tag = 1111
dst_mac = '00:02:b3:13:fe:3d'
self.br.delete_dvr_to_src_mac(network_type=network_type,
vlan_tag=vlan_tag,
dst_mac=dst_mac)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call.uninstall_flows(
strict=True,
priority=4,
table_id=2,
match=ofpp.OFPMatch(
eth_dst=dst_mac,
vlan_vid=vlan_tag | ofp.OFPVID_PRESENT)),
call.uninstall_flows(
strict=True,
priority=4,
table_id=60,
match=ofpp.OFPMatch(
eth_dst=dst_mac,
vlan_vid=vlan_tag | ofp.OFPVID_PRESENT)),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_add_dvr_mac_vlan(self):
mac = '00:02:b3:13:fe:3d'
port = 8888
self.br.add_dvr_mac_vlan(mac=mac, port=port)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[
ofpp.OFPInstructionGotoTable(table_id=2),
],
match=ofpp.OFPMatch(
eth_src=mac,
in_port=port),
priority=4,
table_id=0))
]
self.assertEqual(expected, self.mock.mock_calls)
def test_remove_dvr_mac_vlan(self):
mac = '00:02:b3:13:fe:3d'
self.br.remove_dvr_mac_vlan(mac=mac)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call.uninstall_flows(eth_src=mac, table_id=0),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_add_dvr_mac_tun(self):
mac = '00:02:b3:13:fe:3d'
port = 8888
self.br.add_dvr_mac_tun(mac=mac, port=port)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[
ofpp.OFPInstructionGotoTable(table_id=1),
],
match=ofpp.OFPMatch(
eth_src=mac,
in_port=port),
priority=2,
table_id=0))
]
self.assertEqual(expected, self.mock.mock_calls)
def test_remove_dvr_mac_tun(self):
mac = '00:02:b3:13:fe:3d'
port = 8888
self.br.remove_dvr_mac_tun(mac=mac, port=port)
expected = [
call.uninstall_flows(eth_src=mac, in_port=port, table_id=0),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_install_icmpv6_na_spoofing_protection(self):
port = 8888
ip_addresses = ['2001:db8::1', 'fdf8:f53b:82e4::1/128']
self.br.install_icmpv6_na_spoofing_protection(port, ip_addresses)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[
ofpp.OFPInstructionGotoTable(table_id=60),
],
match=ofpp.OFPMatch(
eth_type=self.ether_types.ETH_TYPE_IPV6,
icmpv6_type=self.icmpv6.ND_NEIGHBOR_ADVERT,
ip_proto=self.in_proto.IPPROTO_ICMPV6,
ipv6_nd_target='2001:db8::1',
in_port=8888,
),
priority=2,
table_id=24)),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[
ofpp.OFPInstructionGotoTable(table_id=60),
],
match=ofpp.OFPMatch(
eth_type=self.ether_types.ETH_TYPE_IPV6,
icmpv6_type=self.icmpv6.ND_NEIGHBOR_ADVERT,
ip_proto=self.in_proto.IPPROTO_ICMPV6,
ipv6_nd_target='fdf8:f53b:82e4::1',
in_port=8888,
),
priority=2,
table_id=24)),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[
ofpp.OFPInstructionGotoTable(table_id=24),
],
match=ofpp.OFPMatch(
eth_type=self.ether_types.ETH_TYPE_IPV6,
icmpv6_type=self.icmpv6.ND_NEIGHBOR_ADVERT,
ip_proto=self.in_proto.IPPROTO_ICMPV6,
in_port=8888,
),
priority=10,
table_id=0)),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_install_arp_spoofing_protection(self):
port = 8888
ip_addresses = ['192.0.2.1', '192.0.2.2/32']
self.br.install_arp_spoofing_protection(port, ip_addresses)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[
ofpp.OFPInstructionGotoTable(table_id=25),
],
match=ofpp.OFPMatch(
eth_type=self.ether_types.ETH_TYPE_ARP,
arp_spa='192.0.2.1',
in_port=8888,
),
priority=2,
table_id=24)),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[
ofpp.OFPInstructionGotoTable(table_id=25),
],
match=ofpp.OFPMatch(
eth_type=self.ether_types.ETH_TYPE_ARP,
arp_spa='192.0.2.2',
in_port=8888
),
priority=2,
table_id=24)),
call._send_msg(ofpp.OFPFlowMod(dp,
cookie=self.stamp,
instructions=[
ofpp.OFPInstructionGotoTable(table_id=24),
],
match=ofpp.OFPMatch(
eth_type=self.ether_types.ETH_TYPE_ARP,
in_port=8888,
),
priority=10,
table_id=0)),
]
self.assertEqual(expected, self.mock.mock_calls)
def test_delete_arp_spoofing_protection(self):
port = 8888
self.br.delete_arp_spoofing_protection(port)
(dp, ofp, ofpp) = self._get_dp()
expected = [
call.uninstall_flows(table_id=0, match=ofpp.OFPMatch(
eth_type=self.ether_types.ETH_TYPE_ARP,
in_port=8888)),
call.uninstall_flows(table_id=0, match=ofpp.OFPMatch(
eth_type=self.ether_types.ETH_TYPE_IPV6,
icmpv6_type=self.icmpv6.ND_NEIGHBOR_ADVERT,
in_port=8888,
ip_proto=self.in_proto.IPPROTO_ICMPV6)),
call.uninstall_flows(table_id=24, in_port=port),
]
self.assertEqual(expected, self.mock.mock_calls)
|
eayunstack/neutron
|
neutron/tests/unit/plugins/ml2/drivers/openvswitch/agent/openflow/native/test_br_int.py
|
Python
|
apache-2.0
| 17,011 | 0.002116 |
import re
import transaction
from sqlalchemy import MetaData, Table, text
from sqlalchemy.orm import sessionmaker
from sqlalchemy.schema import PrimaryKeyConstraint
from ..models import DBSession, Base, BaseModel  # Base/BaseModel assumed exported by ..models
SQL_TABLE = """
SELECT c.oid, n.nspname, c.relname
FROM pg_catalog.pg_class c
LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
WHERE c.relname = :table_name
AND pg_catalog.pg_table_is_visible(c.oid)
ORDER BY 2, 3
"""
SQL_TABLE_SCHEMA = """
SELECT c.oid, n.nspname, c.relname
FROM pg_catalog.pg_class c
LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
WHERE c.relname = :table_name
AND n.nspname = :schema
ORDER BY 2, 3
"""
SQL_FIELDS = """
SELECT a.attname,
pg_catalog.format_type(a.atttypid, a.atttypmod),
(SELECT substring(pg_catalog.pg_get_expr(d.adbin, d.adrelid) for 128)
FROM pg_catalog.pg_attrdef d
WHERE d.adrelid = a.attrelid
AND d.adnum = a.attnum
AND a.atthasdef),
a.attnotnull, a.attnum,
(SELECT c.collname
FROM pg_catalog.pg_collation c, pg_catalog.pg_type t
WHERE c.oid = a.attcollation
AND t.oid = a.atttypid
AND a.attcollation <> t.typcollation) AS attcollation,
NULL AS indexdef,
NULL AS attfdwoptions
FROM pg_catalog.pg_attribute a
WHERE a.attrelid = :table_id AND a.attnum > 0 AND NOT a.attisdropped
ORDER BY a.attnum"""
def get_table_seq(table_name):
    engine = DBSession.bind  # assumes DBSession has been configured with a bind
    t = table_name.split('.')
    if t[1:]:
        schema = t[0]
        table_name = t[1]
        sql = text(SQL_TABLE_SCHEMA)
        q = engine.execute(sql, schema=schema, table_name=table_name)
    else:
        sql = text(SQL_TABLE)
        q = engine.execute(sql, table_name=table_name)
r = q.fetchone()
table_id = r.oid
sql = text(SQL_FIELDS)
q = engine.execute(sql, table_id=table_id)
regex = re.compile("nextval\('(.*)'\:")
for r in q.fetchall():
if not r.substring:
continue
if r.substring.find('nextval') == -1:
continue
match = regex.search(r.substring)
return match.group(1)
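# e.g. a serial column default reads "nextval('users_id_seq'::regclass)",
# from which get_table_seq() extracts 'users_id_seq' (names illustrative).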
def set_sequence(orm, seq_name):
    row = DBSession.query(orm).order_by('id DESC').first()
    if row is None:  # empty table: nothing to resync
        return
    sql = "SELECT setval('%s', %d)" % (seq_name, row.id)
    engine = DBSession.bind
    engine.execute(sql)
def set_sequences(ORMs):
    # ORMs is a list of (orm_class, sequence_name) pairs, as built by insert_()
    for orm, seq_name in ORMs:
        set_sequence(orm, seq_name)
def get_pkeys(table):
r = []
for c in table.constraints:
if c.__class__ is not PrimaryKeyConstraint:
continue
for col in c:
r.append(col.name)
return r
def insert_(engine, fixtures):
session_factory = sessionmaker(bind=engine)
session = session_factory()
metadata = MetaData(engine)
sequences = []
for tablename, data in fixtures:
table = Table(tablename, metadata, autoload=True)
class T(Base, BaseModel):
__table__ = table
keys = get_pkeys(table)
for d in data:
filter_ = {}
for key in keys:
val = d[key]
filter_[key] = val
q = session.query(T).filter_by(**filter_)
if q.first():
continue
u = T()
u.from_dict(d)
m = session.add(u)
seq_name = get_table_seq(tablename)
if seq_name:
sequences.append((T, seq_name))
session.commit()
set_sequences(sequences)
|
aagusti/osipkd-json-rpc
|
jsonrpc/scripts/DbTools.py
|
Python
|
lgpl-2.1
| 3,322 | 0.003612 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'ProjectBadge.awardLevel'
db.add_column(u'badges_projectbadge', 'awardLevel',
self.gf('django.db.models.fields.IntegerField')(default=1000),
keep_default=False)
# Adding field 'ProjectBadge.multipleAwards'
db.add_column(u'badges_projectbadge', 'multipleAwards',
self.gf('django.db.models.fields.BooleanField')(default=True),
keep_default=False)
# Renaming column for 'ProjectBadge.project' to match new field type.
db.rename_column(u'badges_projectbadge', 'project', 'project_id')
# Changing field 'ProjectBadge.project'
db.alter_column(u'badges_projectbadge', 'project_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['core.Project']))
# Adding index on 'ProjectBadge', fields ['project']
db.create_index(u'badges_projectbadge', ['project_id'])
def backwards(self, orm):
# Removing index on 'ProjectBadge', fields ['project']
db.delete_index(u'badges_projectbadge', ['project_id'])
# Deleting field 'ProjectBadge.awardLevel'
db.delete_column(u'badges_projectbadge', 'awardLevel')
# Deleting field 'ProjectBadge.multipleAwards'
db.delete_column(u'badges_projectbadge', 'multipleAwards')
# Renaming column for 'ProjectBadge.project' to match new field type.
db.rename_column(u'badges_projectbadge', 'project_id', 'project')
# Changing field 'ProjectBadge.project'
db.alter_column(u'badges_projectbadge', 'project', self.gf('django.db.models.fields.IntegerField')())
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'badges.badge': {
'Meta': {'object_name': 'Badge'},
'icon': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'badges.badgesettings': {
'Meta': {'object_name': 'BadgeSettings'},
'awardLevel': ('django.db.models.fields.IntegerField', [], {'default': '1000'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'multipleAwards': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
u'badges.projectbadge': {
'Meta': {'object_name': 'ProjectBadge'},
'awardLevel': ('django.db.models.fields.IntegerField', [], {'default': '1000'}),
'badge': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['badges.Badge']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'description': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'multipleAwards': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.Project']"}),
'user': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'badges'", 'symmetrical': 'False', 'through': u"orm['badges.ProjectBadgeToUser']", 'to': u"orm['auth.User']"})
},
u'badges.projectbadgetouser': {
'Meta': {'object_name': 'ProjectBadgeToUser'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'projectbadge': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['badges.ProjectBadge']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'core.project': {
'Meta': {'ordering': "('-created_at',)", 'object_name': 'Project'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'private': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
}
}
complete_apps = ['badges']
|
ngageoint/gamification-server
|
gamification/badges/migrations/0002_auto__add_field_projectbadge_awardLevel__add_field_projectbadge_multip.py
|
Python
|
mit
| 8,179 | 0.007214 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-11-19 18:40
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('mqtt_logger', '0002_mqttsubscription_active'),
]
operations = [
migrations.AlterModelOptions(
name='mqttmessage',
options={'verbose_name': 'MQTT message', 'verbose_name_plural': 'MQTT messages'},
),
migrations.AlterModelOptions(
name='mqttsubscription',
options={'verbose_name': 'MQTT subscription', 'verbose_name_plural': 'MQTT subscriptions'},
),
]
|
ast0815/mqtt-hub
|
mqtt_logger/migrations/0003_auto_20161119_1840.py
|
Python
|
mit
| 664 | 0.003012 |
# Hack, hide DataLossWarnings
# Based on html5lib code namespaceHTMLElements=False should do it, but nope ...
# Also it doesn't seem to be available in older versions of html5lib, so it was removed
import warnings
from typing import IO, Union
from bs4 import BeautifulSoup
from html5lib.constants import DataLossWarning
warnings.simplefilter('ignore', DataLossWarning)
def get_soup(obj: Union[str, IO, bytes], parser: str = 'html5lib') -> BeautifulSoup:
return BeautifulSoup(obj, parser)
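# e.g. get_soup('<p>hi</p>') parses the markup with html5lib and returns a
# BeautifulSoup tree; a file object or bytes works equally well.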
|
Flexget/Flexget
|
flexget/utils/soup.py
|
Python
|
mit
| 491 | 0.004073 |
import requests
LRS = "http://cygnus.ic.uva.nl:8000/XAPI/statements"
u = raw_input("LRS username: ")
p = raw_input("LRS password: ")
r = requests.get(LRS,headers={"X-Experience-API-Version":"1.0"},auth=(u,p));
if r.status_code == 200:
print "Success"
else:
print "Server returns",r.status_code
|
ictofnwi/coach
|
test_lrs.py
|
Python
|
agpl-3.0
| 305 | 0.019672 |
import pytest
from plenum.test.helper import perf_monitor_disabled
from plenum.test.node_catchup.helper import ensure_all_nodes_have_same_data
from plenum.test.view_change_with_delays.helper import \
do_view_change_with_propagate_primary_on_one_delayed_node
# This is needed only with the current view change implementation, to give the
# test enough time to show exactly what is broken
TestRunningTimeLimitSec = 300
@pytest.fixture(scope="module")
def tconf(tconf):
"""
Patch config so that monitor won't start view change unexpectedly
"""
with perf_monitor_disabled(tconf):
yield tconf
def test_view_change_with_propagate_primary_on_one_delayed_node(
txnPoolNodeSet, looper, sdk_pool_handle, sdk_wallet_client, tconf):
"""
Perform view change on all the nodes except for one slow node and then
propagate primary on it so that delayed Commits are processed by the slow
node in the old view and by the other nodes in the new view. After that
verify that all the nodes have the same ledgers and state.
"""
do_view_change_with_propagate_primary_on_one_delayed_node(
txnPoolNodeSet[-1], txnPoolNodeSet, looper, sdk_pool_handle, sdk_wallet_client)
ensure_all_nodes_have_same_data(looper, txnPoolNodeSet)
|
evernym/zeno
|
plenum/test/view_change_with_delays/test_view_change_with_propagate_primary_on_one_delayed_node.py
|
Python
|
apache-2.0
| 1,269 | 0.001576 |
"""
Defines the Sumatra version control interface for Git.
Classes
-------
GitWorkingCopy
GitRepository
:copyright: Copyright 2006-2015 by the Sumatra team, see doc/authors.txt
:license: BSD 2-clause, see LICENSE for details.
"""
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
from future import standard_library
standard_library.install_aliases()
import logging
import git
import os
import sys
import shutil
from distutils.version import LooseVersion
from configparser import NoSectionError, NoOptionError
try:
from git.errors import InvalidGitRepositoryError, NoSuchPathError
except:
from git.exc import InvalidGitRepositoryError, NoSuchPathError
from .base import Repository, WorkingCopy, VersionControlError
from ..core import component
logger = logging.getLogger("Sumatra")
def check_version():
if not hasattr(git, "Repo"):
raise VersionControlError(
"GitPython not installed. There is a 'git' package, but it is not "
"GitPython (https://pypi.python.org/pypi/GitPython/)")
minimum_version = '0.3.5'
if LooseVersion(git.__version__) < LooseVersion(minimum_version):
raise VersionControlError(
"Your Git Python binding is too old. You require at least "
"version {0}. You can install the latest version e.g. via "
"'pip install -U gitpython'".format(minimum_version))
def findrepo(path):
check_version()
try:
repo = git.Repo(path, search_parent_directories=True)
except InvalidGitRepositoryError:
return
else:
return os.path.dirname(repo.git_dir)
@component
class GitWorkingCopy(WorkingCopy):
"""
An object which allows various operations on a Git working copy.
"""
name = "git"
def __init__(self, path=None):
check_version()
WorkingCopy.__init__(self, path)
self.path = findrepo(self.path)
self.repository = GitRepository(self.path)
@property
def exists(self):
return bool(self.path and findrepo(self.path))
def current_version(self):
head = self.repository._repository.head
try:
return head.commit.hexsha
except AttributeError:
return head.commit.sha
def use_version(self, version):
logger.debug("Using git version: %s" % version)
        if version != 'master':
assert not self.has_changed()
g = git.Git(self.path)
g.checkout(version)
def use_latest_version(self):
self.use_version('master') # note that we are assuming all code is in the 'master' branch
def status(self):
raise NotImplementedError()
def has_changed(self):
return self.repository._repository.is_dirty()
def diff(self):
"""Difference between working copy and repository."""
g = git.Git(self.path)
return g.diff('HEAD', color='never')
def content(self, digest, file=None):
"""Get the file content from repository."""
repo = git.Repo(self.path)
curtree = repo.commit(digest).tree
if file is None:
return curtree.blobs[0].data_stream.read() # Get the latest added file content.
dirname, filename = os.path.split(file)
if dirname != '':
for dname in dirname.split(os.path.sep):
for subtree in curtree.trees:
if subtree.name == dname:
curtree = subtree
break
for blob in curtree.blobs:
if blob.name == filename:
expected_encoding = sys.getfilesystemencoding()
file_content = blob.data_stream.read().decode(expected_encoding)
return file_content
return 'File content not found.'
def contains(self, path):
"""Does the repository contain the file with the given path?"""
return path in self.repository._repository.git.ls_files().split()
def get_username(self):
config = self.repository._repository.config_reader()
try:
username, email = (config.get('user', 'name'), config.get('user', 'email'))
except (NoSectionError, NoOptionError):
return ""
return "%s <%s>" % (username, email)
def move_contents(src, dst):
    """Copy the contents of src into dst, then remove src."""
for file in os.listdir(src):
src_path = os.path.join(src, file)
if os.path.isdir(src_path):
shutil.copytree(src_path, os.path.join(dst, file))
else:
shutil.copy2(src_path, dst)
shutil.rmtree(src)
@component
class GitRepository(Repository):
name = "git"
use_version_cmd = "git checkout"
apply_patch_cmd = "git apply"
def __init__(self, url, upstream=None):
check_version()
Repository.__init__(self, url, upstream)
self.__repository = None
self.upstream = self.upstream or self._get_upstream()
@property
def exists(self):
try:
self._repository
except VersionControlError:
pass
return bool(self.__repository)
@property
def _repository(self):
if self.__repository is None:
try:
self.__repository = git.Repo(self.url)
except (InvalidGitRepositoryError, NoSuchPathError) as err:
raise VersionControlError("Cannot access Git repository at %s: %s" % (self.url, err))
return self.__repository
def checkout(self, path="."):
"""Clone a repository."""
path = os.path.abspath(path)
g = git.Git(path)
if self.url == path:
# already have a repository in the working directory
pass
else:
tmpdir = os.path.join(path, "tmp_git")
g.clone(self.url, tmpdir)
move_contents(tmpdir, path)
self.__repository = git.Repo(path)
def get_working_copy(self, path=None):
return GitWorkingCopy(path)
def _get_upstream(self):
if self.exists:
config = self._repository.config_reader()
if config.has_option('remote "origin"', 'url'):
return config.get('remote "origin"', 'url')
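# A minimal usage sketch (the repository path is hypothetical):
#   repo = GitRepository('/path/to/project')
#   if repo.exists:
#       wc = repo.get_working_copy('/path/to/project')
#       print(wc.current_version())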
|
maxalbert/sumatra
|
sumatra/versioncontrol/_git.py
|
Python
|
bsd-2-clause
| 6,219 | 0.002573 |
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Claw(CMakePackage):
"""CLAW Compiler targets performance portability problem in climate and
weather application written in Fortran. From a single source code, it
generates architecture specific code decorated with OpenMP or OpenACC"""
homepage = 'https://claw-project.github.io/'
git = 'https://github.com/claw-project/claw-compiler.git'
maintainers = ['clementval']
version('2.0.2', commit='8c012d58484d8caf79a4fe45597dc74b4367421c', submodules=True)
version('2.0.1', commit='f5acc929df74ce66a328aa4eda9cc9664f699b91', submodules=True)
version('2.0', commit='53e705b8bfce40a5c5636e8194a7622e337cf4f5', submodules=True)
version('1.2.3', commit='eaf5e5fb39150090e51bec1763170ce5c5355198', submodules=True)
version('1.2.2', commit='fc27a267eef9f412dd6353dc0b358a05b3fb3e16', submodules=True)
version('1.2.1', commit='939989ab52edb5c292476e729608725654d0a59a', submodules=True)
version('1.2.0', commit='fc9c50fe02be97b910ff9c7015064f89be88a3a2', submodules=True)
version('1.1.0', commit='16b165a443b11b025a77cad830b1280b8c9bcf01', submodules=True)
depends_on('cmake@3.0:', type='build')
depends_on('java@8:', when="@2.0:")
depends_on('java@7:', when="@1.1.0:1.2.3")
depends_on('ant@1.9:')
depends_on('libxml2')
depends_on('bison')
def cmake_args(self):
args = []
spec = self.spec
args.append('-DOMNI_CONF_OPTION=--with-libxml2={0}'.
format(spec['libxml2'].prefix))
args.append('-DCMAKE_Fortran_COMPILER={0}'.
format(self.compiler.fc))
return args
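# A minimal usage sketch (version choice is illustrative): once this package
# file is on a Spack repo path, the compiler can be installed with
#   spack install claw@2.0.2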
|
rspavel/spack
|
var/spack/repos/builtin/packages/claw/package.py
|
Python
|
lgpl-2.1
| 1,854 | 0.004854 |
from __future__ import division
from sys import stdin, stdout
from collections import deque
def solve(n, edges, s):
def build_graph(n, edges):
graph = [[] for _ in range(n)]
for (a, b) in edges:
a, b = a - 1, b - 1
graph[a].append(b)
graph[b].append(a)
return graph
graph = build_graph(n, edges)
    dis, que = [-1] * n, deque()
dis[s] = 0
que.append(s)
while que:
node = que.popleft()
for adj in graph[node]:
if dis[adj] == -1:
dis[adj] = dis[node] + 6
que.append(adj)
del dis[s]
return dis
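# A quick worked example (hypothetical input): for a 4-node graph with
# 1-indexed edges (1, 2) and (2, 3), a BFS from 0-indexed source 0 leaves
# node 3 unreachable, so
#   solve(4, [(1, 2), (2, 3)], 0) == [6, 12, -1]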
if __name__ == '__main__':
t = int(stdin.readline())
for _ in range(t):
edges = []
n, m = map(int, stdin.readline().strip().split())
for _ in range(m):
a, b = map(int, stdin.readline().strip().split())
edges.append((a, b))
s = int(stdin.readline())
print ' '.join(map(str, solve(n, edges, s - 1)))
|
m00nlight/hackerrank
|
algorithm/Graph-Theory/Breadth-First-Search-Shortest-Reach/main.py
|
Python
|
gpl-2.0
| 1,017 | 0.00295 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.10 on 2018-04-16 16:39
from __future__ import unicode_literals
import base.models.learning_unit_year
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('base', '0259_auto_20180416_1404'),
]
operations = [
migrations.RemoveField(
model_name='learningunit',
name='acronym',
),
migrations.RemoveField(
model_name='learningunit',
name='title',
),
migrations.AlterField(
model_name='learningcontaineryear',
name='common_title',
field=models.CharField(blank=True, max_length=255, null=True, verbose_name='common_official_title'),
),
migrations.AlterField(
model_name='learningcontaineryear',
name='common_title_english',
field=models.CharField(blank=True, max_length=250, null=True, verbose_name='common_official_english_title'),
),
migrations.AlterField(
model_name='learningcontaineryear',
name='container_type',
field=models.CharField(choices=[('COURSE', 'COURSE'), ('INTERNSHIP', 'INTERNSHIP'), ('DISSERTATION', 'DISSERTATION'), ('OTHER_COLLECTIVE', 'OTHER_COLLECTIVE'), ('OTHER_INDIVIDUAL', 'OTHER_INDIVIDUAL'), ('MASTER_THESIS', 'MASTER_THESIS'), ('EXTERNAL', 'EXTERNAL')], max_length=20, verbose_name='type'),
),
migrations.AlterField(
model_name='learningcontaineryear',
name='language',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='reference.Language', verbose_name='language'),
),
migrations.AlterField(
model_name='learningunit',
name='faculty_remark',
field=models.TextField(blank=True, null=True, verbose_name='faculty_remark'),
),
migrations.AlterField(
model_name='learningunit',
name='other_remark',
field=models.TextField(blank=True, null=True, verbose_name='other_remark'),
),
migrations.AlterField(
model_name='learningunit',
name='periodicity',
field=models.CharField(choices=[('ANNUAL', 'ANNUAL'), ('BIENNIAL_EVEN', 'BIENNIAL_EVEN'), ('BIENNIAL_ODD', 'BIENNIAL_ODD')], default='ANNUAL', max_length=20, verbose_name='periodicity'),
),
migrations.AlterField(
model_name='learningunityear',
name='academic_year',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='base.AcademicYear', validators=[base.models.learning_unit_year.academic_year_validator], verbose_name='academic_year'),
),
migrations.AlterField(
model_name='learningunityear',
name='acronym',
field=models.CharField(db_index=True, max_length=15, validators=[django.core.validators.RegexValidator('^[BLMW][A-Z]{2,4}\\d{4}[A-Z0-9]{0,1}$')], verbose_name='code'),
),
migrations.AlterField(
model_name='learningunityear',
name='internship_subtype',
field=models.CharField(blank=True, choices=[('TEACHING_INTERNSHIP', 'TEACHING_INTERNSHIP'), ('CLINICAL_INTERNSHIP', 'CLINICAL_INTERNSHIP'), ('PROFESSIONAL_INTERNSHIP', 'PROFESSIONAL_INTERNSHIP'), ('RESEARCH_INTERNSHIP', 'RESEARCH_INTERNSHIP')], max_length=250, null=True, verbose_name='internship_subtype'),
),
migrations.AlterField(
model_name='learningunityear',
name='quadrimester',
field=models.CharField(blank=True, choices=[('Q1', 'Q1'), ('Q2', 'Q2'), ('Q1&2', 'Q1&2'), ('Q1|2', 'Q1|2'), ('Q3', 'Q3')], max_length=4, null=True, verbose_name='quadrimester'),
),
migrations.AlterField(
model_name='learningunityear',
name='specific_title',
field=models.CharField(blank=True, max_length=255, null=True, verbose_name='official_title_proper_to_UE'),
),
migrations.AlterField(
model_name='learningunityear',
name='specific_title_english',
field=models.CharField(blank=True, max_length=250, null=True, verbose_name='official_english_title_proper_to_UE'),
),
migrations.AlterField(
model_name='learningunityear',
name='status',
field=models.BooleanField(default=False, verbose_name='active_title'),
),
migrations.AlterField(
model_name='learningunityear',
name='subtype',
field=models.CharField(choices=[('FULL', 'FULL'), ('PARTIM', 'PARTIM')], default='FULL', max_length=50),
),
]
|
uclouvain/osis_louvain
|
base/migrations/0260_auto_20180416_1839.py
|
Python
|
agpl-3.0
| 4,812 | 0.003117 |
from api_linked_rpc import * #@UnusedWildImport
|
linkedin/indextank-service
|
nebu/rpc.py
|
Python
|
apache-2.0
| 48 | 0.041667 |
puts(green("installing MidoNet cli on %s" % env.host_string))
args = {}
Puppet.apply('midonet::midonet_cli', args, metadata)
run("""
cat >/root/.midonetrc <<EOF
[cli]
api_url = http://%s:8080/midonet-api
username = admin
password = admin
project_id = admin
tenant = admin
EOF
""" % metadata.servers[metadata.roles['midonet_api'][0]]['ip'])
|
midonet/Chimata-No-Kami
|
stages/midonet_cli/fabfile.py
|
Python
|
apache-2.0
| 363 | 0.00551 |
import unittest
from bolt.core.plugin import Plugin
from bolt import interval
from bolt import Bot
import yaml
class TestIntervalPlugin(Plugin):
@interval(60)
def intervaltest(self):
pass
class TestInterval(unittest.TestCase):
def setUp(self):
self.config_file = "/tmp/bolt-test-config.yaml"
fake_config = {
"api_key": "1234",
"log_dir": "/tmp/"
}
with open(self.config_file, "w") as tempconfig:
tempconfig.write(yaml.dump(fake_config))
def test_interval_decos(self):
bot = Bot(self.config_file)
plugin = TestIntervalPlugin(bot)
plugin.load()
self.assertTrue(len(plugin.intervals) > 0)
def test_interval_will_run(self):
bot = Bot(self.config_file)
plugin = TestIntervalPlugin(bot)
plugin.load()
self.assertTrue(plugin.intervals[0].ready())
|
Arcbot-Org/Arcbot
|
tests/core/test_interval.py
|
Python
|
gpl-3.0
| 910 | 0 |
"""
Django settings for pennapps project.
Generated by 'django-admin startproject' using Django 1.8.4.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'rtl)lx9h2^u_4x49bx*+__o$^ocah8b915$jzh03pzr-8-5mmb'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'AI_chat',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'pennapps.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'pennapps.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
|
raoariel/PennApps-F15
|
pennapps/pennapps/settings.py
|
Python
|
mit
| 2,659 | 0 |
# coding=utf-8
import unittest
from text import grep
from text import string_utils
import text_test
class GrepTest(unittest.TestCase):
def test_grep(self):
log_list = text_test.read_log()
linux_syslog_head = '(\S+\s+\d+)\s+(\d+:\d+:\d+)\s+(\S+)\s+'
group_data = grep.grep(log_list, linux_syslog_head + '(login|ssh|su|sshd|passwd)\[(\d+)\].*')
self.assertEqual(len(group_data), 11)
group_data = grep.grep(log_list, '[1,4]', False, 'n')
self.assertEqual(len(group_data), 4)
group_data = grep.grep(log_list, 'pam', False, 's')
self.assertEqual(len(group_data), 6)
group_data = grep.grep(log_list, 'pam', True, 's')
self.assertEqual(len(group_data), 6)
self.assertTrue(string_utils.startswith(group_data[0], '1'))
self.assertTrue(string_utils.startswith(group_data[1], '2'))
self.assertTrue(string_utils.startswith(group_data[4], '12'))
self.assertTrue(string_utils.startswith(group_data[5], '19'))
group_data = grep.grep(log_list, None, True, 'e')
self.assertEqual(len(group_data), 19)
group_data = grep.grep(log_list, grep_action, True, 'a')
self.assertEqual(len(group_data), 3)
group_data = grep.grep(None, None)
self.assertEqual(group_data, None)
def grep_action(line_text):
if 'cron' in line_text:
return True
return False
if __name__ == '__main__':
# import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
interhui/py-text
|
text_test/grep_test.py
|
Python
|
apache-2.0
| 1,719 | 0.016289 |
# Copyright (C) 2013 Andrew Okin
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
# to permit persons to whom the Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
# THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from . import app, db
from flask import render_template, request, redirect, abort, url_for
from urlmodel import URLMapping, get_mapping, url_id_taken, generate_unused_url_id
# List of URL types. These act as 'modules' for Sporkk features.
url_types = []
@app.route('/')
def index():
if len(url_types) > 0:
return redirect(url_for('submit_form', type_spec = url_types[0].get_specifiers()[0]))
else:
abort(500)
@app.route('/<type_spec>/submit', methods = ['GET', 'POST'])
def submit_form(type_spec):
"""The page for submitting things to the URL shortener."""
if request.method == 'GET':
submit_forms = []
url_type = None
		# Get a list of submit forms and find the appropriate URL type in the same loop.
for utype in url_types:
submit_forms.append(utype.get_submit_form_info().
to_template_dict(url_for("submit_form", type_spec = utype.get_specifiers()[0])))
# If the given type spec matches one of the URL type's specifiers, show its submit page.
# We'll do this after we're done getting the submit form list.
if type_spec in utype.get_specifiers():
url_type = utype
				submit_forms[-1]["active"] = True
		# Now show the submit form page (or redirect home if the URL type wasn't found).
if url_type is not None:
return url_type.handle_submit_form(submit_forms)
else:
return redirect("/")
elif request.method == 'POST':
use_json = False
if "json" in request.args:
use_json = True
for utype in url_types:
if type_spec in utype.get_specifiers():
return utype.handle_submit(request, use_json)
# Catch-all route for URL IDs of any type.
@app.route('/<url_id>')
def url_id_view(url_id):
"""View that goes to whatever the specified URL ID maps to."""
mapping = get_mapping(url_id)
if mapping is None:
return redirect("/")
mapping_type = type(mapping)
# Find the URL type whose model type matches and let it handle rendering the view.
for utype in url_types:
if mapping_type is utype.get_model_type():
return utype.handle_view(url_id, mapping)
# If nothing is found, act like the URL ID doesn't even exist.
return redirect("/")
@app.route('/<type_spec>/<url_id>')
def url_type_spec_view(type_spec, url_id):
"""View that goes to the URL of the given type that the given ID maps to"""
for utype in url_types:
# If the given type spec matches one of the URL type's specifiers.
if type_spec in utype.get_specifiers():
mapping = get_mapping(url_id)
# Make sure the model type matches the URL type's model type as well.
if type(mapping) is utype.get_model_type():
return utype.handle_view(url_id, mapping)
# It's not valid, so go home.
return redirect("/")
####################
#### INITIALIZE ####
####################
# Load URL type modules.
import shortenedurl
import pastebinurl
# FIXME: This is a pretty stupid way of loading things.
typemodules = [ pastebinurl, shortenedurl ]
for typemod in typemodules:
try:
url_types += typemod.get_url_types_provided()
except AttributeError:
# If get_url_types_provided doesn't exist, this isn't a proper type module we can load URL types from.
continue
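# A minimal type-module sketch (names are hypothetical): any module exposing
# a get_url_types_provided() function that returns a list of URL type objects
# can be added to the typemodules list above, e.g.
#   def get_url_types_provided():
#       return [MyUrlType()]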
# Initialize the DB table.
db.create_all()
|
Forkk/Sporkk-Pastebin
|
sporkk/views.py
|
Python
|
mit
| 4,231 | 0.022453 |
import pprint
import sys
from xml.dom import xmlbuilder, expatbuilder, Node
from xml.dom.NodeFilter import NodeFilter
class Filter(xmlbuilder.DOMBuilderFilter):
whatToShow = NodeFilter.SHOW_ELEMENT
def startContainer(self, node):
assert node.nodeType == Node.ELEMENT_NODE
if node.tagName == "skipthis":
return self.FILTER_SKIP
elif node.tagName == "rejectbefore":
return self.FILTER_REJECT
elif node.tagName == "stopbefore":
return self.FILTER_INTERRUPT
else:
return self.FILTER_ACCEPT
def acceptNode(self, node):
assert node.nodeType == Node.ELEMENT_NODE
if node.tagName == "skipafter":
return self.FILTER_SKIP
elif node.tagName == "rejectafter":
return self.FILTER_REJECT
elif node.tagName == "stopafter":
return self.FILTER_INTERRUPT
else:
return self.FILTER_ACCEPT
class RecordingFilter:
# Inheriting from xml.dom.xmlbuilder.DOMBuilderFilter is not
# required, so we won't inherit from it this time to make sure it
# isn't a problem. We have to implement the entire interface
# directly.
whatToShow = NodeFilter.SHOW_ALL
def __init__(self):
self.events = []
def startContainer(self, node):
self.events.append(("start", node.nodeType, str(node.nodeName)))
return xmlbuilder.DOMBuilderFilter.FILTER_ACCEPT
def acceptNode(self, node):
self.events.append(("accept", node.nodeType, str(node.nodeName)))
return xmlbuilder.DOMBuilderFilter.FILTER_ACCEPT
simple_options = xmlbuilder.Options()
simple_options.filter = Filter()
simple_options.namespaces = 0
record_options = xmlbuilder.Options()
record_options.namespaces = 0
def checkResult(src):
print
dom = expatbuilder.makeBuilder(simple_options).parseString(src)
print dom.toxml()
dom.unlink()
def checkFilterEvents(src, record, what=NodeFilter.SHOW_ALL):
record_options.filter = RecordingFilter()
record_options.filter.whatToShow = what
dom = expatbuilder.makeBuilder(record_options).parseString(src)
if record != record_options.filter.events:
print
print "Received filter events:"
pprint.pprint(record_options.filter.events)
print
print "Expected filter events:"
pprint.pprint(record)
dom.unlink()
# a simple case of skipping an element
checkResult("<doc><e><skipthis>text<e/>more</skipthis>abc</e>xyz</doc>")
# skip an element nested indirectly within another skipped element
checkResult('''\
<doc>Text.
<skipthis>Nested text.
<skipthis>Nested text in skipthis element.</skipthis>
More nested text.
</skipthis>Outer text.</doc>
''')
# skip an element nested indirectly within another skipped element
checkResult('''\
<doc>Text.
<skipthis>Nested text.
<nested-element>
<skipthis>Nested text in skipthis element.</skipthis>
More nested text.
</nested-element>
More text.
</skipthis>Outer text.</doc>
''')
checkResult("<doc><rejectbefore/></doc>")
checkResult("<doc><rejectafter/></doc>")
checkResult('''\
<doc><rejectbefore>
Text.
<?my processing instruction?>
<more stuff="foo"/>
<!-- a comment -->
</rejectbefore></doc>
''')
checkResult('''\
<doc><rejectafter>
Text.
<?my processing instruction?>
<more stuff="foo"/>
<!-- a comment -->
</rejectafter></doc>
''')
# Make sure the document element is not passed to the filter:
checkResult("<rejectbefore/>")
checkResult("<rejectafter/>")
checkResult("<stopbefore/>")
checkResult("<doc>text<stopbefore> and </stopbefore>more</doc>")
checkResult("<doc>text<stopafter> and </stopafter>more</doc>")
checkResult("<doc><a/><skipafter>text</skipafter><a/></doc>")
checkFilterEvents("<doc/>", [])
checkFilterEvents("<doc attr='value'/>", [])
checkFilterEvents("<doc><e/></doc>", [
("start", Node.ELEMENT_NODE, "e"),
("accept", Node.ELEMENT_NODE, "e"),
])
src = """\
<!DOCTYPE doc [
<!ENTITY e 'foo'>
<!NOTATION n SYSTEM 'http://xml.python.org/notation/n'>
]>
<!-- comment -->
<?sample pi?>
<doc><e attr='value'><?pi data?><!--comment--></e></doc>
"""
checkFilterEvents(src, [
("accept", Node.DOCUMENT_TYPE_NODE, "doc"),
("accept", Node.ENTITY_NODE, "e"),
("accept", Node.NOTATION_NODE, "n"),
("accept", Node.COMMENT_NODE, "#comment"),
("accept", Node.PROCESSING_INSTRUCTION_NODE, "sample"),
("start", Node.ELEMENT_NODE, "e"),
("accept", Node.PROCESSING_INSTRUCTION_NODE, "pi"),
("accept", Node.COMMENT_NODE, "#comment"),
("accept", Node.ELEMENT_NODE, "e"),
])
# Show everything except a couple of things to the filter, to check
# that whatToShow is implemented. This isn't sufficient to be a
# black-box test, but will get us started.
checkFilterEvents(src, [
("accept", Node.DOCUMENT_TYPE_NODE, "doc"),
("accept", Node.ENTITY_NODE, "e"),
("accept", Node.NOTATION_NODE, "n"),
("accept", Node.PROCESSING_INSTRUCTION_NODE, "sample"),
("start", Node.ELEMENT_NODE, "e"),
("accept", Node.PROCESSING_INSTRUCTION_NODE, "pi"),
("accept", Node.ELEMENT_NODE, "e"),
], what=NodeFilter.SHOW_ALL & ~NodeFilter.SHOW_COMMENT)
checkFilterEvents(src, [
("accept", Node.DOCUMENT_TYPE_NODE, "doc"),
("accept", Node.ENTITY_NODE, "e"),
("accept", Node.NOTATION_NODE, "n"),
("accept", Node.COMMENT_NODE, "#comment"),
("start", Node.ELEMENT_NODE, "e"),
("accept", Node.COMMENT_NODE, "#comment"),
("accept", Node.ELEMENT_NODE, "e"),
], what=NodeFilter.SHOW_ALL & ~NodeFilter.SHOW_PROCESSING_INSTRUCTION)
|
Pikecillo/genna
|
external/PyXML-0.8.4/test/test_filter.py
|
Python
|
gpl-2.0
| 5,625 | 0.000533 |
#
# Copyright 2009-2010 Goran Sterjov
# This file is part of Myelin.
#
# Myelin is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Myelin is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Myelin. If not, see <http://www.gnu.org/licenses/>.
#
import ctypes
from type import Type
# get library
import myelin.library
_lib = myelin.library.get_library()
_types = []
def add_type (klass):
_types.append (klass)
def get_type (type):
for klass in _types:
if klass._class.get_type().get_atom() == type.get_atom():
return klass
return None
def get_types ():
return _types
class Value (object):
def __init__ (self, ptr = None):
if ptr is None:
ptr = _lib.myelin_value_new ()
self._ptr = ptr
def __del__ (self):
_lib.myelin_value_unref (self)
def __repr__ (self):
return ("<%s.%s object at %#x with an instance of type %s at %#x>" %
(self.__module__,
self.__class__.__name__,
id(self),
self.get_type().get_name(),
self.as_pointer()))
@classmethod
def from_pointer (cls, ptr):
if ptr is None:
raise ValueError ("Value pointer cannot be 'None'")
instance = cls (ptr)
_lib.myelin_value_ref (instance)
return instance
def from_param (self):
return self._ptr
def get (self):
# empty value
if self.is_empty(): return None
# get value type
type = self.get_type()
atom = type.get_atom()
# convert value types
if not type.is_pointer() and not type.is_reference():
# fundamental types
if atom == Type.type_bool (): return self.get_bool ()
elif atom == Type.type_char (): return self.get_char ()
elif atom == Type.type_uchar (): return self.get_uchar ()
elif atom == Type.type_int (): return self.get_int ()
elif atom == Type.type_uint (): return self.get_uint ()
elif atom == Type.type_long (): return self.get_long ()
elif atom == Type.type_ulong (): return self.get_ulong ()
elif atom == Type.type_int64 (): return self.get_int64 ()
elif atom == Type.type_uint64 (): return self.get_uint64 ()
elif atom == Type.type_float (): return self.get_float ()
elif atom == Type.type_double (): return self.get_double ()
# elif atom == Type.type_string (): return self.get_string ()
# convert value to meta class instance
class_type = get_type (type)
if class_type is not None:
return class_type (instance = self)
        # don't know how to convert value so just return it as is
else:
return self
def set (self, value, atom = None):
from myelin.module import MetaObject
# convert python types
if type(value) is bool: self.set_bool (value)
# set the right integer type
elif type(value) is int or type(value) is long:
if atom is not None:
if atom == Type.type_char(): self.set_char (value)
elif atom == Type.type_uchar(): self.set_uchar (value)
elif atom == Type.type_int(): self.set_int (value)
elif atom == Type.type_uint(): self.set_uint (value)
elif atom == Type.type_long(): self.set_long (value)
elif atom == Type.type_ulong(): self.set_ulong (value)
# for long only
elif type(value) is long:
if atom == Type.type_int64(): self.set_int64 (value)
elif atom == Type.type_uint64(): self.set_uint64 (value)
else:
if type(value) is int: self.set_long (value)
else: self.set_int64 (value)
elif type(value) is float:
if atom is not None:
if atom == Type.type_float(): self.set_float (value)
elif atom == Type.type_double(): self.set_double (value)
else: self.set_double (value)
elif type(value) is str: self.set_string (value)
# set meta object instance
elif isinstance(value, MetaObject):
val = value._object.get_instance()
self.set_pointer (val.get_type(), val.as_pointer())
else:
raise TypeError ("Cannot determine an equivalent type for the " \
"value type '%s'. Conversion failed." %
type(value))
def get_type (self):
type = _lib.myelin_value_get_type (self)
return Type.from_pointer (type)
def is_empty (self):
return _lib.myelin_value_is_empty (self)
def clear (self):
_lib.myelin_value_clear (self)
def get_bool (self):
return _lib.myelin_value_get_bool (self)
def set_bool (self, value):
_lib.myelin_value_set_bool (self, value)
def get_char (self):
return _lib.myelin_value_get_char (self)
def set_char (self, value):
_lib.myelin_value_set_char (self, value)
def get_uchar (self):
return _lib.myelin_value_get_uchar (self)
def set_uchar (self, value):
_lib.myelin_value_set_uchar (self, value)
def get_int (self):
return _lib.myelin_value_get_int (self)
def set_int (self, value):
_lib.myelin_value_set_int (self, value)
def get_uint (self):
return _lib.myelin_value_get_uint (self)
def set_uint (self, value):
_lib.myelin_value_set_uint (self, value)
def get_long (self):
return _lib.myelin_value_get_long (self)
def set_long (self, value):
_lib.myelin_value_set_long (self, value)
def get_ulong (self):
return _lib.myelin_value_get_ulong (self)
def set_ulong (self, value):
_lib.myelin_value_set_ulong (self, value)
def get_int64 (self):
return _lib.myelin_value_get_int64 (self)
def set_int64 (self, value):
_lib.myelin_value_set_int64 (self, value)
def get_uint64 (self):
return _lib.myelin_value_get_uint64 (self)
def set_uint64 (self, value):
_lib.myelin_value_set_uint64 (self, value)
def get_float (self):
return _lib.myelin_value_get_float (self)
def set_float (self, value):
_lib.myelin_value_set_float (self, value)
def get_double (self):
return _lib.myelin_value_get_double (self)
def set_double (self, value):
_lib.myelin_value_set_double (self, value)
def get_string (self):
return _lib.myelin_value_get_string (self)
def set_string (self, value):
_lib.myelin_value_set_string (self, value)
def as_pointer (self):
return _lib.myelin_value_as_pointer (self)
def set_pointer (self, type, pointer):
_lib.myelin_value_set_pointer (self, type, pointer)
###############################################
# Prototypes #
###############################################
_lib.myelin_value_new.argtypes = None
_lib.myelin_value_new.restype = ctypes.c_void_p
_lib.myelin_value_ref.argtypes = [Value]
_lib.myelin_value_ref.restype = ctypes.c_void_p
_lib.myelin_value_unref.argtypes = [Value]
_lib.myelin_value_unref.restype = None
_lib.myelin_value_get_type.argtypes = [Value]
_lib.myelin_value_get_type.restype = ctypes.c_void_p
_lib.myelin_value_is_empty.argtypes = [Value]
_lib.myelin_value_is_empty.restype = ctypes.c_bool
_lib.myelin_value_clear.argtypes = [Value]
_lib.myelin_value_clear.restype = None
# boolean
_lib.myelin_value_get_bool.argtypes = [Value]
_lib.myelin_value_get_bool.restype = ctypes.c_bool
_lib.myelin_value_set_bool.argtypes = [Value, ctypes.c_bool]
_lib.myelin_value_set_bool.restype = None
# char
_lib.myelin_value_get_char.argtypes = [Value]
_lib.myelin_value_get_char.restype = ctypes.c_char
_lib.myelin_value_set_char.argtypes = [Value, ctypes.c_char]
_lib.myelin_value_set_char.restype = None
# uchar
_lib.myelin_value_get_uchar.argtypes = [Value]
_lib.myelin_value_get_uchar.restype = ctypes.c_ubyte
_lib.myelin_value_set_uchar.argtypes = [Value, ctypes.c_ubyte]
_lib.myelin_value_set_uchar.restype = None
# integer
_lib.myelin_value_get_int.argtypes = [Value]
_lib.myelin_value_get_int.restype = ctypes.c_int
_lib.myelin_value_set_int.argtypes = [Value, ctypes.c_int]
_lib.myelin_value_set_int.restype = None
# uint
_lib.myelin_value_get_uint.argtypes = [Value]
_lib.myelin_value_get_uint.restype = ctypes.c_uint
_lib.myelin_value_set_uint.argtypes = [Value, ctypes.c_uint]
_lib.myelin_value_set_uint.restype = None
# long
_lib.myelin_value_get_long.argtypes = [Value]
_lib.myelin_value_get_long.restype = ctypes.c_long
_lib.myelin_value_set_long.argtypes = [Value, ctypes.c_long]
_lib.myelin_value_set_long.restype = None
# ulong
_lib.myelin_value_get_ulong.argtypes = [Value]
_lib.myelin_value_get_ulong.restype = ctypes.c_ulong
_lib.myelin_value_set_ulong.argtypes = [Value, ctypes.c_ulong]
_lib.myelin_value_set_ulong.restype = None
# 64bit integer
_lib.myelin_value_get_int64.argtypes = [Value]
_lib.myelin_value_get_int64.restype = ctypes.c_int64
_lib.myelin_value_set_int64.argtypes = [Value, ctypes.c_int64]
_lib.myelin_value_set_int64.restype = None
# unsigned 64bit integer
_lib.myelin_value_get_uint64.argtypes = [Value]
_lib.myelin_value_get_uint64.restype = ctypes.c_uint64
_lib.myelin_value_set_uint64.argtypes = [Value, ctypes.c_uint64]
_lib.myelin_value_set_uint64.restype = None
# float
_lib.myelin_value_get_float.argtypes = [Value]
_lib.myelin_value_get_float.restype = ctypes.c_float
_lib.myelin_value_set_float.argtypes = [Value, ctypes.c_float]
_lib.myelin_value_set_float.restype = None
# double
_lib.myelin_value_get_double.argtypes = [Value]
_lib.myelin_value_get_double.restype = ctypes.c_double
_lib.myelin_value_set_double.argtypes = [Value, ctypes.c_double]
_lib.myelin_value_set_double.restype = None
# string
_lib.myelin_value_get_string.argtypes = [Value]
_lib.myelin_value_get_string.restype = ctypes.c_char_p
_lib.myelin_value_set_string.argtypes = [Value, ctypes.c_char_p]
_lib.myelin_value_set_string.restype = None
# pointer
_lib.myelin_value_as_pointer.argtypes = [Value]
_lib.myelin_value_as_pointer.restype = ctypes.c_void_p
_lib.myelin_value_set_pointer.argtypes = [Value, Type, ctypes.c_void_p]
_lib.myelin_value_set_pointer.restype = None
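# A minimal usage sketch (assumes the native Myelin library was located by
# myelin.library.get_library()):
#   val = Value()
#   val.set_int(42)
#   assert not val.is_empty()
#   assert val.get_int() == 42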
|
gsterjov/Myelin
|
bindings/python/myelin/introspection/value.py
|
Python
|
gpl-3.0
| 11,453 | 0.024797 |
# -*- Mode: Python; py-indent-offset: 4 -*-
# coding=utf-8
# vim: tabstop=4 shiftwidth=4 expandtab
import unittest
import sys
sys.path.insert(0, "../")
import copy
try:
import cairo
has_cairo = True
except ImportError:
has_cairo = False
from gi.repository import GObject
from gi.repository import GLib
from gi.repository import Gio
from gi.repository import Regress as Everything
if sys.version_info < (3, 0):
UNICHAR = "\xe2\x99\xa5"
PY2_UNICODE_UNICHAR = unicode(UNICHAR, 'UTF-8')
else:
UNICHAR = "♥"
class TestEverything(unittest.TestCase):
@unittest.skipUnless(has_cairo, 'built without cairo support')
def test_cairo_context(self):
context = Everything.test_cairo_context_full_return()
self.assertTrue(isinstance(context, cairo.Context))
surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 10, 10)
context = cairo.Context(surface)
Everything.test_cairo_context_none_in(context)
@unittest.skipUnless(has_cairo, 'built without cairo support')
def test_cairo_surface(self):
surface = Everything.test_cairo_surface_none_return()
self.assertTrue(isinstance(surface, cairo.ImageSurface))
self.assertTrue(isinstance(surface, cairo.Surface))
self.assertEqual(surface.get_format(), cairo.FORMAT_ARGB32)
self.assertEqual(surface.get_width(), 10)
self.assertEqual(surface.get_height(), 10)
surface = Everything.test_cairo_surface_full_return()
self.assertTrue(isinstance(surface, cairo.ImageSurface))
self.assertTrue(isinstance(surface, cairo.Surface))
self.assertEqual(surface.get_format(), cairo.FORMAT_ARGB32)
self.assertEqual(surface.get_width(), 10)
self.assertEqual(surface.get_height(), 10)
surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 10, 10)
Everything.test_cairo_surface_none_in(surface)
surface = Everything.test_cairo_surface_full_out()
self.assertTrue(isinstance(surface, cairo.ImageSurface))
self.assertTrue(isinstance(surface, cairo.Surface))
self.assertEqual(surface.get_format(), cairo.FORMAT_ARGB32)
self.assertEqual(surface.get_width(), 10)
self.assertEqual(surface.get_height(), 10)
def test_unichar(self):
self.assertEqual("c", Everything.test_unichar("c"))
if sys.version_info < (3, 0):
self.assertEqual(UNICHAR, Everything.test_unichar(PY2_UNICODE_UNICHAR))
self.assertEqual(UNICHAR, Everything.test_unichar(UNICHAR))
self.assertRaises(TypeError, Everything.test_unichar, "")
self.assertRaises(TypeError, Everything.test_unichar, "morethanonechar")
def test_floating(self):
e = Everything.TestFloating()
self.assertEqual(e.__grefcount__, 1)
e = GObject.new(Everything.TestFloating)
self.assertEqual(e.__grefcount__, 1)
e = Everything.TestFloating.new()
self.assertEqual(e.__grefcount__, 1)
def test_caller_allocates(self):
struct_a = Everything.TestStructA()
struct_a.some_int = 10
struct_a.some_int8 = 21
struct_a.some_double = 3.14
struct_a.some_enum = Everything.TestEnum.VALUE3
struct_a_clone = struct_a.clone()
self.assertTrue(struct_a != struct_a_clone)
self.assertEqual(struct_a.some_int, struct_a_clone.some_int)
self.assertEqual(struct_a.some_int8, struct_a_clone.some_int8)
self.assertEqual(struct_a.some_double, struct_a_clone.some_double)
self.assertEqual(struct_a.some_enum, struct_a_clone.some_enum)
struct_b = Everything.TestStructB()
struct_b.some_int8 = 8
struct_b.nested_a.some_int = 20
struct_b.nested_a.some_int8 = 12
struct_b.nested_a.some_double = 333.3333
struct_b.nested_a.some_enum = Everything.TestEnum.VALUE2
struct_b_clone = struct_b.clone()
self.assertTrue(struct_b != struct_b_clone)
self.assertEqual(struct_b.some_int8, struct_b_clone.some_int8)
self.assertEqual(struct_b.nested_a.some_int, struct_b_clone.nested_a.some_int)
self.assertEqual(struct_b.nested_a.some_int8, struct_b_clone.nested_a.some_int8)
self.assertEqual(struct_b.nested_a.some_double, struct_b_clone.nested_a.some_double)
self.assertEqual(struct_b.nested_a.some_enum, struct_b_clone.nested_a.some_enum)
def test_wrong_type_of_arguments(self):
try:
Everything.test_int8()
except TypeError:
(e_type, e) = sys.exc_info()[:2]
self.assertEqual(e.args, ("test_int8() takes exactly 1 argument (0 given)",))
def test_gtypes(self):
gchararray_gtype = GObject.type_from_name('gchararray')
gtype = Everything.test_gtype(str)
self.assertEqual(gchararray_gtype, gtype)
gtype = Everything.test_gtype('gchararray')
self.assertEqual(gchararray_gtype, gtype)
gobject_gtype = GObject.GObject.__gtype__
gtype = Everything.test_gtype(GObject.GObject)
self.assertEqual(gobject_gtype, gtype)
gtype = Everything.test_gtype('GObject')
self.assertEqual(gobject_gtype, gtype)
self.assertRaises(TypeError, Everything.test_gtype, 'invalidgtype')
class NotARegisteredClass(object):
pass
self.assertRaises(TypeError, Everything.test_gtype, NotARegisteredClass)
class ARegisteredClass(GObject.GObject):
__gtype_name__ = 'EverythingTestsARegisteredClass'
gtype = Everything.test_gtype('EverythingTestsARegisteredClass')
self.assertEqual(ARegisteredClass.__gtype__, gtype)
gtype = Everything.test_gtype(ARegisteredClass)
self.assertEqual(ARegisteredClass.__gtype__, gtype)
self.assertRaises(TypeError, Everything.test_gtype, 'ARegisteredClass')
def test_dir(self):
attr_list = dir(Everything)
# test that typelib attributes are listed
self.assertTrue('TestStructA' in attr_list)
# test that class attributes and methods are listed
self.assertTrue('__class__' in attr_list)
self.assertTrue('__dir__' in attr_list)
self.assertTrue('__repr__' in attr_list)
# test that instance members are listed
self.assertTrue('_namespace' in attr_list)
self.assertTrue('_version' in attr_list)
# test that there are no duplicates returned
self.assertEqual(len(attr_list), len(set(attr_list)))
def test_ptrarray(self):
# transfer container
result = Everything.test_garray_container_return()
self.assertEqual(result, ['regress'])
result = None
# transfer full
result = Everything.test_garray_full_return()
self.assertEqual(result, ['regress'])
result = None
def test_hash_return(self):
result = Everything.test_ghash_gvalue_return()
self.assertEqual(result['integer'], 12)
self.assertEqual(result['boolean'], True)
self.assertEqual(result['string'], 'some text')
self.assertEqual(result['strings'], ['first', 'second', 'third'])
self.assertEqual(result['flags'], Everything.TestFlags.FLAG1 | Everything.TestFlags.FLAG3)
self.assertEqual(result['enum'], Everything.TestEnum.VALUE2)
result = None
def test_hash_in(self):
# specifying a simple string array for "strings" does not work due to
# https://bugzilla.gnome.org/show_bug.cgi?id=666636
# workaround by explicitly building a GStrv object
class GStrv(list):
__gtype__ = GObject.TYPE_STRV
data = {'integer': 12,
'boolean': True,
'string': 'some text',
'strings': GStrv(['first', 'second', 'third']),
'flags': Everything.TestFlags.FLAG1 | Everything.TestFlags.FLAG3,
'enum': Everything.TestEnum.VALUE2,
}
Everything.test_ghash_gvalue_in(data)
data = None
def test_struct_gpointer(self):
l1 = GLib.List()
self.assertEqual(l1.data, None)
init_refcount = sys.getrefcount(l1)
l1.data = 'foo'
self.assertEqual(l1.data, 'foo')
l2 = l1
self.assertEqual(l1.data, l2.data)
self.assertEqual(sys.getrefcount(l1), init_refcount + 1)
l3 = copy.copy(l1)
l3.data = 'bar'
self.assertEqual(l1.data, 'foo')
self.assertEqual(l2.data, 'foo')
self.assertEqual(l3.data, 'bar')
self.assertEqual(sys.getrefcount(l1), init_refcount + 1)
self.assertEqual(sys.getrefcount(l3), init_refcount)
class TestNullableArgs(unittest.TestCase):
def test_in_nullable_hash(self):
Everything.test_ghash_null_in(None)
def test_in_nullable_list(self):
Everything.test_gslist_null_in(None)
Everything.test_glist_null_in(None)
Everything.test_gslist_null_in([])
Everything.test_glist_null_in([])
def test_in_nullable_array(self):
Everything.test_array_int_null_in(None)
Everything.test_array_int_null_in([])
def test_in_nullable_string(self):
Everything.test_utf8_null_in(None)
def test_in_nullable_object(self):
Everything.func_obj_null_in(None)
def test_out_nullable_hash(self):
self.assertEqual(None, Everything.test_ghash_null_out())
def test_out_nullable_list(self):
self.assertEqual([], Everything.test_gslist_null_out())
self.assertEqual([], Everything.test_glist_null_out())
def test_out_nullable_array(self):
self.assertEqual([], Everything.test_array_int_null_out())
def test_out_nullable_string(self):
self.assertEqual(None, Everything.test_utf8_null_out())
def test_out_nullable_object(self):
self.assertEqual(None, Everything.TestObj.null_out())
class TestCallbacks(unittest.TestCase):
called = False
main_loop = GObject.MainLoop()
def test_callback(self):
TestCallbacks.called = False
def callback():
TestCallbacks.called = True
Everything.test_simple_callback(callback)
self.assertTrue(TestCallbacks.called)
def test_callback_exception(self):
"""
This test ensures that we get errors from callbacks correctly
and in particular that we do not segv when callbacks fail
"""
def callback():
x = 1 / 0
            self.fail('unexpected surviving zero division:' + str(x))
# note that we do NOT expect the ZeroDivisionError to be propagated
# through from the callback, as it crosses the Python<->C boundary
# twice. (See GNOME #616279)
Everything.test_simple_callback(callback)
def test_double_callback_exception(self):
"""
This test ensures that we get errors from callbacks correctly
and in particular that we do not segv when callbacks fail
"""
def badcallback():
x = 1 / 0
            self.fail('unexpected surviving zero division:' + str(x))
def callback():
Everything.test_boolean(True)
Everything.test_boolean(False)
Everything.test_simple_callback(badcallback())
# note that we do NOT expect the ZeroDivisionError to be propagated
# through from the callback, as it crosses the Python<->C boundary
# twice. (See GNOME #616279)
Everything.test_simple_callback(callback)
def test_return_value_callback(self):
TestCallbacks.called = False
def callback():
TestCallbacks.called = True
return 44
self.assertEqual(Everything.test_callback(callback), 44)
self.assertTrue(TestCallbacks.called)
def test_callback_async(self):
TestCallbacks.called = False
def callback(foo):
TestCallbacks.called = True
return foo
Everything.test_callback_async(callback, 44)
i = Everything.test_callback_thaw_async()
self.assertEqual(44, i)
self.assertTrue(TestCallbacks.called)
def test_callback_scope_call(self):
TestCallbacks.called = 0
def callback():
TestCallbacks.called += 1
return 0
Everything.test_multi_callback(callback)
self.assertEqual(TestCallbacks.called, 2)
def test_callback_userdata(self):
TestCallbacks.called = 0
def callback(userdata):
self.assertEqual(userdata, "Test%d" % TestCallbacks.called)
TestCallbacks.called += 1
return TestCallbacks.called
for i in range(100):
val = Everything.test_callback_user_data(callback, "Test%d" % i)
self.assertEqual(val, i + 1)
self.assertEqual(TestCallbacks.called, 100)
def test_callback_userdata_refcount(self):
TestCallbacks.called = False
def callback(userdata):
TestCallbacks.called = True
return 1
ud = "Test User Data"
start_ref_count = sys.getrefcount(ud)
for i in range(100):
Everything.test_callback_destroy_notify(callback, ud)
Everything.test_callback_thaw_notifications()
end_ref_count = sys.getrefcount(ud)
self.assertEqual(start_ref_count, end_ref_count)
def test_async_ready_callback(self):
TestCallbacks.called = False
TestCallbacks.main_loop = GObject.MainLoop()
def callback(obj, result, user_data):
TestCallbacks.main_loop.quit()
TestCallbacks.called = True
Everything.test_async_ready_callback(callback)
TestCallbacks.main_loop.run()
self.assertTrue(TestCallbacks.called)
def test_callback_destroy_notify(self):
def callback(user_data):
TestCallbacks.called = True
return 42
TestCallbacks.called = False
self.assertEqual(Everything.test_callback_destroy_notify(callback, 42), 42)
self.assertTrue(TestCallbacks.called)
self.assertEqual(Everything.test_callback_thaw_notifications(), 42)
def test_callback_in_methods(self):
object_ = Everything.TestObj()
def callback():
TestCallbacks.called = True
return 42
TestCallbacks.called = False
object_.instance_method_callback(callback)
self.assertTrue(TestCallbacks.called)
TestCallbacks.called = False
Everything.TestObj.static_method_callback(callback)
self.assertTrue(TestCallbacks.called)
def callbackWithUserData(user_data):
TestCallbacks.called = True
return 42
TestCallbacks.called = False
Everything.TestObj.new_callback(callbackWithUserData, None)
self.assertTrue(TestCallbacks.called)
def test_callback_none(self):
# make sure this doesn't assert or crash
Everything.test_simple_callback(None)
def test_callback_gerror(self):
def callback(error):
self.assertEqual(error.message, 'regression test error')
self.assertTrue('g-io' in error.domain)
self.assertEqual(error.code, Gio.IOErrorEnum.NOT_SUPPORTED)
TestCallbacks.called = True
TestCallbacks.called = False
Everything.test_gerror_callback(callback)
self.assertTrue(TestCallbacks.called)
def test_callback_null_gerror(self):
def callback(error):
self.assertEqual(error, None)
TestCallbacks.called = True
TestCallbacks.called = False
Everything.test_null_gerror_callback(callback)
self.assertTrue(TestCallbacks.called)
def test_callback_owned_gerror(self):
def callback(error):
self.assertEqual(error.message, 'regression test owned error')
self.assertTrue('g-io' in error.domain)
self.assertEqual(error.code, Gio.IOErrorEnum.PERMISSION_DENIED)
TestCallbacks.called = True
TestCallbacks.called = False
Everything.test_owned_gerror_callback(callback)
self.assertTrue(TestCallbacks.called)
def test_callback_hashtable(self):
def callback(data):
self.assertEqual(data, mydict)
mydict['new'] = 42
TestCallbacks.called = True
mydict = {'foo': 1, 'bar': 2}
TestCallbacks.called = False
Everything.test_hash_table_callback(mydict, callback)
self.assertTrue(TestCallbacks.called)
self.assertEqual(mydict, {'foo': 1, 'bar': 2, 'new': 42})
class TestClosures(unittest.TestCase):
def test_int_arg(self):
def callback(num):
self.called = True
return num + 1
self.called = False
result = Everything.test_closure_one_arg(callback, 42)
self.assertTrue(self.called)
self.assertEqual(result, 43)
def test_variant(self):
def callback(variant):
self.called = True
if variant is None:
return None
self.assertEqual(variant.get_type_string(), 'i')
return GLib.Variant('i', variant.get_int32() + 1)
self.called = False
result = Everything.test_closure_variant(callback, GLib.Variant('i', 42))
self.assertTrue(self.called)
self.assertEqual(result.get_type_string(), 'i')
self.assertEqual(result.get_int32(), 43)
self.called = False
result = Everything.test_closure_variant(callback, None)
self.assertTrue(self.called)
self.assertEqual(result, None)
self.called = False
self.assertRaises(TypeError, Everything.test_closure_variant, callback, 'foo')
self.assertFalse(self.called)
class TestProperties(unittest.TestCase):
def test_basic(self):
object_ = Everything.TestObj()
self.assertEqual(object_.props.int, 0)
object_.props.int = 42
self.assertTrue(isinstance(object_.props.int, int))
self.assertEqual(object_.props.int, 42)
self.assertEqual(object_.props.float, 0.0)
object_.props.float = 42.42
self.assertTrue(isinstance(object_.props.float, float))
self.assertAlmostEqual(object_.props.float, 42.42, places=5)
self.assertEqual(object_.props.double, 0.0)
object_.props.double = 42.42
self.assertTrue(isinstance(object_.props.double, float))
self.assertAlmostEqual(object_.props.double, 42.42, places=5)
self.assertEqual(object_.props.string, None)
object_.props.string = 'mec'
self.assertTrue(isinstance(object_.props.string, str))
self.assertEqual(object_.props.string, 'mec')
self.assertEqual(object_.props.gtype, GObject.TYPE_INVALID)
object_.props.gtype = int
self.assertEqual(object_.props.gtype, GObject.TYPE_INT)
def test_hash_table(self):
object_ = Everything.TestObj()
self.assertEqual(object_.props.hash_table, None)
object_.props.hash_table = {'mec': 56}
self.assertTrue(isinstance(object_.props.hash_table, dict))
self.assertEqual(list(object_.props.hash_table.items())[0], ('mec', 56))
def test_list(self):
object_ = Everything.TestObj()
self.assertEqual(object_.props.list, [])
object_.props.list = ['1', '2', '3']
self.assertTrue(isinstance(object_.props.list, list))
self.assertEqual(object_.props.list, ['1', '2', '3'])
def test_boxed(self):
object_ = Everything.TestObj()
self.assertEqual(object_.props.boxed, None)
boxed = Everything.TestBoxed()
boxed.some_int8 = 42
object_.props.boxed = boxed
self.assertTrue(isinstance(object_.props.boxed, Everything.TestBoxed))
self.assertEqual(object_.props.boxed.some_int8, 42)
def test_gtype(self):
object_ = Everything.TestObj()
self.assertEqual(object_.props.gtype, GObject.TYPE_INVALID)
object_.props.gtype = int
self.assertEqual(object_.props.gtype, GObject.TYPE_INT)
object_ = Everything.TestObj(gtype=int)
self.assertEqual(object_.props.gtype, GObject.TYPE_INT)
object_.props.gtype = str
self.assertEqual(object_.props.gtype, GObject.TYPE_STRING)
class TestTortureProfile(unittest.TestCase):
def test_torture_profile(self):
import time
total_time = 0
print("")
object_ = Everything.TestObj()
sys.stdout.write("\ttorture test 1 (10000 iterations): ")
start_time = time.clock()
for i in range(10000):
(y, z, q) = object_.torture_signature_0(5000,
"Torture Test 1",
12345)
end_time = time.clock()
delta_time = end_time - start_time
total_time += delta_time
print("%f secs" % delta_time)
sys.stdout.write("\ttorture test 2 (10000 iterations): ")
start_time = time.clock()
for i in range(10000):
(y, z, q) = Everything.TestObj().torture_signature_0(
5000, "Torture Test 2", 12345)
end_time = time.clock()
delta_time = end_time - start_time
total_time += delta_time
print("%f secs" % delta_time)
sys.stdout.write("\ttorture test 3 (10000 iterations): ")
start_time = time.clock()
for i in range(10000):
try:
(y, z, q) = object_.torture_signature_1(
5000, "Torture Test 3", 12345)
except:
pass
end_time = time.clock()
delta_time = end_time - start_time
total_time += delta_time
print("%f secs" % delta_time)
sys.stdout.write("\ttorture test 4 (10000 iterations): ")
def callback(userdata):
pass
userdata = [1, 2, 3, 4]
start_time = time.clock()
for i in range(10000):
(y, z, q) = Everything.test_torture_signature_2(
5000, callback, userdata, "Torture Test 4", 12345)
end_time = time.clock()
delta_time = end_time - start_time
total_time += delta_time
print("%f secs" % delta_time)
print("\t====")
print("\tTotal: %f sec" % total_time)
class TestAdvancedInterfaces(unittest.TestCase):
def test_array_objs(self):
obj1, obj2 = Everything.test_array_fixed_out_objects()
self.assertTrue(isinstance(obj1, Everything.TestObj))
self.assertTrue(isinstance(obj2, Everything.TestObj))
self.assertNotEqual(obj1, obj2)
def test_obj_skip_return_val(self):
obj = Everything.TestObj()
ret = obj.skip_return_val(50, 42.0, 60, 2, 3)
self.assertEqual(len(ret), 3)
self.assertEqual(ret[0], 51)
self.assertEqual(ret[1], 61)
self.assertEqual(ret[2], 32)
def test_obj_skip_return_val_no_out(self):
obj = Everything.TestObj()
# raises an error for 0, succeeds for any other value
self.assertRaises(GLib.GError, obj.skip_return_val_no_out, 0)
ret = obj.skip_return_val_no_out(1)
self.assertEqual(ret, None)
class TestSignals(unittest.TestCase):
def test_object_param_signal(self):
obj = Everything.TestObj()
def callback(obj, obj_param):
self.assertEqual(obj_param.props.int, 3)
self.assertGreater(obj_param.__grefcount__, 1)
obj.connect('sig-with-obj', callback)
obj.emit_sig_with_obj()
|
jdahlin/pygobject
|
tests/test_everything.py
|
Python
|
lgpl-2.1
| 23,544 | 0.000935 |
"""
Unit tests for the red2d (3-7-column) reader
"""
import warnings
warnings.simplefilter("ignore")
import unittest
from sas.sascalc.dataloader.loader import Loader
import os.path
class abs_reader(unittest.TestCase):
def setUp(self):
self.loader = Loader()
def test_checkdata(self):
"""
Test .DAT file loaded as IGOR/DAT 2D Q_map
"""
f = self.loader.load("exp18_14_igor_2dqxqy.dat")[0]
        # The data set contains 36864 points
self.assertEqual(len(f.qx_data), 36864)
self.assertEqual(f.qx_data[0],-0.03573497)
self.assertEqual(f.qx_data[36863],0.2908819)
self.assertEqual(f.Q_unit, '1/A')
self.assertEqual(f.I_unit, '1/cm')
self.assertEqual(f.meta_data['loader'],"IGOR/DAT 2D Q_map")
if __name__ == '__main__':
unittest.main()
|
lewisodriscoll/sasview
|
test/sasdataloader/test/utest_red2d_reader.py
|
Python
|
bsd-3-clause
| 844 | 0.009479 |
#
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
from ast import literal_eval
from threading import Thread
from ovirtscheduler import utils
class PythonMethodRunner(Thread):
def __init__(self, path, module, cls, method, args, request_id=''):
super(PythonMethodRunner, self).__init__(group=None)
logger = logging.getLogger()
self._log_adapter = utils.RequestAdapter(
logger,
{'method': 'PythonMethodRunner',
'request_id': request_id})
self._path = path
self._result = None
self._error = None
self._process = None
self._script = self.createScript(module, cls, method, args)
self.request_id = request_id
def run(self):
try:
self._log_adapter.debug(
'running %s in %s' % (self._script, self._path))
self._process = utils.createProcess(self._script, self._path)
(result, error) = self._process.communicate()
if not isinstance(result, str):
result = result.decode()
try:
self._result = literal_eval(result)
except Exception as ex:
if not error:
self._error = "Unable to parse result: %s" \
" got error : %s " % (result, ex)
if error:
self._error = error
except Exception as ex:
self._error = ex
if self._error:
self._log_adapter.error("script %s got error %s" %
(self._script, self._error))
def getResults(self):
return self._result
def getErrors(self):
return self._error
def getReturnCode(self):
return self._process.returncode
def stop(self):
return utils.killProcess(self._process)
def createScript(self, module, cls, method, args):
command_template = "import {m}; {m}.{c}().{method}{args}"
command_string = command_template\
.format(m=module,
c=cls,
method=method,
args=repr(utils.createFunctionArgs(args)))
return ["python3", "-c", command_string]
|
oVirt/ovirt-scheduler-proxy
|
src/ovirtscheduler/runner.py
|
Python
|
apache-2.0
| 2,775 | 0 |
import keyword
import sys
import warnings
import rope.base.codeanalyze
import rope.base.evaluate
from rope.base import pyobjects, pyobjectsdef, pynames, builtins, exceptions, worder
from rope.base.codeanalyze import SourceLinesAdapter
from rope.contrib import fixsyntax
from rope.refactor import functionutils
def code_assist(project, source_code, offset, resource=None,
templates=None, maxfixes=1, later_locals=True):
"""Return python code completions as a list of `CodeAssistProposal`\s
`resource` is a `rope.base.resources.Resource` object. If
provided, relative imports are handled.
`maxfixes` is the maximum number of errors to fix if the code has
errors in it.
    If `later_locals` is `False`, names defined in this scope and after
    this line are ignored.
"""
if templates is not None:
warnings.warn('Codeassist no longer supports templates',
DeprecationWarning, stacklevel=2)
assist = _PythonCodeAssist(
project, source_code, offset, resource=resource,
maxfixes=maxfixes, later_locals=later_locals)
return assist()
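# Minimal usage sketch (assumes an existing rope project on disk; the
# project path and source string are illustrative only):
#
#   from rope.base.project import Project
#   project = Project('/path/to/project')
#   source = 'import math\nmath.'
#   proposals = code_assist(project, source, len(source))
#   proposals = sorted_proposals(proposals)  # helper defined later in module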
def starting_offset(source_code, offset):
"""Return the offset in which the completion should be inserted
Usually code assist proposals should be inserted like::
completion = proposal.name
result = (source_code[:starting_offset] +
completion + source_code[offset:])
Where starting_offset is the offset returned by this function.
"""
word_finder = worder.Worder(source_code, True)
expression, starting, starting_offset = \
word_finder.get_splitted_primary_before(offset)
return starting_offset
def get_doc(project, source_code, offset, resource=None, maxfixes=1):
"""Get the pydoc"""
fixer = fixsyntax.FixSyntax(project.pycore, source_code,
resource, maxfixes)
pymodule = fixer.get_pymodule()
pyname = fixer.pyname_at(offset)
if pyname is None:
return None
pyobject = pyname.get_object()
return PyDocExtractor().get_doc(pyobject)
def get_calltip(project, source_code, offset, resource=None,
maxfixes=1, ignore_unknown=False, remove_self=False):
"""Get the calltip of a function
The format of the returned string is
    ``module_name.holding_scope_names.function_name(arguments)``. For
    classes the `__init__()` method is used and for normal objects the
    `__call__()` method is used.
    Note that the offset is on the function itself, *not* after its
open parenthesis. (Actually it used to be the other way but it
was easily confused when string literals were involved. So I
decided it is better for it not to try to be too clever when it
cannot be clever enough). You can use a simple search like::
offset = source_code.rindex('(', 0, offset) - 1
to handle simple situations.
If `ignore_unknown` is `True`, `None` is returned for functions
without source-code like builtins and extensions.
If `remove_self` is `True`, the first parameter whose name is self
will be removed for methods.
"""
fixer = fixsyntax.FixSyntax(project.pycore, source_code,
resource, maxfixes)
pymodule = fixer.get_pymodule()
pyname = fixer.pyname_at(offset)
if pyname is None:
return None
pyobject = pyname.get_object()
return PyDocExtractor().get_calltip(pyobject, ignore_unknown, remove_self)
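# Calltip usage sketch (offsets are illustrative; as the docstring above
# notes, the offset must point at the function name itself):
#
#   source = 'import math\nmath.pow(2, 3)'
#   offset = source.rindex('(', 0, len(source)) - 1
#   tip = get_calltip(project, source, offset)  # e.g. 'math.pow(x, y)'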
def get_definition_location(project, source_code, offset,
resource=None, maxfixes=1):
"""Return the definition location of the python name at `offset`
Return a (`rope.base.resources.Resource`, lineno) tuple. If no
`resource` is given and the definition is inside the same module,
the first element of the returned tuple would be `None`. If the
location cannot be determined ``(None, None)`` is returned.
"""
fixer = fixsyntax.FixSyntax(project.pycore, source_code,
resource, maxfixes)
pymodule = fixer.get_pymodule()
pyname = fixer.pyname_at(offset)
if pyname is not None:
module, lineno = pyname.get_definition_location()
if module is not None:
return module.get_module().get_resource(), lineno
return (None, None)
def find_occurrences(*args, **kwds):
import rope.contrib.findit
warnings.warn('Use `rope.contrib.findit.find_occurrences()` instead',
DeprecationWarning, stacklevel=2)
return rope.contrib.findit.find_occurrences(*args, **kwds)
class CompletionProposal(object):
"""A completion proposal
The `scope` instance variable shows where proposed name came from
and can be 'global', 'local', 'builtin', 'attribute', 'keyword',
'imported', 'parameter_keyword'.
The `type` instance variable shows the approximate type of the
proposed object and can be 'instance', 'class', 'function', 'module',
and `None`.
All possible relations between proposal's `scope` and `type` are shown
in the table below (different scopes in rows and types in columns):
| instance | class | function | module | None
local | + | + | + | + |
global | + | + | + | + |
builtin | + | + | + | |
attribute | + | + | + | + |
imported | + | + | + | + |
keyword | | | | | +
parameter_keyword | | | | | +
"""
def __init__(self, name, scope, pyname=None):
self.name = name
self.pyname = pyname
self.scope = self._get_scope(scope)
def __str__(self):
return '%s (%s, %s)' % (self.name, self.scope, self.type)
def __repr__(self):
return str(self)
@property
def parameters(self):
"""The names of the parameters the function takes.
Returns None if this completion is not a function.
"""
pyname = self.pyname
if isinstance(pyname, pynames.ImportedName):
pyname = pyname._get_imported_pyname()
if isinstance(pyname, pynames.DefinedName):
pyobject = pyname.get_object()
if isinstance(pyobject, pyobjects.AbstractFunction):
return pyobject.get_param_names()
@property
def type(self):
pyname = self.pyname
if isinstance(pyname, builtins.BuiltinName):
pyobject = pyname.get_object()
if isinstance(pyobject, builtins.BuiltinFunction):
return 'function'
elif isinstance(pyobject, builtins.BuiltinClass):
                return 'class'
elif isinstance(pyobject, builtins.BuiltinObject) or \
isinstance(pyobject, builtins.BuiltinName):
return 'instance'
elif isinstance(pyname, pynames.ImportedModule):
return 'module'
elif isinstance(pyname, pynames.ImportedName) or \
isinstance(pyname, pynames.DefinedName):
pyobject = pyname.get_object()
if isinstance(pyobject, pyobjects.AbstractFunction):
return 'function'
if isinstance(pyobject, pyobjects.AbstractClass):
return 'class'
return 'instance'
def _get_scope(self, scope):
if isinstance(self.pyname, builtins.BuiltinName):
return 'builtin'
if isinstance(self.pyname, pynames.ImportedModule) or \
isinstance(self.pyname, pynames.ImportedName):
return 'imported'
return scope
def get_doc(self):
"""Get the proposed object's docstring.
        Returns None if it cannot be retrieved.
"""
if not self.pyname:
return None
pyobject = self.pyname.get_object()
if not hasattr(pyobject, 'get_doc'):
return None
return self.pyname.get_object().get_doc()
@property
def kind(self):
warnings.warn("the proposal's `kind` property is deprecated, " \
"use `scope` instead")
return self.scope
# left for backward compatibility
CodeAssistProposal = CompletionProposal
class NamedParamProposal(CompletionProposal):
"""A parameter keyword completion proposal
    Holds a reference to ``_function`` -- the function to which
    parameter ``name`` belongs. This makes it possible to determine
    the default value for this parameter.
"""
def __init__(self, name, function):
self.argname = name
name = '%s=' % name
super(NamedParamProposal, self).__init__(name, 'parameter_keyword')
self._function = function
def get_default(self):
"""Get a string representation of a param's default value.
Returns None if there is no default value for this param.
"""
definfo = functionutils.DefinitionInfo.read(self._function)
for arg, default in definfo.args_with_defaults:
if self.argname == arg:
return default
return None
def sorted_proposals(proposals, scopepref=None, typepref=None):
"""Sort a list of proposals
Return a sorted list of the given `CodeAssistProposal`\s.
`scopepref` can be a list of proposal scopes. Defaults to
``['parameter_keyword', 'local', 'global', 'imported',
'attribute', 'builtin', 'keyword']``.
`typepref` can be a list of proposal types. Defaults to
``['class', 'function', 'instance', 'module', None]``.
(`None` stands for completions with no type like keywords.)
"""
sorter = _ProposalSorter(proposals, scopepref, typepref)
return sorter.get_sorted_proposal_list()
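# Preference sketch (lists are illustrative): rank local functions first,
# then fall back to the remaining scopes in their default order:
#
#   ranked = sorted_proposals(proposals,
#                             scopepref=['local', 'parameter_keyword',
#                                        'global', 'imported', 'attribute',
#                                        'builtin', 'keyword'],
#                             typepref=['function', 'class', 'instance',
#                                       'module', None])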
def starting_expression(source_code, offset):
"""Return the expression to complete"""
word_finder = worder.Worder(source_code, True)
expression, starting, starting_offset = \
word_finder.get_splitted_primary_before(offset)
if expression:
return expression + '.' + starting
return starting
def default_templates():
warnings.warn('default_templates() is deprecated.',
DeprecationWarning, stacklevel=2)
return {}
class _PythonCodeAssist(object):
def __init__(self, project, source_code, offset, resource=None,
maxfixes=1, later_locals=True):
self.project = project
self.pycore = self.project.pycore
self.code = source_code
self.resource = resource
self.maxfixes = maxfixes
self.later_locals = later_locals
self.word_finder = worder.Worder(source_code, True)
self.expression, self.starting, self.offset = \
self.word_finder.get_splitted_primary_before(offset)
keywords = keyword.kwlist
def _find_starting_offset(self, source_code, offset):
current_offset = offset - 1
while current_offset >= 0 and (source_code[current_offset].isalnum() or
source_code[current_offset] in '_'):
            current_offset -= 1
return current_offset + 1
def _matching_keywords(self, starting):
result = []
for kw in self.keywords:
if kw.startswith(starting):
result.append(CompletionProposal(kw, 'keyword'))
return result
def __call__(self):
if self.offset > len(self.code):
return []
completions = list(self._code_completions().values())
if self.expression.strip() == '' and self.starting.strip() != '':
completions.extend(self._matching_keywords(self.starting))
return completions
def _dotted_completions(self, module_scope, holding_scope):
result = {}
found_pyname = rope.base.evaluate.eval_str(holding_scope,
self.expression)
if found_pyname is not None:
element = found_pyname.get_object()
compl_scope = 'attribute'
if isinstance(element, (pyobjectsdef.PyModule,
pyobjectsdef.PyPackage)):
compl_scope = 'imported'
for name, pyname in element.get_attributes().items():
if name.startswith(self.starting):
result[name] = CompletionProposal(name, compl_scope, pyname)
return result
def _undotted_completions(self, scope, result, lineno=None):
        if scope.parent is not None:
self._undotted_completions(scope.parent, result)
if lineno is None:
names = scope.get_propagated_names()
else:
names = scope.get_names()
for name, pyname in names.items():
if name.startswith(self.starting):
compl_scope = 'local'
if scope.get_kind() == 'Module':
compl_scope = 'global'
if lineno is None or self.later_locals or \
not self._is_defined_after(scope, pyname, lineno):
result[name] = CompletionProposal(name, compl_scope,
pyname)
def _from_import_completions(self, pymodule):
module_name = self.word_finder.get_from_module(self.offset)
if module_name is None:
return {}
pymodule = self._find_module(pymodule, module_name)
result = {}
for name in pymodule:
if name.startswith(self.starting):
result[name] = CompletionProposal(name, scope='global',
pyname=pymodule[name])
return result
def _find_module(self, pymodule, module_name):
dots = 0
while module_name[dots] == '.':
dots += 1
pyname = pynames.ImportedModule(pymodule,
module_name[dots:], dots)
return pyname.get_object()
def _is_defined_after(self, scope, pyname, lineno):
location = pyname.get_definition_location()
if location is not None and location[1] is not None:
if location[0] == scope.pyobject.get_module() and \
lineno <= location[1] <= scope.get_end():
return True
def _code_completions(self):
lineno = self.code.count('\n', 0, self.offset) + 1
fixer = fixsyntax.FixSyntax(self.pycore, self.code,
self.resource, self.maxfixes)
pymodule = fixer.get_pymodule()
module_scope = pymodule.get_scope()
code = pymodule.source_code
lines = code.split('\n')
result = {}
start = fixsyntax._logical_start(lines, lineno)
indents = fixsyntax._get_line_indents(lines[start - 1])
inner_scope = module_scope.get_inner_scope_for_line(start, indents)
if self.word_finder.is_a_name_after_from_import(self.offset):
return self._from_import_completions(pymodule)
if self.expression.strip() != '':
result.update(self._dotted_completions(module_scope, inner_scope))
else:
result.update(self._keyword_parameters(module_scope.pyobject,
inner_scope))
self._undotted_completions(inner_scope, result, lineno=lineno)
return result
def _keyword_parameters(self, pymodule, scope):
offset = self.offset
if offset == 0:
return {}
word_finder = worder.Worder(self.code, True)
lines = SourceLinesAdapter(self.code)
lineno = lines.get_line_number(offset)
if word_finder.is_on_function_call_keyword(offset - 1):
name_finder = rope.base.evaluate.ScopeNameFinder(pymodule)
function_parens = word_finder.\
find_parens_start_from_inside(offset - 1)
primary = word_finder.get_primary_at(function_parens - 1)
try:
function_pyname = rope.base.evaluate.\
eval_str(scope, primary)
            except exceptions.BadIdentifierError:
return {}
if function_pyname is not None:
pyobject = function_pyname.get_object()
if isinstance(pyobject, pyobjects.AbstractFunction):
pass
elif isinstance(pyobject, pyobjects.AbstractClass) and \
'__init__' in pyobject:
pyobject = pyobject['__init__'].get_object()
elif '__call__' in pyobject:
pyobject = pyobject['__call__'].get_object()
if isinstance(pyobject, pyobjects.AbstractFunction):
param_names = []
param_names.extend(
pyobject.get_param_names(special_args=False))
result = {}
for name in param_names:
if name.startswith(self.starting):
result[name + '='] = NamedParamProposal(
name, pyobject
)
return result
return {}
class _ProposalSorter(object):
"""Sort a list of code assist proposals"""
def __init__(self, code_assist_proposals, scopepref=None, typepref=None):
self.proposals = code_assist_proposals
if scopepref is None:
scopepref = ['parameter_keyword', 'local', 'global', 'imported',
'attribute', 'builtin', 'keyword']
self.scopepref = scopepref
if typepref is None:
typepref = ['class', 'function', 'instance', 'module', None]
self.typerank = dict((type, index)
for index, type in enumerate(typepref))
def get_sorted_proposal_list(self):
"""Return a list of `CodeAssistProposal`"""
proposals = {}
for proposal in self.proposals:
proposals.setdefault(proposal.scope, []).append(proposal)
result = []
for scope in self.scopepref:
scope_proposals = proposals.get(scope, [])
scope_proposals = [proposal for proposal in scope_proposals
if proposal.type in self.typerank]
            scope_proposals.sort(key=self._proposal_cmp)
result.extend(scope_proposals)
return result
def _proposal_cmp(self, proposal):
def underline_count(name):
result = 0
while result < len(name) and name[result] == '_':
result += 1
return result
return (self.typerank.get(proposal.type, 100), underline_count(proposal.name), proposal.name)
    def _compare_underlined_names(self, name1, name2):
        def underline_count(name):
            result = 0
            while result < len(name) and name[result] == '_':
                result += 1
            return result
        # Python 3 has no builtin cmp(); emulate it with ordered key tuples.
        key1 = (underline_count(name1), name1)
        key2 = (underline_count(name2), name2)
        return (key1 > key2) - (key1 < key2)
class PyDocExtractor(object):
def get_doc(self, pyobject):
if isinstance(pyobject, pyobjects.AbstractFunction):
return self._get_function_docstring(pyobject)
elif isinstance(pyobject, pyobjects.AbstractClass):
return self._get_class_docstring(pyobject)
elif isinstance(pyobject, pyobjects.AbstractModule):
return self._trim_docstring(pyobject.get_doc())
return None
def get_calltip(self, pyobject, ignore_unknown=False, remove_self=False):
try:
if isinstance(pyobject, pyobjects.AbstractClass):
pyobject = pyobject['__init__'].get_object()
if not isinstance(pyobject, pyobjects.AbstractFunction):
pyobject = pyobject['__call__'].get_object()
except exceptions.AttributeNotFoundError:
return None
if ignore_unknown and not isinstance(pyobject, pyobjects.PyFunction):
return
if isinstance(pyobject, pyobjects.AbstractFunction):
result = self._get_function_signature(pyobject, add_module=True)
if remove_self and self._is_method(pyobject):
return result.replace('(self)', '()').replace('(self, ', '(')
return result
def _get_class_docstring(self, pyclass):
contents = self._trim_docstring(pyclass.get_doc(), 2)
supers = [super.get_name() for super in pyclass.get_superclasses()]
doc = 'class %s(%s):\n\n' % (pyclass.get_name(), ', '.join(supers)) + contents
if '__init__' in pyclass:
init = pyclass['__init__'].get_object()
if isinstance(init, pyobjects.AbstractFunction):
doc += '\n\n' + self._get_single_function_docstring(init)
return doc
def _get_function_docstring(self, pyfunction):
functions = [pyfunction]
if self._is_method(pyfunction):
functions.extend(self._get_super_methods(pyfunction.parent,
pyfunction.get_name()))
return '\n\n'.join([self._get_single_function_docstring(function)
for function in functions])
def _is_method(self, pyfunction):
return isinstance(pyfunction, pyobjects.PyFunction) and \
isinstance(pyfunction.parent, pyobjects.PyClass)
def _get_single_function_docstring(self, pyfunction):
signature = self._get_function_signature(pyfunction)
docs = self._trim_docstring(pyfunction.get_doc(), indents=2)
return signature + ':\n\n' + docs
def _get_super_methods(self, pyclass, name):
result = []
for super_class in pyclass.get_superclasses():
if name in super_class:
function = super_class[name].get_object()
if isinstance(function, pyobjects.AbstractFunction):
result.append(function)
result.extend(self._get_super_methods(super_class, name))
return result
def _get_function_signature(self, pyfunction, add_module=False):
location = self._location(pyfunction, add_module)
if isinstance(pyfunction, pyobjects.PyFunction):
info = functionutils.DefinitionInfo.read(pyfunction)
return location + info.to_string()
else:
return '%s(%s)' % (location + pyfunction.get_name(),
', '.join(pyfunction.get_param_names()))
def _location(self, pyobject, add_module=False):
location = []
parent = pyobject.parent
while parent and not isinstance(parent, pyobjects.AbstractModule):
location.append(parent.get_name())
location.append('.')
parent = parent.parent
if add_module:
if isinstance(pyobject, pyobjects.PyFunction):
location.insert(0, self._get_module(pyobject))
if isinstance(parent, builtins.BuiltinModule):
location.insert(0, parent.get_name() + '.')
return ''.join(location)
def _get_module(self, pyfunction):
module = pyfunction.get_module()
if module is not None:
resource = module.get_resource()
if resource is not None:
return pyfunction.pycore.modname(resource) + '.'
return ''
def _trim_docstring(self, docstring, indents=0):
"""The sample code from :PEP:`257`"""
if not docstring:
return ''
# Convert tabs to spaces (following normal Python rules)
# and split into a list of lines:
lines = docstring.expandtabs().splitlines()
# Determine minimum indentation (first line doesn't count):
indent = sys.maxsize
for line in lines[1:]:
stripped = line.lstrip()
if stripped:
indent = min(indent, len(line) - len(stripped))
# Remove indentation (first line is special):
trimmed = [lines[0].strip()]
if indent < sys.maxsize:
for line in lines[1:]:
trimmed.append(line[indent:].rstrip())
# Strip off trailing and leading blank lines:
while trimmed and not trimmed[-1]:
trimmed.pop()
while trimmed and not trimmed[0]:
trimmed.pop(0)
# Return a single string:
return '\n'.join((' ' * indents + line for line in trimmed))
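# Trimming sketch: a ragged docstring is normalized and re-indented by
# `indents` spaces (traced against the logic above):
#
#   PyDocExtractor()._trim_docstring('first\n      second\n', indents=2)
#   # -> '  first\n  second'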
# Deprecated classes
class TemplateProposal(CodeAssistProposal):
def __init__(self, name, template):
warnings.warn('TemplateProposal is deprecated.',
DeprecationWarning, stacklevel=2)
super(TemplateProposal, self).__init__(name, 'template')
self.template = template
class Template(object):
def __init__(self, template):
self.template = template
warnings.warn('Template is deprecated.',
DeprecationWarning, stacklevel=2)
def variables(self):
return []
def substitute(self, mapping):
return self.template
def get_cursor_location(self, mapping):
return len(self.template)
|
JetChars/vim
|
vim/bundle/python-mode/pymode/libs3/rope/contrib/codeassist.py
|
Python
|
apache-2.0
| 25,419 | 0.000669 |
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import shutil
import tempfile
from pants.base.build_invalidator import CacheKey, CacheKeyGenerator
from pants.base.cache_manager import InvalidationCacheManager, InvalidationCheck, VersionedTarget
from pants_test.base_test import BaseTest
class AppendingCacheKeyGenerator(CacheKeyGenerator):
"""Generates cache keys for versions of target sets."""
@staticmethod
def combine_cache_keys(cache_keys):
if len(cache_keys) == 1:
return cache_keys[0]
else:
sorted_cache_keys = sorted(cache_keys) # For commutativity.
combined_id = ','.join([cache_key.id for cache_key in sorted_cache_keys])
combined_hash = ','.join([cache_key.hash for cache_key in sorted_cache_keys])
combined_num_sources = reduce(lambda x, y: x + y,
[cache_key.num_sources for cache_key in sorted_cache_keys], 0)
return CacheKey(combined_id, combined_hash, combined_num_sources)
def key_for_target(self, target, sources=None, transitive=False, fingerprint_strategy=None):
return CacheKey(target.id, target.id, target.num_chunking_units)
def key_for(self, tid, sources):
return CacheKey(tid, tid, len(sources))
def print_vt(vt):
print('%d (%s) %s: [ %s ]' % (len(vt.targets), vt.cache_key, vt.valid, ', '.join(['%s(%s)' % (v.id, v.cache_key) for v in vt.versioned_targets])))
class InvalidationCacheManagerTest(BaseTest):
class TestInvalidationCacheManager(InvalidationCacheManager):
def __init__(self, tmpdir):
InvalidationCacheManager.__init__(self, AppendingCacheKeyGenerator(), tmpdir, True, None)
def setUp(self):
super(InvalidationCacheManagerTest, self).setUp()
self._dir = tempfile.mkdtemp()
self.cache_manager = InvalidationCacheManagerTest.TestInvalidationCacheManager(self._dir)
def tearDown(self):
shutil.rmtree(self._dir, ignore_errors=True)
super(InvalidationCacheManagerTest, self).tearDown()
def make_vts(self, target):
return VersionedTarget(self.cache_manager, target, target.id)
def test_partition(self):
# The default EmptyPayload chunking unit happens to be 1, so each of these Targets
# has a chunking unit contribution of 1
a = self.make_target(':a', dependencies=[])
b = self.make_target(':b', dependencies=[a])
c = self.make_target(':c', dependencies=[b])
d = self.make_target(':d', dependencies=[c, a])
e = self.make_target(':e', dependencies=[d])
targets = [a, b, c, d, e]
def print_partitions(partitions):
strs = []
for partition in partitions:
strs.append('(%s)' % ', '.join([t.id for t in partition.targets]))
print('[%s]' % ' '.join(strs))
# Verify basic data structure soundness.
all_vts = self.cache_manager._wrap_targets(targets)
invalid_vts = filter(lambda vt: not vt.valid, all_vts)
self.assertEquals(5, len(invalid_vts))
self.assertEquals(5, len(all_vts))
vts_targets = [vt.targets[0] for vt in all_vts]
self.assertEquals(set(targets), set(vts_targets))
# Test a simple partition.
ic = InvalidationCheck(all_vts, [], 3)
partitioned = ic.all_vts_partitioned
print_partitions(partitioned)
# Several correct partitionings are possible, but in all cases 4 1-source targets will be
# added to the first partition before it exceeds the limit of 3, and the final target will
# be in a partition by itself.
self.assertEquals(2, len(partitioned))
self.assertEquals(4, len(partitioned[0].targets))
self.assertEquals(1, len(partitioned[1].targets))
# Test partition with colors.
red = 'red'
blue = 'blue'
colors = {
a: blue,
b: red,
c: red,
d: red,
e: blue
}
# As a reference, we partition without colors.
ic = InvalidationCheck(all_vts, [], 2)
partitioned = ic.all_vts_partitioned
print_partitions(partitioned)
self.assertEquals(2, len(partitioned))
self.assertEquals(3, len(partitioned[0].targets))
self.assertEquals(2, len(partitioned[1].targets))
# Now apply color restrictions.
ic = InvalidationCheck(all_vts, [], 2, target_colors=colors)
partitioned = ic.all_vts_partitioned
print_partitions(partitioned)
self.assertEquals(3, len(partitioned))
self.assertEquals(1, len(partitioned[0].targets))
self.assertEquals(3, len(partitioned[1].targets))
self.assertEquals(1, len(partitioned[2].targets))
|
pgroudas/pants
|
tests/python/pants_test/tasks/test_cache_manager.py
|
Python
|
apache-2.0
| 4,697 | 0.006813 |