| text | repo_name | path | language | license | size | score |
|---|---|---|---|---|---|---|
| string, 6–947k chars | string, 5–100 chars | string, 4–231 chars | string, 1 class | string, 15 classes | int64, 6–947k | float64, 0–0.34 |
# coding=utf-8
from safe.common.exceptions import NoAttributeInLayerError
from safe.impact_functions.bases.utilities import check_attribute_exist
__author__ = 'Rizky Maulana Nugraha "lucernae" <lana.pcfre@gmail.com>'
__date__ = '08/05/15'
class ClassifiedVectorExposureMixin(object):
def __init__(self):
self._exposure_class_attribute = None
self._exposure_unique_values = None
@property
def exposure_class_attribute(self):
return self._exposure_class_attribute
@exposure_class_attribute.setter
def exposure_class_attribute(self, value):
# self.exposure comes from the base impact function (IF).
exposure_layer = self.exposure.qgis_vector_layer()
if (exposure_layer and
check_attribute_exist(exposure_layer, value)):
self._exposure_class_attribute = value
else:
message = ('The attribute "%s" does not exist in the exposure '
'layer.') % value
raise NoAttributeInLayerError(message)
# finding unique values in layer
if exposure_layer:
attr_index = exposure_layer.dataProvider().\
fieldNameIndex(value)
unique_list = list()
for feature in exposure_layer.getFeatures():
feature_value = feature.attributes()[attr_index]
if feature_value not in unique_list:
unique_list.append(feature_value)
self.exposure_unique_values = unique_list
@property
def exposure_unique_values(self):
return self._exposure_unique_values
@exposure_unique_values.setter
def exposure_unique_values(self, value):
self._exposure_unique_values = value
| cchristelis/inasafe | safe/impact_functions/bases/layer_types/classified_vector_exposure.py | Python | gpl-3.0 | 1,719 | 0 |
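The mixin above couples attribute validation with caching of the attribute's unique values in one property setter. A minimal standalone sketch of that pattern (not InaSAFE code; `FakeLayer` and `ExposureHolder` are hypothetical stand-ins for the QGIS layer and impact function):

```python
class FakeLayer:
    """Hypothetical stand-in for a QGIS vector layer."""
    def __init__(self, field, values):
        self._field = field
        self._values = values

    def fields(self):
        return [self._field]

    def values(self, field):
        return self._values


class ExposureHolder:
    def __init__(self, layer):
        self._layer = layer
        self._attribute = None
        self.unique_values = None

    @property
    def attribute(self):
        return self._attribute

    @attribute.setter
    def attribute(self, name):
        # Validate before accepting, exactly as the mixin's setter does.
        if name not in self._layer.fields():
            raise ValueError('The attribute "%s" does not exist.' % name)
        self._attribute = name
        # dict.fromkeys de-duplicates while preserving first-seen order.
        self.unique_values = list(dict.fromkeys(self._layer.values(name)))


holder = ExposureHolder(FakeLayer('type', ['school', 'clinic', 'school']))
holder.attribute = 'type'
print(holder.unique_values)  # ['school', 'clinic']
```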
# Copyright (C) 2010 Trinity Western University
from cube.books.models import Book
from cube.twupass.settings import TWUPASS_LOGOUT_URL
from django.contrib.auth.models import User
from django.contrib import admin
from django.conf.urls.defaults import *
from django.views.generic.simple import direct_to_template, redirect_to
admin.autodiscover()
urlpatterns = patterns('',
url(r'^twupass-logout/$', redirect_to, {'url': TWUPASS_LOGOUT_URL},
name="twupass-logout"),
url(r'^help/$', direct_to_template, {'template' : 'help.html'},
name="help"),
(r'^admin/doc/', include('django.contrib.admindocs.urls')),
(r'^admin/(.*)', admin.site.root),
)
urlpatterns += patterns('cube.twupass.views',
(r'^$', 'login_cube'),
(r'^logout/$', 'logout_cube')
)
urlpatterns += patterns('cube.books.views.books',
url(r'^books/$', 'book_list', name="list"),
url(r'^books/update/book/$', 'update_book', name="update_book"),
url(r'^books/update/book/edit/$', 'update_book_edit',
name="update_book_edit"),
url(r'books/update/remove_holds_by_user/$', 'remove_holds_by_user',
name="remove_holds_by_user"),
url(r'^add_book/$', 'add_book', name="add_book"),
url(r'^add_new_book/$', 'add_new_book', name="add_new_book"),
url(r'^attach_book/$', 'attach_book', name="attach_book"),
url(r'^my_books/$', 'my_books', name="my_books"),
)
urlpatterns += patterns('cube.books.views.reports',
url(r'^reports/$', 'menu', name="reports_menu"),
url(r'^reports/per_status/$', 'per_status', name='per_status'),
url(r'^reports/books_sold_within_date/$', 'books_sold_within_date',
name='books_sold_within_date'),
url(r'^reports/user/(\d+)/$', 'user', name='user'),
url(r'^reports/book/(\d+)/$', 'book', name='book'),
url(r'^reports/metabook/(\d+)/$', 'metabook', name='metabook'),
url(r'^reports/holds_by_user/$', 'holds_by_user', name='holds_by_user'),
)
urlpatterns += patterns('cube.books.views.metabooks',
url(r'^metabooks/$','metabook_list', name="list_metabooks"),
url(r'metabooks/update/$', 'update', name="update_metabooks"),
)
urlpatterns += patterns('cube.books.views.staff',
url(r'^staff/$','staff_list', name="staff"),
url(r'^staff_edit/$','staff_edit', name="staff_edit"),
url(r'^update_staff/$','update_staff', name="update_staff"),
)
urlpatterns += patterns('cube.books.views.admin',
url(r'^books/admin/dumpdata/$', 'dumpdata', name='dumpdata'),
url(r'^books/admin/bad_unholds/$', 'bad_unholds', name='bad_unholds'),
)
urlpatterns += patterns('cube.users.views',
url(r'^profile/$', 'profile', name='profile'),
url(r'^profile/edit/$', 'edit_profile', name='edit_profile')
)
urlpatterns += patterns('cube.appsettings.views',
url(r'^appsettings/$', 'setting_list', name='appsettings'),
url(r'^appsettings/(\d+)/$', 'edit_setting', name='edit_setting'),
url(r'^appsettings/save/$', 'save_setting', name="save_setting"),
)
| kd7iwp/cube-bookstore | cube/urls.py | Python | gpl-3.0 | 2,981 | 0.006038 |
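The file above uses the long-removed `patterns()` helper and string view references from Django ≤1.x. As a hedged sketch (not from this repository), a few of the same routes in the modern `django.urls` API would look like:

```python
from django.urls import path, re_path
from django.views.generic import RedirectView, TemplateView

from cube.books.views import books as book_views, reports as report_views
from cube.twupass.settings import TWUPASS_LOGOUT_URL

urlpatterns = [
    path('twupass-logout/', RedirectView.as_view(url=TWUPASS_LOGOUT_URL),
         name='twupass-logout'),
    path('help/', TemplateView.as_view(template_name='help.html'), name='help'),
    path('books/', book_views.book_list, name='list'),
    re_path(r'^reports/user/(\d+)/$', report_views.user, name='user'),
]
```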
'''
https://leetcode.com/problems/path-sum/#/description
Given a binary tree and a sum, determine if the tree has a root-to-leaf path such that adding up all the values along the path equals the given sum.
For example:
Given the below binary tree and sum = 22,
      5
     / \
    4   8
   /   / \
  11  13  4
 /  \      \
7    2      1
return true, as there exists the root-to-leaf path 5->4->11->2 whose sum is 22.
'''
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def hasPathSum(self, root, sum):
"""
:type root: TreeNode
:type sum: int
:rtype: bool
"""
answer = False
total = 0
if root is None:
return answer
return self.sumAndCheck(root,total,sum,answer)
def sumAndCheck(self,node,total,sum,answer):
'''
@thought process:
- Use a depth-first search.
- Set a base condition: if answer is already True, just return and exit happily.
- Otherwise, at each valid node, check whether the node is a leaf and whether
its value plus the running total equals the target sum; if so, you have found
your result, just return.
- If not, check the left subtree, then the right subtree, and return answer.
- The base condition takes care of unnecessary traversal.
- If you run off the end of a path without finding the sum, return False.
'''
if answer:
return True
elif node:
if self.isLeaf(node) and (node.val + total) == sum:
return True
else:
answer = self.sumAndCheck(node.left,node.val + total,sum, answer)
answer = self.sumAndCheck(node.right,node.val + total,sum,answer)
return answer
else:
return False
def isLeaf(self,node):
'''
@params takes a valid node
@return bool
'''
return not node.left and not node.right
| jcchuks/MiscCodes | CheckPathSum.py | Python | mit | 2,184 | 0.012821 |
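For comparison, the same problem admits a more compact recursion: subtract each node's value from the target on the way down and test the remainder at the leaves. A sketch, independent of the file above:

```python
def has_path_sum(root, target):
    if root is None:
        return False
    if root.left is None and root.right is None:  # leaf
        return root.val == target
    remaining = target - root.val
    return has_path_sum(root.left, remaining) or has_path_sum(root.right, remaining)
```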
import csv
import math
import numpy as np
from PIL import Image
width = 854
height = 480
fov_multiplier = 1.73 # For 60 degrees, set to 1.73. For 90 degrees, set to 1.
minwh2 = 0.5 * min(width, height)
class Star:
def __init__(self, ra, dec, parallax, g_flux, bp_flux, rp_flux):
self.ra = ra
self.dec = dec
self.parallax = parallax
self.g_flux = g_flux
self.bp_flux = bp_flux
self.rp_flux = rp_flux
distance_parsecs = 1000 / parallax
distance_ly = distance_parsecs * 3.26156
ra_rad = ra * math.pi / 180
dec_rad = (dec + 90) * math.pi / 180
self.x = distance_ly * math.sin(dec_rad) * math.cos(ra_rad)
self.y = distance_ly * math.sin(dec_rad) * math.sin(ra_rad)
self.z = distance_ly * math.cos(dec_rad)
self.absolute_luminosity = g_flux * distance_ly**2
def ParseFloat(s):
try:
return float(s)
except:
return 0
stars = []
with open('lmc-stars.csv', 'rb') as input_file:
reader = csv.DictReader(input_file)
for row in reader:
stars.append(Star(
ParseFloat(row['ra']),
ParseFloat(row['dec']),
ParseFloat(row['parallax']),
ParseFloat(row['phot_g_mean_flux']),
ParseFloat(row['phot_bp_mean_flux']),
ParseFloat(row['phot_rp_mean_flux'])
))
def ProjectPointOntoVector(p, v):
return np.dot(p, v) / np.dot(v, v)
def IntegrateFromPointOfView(position, direction, up):
g_flux = np.zeros((width, height))
red_flux = np.zeros((width, height))
blue_flux = np.zeros((width, height))
right = -np.cross(direction, up)
for s in stars:
transformed = [s.x - position[0], s.y - position[1], s.z - position[2]]
x = np.dot(transformed, right)
y = np.dot(transformed, up)
z = np.dot(transformed, direction)
if z < 1:
continue
sx = int(width / 2 + fov_multiplier * minwh2 * x / z)
sy = int(height / 2 - fov_multiplier * minwh2 * y / z)
if sx < 0 or sx >= width or sy < 0 or sy >= height:
continue
d2 = x**2 + y**2 + z**2
apparent_luminosity = s.absolute_luminosity / d2
g_flux[sx,sy] += apparent_luminosity
redness = 0.5
if s.rp_flux + s.bp_flux > 0:
redness = s.rp_flux / (s.rp_flux + s.bp_flux)
red_flux[sx,sy] += apparent_luminosity * redness
blue_flux[sx,sy] += apparent_luminosity * (1 - redness)
return g_flux, red_flux, blue_flux
# Mix the two colors in the proportion specified by the ratio.
def MixColors(color1, color2, ratio):
r = ratio * color2[0] + (1 - ratio) * color1[0]
g = ratio * color2[1] + (1 - ratio) * color1[1]
b = ratio * color2[2] + (1 - ratio) * color1[2]
return r, g, b
# Converts a color's components to integer values.
def IntColor(c):
return (int(c[0]), int(c[1]), int(c[2]))
# What fraction of the way between lo and hi is the value? If outside the
# range of (lo,hi), it's capped to 0 and 1 respectively.
def CappedRange(lo, hi, value):
if value < lo:
return float(0)
elif value > hi:
return float(1)
else:
return float(value - lo) / (hi - lo)
# redness is a number between 0 and 1. It's the ratio of red to blue light.
def RednessRatioToColor(redness):
red = (255, 0, 0)
blue = (0, 0, 255)
return MixColors(red, blue, CappedRange(0.3, 0.9, redness))
# g_normalized: a number between 0 and 1 representing the percentile
# brightness of a pixel.
# red_flux: how much total red flux in a pixel. No need to normalize.
# blue_flux: how much total blue flux in a pixel. No need to normalize.
def FluxToColor(g_normalized, red_flux, blue_flux):
redness = 0.6
if red_flux + blue_flux > 0:
redness = red_flux / (red_flux + blue_flux)
base_color = RednessRatioToColor(redness)
black = (0, 0, 0)
white = (255, 255, 255)
if g_normalized < 0.5:
return MixColors(black, base_color, CappedRange(0, 0.5, g_normalized))
else:
return MixColors(base_color, white, CappedRange(0.5, 1, g_normalized))
# Normalizes a raw flux value into the range [0,1].
def FluxPercentile(flux, sorted_sample):
lo = 0
hi = len(sorted_sample)
while hi - lo > 1:
mid = int((lo + hi) / 2)
if flux >= sorted_sample[mid]:
lo = mid
else:
hi = mid
return 1.0 * lo / len(sorted_sample)
frame_number = 1
def RenderImageFromFlux(g_flux, red_flux, blue_flux):
global frame_number
sorted_flux = []
for i in range(width):
for j in range(height):
flux = g_flux[i,j]
if flux > 0.000000001:
sorted_flux.append(flux)
sorted_flux.sort()
image = Image.new('RGB', (width, height))
for i in range(width):
for j in range(height):
p = FluxPercentile(g_flux[i,j], sorted_flux)
color = FluxToColor(p, red_flux[i,j], blue_flux[i,j])
image.putpixel((i, j), IntColor(color))
image.save('frames/lmc%05d.png' % frame_number)
frame_number += 1
def RenderFrameFromPointOfView(position, direction, up):
g_flux, red_flux, blue_flux = IntegrateFromPointOfView(position, direction, up)
RenderImageFromFlux(g_flux, red_flux, blue_flux)
num_frames = 10 * 30
up = np.array([0, 1, 0])
lmc = np.array([8950, 59000, 152880])
orbit_radius = 100 * 1000
for i in range(num_frames):
print 'Frame', (i + 1), 'of', num_frames
angle = 2 * math.pi * i / num_frames
direction = np.array([math.sin(angle), 0, -math.cos(angle)])
position = lmc - orbit_radius * direction
RenderFrameFromPointOfView(position, direction, up)
| j3camero/galaxyatlas | data-release-2/render-lmc-frames.py | Python | mit | 5,728 | 0.00419 |
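The heart of `IntegrateFromPointOfView` above is a pinhole projection: divide camera-space x and y by depth z, scale by the field-of-view factor, and offset to the screen centre. A standalone sketch of just that step, using the same constants:

```python
def project(x, y, z, width=854, height=480, fov_multiplier=1.73):
    """Map a camera-space point to pixel coordinates, or None if off-screen."""
    if z < 1:  # behind (or effectively at) the camera plane
        return None
    minwh2 = 0.5 * min(width, height)
    sx = int(width / 2 + fov_multiplier * minwh2 * x / z)
    sy = int(height / 2 - fov_multiplier * minwh2 * y / z)
    if 0 <= sx < width and 0 <= sy < height:
        return sx, sy
    return None

print(project(0, 0, 100))   # (427, 240): the centre of the frame
print(project(50, 0, 100))  # (634, 240): offset right of centre
```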
# -*- coding: utf-8 -*-
# Some utils
import hashlib
import uuid
def get_hash(data):
"""Returns hashed string"""
return hashlib.sha256(data).hexdigest()
def get_token():
return str(uuid.uuid4())
| aluminiumgeek/organic | utils.py | Python | lgpl-3.0 | 212 | 0 |
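Note that under Python 3, `hashlib.sha256` only accepts bytes, so `get_hash` above raises `TypeError` for `str` input. A minimal Python 3-safe variant:

```python
import hashlib

def get_hash(data):
    """Return the SHA-256 hex digest, encoding str input as UTF-8 first."""
    if isinstance(data, str):
        data = data.encode('utf-8')
    return hashlib.sha256(data).hexdigest()

print(get_hash('hello'))
# 2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824
```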
import string
from django.utils.text import slugify
from django.utils.timezone import now
from lxml import html
from lxml.html import tostring
from lxml.html.clean import Cleaner
from cl.lib.string_utils import anonymize, trunc
from cl.search.models import OpinionCluster
from juriscraper.lib.string_utils import clean_string, harmonize, titlecase
import re
import subprocess
BROWSER = 'firefox'
def merge_cases_simple(new, target_id):
"""Add `new` to the database, merging with target_id
Merging is done by picking the best fields from each item.
"""
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# !! THIS CODE IS OUT OF DATE AND UNMAINTAINED. FEEL FREE TO FIX IT, BUT !!
# !! DO NOT TRUST IT IN ITS CURRENT STATE. !!
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
target = OpinionCluster.objects.get(pk=target_id)
print "Merging %s with" % new.case_name
print " %s" % target.case_name
cached_source = target.source # Original value is needed below.
if target.source == 'C':
target.source = 'LC'
elif target.source == 'R':
target.source = 'LR'
elif target.source == 'CR':
target.source = 'LCR'
# Add the URL if it's not a court one, replacing public.resource.org's
# info in some cases.
if cached_source == 'R':
target.download_url = new.download_url
# Recreate the slug from the new case name (this changes the URL, but the
# old will continue working)
target.slug = slugify(trunc(new.case_name, 75))
# Take the case name from the new item; they tend to be pretty good
target.case_name = new.case_name
# Add the docket number if the old doesn't exist, but keep the old if one
# does.
if not target.docket.docket_number:
target.docket.docket_number = new.docket.docket_number
# Get the citations from the new item (ditch the old).
target.federal_cite_one = new.federal_cite_one
target.federal_cite_two = new.federal_cite_two
target.federal_cite_three = new.federal_cite_three
target.state_cite_one = new.state_cite_one
target.state_cite_two = new.state_cite_two
target.state_cite_three = new.state_cite_three
target.state_cite_regional = new.state_cite_regional
target.specialty_cite_one = new.specialty_cite_one
target.scotus_early_cite = new.scotus_early_cite
target.lexis_cite = new.lexis_cite
target.westlaw_cite = new.westlaw_cite
target.neutral_cite = new.neutral_cite
# Add judge information if lacking. New is dirty, but better than none.
if not target.judges:
target.judges = new.judges
# Add the text.
target.html_lawbox, blocked = anonymize(new.html)
if blocked:
target.blocked = True
target.date_blocked = now()
target.extracted_by_ocr = False # No longer true for any LB case.
# save_doc_and_cite(target, index=False)
def merge_cases_complex(case, target_ids):
"""Merge data from PRO with multiple cases that seem to be a match.
The process here is a conservative one. We take *only* the information
from PRO that is not already in CL in any form, and add only that.
"""
# THIS CODE ONLY UPDATED IN THE MOST CURSORY FASHION. DO NOT TRUST IT.
for target_id in target_ids:
simulate = False
oc = OpinionCluster.objects.get(pk=target_id)
print "Merging %s with" % case.case_name
print " %s" % oc.case_name
oc.source = 'CR'
oc.west_cite = case.west_cite
if not simulate:
oc.save()
def find_same_docket_numbers(doc, candidates):
"""Identify the candidates that have the same docket numbers as doc after
each has been cleaned.
"""
new_docket_number = re.sub('(\D|0)', '', doc.docket.docket_number)
same_docket_numbers = []
for candidate in candidates:
old_docket_number = re.sub('(\D|0)', '', candidate.get('docketNumber', ''))
if all([len(new_docket_number) > 3, len(old_docket_number) > 3]):
if old_docket_number in new_docket_number:
same_docket_numbers.append(candidate)
return same_docket_numbers
def case_name_in_candidate(case_name_new, case_name_candidate):
"""When there is one candidate match, this compares their case names to see
if one is contained in the other, in the right order.
Returns True if so, else False.
"""
regex = re.compile('[%s]' % re.escape(string.punctuation))
case_name_new_words = regex.sub('', case_name_new.lower()).split()
case_name_candidate_words = regex.sub('', case_name_candidate.lower()).split()
index = 0
for word in case_name_new_words:
if len(word) <= 2:
continue
try:
index = case_name_candidate_words[index:].index(word)
except ValueError:
# The items were out of order or the item wasn't in the candidate.
return False
return True
def filter_by_stats(candidates, stats):
"""Looks at the candidates and their stats, and filters out obviously
different candidates.
"""
filtered_candidates = []
filtered_stats = {
'candidate_count': 0,
'case_name_similarities': [],
'length_diffs': [],
'gestalt_diffs': [],
'cos_sims': [],
}
for i in range(0, len(candidates)):
# Commented out because the casenames in public.resource.org can be so
# long this varies too much.
# if stats['case_name_similarities'][i] < 0.125:
# # The case name is wildly different
# continue
if stats['length_diffs'][i] > 400:
# The documents have wildly different lengths
continue
# Commented out because the headnotes sometimes included in Resource.org made this calculation vary too much.
#elif stats['gestalt_diffs'][i] < 0.4:
# # The contents are wildly different
# continue
elif stats['cos_sims'][i] < 0.90:
# Very different cosine similarities
continue
else:
# It's a reasonably close match.
filtered_candidates.append(candidates[i])
filtered_stats['case_name_similarities'].append(stats['case_name_similarities'][i])
filtered_stats['length_diffs'].append(stats['length_diffs'][i])
filtered_stats['gestalt_diffs'].append(stats['gestalt_diffs'][i])
filtered_stats['cos_sims'].append(stats['cos_sims'][i])
filtered_stats['candidate_count'] = len(filtered_candidates)
return filtered_candidates, filtered_stats
class Case(object):
def _get_case_name_and_status(self):
case_name = self.url_element.get('title').lower()
ca1regex = re.compile('(unpublished disposition )?notice: first circuit local rule 36.2\(b\)6 states unpublished opinions may be cited only in related cases.?')
ca2regex = re.compile('(unpublished disposition )?notice: second circuit local rule 0.23 states unreported opinions shall not be cited or otherwise used in unrelated cases.?')
ca2regex2 = re.compile('(unpublished disposition )?notice: this summary order may not be cited as precedential authority, but may be called to the attention of the court in a subsequent stage of this case, in a related case, or in any case for purposes of collateral estoppel or res judicata. see second circuit rule 0.23.?')
ca3regex = re.compile('(unpublished disposition )?notice: third circuit rule 21\(i\) states citations to federal decisions which have not been formally reported should identify the court, docket number and date.?')
ca4regex = re.compile('(unpublished disposition )?notice: fourth circuit (local rule 36\(c\)|i.o.p. 36.6) states that citation of unpublished dispositions is disfavored except for establishing res judicata, estoppel, or the law of the case and requires service of copies of cited unpublished dispositions of the fourth circuit.?')
ca5regex = re.compile('(unpublished disposition )?notice: fifth circuit local rule 47.5.3 states that unpublished opinions should normally be cited only when they establish the law of the case, are relied upon as a basis for res judicata or collateral estoppel, or involve related facts. if an unpublished opinion is cited, a copy shall be attached to each copy of the brief.?')
ca6regex = re.compile('(unpublished disposition )?notice: sixth circuit rule 24\(c\) states that citation of unpublished dispositions is disfavored except for establishing res judicata, estoppel, or the law of the case and requires service of copies of cited unpublished dispositions of the sixth circuit.?')
ca7regex = re.compile('(unpublished disposition )?notice: seventh circuit rule 53\(b\)\(2\) states unpublished orders shall not be cited or used as precedent except to support a claim of res judicata, collateral estoppel or law of the case in any federal court within the circuit.?')
ca8regex = re.compile('(unpublished disposition )?notice: eighth circuit rule 28a\(k\) governs citation of unpublished opinions and provides that (no party may cite an opinion not intended for publication unless the cases are related by identity between the parties or the causes of action|they are not precedent and generally should not be cited unless relevant to establishing the doctrines of res judicata, collateral estoppel, the law of the case, or if the opinion has persuasive value on a material issue and no published opinion would serve as well).?')
ca9regex = re.compile('(unpublished disposition )?notice: ninth circuit rule 36-3 provides that dispositions other than opinions or orders designated for publication are not precedential and should not be cited except when relevant under the doctrines of law of the case, res judicata, or collateral estoppel.?')
ca10regex = re.compile('(unpublished disposition )?notice: tenth circuit rule 36.3 states that unpublished opinions and orders and judgments have no precedential value and shall not be cited except for purposes of establishing the doctrines of the law of the case, res judicata, or collateral estoppel.?')
cadcregex = re.compile('(unpublished disposition )?notice: d.c. circuit local rule 11\(c\) states that unpublished orders, judgments, and explanatory memoranda may not be cited as precedents, but counsel may refer to unpublished dispositions when the binding or preclusive effect of the disposition, rather than its quality as precedent, is relevant.?')
cafcregex = re.compile('(unpublished disposition )?notice: federal circuit local rule 47.(6|8)\(b\) states that opinions and orders which are designated as not citable as precedent shall not be employed or cited as precedent. this does not preclude assertion of issues of claim preclusion, issue preclusion, judicial estoppel, law of the case or the like based on a decision of the court rendered in a nonprecedential opinion or order.?')
# Clean off special cases
if 'first circuit' in case_name:
case_name = re.sub(ca1regex, '', case_name)
status = 'Unpublished'
elif 'second circuit' in case_name:
case_name = re.sub(ca2regex, '', case_name)
case_name = re.sub(ca2regex2, '', case_name)
status = 'Unpublished'
elif 'third circuit' in case_name:
case_name = re.sub(ca3regex, '', case_name)
status = 'Unpublished'
elif 'fourth circuit' in case_name:
case_name = re.sub(ca4regex, '', case_name)
status = 'Unpublished'
elif 'fifth circuit' in case_name:
case_name = re.sub(ca5regex, '', case_name)
status = 'Unpublished'
elif 'sixth circuit' in case_name:
case_name = re.sub(ca6regex, '', case_name)
status = 'Unpublished'
elif 'seventh circuit' in case_name:
case_name = re.sub(ca7regex, '', case_name)
status = 'Unpublished'
elif 'eighth circuit' in case_name:
case_name = re.sub(ca8regex, '', case_name)
status = 'Unpublished'
elif 'ninth circuit' in case_name:
case_name = re.sub(ca9regex, '', case_name)
status = 'Unpublished'
elif 'tenth circuit' in case_name:
case_name = re.sub(ca10regex, '', case_name)
status = 'Unpublished'
elif 'd.c. circuit' in case_name:
case_name = re.sub(cadcregex, '', case_name)
status = 'Unpublished'
elif 'federal circuit' in case_name:
case_name = re.sub(cafcregex, '', case_name)
status = 'Unpublished'
else:
status = 'Published'
case_name = titlecase(harmonize(clean_string(case_name)))
if case_name == '' or case_name == 'unpublished disposition':
# No luck getting the case name
saved_case_name = self._check_fix_list(self.sha1_hash, self.case_name_dict)
if saved_case_name:
case_name = saved_case_name
else:
print self.url
if BROWSER:
subprocess.Popen([BROWSER, self.url], shell=False).communicate()
case_name = raw_input("Short case name: ")
self.case_name_fix_file.write("%s|%s\n" % (self.sha1_hash, case_name))
return case_name, status
def get_html_from_raw_text(raw_text):
"""Using the raw_text, creates four useful variables:
1. complete_html_tree: A tree of the complete HTML from the file, including <head> tags and whatever else.
2. clean_html_tree: A tree of the HTML after stripping bad stuff.
3. clean_html_str: A str of the HTML after stripping bad stuff.
4. body_text: A str of the text of the body of the document.
We require all of these because sometimes we need the complete HTML tree, other times we don't. We create them all
up front for performance reasons.
"""
complete_html_tree = html.fromstring(raw_text)
cleaner = Cleaner(style=True,
remove_tags=('a', 'body', 'font', 'noscript',),
kill_tags=('title',),)
clean_html_str = cleaner.clean_html(raw_text)
clean_html_tree = html.fromstring(clean_html_str)
body_text = tostring(clean_html_tree, method='text', encoding='unicode')
return clean_html_tree, complete_html_tree, clean_html_str, body_text
| voutilad/courtlistener | cl/corpus_importer/dup_helpers.py | Python | agpl-3.0 | 14,568 | 0.003089 |
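`find_same_docket_numbers` above compares docket numbers by stripping everything that is not a non-zero digit and then testing substring containment. A tiny demonstration of that normalization (input values are illustrative):

```python
import re

def normalize(docket_number):
    # Drop non-digits and zeros, as in find_same_docket_numbers.
    return re.sub(r'(\D|0)', '', docket_number or '')

print(normalize('No. 14-1021'))  # '14121'
print(normalize('14-01021'))     # '14121' -- zeros are dropped, so these match
```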
from celery.exceptions import SoftTimeLimitExceeded, TimeLimitExceeded
from urllib.parse import urlparse
from httpobs.conf import (RETRIEVER_CONNECT_TIMEOUT,
RETRIEVER_CORS_ORIGIN,
RETRIEVER_READ_TIMEOUT,
RETRIEVER_USER_AGENT)
from httpobs.scanner.utils import parse_http_equiv_headers
import logging
import requests
# Disable the requests InsecureRequestWarning -- we will track certificate errors manually when
# verification is disabled. Also disable requests errors at levels lower than CRITICAL, see:
# https://github.com/celery/celery/issues/3633 for crashy details
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
logging.getLogger('requests').setLevel(logging.CRITICAL)
# MIME types for HTML requests
HTML_TYPES = ('text/html', 'application/xhtml+xml')
# Maximum timeout for requests for all GET requests for anything but the TLS Observatory
# The default ConnectionTimeout is something like 75 seconds, which means that things like
# tiles can take ~600s to timeout, since they have 8 DNS entries. Setting it to lower
# should hopefully keep requests from taking forever
TIMEOUT = (RETRIEVER_CONNECT_TIMEOUT, RETRIEVER_READ_TIMEOUT)
# Create a session, returning the session and the HTTP response in a dictionary
# Don't create the sessions if it can't connect and retrieve the root of the website
# TODO: Allow people to scan a subdirectory instead of using '/' as the default path?
def __create_session(url: str, **kwargs) -> dict:
s = requests.Session()
# Allow certificate verification to be disabled on the initial request, which means that sites won't get
# penalized on things like HSTS, even for self-signed certificates
s.verify = kwargs['verify']
# Add the headers to the session
if kwargs['headers']:
s.headers.update(kwargs['headers'])
# Set all the cookies and force them to be sent only over HTTPS; this might change in the future
if kwargs['cookies']:
s.cookies.update(kwargs['cookies'])
for cookie in s.cookies:
cookie.secure = True
# Override the User-Agent; some sites (like twitter) don't send the CSP header unless you have a modern
# user agent
s.headers.update({
'User-Agent': RETRIEVER_USER_AGENT,
})
try:
r = s.get(url, timeout=TIMEOUT)
# No tls errors
r.verified = True
# Let celery exceptions percolate upward
except (SoftTimeLimitExceeded, TimeLimitExceeded):
raise
# We can try again if there's an SSL error, making sure to note it in the session
except requests.exceptions.SSLError:
try:
r = s.get(url, timeout=TIMEOUT, verify=False)
r.verified = False
except (KeyboardInterrupt, SystemExit):
raise
except:
r = None
s = None
except (KeyboardInterrupt, SystemExit):
raise
except:
r = None
s = None
# Store the domain name and scheme in the session
if r is not None and s is not None:
s.url = urlparse(r.url)
return {'session': s, 'response': r}
def __get(session, relative_path='/', headers=None, cookies=None):
if not headers:
headers = {}
if not cookies:
cookies = {}
try:
# TODO: limit the maximum size of the response, to keep malicious site operators from killing us
# TODO: Perhaps we can naively do it for now by simply setting a timeout?
# TODO: catch TLS errors instead of just setting it to None?
return session.get(session.url.scheme + '://' + session.url.netloc + relative_path,
headers=headers,
cookies=cookies,
timeout=TIMEOUT)
# Let celery exceptions percolate upward
except (SoftTimeLimitExceeded, TimeLimitExceeded):
raise
except (KeyboardInterrupt, SystemExit):
raise
except:
return None
def __get_page_text(response: requests.Response, force: bool = False) -> str:
if response is None:
return None
elif response.status_code == 200 or force: # Some pages we want to get the page text even with non-200s
# A quick and dirty check to make sure that somebody's 404 page didn't actually return 200 with html
ext = (response.history[0].url if response.history else response.url).split('.')[-1]
if response.headers.get('Content-Type', '') in HTML_TYPES and ext in ('json', 'txt', 'xml'):
return None
return response.text
else:
return None
def retrieve_all(hostname, **kwargs):
kwargs['cookies'] = kwargs.get('cookies', {}) # HTTP cookies to send, instead of from the database
kwargs['headers'] = kwargs.get('headers', {}) # HTTP headers to send, instead of from the database
# This way of doing it keeps the urls tidy even if makes the code ugly
kwargs['http_port'] = ':' + str(kwargs.get('http_port', '')) if 'http_port' in kwargs else ''
kwargs['https_port'] = ':' + str(kwargs.get('https_port', '')) if 'https_port' in kwargs else ''
kwargs['path'] = kwargs.get('path', '/')
kwargs['verify'] = kwargs.get('verify', True)
retrievals = {
'hostname': hostname,
'resources': {
},
'responses': {
'auto': None, # whichever of 'http' or 'https' actually works, with 'https' as higher priority
'cors': None, # CORS preflight test
'http': None,
'https': None,
},
'session': None,
}
# The list of resources to get
resources = (
'/clientaccesspolicy.xml',
'/contribute.json',
'/crossdomain.xml',
'/robots.txt'
)
# Create some reusable sessions, one for HTTP and one for HTTPS
http_session = __create_session('http://' + hostname + kwargs['http_port'] + kwargs['path'], **kwargs)
https_session = __create_session('https://' + hostname + kwargs['https_port'] + kwargs['path'], **kwargs)
# If neither one works, then the site just can't be loaded
if http_session['session'] is None and https_session['session'] is None:
return retrievals
else:
# Store the HTTP only and HTTPS only responses (some things can only be retrieved over one or the other)
retrievals['responses']['http'] = http_session['response']
retrievals['responses']['https'] = https_session['response']
if https_session['session'] is not None:
retrievals['responses']['auto'] = https_session['response']
retrievals['session'] = https_session['session']
else:
retrievals['responses']['auto'] = http_session['response']
retrievals['session'] = http_session['session']
# Store the contents of the "base" page
retrievals['resources']['__path__'] = __get_page_text(retrievals['responses']['auto'], force=True)
# Do a CORS preflight request
retrievals['responses']['cors'] = __get(retrievals['session'],
kwargs['path'],
headers={'Origin': RETRIEVER_CORS_ORIGIN})
# Store all the files we retrieve
for resource in resources:
resp = __get(retrievals['session'], resource)
retrievals['resources'][resource] = __get_page_text(resp)
# Parse out the HTTP meta-equiv headers
if (retrievals['responses']['auto'].headers.get('Content-Type', '').split(';')[0] in HTML_TYPES and
retrievals['resources']['__path__']):
retrievals['responses']['auto'].http_equiv = parse_http_equiv_headers(retrievals['resources']['__path__'])
else:
retrievals['responses']['auto'].http_equiv = {}
return retrievals
| april/http-observatory | httpobs/scanner/retriever/retriever.py | Python | mpl-2.0 | 7,939 | 0.004157 |
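One detail worth isolating from `__create_session` above: every cookie added to the session is forced to the Secure flag so it is only ever sent over HTTPS. A minimal sketch of just that step with `requests`:

```python
import requests

s = requests.Session()
s.cookies.update({'sessionid': 'abc123'})
for cookie in s.cookies:
    cookie.secure = True  # never send this cookie over plain HTTP

print([(c.name, c.secure) for c in s.cookies])  # [('sessionid', True)]
```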
import unittest
import datetime
import httpretty as HP
import json
from urllib.parse import parse_qsl
from malaysiaflights.aa import AirAsia as AA
class AARequestTests(unittest.TestCase):
def url_helper(self, from_, to, date):
host = 'https://argon.airasia.com'
path = '/api/7.0/search'
body = {'origin': from_,
'destination': to,
'depart': date,
'passenger-count': '1',
'infant-count': '0',
'currency': 'MYR'}
return host, path, body
@HP.activate
def test_search_calls_api_using_correct_path_and_body(self):
host, path, body = self.url_helper('KUL', 'TGG', '18-06-2015')
HP.register_uri(HP.POST, host+path, status=200)
d = datetime.datetime(2015, 6, 18)
AA.search('KUL', 'TGG', d)
mocked_request = HP.last_request()
actual_body = dict(parse_qsl(mocked_request.body.decode()))
self.assertEqual(path, mocked_request.path)
self.assertEqual(body, actual_body)
class ResponseExtractionTests(unittest.TestCase):
def fixture_loader(self, path):
prefix = 'malaysiaflights/fixtures/'
with open(prefix + path, 'r') as file_:
return json.loads(file_.read())
def setUp(self):
self.single = self.fixture_loader('aa-single.json')
self.zero = self.fixture_loader('aa-no-flights.json')
def test_get_number_of_results_for_valid_response(self):
json = self.single
actual = AA.get_number_of_results(json)
self.assertEqual(4, actual)
def test_get_number_of_results_for_no_flights_on_date(self):
json = self.zero
actual = AA.get_number_of_results(json)
self.assertEqual(0, actual)
def test_get_flight_details_using_index_0_should_return_results(self):
json = self.single
expected = {
'flight_number': 'AK6225',
'departure_airport': 'TGG',
'arrival_airport': 'KUL',
'departure_time': 'Sat, 20 Jun 2015 08:20:00 +0800',
'arrival_time': 'Sat, 20 Jun 2015 09:15:00 +0800',
'total_fare': 133.99,
'fare_currency': 'MYR'}
actual = AA.get_direct_flight_details(json, 0)
self.assertEqual(expected, actual)
def test_get_flight_details_using_index_1_should_return_results(self):
json = self.single
expected = {
'flight_number': 'AK6229',
'departure_airport': 'TGG',
'arrival_airport': 'KUL',
'departure_time': 'Sat, 20 Jun 2015 13:10:00 +0800',
'arrival_time': 'Sat, 20 Jun 2015 14:05:00 +0800',
'total_fare': 133.99,
'fare_currency': 'MYR'}
actual = AA.get_direct_flight_details(json, 1)
self.assertEqual(expected, actual)
@unittest.skip('no-data-yet')
def test_is_connecting_flights_should_return_true_for_connecting(self):
json = ''
actual = AA.is_connecting_flights(json, 0)
self.assertTrue(actual)
def test_is_connecting_flights_should_return_false_for_direct(self):
json = self.single
actual = AA.is_connecting_flights(json, 2)
self.assertFalse(actual)
class TimeConversionTest(unittest.TestCase):
def test_convert_to_api_format_returns_correct_output(self):
date_object = datetime.datetime(2015, 9, 25)
expected = '25-09-2015'
actual = AA.to_api(date_object)
self.assertEqual(expected, actual)
def test_convert_extracted_time_to_datetime_returns_correct_object(self):
offset = datetime.timedelta(hours=8)
expected = datetime.datetime(2015, 6, 20, 13, 10,
tzinfo=datetime.timezone(offset))
actual = AA.to_datetime('Sat, 20 Jun 2015 13:10:00 +0800')
self.assertEqual(expected, actual)
| azam-a/malaysiaflights | malaysiaflights/tests/test_aa.py | Python | mit | 3,889 | 0 |
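The `AA.to_datetime` helper exercised above parses RFC 2822-style timestamps such as 'Sat, 20 Jun 2015 13:10:00 +0800'. The standard library can do the same, as this sketch shows:

```python
from email.utils import parsedate_to_datetime

dt = parsedate_to_datetime('Sat, 20 Jun 2015 13:10:00 +0800')
print(dt.isoformat())  # 2015-06-20T13:10:00+08:00
```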
"""
The Plaid API
The Plaid REST API. Please see https://plaid.com/docs/api for more details. # noqa: E501
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from plaid.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
def lazy_import():
from plaid.model.distribution_breakdown import DistributionBreakdown
globals()['DistributionBreakdown'] = DistributionBreakdown
class PayPeriodDetails(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
('pay_frequency',): {
'None': None,
'PAY_FREQUENCY_UNKNOWN': "PAY_FREQUENCY_UNKNOWN",
'PAY_FREQUENCY_WEEKLY': "PAY_FREQUENCY_WEEKLY",
'PAY_FREQUENCY_BIWEEKLY': "PAY_FREQUENCY_BIWEEKLY",
'PAY_FREQUENCY_SEMIMONTHLY': "PAY_FREQUENCY_SEMIMONTHLY",
'PAY_FREQUENCY_MONTHLY': "PAY_FREQUENCY_MONTHLY",
'NULL': "null",
},
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'check_amount': (float, none_type,), # noqa: E501
'distribution_breakdown': ([DistributionBreakdown],), # noqa: E501
'end_date': (date, none_type,), # noqa: E501
'gross_earnings': (float, none_type,), # noqa: E501
'pay_date': (date, none_type,), # noqa: E501
'pay_frequency': (str, none_type,), # noqa: E501
'pay_day': (date, none_type,), # noqa: E501
'start_date': (date, none_type,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'check_amount': 'check_amount', # noqa: E501
'distribution_breakdown': 'distribution_breakdown', # noqa: E501
'end_date': 'end_date', # noqa: E501
'gross_earnings': 'gross_earnings', # noqa: E501
'pay_date': 'pay_date', # noqa: E501
'pay_frequency': 'pay_frequency', # noqa: E501
'pay_day': 'pay_day', # noqa: E501
'start_date': 'start_date', # noqa: E501
}
_composed_schemas = {}
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""PayPeriodDetails - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
check_amount (float, none_type): The amount of the paycheck.. [optional] # noqa: E501
distribution_breakdown ([DistributionBreakdown]): [optional] # noqa: E501
end_date (date, none_type): The pay period end date, in [ISO 8601](https://wikipedia.org/wiki/ISO_8601) format: \"yyyy-mm-dd\".. [optional] # noqa: E501
gross_earnings (float, none_type): Total earnings before tax/deductions.. [optional] # noqa: E501
pay_date (date, none_type): The date on which the paystub was issued, in [ISO 8601](https://wikipedia.org/wiki/ISO_8601) format (\"yyyy-mm-dd\").. [optional] # noqa: E501
pay_frequency (str, none_type): The frequency at which an individual is paid.. [optional] # noqa: E501
pay_day (date, none_type): The date on which the paystub was issued, in [ISO 8601](https://wikipedia.org/wiki/ISO_8601) format (\"yyyy-mm-dd\").. [optional] # noqa: E501
start_date (date, none_type): The pay period start date, in [ISO 8601](https://wikipedia.org/wiki/ISO_8601) format: \"yyyy-mm-dd\".. [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
| plaid/plaid-python | plaid/model/pay_period_details.py | Python | mit | 9,216 | 0.000434 |
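Per the generated docstring above, every field of `PayPeriodDetails` is an optional keyword argument, and enum-like fields such as `pay_frequency` take the string values listed in `allowed_values`. A hedged usage sketch (values are illustrative):

```python
from plaid.model.pay_period_details import PayPeriodDetails

details = PayPeriodDetails(
    pay_frequency='PAY_FREQUENCY_BIWEEKLY',
    check_amount=1520.75,
)
print(details.pay_frequency)  # PAY_FREQUENCY_BIWEEKLY
```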
import os
import sys
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.append(BASE_DIR)
sys.path.append(os.path.dirname(BASE_DIR))
from global_variables import *
from evaluation_helper import *
cls_names = g_shape_names
img_name_file_list = [os.path.join(g_real_images_voc12val_det_bbox_folder, name+'.txt') for name in cls_names]
det_bbox_mat_file_list = [os.path.join(g_detection_results_folder, x.rstrip()) for x in open(g_rcnn_detection_bbox_mat_filelist)]
result_folder = os.path.join(BASE_DIR, 'avp_test_results')
test_avp_nv(cls_names, img_name_file_list, det_bbox_mat_file_list, result_folder)
img_name_file_list = [os.path.join(g_real_images_voc12val_easy_gt_bbox_folder, name+'.txt') for name in cls_names]
view_label_folder = g_real_images_voc12val_easy_gt_bbox_folder
result_folder = os.path.join(BASE_DIR, 'vp_test_results')
test_vp_acc(cls_names, img_name_file_list, result_folder, view_label_folder)
| ShapeNet/RenderForCNN | view_estimation/run_evaluation.py | Python | mit | 932 | 0.006438 |
# Portions Copyright (c) Facebook, Inc. and its affiliates.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2.
# hgweb/__init__.py - web interface to a mercurial repository
#
# Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
# Copyright 2005 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from __future__ import absolute_import
import os
from .. import error, pycompat, util
from ..i18n import _
from . import hgweb_mod, hgwebdir_mod, server
def hgweb(config, name=None, baseui=None):
"""create an hgweb wsgi object
config can be one of:
- repo object (single repo view)
- path to repo (single repo view)
- path to config file (multi-repo view)
- dict of virtual:real pairs (multi-repo view)
- list of virtual:real tuples (multi-repo view)
"""
if (
(isinstance(config, str) and not os.path.isdir(config))
or isinstance(config, dict)
or isinstance(config, list)
):
# create a multi-dir interface
return hgwebdir_mod.hgwebdir(config, baseui=baseui)
return hgweb_mod.hgweb(config, name=name, baseui=baseui)
def hgwebdir(config, baseui=None):
return hgwebdir_mod.hgwebdir(config, baseui=baseui)
class httpservice(object):
def __init__(self, ui, app, opts):
self.ui = ui
self.app = app
self.opts = opts
def init(self):
util.setsignalhandler()
self.httpd = server.create_server(self.ui, self.app)
portfile = self.opts.get("port_file")
if portfile:
util.writefile(portfile, "%s" % self.httpd.port)
if (self.opts["port"] or portfile) and not self.ui.verbose:
return
if self.httpd.prefix:
prefix = self.httpd.prefix.strip("/") + "/"
else:
prefix = ""
port = r":%d" % self.httpd.port
if port == r":80":
port = r""
bindaddr = self.httpd.addr
if bindaddr == r"0.0.0.0":
bindaddr = r"*"
elif r":" in bindaddr: # IPv6
bindaddr = r"[%s]" % bindaddr
fqaddr = self.httpd.fqaddr
if r":" in fqaddr:
fqaddr = r"[%s]" % fqaddr
if self.opts["port"] or portfile:
write = self.ui.status
else:
write = self.ui.write
write(
_("listening at http://%s%s/%s (bound to %s:%d)\n")
% (fqaddr, port, prefix, bindaddr, self.httpd.port)
)
self.ui.flush() # avoid buffering of status message
def run(self):
self.httpd.serve_forever()
def createapp(baseui, repo, webconf):
if webconf:
return hgwebdir_mod.hgwebdir(webconf, baseui=baseui)
else:
if not repo:
raise error.RepoError(
_("there is no Mercurial repository" " here (.hg not found)")
)
return hgweb_mod.hgweb(repo, baseui=baseui)
| facebookexperimental/eden | eden/hg-server/edenscm/mercurial/hgweb/__init__.py | Python | gpl-2.0 | 3,073 | 0 |
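Per the `hgweb()` docstring above, passing a repository path returns a WSGI application, so any WSGI server can host it. A minimal sketch, assuming a local repository exists at a hypothetical path:

```python
from wsgiref.simple_server import make_server

application = hgweb('/path/to/repo')  # hypothetical single-repo view
make_server('localhost', 8000, application).serve_forever()
```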
import py, sys, platform
import pytest
from testing import backend_tests, test_function, test_ownlib
from cffi import FFI
import _cffi_backend
class TestFFI(backend_tests.BackendTests,
test_function.TestFunction,
test_ownlib.TestOwnLib):
TypeRepr = "<ctype '%s'>"
@staticmethod
def Backend():
return _cffi_backend
def test_not_supported_bitfield_in_result(self):
ffi = FFI(backend=self.Backend())
ffi.cdef("struct foo_s { int a,b,c,d,e; int x:1; };")
e = py.test.raises(NotImplementedError, ffi.callback,
"struct foo_s foo(void)", lambda: 42)
assert str(e.value) == ("<struct foo_s(*)(void)>: "
"cannot pass as argument or return value a struct with bit fields")
def test_inspecttype(self):
ffi = FFI(backend=self.Backend())
assert ffi.typeof("long").kind == "primitive"
assert ffi.typeof("long(*)(long, long**, ...)").cname == (
"long(*)(long, long * *, ...)")
assert ffi.typeof("long(*)(long, long**, ...)").ellipsis is True
def test_new_handle(self):
ffi = FFI(backend=self.Backend())
o = [2, 3, 4]
p = ffi.new_handle(o)
assert ffi.typeof(p) == ffi.typeof("void *")
assert ffi.from_handle(p) is o
assert ffi.from_handle(ffi.cast("char *", p)) is o
py.test.raises(RuntimeError, ffi.from_handle, ffi.NULL)
class TestBitfield:
def check(self, source, expected_ofs_y, expected_align, expected_size):
# NOTE: 'expected_*' is the numbers expected from GCC.
# The numbers expected from MSVC are not explicitly written
# in this file, and will just be taken from the compiler.
ffi = FFI()
ffi.cdef("struct s1 { %s };" % source)
ctype = ffi.typeof("struct s1")
# verify the information with gcc
ffi1 = FFI()
ffi1.cdef("""
static const int Gofs_y, Galign, Gsize;
struct s1 *try_with_value(int fieldnum, long long value);
""")
fnames = [name for name, cfield in ctype.fields
if name and cfield.bitsize > 0]
setters = ['case %d: s.%s = value; break;' % iname
for iname in enumerate(fnames)]
lib = ffi1.verify("""
struct s1 { %s };
struct sa { char a; struct s1 b; };
#define Gofs_y offsetof(struct s1, y)
#define Galign offsetof(struct sa, b)
#define Gsize sizeof(struct s1)
struct s1 *try_with_value(int fieldnum, long long value)
{
static struct s1 s;
memset(&s, 0, sizeof(s));
switch (fieldnum) { %s }
return &s;
}
""" % (source, ' '.join(setters)))
if sys.platform == 'win32':
expected_ofs_y = lib.Gofs_y
expected_align = lib.Galign
expected_size = lib.Gsize
else:
assert (lib.Gofs_y, lib.Galign, lib.Gsize) == (
expected_ofs_y, expected_align, expected_size)
# the real test follows
assert ffi.offsetof("struct s1", "y") == expected_ofs_y
assert ffi.alignof("struct s1") == expected_align
assert ffi.sizeof("struct s1") == expected_size
# compare the actual storage of the two
for name, cfield in ctype.fields:
if cfield.bitsize < 0 or not name:
continue
if int(ffi.cast(cfield.type, -1)) == -1: # signed
min_value = -(1 << (cfield.bitsize-1))
max_value = (1 << (cfield.bitsize-1)) - 1
else:
min_value = 0
max_value = (1 << cfield.bitsize) - 1
for t in [1, 2, 4, 8, 16, 128, 2813, 89728, 981729,
-1,-2,-4,-8,-16,-128,-2813,-89728,-981729]:
if min_value <= t <= max_value:
self._fieldcheck(ffi, lib, fnames, name, t)
def _fieldcheck(self, ffi, lib, fnames, name, value):
s = ffi.new("struct s1 *")
setattr(s, name, value)
assert getattr(s, name) == value
raw1 = ffi.buffer(s)[:]
t = lib.try_with_value(fnames.index(name), value)
raw2 = ffi.buffer(t, len(raw1))[:]
assert raw1 == raw2
def test_bitfield_basic(self):
self.check("int a; int b:9; int c:20; int y;", 8, 4, 12)
self.check("int a; short b:9; short c:7; int y;", 8, 4, 12)
self.check("int a; short b:9; short c:9; int y;", 8, 4, 12)
def test_bitfield_reuse_if_enough_space(self):
self.check("int a:2; char y;", 1, 4, 4)
self.check("int a:1; char b ; int c:1; char y;", 3, 4, 4)
self.check("int a:1; char b:8; int c:1; char y;", 3, 4, 4)
self.check("char a; int b:9; char y;", 3, 4, 4)
self.check("char a; short b:9; char y;", 4, 2, 6)
self.check("int a:2; char b:6; char y;", 1, 4, 4)
self.check("int a:2; char b:7; char y;", 2, 4, 4)
self.check("int a:2; short b:15; char c:2; char y;", 5, 4, 8)
self.check("int a:2; char b:1; char c:1; char y;", 1, 4, 4)
@pytest.mark.skipif("platform.machine().startswith('arm')")
def test_bitfield_anonymous_no_align(self):
L = FFI().alignof("long long")
self.check("char y; int :1;", 0, 1, 2)
self.check("char x; int z:1; char y;", 2, 4, 4)
self.check("char x; int :1; char y;", 2, 1, 3)
self.check("char x; long long z:48; char y;", 7, L, 8)
self.check("char x; long long :48; char y;", 7, 1, 8)
self.check("char x; long long z:56; char y;", 8, L, 8 + L)
self.check("char x; long long :56; char y;", 8, 1, 9)
self.check("char x; long long z:57; char y;", L + 8, L, L + 8 + L)
self.check("char x; long long :57; char y;", L + 8, 1, L + 9)
@pytest.mark.skipif("not platform.machine().startswith('arm')")
def test_bitfield_anonymous_align_arm(self):
L = FFI().alignof("long long")
self.check("char y; int :1;", 0, 4, 4)
self.check("char x; int z:1; char y;", 2, 4, 4)
self.check("char x; int :1; char y;", 2, 4, 4)
self.check("char x; long long z:48; char y;", 7, L, 8)
self.check("char x; long long :48; char y;", 7, 8, 8)
self.check("char x; long long z:56; char y;", 8, L, 8 + L)
self.check("char x; long long :56; char y;", 8, L, 8 + L)
self.check("char x; long long z:57; char y;", L + 8, L, L + 8 + L)
self.check("char x; long long :57; char y;", L + 8, L, L + 8 + L)
@pytest.mark.skipif("platform.machine().startswith('arm')")
def test_bitfield_zero(self):
L = FFI().alignof("long long")
self.check("char y; int :0;", 0, 1, 4)
self.check("char x; int :0; char y;", 4, 1, 5)
self.check("char x; int :0; int :0; char y;", 4, 1, 5)
self.check("char x; long long :0; char y;", L, 1, L + 1)
self.check("short x, y; int :0; int :0;", 2, 2, 4)
self.check("char x; int :0; short b:1; char y;", 5, 2, 6)
self.check("int a:1; int :0; int b:1; char y;", 5, 4, 8)
@pytest.mark.skipif("not platform.machine().startswith('arm')")
def test_bitfield_zero_arm(self):
L = FFI().alignof("long long")
self.check("char y; int :0;", 0, 4, 4)
self.check("char x; int :0; char y;", 4, 4, 8)
self.check("char x; int :0; int :0; char y;", 4, 4, 8)
self.check("char x; long long :0; char y;", L, 8, L + 8)
self.check("short x, y; int :0; int :0;", 2, 4, 4)
self.check("char x; int :0; short b:1; char y;", 5, 4, 8)
self.check("int a:1; int :0; int b:1; char y;", 5, 4, 8)
def test_error_cases(self):
ffi = FFI()
py.test.raises(TypeError,
'ffi.cdef("struct s1 { float x:1; };"); ffi.new("struct s1 *")')
py.test.raises(TypeError,
'ffi.cdef("struct s2 { char x:0; };"); ffi.new("struct s2 *")')
py.test.raises(TypeError,
'ffi.cdef("struct s3 { char x:9; };"); ffi.new("struct s3 *")')
def test_struct_with_typedef(self):
ffi = FFI()
ffi.cdef("typedef struct { float x; } foo_t;")
p = ffi.new("foo_t *", [5.2])
assert repr(p).startswith("<cdata 'foo_t *' ")
def test_struct_array_no_length(self):
ffi = FFI()
ffi.cdef("struct foo_s { int x; int a[]; };")
p = ffi.new("struct foo_s *", [100, [200, 300, 400]])
assert p.x == 100
assert ffi.typeof(p.a) is ffi.typeof("int *") # no length available
assert p.a[0] == 200
assert p.a[1] == 300
assert p.a[2] == 400
@pytest.mark.skipif("sys.platform != 'win32'")
def test_getwinerror(self):
ffi = FFI()
code, message = ffi.getwinerror(1155)
assert code == 1155
assert message == ("No application is associated with the "
"specified file for this operation")
ffi.cdef("void SetLastError(int);")
lib = ffi.dlopen("Kernel32.dll")
lib.SetLastError(2)
code, message = ffi.getwinerror()
assert code == 2
assert message == "The system cannot find the file specified"
code, message = ffi.getwinerror(-1)
assert code == 2
assert message == "The system cannot find the file specified"
| mhnatiuk/phd_sociology_of_religion | scrapper/build/cffi/testing/test_ffi_backend.py | Python | gpl-2.0 | 9,406 | 0.001701 |
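`TestBitfield.check` above asks CFFI for a struct's layout straight from a `cdef`, no compilation needed. A small standalone illustration using the expectations from `test_bitfield_basic` (values hold for typical GCC-style layouts):

```python
from cffi import FFI

ffi = FFI()
ffi.cdef("struct s1 { int a; int b:9; int c:20; int y; };")
print(ffi.offsetof("struct s1", "y"))  # 8: b and c pack into one int
print(ffi.alignof("struct s1"))        # 4
print(ffi.sizeof("struct s1"))         # 12
```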
#!/usr/bin/python
# coding: utf-8
class Solution(object):
def convertToTitle(self, n):
"""
:type n: int
:rtype: str
"""
return "" if n == 0 else self.convertToTitle((n - 1) / 26) + chr((n - 1) % 26 + ord('A'))
| Lanceolata/code-problems | python/leetcode/Question_168_Excel_Sheet_Column_Title.py | Python | mit | 255 | 0.003922 |
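Quick sanity checks for the conversion above, following Excel's column lettering (A=1, ..., Z=26, AA=27, ...):

```python
s = Solution()
print(s.convertToTitle(1))    # 'A'
print(s.convertToTitle(28))   # 'AB'
print(s.convertToTitle(701))  # 'ZY'
```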
"""
Imitate the parser representation.
"""
import inspect
import re
import sys
import os
from functools import partial
from jedi._compatibility import builtins as _builtins, unicode
from jedi import debug
from jedi.cache import underscore_memoization, memoize_method
from jedi.parser.tree import Param, Base, Operator, zero_position_modifier
from jedi.evaluate.helpers import FakeName
from . import fake
_sep = os.path.sep
if os.path.altsep is not None:
_sep += os.path.altsep
_path_re = re.compile('(?:\.[^{0}]+|[{0}]__init__\.py)$'.format(re.escape(_sep)))
del _sep
class CheckAttribute(object):
"""Raises an AttributeError if the attribute X isn't available."""
def __init__(self, func):
self.func = func
# Remove the py in front of e.g. py__call__.
self.check_name = func.__name__[2:]
def __get__(self, instance, owner):
# This might raise an AttributeError. That's wanted.
getattr(instance.obj, self.check_name)
return partial(self.func, instance)
class CompiledObject(Base):
# comply with the parser
start_pos = 0, 0
path = None # modules have this attribute - set it to None.
used_names = {} # To be consistent with modules.
def __init__(self, evaluator, obj, parent=None):
self._evaluator = evaluator
self.obj = obj
self.parent = parent
@CheckAttribute
def py__call__(self, params):
if inspect.isclass(self.obj):
from jedi.evaluate.representation import Instance
return set([Instance(self._evaluator, self, params)])
else:
return set(self._execute_function(params))
@CheckAttribute
def py__class__(self):
return create(self._evaluator, self.obj.__class__)
@CheckAttribute
def py__mro__(self):
return tuple(create(self._evaluator, cls) for cls in self.obj.__mro__)
@CheckAttribute
def py__bases__(self):
return tuple(create(self._evaluator, cls) for cls in self.obj.__bases__)
def py__bool__(self):
return bool(self.obj)
def py__file__(self):
return self.obj.__file__
def is_class(self):
return inspect.isclass(self.obj)
@property
def doc(self):
return inspect.getdoc(self.obj) or ''
@property
def params(self):
params_str, ret = self._parse_function_doc()
tokens = params_str.split(',')
if inspect.ismethoddescriptor(self.obj):
tokens.insert(0, 'self')
params = []
for p in tokens:
parts = [FakeName(part) for part in p.strip().split('=')]
if len(parts) > 1:
parts.insert(1, Operator(zero_position_modifier, '=', (0, 0)))
params.append(Param(parts, self))
return params
def __repr__(self):
return '<%s: %s>' % (type(self).__name__, repr(self.obj))
@underscore_memoization
def _parse_function_doc(self):
if self.doc is None:
return '', ''
return _parse_function_doc(self.doc)
def api_type(self):
obj = self.obj
if inspect.isclass(obj):
return 'class'
elif inspect.ismodule(obj):
return 'module'
elif inspect.isbuiltin(obj) or inspect.ismethod(obj) \
or inspect.ismethoddescriptor(obj) or inspect.isfunction(obj):
return 'function'
# Everything else...
return 'instance'
@property
def type(self):
"""Imitate the tree.Node.type values."""
cls = self._get_class()
if inspect.isclass(cls):
return 'classdef'
elif inspect.ismodule(cls):
return 'file_input'
elif inspect.isbuiltin(cls) or inspect.ismethod(cls) or \
inspect.ismethoddescriptor(cls):
return 'funcdef'
@underscore_memoization
def _cls(self):
"""
We used to limit the lookups for instantiated objects like list(), but
this is not the case anymore. Python itself
"""
# Ensures that a CompiledObject is returned that is not an instance (like list)
return self
def _get_class(self):
if not fake.is_class_instance(self.obj) or \
inspect.ismethoddescriptor(self.obj): # slots
return self.obj
try:
return self.obj.__class__
except AttributeError:
            # Happens with numpy.core.umath._UFUNC_API (you get it
            # automatically by doing `import numpy`).
return type
@property
def names_dict(self):
# For compatibility with `representation.Class`.
return self.names_dicts(False)[0]
def names_dicts(self, search_global, is_instance=False):
return self._names_dict_ensure_one_dict(is_instance)
@memoize_method
def _names_dict_ensure_one_dict(self, is_instance):
"""
search_global shouldn't change the fact that there's one dict, this way
there's only one `object`.
"""
return [LazyNamesDict(self._evaluator, self, is_instance)]
def get_subscope_by_name(self, name):
if name in dir(self.obj):
return CompiledName(self._evaluator, self, name).parent
else:
raise KeyError("CompiledObject doesn't have an attribute '%s'." % name)
@CheckAttribute
def py__getitem__(self, index):
if type(self.obj) not in (str, list, tuple, unicode, bytes, bytearray, dict):
# Get rid of side effects, we won't call custom `__getitem__`s.
return set()
return set([create(self._evaluator, self.obj[index])])
@CheckAttribute
def py__iter__(self):
if type(self.obj) not in (str, list, tuple, unicode, bytes, bytearray, dict):
            # Get rid of side effects, we won't call custom `__iter__`s.
return
for part in self.obj:
yield set([create(self._evaluator, part)])
@property
def name(self):
try:
name = self._get_class().__name__
except AttributeError:
name = repr(self.obj)
return FakeName(name, self)
def _execute_function(self, params):
if self.type != 'funcdef':
return
for name in self._parse_function_doc()[1].split():
try:
bltn_obj = getattr(_builtins, name)
except AttributeError:
continue
else:
if bltn_obj is None:
# We want to evaluate everything except None.
# TODO do we?
continue
bltn_obj = create(self._evaluator, bltn_obj)
for result in self._evaluator.execute(bltn_obj, params):
yield result
@property
@underscore_memoization
def subscopes(self):
"""
Returns only the faked scopes - the other ones are not important for
internal analysis.
"""
module = self.get_parent_until()
faked_subscopes = []
for name in dir(self.obj):
try:
faked_subscopes.append(
fake.get_faked(module.obj, self.obj, parent=self, name=name)
)
except fake.FakeDoesNotExist:
pass
return faked_subscopes
def is_scope(self):
return True
def get_self_attributes(self):
return [] # Instance compatibility
def get_imports(self):
return [] # Builtins don't have imports
class CompiledName(FakeName):
def __init__(self, evaluator, compiled_obj, name):
super(CompiledName, self).__init__(name)
self._evaluator = evaluator
self._compiled_obj = compiled_obj
self.name = name
def __repr__(self):
try:
name = self._compiled_obj.name # __name__ is not defined all the time
except AttributeError:
name = None
return '<%s: (%s).%s>' % (type(self).__name__, name, self.name)
def is_definition(self):
return True
@property
@underscore_memoization
def parent(self):
module = self._compiled_obj.get_parent_until()
return _create_from_name(self._evaluator, module, self._compiled_obj, self.name)
@parent.setter
def parent(self, value):
pass # Just ignore this, FakeName tries to overwrite the parent attribute.
class LazyNamesDict(object):
"""
A names_dict instance for compiled objects, resembles the parser.tree.
"""
name_class = CompiledName
def __init__(self, evaluator, compiled_obj, is_instance=False):
self._evaluator = evaluator
self._compiled_obj = compiled_obj
self._is_instance = is_instance
def __iter__(self):
return (v[0].value for v in self.values())
@memoize_method
def __getitem__(self, name):
try:
getattr(self._compiled_obj.obj, name)
except AttributeError:
raise KeyError('%s in %s not found.' % (name, self._compiled_obj))
except Exception:
# This is a bit ugly. We're basically returning this to make
# lookups possible without having the actual attribute. However
# this makes proper completion possible.
return [FakeName(name, create(self._evaluator, None), is_definition=True)]
return [self.name_class(self._evaluator, self._compiled_obj, name)]
def values(self):
obj = self._compiled_obj.obj
values = []
for name in dir(obj):
try:
values.append(self[name])
except KeyError:
# The dir function can be wrong.
pass
is_instance = self._is_instance or fake.is_class_instance(obj)
# ``dir`` doesn't include the type names.
if not inspect.ismodule(obj) and obj != type and not is_instance:
values += create(self._evaluator, type).names_dict.values()
return values
def dotted_from_fs_path(fs_path, sys_path):
"""
Changes `/usr/lib/python3.4/email/utils.py` to `email.utils`. I.e.
compares the path with sys.path and then returns the dotted_path. If the
path is not in the sys.path, just returns None.
"""
if os.path.basename(fs_path).startswith('__init__.'):
# We are calculating the path. __init__ files are not interesting.
fs_path = os.path.dirname(fs_path)
# prefer
# - UNIX
# /path/to/pythonX.Y/lib-dynload
# /path/to/pythonX.Y/site-packages
# - Windows
# C:\path\to\DLLs
# C:\path\to\Lib\site-packages
# over
# - UNIX
# /path/to/pythonX.Y
# - Windows
# C:\path\to\Lib
path = ''
for s in sys_path:
if (fs_path.startswith(s) and len(path) < len(s)):
path = s
    # - Windows
    #   X:\path\to\lib-dynload/datetime.pyd => datetime
module_path = fs_path[len(path):].lstrip(os.path.sep).lstrip('/')
    # - Windows
    #   Replace like X:\path\to\something/foo/bar.py
return _path_re.sub('', module_path).replace(os.path.sep, '.').replace('/', '.')
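# Hedged example of the conversion above (POSIX-style paths, assuming the
# prefix really appears in the given sys_path):
#   dotted_from_fs_path('/usr/lib/python3.4/email/utils.py',
#                       ['/usr/lib/python3.4'])  ->  'email.utils'
# The longest matching sys.path prefix is stripped, the trailing '.py' (or a
# '/__init__.py') is removed by _path_re, and path separators become dots.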
def load_module(evaluator, path=None, name=None):
sys_path = evaluator.sys_path
if path is not None:
dotted_path = dotted_from_fs_path(path, sys_path=sys_path)
else:
dotted_path = name
if dotted_path is None:
p, _, dotted_path = path.partition(os.path.sep)
sys_path.insert(0, p)
temp, sys.path = sys.path, sys_path
try:
__import__(dotted_path)
except RuntimeError:
if 'PySide' in dotted_path or 'PyQt' in dotted_path:
# RuntimeError: the PyQt4.QtCore and PyQt5.QtCore modules both wrap
# the QObject class.
# See https://github.com/davidhalter/jedi/pull/483
return None
raise
except ImportError:
# If a module is "corrupt" or not really a Python module or whatever.
debug.warning('Module %s not importable.', path)
return None
finally:
sys.path = temp
# Just access the cache after import, because of #59 as well as the very
# complicated import structure of Python.
module = sys.modules[dotted_path]
return create(evaluator, module)
docstr_defaults = {
'floating point number': 'float',
'character': 'str',
'integer': 'int',
'dictionary': 'dict',
'string': 'str',
}
def _parse_function_doc(doc):
"""
    Takes a function docstring and returns the params and return value as a tuple.
This is nothing more than a docstring parser.
TODO docstrings like utime(path, (atime, mtime)) and a(b [, b]) -> None
TODO docstrings like 'tuple of integers'
"""
# parse round parentheses: def func(a, (b,c))
try:
count = 0
start = doc.index('(')
for i, s in enumerate(doc[start:]):
if s == '(':
count += 1
elif s == ')':
count -= 1
if count == 0:
end = start + i
break
param_str = doc[start + 1:end]
except (ValueError, UnboundLocalError):
# ValueError for doc.index
# UnboundLocalError for undefined end in last line
debug.dbg('no brackets found - no param')
end = 0
param_str = ''
else:
# remove square brackets, that show an optional param ( = None)
def change_options(m):
args = m.group(1).split(',')
for i, a in enumerate(args):
if a and '=' not in a:
args[i] += '=None'
return ','.join(args)
while True:
param_str, changes = re.subn(r' ?\[([^\[\]]+)\]',
change_options, param_str)
if changes == 0:
break
param_str = param_str.replace('-', '_') # see: isinstance.__doc__
# parse return value
r = re.search('-[>-]* ', doc[end:end + 7])
if r is None:
ret = ''
else:
index = end + r.end()
# get result type, which can contain newlines
pattern = re.compile(r'(,\n|[^\n-])+')
ret_str = pattern.match(doc, index).group(0).strip()
# New object -> object()
ret_str = re.sub(r'[nN]ew (.*)', r'\1()', ret_str)
ret = docstr_defaults.get(ret_str, ret_str)
return param_str, ret
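# Hedged example of the docstring parser above on a builtin-style docstring:
#   _parse_function_doc('pow(x, y[, z]) -> number')
# should yield roughly ('x, y, z=None', 'number'): the optional bracketed
# parameter gains an '=None' default and the text after '->' is mapped
# through docstr_defaults when a known alias exists.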
def _create_from_name(evaluator, module, parent, name):
try:
return fake.get_faked(module.obj, parent.obj, parent=parent, name=name)
except fake.FakeDoesNotExist:
pass
try:
obj = getattr(parent.obj, name)
except AttributeError:
# Happens e.g. in properties of
# PyQt4.QtGui.QStyleOptionComboBox.currentText
# -> just set it to None
obj = None
return create(evaluator, obj, parent)
def builtin_from_name(evaluator, string):
bltn_obj = getattr(_builtins, string)
return create(evaluator, bltn_obj)
def _a_generator(foo):
"""Used to have an object to return for generators."""
yield 42
yield foo
_SPECIAL_OBJECTS = {
'FUNCTION_CLASS': type(load_module),
'METHOD_CLASS': type(CompiledObject.is_class),
'MODULE_CLASS': type(os),
'GENERATOR_OBJECT': _a_generator(1.0),
'BUILTINS': _builtins,
}
def get_special_object(evaluator, identifier):
obj = _SPECIAL_OBJECTS[identifier]
return create(evaluator, obj, parent=create(evaluator, _builtins))
def compiled_objects_cache(attribute_name):
def decorator(func):
"""
        This decorator caches just the ids, as opposed to caching the object
        itself. Caching the id has the advantage that an object doesn't need
        to be hashable.
"""
def wrapper(evaluator, obj, parent=None, module=None):
cache = getattr(evaluator, attribute_name)
# Do a very cheap form of caching here.
key = id(obj), id(parent)
try:
return cache[key][0]
except KeyError:
# TODO this whole decorator looks way too ugly and this if
# doesn't make it better. Find a more generic solution.
if parent or module:
result = func(evaluator, obj, parent, module)
else:
result = func(evaluator, obj)
# Need to cache all of them, otherwise the id could be overwritten.
cache[key] = result, obj, parent, module
return result
return wrapper
return decorator
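# Hedged illustration of the cache key used above: two lookups share a result
# only when both the wrapped object and its parent are the very same objects,
# since the key is (id(obj), id(parent)). Storing obj and parent inside the
# cached tuple keeps them alive, so their ids cannot be recycled and reused by
# unrelated objects while the cache entry exists.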
@compiled_objects_cache('compiled_cache')
def create(evaluator, obj, parent=None, module=None):
"""
    A very weird interface class to this module. The more options that are
    provided, the more accurately compiled objects can be loaded.
"""
if inspect.ismodule(obj):
if parent is not None:
# Modules don't have parents, be careful with caching: recurse.
return create(evaluator, obj)
else:
if parent is None and obj != _builtins:
return create(evaluator, obj, create(evaluator, _builtins))
try:
return fake.get_faked(module and module.obj, obj, parent=parent)
except fake.FakeDoesNotExist:
pass
return CompiledObject(evaluator, obj, parent)
|
snakeleon/YouCompleteMe-x86
|
third_party/ycmd/third_party/JediHTTP/vendor/jedi/jedi/evaluate/compiled/__init__.py
|
Python
|
gpl-3.0
| 17,309 | 0.000924 |
from __future__ import unicode_literals
import unittest
from ship.datastructures import rowdatacollection as rdc
from ship.datastructures import dataobject as do
from ship.fmp.datunits import ROW_DATA_TYPES as rdt
class RowDataCollectionTests(unittest.TestCase):
def setUp(self):
        # Create some objects to use and add a couple of rows
# create chainage in position 1
self.obj1 = do.FloatData(rdt.CHAINAGE, format_str='{:>10}', default=None, no_of_dps=3)
self.obj1.data_collection.append(0.00)
self.obj1.data_collection.append(3.65)
# Create elevation in position 2
self.obj2 = do.FloatData(rdt.ELEVATION, format_str='{:>10}', default=None, no_of_dps=3)
self.obj2.data_collection.append(32.345)
self.obj2.data_collection.append(33.45)
# Create roughness in position 3
self.obj3 = do.FloatData(rdt.ROUGHNESS, format_str='{:>10}', default=None, no_of_dps=3)
self.obj3.data_collection.append(0.035)
self.obj3.data_collection.append(0.035)
self.testcol = rdc.RowDataCollection()
self.testcol._collection.append(self.obj1)
self.testcol._collection.append(self.obj2)
self.testcol._collection.append(self.obj3)
def test_initCollection(self):
        '''A manually built collection should equal one built via addToCollection.'''
# Create a dummy collection
obj1 = do.FloatData(rdt.CHAINAGE, format_str='{:>10}', default=None, no_of_dps=3)
obj2 = do.FloatData(rdt.ELEVATION, format_str='{:>10}', default=None, no_of_dps=3)
obj3 = do.FloatData(rdt.ROUGHNESS, format_str='{:>10}', default=0.0, no_of_dps=3)
localcol = rdc.RowDataCollection()
localcol._collection.append(obj1)
localcol._collection.append(obj2)
localcol._collection.append(obj3)
        # Initialise a real collection
col = rdc.RowDataCollection()
col.addToCollection(do.FloatData(rdt.CHAINAGE, format_str='{:>10}', default=None, no_of_dps=3))
col.addToCollection(do.FloatData(rdt.ELEVATION, format_str='{:>10}', default=None, no_of_dps=3))
col.addToCollection(do.FloatData(rdt.ROUGHNESS, format_str='{:>10}', default=0.0, no_of_dps=3))
# Check that they're the same
col_eq, msg = self.checkCollectionEqual(localcol, col)
self.assertTrue(col_eq, 'rdc.RowDataCollection initialisation fail - ' + msg)
def test_bulkInitCollection(self):
objs = [
do.FloatData(rdt.CHAINAGE, format_str='{:>10}', default=None, no_of_dps=3),
do.FloatData(rdt.ELEVATION, format_str='{:>10}', default=None, no_of_dps=3),
do.FloatData(rdt.ROUGHNESS, format_str='{:>10}', default=0.0, no_of_dps=3),
]
col = rdc.RowDataCollection.bulkInitCollection(objs)
localcol = rdc.RowDataCollection()
localcol._collection.append(objs[0])
localcol._collection.append(objs[1])
localcol._collection.append(objs[2])
# Check they're the same
col_eq, msg = self.checkCollectionEqual(localcol, col)
self.assertTrue(col_eq, 'rdc.RowDataCollection initialisation fail - ' + msg)
def checkCollectionEqual(self, c1, c2):
'''Check the two given collections to make sure that they contain the same data.
@param c1: First rdc.RowDataCollection object
@param c2: Second rdc.RowDataCollection object
        @return: (True, '') if they're equal, or (False, reason) if not.
'''
if not len(c1._collection) == len(c2._collection):
return False, 'Collections are different lengths'
for i in range(0, len(c1._collection)):
if not c1._collection[i].data_type == c2._collection[i].data_type:
return False, 'Collections have different data_types'
if not c1._collection[i].format_str == c2._collection[i].format_str:
return False, 'Collections have different format_str'
if not c1._collection[i].default == c2._collection[i].default:
return False, 'Collections have different default'
for j in range(0, len(c1._collection[i].data_collection)):
                if not c1._collection[i].data_collection[j] == c2._collection[i].data_collection[j]:
return False, 'Collections have different data'
return True, ''
def test_indexOfDataObject(self):
"""Should return the corrent index of a particular ADataObject in colleciton."""
index1 = self.testcol.indexOfDataObject(rdt.CHAINAGE)
index2 = self.testcol.indexOfDataObject(rdt.ELEVATION)
index3 = self.testcol.indexOfDataObject(rdt.ROUGHNESS)
        self.assertEqual(index1, 0)
        self.assertEqual(index2, 1)
        self.assertEqual(index3, 2)
def test_iterateRows(self):
"""Test generator for complete row as a list"""
testrows = [
[0.00, 32.345, 0.035],
[3.65, 33.45, 0.035],
]
i = 0
for row in self.testcol.iterateRows():
self.assertListEqual(row, testrows[i])
i += 1
def test_iterateRowsWithKey(self):
"""Test generator for a single DataObject"""
testrows = [
32.345,
33.45,
]
i = 0
for row in self.testcol.iterateRows(rdt.ELEVATION):
self.assertEqual(row, testrows[i])
i += 1
def test_rowAsDict(self):
"""Shoud return a row as a dict of single values."""
test_dict = {rdt.CHAINAGE: 0.00, rdt.ELEVATION: 32.345, rdt.ROUGHNESS: 0.035}
row = self.testcol.rowAsDict(0)
self.assertDictEqual(row, test_dict)
def test_rowAsList(self):
test_list = [0.00, 32.345, 0.035]
row = self.testcol.rowAsList(0)
self.assertListEqual(row, test_list)
def test_dataObject(self):
"""Should return the correct ADataObject."""
test_vals = [0.00, 3.65]
obj = self.testcol.dataObject(rdt.CHAINAGE)
self.assertEqual(obj.data_type, rdt.CHAINAGE)
for i, o in enumerate(obj):
self.assertEqual(o, test_vals[i])
def test_dataObjectAsList(self):
"""Should return the contents of a DataObject as a list."""
test_list = [0.00, 3.65]
obj_list = self.testcol.dataObjectAsList(rdt.CHAINAGE)
self.assertListEqual(obj_list, test_list)
def test_toList(self):
test_list = [
[0.00, 3.65],
[32.345, 33.45],
[0.035, 0.035]
]
row_list = self.testcol.toList()
self.assertListEqual(row_list, test_list)
def test_toDict(self):
test_dict = {
rdt.CHAINAGE: [0.00, 3.65],
rdt.ELEVATION: [32.345, 33.45],
rdt.ROUGHNESS: [0.035, 0.035],
}
row_dict = self.testcol.toDict()
self.assertDictEqual(row_dict, test_dict)
def test_addValue(self):
        # Initialise a real collection
col = rdc.RowDataCollection()
col.addToCollection(do.FloatData(rdt.CHAINAGE, format_str='{:>10}', default=None, no_of_dps=3))
col.addToCollection(do.FloatData(rdt.ELEVATION, format_str='{:>10}', default=None, no_of_dps=3))
col.addToCollection(do.FloatData(rdt.ROUGHNESS, format_str='{:>10}', default=0.0, no_of_dps=3))
col._addValue(rdt.CHAINAGE, 2.5)
self.assertEqual(col._collection[0][0], 2.5)
def test_setValue(self):
        # Initialise a real collection
col = rdc.RowDataCollection()
col.addToCollection(do.FloatData(rdt.CHAINAGE, format_str='{:>10}', default=None, no_of_dps=3))
col.addToCollection(do.FloatData(rdt.ELEVATION, format_str='{:>10}', default=None, no_of_dps=3))
col.addToCollection(do.FloatData(rdt.ROUGHNESS, format_str='{:>10}', default=0.0, no_of_dps=3))
col._collection[0].addValue(2.5)
self.assertEqual(col._collection[0][0], 2.5)
col._setValue(rdt.CHAINAGE, 3.5, 0)
self.assertEqual(col._collection[0][0], 3.5)
def test_getPrintableRow(self):
        test_row = '     0.000    32.345     0.035'
row = self.testcol.getPrintableRow(0)
self.assertEqual(row, test_row)
def test_updateRow(self):
new_row = {rdt.CHAINAGE: 0.1, rdt.ELEVATION: 40, rdt.ROUGHNESS: 0.06}
self.testcol.updateRow(new_row, 0)
row = self.testcol.rowAsDict(0)
self.assertDictEqual(row, new_row)
with self.assertRaises(IndexError):
self.testcol.updateRow(new_row, 3)
fake_row = {'fakekey': 4.3, 'andagain': 3454}
with self.assertRaises(KeyError):
self.testcol.updateRow(fake_row, 0)
def test_addRow(self):
        # Initialise a real collection
col = rdc.RowDataCollection()
col.addToCollection(do.FloatData(rdt.CHAINAGE, format_str='{:>10}', default=None, no_of_dps=3))
col.addToCollection(do.FloatData(rdt.ELEVATION, format_str='{:>10}', default=None, no_of_dps=3))
col.addToCollection(do.FloatData(rdt.ROUGHNESS, format_str='{:>10}', default=0.0, no_of_dps=3))
new_row = {rdt.CHAINAGE: 3.0, rdt.ELEVATION: 41, rdt.ROUGHNESS: 0.06}
new_row2 = {rdt.CHAINAGE: 6.0, rdt.ELEVATION: 42, rdt.ROUGHNESS: 0.07}
new_row3 = {rdt.CHAINAGE: 10.0, rdt.ELEVATION: 43, rdt.ROUGHNESS: 0.08}
new_row4 = {rdt.CHAINAGE: 20.0, rdt.ELEVATION: 44, rdt.ROUGHNESS: 0.09}
# append and insert rows
col.addRow(new_row2)
col.addRow(new_row, 0)
# append and insert again
col.addRow(new_row4)
col.addRow(new_row3, 2)
row = col.rowAsDict(0)
row2 = col.rowAsDict(1)
row3 = col.rowAsDict(2)
row4 = col.rowAsDict(3)
self.assertDictEqual(row, new_row)
self.assertDictEqual(row2, new_row2)
fake_row = {59: 4.3}
with self.assertRaises(KeyError):
col.addRow(fake_row)
def test_numberOfRows(self):
self.assertEqual(self.testcol.numberOfRows(), 2)
def test_deleteRow(self):
test_list = [3.65, 33.45, 0.035]
self.testcol.deleteRow(0)
self.assertEqual(self.testcol.numberOfRows(), 1)
row = self.testcol.rowAsList(0)
self.assertListEqual(row, test_list)
|
duncan-r/SHIP
|
tests/test_rowdatacollection.py
|
Python
|
mit
| 10,219 | 0.002838 |
# coding: utf-8
import os
from setuptools import setup, find_packages
README = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-nomad-activity-feed',
version='0.1.1',
packages=find_packages(),
    description='A simple Django app to attach an activity feed to any Django model.',
long_description=README,
url='https://github.com/Nomadblue/django-activity-feed',
author='José Sazo',
author_email='jose@nomadblue.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
Nomadblue/django-nomad-activity-feed
|
setup.py
|
Python
|
bsd-3-clause
| 1,187 | 0.000843 |
import pytest
from sqlobject import boundattributes
from sqlobject import declarative
pytestmark = pytest.mark.skipif(
True,
reason='The module "boundattributes" and its tests were not finished yet')
class SOTestMe(object):
pass
class AttrReplace(boundattributes.BoundAttribute):
__unpackargs__ = ('replace',)
replace = None
@declarative.classinstancemethod
def make_object(self, cls, added_class, attr_name, **attrs):
if not self:
return cls.singleton().make_object(
added_class, attr_name, **attrs)
self.replace.added_class = added_class
self.replace.name = attr_name
assert attrs['replace'] is self.replace
del attrs['replace']
self.replace.attrs = attrs
return self.replace
class Holder:
def __init__(self, name):
self.holder_name = name
def __repr__(self):
return '<Holder %s>' % self.holder_name
def test_1():
v1 = Holder('v1')
v2 = Holder('v2')
v3 = Holder('v3')
class V2Class(AttrReplace):
arg1 = 'nothing'
arg2 = ['something']
class A1(SOTestMe):
a = AttrReplace(v1)
v = V2Class(v2)
class inline(AttrReplace):
replace = v3
arg3 = 'again'
arg4 = 'so there'
for n in ('a', 'v', 'inline'):
assert getattr(A1, n).name == n
assert getattr(A1, n).added_class is A1
assert A1.a is v1
assert A1.a.attrs == {}
assert A1.v is v2
assert A1.v.attrs == {'arg1': 'nothing', 'arg2': ['something']}
assert A1.inline is v3
assert A1.inline.attrs == {'arg3': 'again', 'arg4': 'so there'}
|
drnlm/sqlobject
|
sqlobject/tests/test_boundattributes.py
|
Python
|
lgpl-2.1
| 1,672 | 0 |
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Consume and serialize all of the data from a running TensorBoard instance.
This program connects to a live TensorBoard backend at given port, and saves
all of the data to local disk JSON in a predictable format.
This makes it easy to mock out the TensorBoard backend so that the frontend
may be tested in isolation.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import os
import os.path
import shutil
import threading
from six.moves.urllib.parse import urlencode
import six
from six.moves import http_client
import tensorflow as tf
from tensorflow.python.summary import event_multiplexer
from tensorflow.tensorboard.backend import server
tf.flags.DEFINE_string('logdir', None, """the logdir to pass to the TensorBoard
backend; data will be read from this logdir for serialization.""")
tf.flags.DEFINE_string('target', None, """The directory where serialized data
will be written""")
tf.flags.DEFINE_boolean('overwrite', False, """Whether to remove and overwrite
TARGET if it already exists.""")
tf.flags.DEFINE_boolean(
'purge_orphaned_data', True, 'Whether to purge data that '
'may have been orphaned due to TensorBoard restarts. '
'Disabling purge_orphaned_data can be used to debug data '
'disappearance.')
FLAGS = tf.flags.FLAGS
BAD_CHARACTERS = "#%&{}\\/<>*? $!'\":@+`|="
def Url(route, params):
"""Takes route and query params, and produce encoded url for that asset."""
out = route
if params:
# sorting ensures a unique filename for each query
sorted_params = sorted(six.iteritems(params))
    out += '?' + urlencode(sorted_params)
return out
def Clean(s):
"""Clean a string so it can be used as a filepath."""
for c in BAD_CHARACTERS:
s = s.replace(c, '_')
return s
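# Hedged illustration of the two helpers above:
#   Url('scalars', {'run': 'train', 'tag': 'loss'})
#     -> 'scalars?run=train&tag=loss' (params sorted for stable filenames)
#   Clean('scalars?run=train&tag=loss')
#     -> 'scalars_run_train_tag_loss' (each BAD_CHARACTERS char becomes '_')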
class TensorBoardStaticSerializer(object):
"""Serialize all the routes from a TensorBoard server to static json."""
def __init__(self, connection, target_path):
self.connection = connection
EnsureDirectoryExists(os.path.join(target_path, 'data'))
self.path = target_path
def GetAndSave(self, url):
"""GET the given url. Serialize the result at clean path version of url."""
self.connection.request('GET', '/data/' + url)
response = self.connection.getresponse()
destination = self.path + '/data/' + Clean(url)
if response.status != 200:
raise IOError(url)
content = response.read()
with open(destination, 'w') as f:
f.write(content)
return content
def GetRouteAndSave(self, route, params=None):
"""GET given route and params. Serialize the result. Return as JSON."""
url = Url(route, params)
return json.loads(self.GetAndSave(url))
def Run(self):
"""Serialize everything from a TensorBoard backend."""
# get the runs object, which is an index for every tag.
runs = self.GetRouteAndSave('runs')
# collect sampled data.
self.GetRouteAndSave('scalars')
# now let's just download everything!
for run, tag_type_to_tags in six.iteritems(runs):
for tag_type, tags in six.iteritems(tag_type_to_tags):
try:
if tag_type == 'graph':
# in this case, tags is a bool which specifies if graph is present.
if tags:
              self.GetRouteAndSave('graph', {'run': run})
elif tag_type == 'images':
for t in tags:
images = self.GetRouteAndSave('images', {'run': run, 'tag': t})
for im in images:
url = 'individualImage?' + im['query']
# pull down the images themselves.
self.GetAndSave(url)
else:
for t in tags:
# Save this, whatever it is :)
self.GetRouteAndSave(tag_type, {'run': run, 'tag': t})
except IOError as e:
PrintAndLog('Retrieval failed for %s/%s/%s' % (tag_type, run, tags),
tf.logging.WARN)
PrintAndLog('Got Exception: %s' % e, tf.logging.WARN)
PrintAndLog('continuing...', tf.logging.WARN)
continue
def EnsureDirectoryExists(path):
if not os.path.exists(path):
os.makedirs(path)
def PrintAndLog(msg, lvl=tf.logging.INFO):
tf.logging.log(lvl, msg)
print(msg)
def main(unused_argv=None):
target = FLAGS.target
logdir = FLAGS.logdir
if not target or not logdir:
PrintAndLog('Both --target and --logdir are required.', tf.logging.ERROR)
return -1
if os.path.exists(target):
if FLAGS.overwrite:
if os.path.isdir(target):
shutil.rmtree(target)
else:
os.remove(target)
else:
PrintAndLog('Refusing to overwrite target %s without --overwrite' %
target, tf.logging.ERROR)
return -2
path_to_run = server.ParseEventFilesSpec(FLAGS.logdir)
PrintAndLog('About to load Multiplexer. This may take some time.')
multiplexer = event_multiplexer.EventMultiplexer(
size_guidance=server.TENSORBOARD_SIZE_GUIDANCE,
purge_orphaned_data=FLAGS.purge_orphaned_data)
server.ReloadMultiplexer(multiplexer, path_to_run)
PrintAndLog('Multiplexer load finished. Starting TensorBoard server.')
s = server.BuildServer(multiplexer, 'localhost', 0)
server_thread = threading.Thread(target=s.serve_forever)
server_thread.daemon = True
server_thread.start()
connection = http_client.HTTPConnection('localhost', s.server_address[1])
PrintAndLog('Server setup! Downloading data from the server.')
x = TensorBoardStaticSerializer(connection, target)
x.Run()
PrintAndLog('Done downloading data.')
connection.close()
s.shutdown()
s.server_close()
if __name__ == '__main__':
tf.app.run()
|
panmari/tensorflow
|
tensorflow/tensorboard/scripts/serialize_tensorboard.py
|
Python
|
apache-2.0
| 6,341 | 0.008831 |
# This file is a part of MediaDrop (http://www.mediadrop.net),
# Copyright 2009-2015 MediaDrop contributors
# For the exact contribution history, see the git revision log.
# The source code contained in this file is licensed under the GPLv3 or
# (at your option) any later version.
# See LICENSE.txt in the main project directory, for more information.
"""add custom head tags
add setting for custom tags (HTML) in <head> section
added: 2012-02-13 (v0.10dev)
previously migrate script v054
Revision ID: 280565a54124
Revises: 4d27ff5680e5
Create Date: 2013-05-14 22:38:02.552230
"""
# revision identifiers, used by Alembic.
revision = '280565a54124'
down_revision = '4d27ff5680e5'
from alembic.op import execute, inline_literal
from sqlalchemy import Integer, Unicode, UnicodeText
from sqlalchemy import Column, MetaData, Table
# -- table definition ---------------------------------------------------------
metadata = MetaData()
settings = Table('settings', metadata,
Column('id', Integer, autoincrement=True, primary_key=True),
Column('key', Unicode(255), nullable=False, unique=True),
Column('value', UnicodeText),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
# -- helpers ------------------------------------------------------------------
def insert_setting(key, value):
execute(
settings.insert().\
values({
'key': inline_literal(key),
'value': inline_literal(value),
})
)
def delete_setting(key):
execute(
settings.delete().\
where(settings.c.key==inline_literal(key))
)
# -----------------------------------------------------------------------------
SETTINGS = [
(u'appearance_custom_head_tags', u''),
]
def upgrade():
for key, value in SETTINGS:
insert_setting(key, value)
def downgrade():
for key, value in SETTINGS:
delete_setting(key)
|
rbu/mediadrop
|
mediadrop/migrations/versions/004-280565a54124-add_custom_head_tags.py
|
Python
|
gpl-3.0
| 1,908 | 0.007862 |
"""Main entry points for scripts."""
from __future__ import print_function, division
from argparse import ArgumentParser
from collections import OrderedDict
from copy import copy
from datetime import datetime
import glob
import json
import logging
import math
import os
import scipy.stats
import numpy as np
from .version import __version__
from .psffuncs import gaussian_moffat_psf
from .psf import TabularPSF, GaussianMoffatPSF
from .io import read_datacube, write_results, read_results
from .fitting import (guess_sky, fit_galaxy_single, fit_galaxy_sky_multi,
fit_position_sky, fit_position_sky_sn_multi,
RegularizationPenalty)
from .utils import yxbounds
from .extern import ADR, Hyper_PSF3D_PL
__all__ = ["cubefit", "cubefit_subtract", "cubefit_plot"]
MODEL_SHAPE = (32, 32)
SPAXEL_SIZE = 0.43
MIN_NMAD = 2.5 # Minimum Number of Median Absolute Deviations above
# the minimum spaxel value in fit_position
LBFGSB_FACTOR = 1e10
REFWAVE = 5000. # reference wavelength in Angstroms for PSF params and ADR
POSITION_BOUND = 3. # Bound on fitted positions relative to initial positions
def snfpsf(wave, psfparams, header, psftype):
"""Create a 3-d PSF based on SNFactory-specific parameterization of
Gaussian + Moffat PSF parameters and ADR."""
# Get Gaussian+Moffat parameters at each wavelength.
relwave = wave / REFWAVE - 1.0
ellipticity = abs(psfparams[0]) * np.ones_like(wave)
alpha = np.abs(psfparams[1] +
psfparams[2] * relwave +
psfparams[3] * relwave**2)
# correlated parameters (coefficients determined externally)
sigma = 0.545 + 0.215 * alpha # Gaussian parameter
beta = 1.685 + 0.345 * alpha # Moffat parameter
eta = 1.040 + 0.0 * alpha # gaussian ampl. / moffat ampl.
# Atmospheric differential refraction (ADR): Because of ADR,
# the center of the PSF will be different at each wavelength,
# by an amount that we can determine (pretty well) from the
# atmospheric conditions and the pointing and angle of the
# instrument. We calculate the offsets here as a function of
# observation and wavelength and input these to the model.
# Correction to parallactic angle and airmass for 2nd-order effects
# such as MLA rotation, mechanical flexures or finite-exposure
# corrections. These values have been trained on faint-std star
# exposures.
#
# `predict_adr_params` uses 'AIRMASS', 'PARANG' and 'CHANNEL' keys
# in input dictionary.
delta, theta = Hyper_PSF3D_PL.predict_adr_params(header)
# check for crazy values of pressure and temperature, and assign default
# values.
pressure = header.get('PRESSURE', 617.)
if not 550. < pressure < 650.:
pressure = 617.
temp = header.get('TEMP', 2.)
if not -20. < temp < 20.:
temp = 2.
adr = ADR(pressure, temp, lref=REFWAVE, delta=delta, theta=theta)
adr_refract = adr.refract(0, 0, wave, unit=SPAXEL_SIZE)
# adr_refract[0, :] corresponds to x, adr_refract[1, :] => y
xctr, yctr = adr_refract
if psftype == 'gaussian-moffat':
return GaussianMoffatPSF(sigma, alpha, beta, ellipticity, eta,
yctr, xctr, MODEL_SHAPE, subpix=3)
elif psftype == 'tabular':
A = gaussian_moffat_psf(sigma, alpha, beta, ellipticity, eta,
yctr, xctr, MODEL_SHAPE, subpix=3)
return TabularPSF(A)
else:
raise ValueError("unknown psf type: " + repr(psftype))
def setup_logging(loglevel, logfname=None):
# if loglevel isn't an integer, parse it as "debug", "info", etc:
if not isinstance(loglevel, int):
loglevel = getattr(logging, loglevel.upper(), None)
if not isinstance(loglevel, int):
print('Invalid log level: %s' % loglevel)
exit(1)
# remove logfile if it already exists
if logfname is not None and os.path.exists(logfname):
os.remove(logfname)
logging.basicConfig(filename=logfname, format="%(levelname)s %(message)s",
level=loglevel)
def cubefit(argv=None):
DESCRIPTION = "Fit SN + galaxy model to SNFactory data cubes."
parser = ArgumentParser(prog="cubefit", description=DESCRIPTION)
parser.add_argument("configfile",
help="configuration file name (JSON format)")
parser.add_argument("outfile", help="Output file name (FITS format)")
parser.add_argument("--dataprefix", default="",
help="path prepended to data file names; default is "
"empty string")
parser.add_argument("--logfile", help="Write log to this file "
"(default: print to stdout)", default=None)
parser.add_argument("--loglevel", default="info",
help="one of: debug, info, warning (default is info)")
parser.add_argument("--diagdir", default=None,
help="If given, write intermediate diagnostic results "
"to this directory")
parser.add_argument("--refitgal", default=False, action="store_true",
help="Add an iteration where galaxy model is fit "
"using all epochs and then data/SN positions are "
"refit")
parser.add_argument("--mu_wave", default=0.07, type=float,
help="Wavelength regularization parameter. "
"Default is 0.07.")
parser.add_argument("--mu_xy", default=0.001, type=float,
help="Spatial regularization parameter. "
"Default is 0.001.")
parser.add_argument("--psftype", default="gaussian-moffat",
help="Type of PSF: 'gaussian-moffat' or 'tabular'. "
"Currently, tabular means generate a tabular PSF from "
"gaussian-moffat parameters.")
args = parser.parse_args(argv)
setup_logging(args.loglevel, logfname=args.logfile)
# record start time
tstart = datetime.now()
logging.info("cubefit v%s started at %s", __version__,
tstart.strftime("%Y-%m-%d %H:%M:%S"))
tsteps = OrderedDict() # finish time of each step.
logging.info("parameters: mu_wave={:.3g} mu_xy={:.3g} refitgal={}"
.format(args.mu_wave, args.mu_xy, args.refitgal))
logging.info(" psftype={}".format(args.psftype))
logging.info("reading config file")
with open(args.configfile) as f:
cfg = json.load(f)
# basic checks on config contents.
assert (len(cfg["filenames"]) == len(cfg["xcenters"]) ==
len(cfg["ycenters"]) == len(cfg["psf_params"]))
# -------------------------------------------------------------------------
# Load data cubes from the list of FITS files.
nt = len(cfg["filenames"])
logging.info("reading %d data cubes", nt)
cubes = []
for fname in cfg["filenames"]:
logging.debug(" reading %s", fname)
cubes.append(read_datacube(os.path.join(args.dataprefix, fname)))
wave = cubes[0].wave
nw = len(wave)
# assign some local variables for convenience
refs = cfg["refs"]
master_ref = cfg["master_ref"]
if master_ref not in refs:
raise ValueError("master ref choice must be one of the final refs (" +
" ".join(refs.astype(str)) + ")")
nonmaster_refs = [i for i in refs if i != master_ref]
nonrefs = [i for i in range(nt) if i not in refs]
# Ensure that all cubes have the same wavelengths.
if not all(np.all(cubes[i].wave == wave) for i in range(1, nt)):
raise ValueError("all data must have same wavelengths")
# -------------------------------------------------------------------------
# PSF for each observation
logging.info("setting up PSF for all %d epochs", nt)
psfs = [snfpsf(wave, cfg["psf_params"][i], cubes[i].header, args.psftype)
for i in range(nt)]
# -------------------------------------------------------------------------
# Initialize all model parameters to be fit
yctr0 = np.array(cfg["ycenters"])
xctr0 = np.array(cfg["xcenters"])
galaxy = np.zeros((nw, MODEL_SHAPE[0], MODEL_SHAPE[1]), dtype=np.float64)
sn = np.zeros((nt, nw), dtype=np.float64) # SN spectrum at each epoch
skys = np.zeros((nt, nw), dtype=np.float64) # Sky spectrum at each epoch
yctr = yctr0.copy()
xctr = xctr0.copy()
snctr = (0., 0.)
# For writing out to FITS
modelwcs = {"CRVAL1": -SPAXEL_SIZE * (MODEL_SHAPE[0] - 1) / 2.,
"CRPIX1": 1,
"CDELT1": SPAXEL_SIZE,
"CRVAL2": -SPAXEL_SIZE * (MODEL_SHAPE[1] - 1) / 2.,
"CRPIX2": 1,
"CDELT2": SPAXEL_SIZE,
"CRVAL3": cubes[0].header["CRVAL3"],
"CRPIX3": cubes[0].header["CRPIX3"],
"CDELT3": cubes[0].header["CDELT3"]}
# -------------------------------------------------------------------------
# Position bounds
# Bounds on data position: shape=(nt, 2)
xctrbounds = np.vstack((xctr - POSITION_BOUND, xctr + POSITION_BOUND)).T
yctrbounds = np.vstack((yctr - POSITION_BOUND, yctr + POSITION_BOUND)).T
snctrbounds = (-POSITION_BOUND, POSITION_BOUND)
# For data positions, check that bounds do not extend
# past the edge of the model and adjust the minbound and maxbound.
# This doesn't apply to SN position.
gshape = galaxy.shape[1:3] # model shape
for i in range(nt):
dshape = cubes[i].data.shape[1:3]
(yminabs, ymaxabs), (xminabs, xmaxabs) = yxbounds(gshape, dshape)
yctrbounds[i, 0] = max(yctrbounds[i, 0], yminabs)
yctrbounds[i, 1] = min(yctrbounds[i, 1], ymaxabs)
xctrbounds[i, 0] = max(xctrbounds[i, 0], xminabs)
xctrbounds[i, 1] = min(xctrbounds[i, 1], xmaxabs)
# -------------------------------------------------------------------------
# Guess sky
logging.info("guessing sky for all %d epochs", nt)
for i, cube in enumerate(cubes):
skys[i, :] = guess_sky(cube, npix=30)
# -------------------------------------------------------------------------
# Regularization penalty parameters
# Calculate rough average galaxy spectrum from all final refs.
spectra = np.zeros((len(refs), len(wave)), dtype=np.float64)
for j, i in enumerate(refs):
avg_spec = np.average(cubes[i].data, axis=(1, 2)) - skys[i]
        # Use an integer bin count: true division is in effect via __future__.
        mean_spec, bins, bn = scipy.stats.binned_statistic(wave, avg_spec,
                                                           bins=len(wave) // 10)
spectra[j] = np.interp(wave, bins[:-1] + np.diff(bins)[0]/2.,
mean_spec)
mean_gal_spec = np.average(spectra, axis=0)
# Ensure that there won't be any negative or tiny values in mean:
mean_floor = 0.1 * np.median(mean_gal_spec)
mean_gal_spec[mean_gal_spec < mean_floor] = mean_floor
galprior = np.zeros((nw, MODEL_SHAPE[0], MODEL_SHAPE[1]), dtype=np.float64)
regpenalty = RegularizationPenalty(galprior, mean_gal_spec, args.mu_xy,
args.mu_wave)
tsteps["setup"] = datetime.now()
# -------------------------------------------------------------------------
# Fit just the galaxy model to just the master ref.
data = cubes[master_ref].data - skys[master_ref, :, None, None]
weight = cubes[master_ref].weight
logging.info("fitting galaxy to master ref [%d]", master_ref)
galaxy = fit_galaxy_single(galaxy, data, weight,
(yctr[master_ref], xctr[master_ref]),
psfs[master_ref], regpenalty, LBFGSB_FACTOR)
if args.diagdir:
fname = os.path.join(args.diagdir, 'step1.fits')
write_results(galaxy, skys, sn, snctr, yctr, xctr, yctr0, xctr0,
yctrbounds, xctrbounds, cubes, psfs, modelwcs, fname)
tsteps["fit galaxy to master ref"] = datetime.now()
# -------------------------------------------------------------------------
# Fit the positions of the other final refs
#
# Here we only use spaxels where the *model* has significant flux.
# We define "significant" as some number of median absolute deviations
# (MAD) above the minimum flux in the model. We (temporarily) set the
# weight of "insignificant" spaxels to zero during this process, then
# restore the original weight after we're done.
#
# If there are less than 20 "significant" spaxels, we do not attempt to
# fit the position, but simply leave it as is.
logging.info("fitting position of non-master refs %s", nonmaster_refs)
for i in nonmaster_refs:
cube = cubes[i]
# Evaluate galaxy on this epoch for purpose of masking spaxels.
gal = psfs[i].evaluate_galaxy(galaxy, (cube.ny, cube.nx),
(yctr[i], xctr[i]))
# Set weight of low-valued spaxels to zero.
gal2d = gal.sum(axis=0) # Sum of gal over wavelengths
mad = np.median(np.abs(gal2d - np.median(gal2d)))
mask = gal2d > np.min(gal2d) + MIN_NMAD * mad
if mask.sum() < 20:
continue
weight = cube.weight * mask[None, :, :]
fctr, fsky = fit_position_sky(galaxy, cube.data, weight,
(yctr[i], xctr[i]), psfs[i],
(yctrbounds[i], xctrbounds[i]))
yctr[i], xctr[i] = fctr
skys[i, :] = fsky
tsteps["fit positions of other refs"] = datetime.now()
# -------------------------------------------------------------------------
# Redo model fit, this time including all final refs.
datas = [cubes[i].data for i in refs]
weights = [cubes[i].weight for i in refs]
ctrs = [(yctr[i], xctr[i]) for i in refs]
psfs_refs = [psfs[i] for i in refs]
logging.info("fitting galaxy to all refs %s", refs)
galaxy, fskys = fit_galaxy_sky_multi(galaxy, datas, weights, ctrs,
psfs_refs, regpenalty, LBFGSB_FACTOR)
# put fitted skys back in `skys`
for i,j in enumerate(refs):
skys[j, :] = fskys[i]
if args.diagdir:
fname = os.path.join(args.diagdir, 'step2.fits')
write_results(galaxy, skys, sn, snctr, yctr, xctr, yctr0, xctr0,
yctrbounds, xctrbounds, cubes, psfs, modelwcs, fname)
tsteps["fit galaxy to all refs"] = datetime.now()
# -------------------------------------------------------------------------
# Fit position of data and SN in non-references
#
# Now we think we have a good galaxy model. We fix this and fit
# the relative position of the remaining epochs (which presumably
# all have some SN light). We simultaneously fit the position of
# the SN itself.
logging.info("fitting position of all %d non-refs and SN position",
len(nonrefs))
if len(nonrefs) > 0:
datas = [cubes[i].data for i in nonrefs]
weights = [cubes[i].weight for i in nonrefs]
psfs_nonrefs = [psfs[i] for i in nonrefs]
fyctr, fxctr, snctr, fskys, fsne = fit_position_sky_sn_multi(
galaxy, datas, weights, yctr[nonrefs], xctr[nonrefs],
snctr, psfs_nonrefs, LBFGSB_FACTOR, yctrbounds[nonrefs],
xctrbounds[nonrefs], snctrbounds)
# put fitted results back in parameter lists.
yctr[nonrefs] = fyctr
xctr[nonrefs] = fxctr
for i,j in enumerate(nonrefs):
skys[j, :] = fskys[i]
sn[j, :] = fsne[i]
tsteps["fit positions of nonrefs & SN"] = datetime.now()
# -------------------------------------------------------------------------
# optional step(s)
if args.refitgal and len(nonrefs) > 0:
if args.diagdir:
fname = os.path.join(args.diagdir, 'step3.fits')
write_results(galaxy, skys, sn, snctr, yctr, xctr, yctr0, xctr0,
yctrbounds, xctrbounds, cubes, psfs, modelwcs, fname)
# ---------------------------------------------------------------------
# Redo fit of galaxy, using ALL epochs, including ones with SN
# light. We hold the SN "fixed" simply by subtracting it from the
# data and fitting the remainder.
#
# This is slightly dangerous: any errors in the original SN
# determination, whether due to an incorrect PSF or ADR model
# or errors in the galaxy model will result in residuals. The
# galaxy model will then try to compensate for these.
#
# We should look at the galaxy model at the position of the SN
# before and after this step to see if there is a bias towards
# the galaxy flux increasing.
logging.info("fitting galaxy using all %d epochs", nt)
datas = [cube.data for cube in cubes]
weights = [cube.weight for cube in cubes]
ctrs = [(yctr[i], xctr[i]) for i in range(nt)]
# subtract SN from non-ref cubes.
for i in nonrefs:
s = psfs[i].point_source(snctr, datas[i].shape[1:3], ctrs[i])
# do *not* use in-place operation (-=) here!
datas[i] = cubes[i].data - sn[i, :, None, None] * s
galaxy, fskys = fit_galaxy_sky_multi(galaxy, datas, weights, ctrs,
psfs, regpenalty, LBFGSB_FACTOR)
for i in range(nt):
skys[i, :] = fskys[i] # put fitted skys back in skys
if args.diagdir:
fname = os.path.join(args.diagdir, 'step4.fits')
write_results(galaxy, skys, sn, snctr, yctr, xctr, yctr0, xctr0,
yctrbounds, xctrbounds, cubes, psfs, modelwcs, fname)
# ---------------------------------------------------------------------
# Repeat step before last: fit position of data and SN in
# non-references
logging.info("re-fitting position of all %d non-refs and SN position",
len(nonrefs))
if len(nonrefs) > 0:
datas = [cubes[i].data for i in nonrefs]
weights = [cubes[i].weight for i in nonrefs]
psfs_nonrefs = [psfs[i] for i in nonrefs]
fyctr, fxctr, snctr, fskys, fsne = fit_position_sky_sn_multi(
galaxy, datas, weights, yctr[nonrefs], xctr[nonrefs],
snctr, psfs_nonrefs, LBFGSB_FACTOR, yctrbounds[nonrefs],
xctrbounds[nonrefs], snctrbounds)
# put fitted results back in parameter lists.
yctr[nonrefs] = fyctr
xctr[nonrefs] = fxctr
for i, j in enumerate(nonrefs):
skys[j, :] = fskys[i]
sn[j, :] = fsne[i]
# -------------------------------------------------------------------------
# Write results
logging.info("writing results to %s", args.outfile)
write_results(galaxy, skys, sn, snctr, yctr, xctr, yctr0, xctr0,
yctrbounds, xctrbounds, cubes, psfs, modelwcs, args.outfile)
# time info
logging.info("step times:")
maxlen = max(len(key) for key in tsteps)
fmtstr = " %2dm%02ds - %-" + str(maxlen) + "s"
tprev = tstart
for key, tstep in tsteps.items():
t = (tstep - tprev).seconds
logging.info(fmtstr, t//60, t%60, key)
tprev = tstep
tfinish = datetime.now()
logging.info("finished at %s", tfinish.strftime("%Y-%m-%d %H:%M:%S"))
t = (tfinish - tstart).seconds
logging.info("took %3dm%2ds", t // 60, t % 60)
return 0
def cubefit_subtract(argv=None):
DESCRIPTION = \
"""Subtract model determined by cubefit from the original data.
The "outnames" key in the supplied configuration file is used to
determine the output FITS file names. The input FITS header is passed
unaltered to the output file, with the following additions:
(1) A `HISTORY` entry. (2) `CBFT_SNX` and `CBFT_SNY` records giving
the cubefit-determined position of the SN relative to the center of
the data array (at the reference wavelength).
This script also writes fitted SN spectra to individual FITS files.
The "sn_outnames" configuration field determines the output filenames.
"""
import shutil
import fitsio
prog_name = "cubefit-subtract"
prog_name_ver = "{} v{}".format(prog_name, __version__)
parser = ArgumentParser(prog=prog_name, description=DESCRIPTION)
parser.add_argument("configfile", help="configuration file name "
"(JSON format), same as cubefit input.")
parser.add_argument("resultfile", help="Result FITS file from cubefit")
parser.add_argument("--dataprefix", default="",
help="path prepended to data file names; default is "
"empty string")
parser.add_argument("--outprefix", default="",
help="path prepended to output file names; default is "
"empty string")
args = parser.parse_args(argv)
setup_logging("info")
# get input & output filenames
with open(args.configfile) as f:
cfg = json.load(f)
fnames = [os.path.join(args.dataprefix, fname)
for fname in cfg["filenames"]]
outfnames = [os.path.join(args.outprefix, fname)
for fname in cfg["outnames"]]
# load results
results = read_results(args.resultfile)
epochs = results["epochs"]
sny, snx = results["snctr"]
if not len(epochs) == len(fnames) == len(outfnames):
raise RuntimeError("number of epochs in result file not equal to "
"number of input and output files in config file")
# subtract and write out.
for fname, outfname, epoch in zip(fnames, outfnames, epochs):
logging.info("writing %s", outfname)
shutil.copy(fname, outfname)
f = fitsio.FITS(outfname, "rw")
data = f[0].read()
data -= epoch["galeval"]
f[0].write(data)
f[0].write_history("galaxy subtracted by " + prog_name_ver)
f[0].write_key("CBFT_SNX", snx - epoch['xctr'],
comment="SN x offset from center at {:.0f} A [spaxels]"
.format(REFWAVE))
f[0].write_key("CBFT_SNY", sny - epoch['yctr'],
comment="SN y offset from center at {:.0f} A [spaxels]"
.format(REFWAVE))
f.close()
# output SN spectra to separate files.
sn_outnames = [os.path.join(args.outprefix, fname)
for fname in cfg["sn_outnames"]]
header = {"CRVAL1": results["header"]["CRVAL3"],
"CRPIX1": results["header"]["CRPIX3"],
"CDELT1": results["header"]["CDELT3"]}
for outfname, epoch in zip(sn_outnames, epochs):
logging.info("writing %s", outfname)
if os.path.exists(outfname): # avoid warning from clobber=True
os.remove(outfname)
with fitsio.FITS(outfname, "rw") as f:
f.write(epoch["sn"], extname="sn", header=header)
f[0].write_history("created by " + prog_name_ver)
return 0
def cubefit_plot(argv=None):
DESCRIPTION = """Plot results and diagnostics from cubefit"""
from .plotting import plot_timeseries, plot_epoch, plot_sn, plot_adr
# arguments are the same as cubefit except an output
parser = ArgumentParser(prog="cubefit-plot", description=DESCRIPTION)
parser.add_argument("configfile", help="configuration filename")
parser.add_argument("resultfile", help="Result filename from cubefit")
parser.add_argument("outprefix", help="output prefix")
parser.add_argument("--dataprefix", default="",
help="path prepended to data file names; default is "
"empty string")
parser.add_argument('-b', '--band', help='timeseries band (U, B, V). '
'Default is a 1000 A wide band in middle of cube.',
default=None, dest='band')
parser.add_argument('--idrfiles', nargs='+', default=None,
help='Prefix of IDR. If given, the cubefit SN '
'spectra are plotted against the production values.')
parser.add_argument("--diagdir", default=None,
help="If given, read intermediate diagnostic "
"results from this directory and include in plot(s)")
parser.add_argument("--plotepochs", default=False, action="store_true",
help="Make diagnostic plots for each epoch")
args = parser.parse_args(argv)
# Read in data
with open(args.configfile) as f:
cfg = json.load(f)
cubes = [read_datacube(os.path.join(args.dataprefix, fname), scale=False)
for fname in cfg["filenames"]]
results = OrderedDict()
# Diagnostic results at each step
if args.diagdir is not None:
fnames = sorted(glob.glob(os.path.join(args.diagdir, "step*.fits")))
for fname in fnames:
name = os.path.basename(fname).split(".")[0]
results[name] = read_results(fname)
# Final result (don't fail if not available)
if os.path.exists(args.resultfile):
results["final"] = read_results(args.resultfile)
# plot time series
plot_timeseries(cubes, results, band=args.band,
fname=(args.outprefix + '_timeseries.png'))
# Plot wave slices and sn, galaxy and sky spectra for all epochs.
if 'final' in results and args.plotepochs:
for i_t in range(len(cubes)):
plot_epoch(cubes[i_t], results['final']['epochs'][i_t],
fname=(args.outprefix + '_epoch%02d.png' % i_t))
# Plot result spectra against IDR spectra.
if 'final' in results and args.idrfiles is not None:
plot_sn(cfg['filenames'], results['final']['epochs']['sn'],
results['final']['wave'], args.idrfiles,
args.outprefix + '_sn.png')
# Plot the x-y coordinates of the adr versus wavelength
# (Skip this for now; contains no interesting information)
#plot_adr(cubes, cubes[0].wave, fname=(args.outprefix + '_adr.png'))
return 0
|
snfactory/cubefit
|
cubefit/main.py
|
Python
|
mit
| 26,267 | 0.000533 |
#!/usr/bin/env python
# coding=utf-8
import struct
from twisted.internet import defer
from txportal.packet import cmcc, huawei
from txportal.simulator.handlers import base_handler
import functools
class AuthHandler(base_handler.BasicHandler):
def proc_cmccv1(self, req, rundata):
resp = cmcc.Portal.newMessage(
cmcc.ACK_AUTH,
req.userIp,
req.serialNo,
req.reqId,
secret=self.secret
)
resp.attrNum = 1
resp.attrs = [
(0x05, 'success'),
]
return resp
def proc_cmccv2(self, req, rundata):
resp = cmcc.Portal.newMessage(
cmcc.ACK_AUTH,
req.userIp,
req.serialNo,
req.reqId,
secret=self.secret
)
resp.attrNum = 1
resp.attrs = [
(0x05, 'success'),
]
return resp
def proc_huaweiv1(self, req, rundata):
resp = huawei.Portal.newMessage(
huawei.ACK_AUTH,
req.userIp,
req.serialNo,
req.reqId,
secret=self.secret
)
resp.attrNum = 1
resp.attrs = [
(0x05, 'success'),
]
return resp
@defer.inlineCallbacks
def proc_huaweiv2(self, req, rundata):
resp = huawei.PortalV2.newMessage(
huawei.ACK_AUTH,
req.userIp,
req.serialNo,
req.reqId,
self.secret,
auth=req.auth,
chap=(req.isChap==0x00)
)
resp.attrNum = 1
resp.attrs = [
(0x05, 'success'),
]
        resp.auth_packet()
        # inlineCallbacks wraps a generator, so a bare `return resp` would
        # fail at runtime (and is a SyntaxError inside a Python 2 generator).
        # There is no real asynchronous work here yet, so yield nothing and
        # hand the response back through defer.returnValue.
        yield None
        defer.returnValue(resp)
|
talkincode/txportal
|
txportal/simulator/handlers/auth_handler.py
|
Python
|
mit
| 1,737 | 0.003454 |
"""Support for Satel Integra zone states- represented as binary sensors."""
import logging
from homeassistant.components.binary_sensor import BinarySensorDevice
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from . import (
CONF_OUTPUTS, CONF_ZONE_NAME, CONF_ZONE_TYPE, CONF_ZONES, DATA_SATEL,
SIGNAL_OUTPUTS_UPDATED, SIGNAL_ZONES_UPDATED)
DEPENDENCIES = ['satel_integra']
_LOGGER = logging.getLogger(__name__)
async def async_setup_platform(
hass, config, async_add_entities, discovery_info=None):
"""Set up the Satel Integra binary sensor devices."""
if not discovery_info:
return
configured_zones = discovery_info[CONF_ZONES]
devices = []
for zone_num, device_config_data in configured_zones.items():
zone_type = device_config_data[CONF_ZONE_TYPE]
zone_name = device_config_data[CONF_ZONE_NAME]
device = SatelIntegraBinarySensor(
zone_num, zone_name, zone_type, SIGNAL_ZONES_UPDATED)
devices.append(device)
configured_outputs = discovery_info[CONF_OUTPUTS]
for zone_num, device_config_data in configured_outputs.items():
zone_type = device_config_data[CONF_ZONE_TYPE]
zone_name = device_config_data[CONF_ZONE_NAME]
device = SatelIntegraBinarySensor(
zone_num, zone_name, zone_type, SIGNAL_OUTPUTS_UPDATED)
devices.append(device)
async_add_entities(devices)
class SatelIntegraBinarySensor(BinarySensorDevice):
"""Representation of an Satel Integra binary sensor."""
def __init__(self, device_number, device_name, zone_type, react_to_signal):
"""Initialize the binary_sensor."""
self._device_number = device_number
self._name = device_name
self._zone_type = zone_type
self._state = 0
self._react_to_signal = react_to_signal
async def async_added_to_hass(self):
"""Register callbacks."""
if self._react_to_signal == SIGNAL_OUTPUTS_UPDATED:
if self._device_number in\
self.hass.data[DATA_SATEL].violated_outputs:
self._state = 1
else:
self._state = 0
else:
if self._device_number in\
self.hass.data[DATA_SATEL].violated_zones:
self._state = 1
else:
self._state = 0
async_dispatcher_connect(
self.hass, self._react_to_signal, self._devices_updated)
@property
def name(self):
"""Return the name of the entity."""
return self._name
@property
def icon(self):
"""Icon for device by its type."""
if self._zone_type == 'smoke':
return "mdi:fire"
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def is_on(self):
"""Return true if sensor is on."""
return self._state == 1
@property
def device_class(self):
"""Return the class of this sensor, from DEVICE_CLASSES."""
return self._zone_type
@callback
def _devices_updated(self, zones):
"""Update the zone's state, if needed."""
if self._device_number in zones \
and self._state != zones[self._device_number]:
self._state = zones[self._device_number]
self.async_schedule_update_ha_state()
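# Hedged illustration of the update payload handled above: the dispatcher is
# expected to deliver a mapping of device numbers to 0/1 states, for example
# {3: 1, 7: 0}, and only a changed value under this sensor's own number
# triggers a Home Assistant state update.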
|
nugget/home-assistant
|
homeassistant/components/satel_integra/binary_sensor.py
|
Python
|
apache-2.0
| 3,430 | 0 |
# Copyright 2008-2015 Nokia Networks
# Copyright 2016- Robot Framework Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import difflib
import re
import time
import token
from tokenize import generate_tokens, untokenize
from robot.api import logger
from robot.errors import (ContinueForLoop, DataError, ExecutionFailed,
ExecutionFailures, ExecutionPassed, ExitForLoop,
PassExecution, ReturnFromKeyword)
from robot.running import Keyword, RUN_KW_REGISTER
from robot.running.context import EXECUTION_CONTEXTS
from robot.running.usererrorhandler import UserErrorHandler
from robot.utils import (DotDict, escape, format_assign_message,
get_error_message, get_time, is_falsy, is_integer,
is_string, is_truthy, is_unicode, IRONPYTHON, JYTHON,
Matcher, normalize, NormalizedDict, parse_time, prepr,
RERAISED_EXCEPTIONS, plural_or_not as s, roundup,
secs_to_timestr, seq2str, split_from_equals, StringIO,
timestr_to_secs, type_name, unic)
from robot.utils.asserts import assert_equal, assert_not_equal
from robot.variables import (is_list_var, is_var, DictVariableTableValue,
VariableTableValue, VariableSplitter,
variable_not_found)
from robot.version import get_version
if JYTHON:
from java.lang import String, Number
# TODO: Clean-up registering run keyword variants in RF 3.1.
# https://github.com/robotframework/robotframework/issues/2190
def run_keyword_variant(resolve):
def decorator(method):
RUN_KW_REGISTER.register_run_keyword('BuiltIn', method.__name__,
resolve, deprecation_warning=False)
return method
return decorator
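# The `resolve` argument tells Robot Framework how many leading arguments
# should have their variables resolved before the keyword itself is run.
# For example, `run_keyword` below uses resolve=1 so that only the keyword
# name is resolved and the remaining arguments are passed on untouched,
# while `run_keywords` uses resolve=0 and handles all resolving itself.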
class _BuiltInBase(object):
@property
def _context(self):
return self._get_context()
def _get_context(self, top=False):
ctx = EXECUTION_CONTEXTS.current if not top else EXECUTION_CONTEXTS.top
if ctx is None:
raise RobotNotRunningError('Cannot access execution context')
return ctx
@property
def _namespace(self):
return self._get_context().namespace
@property
def _variables(self):
return self._namespace.variables
def _matches(self, string, pattern):
# Must use this instead of fnmatch when string may contain newlines.
matcher = Matcher(pattern, caseless=False, spaceless=False)
return matcher.match(string)
def _is_true(self, condition):
if is_string(condition):
condition = self.evaluate(condition, modules='os,sys')
return bool(condition)
def _log_types(self, *args):
self._log_types_at_level('DEBUG', *args)
def _log_types_at_level(self, level, *args):
msg = ["Argument types are:"] + [self._get_type(a) for a in args]
self.log('\n'.join(msg), level)
def _get_type(self, arg):
# In IronPython type(u'x') is str. We want to report unicode anyway.
if is_unicode(arg):
return "<type 'unicode'>"
return str(type(arg))
class _Converter(_BuiltInBase):
def convert_to_integer(self, item, base=None):
"""Converts the given item to an integer number.
If the given item is a string, it is by default expected to be an
integer in base 10. There are two ways to convert from other bases:
- Give base explicitly to the keyword as ``base`` argument.
- Prefix the given string with the base so that ``0b`` means binary
(base 2), ``0o`` means octal (base 8), and ``0x`` means hex (base 16).
The prefix is considered only when ``base`` argument is not given and
may itself be prefixed with a plus or minus sign.
The syntax is case-insensitive and possible spaces are ignored.
Examples:
| ${result} = | Convert To Integer | 100 | | # Result is 100 |
| ${result} = | Convert To Integer | FF AA | 16 | # Result is 65450 |
| ${result} = | Convert To Integer | 100 | 8 | # Result is 64 |
| ${result} = | Convert To Integer | -100 | 2 | # Result is -4 |
| ${result} = | Convert To Integer | 0b100 | | # Result is 4 |
| ${result} = | Convert To Integer | -0x100 | | # Result is -256 |
See also `Convert To Number`, `Convert To Binary`, `Convert To Octal`,
`Convert To Hex`, and `Convert To Bytes`.
"""
self._log_types(item)
return self._convert_to_integer(item, base)
def _convert_to_integer(self, orig, base=None):
try:
item = self._handle_java_numbers(orig)
item, base = self._get_base(item, base)
if base:
return int(item, self._convert_to_integer(base))
return int(item)
except:
raise RuntimeError("'%s' cannot be converted to an integer: %s"
% (orig, get_error_message()))
def _handle_java_numbers(self, item):
if not JYTHON:
return item
if isinstance(item, String):
return unic(item)
if isinstance(item, Number):
return item.doubleValue()
return item
def _get_base(self, item, base):
if not is_string(item):
return item, base
item = normalize(item)
if item.startswith(('-', '+')):
sign = item[0]
item = item[1:]
else:
sign = ''
bases = {'0b': 2, '0o': 8, '0x': 16}
if base or not item.startswith(tuple(bases)):
return sign+item, base
return sign+item[2:], bases[item[:2]]
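    # For example, _get_base('-0x100', None) returns ('-100', 16), so the
    # prefix selects the base, whereas _get_base('0x100', 16) returns
    # ('0x100', 16) because an explicitly given base takes precedence.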
def convert_to_binary(self, item, base=None, prefix=None, length=None):
"""Converts the given item to a binary string.
The ``item``, with an optional ``base``, is first converted to an
integer using `Convert To Integer` internally. After that it
is converted to a binary number (base 2) represented as a
string such as ``1011``.
The returned value can contain an optional ``prefix`` and can be
required to be of minimum ``length`` (excluding the prefix and a
possible minus sign). If the value is initially shorter than
the required length, it is padded with zeros.
Examples:
| ${result} = | Convert To Binary | 10 | | | # Result is 1010 |
| ${result} = | Convert To Binary | F | base=16 | prefix=0b | # Result is 0b1111 |
| ${result} = | Convert To Binary | -2 | prefix=B | length=4 | # Result is -B0010 |
See also `Convert To Integer`, `Convert To Octal` and `Convert To Hex`.
"""
return self._convert_to_bin_oct_hex(item, base, prefix, length, 'b')
def convert_to_octal(self, item, base=None, prefix=None, length=None):
"""Converts the given item to an octal string.
The ``item``, with an optional ``base``, is first converted to an
integer using `Convert To Integer` internally. After that it
is converted to an octal number (base 8) represented as a
string such as ``775``.
The returned value can contain an optional ``prefix`` and can be
required to be of minimum ``length`` (excluding the prefix and a
possible minus sign). If the value is initially shorter than
the required length, it is padded with zeros.
Examples:
| ${result} = | Convert To Octal | 10 | | | # Result is 12 |
| ${result} = | Convert To Octal | -F | base=16 | prefix=0 | # Result is -017 |
| ${result} = | Convert To Octal | 16 | prefix=oct | length=4 | # Result is oct0020 |
See also `Convert To Integer`, `Convert To Binary` and `Convert To Hex`.
"""
return self._convert_to_bin_oct_hex(item, base, prefix, length, 'o')
def convert_to_hex(self, item, base=None, prefix=None, length=None,
lowercase=False):
"""Converts the given item to a hexadecimal string.
The ``item``, with an optional ``base``, is first converted to an
integer using `Convert To Integer` internally. After that it
is converted to a hexadecimal number (base 16) represented as
a string such as ``FF0A``.
The returned value can contain an optional ``prefix`` and can be
required to be of minimum ``length`` (excluding the prefix and a
possible minus sign). If the value is initially shorter than
the required length, it is padded with zeros.
        By default the value is returned as an upper case string, but giving
        the ``lowercase`` argument a true value (see `Boolean arguments`) turns
        the value (but not the given prefix) to lower case.
Examples:
| ${result} = | Convert To Hex | 255 | | | # Result is FF |
| ${result} = | Convert To Hex | -10 | prefix=0x | length=2 | # Result is -0x0A |
| ${result} = | Convert To Hex | 255 | prefix=X | lowercase=yes | # Result is Xff |
See also `Convert To Integer`, `Convert To Binary` and `Convert To Octal`.
"""
spec = 'x' if is_truthy(lowercase) else 'X'
return self._convert_to_bin_oct_hex(item, base, prefix, length, spec)
def _convert_to_bin_oct_hex(self, item, base, prefix, length, format_spec):
self._log_types(item)
ret = format(self._convert_to_integer(item, base), format_spec)
prefix = prefix or ''
if ret[0] == '-':
prefix = '-' + prefix
ret = ret[1:]
if length:
ret = ret.rjust(self._convert_to_integer(length), '0')
return prefix + ret
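    # For example, _convert_to_bin_oct_hex('-2', None, 'B', '4', 'b')
    # returns '-B0010': the minus sign is moved in front of the prefix and
    # the zero-padding from `length` is applied to the digits only.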
def convert_to_number(self, item, precision=None):
"""Converts the given item to a floating point number.
If the optional ``precision`` is positive or zero, the returned number
is rounded to that number of decimal digits. Negative precision means
that the number is rounded to the closest multiple of 10 to the power
of the absolute precision. If a number is equally close to a certain
precision, it is always rounded away from zero.
Examples:
| ${result} = | Convert To Number | 42.512 | | # Result is 42.512 |
| ${result} = | Convert To Number | 42.512 | 1 | # Result is 42.5 |
| ${result} = | Convert To Number | 42.512 | 0 | # Result is 43.0 |
| ${result} = | Convert To Number | 42.512 | -1 | # Result is 40.0 |
Notice that machines generally cannot store floating point numbers
accurately. This may cause surprises with these numbers in general
and also when they are rounded. For more information see, for example,
these resources:
- http://docs.python.org/2/tutorial/floatingpoint.html
- http://randomascii.wordpress.com/2012/02/25/comparing-floating-point-numbers-2012-edition
If you need an integer number, use `Convert To Integer` instead.
"""
self._log_types(item)
return self._convert_to_number(item, precision)
def _convert_to_number(self, item, precision=None):
number = self._convert_to_number_without_precision(item)
if precision is not None:
number = roundup(number, self._convert_to_integer(precision),
return_type=float)
return number
def _convert_to_number_without_precision(self, item):
try:
if JYTHON:
item = self._handle_java_numbers(item)
return float(item)
except:
error = get_error_message()
try:
return float(self._convert_to_integer(item))
except RuntimeError:
raise RuntimeError("'%s' cannot be converted to a floating "
"point number: %s" % (item, error))
def convert_to_string(self, item):
"""Converts the given item to a Unicode string.
Uses ``__unicode__`` or ``__str__`` method with Python objects and
``toString`` with Java objects.
Use `Encode String To Bytes` and `Decode Bytes To String` keywords
in ``String`` library if you need to convert between Unicode and byte
strings using different encodings. Use `Convert To Bytes` if you just
want to create byte strings.
"""
self._log_types(item)
return self._convert_to_string(item)
def _convert_to_string(self, item):
return unic(item)
def convert_to_boolean(self, item):
"""Converts the given item to Boolean true or false.
Handles strings ``True`` and ``False`` (case-insensitive) as expected,
otherwise returns item's
[http://docs.python.org/2/library/stdtypes.html#truth|truth value]
        using Python's built-in ``bool()``.
"""
self._log_types(item)
if is_string(item):
if item.upper() == 'TRUE':
return True
if item.upper() == 'FALSE':
return False
return bool(item)
def convert_to_bytes(self, input, input_type='text'):
u"""Converts the given ``input`` to bytes according to the ``input_type``.
Valid input types are listed below:
- ``text:`` Converts text to bytes character by character. All
characters with ordinal below 256 can be used and are converted to
        bytes with the same values. Many characters are easiest to represent
using escapes like ``\\x00`` or ``\\xff``. Supports both Unicode
strings and bytes.
- ``int:`` Converts integers separated by spaces to bytes. Similarly as
with `Convert To Integer`, it is possible to use binary, octal, or
hex values by prefixing the values with ``0b``, ``0o``, or ``0x``,
respectively.
        - ``hex:`` Converts hexadecimal values to bytes. A single byte is always
        two characters long (e.g. ``01`` or ``FF``). Spaces are ignored and
can be used freely as a visual separator.
        - ``bin:`` Converts binary values to bytes. A single byte is always
        eight characters long (e.g. ``00001010``). Spaces are ignored and can be
used freely as a visual separator.
In addition to giving the input as a string, it is possible to use
lists or other iterables containing individual characters or numbers.
        In that case numbers do not need to be padded to a certain length and
they cannot contain extra spaces.
Examples (last column shows returned bytes):
| ${bytes} = | Convert To Bytes | hyv\xe4 | | # hyv\\xe4 |
| ${bytes} = | Convert To Bytes | \\xff\\x07 | | # \\xff\\x07 |
| ${bytes} = | Convert To Bytes | 82 70 | int | # RF |
| ${bytes} = | Convert To Bytes | 0b10 0x10 | int | # \\x02\\x10 |
| ${bytes} = | Convert To Bytes | ff 00 07 | hex | # \\xff\\x00\\x07 |
| ${bytes} = | Convert To Bytes | 5246212121 | hex | # RF!!! |
| ${bytes} = | Convert To Bytes | 0000 1000 | bin | # \\x08 |
| ${input} = | Create List | 1 | 2 | 12 |
| ${bytes} = | Convert To Bytes | ${input} | int | # \\x01\\x02\\x0c |
| ${bytes} = | Convert To Bytes | ${input} | hex | # \\x01\\x02\\x12 |
Use `Encode String To Bytes` in ``String`` library if you need to
convert text to bytes using a certain encoding.
New in Robot Framework 2.8.2.
"""
try:
try:
ordinals = getattr(self, '_get_ordinals_from_%s' % input_type)
except AttributeError:
raise RuntimeError("Invalid input type '%s'." % input_type)
return bytes(bytearray(o for o in ordinals(input)))
except:
raise RuntimeError("Creating bytes failed: %s" % get_error_message())
def _get_ordinals_from_text(self, input):
# https://github.com/IronLanguages/main/issues/1237
if IRONPYTHON and isinstance(input, bytearray):
input = bytes(input)
for char in input:
ordinal = char if is_integer(char) else ord(char)
yield self._test_ordinal(ordinal, char, 'Character')
def _test_ordinal(self, ordinal, original, type):
if 0 <= ordinal <= 255:
return ordinal
raise RuntimeError("%s '%s' cannot be represented as a byte."
% (type, original))
def _get_ordinals_from_int(self, input):
if is_string(input):
input = input.split()
elif is_integer(input):
input = [input]
for integer in input:
ordinal = self._convert_to_integer(integer)
yield self._test_ordinal(ordinal, integer, 'Integer')
def _get_ordinals_from_hex(self, input):
for token in self._input_to_tokens(input, length=2):
ordinal = self._convert_to_integer(token, base=16)
yield self._test_ordinal(ordinal, token, 'Hex value')
def _get_ordinals_from_bin(self, input):
for token in self._input_to_tokens(input, length=8):
ordinal = self._convert_to_integer(token, base=2)
yield self._test_ordinal(ordinal, token, 'Binary value')
def _input_to_tokens(self, input, length):
if not is_string(input):
return input
input = ''.join(input.split())
if len(input) % length != 0:
            raise RuntimeError('Expected input to be a multiple of %d.' % length)
return (input[i:i+length] for i in range(0, len(input), length))
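    # For example, _input_to_tokens('ff 00 07', length=2) yields the tokens
    # 'ff', '00' and '07'; non-string iterables are returned as-is.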
def create_list(self, *items):
"""Returns a list containing given items.
The returned list can be assigned both to ``${scalar}`` and ``@{list}``
variables.
Examples:
| @{list} = | Create List | a | b | c |
| ${scalar} = | Create List | a | b | c |
| ${ints} = | Create List | ${1} | ${2} | ${3} |
"""
return list(items)
@run_keyword_variant(resolve=0)
def create_dictionary(self, *items):
"""Creates and returns a dictionary based on the given ``items``.
        Items are typically given using the ``key=value`` syntax the same way
        as ``&{dictionary}`` variables are created in the Variable table. Both
keys and values can contain variables, and possible equal sign in key
can be escaped with a backslash like ``escaped\\=key=value``. It is
also possible to get items from existing dictionaries by simply using
them like ``&{dict}``.
Alternatively items can be specified so that keys and values are given
separately. This and the ``key=value`` syntax can even be combined,
but separately given items must be first.
        If the same key is used multiple times, the last value has precedence.
The returned dictionary is ordered, and values with strings as keys
can also be accessed using a convenient dot-access syntax like
``${dict.key}``.
Examples:
| &{dict} = | Create Dictionary | key=value | foo=bar | | | # key=value syntax |
| Should Be True | ${dict} == {'key': 'value', 'foo': 'bar'} |
| &{dict2} = | Create Dictionary | key | value | foo | bar | # separate key and value |
| Should Be Equal | ${dict} | ${dict2} |
| &{dict} = | Create Dictionary | ${1}=${2} | &{dict} | foo=new | | # using variables |
| Should Be True | ${dict} == {1: 2, 'key': 'value', 'foo': 'new'} |
| Should Be Equal | ${dict.key} | value | | | | # dot-access |
This keyword was changed in Robot Framework 2.9 in many ways:
- Moved from ``Collections`` library to ``BuiltIn``.
        - Added support for non-string keys in the ``key=value`` syntax.
- Returned dictionary is ordered and dot-accessible.
- Old syntax to give keys and values separately was deprecated, but
deprecation was later removed in RF 3.0.1.
"""
separate, combined = self._split_dict_items(items)
result = DotDict(self._format_separate_dict_items(separate))
combined = DictVariableTableValue(combined).resolve(self._variables)
result.update(combined)
return result
def _split_dict_items(self, items):
separate = []
for item in items:
name, value = split_from_equals(item)
if value is not None or VariableSplitter(item).is_dict_variable():
break
separate.append(item)
return separate, items[len(separate):]
def _format_separate_dict_items(self, separate):
separate = self._variables.replace_list(separate)
if len(separate) % 2 != 0:
raise DataError('Expected even number of keys and values, got %d.'
% len(separate))
return [separate[i:i+2] for i in range(0, len(separate), 2)]
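    # For example, Create Dictionary called with ('a', '1', 'b=2') splits
    # into separate ['a', '1'] and combined ('b=2',), which together produce
    # the dictionary {'a': '1', 'b': '2'}.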
class _Verify(_BuiltInBase):
def _set_and_remove_tags(self, tags):
set_tags = [tag for tag in tags if not tag.startswith('-')]
remove_tags = [tag[1:] for tag in tags if tag.startswith('-')]
if remove_tags:
self.remove_tags(*remove_tags)
if set_tags:
self.set_tags(*set_tags)
def fail(self, msg=None, *tags):
"""Fails the test with the given message and optionally alters its tags.
The error message is specified using the ``msg`` argument.
It is possible to use HTML in the given error message, similarly
as with any other keyword accepting an error message, by prefixing
the error with ``*HTML*``.
It is possible to modify tags of the current test case by passing tags
after the message. Tags starting with a hyphen (e.g. ``-regression``)
are removed and others added. Tags are modified using `Set Tags` and
        `Remove Tags` internally, and the semantics of setting and removing
        them are the same as with these keywords.
Examples:
| Fail | Test not ready | | | # Fails with the given message. |
| Fail | *HTML*<b>Test not ready</b> | | | # Fails using HTML in the message. |
| Fail | Test not ready | not-ready | | # Fails and adds 'not-ready' tag. |
| Fail | OS not supported | -regression | | # Removes tag 'regression'. |
| Fail | My message | tag | -t* | # Removes all tags starting with 't' except the newly added 'tag'. |
See `Fatal Error` if you need to stop the whole test execution.
Support for modifying tags was added in Robot Framework 2.7.4 and
HTML message support in 2.8.
"""
self._set_and_remove_tags(tags)
raise AssertionError(msg) if msg else AssertionError()
def fatal_error(self, msg=None):
"""Stops the whole test execution.
The test or suite where this keyword is used fails with the provided
message, and subsequent tests fail with a canned message.
Possible teardowns will nevertheless be executed.
See `Fail` if you only want to stop one test case unconditionally.
"""
error = AssertionError(msg) if msg else AssertionError()
error.ROBOT_EXIT_ON_FAILURE = True
raise error
def should_not_be_true(self, condition, msg=None):
"""Fails if the given condition is true.
See `Should Be True` for details about how ``condition`` is evaluated
and how ``msg`` can be used to override the default error message.
"""
if self._is_true(condition):
raise AssertionError(msg or "'%s' should not be true." % condition)
def should_be_true(self, condition, msg=None):
"""Fails if the given condition is not true.
If ``condition`` is a string (e.g. ``${rc} < 10``), it is evaluated as
a Python expression as explained in `Evaluating expressions` and the
keyword status is decided based on the result. If a non-string item is
        given, the status is taken directly from its
[http://docs.python.org/2/library/stdtypes.html#truth|truth value].
The default error message (``<condition> should be true``) is not very
informative, but it can be overridden with the ``msg`` argument.
Examples:
| Should Be True | ${rc} < 10 |
| Should Be True | '${status}' == 'PASS' | # Strings must be quoted |
| Should Be True | ${number} | # Passes if ${number} is not zero |
| Should Be True | ${list} | # Passes if ${list} is not empty |
Variables used like ``${variable}``, as in the examples above, are
replaced in the expression before evaluation. Variables are also
available in the evaluation namespace and can be accessed using special
syntax ``$variable``. This is a new feature in Robot Framework 2.9
and it is explained more thoroughly in `Evaluating expressions`.
Examples:
| Should Be True | $rc < 10 |
| Should Be True | $status == 'PASS' | # Expected string must be quoted |
Starting from Robot Framework 2.8, `Should Be True` automatically
imports Python's [http://docs.python.org/2/library/os.html|os] and
[http://docs.python.org/2/library/sys.html|sys] modules that contain
several useful attributes:
| Should Be True | os.linesep == '\\n' | # Unixy |
| Should Be True | os.linesep == '\\r\\n' | # Windows |
| Should Be True | sys.platform == 'darwin' | # OS X |
| Should Be True | sys.platform.startswith('java') | # Jython |
"""
if not self._is_true(condition):
raise AssertionError(msg or "'%s' should be true." % condition)
def should_be_equal(self, first, second, msg=None, values=True):
"""Fails if the given objects are unequal.
Optional ``msg`` and ``values`` arguments specify how to construct
the error message if this keyword fails:
- If ``msg`` is not given, the error message is ``<first> != <second>``.
- If ``msg`` is given and ``values`` gets a true value, the error
message is ``<msg>: <first> != <second>``.
- If ``msg`` is given and ``values`` gets a false value, the error
message is simply ``<msg>``.
``values`` is true by default, but can be turned to false by using,
for example, string ``false`` or ``no values``. See `Boolean arguments`
section for more details.
If both arguments are multiline strings, the comparison is done using
`multiline string comparisons`.
"""
self._log_types_at_info_if_different(first, second)
self._should_be_equal(first, second, msg, values)
def _should_be_equal(self, first, second, msg, values):
if first == second:
return
include_values = self._include_values(values)
if include_values and is_string(first) and is_string(second):
self._raise_multi_diff(first, second)
assert_equal(first, second, msg, include_values)
def _log_types_at_info_if_different(self, first, second):
level = 'DEBUG' if type(first) == type(second) else 'INFO'
self._log_types_at_level(level, first, second)
def _raise_multi_diff(self, first, second):
first_lines, second_lines = first.splitlines(), second.splitlines()
if len(first_lines) < 3 or len(second_lines) < 3:
return
self.log("%s\n!=\n%s" % (first, second))
err = 'Multiline strings are different:\n'
for line in difflib.unified_diff(first_lines, second_lines,
fromfile='first', tofile='second',
lineterm=''):
err += line + '\n'
raise AssertionError(err)
def _include_values(self, values):
return is_truthy(values) and str(values).upper() != 'NO VALUES'
def should_not_be_equal(self, first, second, msg=None, values=True):
"""Fails if the given objects are equal.
See `Should Be Equal` for an explanation on how to override the default
error message with ``msg`` and ``values``.
"""
self._log_types_at_info_if_different(first, second)
self._should_not_be_equal(first, second, msg, values)
def _should_not_be_equal(self, first, second, msg, values):
assert_not_equal(first, second, msg, self._include_values(values))
def should_not_be_equal_as_integers(self, first, second, msg=None,
values=True, base=None):
"""Fails if objects are equal after converting them to integers.
        See `Convert To Integer` for information on how to convert integers
        from bases other than 10 using the ``base`` argument or ``0b/0o/0x``
        prefixes.
See `Should Be Equal` for an explanation on how to override the default
error message with ``msg`` and ``values``.
See `Should Be Equal As Integers` for some usage examples.
"""
self._log_types_at_info_if_different(first, second)
self._should_not_be_equal(self._convert_to_integer(first, base),
self._convert_to_integer(second, base),
msg, values)
def should_be_equal_as_integers(self, first, second, msg=None, values=True,
base=None):
"""Fails if objects are unequal after converting them to integers.
        See `Convert To Integer` for information on how to convert integers
        from bases other than 10 using the ``base`` argument or ``0b/0o/0x``
        prefixes.
See `Should Be Equal` for an explanation on how to override the default
error message with ``msg`` and ``values``.
Examples:
| Should Be Equal As Integers | 42 | ${42} | Error message |
| Should Be Equal As Integers | ABCD | abcd | base=16 |
| Should Be Equal As Integers | 0b1011 | 11 |
"""
self._log_types_at_info_if_different(first, second)
self._should_be_equal(self._convert_to_integer(first, base),
self._convert_to_integer(second, base),
msg, values)
def should_not_be_equal_as_numbers(self, first, second, msg=None,
values=True, precision=6):
"""Fails if objects are equal after converting them to real numbers.
The conversion is done with `Convert To Number` keyword using the
given ``precision``.
See `Should Be Equal As Numbers` for examples on how to use
``precision`` and why it does not always work as expected. See also
`Should Be Equal` for an explanation on how to override the default
error message with ``msg`` and ``values``.
"""
self._log_types_at_info_if_different(first, second)
first = self._convert_to_number(first, precision)
second = self._convert_to_number(second, precision)
self._should_not_be_equal(first, second, msg, values)
def should_be_equal_as_numbers(self, first, second, msg=None, values=True,
precision=6):
"""Fails if objects are unequal after converting them to real numbers.
The conversion is done with `Convert To Number` keyword using the
given ``precision``.
Examples:
| Should Be Equal As Numbers | ${x} | 1.1 | | # Passes if ${x} is 1.1 |
| Should Be Equal As Numbers | 1.123 | 1.1 | precision=1 | # Passes |
| Should Be Equal As Numbers | 1.123 | 1.4 | precision=0 | # Passes |
| Should Be Equal As Numbers | 112.3 | 75 | precision=-2 | # Passes |
As discussed in the documentation of `Convert To Number`, machines
generally cannot store floating point numbers accurately. Because of
this limitation, comparing floats for equality is problematic and
a correct approach to use depends on the context. This keyword uses
a very naive approach of rounding the numbers before comparing them,
which is both prone to rounding errors and does not work very well if
numbers are really big or small. For more information about comparing
floats, and ideas on how to implement your own context specific
comparison algorithm, see
http://randomascii.wordpress.com/2012/02/25/comparing-floating-point-numbers-2012-edition/.
See `Should Not Be Equal As Numbers` for a negative version of this
keyword and `Should Be Equal` for an explanation on how to override
the default error message with ``msg`` and ``values``.
"""
self._log_types_at_info_if_different(first, second)
first = self._convert_to_number(first, precision)
second = self._convert_to_number(second, precision)
self._should_be_equal(first, second, msg, values)
def should_not_be_equal_as_strings(self, first, second, msg=None, values=True):
"""Fails if objects are equal after converting them to strings.
See `Should Be Equal` for an explanation on how to override the default
error message with ``msg`` and ``values``.
"""
self._log_types_at_info_if_different(first, second)
first, second = [self._convert_to_string(i) for i in (first, second)]
self._should_not_be_equal(first, second, msg, values)
def should_be_equal_as_strings(self, first, second, msg=None, values=True):
"""Fails if objects are unequal after converting them to strings.
See `Should Be Equal` for an explanation on how to override the default
error message with ``msg`` and ``values``.
If both arguments are multiline strings, the comparison is done using
`multiline string comparisons`.
"""
self._log_types_at_info_if_different(first, second)
first, second = [self._convert_to_string(i) for i in (first, second)]
self._should_be_equal(first, second, msg, values)
def should_not_start_with(self, str1, str2, msg=None, values=True):
"""Fails if the string ``str1`` starts with the string ``str2``.
See `Should Be Equal` for an explanation on how to override the default
error message with ``msg`` and ``values``.
"""
if str1.startswith(str2):
raise AssertionError(self._get_string_msg(str1, str2, msg, values,
'starts with'))
def should_start_with(self, str1, str2, msg=None, values=True):
"""Fails if the string ``str1`` does not start with the string ``str2``.
See `Should Be Equal` for an explanation on how to override the default
error message with ``msg`` and ``values``.
"""
if not str1.startswith(str2):
raise AssertionError(self._get_string_msg(str1, str2, msg, values,
'does not start with'))
def should_not_end_with(self, str1, str2, msg=None, values=True):
"""Fails if the string ``str1`` ends with the string ``str2``.
See `Should Be Equal` for an explanation on how to override the default
error message with ``msg`` and ``values``.
"""
if str1.endswith(str2):
raise AssertionError(self._get_string_msg(str1, str2, msg, values,
'ends with'))
def should_end_with(self, str1, str2, msg=None, values=True):
"""Fails if the string ``str1`` does not end with the string ``str2``.
See `Should Be Equal` for an explanation on how to override the default
error message with ``msg`` and ``values``.
"""
if not str1.endswith(str2):
raise AssertionError(self._get_string_msg(str1, str2, msg, values,
'does not end with'))
def should_not_contain(self, container, item, msg=None, values=True):
"""Fails if ``container`` contains ``item`` one or more times.
Works with strings, lists, and anything that supports Python's ``in``
operator. See `Should Be Equal` for an explanation on how to override
the default error message with ``msg`` and ``values``.
Examples:
| Should Not Contain | ${output} | FAILED |
| Should Not Contain | ${some list} | value |
"""
if item in container:
raise AssertionError(self._get_string_msg(container, item, msg,
values, 'contains'))
def should_contain(self, container, item, msg=None, values=True):
"""Fails if ``container`` does not contain ``item`` one or more times.
Works with strings, lists, and anything that supports Python's ``in``
operator. See `Should Be Equal` for an explanation on how to override
the default error message with ``msg`` and ``values``.
Examples:
| Should Contain | ${output} | PASS |
| Should Contain | ${some list} | value |
"""
if item not in container:
raise AssertionError(self._get_string_msg(container, item, msg,
values, 'does not contain'))
def should_contain_x_times(self, item1, item2, count, msg=None):
"""Fails if ``item1`` does not contain ``item2`` ``count`` times.
Works with strings, lists and all objects that `Get Count` works
with. The default error message can be overridden with ``msg`` and
the actual count is always logged.
Examples:
| Should Contain X Times | ${output} | hello | 2 |
| Should Contain X Times | ${some list} | value | 3 |
"""
count = self._convert_to_integer(count)
x = self.get_count(item1, item2)
if not msg:
msg = "'%s' contains '%s' %d time%s, not %d time%s." \
% (unic(item1), unic(item2), x, s(x), count, s(count))
self.should_be_equal_as_integers(x, count, msg, values=False)
def get_count(self, item1, item2):
"""Returns and logs how many times ``item2`` is found from ``item1``.
This keyword works with Python strings and lists and all objects
        that either have a ``count`` method or can be converted to Python lists.
Example:
| ${count} = | Get Count | ${some item} | interesting value |
| Should Be True | 5 < ${count} < 10 |
"""
if not hasattr(item1, 'count'):
try:
item1 = list(item1)
except:
raise RuntimeError("Converting '%s' to list failed: %s"
% (item1, get_error_message()))
count = item1.count(item2)
self.log('Item found from the first item %d time%s' % (count, s(count)))
return count
def should_not_match(self, string, pattern, msg=None, values=True):
"""Fails if the given ``string`` matches the given ``pattern``.
        Pattern matching is similar to matching files in a shell, and it is
        always case-sensitive. In the pattern, ``*`` matches anything and
        ``?`` matches any single character.
See `Should Be Equal` for an explanation on how to override the default
error message with ``msg`` and ``values``.
"""
if self._matches(string, pattern):
raise AssertionError(self._get_string_msg(string, pattern, msg,
values, 'matches'))
def should_match(self, string, pattern, msg=None, values=True):
"""Fails unless the given ``string`` matches the given ``pattern``.
        Pattern matching is similar to matching files in a shell, and it is
        always case-sensitive. In the pattern, ``*`` matches anything and
        ``?`` matches any single character.
See `Should Be Equal` for an explanation on how to override the default
error message with ``msg`` and ``values``.
"""
if not self._matches(string, pattern):
raise AssertionError(self._get_string_msg(string, pattern, msg,
values, 'does not match'))
def should_match_regexp(self, string, pattern, msg=None, values=True):
"""Fails if ``string`` does not match ``pattern`` as a regular expression.
Regular expression check is implemented using the Python
[http://docs.python.org/2/library/re.html|re module]. Python's regular
expression syntax is derived from Perl, and it is thus also very
similar to the syntax used, for example, in Java, Ruby and .NET.
Things to note about the regexp syntax in Robot Framework test data:
1) Backslash is an escape character in the test data, and possible
backslashes in the pattern must thus be escaped with another backslash
(e.g. ``\\\\d\\\\w+``).
2) Strings that may contain special characters, but should be handled
as literal strings, can be escaped with the `Regexp Escape` keyword.
3) The given pattern does not need to match the whole string. For
example, the pattern ``ello`` matches the string ``Hello world!``. If
a full match is needed, the ``^`` and ``$`` characters can be used to
denote the beginning and end of the string, respectively. For example,
``^ello$`` only matches the exact string ``ello``.
4) Possible flags altering how the expression is parsed (e.g.
``re.IGNORECASE``, ``re.MULTILINE``) can be set by prefixing the
pattern with the ``(?iLmsux)`` group like ``(?im)pattern``. The
available flags are ``i`` (case-insensitive), ``m`` (multiline mode),
``s`` (dotall mode), ``x`` (verbose), ``u`` (Unicode dependent) and
``L`` (locale dependent).
If this keyword passes, it returns the portion of the string that
matched the pattern. Additionally, the possible captured groups are
returned.
See the `Should Be Equal` keyword for an explanation on how to override
the default error message with the ``msg`` and ``values`` arguments.
Examples:
| Should Match Regexp | ${output} | \\\\d{6} | # Output contains six numbers |
| Should Match Regexp | ${output} | ^\\\\d{6}$ | # Six numbers and nothing more |
| ${ret} = | Should Match Regexp | Foo: 42 | (?i)foo: \\\\d+ |
| ${match} | ${group1} | ${group2} = |
| ... | Should Match Regexp | Bar: 43 | (Foo|Bar): (\\\\d+) |
=>
| ${ret} = 'Foo: 42'
| ${match} = 'Bar: 43'
| ${group1} = 'Bar'
| ${group2} = '43'
"""
res = re.search(pattern, string)
if res is None:
raise AssertionError(self._get_string_msg(string, pattern, msg,
values, 'does not match'))
match = res.group(0)
groups = res.groups()
if groups:
return [match] + list(groups)
return match
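    # For example, the pattern '(Foo|Bar): (\d+)' against 'Bar: 43' returns
    # ['Bar: 43', 'Bar', '43'], while a pattern without groups returns just
    # the matched substring.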
def should_not_match_regexp(self, string, pattern, msg=None, values=True):
"""Fails if ``string`` matches ``pattern`` as a regular expression.
See `Should Match Regexp` for more information about arguments.
"""
if re.search(pattern, string) is not None:
raise AssertionError(self._get_string_msg(string, pattern, msg,
values, 'matches'))
def get_length(self, item):
"""Returns and logs the length of the given item as an integer.
The item can be anything that has a length, for example, a string,
a list, or a mapping. The keyword first tries to get the length with
the Python function ``len``, which calls the item's ``__len__`` method
internally. If that fails, the keyword tries to call the item's
possible ``length`` and ``size`` methods directly. The final attempt is
trying to get the value of the item's ``length`` attribute. If all
these attempts are unsuccessful, the keyword fails.
Examples:
| ${length} = | Get Length | Hello, world! | |
| Should Be Equal As Integers | ${length} | 13 |
| @{list} = | Create List | Hello, | world! |
| ${length} = | Get Length | ${list} | |
| Should Be Equal As Integers | ${length} | 2 |
See also `Length Should Be`, `Should Be Empty` and `Should Not Be
Empty`.
"""
length = self._get_length(item)
self.log('Length is %d' % length)
return length
def _get_length(self, item):
try:
return len(item)
except RERAISED_EXCEPTIONS:
raise
except:
try:
return item.length()
except RERAISED_EXCEPTIONS:
raise
except:
try:
return item.size()
except RERAISED_EXCEPTIONS:
raise
except:
try:
return item.length
except RERAISED_EXCEPTIONS:
raise
except:
raise RuntimeError("Could not get length of '%s'." % item)
def length_should_be(self, item, length, msg=None):
"""Verifies that the length of the given item is correct.
        The length of the item is obtained using the `Get Length` keyword. The
default error message can be overridden with the ``msg`` argument.
"""
length = self._convert_to_integer(length)
actual = self.get_length(item)
if actual != length:
raise AssertionError(msg or "Length of '%s' should be %d but is %d."
% (item, length, actual))
def should_be_empty(self, item, msg=None):
"""Verifies that the given item is empty.
        The length of the item is obtained using the `Get Length` keyword. The
default error message can be overridden with the ``msg`` argument.
"""
if self.get_length(item) > 0:
raise AssertionError(msg or "'%s' should be empty." % item)
def should_not_be_empty(self, item, msg=None):
"""Verifies that the given item is not empty.
        The length of the item is obtained using the `Get Length` keyword. The
default error message can be overridden with the ``msg`` argument.
"""
if self.get_length(item) == 0:
raise AssertionError(msg or "'%s' should not be empty." % item)
def _get_string_msg(self, str1, str2, msg, values, delim):
default = "'%s' %s '%s'" % (unic(str1), delim, unic(str2))
if not msg:
msg = default
elif self._include_values(values):
msg = '%s: %s' % (msg, default)
return msg
class _Variables(_BuiltInBase):
def get_variables(self, no_decoration=False):
"""Returns a dictionary containing all variables in the current scope.
Variables are returned as a special dictionary that allows accessing
        variables in a space-, case- and underscore-insensitive manner,
        similarly to accessing variables in the test data. This dictionary
        supports all the same operations as normal Python dictionaries and,
        for example,
Collections library can be used to access or modify it. Modifying the
returned dictionary has no effect on the variables available in the
current scope.
By default variables are returned with ``${}``, ``@{}`` or ``&{}``
decoration based on variable types. Giving a true value (see `Boolean
arguments`) to the optional argument ``no_decoration`` will return
the variables without the decoration. This option is new in Robot
Framework 2.9.
Example:
| ${example_variable} = | Set Variable | example value |
| ${variables} = | Get Variables | |
| Dictionary Should Contain Key | ${variables} | \\${example_variable} |
| Dictionary Should Contain Key | ${variables} | \\${ExampleVariable} |
| Set To Dictionary | ${variables} | \\${name} | value |
| Variable Should Not Exist | \\${name} | | |
| ${no decoration} = | Get Variables | no_decoration=Yes |
| Dictionary Should Contain Key | ${no decoration} | example_variable |
Note: Prior to Robot Framework 2.7.4 variables were returned as
a custom object that did not support all dictionary methods.
"""
return self._variables.as_dict(decoration=is_falsy(no_decoration))
@run_keyword_variant(resolve=0)
def get_variable_value(self, name, default=None):
"""Returns variable value or ``default`` if the variable does not exist.
The name of the variable can be given either as a normal variable name
(e.g. ``${NAME}``) or in escaped format (e.g. ``\\${NAME}``). Notice
that the former has some limitations explained in `Set Suite Variable`.
Examples:
| ${x} = | Get Variable Value | ${a} | default |
| ${y} = | Get Variable Value | ${a} | ${b} |
| ${z} = | Get Variable Value | ${z} | |
=>
| ${x} gets value of ${a} if ${a} exists and string 'default' otherwise
| ${y} gets value of ${a} if ${a} exists and value of ${b} otherwise
| ${z} is set to Python None if it does not exist previously
See `Set Variable If` for another keyword to set variables dynamically.
"""
try:
return self._variables[self._get_var_name(name)]
except DataError:
return self._variables.replace_scalar(default)
def log_variables(self, level='INFO'):
"""Logs all variables in the current scope with given log level."""
variables = self.get_variables()
for name in sorted(variables, key=lambda s: s[2:-1].lower()):
msg = format_assign_message(name, variables[name], cut_long=False)
self.log(msg, level)
@run_keyword_variant(resolve=0)
def variable_should_exist(self, name, msg=None):
"""Fails unless the given variable exists within the current scope.
The name of the variable can be given either as a normal variable name
(e.g. ``${NAME}``) or in escaped format (e.g. ``\\${NAME}``). Notice
that the former has some limitations explained in `Set Suite Variable`.
The default error message can be overridden with the ``msg`` argument.
See also `Variable Should Not Exist` and `Keyword Should Exist`.
"""
name = self._get_var_name(name)
msg = self._variables.replace_string(msg) if msg \
else "Variable %s does not exist." % name
try:
self._variables[name]
except DataError:
raise AssertionError(msg)
@run_keyword_variant(resolve=0)
def variable_should_not_exist(self, name, msg=None):
"""Fails if the given variable exists within the current scope.
The name of the variable can be given either as a normal variable name
(e.g. ``${NAME}``) or in escaped format (e.g. ``\\${NAME}``). Notice
that the former has some limitations explained in `Set Suite Variable`.
The default error message can be overridden with the ``msg`` argument.
See also `Variable Should Exist` and `Keyword Should Exist`.
"""
name = self._get_var_name(name)
msg = self._variables.replace_string(msg) if msg \
else "Variable %s exists." % name
try:
self._variables[name]
except DataError:
pass
else:
raise AssertionError(msg)
def replace_variables(self, text):
"""Replaces variables in the given text with their current values.
If the text contains undefined variables, this keyword fails.
If the given ``text`` contains only a single variable, its value is
returned as-is and it can be any object. Otherwise this keyword
always returns a string.
Example:
The file ``template.txt`` contains ``Hello ${NAME}!`` and variable
``${NAME}`` has the value ``Robot``.
| ${template} = | Get File | ${CURDIR}/template.txt |
| ${message} = | Replace Variables | ${template} |
| Should Be Equal | ${message} | Hello Robot! |
"""
return self._variables.replace_scalar(text)
def set_variable(self, *values):
"""Returns the given values which can then be assigned to a variables.
This keyword is mainly used for setting scalar variables.
Additionally it can be used for converting a scalar variable
containing a list to a list variable or to multiple scalar variables.
It is recommended to use `Create List` when creating new lists.
Examples:
| ${hi} = | Set Variable | Hello, world! |
| ${hi2} = | Set Variable | I said: ${hi} |
| ${var1} | ${var2} = | Set Variable | Hello | world |
| @{list} = | Set Variable | ${list with some items} |
| ${item1} | ${item2} = | Set Variable | ${list with 2 items} |
Variables created with this keyword are available only in the
scope where they are created. See `Set Global Variable`,
`Set Test Variable` and `Set Suite Variable` for information on how to
set variables so that they are available also in a larger scope.
"""
if len(values) == 0:
return ''
elif len(values) == 1:
return values[0]
else:
return list(values)
@run_keyword_variant(resolve=0)
def set_test_variable(self, name, *values):
"""Makes a variable available everywhere within the scope of the current test.
Variables set with this keyword are available everywhere within the
scope of the currently executed test case. For example, if you set a
variable in a user keyword, it is available both in the test case level
and also in all other user keywords used in the current test. Other
test cases will not see variables set with this keyword.
See `Set Suite Variable` for more information and examples.
"""
name = self._get_var_name(name)
value = self._get_var_value(name, values)
self._variables.set_test(name, value)
self._log_set_variable(name, value)
@run_keyword_variant(resolve=0)
def set_suite_variable(self, name, *values):
"""Makes a variable available everywhere within the scope of the current suite.
Variables set with this keyword are available everywhere within the
scope of the currently executed test suite. Setting variables with this
keyword thus has the same effect as creating them using the Variable
table in the test data file or importing them from variable files.
Possible child test suites do not see variables set with this keyword
by default. Starting from Robot Framework 2.9, that can be controlled
by using ``children=<option>`` as the last argument. If the specified
``<option>`` is a non-empty string or any other value considered true
in Python, the variable is set also to the child suites. Parent and
sibling suites will never see variables set with this keyword.
The name of the variable can be given either as a normal variable name
(e.g. ``${NAME}``) or in escaped format as ``\\${NAME}`` or ``$NAME``.
Variable value can be given using the same syntax as when variables
are created in the Variable table.
If a variable already exists within the new scope, its value will be
overwritten. Otherwise a new variable is created. If a variable already
exists within the current scope, the value can be left empty and the
variable within the new scope gets the value within the current scope.
Examples:
| Set Suite Variable | ${SCALAR} | Hello, world! |
| Set Suite Variable | ${SCALAR} | Hello, world! | children=true |
| Set Suite Variable | @{LIST} | First item | Second item |
| Set Suite Variable | &{DICT} | key=value | foo=bar |
| ${ID} = | Get ID |
| Set Suite Variable | ${ID} |
To override an existing value with an empty value, use built-in
variables ``${EMPTY}``, ``@{EMPTY}`` or ``&{EMPTY}``:
| Set Suite Variable | ${SCALAR} | ${EMPTY} |
| Set Suite Variable | @{LIST} | @{EMPTY} | # New in RF 2.7.4 |
| Set Suite Variable | &{DICT} | &{EMPTY} | # New in RF 2.9 |
        *NOTE:* If the variable has a value which itself is a variable (escaped
        or not), you must always use the escaped format to set the variable:
Example:
| ${NAME} = | Set Variable | \\${var} |
| Set Suite Variable | ${NAME} | value | # Sets variable ${var} |
| Set Suite Variable | \\${NAME} | value | # Sets variable ${NAME} |
This limitation applies also to `Set Test Variable`, `Set Global
Variable`, `Variable Should Exist`, `Variable Should Not Exist` and
`Get Variable Value` keywords.
"""
name = self._get_var_name(name)
if (values and is_string(values[-1]) and
values[-1].startswith('children=')):
children = self._variables.replace_scalar(values[-1][9:])
children = is_truthy(children)
values = values[:-1]
else:
children = False
value = self._get_var_value(name, values)
self._variables.set_suite(name, value, children=children)
self._log_set_variable(name, value)
@run_keyword_variant(resolve=0)
def set_global_variable(self, name, *values):
"""Makes a variable available globally in all tests and suites.
Variables set with this keyword are globally available in all test
cases and suites executed after setting them. Setting variables with
        this keyword thus has the same effect as creating them from the command
        line using the options ``--variable`` or ``--variablefile``. Because this
keyword can change variables everywhere, it should be used with care.
See `Set Suite Variable` for more information and examples.
"""
name = self._get_var_name(name)
value = self._get_var_value(name, values)
self._variables.set_global(name, value)
self._log_set_variable(name, value)
# Helpers
def _get_var_name(self, orig):
name = self._resolve_possible_variable(orig)
try:
return self._unescape_variable_if_needed(name)
except ValueError:
raise RuntimeError("Invalid variable syntax '%s'." % orig)
def _resolve_possible_variable(self, name):
try:
resolved = self._variables.replace_string(name)
return self._unescape_variable_if_needed(resolved)
except (KeyError, ValueError, DataError):
return name
def _unescape_variable_if_needed(self, name):
if name.startswith('\\'):
name = name[1:]
if len(name) < 2:
raise ValueError
if name[0] in '$@&' and name[1] != '{':
name = '%s{%s}' % (name[0], name[1:])
if is_var(name):
return name
# Support for possible internal variables (issue 397)
name = '%s{%s}' % (name[0], self.replace_variables(name[2:-1]))
if is_var(name):
return name
raise ValueError
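    # For example, '\${NAME}' and '$NAME' are both normalized to '${NAME}'.
    # If validation fails, variables inside the name (issue 397) are
    # replaced once and the result is validated again.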
def _get_var_value(self, name, values):
if not values:
return self._variables[name]
if name[0] == '$':
# We could consider catenating values similarly as when creating
# scalar variables in the variable table, but that would require
# handling non-string values somehow. For details see
# https://github.com/robotframework/robotframework/issues/1919
if len(values) != 1 or VariableSplitter(values[0]).is_list_variable():
raise DataError("Setting list value to scalar variable '%s' "
"is not supported anymore. Create list "
"variable '@%s' instead." % (name, name[1:]))
return self._variables.replace_scalar(values[0])
return VariableTableValue(values, name).resolve(self._variables)
def _log_set_variable(self, name, value):
self.log(format_assign_message(name, value))
class _RunKeyword(_BuiltInBase):
# If you use any of these run keyword variants from another library, you
# should register those keywords with 'register_run_keyword' method. See
# the documentation of that method at the end of this file. There are also
# other run keyword variant keywords in BuiltIn which can also be seen
# at the end of this file.
@run_keyword_variant(resolve=1)
def run_keyword(self, name, *args):
"""Executes the given keyword with the given arguments.
Because the name of the keyword to execute is given as an argument, it
can be a variable and thus set dynamically, e.g. from a return value of
another keyword or from the command line.
"""
if not is_string(name):
raise RuntimeError('Keyword name must be a string.')
kw = Keyword(name, args=args)
return kw.run(self._context)
@run_keyword_variant(resolve=0)
def run_keywords(self, *keywords):
"""Executes all the given keywords in a sequence.
This keyword is mainly useful in setups and teardowns when they need
to take care of multiple actions and creating a new higher level user
        keyword would be overkill.
By default all arguments are expected to be keywords to be executed.
Examples:
| Run Keywords | Initialize database | Start servers | Clear logs |
| Run Keywords | ${KW 1} | ${KW 2} |
| Run Keywords | @{KEYWORDS} |
Starting from Robot Framework 2.7.6, keywords can also be run with
arguments using upper case ``AND`` as a separator between keywords.
        The keywords are executed so that the first argument is the first
        keyword and the following arguments until the first ``AND`` are its
        arguments. The first argument after the first ``AND`` is the second
        keyword and the following arguments until the next ``AND`` are its
        arguments, and so on.
Examples:
| Run Keywords | Initialize database | db1 | AND | Start servers | server1 | server2 |
| Run Keywords | Initialize database | ${DB NAME} | AND | Start servers | @{SERVERS} | AND | Clear logs |
| Run Keywords | ${KW} | AND | @{KW WITH ARGS} |
Notice that the ``AND`` control argument must be used explicitly and
cannot itself come from a variable. If you need to use literal ``AND``
string as argument, you can either use variables or escape it with
a backslash like ``\\AND``.
"""
self._run_keywords(self._split_run_keywords(list(keywords)))
def _run_keywords(self, iterable):
errors = []
for kw, args in iterable:
try:
self.run_keyword(kw, *args)
except ExecutionPassed as err:
err.set_earlier_failures(errors)
raise err
except ExecutionFailed as err:
errors.extend(err.get_errors())
if not err.can_continue(self._context.in_teardown):
break
if errors:
raise ExecutionFailures(errors)
def _split_run_keywords(self, keywords):
if 'AND' not in keywords:
for name in self._variables.replace_list(keywords):
yield name, ()
else:
for name, args in self._split_run_keywords_from_and(keywords):
yield name, args
def _split_run_keywords_from_and(self, keywords):
while 'AND' in keywords:
index = keywords.index('AND')
yield self._resolve_run_keywords_name_and_args(keywords[:index])
keywords = keywords[index+1:]
yield self._resolve_run_keywords_name_and_args(keywords)
def _resolve_run_keywords_name_and_args(self, kw_call):
kw_call = self._variables.replace_list(kw_call, replace_until=1)
if not kw_call:
raise DataError('Incorrect use of AND')
return kw_call[0], kw_call[1:]
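    # For example, ['Kw1', 'arg', 'AND', 'Kw2'] is split into the calls
    # ('Kw1', ['arg']) and ('Kw2', []).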
@run_keyword_variant(resolve=2)
def run_keyword_if(self, condition, name, *args):
"""Runs the given keyword with the given arguments, if ``condition`` is true.
The given ``condition`` is evaluated in Python as explained in
        `Evaluating expressions`, and ``name`` and ``*args`` have the same
semantics as with `Run Keyword`.
Example, a simple if/else construct:
| ${status} | ${value} = | `Run Keyword And Ignore Error` | `My Keyword` |
| `Run Keyword If` | '${status}' == 'PASS' | `Some Action` | arg |
| `Run Keyword Unless` | '${status}' == 'PASS' | `Another Action` |
In this example, only either `Some Action` or `Another Action` is
executed, based on the status of `My Keyword`. Instead of `Run Keyword
And Ignore Error` you can also use `Run Keyword And Return Status`.
Variables used like ``${variable}``, as in the examples above, are
replaced in the expression before evaluation. Variables are also
available in the evaluation namespace and can be accessed using special
syntax ``$variable``. This is a new feature in Robot Framework 2.9
and it is explained more thoroughly in `Evaluating expressions`.
Example:
| `Run Keyword If` | $result is None or $result == 'FAIL' | `Keyword` |
Starting from Robot version 2.7.4, this keyword supports also optional
ELSE and ELSE IF branches. Both of these are defined in ``*args`` and
        must use exactly the format ``ELSE`` or ``ELSE IF``, respectively. ELSE
branches must contain first the name of the keyword to execute and then
its possible arguments. ELSE IF branches must first contain a condition,
like the first argument to this keyword, and then the keyword to execute
and its possible arguments. It is possible to have ELSE branch after
ELSE IF and to have multiple ELSE IF branches.
Given previous example, if/else construct can also be created like this:
| ${status} | ${value} = | `Run Keyword And Ignore Error` | My Keyword |
| `Run Keyword If` | '${status}' == 'PASS' | `Some Action` | arg | ELSE | `Another Action` |
The return value is the one of the keyword that was executed or None if
no keyword was executed (i.e. if ``condition`` was false). Hence, it is
recommended to use ELSE and/or ELSE IF branches to conditionally assign
return values from keyword to variables (to conditionally assign fixed
values to variables, see `Set Variable If`). This is illustrated by the
example below:
| ${var1} = | `Run Keyword If` | ${rc} == 0 | `Some keyword returning a value` |
| ... | ELSE IF | 0 < ${rc} < 42 | `Another keyword` |
| ... | ELSE IF | ${rc} < 0 | `Another keyword with args` | ${rc} | arg2 |
| ... | ELSE | `Final keyword to handle abnormal cases` | ${rc} |
| ${var2} = | `Run Keyword If` | ${condition} | `Some keyword` |
In this example, ${var2} will be set to None if ${condition} is false.
Notice that ``ELSE`` and ``ELSE IF`` control words must be used
explicitly and thus cannot come from variables. If you need to use
literal ``ELSE`` and ``ELSE IF`` strings as arguments, you can escape
them with a backslash like ``\\ELSE`` and ``\\ELSE IF``.
Starting from Robot Framework 2.8, Python's
[http://docs.python.org/2/library/os.html|os] and
[http://docs.python.org/2/library/sys.html|sys] modules are
automatically imported when evaluating the ``condition``.
Attributes they contain can thus be used in the condition:
| `Run Keyword If` | os.sep == '/' | `Unix Keyword` |
| ... | ELSE IF | sys.platform.startswith('java') | `Jython Keyword` |
| ... | ELSE | `Windows Keyword` |
"""
args, branch = self._split_elif_or_else_branch(args)
if self._is_true(condition):
return self.run_keyword(name, *args)
return branch()
def _split_elif_or_else_branch(self, args):
if 'ELSE IF' in args:
args, branch = self._split_branch(args, 'ELSE IF', 2,
'condition and keyword')
return args, lambda: self.run_keyword_if(*branch)
if 'ELSE' in args:
args, branch = self._split_branch(args, 'ELSE', 1, 'keyword')
return args, lambda: self.run_keyword(*branch)
return args, lambda: None
def _split_branch(self, args, control_word, required, required_error):
index = list(args).index(control_word)
branch = self._variables.replace_list(args[index+1:], required)
if len(branch) < required:
raise DataError('%s requires %s.' % (control_word, required_error))
return args[:index], branch
@run_keyword_variant(resolve=2)
def run_keyword_unless(self, condition, name, *args):
"""Runs the given keyword with the given arguments, if ``condition`` is false.
See `Run Keyword If` for more information and an example.
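        For example, running a cleanup keyword only when the earlier status
        was not PASS (``Custom Cleanup`` below is a placeholder for any
        keyword):
        | `Run Keyword Unless` | '${status}' == 'PASS' | Custom Cleanup |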
"""
if not self._is_true(condition):
return self.run_keyword(name, *args)
@run_keyword_variant(resolve=1)
def run_keyword_and_ignore_error(self, name, *args):
"""Runs the given keyword with the given arguments and ignores possible error.
        This keyword returns two values: the first is either the string
        ``PASS`` or ``FAIL``, depending on the status of the executed keyword,
        and the second is either the return value of the keyword or the
        received error message. See `Run Keyword And Return Status` if you are
        only interested in the execution status.
The keyword name and arguments work as in `Run Keyword`. See
`Run Keyword If` for a usage example.
Errors caused by invalid syntax, timeouts, or fatal exceptions are not
caught by this keyword. Otherwise this keyword itself never fails.
Since Robot Framework 2.9, variable errors are caught by this keyword.
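        A minimal usage sketch (``My Keyword`` is a placeholder for any
        keyword):
        | ${status} | ${value} = | `Run Keyword And Ignore Error` | My Keyword |
        | `Log` | Execution status was ${status} |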
"""
try:
return 'PASS', self.run_keyword(name, *args)
except ExecutionFailed as err:
if err.dont_continue:
raise
return 'FAIL', unic(err)
@run_keyword_variant(resolve=1)
def run_keyword_and_return_status(self, name, *args):
"""Runs the given keyword with given arguments and returns the status as a Boolean value.
This keyword returns Boolean ``True`` if the keyword that is executed
succeeds and ``False`` if it fails. This is useful, for example, in
combination with `Run Keyword If`. If you are interested in the error
message or return value, use `Run Keyword And Ignore Error` instead.
The keyword name and arguments work as in `Run Keyword`.
Example:
| ${passed} = | `Run Keyword And Return Status` | Keyword | args |
| `Run Keyword If` | ${passed} | Another keyword |
Errors caused by invalid syntax, timeouts, or fatal exceptions are not
caught by this keyword. Otherwise this keyword itself never fails.
New in Robot Framework 2.7.6.
"""
status, _ = self.run_keyword_and_ignore_error(name, *args)
return status == 'PASS'
@run_keyword_variant(resolve=1)
def run_keyword_and_continue_on_failure(self, name, *args):
"""Runs the keyword and continues execution even if a failure occurs.
The keyword name and arguments work as with `Run Keyword`.
Example:
| Run Keyword And Continue On Failure | Fail | This is a stupid example |
| Log | This keyword is executed |
The execution is not continued if the failure is caused by invalid syntax,
timeout, or fatal exception.
Since Robot Framework 2.9, variable errors are caught by this keyword.
"""
try:
return self.run_keyword(name, *args)
except ExecutionFailed as err:
if not err.dont_continue:
err.continue_on_failure = True
raise err
@run_keyword_variant(resolve=2)
def run_keyword_and_expect_error(self, expected_error, name, *args):
"""Runs the keyword and checks that the expected error occurred.
The expected error must be given in the same format as in
Robot Framework reports. It can be a pattern containing
        characters ``?``, which matches any single character, and
        ``*``, which matches any number of any characters. ``name`` and
        ``*args`` have the same semantics as with `Run Keyword`.
If the expected error occurs, the error message is returned and it can
be further processed/tested, if needed. If there is no error, or the
error does not match the expected error, this keyword fails.
Examples:
| Run Keyword And Expect Error | My error | Some Keyword | arg1 | arg2 |
| ${msg} = | Run Keyword And Expect Error | * | My KW |
| Should Start With | ${msg} | Once upon a time in |
Errors caused by invalid syntax, timeouts, or fatal exceptions are not
caught by this keyword.
Since Robot Framework 2.9, variable errors are caught by this keyword.
"""
try:
self.run_keyword(name, *args)
except ExecutionFailed as err:
if err.dont_continue:
raise
error = err
else:
raise AssertionError("Expected error '%s' did not occur."
% expected_error)
if not self._matches(unic(error), expected_error):
raise AssertionError("Expected error '%s' but got '%s'."
% (expected_error, error))
return unic(error)
@run_keyword_variant(resolve=2)
def repeat_keyword(self, repeat, name, *args):
"""Executes the specified keyword multiple times.
``name`` and ``args`` define the keyword that is executed similarly as
        with `Run Keyword`. ``repeat`` specifies how many times (as a count) or
        for how long (as a timeout) the keyword should be executed.
If ``repeat`` is given as count, it specifies how many times the
keyword should be executed. ``repeat`` can be given as an integer or
as a string that can be converted to an integer. If it is a string,
it can have postfix ``times`` or ``x`` (case and space insensitive)
to make the expression more explicit.
If ``repeat`` is given as timeout, it must be in Robot Framework's
time format (e.g. ``1 minute``, ``2 min 3 s``). Using a number alone
(e.g. ``1`` or ``1.5``) does not work in this context.
If ``repeat`` is zero or negative, the keyword is not executed at
all. This keyword fails immediately if any of the execution
rounds fails.
Examples:
| Repeat Keyword | 5 times | Go to Previous Page |
| Repeat Keyword | ${var} | Some Keyword | arg1 | arg2 |
| Repeat Keyword | 2 minutes | Some Keyword | arg1 | arg2 |
Specifying ``repeat`` as a timeout is new in Robot Framework 3.0.
"""
try:
count = self._get_repeat_count(repeat)
except RuntimeError as err:
timeout = self._get_repeat_timeout(repeat)
if timeout is None:
raise err
keywords = self._keywords_repeated_by_timeout(timeout, name, args)
else:
keywords = self._keywords_repeated_by_count(count, name, args)
self._run_keywords(keywords)
def _get_repeat_count(self, times, require_postfix=False):
times = normalize(str(times))
if times.endswith('times'):
times = times[:-5]
elif times.endswith('x'):
times = times[:-1]
elif require_postfix:
raise ValueError
return self._convert_to_integer(times)
def _get_repeat_timeout(self, timestr):
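        # Plain numbers are not accepted as timeouts because they would be
        # ambiguous with repeat counts, so they yield None here.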
try:
float(timestr)
except ValueError:
pass
else:
return None
try:
return timestr_to_secs(timestr)
except ValueError:
return None
def _keywords_repeated_by_count(self, count, name, args):
if count <= 0:
self.log("Keyword '%s' repeated zero times." % name)
for i in range(count):
self.log("Repeating keyword, round %d/%d." % (i + 1, count))
yield name, args
def _keywords_repeated_by_timeout(self, timeout, name, args):
if timeout <= 0:
self.log("Keyword '%s' repeated zero times." % name)
repeat_round = 0
maxtime = time.time() + timeout
while time.time() < maxtime:
repeat_round += 1
self.log("Repeating keyword, round %d, %s remaining."
% (repeat_round,
secs_to_timestr(maxtime - time.time(), compact=True)))
yield name, args
@run_keyword_variant(resolve=3)
def wait_until_keyword_succeeds(self, retry, retry_interval, name, *args):
"""Runs the specified keyword and retries if it fails.
``name`` and ``args`` define the keyword that is executed similarly
as with `Run Keyword`. How long to retry running the keyword is
        defined using the ``retry`` argument either as a timeout or as a count.
``retry_interval`` is the time to wait before trying to run the
keyword again after the previous run has failed.
If ``retry`` is given as timeout, it must be in Robot Framework's
time format (e.g. ``1 minute``, ``2 min 3 s``, ``4.5``) that is
explained in an appendix of Robot Framework User Guide. If it is
given as count, it must have ``times`` or ``x`` postfix (e.g.
``5 times``, ``10 x``). ``retry_interval`` must always be given in
Robot Framework's time format.
If the keyword does not succeed regardless of retries, this keyword
fails. If the executed keyword passes, its return value is returned.
Examples:
| Wait Until Keyword Succeeds | 2 min | 5 sec | My keyword | argument |
| ${result} = | Wait Until Keyword Succeeds | 3x | 200ms | My keyword |
All normal failures are caught by this keyword. Errors caused by
invalid syntax, test or keyword timeouts, or fatal exceptions (caused
e.g. by `Fatal Error`) are not caught.
Running the same keyword multiple times inside this keyword can create
lots of output and considerably increase the size of the generated
output files. Starting from Robot Framework 2.7, it is possible to
remove unnecessary keywords from the outputs using
``--RemoveKeywords WUKS`` command line option.
Support for specifying ``retry`` as a number of times to retry is
a new feature in Robot Framework 2.9.
Since Robot Framework 2.9, variable errors are caught by this keyword.
"""
maxtime = count = -1
try:
count = self._get_repeat_count(retry, require_postfix=True)
except ValueError:
timeout = timestr_to_secs(retry)
maxtime = time.time() + timeout
message = 'for %s' % secs_to_timestr(timeout)
else:
if count <= 0:
raise ValueError('Retry count %d is not positive.' % count)
message = '%d time%s' % (count, s(count))
retry_interval = timestr_to_secs(retry_interval)
while True:
try:
return self.run_keyword(name, *args)
except ExecutionFailed as err:
if err.dont_continue:
raise
count -= 1
if time.time() > maxtime > 0 or count == 0:
raise AssertionError("Keyword '%s' failed after retrying "
"%s. The last error was: %s"
% (name, message, err))
self._sleep_in_parts(retry_interval)
@run_keyword_variant(resolve=1)
def set_variable_if(self, condition, *values):
"""Sets variable based on the given condition.
The basic usage is giving a condition and two values. The
given condition is first evaluated the same way as with the
`Should Be True` keyword. If the condition is true, then the
first value is returned, and otherwise the second value is
returned. The second value can also be omitted, in which case
it has a default value None. This usage is illustrated in the
examples below, where ``${rc}`` is assumed to be zero.
| ${var1} = | Set Variable If | ${rc} == 0 | zero | nonzero |
| ${var2} = | Set Variable If | ${rc} > 0 | value1 | value2 |
| ${var3} = | Set Variable If | ${rc} > 0 | whatever | |
=>
| ${var1} = 'zero'
| ${var2} = 'value2'
| ${var3} = None
It is also possible to have 'else if' support by replacing the
second value with another condition, and having two new values
after it. If the first condition is not true, the second is
evaluated and one of the values after it is returned based on
its truth value. This can be continued by adding more
conditions without a limit.
| ${var} = | Set Variable If | ${rc} == 0 | zero |
        | ... | ${rc} > 0 | greater than zero | less than zero |
| |
| ${var} = | Set Variable If |
| ... | ${rc} == 0 | zero |
| ... | ${rc} == 1 | one |
| ... | ${rc} == 2 | two |
| ... | ${rc} > 2 | greater than two |
| ... | ${rc} < 0 | less than zero |
Use `Get Variable Value` if you need to set variables
        dynamically based on whether a variable exists or not.
"""
values = self._verify_values_for_set_variable_if(list(values))
if self._is_true(condition):
return self._variables.replace_scalar(values[0])
values = self._verify_values_for_set_variable_if(values[1:], True)
if len(values) == 1:
return self._variables.replace_scalar(values[0])
        return self.run_keyword('BuiltIn.Set Variable If', *values)
def _verify_values_for_set_variable_if(self, values, default=False):
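        # A list variable as the first value is expanded in place so that its
        # items are handled as individual values. Items are escaped so that
        # possible variable syntax in them is not resolved a second time.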
if not values:
if default:
return [None]
raise RuntimeError('At least one value is required')
if is_list_var(values[0]):
values[:1] = [escape(item) for item in self._variables[values[0]]]
return self._verify_values_for_set_variable_if(values)
return values
@run_keyword_variant(resolve=1)
def run_keyword_if_test_failed(self, name, *args):
"""Runs the given keyword with the given arguments, if the test failed.
This keyword can only be used in a test teardown. Trying to use it
anywhere else results in an error.
Otherwise, this keyword works exactly like `Run Keyword`, see its
documentation for more details.
Prior to Robot Framework 2.9 failures in test teardown itself were
not detected by this keyword.
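        Example (``Capture Failure Details`` is a placeholder for any
        keyword):
        | [Teardown] | Run Keyword If Test Failed | Capture Failure Details |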
"""
test = self._get_test_in_teardown('Run Keyword If Test Failed')
if not test.passed:
return self.run_keyword(name, *args)
@run_keyword_variant(resolve=1)
def run_keyword_if_test_passed(self, name, *args):
"""Runs the given keyword with the given arguments, if the test passed.
This keyword can only be used in a test teardown. Trying to use it
anywhere else results in an error.
Otherwise, this keyword works exactly like `Run Keyword`, see its
documentation for more details.
Prior to Robot Framework 2.9 failures in test teardown itself were
not detected by this keyword.
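        Example (``Clean Up Test Data`` is a placeholder for any keyword):
        | [Teardown] | Run Keyword If Test Passed | Clean Up Test Data |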
"""
test = self._get_test_in_teardown('Run Keyword If Test Passed')
if test.passed:
return self.run_keyword(name, *args)
@run_keyword_variant(resolve=1)
def run_keyword_if_timeout_occurred(self, name, *args):
"""Runs the given keyword if either a test or a keyword timeout has occurred.
This keyword can only be used in a test teardown. Trying to use it
anywhere else results in an error.
Otherwise, this keyword works exactly like `Run Keyword`, see its
documentation for more details.
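        Example (``Log Diagnostics`` is a placeholder for any keyword):
        | [Teardown] | Run Keyword If Timeout Occurred | Log Diagnostics |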
"""
self._get_test_in_teardown('Run Keyword If Timeout Occurred')
if self._context.timeout_occurred:
return self.run_keyword(name, *args)
def _get_test_in_teardown(self, kwname):
ctx = self._context
if ctx.test and ctx.in_test_teardown:
return ctx.test
raise RuntimeError("Keyword '%s' can only be used in test teardown."
% kwname)
@run_keyword_variant(resolve=1)
def run_keyword_if_all_critical_tests_passed(self, name, *args):
"""Runs the given keyword with the given arguments, if all critical tests passed.
        This keyword can only be used in a suite teardown. Trying to use it
        anywhere else results in an error.
Otherwise, this keyword works exactly like `Run Keyword`, see its
documentation for more details.
"""
suite = self._get_suite_in_teardown('Run Keyword If '
'All Critical Tests Passed')
if suite.statistics.critical.failed == 0:
return self.run_keyword(name, *args)
@run_keyword_variant(resolve=1)
def run_keyword_if_any_critical_tests_failed(self, name, *args):
"""Runs the given keyword with the given arguments, if any critical tests failed.
This keyword can only be used in a suite teardown. Trying to use it
anywhere else results in an error.
Otherwise, this keyword works exactly like `Run Keyword`, see its
documentation for more details.
"""
suite = self._get_suite_in_teardown('Run Keyword If '
'Any Critical Tests Failed')
if suite.statistics.critical.failed > 0:
return self.run_keyword(name, *args)
@run_keyword_variant(resolve=1)
def run_keyword_if_all_tests_passed(self, name, *args):
"""Runs the given keyword with the given arguments, if all tests passed.
This keyword can only be used in a suite teardown. Trying to use it
anywhere else results in an error.
Otherwise, this keyword works exactly like `Run Keyword`, see its
documentation for more details.
"""
suite = self._get_suite_in_teardown('Run Keyword If All Tests Passed')
if suite.statistics.all.failed == 0:
return self.run_keyword(name, *args)
@run_keyword_variant(resolve=1)
def run_keyword_if_any_tests_failed(self, name, *args):
"""Runs the given keyword with the given arguments, if one or more tests failed.
This keyword can only be used in a suite teardown. Trying to use it
anywhere else results in an error.
Otherwise, this keyword works exactly like `Run Keyword`, see its
documentation for more details.
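        Example (``Notify Team`` is a placeholder for any keyword):
        | [Teardown] | Run Keyword If Any Tests Failed | Notify Team |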
"""
suite = self._get_suite_in_teardown('Run Keyword If Any Tests Failed')
if suite.statistics.all.failed > 0:
return self.run_keyword(name, *args)
def _get_suite_in_teardown(self, kwname):
if not self._context.in_suite_teardown:
raise RuntimeError("Keyword '%s' can only be used in suite teardown."
% kwname)
return self._context.suite
class _Control(_BuiltInBase):
def continue_for_loop(self):
"""Skips the current for loop iteration and continues from the next.
Skips the remaining keywords in the current for loop iteration and
continues from the next one. Can be used directly in a for loop or
in a keyword that the loop uses.
Example:
| :FOR | ${var} | IN | @{VALUES} |
| | Run Keyword If | '${var}' == 'CONTINUE' | Continue For Loop |
| | Do Something | ${var} |
See `Continue For Loop If` to conditionally continue a for loop without
using `Run Keyword If` or other wrapper keywords.
New in Robot Framework 2.8.
"""
self.log("Continuing for loop from the next iteration.")
raise ContinueForLoop()
def continue_for_loop_if(self, condition):
"""Skips the current for loop iteration if the ``condition`` is true.
A wrapper for `Continue For Loop` to continue a for loop based on
the given condition. The condition is evaluated using the same
semantics as with `Should Be True` keyword.
Example:
| :FOR | ${var} | IN | @{VALUES} |
| | Continue For Loop If | '${var}' == 'CONTINUE' |
| | Do Something | ${var} |
New in Robot Framework 2.8.
"""
if self._is_true(condition):
self.continue_for_loop()
def exit_for_loop(self):
"""Stops executing the enclosing for loop.
Exits the enclosing for loop and continues execution after it.
Can be used directly in a for loop or in a keyword that the loop uses.
Example:
| :FOR | ${var} | IN | @{VALUES} |
| | Run Keyword If | '${var}' == 'EXIT' | Exit For Loop |
| | Do Something | ${var} |
See `Exit For Loop If` to conditionally exit a for loop without
using `Run Keyword If` or other wrapper keywords.
"""
self.log("Exiting for loop altogether.")
raise ExitForLoop()
def exit_for_loop_if(self, condition):
"""Stops executing the enclosing for loop if the ``condition`` is true.
A wrapper for `Exit For Loop` to exit a for loop based on
the given condition. The condition is evaluated using the same
semantics as with `Should Be True` keyword.
Example:
| :FOR | ${var} | IN | @{VALUES} |
| | Exit For Loop If | '${var}' == 'EXIT' |
| | Do Something | ${var} |
New in Robot Framework 2.8.
"""
if self._is_true(condition):
self.exit_for_loop()
@run_keyword_variant(resolve=0)
def return_from_keyword(self, *return_values):
"""Returns from the enclosing user keyword.
This keyword can be used to return from a user keyword with PASS status
without executing it fully. It is also possible to return values
similarly as with the ``[Return]`` setting. For more detailed information
about working with the return values, see the User Guide.
        This keyword is typically wrapped inside some other keyword, such as
`Run Keyword If` or `Run Keyword If Test Passed`, to return based
on a condition:
| Run Keyword If | ${rc} < 0 | Return From Keyword |
| Run Keyword If Test Passed | Return From Keyword |
        This keyword can also be used to return from a keyword inside
        a for loop. That, as well as returning values, is demonstrated by the
        `Find Index` keyword in the following somewhat advanced example.
Notice that it is often a good idea to move this kind of complicated
logic into a test library.
| ***** Variables *****
| @{LIST} = foo baz
|
| ***** Test Cases *****
| Example
| ${index} = Find Index baz @{LIST}
| Should Be Equal ${index} ${1}
| ${index} = Find Index non existing @{LIST}
| Should Be Equal ${index} ${-1}
|
| ***** Keywords *****
| Find Index
| [Arguments] ${element} @{items}
| ${index} = Set Variable ${0}
| :FOR ${item} IN @{items}
| \\ Run Keyword If '${item}' == '${element}' Return From Keyword ${index}
| \\ ${index} = Set Variable ${index + 1}
| Return From Keyword ${-1} # Also [Return] would work here.
The most common use case, returning based on an expression, can be
accomplished directly with `Return From Keyword If`. Both of these
keywords are new in Robot Framework 2.8.
See also `Run Keyword And Return` and `Run Keyword And Return If`.
"""
self.log('Returning from the enclosing user keyword.')
raise ReturnFromKeyword(return_values)
@run_keyword_variant(resolve=1)
def return_from_keyword_if(self, condition, *return_values):
"""Returns from the enclosing user keyword if ``condition`` is true.
A wrapper for `Return From Keyword` to return based on the given
condition. The condition is evaluated using the same semantics as
with `Should Be True` keyword.
Given the same example as in `Return From Keyword`, we can rewrite the
`Find Index` keyword as follows:
| ***** Keywords *****
| Find Index
| [Arguments] ${element} @{items}
| ${index} = Set Variable ${0}
| :FOR ${item} IN @{items}
| \\ Return From Keyword If '${item}' == '${element}' ${index}
| \\ ${index} = Set Variable ${index + 1}
| Return From Keyword ${-1} # Also [Return] would work here.
See also `Run Keyword And Return` and `Run Keyword And Return If`.
New in Robot Framework 2.8.
"""
if self._is_true(condition):
self.return_from_keyword(*return_values)
@run_keyword_variant(resolve=1)
def run_keyword_and_return(self, name, *args):
"""Runs the specified keyword and returns from the enclosing user keyword.
The keyword to execute is defined with ``name`` and ``*args`` exactly
like with `Run Keyword`. After running the keyword, returns from the
enclosing user keyword and passes possible return value from the
        executed keyword further. Returning from a keyword has exactly the same
semantics as with `Return From Keyword`.
Example:
| `Run Keyword And Return` | `My Keyword` | arg1 | arg2 |
| # Above is equivalent to: |
| ${result} = | `My Keyword` | arg1 | arg2 |
| `Return From Keyword` | ${result} | | |
Use `Run Keyword And Return If` if you want to run keyword and return
based on a condition.
New in Robot Framework 2.8.2.
"""
ret = self.run_keyword(name, *args)
self.return_from_keyword(escape(ret))
@run_keyword_variant(resolve=2)
def run_keyword_and_return_if(self, condition, name, *args):
"""Runs the specified keyword and returns from the enclosing user keyword.
A wrapper for `Run Keyword And Return` to run and return based on
the given ``condition``. The condition is evaluated using the same
semantics as with `Should Be True` keyword.
Example:
| `Run Keyword And Return If` | ${rc} > 0 | `My Keyword` | arg1 | arg2 |
| # Above is equivalent to: |
        | `Run Keyword If` | ${rc} > 0 | `Run Keyword And Return` | `My Keyword` | arg1 | arg2 |
Use `Return From Keyword If` if you want to return a certain value
based on a condition.
New in Robot Framework 2.8.2.
"""
if self._is_true(condition):
self.run_keyword_and_return(name, *args)
def pass_execution(self, message, *tags):
"""Skips rest of the current test, setup, or teardown with PASS status.
This keyword can be used anywhere in the test data, but the place where
used affects the behavior:
- When used in any setup or teardown (suite, test or keyword), passes
that setup or teardown. Possible keyword teardowns of the started
keywords are executed. Does not affect execution or statuses
otherwise.
- When used in a test outside setup or teardown, passes that particular
test case. Possible test and keyword teardowns are executed.
Possible continuable failures before this keyword is used, as well as
failures in executed teardowns, will fail the execution.
It is mandatory to give a message explaining why execution was passed.
By default the message is considered plain text, but starting it with
``*HTML*`` allows using HTML formatting.
        It is also possible to modify test tags by passing tags after the message
similarly as with `Fail` keyword. Tags starting with a hyphen
(e.g. ``-regression``) are removed and others added. Tags are modified
        using `Set Tags` and `Remove Tags` internally, and the semantics of
        setting and removing them are the same as with these keywords.
Examples:
| Pass Execution | All features available in this version tested. |
| Pass Execution | Deprecated test. | deprecated | -regression |
        This keyword is typically wrapped inside some other keyword, such as
`Run Keyword If`, to pass based on a condition. The most common case
can be handled also with `Pass Execution If`:
| Run Keyword If | ${rc} < 0 | Pass Execution | Negative values are cool. |
| Pass Execution If | ${rc} < 0 | Negative values are cool. |
Passing execution in the middle of a test, setup or teardown should be
used with care. In the worst case it leads to tests that skip all the
parts that could actually uncover problems in the tested application.
        In cases where execution cannot continue due to external factors,
it is often safer to fail the test case and make it non-critical.
New in Robot Framework 2.8.
"""
message = message.strip()
if not message:
raise RuntimeError('Message cannot be empty.')
self._set_and_remove_tags(tags)
log_message, level = self._get_logged_test_message_and_level(message)
self.log('Execution passed with message:\n%s' % log_message, level)
raise PassExecution(message)
@run_keyword_variant(resolve=1)
def pass_execution_if(self, condition, message, *tags):
"""Conditionally skips rest of the current test, setup, or teardown with PASS status.
A wrapper for `Pass Execution` to skip rest of the current test,
        setup or teardown based on the given ``condition``. The condition is
evaluated similarly as with `Should Be True` keyword, and ``message``
and ``*tags`` have same semantics as with `Pass Execution`.
Example:
| :FOR | ${var} | IN | @{VALUES} |
| | Pass Execution If | '${var}' == 'EXPECTED' | Correct value was found |
| | Do Something | ${var} |
New in Robot Framework 2.8.
"""
if self._is_true(condition):
message = self._variables.replace_string(message)
tags = self._variables.replace_list(tags)
self.pass_execution(message, *tags)
class _Misc(_BuiltInBase):
def no_operation(self):
"""Does absolutely nothing."""
def sleep(self, time_, reason=None):
"""Pauses the test executed for the given time.
``time`` may be either a number or a time string. Time strings are in
a format such as ``1 day 2 hours 3 minutes 4 seconds 5milliseconds`` or
``1d 2h 3m 4s 5ms``, and they are fully explained in an appendix of
Robot Framework User Guide. Optional `reason` can be used to explain why
sleeping is necessary. Both the time slept and the reason are logged.
Examples:
| Sleep | 42 |
| Sleep | 1.5 |
| Sleep | 2 minutes 10 seconds |
| Sleep | 10s | Wait for a reply |
"""
seconds = timestr_to_secs(time_)
# Python hangs with negative values
if seconds < 0:
seconds = 0
self._sleep_in_parts(seconds)
self.log('Slept %s' % secs_to_timestr(seconds))
if reason:
self.log(reason)
def _sleep_in_parts(self, seconds):
        # time.sleep cannot be interrupted on Windows. Sleeping in small
        # pieces ensures that a stop signal (e.g. caused by a timeout) is
        # noticed reasonably quickly.
endtime = time.time() + float(seconds)
while True:
remaining = endtime - time.time()
if remaining <= 0:
break
time.sleep(min(remaining, 0.01))
def catenate(self, *items):
"""Catenates the given items together and returns the resulted string.
By default, items are catenated with spaces, but if the first item
contains the string ``SEPARATOR=<sep>``, the separator ``<sep>`` is
used instead. Items are converted into strings when necessary.
Examples:
| ${str1} = | Catenate | Hello | world | |
| ${str2} = | Catenate | SEPARATOR=--- | Hello | world |
| ${str3} = | Catenate | SEPARATOR= | Hello | world |
=>
| ${str1} = 'Hello world'
| ${str2} = 'Hello---world'
| ${str3} = 'Helloworld'
"""
if not items:
return ''
items = [unic(item) for item in items]
if items[0].startswith('SEPARATOR='):
sep = items[0][len('SEPARATOR='):]
items = items[1:]
else:
sep = ' '
return sep.join(items)
def log(self, message, level='INFO', html=False, console=False, repr=False):
u"""Logs the given message with the given level.
Valid levels are TRACE, DEBUG, INFO (default), HTML, WARN, and ERROR.
Messages below the current active log level are ignored. See
`Set Log Level` keyword and ``--loglevel`` command line option
for more details about setting the level.
Messages logged with the WARN or ERROR levels will be automatically
visible also in the console and in the Test Execution Errors section
in the log file.
Logging can be configured using optional ``html``, ``console`` and
``repr`` arguments. They are off by default, but can be enabled
by giving them a true value. See `Boolean arguments` section for more
information about true and false values.
If the ``html`` argument is given a true value, the message will be
considered HTML and special characters such as ``<`` in it are not
escaped. For example, logging ``<img src="image.png">`` creates an
image when ``html`` is true, but otherwise the message is that exact
string. An alternative to using the ``html`` argument is using the HTML
pseudo log level. It logs the message as HTML using the INFO level.
If the ``console`` argument is true, the message will be written to
the console where test execution was started from in addition to
the log file. This keyword always uses the standard output stream
and adds a newline after the written message. Use `Log To Console`
        instead if either of these is undesirable.
If the ``repr`` argument is true, the given item will be passed through
a custom version of Python's ``pprint.pformat()`` function before
logging it. This is useful, for example, when working with strings or
bytes containing invisible characters, or when working with nested data
structures. The custom version differs from the standard one so that it
omits the ``u`` prefix from Unicode strings and adds ``b`` prefix to
byte strings.
Examples:
| Log | Hello, world! | | | # Normal INFO message. |
| Log | Warning, world! | WARN | | # Warning. |
| Log | <b>Hello</b>, world! | html=yes | | # INFO message as HTML. |
| Log | <b>Hello</b>, world! | HTML | | # Same as above. |
| Log | <b>Hello</b>, world! | DEBUG | html=true | # DEBUG as HTML. |
| Log | Hello, console! | console=yes | | # Log also to the console. |
| Log | Hyv\xe4 \\x00 | repr=yes | | # Log ``'Hyv\\xe4 \\x00'``. |
See `Log Many` if you want to log multiple messages in one go, and
`Log To Console` if you only want to write to the console.
Arguments ``html``, ``console``, and ``repr`` are new in Robot Framework
2.8.2.
Pprint support when ``repr`` is used is new in Robot Framework 2.8.6,
and it was changed to drop the ``u`` prefix and add the ``b`` prefix
in Robot Framework 2.9.
"""
if is_truthy(repr):
message = prepr(message, width=80)
logger.write(message, level, is_truthy(html))
if is_truthy(console):
logger.console(message)
@run_keyword_variant(resolve=0)
def log_many(self, *messages):
"""Logs the given messages as separate entries using the INFO level.
Supports also logging list and dictionary variable items individually.
Examples:
| Log Many | Hello | ${var} |
| Log Many | @{list} | &{dict} |
See `Log` and `Log To Console` keywords if you want to use alternative
log levels, use HTML, or log to the console.
"""
for msg in self._yield_logged_messages(messages):
self.log(msg)
def _yield_logged_messages(self, messages):
for msg in messages:
var = VariableSplitter(msg)
value = self._variables.replace_scalar(msg)
if var.is_list_variable():
for item in value:
yield item
elif var.is_dict_variable():
for name, value in value.items():
yield '%s=%s' % (name, value)
else:
yield value
def log_to_console(self, message, stream='STDOUT', no_newline=False):
"""Logs the given message to the console.
By default uses the standard output stream. Using the standard error
        stream is possible by giving the ``stream`` argument value ``STDERR``
(case-insensitive).
By default appends a newline to the logged message. This can be
disabled by giving the ``no_newline`` argument a true value (see
`Boolean arguments`).
Examples:
| Log To Console | Hello, console! | |
| Log To Console | Hello, stderr! | STDERR |
| Log To Console | Message starts here and is | no_newline=true |
| Log To Console | continued without newline. | |
This keyword does not log the message to the normal log file. Use
`Log` keyword, possibly with argument ``console``, if that is desired.
New in Robot Framework 2.8.2.
"""
logger.console(message, newline=is_falsy(no_newline), stream=stream)
@run_keyword_variant(resolve=0)
def comment(self, *messages):
"""Displays the given messages in the log file as keyword arguments.
This keyword does nothing with the arguments it receives, but as they
are visible in the log, this keyword can be used to display simple
messages. Given arguments are ignored so thoroughly that they can even
contain non-existing variables. If you are interested about variable
values, you can use the `Log` or `Log Many` keywords.
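        Example (the variable below does not need to exist):
        | Comment | Rest of this row is only shown in the log | ${nonexisting} |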
"""
pass
def set_log_level(self, level):
"""Sets the log threshold to the specified level and returns the old level.
        Messages below the level will not be logged. The default logging level is
INFO, but it can be overridden with the command line option
``--loglevel``.
The available levels: TRACE, DEBUG, INFO (default), WARN, ERROR and NONE (no
logging).
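        Example:
        | ${old level} = | Set Log Level | DEBUG |
        | Set Log Level | ${old level} |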
"""
try:
old = self._context.output.set_log_level(level)
except DataError as err:
raise RuntimeError(unic(err))
self._namespace.variables.set_global('${LOG_LEVEL}', level.upper())
self.log('Log level changed from %s to %s.' % (old, level.upper()))
return old
def reload_library(self, name_or_instance):
"""Rechecks what keywords the specified library provides.
Can be called explicitly in the test data or by a library itself
when keywords it provides have changed.
The library can be specified by its name or as the active instance of
the library. The latter is especially useful if the library itself
calls this keyword as a method.
New in Robot Framework 2.9.
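        Example (``MyLibrary`` is a placeholder for a library that creates
        keywords dynamically):
        | Reload Library | MyLibrary |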
"""
library = self._namespace.reload_library(name_or_instance)
self.log('Reloaded library %s with %s keywords.' % (library.name,
len(library)))
@run_keyword_variant(resolve=0)
def import_library(self, name, *args):
"""Imports a library with the given name and optional arguments.
This functionality allows dynamic importing of libraries while tests
are running. That may be necessary, if the library itself is dynamic
and not yet available when test data is processed. In a normal case,
libraries should be imported using the Library setting in the Setting
table.
This keyword supports importing libraries both using library
names and physical paths. When paths are used, they must be
given in absolute format or found from
[http://robotframework.org/robotframework/latest/RobotFrameworkUserGuide.html#pythonpath-jythonpath-and-ironpythonpath|
search path]. Forward slashes can be used as path separators in all
operating systems.
It is possible to pass arguments to the imported library and also
named argument syntax works if the library supports it. ``WITH NAME``
syntax can be used to give a custom name to the imported library.
Examples:
| Import Library | MyLibrary |
| Import Library | ${CURDIR}/../Library.py | arg1 | named=arg2 |
| Import Library | ${LIBRARIES}/Lib.java | arg | WITH NAME | JavaLib |
"""
try:
self._namespace.import_library(name, list(args))
except DataError as err:
raise RuntimeError(unic(err))
@run_keyword_variant(resolve=0)
def import_variables(self, path, *args):
"""Imports a variable file with the given path and optional arguments.
Variables imported with this keyword are set into the test suite scope
        similarly as when importing them in the Setting table using the Variables
setting. These variables override possible existing variables with
the same names. This functionality can thus be used to import new
variables, for example, for each test in a test suite.
The given path must be absolute or found from
[http://robotframework.org/robotframework/latest/RobotFrameworkUserGuide.html#pythonpath-jythonpath-and-ironpythonpath|
        search path]. Forward slashes can be used as the path separator regardless
        of the operating system.
Examples:
| Import Variables | ${CURDIR}/variables.py | | |
| Import Variables | ${CURDIR}/../vars/env.py | arg1 | arg2 |
| Import Variables | file_from_pythonpath.py | | |
"""
try:
self._namespace.import_variables(path, list(args), overwrite=True)
except DataError as err:
raise RuntimeError(unic(err))
@run_keyword_variant(resolve=0)
def import_resource(self, path):
"""Imports a resource file with the given path.
Resources imported with this keyword are set into the test suite scope
        similarly as when importing them in the Setting table using the Resource
setting.
The given path must be absolute or found from
[http://robotframework.org/robotframework/latest/RobotFrameworkUserGuide.html#pythonpath-jythonpath-and-ironpythonpath|
        search path]. Forward slashes can be used as the path separator regardless
        of the operating system.
Examples:
| Import Resource | ${CURDIR}/resource.txt |
| Import Resource | ${CURDIR}/../resources/resource.html |
| Import Resource | found_from_pythonpath.robot |
"""
try:
self._namespace.import_resource(path)
except DataError as err:
raise RuntimeError(unic(err))
def set_library_search_order(self, *search_order):
"""Sets the resolution order to use when a name matches multiple keywords.
The library search order is used to resolve conflicts when a keyword
name in the test data matches multiple keywords. The first library
(or resource, see below) containing the keyword is selected and that
keyword implementation used. If the keyword is not found from any library
        (or resource), test execution fails the same way as when the search
order is not set.
When this keyword is used, there is no need to use the long
``LibraryName.Keyword Name`` notation. For example, instead of
having
| MyLibrary.Keyword | arg |
| MyLibrary.Another Keyword |
| MyLibrary.Keyword | xxx |
you can have
| Set Library Search Order | MyLibrary |
| Keyword | arg |
| Another Keyword |
| Keyword | xxx |
This keyword can be used also to set the order of keywords in different
resource files. In this case resource names must be given without paths
or extensions like:
| Set Library Search Order | resource | another_resource |
*NOTE:*
        - The search order is valid only in the suite where this keyword is used.
- Keywords in resources always have higher priority than
keywords in libraries regardless the search order.
- The old order is returned and can be used to reset the search order later.
- Library and resource names in the search order are both case and space
insensitive.
"""
return self._namespace.set_search_order(search_order)
def keyword_should_exist(self, name, msg=None):
"""Fails unless the given keyword exists in the current scope.
        Fails also if there is more than one keyword with the same name.
Works both with the short name (e.g. ``Log``) and the full name
(e.g. ``BuiltIn.Log``).
The default error message can be overridden with the ``msg`` argument.
See also `Variable Should Exist`.
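        Examples (``My Keyword`` is a placeholder for any keyword name):
        | Keyword Should Exist | BuiltIn.Log |
        | Keyword Should Exist | My Keyword | msg=My Keyword is not available. |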
"""
try:
runner = self._namespace.get_runner(name)
except DataError as err:
raise AssertionError(msg or unic(err))
if isinstance(runner, UserErrorHandler):
raise AssertionError(msg or runner.error)
def get_time(self, format='timestamp', time_='NOW'):
"""Returns the given time in the requested format.
*NOTE:* DateTime library added in Robot Framework 2.8.5 contains
much more flexible keywords for getting the current date and time
and for date and time handling in general.
How time is returned is determined based on the given ``format``
string as follows. Note that all checks are case-insensitive.
1) If ``format`` contains the word ``epoch``, the time is returned
in seconds after the UNIX epoch (1970-01-01 00:00:00 UTC).
The return value is always an integer.
2) If ``format`` contains any of the words ``year``, ``month``,
``day``, ``hour``, ``min``, or ``sec``, only the selected parts are
returned. The order of the returned parts is always the one
in the previous sentence and the order of words in ``format``
is not significant. The parts are returned as zero-padded
strings (e.g. May -> ``05``).
3) Otherwise (and by default) the time is returned as a
timestamp string in the format ``2006-02-24 15:08:31``.
By default this keyword returns the current local time, but
that can be altered using ``time`` argument as explained below.
Note that all checks involving strings are case-insensitive.
1) If ``time`` is a number, or a string that can be converted to
a number, it is interpreted as seconds since the UNIX epoch.
This documentation was originally written about 1177654467
seconds after the epoch.
2) If ``time`` is a timestamp, that time will be used. Valid
timestamp formats are ``YYYY-MM-DD hh:mm:ss`` and
``YYYYMMDD hhmmss``.
3) If ``time`` is equal to ``NOW`` (default), the current local
        time is used. This time is obtained using Python's ``time.time()``
function.
4) If ``time`` is equal to ``UTC``, the current time in
[http://en.wikipedia.org/wiki/Coordinated_Universal_Time|UTC]
        is used. This time is obtained using ``time.time() + time.altzone``
in Python.
5) If ``time`` is in the format like ``NOW - 1 day`` or ``UTC + 1 hour
30 min``, the current local/UTC time plus/minus the time
specified with the time string is used. The time string format
is described in an appendix of Robot Framework User Guide.
Examples (expecting the current local time is 2006-03-29 15:06:21):
| ${time} = | Get Time | | | |
| ${secs} = | Get Time | epoch | | |
| ${year} = | Get Time | return year | | |
| ${yyyy} | ${mm} | ${dd} = | Get Time | year,month,day |
| @{time} = | Get Time | year month day hour min sec | | |
| ${y} | ${s} = | Get Time | seconds and year | |
=>
| ${time} = '2006-03-29 15:06:21'
| ${secs} = 1143637581
| ${year} = '2006'
| ${yyyy} = '2006', ${mm} = '03', ${dd} = '29'
| @{time} = ['2006', '03', '29', '15', '06', '21']
| ${y} = '2006'
| ${s} = '21'
Examples (expecting the current local time is 2006-03-29 15:06:21 and
UTC time is 2006-03-29 12:06:21):
| ${time} = | Get Time | | 1177654467 | # Time given as epoch seconds |
| ${secs} = | Get Time | sec | 2007-04-27 09:14:27 | # Time given as a timestamp |
| ${year} = | Get Time | year | NOW | # The local time of execution |
| @{time} = | Get Time | hour min sec | NOW + 1h 2min 3s | # 1h 2min 3s added to the local time |
| @{utc} = | Get Time | hour min sec | UTC | # The UTC time of execution |
| ${hour} = | Get Time | hour | UTC - 1 hour | # 1h subtracted from the UTC time |
=>
| ${time} = '2007-04-27 09:14:27'
| ${secs} = 27
| ${year} = '2006'
| @{time} = ['16', '08', '24']
| @{utc} = ['12', '06', '21']
| ${hour} = '11'
Support for UTC time was added in Robot Framework 2.7.5 but it did not
work correctly until 2.7.7.
"""
return get_time(format, parse_time(time_))
def evaluate(self, expression, modules=None, namespace=None):
"""Evaluates the given expression in Python and returns the results.
``expression`` is evaluated in Python as explained in `Evaluating
expressions`.
``modules`` argument can be used to specify a comma separated
list of Python modules to be imported and added to the evaluation
namespace.
``namespace`` argument can be used to pass a custom evaluation
namespace as a dictionary. Possible ``modules`` are added to this
namespace. This is a new feature in Robot Framework 2.8.4.
Variables used like ``${variable}`` are replaced in the expression
before evaluation. Variables are also available in the evaluation
namespace and can be accessed using special syntax ``$variable``.
This is a new feature in Robot Framework 2.9 and it is explained more
thoroughly in `Evaluating expressions`.
Examples (expecting ``${result}`` is 3.14):
| ${status} = | Evaluate | 0 < ${result} < 10 | # Would also work with string '3.14' |
| ${status} = | Evaluate | 0 < $result < 10 | # Using variable itself, not string representation |
| ${random} = | Evaluate | random.randint(0, sys.maxint) | modules=random, sys |
| ${ns} = | Create Dictionary | x=${4} | y=${2} |
| ${result} = | Evaluate | x*10 + y | namespace=${ns} |
=>
| ${status} = True
| ${random} = <random integer>
| ${result} = 42
"""
variables = self._variables.as_dict(decoration=False)
expression = self._handle_variables_in_expression(expression, variables)
namespace = self._create_evaluation_namespace(namespace, modules)
variables = self._decorate_variables_for_evaluation(variables)
try:
if not is_string(expression):
raise TypeError("Expression must be string, got %s."
% type_name(expression))
if not expression:
raise ValueError("Expression cannot be empty.")
return eval(expression, namespace, variables)
except:
raise RuntimeError("Evaluating expression '%s' failed: %s"
% (expression, get_error_message()))
def _handle_variables_in_expression(self, expression, variables):
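        # Tokenizes the expression and rewrites '$name' references to
        # 'RF_VAR_name' so that they match the decorated variable names in
        # the evaluation namespace (see _decorate_variables_for_evaluation).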
if not is_string(expression):
return expression
tokens = []
variable_started = seen_variable = False
generated = generate_tokens(StringIO(expression).readline)
for toknum, tokval, _, _, _ in generated:
if variable_started:
if toknum == token.NAME:
if tokval not in variables:
variable_not_found('$%s' % tokval, variables,
deco_braces=False)
tokval = 'RF_VAR_' + tokval
seen_variable = True
else:
tokens.append((token.ERRORTOKEN, '$'))
variable_started = False
if toknum == token.ERRORTOKEN and tokval == '$':
variable_started = True
else:
tokens.append((toknum, tokval))
if seen_variable:
return untokenize(tokens).strip()
return expression
def _create_evaluation_namespace(self, namespace, modules):
namespace = dict(namespace or {})
modules = modules.replace(' ', '').split(',') if modules else []
namespace.update((m, __import__(m)) for m in modules if m)
return namespace
def _decorate_variables_for_evaluation(self, variables):
decorated = [('RF_VAR_' + name, value)
for name, value in variables.items()]
return NormalizedDict(decorated, ignore='_')
def call_method(self, object, method_name, *args, **kwargs):
"""Calls the named method of the given object with the provided arguments.
The possible return value from the method is returned and can be
        assigned to a variable. This keyword fails if the object does not have
        a method with the given name or if executing the method raises an
exception.
        Support for ``**kwargs`` is new in Robot Framework 2.9. Since then,
        possible equal signs in other arguments must be escaped with a
backslash like ``\\=``.
Examples:
| Call Method | ${hashtable} | put | myname | myvalue |
| ${isempty} = | Call Method | ${hashtable} | isEmpty | |
| Should Not Be True | ${isempty} | | | |
| ${value} = | Call Method | ${hashtable} | get | myname |
| Should Be Equal | ${value} | myvalue | | |
| Call Method | ${object} | kwargs | name=value | foo=bar |
| Call Method | ${object} | positional | escaped\\=equals |
"""
try:
method = getattr(object, method_name)
except AttributeError:
raise RuntimeError("Object '%s' does not have method '%s'."
% (object, method_name))
try:
return method(*args, **kwargs)
except:
raise RuntimeError("Calling method '%s' failed: %s"
% (method_name, get_error_message()))
def regexp_escape(self, *patterns):
"""Returns each argument string escaped for use as a regular expression.
This keyword can be used to escape strings to be used with
`Should Match Regexp` and `Should Not Match Regexp` keywords.
Escaping is done with Python's ``re.escape()`` function.
Examples:
| ${escaped} = | Regexp Escape | ${original} |
| @{strings} = | Regexp Escape | @{strings} |
"""
if len(patterns) == 0:
return ''
if len(patterns) == 1:
return re.escape(patterns[0])
return [re.escape(p) for p in patterns]
def set_test_message(self, message, append=False):
"""Sets message for the current test case.
If the optional ``append`` argument is given a true value (see `Boolean
arguments`), the given ``message`` is added after the possible earlier
message by joining the messages with a space.
In test teardown this keyword can alter the possible failure message,
but otherwise failures override messages set by this keyword. Notice
that in teardown the message is available as a built-in variable
``${TEST MESSAGE}``.
It is possible to use HTML format in the message by starting the message
with ``*HTML*``.
Examples:
| Set Test Message | My message | |
| Set Test Message | is continued. | append=yes |
| Should Be Equal | ${TEST MESSAGE} | My message is continued. |
| Set Test Message | `*`HTML`*` <b>Hello!</b> | |
This keyword can not be used in suite setup or suite teardown.
Support for ``append`` was added in Robot Framework 2.7.7 and support
for HTML format in 2.8.
"""
test = self._context.test
if not test:
raise RuntimeError("'Set Test Message' keyword cannot be used in "
"suite setup or teardown.")
test.message = self._get_possibly_appended_value(test.message, message,
append)
if self._context.in_test_teardown:
self._variables.set_test("${TEST_MESSAGE}", test.message)
message, level = self._get_logged_test_message_and_level(test.message)
self.log('Set test message to:\n%s' % message, level)
def _get_possibly_appended_value(self, initial, new, append):
if not is_unicode(new):
new = unic(new)
if is_truthy(append) and initial:
return '%s %s' % (initial, new)
return new
def _get_logged_test_message_and_level(self, message):
if message.startswith('*HTML*'):
return message[6:].lstrip(), 'HTML'
return message, 'INFO'
def set_test_documentation(self, doc, append=False):
"""Sets documentation for the current test case.
By default the possible existing documentation is overwritten, but
this can be changed using the optional ``append`` argument similarly
as with `Set Test Message` keyword.
The current test documentation is available as a built-in variable
``${TEST DOCUMENTATION}``. This keyword can not be used in suite
setup or suite teardown.
New in Robot Framework 2.7. Support for ``append`` was added in 2.7.7.
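        Example:
        | Set Test Documentation | Extended in the test itself. | append=yes |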
"""
test = self._context.test
if not test:
raise RuntimeError("'Set Test Documentation' keyword cannot be "
"used in suite setup or teardown.")
test.doc = self._get_possibly_appended_value(test.doc, doc, append)
self._variables.set_test('${TEST_DOCUMENTATION}', test.doc)
self.log('Set test documentation to:\n%s' % test.doc)
def set_suite_documentation(self, doc, append=False, top=False):
"""Sets documentation for the current test suite.
By default the possible existing documentation is overwritten, but
this can be changed using the optional ``append`` argument similarly
as with `Set Test Message` keyword.
This keyword sets the documentation of the current suite by default.
If the optional ``top`` argument is given a true value (see `Boolean
arguments`), the documentation of the top level suite is altered
instead.
The documentation of the current suite is available as a built-in
variable ``${SUITE DOCUMENTATION}``.
New in Robot Framework 2.7. Support for ``append`` and ``top`` were
added in 2.7.7.
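        Example:
        | Set Suite Documentation | Extra information. | append=yes | top=yes |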
"""
top = is_truthy(top)
suite = self._get_context(top).suite
suite.doc = self._get_possibly_appended_value(suite.doc, doc, append)
self._variables.set_suite('${SUITE_DOCUMENTATION}', suite.doc, top)
self.log('Set suite documentation to:\n%s' % suite.doc)
def set_suite_metadata(self, name, value, append=False, top=False):
"""Sets metadata for the current test suite.
By default possible existing metadata values are overwritten, but
this can be changed using the optional ``append`` argument similarly
as with `Set Test Message` keyword.
This keyword sets the metadata of the current suite by default.
If the optional ``top`` argument is given a true value (see `Boolean
arguments`), the metadata of the top level suite is altered instead.
The metadata of the current suite is available as a built-in variable
``${SUITE METADATA}`` in a Python dictionary. Notice that modifying this
variable directly has no effect on the actual metadata the suite has.
New in Robot Framework 2.7.4. Support for ``append`` and ``top`` were
added in 2.7.7.
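        Example (``${VERSION}`` is a placeholder for any variable):
        | Set Suite Metadata | Version | ${VERSION} |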
"""
top = is_truthy(top)
if not is_unicode(name):
name = unic(name)
metadata = self._get_context(top).suite.metadata
original = metadata.get(name, '')
metadata[name] = self._get_possibly_appended_value(original, value, append)
self._variables.set_suite('${SUITE_METADATA}', metadata.copy(), top)
self.log("Set suite metadata '%s' to value '%s'." % (name, metadata[name]))
def set_tags(self, *tags):
"""Adds given ``tags`` for the current test or all tests in a suite.
When this keyword is used inside a test case, that test gets
the specified tags and other tests are not affected.
If this keyword is used in a suite setup, all test cases in
        that suite, recursively, get the given tags. It is a failure
to use this keyword in a suite teardown.
The current tags are available as a built-in variable ``@{TEST TAGS}``.
See `Remove Tags` if you want to remove certain tags and `Fail` if
you want to fail the test case after setting and/or removing tags.
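        Example (the tag names are arbitrary):
        | Set Tags | smoke | owner-team1 |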
"""
ctx = self._context
if ctx.test:
ctx.test.tags.add(tags)
ctx.variables.set_test('@{TEST_TAGS}', list(ctx.test.tags))
elif not ctx.in_suite_teardown:
ctx.suite.set_tags(tags, persist=True)
else:
raise RuntimeError("'Set Tags' cannot be used in suite teardown.")
self.log('Set tag%s %s.' % (s(tags), seq2str(tags)))
def remove_tags(self, *tags):
"""Removes given ``tags`` from the current test or all tests in a suite.
Tags can be given exactly or using a pattern where ``*`` matches
anything and ``?`` matches one character.
This keyword can affect either one test case or all test cases in a
test suite similarly as `Set Tags` keyword.
The current tags are available as a built-in variable ``@{TEST TAGS}``.
Example:
| Remove Tags | mytag | something-* | ?ython |
See `Set Tags` if you want to add certain tags and `Fail` if you want
to fail the test case after setting and/or removing tags.
"""
ctx = self._context
if ctx.test:
ctx.test.tags.remove(tags)
ctx.variables.set_test('@{TEST_TAGS}', list(ctx.test.tags))
elif not ctx.in_suite_teardown:
ctx.suite.set_tags(remove=tags, persist=True)
else:
raise RuntimeError("'Remove Tags' cannot be used in suite teardown.")
self.log('Removed tag%s %s.' % (s(tags), seq2str(tags)))
def get_library_instance(self, name=None, all=False):
"""Returns the currently active instance of the specified test library.
This keyword makes it easy for test libraries to interact with
other test libraries that have state. This is illustrated by
the Python example below:
| from robot.libraries.BuiltIn import BuiltIn
|
| def title_should_start_with(expected):
| seleniumlib = BuiltIn().get_library_instance('SeleniumLibrary')
| title = seleniumlib.get_title()
| if not title.startswith(expected):
| raise AssertionError("Title '%s' did not start with '%s'"
| % (title, expected))
It is also possible to use this keyword in the test data and
pass the returned library instance to another keyword. If a
library is imported with a custom name, the ``name`` used to get
the instance must be that name and not the original library name.
If the optional argument ``all`` is given a true value, then a
dictionary mapping all library names to instances will be returned.
This feature is new in Robot Framework 2.9.2.
Example:
| &{all libs} = | Get library instance | all=True |
"""
if is_truthy(all):
return self._namespace.get_library_instances()
try:
return self._namespace.get_library_instance(name)
except DataError as err:
raise RuntimeError(unic(err))
class BuiltIn(_Verify, _Converter, _Variables, _RunKeyword, _Control, _Misc):
"""An always available standard library with often needed keywords.
``BuiltIn`` is Robot Framework's standard library that provides a set
of generic keywords needed often. It is imported automatically and
thus always available. The provided keywords can be used, for example,
for verifications (e.g. `Should Be Equal`, `Should Contain`),
conversions (e.g. `Convert To Integer`) and for various other purposes
(e.g. `Log`, `Sleep`, `Run Keyword If`, `Set Global Variable`).
== Table of contents ==
- `HTML error messages`
- `Evaluating expressions`
- `Boolean arguments`
- `Multiline string comparisons`
- `Shortcuts`
- `Keywords`
= HTML error messages =
Many of the keywords accept an optional error message to use if the keyword
fails. Starting from Robot Framework 2.8, it is possible to use HTML in
these messages by prefixing them with ``*HTML*``. See `Fail` keyword for
a usage example. Notice that using HTML in messages is not limited to
BuiltIn library but works with any error message.
= Evaluating expressions =
Many keywords, such as `Evaluate`, `Run Keyword If` and `Should Be True`,
accept an expression that is evaluated in Python. These expressions are
evaluated using Python's
[https://docs.python.org/2/library/functions.html#eval|eval] function so
that all Python built-ins like ``len()`` and ``int()`` are available.
`Evaluate` allows configuring the execution namespace with custom modules,
and other keywords have [https://docs.python.org/2/library/os.html|os]
and [https://docs.python.org/2/library/sys.html|sys] modules available
automatically.
Examples:
| `Run Keyword If` | os.sep == '/' | Log | Not on Windows |
| ${random int} = | `Evaluate` | random.randint(0, 5) | modules=random |
    When a variable is used in the expression using the normal ``${variable}``
    syntax, its value is replaced before the expression is evaluated. This
means that the value used in the expression will be the string
representation of the variable value, not the variable value itself.
This is not a problem with numbers and other objects that have a string
representation that can be evaluated directly, but with other objects
the behavior depends on the string representation. Most importantly,
strings must always be quoted, and if they can contain newlines, they must
be triple quoted.
Examples:
| `Should Be True` | ${rc} < 10 | Return code greater than 10 |
| `Run Keyword If` | '${status}' == 'PASS' | Log | Passed |
| `Run Keyword If` | 'FAIL' in '''${output}''' | Log | Output contains FAIL |
Starting from Robot Framework 2.9, variables themselves are automatically
available in the evaluation namespace. They can be accessed using special
variable syntax without the curly braces like ``$variable``. These
variables should never be quoted, and in fact they are not even replaced
inside strings.
Examples:
| `Should Be True` | $rc < 10 | Return code greater than 10 |
| `Run Keyword If` | $status == 'PASS' | `Log` | Passed |
| `Run Keyword If` | 'FAIL' in $output | `Log` | Output contains FAIL |
| `Should Be True` | len($result) > 1 and $result[1] == 'OK' |
Notice that instead of creating complicated expressions, it is often better
to move the logic into a test library.
= Boolean arguments =
Some keywords accept arguments that are handled as Boolean values true or
false. If such an argument is given as a string, it is considered false if
it is either empty or case-insensitively equal to ``false`` or ``no``.
    Keywords that verify something and allow dropping the actual and expected
    values from the possible error message also consider the string ``no values``
    false. Other strings are considered true regardless of their value, and other
    argument types are tested using the same
[http://docs.python.org/2/library/stdtypes.html#truth-value-testing|rules
as in Python].
True examples:
| `Should Be Equal` | ${x} | ${y} | Custom error | values=True | # Strings are generally true. |
| `Should Be Equal` | ${x} | ${y} | Custom error | values=yes | # Same as the above. |
| `Should Be Equal` | ${x} | ${y} | Custom error | values=${TRUE} | # Python ``True`` is true. |
| `Should Be Equal` | ${x} | ${y} | Custom error | values=${42} | # Numbers other than 0 are true. |
False examples:
| `Should Be Equal` | ${x} | ${y} | Custom error | values=False | # String ``false`` is false. |
| `Should Be Equal` | ${x} | ${y} | Custom error | values=no | # Also string ``no`` is false. |
| `Should Be Equal` | ${x} | ${y} | Custom error | values=${EMPTY} | # Empty string is false. |
| `Should Be Equal` | ${x} | ${y} | Custom error | values=${FALSE} | # Python ``False`` is false. |
| `Should Be Equal` | ${x} | ${y} | Custom error | values=no values | # ``no values`` works with ``values`` argument |
Note that prior to Robot Framework 2.9 some keywords considered all
non-empty strings, including ``false`` and ``no``, to be true.
= Multiline string comparisons =
`Should Be Equal` and `Should Be Equal As Strings` report the failures using
[https://en.wikipedia.org/wiki/Diff_utility#Unified_format|unified diff
format] if both strings have more than two lines. New in Robot Framework
2.9.1.
Example:
| ${first} = | `Catenate` | SEPARATOR=\\n | Not in second | Same | Differs | Same |
| ${second} = | `Catenate` | SEPARATOR=\\n | Same | Differs2 | Same | Not in first |
| `Should Be Equal` | ${first} | ${second} |
Results in the following error message:
| Multiline strings are different:
| --- first
| +++ second
| @@ -1,4 +1,4 @@
| -Not in second
| Same
| -Differs
| +Differs2
| Same
| +Not in first
"""
ROBOT_LIBRARY_SCOPE = 'GLOBAL'
ROBOT_LIBRARY_VERSION = get_version()
class RobotNotRunningError(AttributeError):
"""Used when something cannot be done because Robot is not running.
Based on AttributeError to be backwards compatible with RF < 2.8.5.
May later be based directly on Exception, so new code should except
this exception explicitly.
"""
pass
def register_run_keyword(library, keyword, args_to_process=None,
deprecation_warning=True):
"""Registers 'run keyword' so that its arguments can be handled correctly.
*NOTE:* This API will change in RF 3.1. For more information see
https://github.com/robotframework/robotframework/issues/2190. Use with
`deprecation_warning=False` to avoid related deprecation warnings.
1) Why is this method needed
Keywords running other keywords internally (normally using `Run Keyword`
    or some variants of it in BuiltIn) must have the arguments meant for the
internally executed keyword handled specially to prevent processing them
twice. This is done ONLY for keywords registered using this method.
    If the registered keyword has the same name as a keyword from the Robot
    Framework standard libraries, it can be used without getting warnings.
there is a warning in such cases unless the keyword is used in long
format (e.g. MyLib.Keyword).
Keywords executed by registered run keywords can be tested in dry-run mode
if they have 'name' argument which takes the name of the executed keyword.
2) How to use this method
`library` is the name of the library where the registered keyword is
implemented.
`keyword` can be either a function or method implementing the
keyword, or name of the implemented keyword as a string.
`args_to_process` is needed when `keyword` is given as a string, and it
defines how many of the arguments to the registered keyword must be
    processed normally. When `keyword` is a method or function, this
    information is obtained directly from it so that varargs (those specified
    with syntax '*args') are not processed but others are.
3) Examples
from robot.libraries.BuiltIn import BuiltIn, register_run_keyword
def my_run_keyword(name, *args):
# do something
return BuiltIn().run_keyword(name, *args)
# Either one of these works
register_run_keyword(__name__, my_run_keyword)
register_run_keyword(__name__, 'My Run Keyword', 1)
-------------
from robot.libraries.BuiltIn import BuiltIn, register_run_keyword
class MyLibrary:
def my_run_keyword_if(self, expression, name, *args):
# do something
return BuiltIn().run_keyword_if(expression, name, *args)
# Either one of these works
register_run_keyword('MyLibrary', MyLibrary.my_run_keyword_if)
register_run_keyword('MyLibrary', 'my_run_keyword_if', 2)
"""
RUN_KW_REGISTER.register_run_keyword(library, keyword, args_to_process,
deprecation_warning)
|
jaloren/robotframework
|
src/robot/libraries/BuiltIn.py
|
Python
|
apache-2.0
| 146,969 | 0.001082 |
self.description = "Remove a package required by other packages"
lp1 = pmpkg("pkg1")
self.addpkg2db("local", lp1)
lp2 = pmpkg("pkg2")
lp2.depends = ["pkg1"]
self.addpkg2db("local", lp2)
lp3 = pmpkg("pkg3")
lp3.depends = ["pkg1"]
self.addpkg2db("local", lp3)
lp4 = pmpkg("pkg4")
lp4.depends = ["pkg1"]
self.addpkg2db("local", lp4)
self.args = "-R pkg1 pkg2"
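# The removal must fail: pkg3 and pkg4 still depend on pkg1 and are not part
# of the transaction, so every package is expected to remain installed.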
self.addrule("!PACMAN_RETCODE=0")
self.addrule("PKG_EXIST=pkg1")
self.addrule("PKG_EXIST=pkg2")
self.addrule("PKG_EXIST=pkg3")
self.addrule("PKG_EXIST=pkg4")
|
AWhetter/pacman
|
test/pacman/tests/remove047.py
|
Python
|
gpl-2.0
| 521 | 0 |
class A(Aa):
@property
def <warning descr="Getter signature should be (self)">x<caret></warning>(self, r):
return ""
@x.setter
def <warning descr="Setter should not return a value">x</warning>(self, r):
return r
|
asedunov/intellij-community
|
python/testData/quickFixes/PyUpdatePropertySignatureQuickFixTest/getter.py
|
Python
|
apache-2.0
| 245 | 0.053061 |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Ops to manipulate lists of tensors."""
# pylint: disable=g-bad-name
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.core.framework import types_pb2
from tensorflow.python.framework import cpp_shape_inference_pb2
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_list_ops
from tensorflow.python.ops import handle_data_util
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.python.ops.gen_list_ops import *
# pylint: enable=wildcard-import
from tensorflow.python.util.lazy_loader import LazyLoader
# list_ops -> control_flow_ops -> tensor_array_ops -> list_ops
control_flow_ops = LazyLoader(
"control_flow_ops", globals(),
"tensorflow.python.ops.control_flow_ops")
ops.NotDifferentiable("TensorListConcatLists")
ops.NotDifferentiable("TensorListElementShape")
ops.NotDifferentiable("TensorListLength")
ops.NotDifferentiable("TensorListPushBackBatch")
def empty_tensor_list(element_shape,
element_dtype,
max_num_elements=None,
name=None):
if max_num_elements is None:
max_num_elements = -1
return gen_list_ops.empty_tensor_list(
element_shape=_build_element_shape(element_shape),
element_dtype=element_dtype,
max_num_elements=max_num_elements,
name=name)
def _set_handle_data(list_handle, element_shape, element_dtype):
"""Sets type information on `list_handle` for consistency with graphs."""
# TODO(b/169968286): It would be better if we had a consistent story for
# creating handle data from eager operations (shared with VarHandleOp).
if isinstance(list_handle, ops.EagerTensor):
if tensor_util.is_tf_type(element_shape):
element_shape = tensor_shape.TensorShape(None)
elif not isinstance(element_shape, tensor_shape.TensorShape):
element_shape = tensor_shape.TensorShape(element_shape)
handle_data = cpp_shape_inference_pb2.CppShapeInferenceResult.HandleData()
handle_data.is_set = True
handle_data.shape_and_type.append(
cpp_shape_inference_pb2.CppShapeInferenceResult.HandleShapeAndType(
shape=element_shape.as_proto(),
dtype=element_dtype.as_datatype_enum,
specialized_type=types_pb2.ST_TENSOR_LIST))
list_handle._handle_data = handle_data # pylint: disable=protected-access
def tensor_list_reserve(element_shape, num_elements, element_dtype, name=None):
result = gen_list_ops.tensor_list_reserve(
element_shape=_build_element_shape(element_shape),
num_elements=num_elements,
element_dtype=element_dtype,
name=name)
# TODO(b/169968286): gen_ops needs to ensure the metadata is properly
# populated for eager operations.
_set_handle_data(result, element_shape, element_dtype)
return result
def tensor_list_from_tensor(tensor, element_shape, name=None):
tensor = ops.convert_to_tensor(tensor)
result = gen_list_ops.tensor_list_from_tensor(
tensor=tensor,
element_shape=_build_element_shape(element_shape),
name=name)
_set_handle_data(result, tensor.shape, tensor.dtype)
return result
def tensor_list_get_item(input_handle, index, element_dtype, element_shape=None,
name=None):
return gen_list_ops.tensor_list_get_item(
input_handle=input_handle,
index=index,
element_shape=_build_element_shape(element_shape),
element_dtype=element_dtype,
name=name)
def tensor_list_pop_back(input_handle, element_dtype, name=None):
return gen_list_ops.tensor_list_pop_back(
input_handle=input_handle,
element_shape=-1,
element_dtype=element_dtype,
name=name)
def tensor_list_gather(input_handle,
indices,
element_dtype,
element_shape=None,
name=None):
return gen_list_ops.tensor_list_gather(
input_handle=input_handle,
indices=indices,
element_shape=_build_element_shape(element_shape),
element_dtype=element_dtype,
name=name)
def tensor_list_scatter(tensor,
indices,
element_shape=None,
input_handle=None,
name=None):
"""Returns a TensorList created or updated by scattering `tensor`."""
tensor = ops.convert_to_tensor(tensor)
if input_handle is not None:
output_handle = gen_list_ops.tensor_list_scatter_into_existing_list(
input_handle=input_handle, tensor=tensor, indices=indices, name=name)
handle_data_util.copy_handle_data(input_handle, output_handle)
return output_handle
else:
output_handle = gen_list_ops.tensor_list_scatter_v2(
tensor=tensor,
indices=indices,
element_shape=_build_element_shape(element_shape),
num_elements=-1,
name=name)
_set_handle_data(output_handle, element_shape, tensor.dtype)
return output_handle
def tensor_list_stack(input_handle,
element_dtype,
num_elements=-1,
element_shape=None,
name=None):
return gen_list_ops.tensor_list_stack(
input_handle=input_handle,
element_shape=_build_element_shape(element_shape),
element_dtype=element_dtype,
num_elements=num_elements,
name=name)
def tensor_list_concat(input_handle, element_dtype, element_shape=None,
name=None):
# Ignore the lengths output of TensorListConcat. It is only used during
# gradient computation.
return gen_list_ops.tensor_list_concat_v2(
input_handle=input_handle,
element_dtype=element_dtype,
element_shape=_build_element_shape(element_shape),
leading_dims=ops.convert_to_tensor([], dtype=dtypes.int64),
name=name)[0]
def tensor_list_split(tensor, element_shape, lengths, name=None):
return gen_list_ops.tensor_list_split(
tensor=tensor,
element_shape=_build_element_shape(element_shape),
lengths=lengths,
name=name)
def tensor_list_set_item(input_handle,
index,
item,
resize_if_index_out_of_bounds=False,
name=None):
"""Sets `item` at `index` in input list."""
if resize_if_index_out_of_bounds:
input_list_size = gen_list_ops.tensor_list_length(input_handle)
# TODO(srbs): This could cause some slowdown. Consider fusing resize
# functionality in the SetItem op.
input_handle = control_flow_ops.cond(
index >= input_list_size,
lambda: gen_list_ops.tensor_list_resize( # pylint: disable=g-long-lambda
input_handle, index + 1),
lambda: input_handle)
output_handle = gen_list_ops.tensor_list_set_item(
input_handle=input_handle, index=index, item=item, name=name)
handle_data_util.copy_handle_data(input_handle, output_handle)
return output_handle
@ops.RegisterGradient("TensorListPushBack")
def _PushBackGrad(op, dresult):
return gen_list_ops.tensor_list_pop_back(
dresult,
element_shape=array_ops.shape(op.inputs[1]),
element_dtype=op.get_attr("element_dtype"))
@ops.RegisterGradient("TensorListPopBack")
def _PopBackGrad(op, dlist, delement):
if dlist is None:
dlist = empty_tensor_list(
element_dtype=delement.dtype,
element_shape=gen_list_ops.tensor_list_element_shape(
op.outputs[0], shape_type=dtypes.int32))
if delement is None:
delement = array_ops.zeros_like(op.outputs[1])
return gen_list_ops.tensor_list_push_back(dlist, delement), None
@ops.RegisterGradient("TensorListStack")
def _TensorListStackGrad(unused_op, dtensor):
return tensor_list_from_tensor(dtensor, element_shape=dtensor.shape[1:]), None
@ops.RegisterGradient("TensorListConcat")
@ops.RegisterGradient("TensorListConcatV2")
def _TensorListConcatGrad(op, dtensor, unused_dlengths):
"""Gradient function for TensorListConcat."""
dlist = tensor_list_split(
dtensor,
element_shape=gen_list_ops.tensor_list_element_shape(
op.inputs[0], shape_type=dtypes.int32),
lengths=op.outputs[1])
if op.type == "TensorListConcatV2":
return dlist, None, None
else:
return dlist
@ops.RegisterGradient("TensorListSplit")
def _TensorListSplitGrad(op, dlist):
tensor, _, lengths = op.inputs
element_shape = array_ops.slice(array_ops.shape(tensor), [1], [-1])
element_shape = array_ops.concat([[-1], element_shape], axis=0)
return gen_list_ops.tensor_list_concat_v2(
dlist,
element_shape=element_shape,
leading_dims=lengths,
element_dtype=op.inputs[0].dtype)[0], None, None
@ops.RegisterGradient("TensorListFromTensor")
def _TensorListFromTensorGrad(op, dlist):
"""Gradient for TensorListFromTensor."""
t = op.inputs[0]
if t.shape.dims and t.shape.dims[0].value is not None:
num_elements = t.shape.dims[0].value
else:
num_elements = None
if dlist is None:
dlist = empty_tensor_list(
element_dtype=t.dtype,
element_shape=gen_list_ops.tensor_list_element_shape(
op.outputs[0], shape_type=dtypes.int32))
tensor_grad = gen_list_ops.tensor_list_stack(
dlist,
element_shape=array_ops.slice(array_ops.shape(t), [1], [-1]),
element_dtype=t.dtype,
num_elements=num_elements)
shape_grad = None
return tensor_grad, shape_grad
@ops.RegisterGradient("TensorListGetItem")
def _TensorListGetItemGrad(op, ditem):
"""Gradient for TensorListGetItem."""
list_size = gen_list_ops.tensor_list_length(op.inputs[0])
list_grad = gen_list_ops.tensor_list_set_item(
gen_list_ops.tensor_list_reserve(
gen_list_ops.tensor_list_element_shape(op.inputs[0],
shape_type=dtypes.int32),
list_size, element_dtype=ditem.dtype),
index=op.inputs[1],
item=ditem)
index_grad = None
element_shape_grad = None
return list_grad, index_grad, element_shape_grad
@ops.RegisterGradient("TensorListSetItem")
def _TensorListSetItemGrad(op, dlist):
"""Gradient function for TensorListSetItem."""
_, index, item = op.inputs
list_grad = gen_list_ops.tensor_list_set_item(
dlist, index=index, item=array_ops.zeros_like(item))
index_grad = None
element_grad = tensor_list_get_item(
dlist,
index,
element_shape=array_ops.shape(item),
element_dtype=item.dtype)
return list_grad, index_grad, element_grad
@ops.RegisterGradient("TensorListResize")
def _TensorListResizeGrad(op, dlist):
input_list, _ = op.inputs
input_list_size = gen_list_ops.tensor_list_length(input_list)
return gen_list_ops.tensor_list_resize(dlist, input_list_size), None
@ops.RegisterGradient("TensorListGather")
def _TensorListGatherGrad(op, dtensor):
"""Gradient function for TensorListGather."""
input_list, indices, _ = op.inputs
element_shape = gen_list_ops.tensor_list_element_shape(
input_list, shape_type=dtypes.int32)
num_elements = gen_list_ops.tensor_list_length(input_list)
dlist = tensor_list_reserve(element_shape, num_elements, dtensor.dtype)
dlist = tensor_list_scatter(
tensor=dtensor, indices=indices, input_handle=dlist)
return dlist, None, None
@ops.RegisterGradient("TensorListScatter")
@ops.RegisterGradient("TensorListScatterV2")
def _TensorListScatterGrad(op, dlist):
"""Gradient function for TensorListScatter."""
tensor = op.inputs[0]
indices = op.inputs[1]
dtensor = gen_list_ops.tensor_list_gather(
dlist,
indices,
element_shape=array_ops.slice(array_ops.shape(tensor), [1], [-1]),
element_dtype=tensor.dtype)
if op.type == "TensorListScatterV2":
return dtensor, None, None, None
else:
return dtensor, None, None
@ops.RegisterGradient("TensorListScatterIntoExistingList")
def _TensorListScatterIntoExistingListGrad(op, dlist):
"""Gradient function for TensorListScatterIntoExistingList."""
_, tensor, indices = op.inputs
dtensor = gen_list_ops.tensor_list_gather(
dlist,
indices,
element_shape=array_ops.slice(array_ops.shape(tensor), [1], [-1]),
element_dtype=tensor.dtype)
zeros = array_ops.zeros_like(tensor)
dlist = tensor_list_scatter(zeros, indices, indices, input_handle=dlist)
return dlist, dtensor, None
def _build_element_shape(shape):
"""Converts shape to a format understood by list_ops for element_shape.
If `shape` is already a `Tensor` it is returned as-is. We do not perform a
type check here.
If shape is None or a TensorShape with unknown rank, -1 is returned.
  If shape is a scalar, an empty int32 tensor is returned. Note we do not
  directly return an empty list, since ops.convert_to_tensor would convert it
  to a float32, which is not a valid type for element_shape.
If shape is a sequence of dims, None's in the list are replaced with -1. We
do not check the dtype of the other dims.
Args:
shape: Could be None, Tensor, TensorShape or a list of dims (each dim could
be a None, scalar or Tensor).
Returns:
A None-free shape that can be converted to a tensor.
"""
if isinstance(shape, ops.Tensor):
return shape
if isinstance(shape, tensor_shape.TensorShape):
# `TensorShape.as_list` requires rank to be known.
shape = shape.as_list() if shape else None
# Shape is unknown.
if shape is None:
return -1
# Shape is numpy array or a scalar.
if isinstance(shape, (np.ndarray, np.generic)) or not shape:
return ops.convert_to_tensor(shape, dtype=dtypes.int32)
# Shape is a sequence of dimensions. Convert None dims to -1.
def convert(val):
if val is None:
return -1
if isinstance(val, ops.Tensor):
return val
if isinstance(val, tensor_shape.Dimension):
return val.value if val.value is not None else -1
return val
return [convert(d) for d in shape]
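# Illustrative sketch of the conversion rules documented above (inputs are
# hypothetical):
#   _build_element_shape(None)                            -> -1
#   _build_element_shape(tensor_shape.TensorShape(None))  -> -1
#   _build_element_shape([])                              -> empty int32 tensor (scalar elements)
#   _build_element_shape([None, 3])                       -> [-1, 3]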
|
annarev/tensorflow
|
tensorflow/python/ops/list_ops.py
|
Python
|
apache-2.0
| 14,846 | 0.00714 |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'SocialAccount.uid'
db.alter_column(u'socialaccount_socialaccount', 'uid', self.gf('django.db.models.fields.CharField')(max_length=191))
# Changing field 'SocialApp.secret'
db.alter_column(u'socialaccount_socialapp', 'secret', self.gf('django.db.models.fields.CharField')(max_length=191))
# Changing field 'SocialApp.client_id'
db.alter_column(u'socialaccount_socialapp', 'client_id', self.gf('django.db.models.fields.CharField')(max_length=191))
# Changing field 'SocialApp.key'
db.alter_column(u'socialaccount_socialapp', 'key', self.gf('django.db.models.fields.CharField')(max_length=191))
def backwards(self, orm):
# Changing field 'SocialAccount.uid'
db.alter_column(u'socialaccount_socialaccount', 'uid', self.gf('django.db.models.fields.CharField')(max_length=255))
# Changing field 'SocialApp.secret'
db.alter_column(u'socialaccount_socialapp', 'secret', self.gf('django.db.models.fields.CharField')(max_length=100))
# Changing field 'SocialApp.client_id'
db.alter_column(u'socialaccount_socialapp', 'client_id', self.gf('django.db.models.fields.CharField')(max_length=100))
# Changing field 'SocialApp.key'
db.alter_column(u'socialaccount_socialapp', 'key', self.gf('django.db.models.fields.CharField')(max_length=100))
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'sites.site': {
'Meta': {'ordering': "(u'domain',)", 'object_name': 'Site', 'db_table': "u'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'socialaccount.socialaccount': {
'Meta': {'unique_together': "(('provider', 'uid'),)", 'object_name': 'SocialAccount'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'extra_data': ('allauth.socialaccount.fields.JSONField', [], {'default': "'{}'"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'uid': ('django.db.models.fields.CharField', [], {'max_length': '191'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'socialaccount.socialapp': {
'Meta': {'object_name': 'SocialApp'},
'client_id': ('django.db.models.fields.CharField', [], {'max_length': '191'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '191', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'secret': ('django.db.models.fields.CharField', [], {'max_length': '191'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['sites.Site']", 'symmetrical': 'False', 'blank': 'True'})
},
u'socialaccount.socialtoken': {
'Meta': {'unique_together': "(('app', 'account'),)", 'object_name': 'SocialToken'},
'account': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['socialaccount.SocialAccount']"}),
'app': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['socialaccount.SocialApp']"}),
'expires_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'token': ('django.db.models.fields.TextField', [], {}),
'token_secret': ('django.db.models.fields.TextField', [], {'blank': 'True'})
}
}
complete_apps = ['socialaccount']
|
Alexander-M-Waldman/local_currency_site
|
lib/python2.7/site-packages/allauth/socialaccount/south_migrations/0013_auto__chg_field_socialaccount_uid__chg_field_socialapp_secret__chg_fie.py
|
Python
|
gpl-3.0
| 7,605 | 0.00789 |
"""This demo demonstrates how to move the vertex coordinates of a
boundary mesh and then updating the interior vertex coordinates of the
original mesh by suitably interpolating the vertex coordinates (useful
for implementation of ALE methods)."""
# Copyright (C) 2008 Solveig Bruvoll and Anders Logg
#
# This file is part of DOLFIN.
#
# DOLFIN is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DOLFIN is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with DOLFIN. If not, see <http://www.gnu.org/licenses/>.
#
# First added: 2008-05-02
# Last changed: 2008-12-12
from dolfin import *
print "This demo is presently broken. See https://bugs.launchpad.net/dolfin/+bug/1047641"
exit()
# Create mesh
mesh = UnitSquareMesh(20, 20)
# Create boundary mesh
boundary = BoundaryMesh(mesh)
# Move vertices in boundary
for x in boundary.coordinates():
x[0] *= 3.0
x[1] += 0.1*sin(5.0*x[0])
# Move mesh
mesh.move(boundary)
# Plot mesh
plot(mesh, interactive=True)
|
alogg/dolfin
|
demo/undocumented/ale/python/demo_ale.py
|
Python
|
gpl-3.0
| 1,429 | 0.0007 |
#!/usr/bin/env python
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Michael A.G. Aivazis
# California Institute of Technology
# (C) 1998-2005 All Rights Reserved
#
# <LicenseText>
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
from pyre.components.Component import Component
class Device(Component):
class Inventory(Component.Inventory):
from RendererFacility import RendererFacility
renderer = RendererFacility()
renderer.meta['tip'] = 'the facility that controls how the messages are formatted'
def createDevice(self):
        raise NotImplementedError("class '%s' must override 'createDevice'" % self.__class__.__name__)
def __init__(self, name):
Component.__init__(self, name, "journal-device")
self.device = None
return
def _init(self):
device = self.createDevice()
renderer = self.inventory.renderer.renderer
device.renderer = renderer
self.device = device
return
# version
__id__ = "$Id: Device.py,v 1.2 2005/03/10 06:16:37 aivazis Exp $"
# End of file
|
bmi-forum/bmi-pyre
|
pythia-0.8/packages/journal/journal/components/Device.py
|
Python
|
gpl-2.0
| 1,235 | 0.006478 |
import random
import datetime
import time
import hashlib
from django.db import models
from django.conf import settings
from django.urls import reverse
from django.contrib.auth.models import User, Group
from django.db.models.signals import post_save
from djangopress.core.models import Property
from django.utils import timezone
from PIL import Image
DEFAULT_USER_GROUP = getattr(settings, 'DEFAULT_USER_GROUP', None)
def avatar_path(instance, filename):
return ("avatars/%s/%s-%s-%s" % (time.strftime("%y/%m"), instance.user.pk, instance.user.username.lower(), filename.lower()))
class UserProfile(models.Model):
EMAIL_SETTINGS = (
('HI', 'Hide Email'),
('SW', 'Show Email'),
('HB', 'Use Web Form')
)
title = models.CharField(max_length=100, default="New member")
homepage = models.CharField(max_length=100, blank=True, null=True)
#IM contact (jabber, icq, msn, aim, yahoo, gtalk, twitter, facebook)
location = models.CharField(max_length=50, blank=True, null=True)
avatar = models.ImageField(blank=True, null=True, upload_to=avatar_path)
signature = models.TextField(blank=True, null=True)
timezone = models.CharField(max_length=50, null=True, blank=True)
language = models.CharField(max_length=50, null=True, blank=True)
registration_ip = models.GenericIPAddressField(blank=True, null=True, )
last_ip_used = models.GenericIPAddressField(blank=True, null=True)
admin_note = models.TextField(blank=True, null=True)
activate_key = models.CharField(max_length=127, blank=True, editable=False)
activate_key_expirary = models.DateTimeField(blank=True, editable=False)
banned = models.BooleanField(default=False)
#remember_between_visits = models.BooleanField(default=True)
user = models.OneToOneField(User, related_name="profile", on_delete=models.CASCADE)
email_settings = models.CharField(choices=EMAIL_SETTINGS, default='HI', max_length=2)
gender = models.CharField(max_length=1, blank=True, null=True, default=None, choices=(('', 'Private'), ('M', 'Male'), ('F', 'Female')))
date_of_birth = models.DateTimeField(blank=True, null=True)
def get_ip(self):
if self.last_ip_used:
return self.last_ip_used
return self.registration_ip
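    # __getattr__ below exposes social accounts as dynamic attributes: for
    # example, profile.social_twitter returns the user's UserSocial row whose
    # account is "twitter" (the "social_" prefix is 7 characters, hence name[7:]).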
def __getattr__(self, name):
if name.startswith("social_"):
try:
return self.user.social.filter(account=name[7:])[0]
except:
raise AttributeError(name)
return super(UserProfile, self).__getattr__(name)
def get_absolute_url(self):
return reverse('accounts-profile', kwargs={"username": self.user.username})
def __init__(self, *args, **kwargs):
super(UserProfile, self).__init__(*args, **kwargs)
self._banned = self.banned
self._avatar = self.avatar
def save(self, force_insert=False, force_update=False):
if self._banned == False and self.banned == True:
# if we banned them, they can't then login
self.user.is_active = False
self.user.save()
if self._avatar != self.avatar and self.avatar:
image = Image.open(self.avatar)
size = settings.ACCOUNTS_USER_LIMITS.get('avatar', {}).get('size', 50)
            # Image.resize returns a new image rather than resizing in place.
            image = image.resize((size, size), Image.ANTIALIAS)
image.save(self.avatar.path)
super(UserProfile, self).save(force_insert, force_update)
self._banned = self.banned
self._avatar = self.avatar
def set_activate_key(self):
salt = hashlib.sha1((str(random.random()) + str(random.random())).encode('utf-8')).hexdigest()[:5]
key = "".join(str(item) for item in (self.user.username,
self.user.email, datetime.datetime.now()))
hsh = hashlib.sha1((salt + key).encode('utf-8')).hexdigest()
self.activate_key = hsh
self.activate_key_expirary = datetime.datetime.fromtimestamp(time.time() + (7 * 24 * 60 * 60))
def check_activate_key(self, hsh):
return (hsh == self.activate_key
and timezone.now() <= self.activate_key_expirary)
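    # Typical lifecycle (a sketch, not enforced here): create_profile() below
    # calls set_activate_key() on signup, and an activation view would call
    # check_activate_key() with the hash from the emailed link before
    # enabling the account.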
class UserSocial(models.Model):
ACCOUNTS = (
('twitter', 'Twitter'),
('google_plus', 'Google Plus'),
('facebook', 'Facebook'),
('linkedin', 'Linked In'),
('pinterest', 'Pinterest'),
)
account = models.CharField(max_length=20, choices=ACCOUNTS)
value = models.CharField(max_length=100)
user_profile = models.ForeignKey(User, related_name="social", on_delete=models.CASCADE)
class UserProperty(Property):
user_profile = models.ForeignKey(User, related_name="properties", on_delete=models.CASCADE)
def create_profile(sender, **kargs):
if kargs.get("created", False):
profile = UserProfile(user=kargs.get("instance"))
profile.set_activate_key()
profile.save()
post_save.connect(create_profile, User, dispatch_uid="djangopress.accounts.create_profile")
def add_to_group(sender, **kargs):
if DEFAULT_USER_GROUP and kargs.get("created", False):
user = kargs.get("instance")
user.groups.add(Group.objects.get(name=DEFAULT_USER_GROUP))
post_save.connect(add_to_group, User, dispatch_uid="djangopress.accounts.add_to_group")
|
codefisher/djangopress
|
djangopress/accounts/models.py
|
Python
|
mit
| 5,282 | 0.006437 |
#!/usr/bin/env python
#-------------------------------------------------------------------------------
import os
import sys
bin_dir = os.path.dirname(os.path.abspath(__file__))
pkg_dir = os.path.abspath(os.path.join(bin_dir, ".."))
sys.path.append(pkg_dir)
#-------------------------------------------------------------------------------
import argparse
import collections
import cktapps
from cktapps import apps
from cktapps.formats import spice
#-------------------------------------------------------------------------------
def main(args=None):
parser = argparse.ArgumentParser(description="Report net capacitances "
"and fanout")
parser.add_argument('spice_files', metavar='file', nargs='+',
type=argparse.FileType('r'), help='spice netlist file(s)')
parser.add_argument('--lib', type=argparse.FileType('r'),
help='lib file(s) with model (e.g. nch, pch) defintions')
parser.add_argument('--cell', help='name of the cell to be analyzed '
'(top cell by default)')
arg_ns = parser.parse_args(args)
#---------------------------------------------------------------------------
ckt = cktapps.Ckt()
if arg_ns.lib:
ckt.read_spice(arg_ns.lib)
for spice_file in arg_ns.spice_files:
ckt.read_spice(spice_file)
ckt.link()
#topcellnames = [cell.name for cell in ckt.get_topcells()]
#print "Top cells: %s" % topcellnames
if arg_ns.cell:
cell = ckt.get_cell(arg_ns.cell)
else:
topcells = ckt.get_topcells()
if topcells:
cell = topcells[0]
else:
cell = ckt
#print cell
#print "-"*80
#apps.report_hierarchy(cell)
#ckt.write_spice(cell)
#print "-"*80
cell.ungroup(flatten=True)
#print cell
#ckt.write_spice(cell)
#print "-"*80
lib = arg_ns.lib.name
netlists = [f.name for f in arg_ns.spice_files]
apps.report_net(cell, lib, netlists)
#print "-"*80
#apps.report_hierarchy(cell)
return ckt
#-------------------------------------------------------------------------------
if __name__ == "__main__":
ckt = main()
|
r-rathi/ckt-apps
|
bin/report_net.py
|
Python
|
mit
| 2,251 | 0.01466 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: aci_filter_entry
short_description: Manage filter entries on Cisco ACI fabrics (vz:Entry)
description:
- Manage filter entries for a filter on Cisco ACI fabrics.
- More information from the internal APIC class
I(vz:Entry) at U(https://developer.cisco.com/media/mim-ref/MO-vzEntry.html).
author:
- Swetha Chunduri (@schunduri)
- Dag Wieers (@dagwieers)
- Jacob McGill (@jmcgill298)
version_added: '2.4'
requirements:
- Tested with ACI Fabric 1.0(3f)+
notes:
- The C(tenant) and C(filter) used must exist before using this module in your playbook.
The M(aci_tenant) and M(aci_filter) modules can be used for this.
options:
arp_flag:
description:
- The arp flag to use when the ether_type is arp.
choices: [ arp_reply, arp_request, unspecified ]
description:
description:
- Description for the Filter Entry.
aliases: [ descr ]
dst_port:
description:
- Used to set both destination start and end ports to the same value when ip_protocol is tcp or udp.
choices: [ Valid TCP/UDP Port Ranges]
dst_port_end:
description:
- Used to set the destination end port when ip_protocol is tcp or udp.
choices: [ Valid TCP/UDP Port Ranges]
dst_port_start:
description:
- Used to set the destination start port when ip_protocol is tcp or udp.
choices: [ Valid TCP/UDP Port Ranges]
entry:
description:
    - The name of the Filter Entry.
aliases: [ entry_name, name ]
ether_type:
description:
- The Ethernet type.
choices: [ arp, fcoe, ip, mac_security, mpls_ucast, trill, unspecified ]
filter_name:
description:
    - The name of the Filter that the entry should belong to.
icmp_msg_type:
description:
- ICMPv4 message type; used when ip_protocol is icmp.
choices: [ dst_unreachable, echo, echo_reply, src_quench, time_exceeded, unspecified ]
icmp6_msg_type:
description:
- ICMPv6 message type; used when ip_protocol is icmpv6.
choices: [ dst_unreachable, echo_request, echo_reply, neighbor_advertisement, neighbor_solicitation, redirect, time_exceeded, unspecified ]
ip_protocol:
description:
- The IP Protocol type when ether_type is ip.
choices: [ eigrp, egp, icmp, icmpv6, igmp, igp, l2tp, ospfigp, pim, tcp, udp, unspecified ]
state:
description:
- present, absent, query
default: present
choices: [ absent, present, query ]
stateful:
description:
- Determines the statefulness of the filter entry.
tenant:
description:
- The name of the tenant.
aliases: [ tenant_name ]
extends_documentation_fragment: aci
'''
EXAMPLES = r'''
- aci_filter_entry:
action: "{{ action }}"
entry: "{{ entry }}"
tenant: "{{ tenant }}"
ether_name: "{{ ether_name }}"
icmp_msg_type: "{{ icmp_msg_type }}"
filter_name: "{{ filter_name }}"
descr: "{{ descr }}"
host: "{{ inventory_hostname }}"
username: "{{ user }}"
password: "{{ pass }}"
protocol: "{{ protocol }}"
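# A sketch of adding an https entry (all values below are placeholders):
- aci_filter_entry:
    entry: https_entry
    tenant: prod
    filter_name: web_filter
    ether_type: ip
    ip_protocol: tcp
    dst_port: 443
    state: present
    host: "{{ inventory_hostname }}"
    username: "{{ user }}"
    password: "{{ pass }}"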
'''
RETURN = ''' # '''
from ansible.module_utils.aci import ACIModule, aci_argument_spec
from ansible.module_utils.basic import AnsibleModule
VALID_ARP_FLAGS = ['arp_reply', 'arp_request', 'unspecified']
VALID_ETHER_TYPES = ['arp', 'fcoe', 'ip', 'mac_security', 'mpls_ucast', 'trill', 'unspecified']
VALID_ICMP_TYPES = ['dst_unreachable', 'echo', 'echo_reply', 'src_quench', 'time_exceeded',
'unspecified', 'echo-rep', 'dst-unreach']
VALID_ICMP6_TYPES = ['dst_unreachable', 'echo_request', 'echo_reply', 'neighbor_advertisement',
'neighbor_solicitation', 'redirect', 'time_exceeded', 'unspecified']
VALID_IP_PROTOCOLS = ['eigrp', 'egp', 'icmp', 'icmpv6', 'igmp', 'igp', 'l2tp', 'ospfigp', 'pim', 'tcp', 'udp', 'unspecified']
# mapping dicts are used to normalize the proposed data to what the APIC expects, which will keep diffs accurate
ARP_FLAG_MAPPING = dict(arp_reply='reply', arp_request='req', unspecified=None)
FILTER_PORT_MAPPING = {'443': 'https', '25': 'smtp', '80': 'http', '20': 'ftpData', '53': 'dns', '110': 'pop3', '554': 'rtsp'}
ICMP_MAPPING = {'dst_unreachable': 'dst-unreach', 'echo': 'echo', 'echo_reply': 'echo-rep', 'src_quench': 'src-quench',
'time_exceeded': 'time-exceeded', 'unspecified': 'unspecified', 'echo-re': 'echo-rep', 'dst-unreach': 'dst-unreach'}
ICMP6_MAPPING = dict(dst_unreachable='dst-unreach', echo_request='echo-req', echo_reply='echo-rep', neighbor_advertisement='nbr-advert',
neighbor_solicitation='nbr-solicit', redirect='redirect', time_exceeded='time-exceeded', unspecified='unspecified')
def main():
argument_spec = aci_argument_spec
argument_spec.update(
arp_flag=dict(type='str', choices=VALID_ARP_FLAGS),
description=dict(type='str'),
dst_port=dict(type='str'),
dst_port_end=dict(type='str'),
dst_port_start=dict(type='str'),
entry=dict(type='str', aliases=['entry_name', 'name']),
ether_type=dict(choices=VALID_ETHER_TYPES, type='str'),
filter_name=dict(type='str'),
icmp_msg_type=dict(type='str', choices=VALID_ICMP_TYPES),
icmp6_msg_type=dict(type='str', choices=VALID_ICMP6_TYPES),
ip_protocol=dict(choices=VALID_IP_PROTOCOLS, type='str'),
state=dict(type='str', default='present', choices=['absent', 'present', 'query']),
stateful=dict(type='str', choices=['no', 'yes']),
tenant=dict(type="str", aliases=['tenant_name'])
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
)
arp_flag = module.params['arp_flag']
if arp_flag is not None:
arp_flag = ARP_FLAG_MAPPING[arp_flag]
description = module.params['description']
dst_port = module.params['dst_port']
if dst_port in FILTER_PORT_MAPPING.keys():
dst_port = FILTER_PORT_MAPPING[dst_port]
dst_end = module.params['dst_port_end']
if dst_end in FILTER_PORT_MAPPING.keys():
dst_end = FILTER_PORT_MAPPING[dst_end]
dst_start = module.params['dst_port_start']
if dst_start in FILTER_PORT_MAPPING.keys():
dst_start = FILTER_PORT_MAPPING[dst_start]
entry = module.params['entry']
ether_type = module.params['ether_type']
filter_name = module.params['filter_name']
icmp_msg_type = module.params['icmp_msg_type']
if icmp_msg_type is not None:
icmp_msg_type = ICMP_MAPPING[icmp_msg_type]
icmp6_msg_type = module.params['icmp6_msg_type']
if icmp6_msg_type is not None:
icmp6_msg_type = ICMP6_MAPPING[icmp6_msg_type]
ip_protocol = module.params['ip_protocol']
state = module.params['state']
stateful = module.params['stateful']
tenant = module.params['tenant']
aci = ACIModule(module)
# validate that dst_port is not passed with dst_start or dst_end
if dst_port is not None and (dst_end is not None or dst_start is not None):
module.fail_json(msg="Parameter 'dst_port' cannot be used with 'dst_end' and 'dst_start'")
elif dst_port is not None:
dst_end = dst_port
dst_start = dst_port
# validate that filter_name is not passed without tenant
if filter_name is not None and tenant is None:
module.fail_json(msg="Parameter 'filter_name' cannot be used without 'tenant'")
# TODO: Think through the logic here and see if there is a better way
if entry is not None:
# fail when entry is provided without tenant and filter_name
if tenant is not None and filter_name is not None:
path = 'api/mo/uni/tn-%(tenant)s/flt-%(filter_name)s/e-%(entry)s.json' % module.params
elif tenant is not None and state == 'query':
path = 'api/mo/uni/tn-%(tenant)s.json?rsp-subtree=full&rsp-subtree-class=vzEntry&rsp-subtree-filter=eq(vzEntry.name, \
\"%(entry)s\")&rsp-subtree-include=no-scoped' % module.params
else:
path = 'api/class/vzEntry.json?query-target-filter=eq(vzEntry.name, \"%(entry)s\")' % module.params
elif state == 'query':
if tenant is None:
path = 'api/class/vzEntry.json'
else:
path = 'api/mo/uni/tn-%(tenant)s.json?rsp-subtree=full&rsp-subtree-class=vzEntry&rsp-subtree-include=no-scoped' % module.params
else:
module.fail_json(msg="Parameters 'tenant', 'filter_name', and 'entry' are required for state 'absent' or 'present'")
aci.result['url'] = '%(protocol)s://%(hostname)s/' % aci.params + path
aci.get_existing()
if state == 'present':
# Filter out module params with null values
aci.payload(aci_class='vzEntry', class_config=dict(arpOpc=arp_flag,
descr=description,
dFromPort=dst_start,
dToPort=dst_end,
etherT=ether_type,
icmpv4T=icmp_msg_type,
icmpv6T=icmp6_msg_type,
name=entry,
prot=ip_protocol,
stateful=stateful))
# generate config diff which will be used as POST request body
aci.get_diff(aci_class='vzEntry')
# submit changes if module not in check_mode and the proposed is different than existing
aci.post_config()
elif state == 'absent':
aci.delete_config()
module.exit_json(**aci.result)
if __name__ == "__main__":
main()
|
DazWorrall/ansible
|
lib/ansible/modules/network/aci/aci_filter_entry.py
|
Python
|
gpl-3.0
| 10,104 | 0.003068 |
# -*- coding: utf-8 -*-
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('events', '0021_auto_20171023_1358'),
]
operations = [
migrations.AlterField(
model_name='inductioninterest',
name='age',
field=models.CharField(max_length=100, choices=[(b'', b'-----'), (b'20to25', b'20 to 25 years'), (b'26to30', b'26 to 30 years'), (b'31to35', b'31 to 35 years'), (b'35andabove', b'Above 35 years')]),
),
migrations.AlterField(
model_name='inductioninterest',
name='designation',
field=models.CharField(max_length=100, choices=[(b'', b'-----'), (b'Lecturer', b'Lecturer'), (b'AssistantProfessor', b'Assistant Professor'), (b'AssociateProfessor', b'Associate Professor'), (b'Professor', b'Professor'), (b'Other', b'Other')]),
),
migrations.AlterField(
model_name='inductioninterest',
name='experience_in_college',
field=models.CharField(max_length=100, choices=[(b'', b'-----'), (b'Lessthan1year', b'Less than 1 year'), (b'Morethan1yearbutlessthan2years', b'More than 1 year, but less than 2 years'), (b'Morethan2yearsbutlessthan5years', b'More than 2 years but less than 5 years'), (b'Morethan5years', b'More than 5 years')]),
),
migrations.AlterField(
model_name='inductioninterest',
name='gender',
field=models.CharField(max_length=50, choices=[(b'', b'-----'), (b'Male', b'Male'), (b'Female', b'Female')]),
),
migrations.AlterField(
model_name='inductioninterest',
name='medium_of_studies',
field=models.CharField(max_length=100, choices=[(b'', b'-----'), (b'English', b'English'), (b'Other', b'Other')]),
),
migrations.AlterField(
model_name='inductioninterest',
name='phonemob',
field=models.CharField(max_length=100),
),
migrations.AlterField(
model_name='inductioninterest',
name='specialisation',
field=models.CharField(max_length=100, choices=[(b'', b'-----'), (b'Arts', b'Arts'), (b'Science', b'Science'), (b'Commerce', b'Commerce'), (b'EngineeringorComputerScience ', b'Engineering or Computer Science'), (b'Management', b'Management'), (b'Other', b'Other')]),
),
]
|
Spoken-tutorial/spoken-website
|
events/migrations/0022_auto_20171023_1505.py
|
Python
|
gpl-3.0
| 2,412 | 0.002488 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2014-2015
#
# STIC - Universidad de La Laguna (ULL) <gesinv@ull.edu.es>
#
# This file is part of Modelado de Servicios TIC.
#
# Modelado de Servicios TIC is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Modelado de Servicios TIC is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Modelado de Servicios TIC. If not, see
# <http://www.gnu.org/licenses/>.
#
import funcionesxml
import generacionpaginas
ServicioSonda = [] # List that will hold the services of a probe (sonda)
# Creates the Nagios configuration files
def GeneraNagios():
nagstr1 = ""
nagstr2 = ""
nagstr3 = ""
funcionesxml.inicializacion()
for i in funcionesxml.SondaArray:
        # CREATION OF THE NAGIOS FILES
ficheroservicio = open("./confgSonda/servicio/" + generacionpaginas.formatstring(i[3][0]) + ".cfg","w")
ficherohost = open("./confgSonda/host/" + generacionpaginas.formatstring(i[3][0]) + ".cfg","w")
ficherohost_group = open("./confgSonda/host_group/" + generacionpaginas.formatstring(i[3][0]) + ".cfg","w")
        # CREATION OF THE SERVICE FILES
nagstr1 += "## services/" + generacionpaginas.formatstring(i[3][0]) + ".cfg \n\n"
for j in funcionesxml.getGroupServices(funcionesxml.getGroupID(i[4])):
ServicioSonda.append(funcionesxml.getBusinessServiceName(j))
#print "Servicio: "+ str(funcionesxml.getBusinessServiceName(j)) + " PUERTO: " + str(funcionesxml.getPuerto(j))+ " PROTOCOLO: " + str(funcionesxml.getProtocolo(j))+ " URL: " +str(funcionesxml.getURL(j))
for k in ServicioSonda:
nagstr1 += "define service{\n use: "
nagstr1 += k + "\n" + " host_name: " + "---\n" + " contact_groups: " + "---\n"
nagstr1 += "}\n\n"
        # CREATION OF THE HOST_GROUP FILES
nagstr2 += "## host_group/" + generacionpaginas.formatstring(i[3][0]) + ".cfg \n\n"
nagstr2 += "define hostgroup{\n hostgroup_name: " + "---\n " + "alias: " + "---\n " + "members: " + "---\n"
nagstr2 += "}\n\n"
        # CREATION OF THE HOST FILES
nagstr3 += "## host/" + generacionpaginas.formatstring(i[3][0]) + ".cfg \n\n"
nagstr3 += " "
ficheroservicio.write(nagstr1)
ficherohost.write(nagstr3)
ficherohost_group.write(nagstr2)
        ficheroservicio.close()
        ficherohost.close()
        ficherohost_group.close()
GeneraNagios()
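# Sketch of the service definitions this emits (field values are the literal
# placeholders built above, <ServiceName> stands for each service in turn):
# define service{
#  use: <ServiceName>
#  host_name: ---
#  contact_groups: ---
# }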
|
RocioDSI/Carta-Servicios-STIC
|
servicios/GeneraNagios.py
|
Python
|
agpl-3.0
| 2,787 | 0.024408 |
#!/usr/bin/python
import sys, getopt, os, urllib2
import Overc
from flask import Flask
from flask import jsonify
from flask import request
from flask_httpauth import HTTPBasicAuth
from passlib.context import CryptContext
app = Flask(__name__)
# Password hash generation with:
#python<<EOF
#from passlib.context import CryptContext
#pwd_context = CryptContext(schemes=["pbkdf2_sha256"])
#print pwd_context.encrypt("adm");
#EOF
# Default admin username and password hash, which can be overridden
# if /opt/overc-system-agent/pwfile exists with the format
# UserName:PasswordHash
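# e.g. a pwfile might contain (hypothetical, hash truncated):
#   adm:$pbkdf2-sha256$29000$i3EO...gqA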
app.config['ADMIN_USERNAME'] = 'adm'
app.config['ADMIN_PW_HASH'] = '$pbkdf2-sha256$29000$i3EOIeT8P8dY6703BgBgbA$XyesHZZmu.O54HfiwIhSd00rMJpyCKhH0gsh1atxgqA'
app.config['PW_FILE'] = "/opt/overc-system-agent/pwfile"
pwd_context = CryptContext(schemes=["pbkdf2_sha256"])
# Flask extension
auth = HTTPBasicAuth()
@auth.verify_password
def verify_password(username, password):
if username != app.config['ADMIN_USERNAME']:
return False
if pwd_context.verify(password, app.config['ADMIN_PW_HASH']):
return True
return False
def json_msg(s):
message = {}
message['result'] = s.replace("\n", ";");
resp = jsonify(message)
return resp
@app.route('/system/rollback')
@auth.login_required
def system_rollback():
usage = 'Usage: ' + request.url_root + 'system/rollback?template=[dom0]'
overc=Overc.Overc()
template = request.args.get('template')
if template != 'dom0':
usage += "\n The only supported template is 'dom0'"
return json_msg(usage)
print "System will rollback and reboot!"
overc._system_rollback(template)
@app.route('/system/upgrade')
@auth.login_required
def system_upgrade():
usage = 'Usage: ' + request.url_root + 'system/upgrade?template=[dom0]&reboot=[True|False]&force=[True|False]'
overc=Overc.Overc()
reboot_s = request.args.get('reboot')
force_s = request.args.get('force')
template = request.args.get('template')
reboot=False
force=False
skipscan=True
skip_del=False
if template != 'dom0':
usage += "\n The only supported template is 'dom0'"
return json_msg(usage)
if reboot_s == "True":
print "do reboot"
if force_s == "True":
print "force upgrade"
force=True
overc._system_upgrade(template, reboot, force, skipscan, skip_del)
return json_msg(overc.message)
@app.route('/host/rollback')
@auth.login_required
def host_rollback():
overc=Overc.Overc()
overc.host_rollback()
return json_msg(overc.message)
@app.route('/host/upgrade')
@auth.login_required
def host_upgrade():
usage = 'Usage: ' + request.url_root + 'host/upgrade?reboot=[True|False]&force=[True|False]'
overc=Overc.Overc()
reboot_s = request.args.get('reboot')
force_s = request.args.get('force')
reboot=False
force=False
if reboot_s == "True":
print "do reboot"
reboot = True
if force_s == "True":
print "do force to upgrade"
force=True
overc._host_upgrade(reboot, force)
return json_msg(overc.message)
@app.route('/host/update')
@auth.login_required
def host_update():
overc=Overc.Overc()
overc.host_update()
return json_msg(overc.message)
@app.route('/host/newer')
@auth.login_required
def host_newer():
overc=Overc.Overc()
overc.host_newer()
return json_msg(overc.message)
@app.route('/container/rollback')
@auth.login_required
def container_rollback():
usage = 'Usage: ' + request.url_root + 'container/rollback?name=<container name>&snapshot=<snapshot name>&template=<template name> [snapshot optional]'
overc=Overc.Overc()
container_name = request.args.get('name')
snapshot = request.args.get('snapshot')
template = request.args.get('template')
if container_name is None or template is None:
return json_msg(usage)
overc._container_rollback(container_name, snapshot, template)
return json_msg(overc.message)
@app.route('/container/update')
@auth.login_required
def container_update():
usage = 'Usage: ' + request.url_root + 'container/update?template=<template name>'
overc=Overc.Overc()
template = request.args.get('template')
if template is None:
return json_msg(usage)
overc._container_update(template)
return json_msg(overc.message)
@app.route('/container/list')
@auth.login_required
def container_list():
usage = 'Usage: ' + request.url_root + 'container/list?template=<template name>'
overc=Overc.Overc()
template = request.args.get('template')
if template is None:
return json_msg(usage)
overc._container_list(template)
return json_msg(overc.message)
@app.route('/container/snapshot')
@auth.login_required
def container_snapshot():
usage = 'Usage: ' + request.url_root + 'container/snapshot?name=<container name>&template=<template name>'
overc=Overc.Overc()
template = request.args.get('template')
container_name = request.args.get('name')
if template is None or container_name is None:
return json_msg(usage)
overc._container_snapshot(container_name, template)
return json_msg(overc.message)
@app.route('/container/list_snapshots')
@auth.login_required
def container_list_snapshots():
usage = 'Usage: ' + request.url_root + 'container/list_snapshots?name=<container name>&template=<template name>'
overc=Overc.Overc()
container_name = request.args.get('name')
template = request.args.get('template')
if container_name is None or template is None:
return json_msg(usage)
overc._container_snapshot_list(container_name, template)
return json_msg(overc.message)
@app.route('/container/send_image')
@auth.login_required
def container_send_image():
usage = 'Usage: ' + request.url_root + 'container/send_image?url=<image url>&template=<template name>'
overc=Overc.Overc()
url = request.args.get('url')
template = request.args.get('template')
if url is None or template is None:
return json_msg(usage)
template_list = os.listdir("/etc/overc/container")
if template not in template_list:
usage += "\n The template name is not valid"
return json_msg(usage)
req = urllib2.Request(url)
req.get_method = lambda: 'HEAD'
try:
status = urllib2.urlopen(req)
except Exception,e:
usage += "\n The image url is not valid"
return json_msg(usage)
re_code = status.getcode()
if ((re_code != None) and (re_code != 200)):
usage += "\n The image url is not valid, http status code is: %s" % re_code
return json_msg(usage)
overc._container_send_image(template, url)
return json_msg(overc.message)
@app.route('/container/activate')
@auth.login_required
def container_activate():
usage = 'Usage: ' + request.url_root + 'container/activate?name=<container name>&template=<template name>'
overc=Overc.Overc()
container_name = request.args.get('name')
template = request.args.get('template')
if container_name is None or template is None:
return json_msg(usage)
force = True
overc._container_activate(container_name, template, force)
return json_msg(overc.message)
@app.route('/container/start')
@auth.login_required
def container_start():
usage = 'Usage: ' + request.url_root + 'container/start?name=<container name>&template=<template name>'
overc=Overc.Overc()
container_name = request.args.get('name')
template = request.args.get('template')
if container_name is None or template is None:
return json_msg(usage)
overc._container_start(container_name, template)
return json_msg(overc.message)
@app.route('/container/stop')
@auth.login_required
def container_stop():
usage = 'Usage: ' + request.url_root + 'container/stop?name=<container name>&template=<template name>'
overc=Overc.Overc()
container_name = request.args.get('name')
template = request.args.get('template')
if container_name is None or template is None:
return json_msg(usage)
overc._container_stop(container_name, template)
return json_msg(overc.message)
@app.route('/container/upgrade')
@auth.login_required
def container_upgrade():
usage = 'Usage: ' + request.url_root + 'container/upgrade?name=<container name>&template=<template name>&rpm=yes|no&image=yes|no'
overc=Overc.Overc()
container_name = request.args.get('name')
template = request.args.get('template')
rpm = request.args.get('rpm')
image = request.args.get('image')
if container_name is None or template is None:
return json_msg(usage)
    if rpm is None or rpm == 'no':
        rpm_upgrade = False
    elif rpm == 'yes':
        rpm_upgrade = True
    else:
        return json_msg(usage)
    if image is None or image == 'no':
        image_upgrade = False
    elif image == 'yes':
        image_upgrade = True
    else:
        return json_msg(usage)
overc._container_upgrade(container_name, template, rpm_upgrade, image_upgrade)
return json_msg(overc.message)
@app.route('/container/delete')
@auth.login_required
def container_delete():
usage = 'Usage: ' + request.url_root + 'container/delete?name=<container name>&template=<template name>'
overc=Overc.Overc()
container_name = request.args.get('name')
template = request.args.get('template')
if container_name is None or template is None:
return json_msg(usage)
force = True
overc._container_delete(container_name, template, force)
return json_msg(overc.message)
@app.route('/container/delete_snapshots')
@auth.login_required
def container_delete_snapshots():
usage = 'Usage: ' + request.url_root + 'container/delete_snapshots?name=<container name>&template=<template name>'
overc=Overc.Overc()
container_name = request.args.get('name')
template = request.args.get('template')
if container_name is None or template is None:
return json_msg(usage)
overc._container_delete_snapshots(container_name, template)
return json_msg(overc.message)
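# Minimal client sketch for the routes above. The host, port, credentials
# and template name here are illustrative assumptions, not values shipped
# with the agent (real credentials come from PW_FILE).
def _demo_list_containers(host='127.0.0.1', port=5555,
                          user='admin', password='secret'):
    import base64
    import json
    import urllib2
    url = 'http://%s:%d/container/list?template=dom0' % (host, port)
    req = urllib2.Request(url)
    # HTTP basic auth, matching the @auth.login_required decorators above
    req.add_header('Authorization',
                   'Basic ' + base64.b64encode('%s:%s' % (user, password)))
    return json.loads(urllib2.urlopen(req).read())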
if __name__ == '__main__':
default_port = 5555
bindaddr = '0.0.0.0'
help_txt = ' [-d] [-p <port>] [-b <bind address>]'
try:
opts, args = getopt.getopt(sys.argv[1:],"b:hdp::",["port="])
except getopt.GetoptError:
print sys.argv[0], help_txt
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
print sys.argv[0], help_txt
sys.exit()
elif opt in ("-p", "--port"):
try:
default_port = int(arg)
except ValueError:
print sys.argv[0],' -p <port>'
sys.exit(2)
elif opt in ("-b", "--bind-addr"):
bindaddr = arg
elif opt == '-d':
app.debug = True
if os.path.exists(app.config['PW_FILE']):
pwfile = open(app.config['PW_FILE'],'r')
user_pw = pwfile.read()
(app.config['ADMIN_USERNAME'], app.config['ADMIN_PW_HASH']) = user_pw.rstrip().split(':')
app.run(port=default_port, host=bindaddr)
|
jwessel/meta-overc
|
meta-cube/recipes-support/overc-system-agent/files/overc-system-agent-1.2/run_server.py
|
Python
|
mit
| 11,160 | 0.007437 |
from django.contrib import admin
from . import models
from django_markdown.admin import MarkdownModelAdmin
from django_markdown.widgets import AdminMarkdownWidget
from django.db.models import TextField
# Register your models here.
class SnippetTagAdmin(admin.ModelAdmin):
list_display = ('slug',)
class SnippetAdmin(admin.ModelAdmin):
fieldsets = [
(None, {'fields': ['snippet_title', 'snippet_body', 'author', 'publish']}),
('Date Information', {'fields': ['modified_date'], 'classes': ['collapse']}),
('Tag Library', {'fields': ['snippet_tags']})
]
list_display = ('snippet_title', 'author', 'create_date', 'modified_date')
search_fields = ['snippet_title']
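    # Edit every TextField (the snippet body) with the Markdown admin widget.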
formfield_overrides = {TextField: {'widget': AdminMarkdownWidget}}
list_filter = ['create_date', 'publish']
# register the classes with the Admin site
admin.site.register(models.Snippet, SnippetAdmin)
admin.site.register(models.SnippetTag, SnippetTagAdmin)
|
craigderington/django-code-library
|
snippets/admin.py
|
Python
|
gpl-3.0
| 978 | 0.005112 |
"""
The MIT License (MIT)
Copyright (c) 2015 Robert Hodgen
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from ndb_users import users
import webapp2
from google.appengine.ext import ndb
import json
import logging
from datetime import datetime, timedelta
import model
import re
import utilities
import setup
from google.appengine.api import mail
class Projects(webapp2.RequestHandler):
def get(self, project_id=None):
""" Return a list of Projects this User has access to. """
response_object = {}
user = users.get_current_user()
if not user:
self.abort(401)
if project_id:
project_key = utilities.key_for_urlsafe_id(project_id)
if not project_key:
self.abort(400)
project = project_key.get()
if not (project and isinstance(project, model.Project)):
self.abort(404)
if user.email not in project.users:
self.abort(401)
response_object = project.json_object()
else:
# Query for Projects this User owns, contributes to, or may observe
projects = model.Project.query(model.Project.users == user.email)
response_object = []
for project in projects:
response_object.append(project.json_object())
# Send response
self.response.content_type = 'application/json'
self.response.out.write(json.dumps(response_object))
def post(self):
""" Create a new Project for this User. """
response_object = {}
user = users.get_current_user()
if not user:
self.abort(401)
# Get JSON request body
if not self.request.body:
            self.abort(400)
request_object = json.loads(self.request.body)
name = request_object.get('name')
if not name:
self.abort(400)
new_project_key = model.Project.create_project(name)
new_project = new_project_key.get()
if len(request_object.keys()) > 1:
# Process optional items...
description = request_object.get('description')
if description:
new_project.description = description
new_project.put()
setup.default_project_labels(new_project)
response_object = new_project.json_object()
# Send response
self.response.content_type = 'application/json'
self.response.out.write(json.dumps(response_object))
def put(self, project_id):
""" Update a Project. """
response_object = {}
user = users.get_current_user()
if not user:
self.abort(401)
# GET JSON request body
if not project_id or not self.request.body:
self.abort(400)
request_object = json.loads(self.request.body)
project_key = utilities.key_for_urlsafe_id(project_id)
if not project_key or len(request_object) < 1:
self.abort(400)
project = project_key.get()
if not (project and isinstance(project, model.Project)):
self.abort(404)
if (not project.is_owner(user.email) and not
project.has_contributor(user.email)):
self.abort(401)
# Process changes...
name = request_object.get('name')
if name:
project.name = name
description = request_object.get('description')
if description:
project.description = description
active = request_object.get('active')
if isinstance(active, bool):
project.active = active
project.put()
response_object = project.json_object()
# Send response
self.response.content_type = 'application/json'
self.response.out.write(json.dumps(response_object))
def delete(self, project_id):
""" Delete this user's Project. """
response_object = {}
user = users.get_current_user()
if not user:
# No user
self.abort(401)
return None
# Get JSON request body
if not project_id:
self.abort(400)
project_key = utilities.key_for_urlsafe_id(project_id)
if not project_key:
self.abort(400)
        project = project_key.get()
if not (project and isinstance(project, model.Project)):
self.abort(404)
if not project.is_owner(user.email):
self.abort(401)
ndb.delete_multi(ndb.Query(ancestor=project_key).iter(keys_only=True))
# Send response
self.response.content_type = 'application/json'
self.response.out.write(json.dumps(response_object))
class Contributors(webapp2.RequestHandler):
def post(self, project_id, contributor_email):
""" Add Contributors to this Project. """
response_object = {}
user = users.get_current_user()
if not user:
self.abort(401)
# Get JSON request body
if not project_id or not contributor_email:
self.abort(400)
project_key = utilities.key_for_urlsafe_id(project_id)
if not project_key:
self.abort(400)
project = project_key.get()
if not (project and isinstance(project, model.Project)):
self.abort(404)
# new_contributor = users.User.user_for_email(contributor_email)
# if not new_contributor:
# self.abort(404)
if not mail.is_email_valid(contributor_email):
self.abort(400)
if (not project.is_owner(user.email) and not
project.has_contributor(user.email)):
self.abort(401)
project.add_contributors([contributor_email])
utilities.send_project_contributor_email(contributor_email, user,
project)
response_object = project.json_object()
# Send response
self.response.content_type = 'application/json'
self.response.out.write(json.dumps(response_object))
def delete(self, project_id, contributor_email):
""" Remove Contributors from this Project. """
response_object = {}
user = users.get_current_user()
if not user:
self.abort(401)
# Get JSON request body
if not project_id or not contributor_email:
self.abort(400)
project_key = utilities.key_for_urlsafe_id(project_id)
if not project_key:
self.abort(400)
project = project_key.get()
if not (project and isinstance(project, model.Project)):
self.abort(404)
if not project.is_owner(user.email):
self.abort(401)
project.remove_contributors([contributor_email])
response_object = project.json_object()
# Send response
self.response.content_type = 'application/json'
self.response.out.write(json.dumps(response_object))
class TimeRecords(webapp2.RequestHandler):
def get(self, project_id, time_record_id=None):
""" List the Time Records associated with a Project. """
response_object = {}
user = users.get_current_user()
if not user:
self.abort(401)
if not project_id:
self.abort(400)
        project_key = utilities.key_for_urlsafe_id(project_id)
        if not project_key:
            self.abort(400)
        project = project_key.get()
if not (project and isinstance(project, model.Project)):
self.abort(404)
if user.email not in project.users:
self.abort(401)
if time_record_id:
# Give a specific Time Record
time_record_key = utilities.key_for_urlsafe_id(time_record_id)
if not time_record_key or (project_key != time_record_key.parent()):
self.abort(400)
time_record = time_record_key.get()
            if not (time_record and isinstance(time_record, model.TimeRecord)):
self.abort(404)
response_object = time_record.json_object()
else:
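            # Cursor-based paging: clients echo back the urlsafe cursor from
            # the previous response's X-Cursor header; when no X-Cursor
            # header comes back, the final page (page size 15) was reached.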
if self.request.GET.get('cursor'):
# Cursor-based request
cursor = ndb.Cursor(urlsafe=self.request.GET.get('cursor'))
time_records, next_cursor, more = model.TimeRecord.query(
ancestor=project_key).order(-model.TimeRecord.created)\
.fetch_page(15, start_cursor=cursor)
response_object = []
for time_record in time_records:
response_object.append(time_record.json_object())
if more:
self.response.headers.add('X-Cursor', next_cursor.urlsafe())
else:
# List all Time Records
time_records, next_cursor, more = model.TimeRecord.query(
ancestor=project_key).order(-model.TimeRecord.created)\
.fetch_page(15)
response_object = []
for time_record in time_records:
response_object.append(time_record.json_object())
if more:
self.response.headers.add('X-Cursor', next_cursor.urlsafe())
# Send response
self.response.content_type = 'application/json'
self.response.out.write(json.dumps(response_object))
def post(self, project_id):
""" Create a new Time Record associated with this Project. """
response_object = {}
user = users.get_current_user()
if not user:
self.abort(401)
if not project_id:
self.abort(400)
project_key = utilities.key_for_urlsafe_id(project_id)
if not project_key:
self.abort(400)
project = project_key.get()
if not (project and isinstance(project, model.Project)):
self.abort(404)
if ((user.email not in project.contributors) and not
project.is_owner(user.email)):
self.abort(401)
request_object = {}
if self.request.body:
request_object = json.loads(self.request.body)
            completed = request_object.get('completed')
            new_time_record_key = model.TimeRecord.create_time_record(
                project_key, user.email,
                completed=completed,
                name=request_object.get('name'))
else:
new_time_record_key = model.TimeRecord.create_time_record(
project_key, user.email)
new_time_record = new_time_record_key.get()
response_object = new_time_record.json_object()
# Send response
self.response.content_type = 'application/json'
self.response.out.write(json.dumps(response_object))
def put(self, project_id, time_record_id):
""" Update the Time Record. """
response_object = {}
user = users.get_current_user()
if not user:
self.abort(401)
if not project_id or not time_record_id or not self.request.body:
self.abort(400)
request_object = json.loads(self.request.body)
project_key = utilities.key_for_urlsafe_id(project_id)
time_record_key = utilities.key_for_urlsafe_id(time_record_id)
if (not project_key or not time_record_key or
(project_key != time_record_key.parent())):
self.abort(400)
project = project_key.get()
time_record = time_record_key.get()
if (not (project and isinstance(project, model.Project)) or not
(time_record and isinstance(time_record, model.TimeRecord))):
self.abort(404)
if ((user.email not in project.contributors) and not
project.is_owner(user.email)):
self.abort(401)
# Process optional items...
name = request_object.get('name')
if name:
time_record.name = name
project.put()
end = request_object.get('end')
time_record.put()
# Check `end` after updating the Project and Time Record;
# avoids a bug whereby the Project's original `completed` time is saved.
        if end is True:
            time_record.complete_time_record()
response_object = time_record.json_object()
# Send response
self.response.content_type = 'application/json'
self.response.out.write(json.dumps(response_object))
class Comments(webapp2.RequestHandler):
def get(self, project_id, parent_type=None, parent_id=None):
response_object = {}
user = users.get_current_user()
if not user:
self.abort(401)
if not project_id:
self.abort(400)
project_key = utilities.key_for_urlsafe_id(project_id)
if not project_key:
self.abort(400)
project = project_key.get()
if not (project and isinstance(project, model.Project)):
self.abort(404)
if parent_id:
# Fetch by Parent ID
if parent_type == 'milestones':
# Milestones
milestone = model.Milestone.for_number(project_key,
int(parent_id))
if not milestone:
self.abort(404)
parent_key = milestone.key
else:
# assume other...
parent_key = utilities.key_for_urlsafe_id(parent_id)
if not parent_key or (project_key != parent_key.parent()):
self.abort(400)
parent = parent_key.get()
if not parent and not isinstance(parent, model.TimeRecord):
self.abort(404)
comments = model.Comment.query(ancestor=parent_key)
response_object = []
for comment in comments:
response_object.append(comment.json_object())
else:
# Rely upon Project
comments = model.Comment.query(ancestor=project_key)
response_object = []
for comment in comments:
response_object.append(comment.json_object())
# Send response
self.response.content_type = 'application/json'
self.response.out.write(json.dumps(response_object))
def post(self, project_id, parent_type=None, parent_id=None):
""" Create a new Comment in the specified Project, bound to another
        object (either a Time Record or a Milestone). """
response_object = {}
user = users.get_current_user()
if not user:
self.abort(401)
# Get JSON request body
if not project_id or not self.request.body:
self.abort(400)
request_object = json.loads(self.request.body)
comment_content = request_object.get('comment')
if not comment_content:
self.abort(400)
project_key = utilities.key_for_urlsafe_id(project_id)
if not project_key:
self.abort(400)
project = project_key.get()
if not (project and isinstance(project, model.Project)):
self.abort(404)
if ((user.email not in project.contributors) and not
project.is_owner(user.email)):
self.abort(401)
if parent_id:
# Create with a Object other than the Project as this Comment's parent
if parent_type == 'milestones':
# Milestones
milestone = model.Milestone.for_number(project_key,
int(parent_id))
if not milestone:
self.abort(404)
parent_key = milestone.key
else:
# assume other...
parent_key = utilities.key_for_urlsafe_id(parent_id)
if (not parent_key or (project_key != parent_key.parent())):
self.abort(400)
parent = parent_key.get()
if not (parent and isinstance(parent, model.TimeRecord)):
self.abort(404)
# Create with `Project` and `Parent`
new_comment_key = model.Comment.create_comment(
comment_content, parent_key, project_key, user.email)
comment = new_comment_key.get()
response_object = comment.json_object()
else:
# Create with `Project` as parent
new_comment_key = model.Comment.create_comment(
comment_content, project_key, project_key, user.email)
comment = new_comment_key.get()
response_object = comment.json_object()
# Send response
self.response.content_type = 'application/json'
self.response.out.write(json.dumps(response_object))
def put(self, project_id, comment_id):
""" Update a Comment. """
response_object = {}
user = users.get_current_user()
if not user:
self.abort(401)
# Get JSON request body
if not project_id or not comment_id or not self.request.body:
self.abort(400)
project_key = utilities.key_for_urlsafe_id(project_id)
comment_key = utilities.key_for_urlsafe_id(comment_id)
request_object = json.loads(self.request.body)
comment_content = request_object.get('comment')
        if (not project_key or not comment_key or not comment_content or
                project_key.pairs() != comment_key.pairs()[:len(project_key.pairs())]):
            # A comment may hang off the project itself or off a time
            # record/milestone beneath it, so require project_key to be an
            # ancestor of comment_key.
            self.abort(400)
project = project_key.get()
comment = comment_key.get()
if (not (project and isinstance(project, model.Project)) or not
(comment and isinstance(comment, model.Comment))):
self.abort(404)
if ((user.email not in project.contributors) and not
project.is_owner(user.email)):
self.abort(401)
# if comment.project != project_key: # Replaced by check above
# self.abort(409)
comment.comment = comment_content
comment.put()
response_object = comment.json_object()
# Send response
self.response.content_type = 'application/json'
self.response.out.write(json.dumps(response_object))
def delete(self, project_id, comment_id):
""" Delete a Comment. """
response_object = {}
user = users.get_current_user()
if not user:
self.abort(401)
if not project_id or not comment_id:
self.abort(400)
project_key = utilities.key_for_urlsafe_id(project_id)
comment_key = utilities.key_for_urlsafe_id(comment_id)
        if (not project_key or not comment_key or
                project_key.pairs() != comment_key.pairs()[:len(project_key.pairs())]):
            self.abort(400)
project = project_key.get()
comment = comment_key.get()
if (not (project and isinstance(project, model.Project)) or not
(comment and isinstance(comment, model.Comment))):
self.abort(404)
comment_key.delete()
# Send response
self.response.content_type = 'application/json'
self.response.out.write(json.dumps(response_object))
class Milestones(webapp2.RequestHandler):
def get(self, project_id, milestone_id=None):
""" List the Milestones associated with a Project. """
response_object = {}
user = users.get_current_user()
if not user:
self.abort(401)
if not project_id:
self.abort(400)
project_key = utilities.key_for_urlsafe_id(project_id)
if not project_key:
self.abort(400)
project = project_key.get()
if not (project and isinstance(project, model.Project)):
self.abort(404)
if user.email not in project.users:
self.abort(401)
if milestone_id:
# Give a specific Milestone
milestone = model.Milestone.for_number(project_key,
int(milestone_id))
if not milestone:
self.abort(404)
response_object = milestone.json_object()
else:
# Check if we're filtering...
label_ids = self.request.GET.getall('label')
open_str = self.request.GET.get('open')
filters = []
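            # Build the filter list dynamically: an optional open/closed
            # filter plus one equality filter per requested label key,
            # AND-ed together in the query below.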
if len(label_ids) > 0 or open_str is not None:
# Use filters
open_bool = utilities.str_to_bool(open_str, allow_none=True)
                if open_bool is not None:
filters.append(model.Milestone.open == open_bool)
for label_id in label_ids:
filters.append(model.Milestone.labels == ndb.Key(
model.Label, int(label_id), parent=project_key))
query = model.Milestone.query(
ndb.AND(*filters), ancestor=project_key).order(
-model.Milestone.created)
else:
# No filters
query = model.Milestone.query(
ancestor=project_key).order(-model.Milestone.created)
if self.request.GET.get('cursor'):
# Cursor-based request
cursor = ndb.Cursor(urlsafe=self.request.GET.get('cursor'))
milestones, next_cursor, more = query.fetch_page(
15, start_cursor=cursor)
response_object = []
for milestone in milestones:
response_object.append(milestone.json_object())
if more:
self.response.headers.add('X-Cursor', next_cursor.urlsafe())
else:
# List all Milestones
milestones, next_cursor, more = query.fetch_page(15)
response_object = []
for milestone in milestones:
response_object.append(milestone.json_object())
if more:
self.response.headers.add('X-Cursor', next_cursor.urlsafe())
# Send response
self.response.content_type = 'application/json'
self.response.out.write(json.dumps(response_object))
def post(self, project_id):
""" Create a new Milestone associated with this Project. """
response_object = {}
user = users.get_current_user()
if not user:
self.abort(401)
# Get JSON request body
if not project_id or not self.request.body:
self.abort(400)
request_object = json.loads(self.request.body)
project_key = utilities.key_for_urlsafe_id(project_id)
name = request_object.get('name')
if not project_key or not name:
self.abort(400)
project = project_key.get()
if not (project and isinstance(project, model.Project)):
self.abort(404)
if ((user.email not in project.contributors) and not
project.is_owner(user.email)):
self.abort(401)
new_milestone_key = model.Milestone.create_milestone(
name, project_key, user.email)
new_milestone = new_milestone_key.get()
if len(request_object) > 1:
# Process optional items...
description = request_object.get('description')
if description:
new_milestone.description = description
labels = request_object.get('labels')
if isinstance(labels, list):
for label_key_id in labels:
label_key = ndb.Key(urlsafe=label_key_id)
new_milestone.labels.append(label_key)
new_milestone.put()
response_object = new_milestone.json_object()
# Send response
self.response.content_type = 'application/json'
self.response.out.write(json.dumps(response_object))
def put(self, project_id, milestone_id):
""" Update a Milestone. """
response_object = {}
user = users.get_current_user()
if not user:
self.abort(401)
if not project_id or not milestone_id or not self.request.body:
self.abort(400)
        project_key = utilities.key_for_urlsafe_id(project_id)
        if not project_key:
            self.abort(400)
        request_object = json.loads(self.request.body)
        milestone = model.Milestone.for_number(project_key, int(milestone_id))
project = project_key.get()
if (not (project and isinstance(project, model.Project)) or not
milestone):
self.abort(404)
if ((user.email not in project.contributors) and not
project.is_owner(user.email)):
self.abort(401)
# Process optional items...
if len(request_object) > 0:
name = request_object.get('name')
if name:
milestone.name = name
description = request_object.get('description')
if description:
milestone.description = description
            open_state = request_object.get('open')
            if open_state is not None:
                milestone.open = bool(open_state)
# labels = request_object.get('labels')
# if isinstance(labels, list):
# for label_key_id in labels:
# label_key = ndb.Key(urlsafe=label_key_id)
# new_milestone.labels.append(label_key)
milestone.put()
response_object = milestone.json_object()
# Send response
self.response.content_type = 'application/json'
self.response.out.write(json.dumps(response_object))
@ndb.transactional(xg=True)
def delete(self, project_id, milestone_id):
""" Delete a Milestone. """
response_object = {}
user = users.get_current_user()
if not user:
self.abort(401)
if not project_id or not milestone_id:
self.abort(400)
project_key = utilities.key_for_urlsafe_id(project_id)
if not project_key:
self.abort(400)
project = project_key.get()
milestone = model.Milestone.for_number(project_key, int(milestone_id))
if (not (project and isinstance(project, model.Project)) or not
milestone):
self.abort(404)
if ((user.email not in project.contributors) and not
project.is_owner(user.email)):
self.abort(401)
# Get all Comments associated with this Milestone
        comments = model.Comment.query(ancestor=milestone.key)
        for comment in comments:
            comment.key.delete()
        milestone.key.delete()
# Send response
self.response.content_type = 'application/json'
self.response.out.write(json.dumps(response_object))
class Labels(webapp2.RequestHandler):
def get(self, project_id, milestone_id=None):
""" List the Labels associated with this Project. """
response_array = []
user = users.get_current_user()
if not user:
self.abort(401)
# Try getting the associated Project
project_key = utilities.key_for_urlsafe_id(project_id)
if not project_key:
self.abort(400)
project = project_key.get()
if not project or not isinstance(project, model.Project):
self.abort(404)
if user.email not in project.users:
self.abort(401)
if milestone_id:
# Return all Labels assigned to this Milestone
milestone = model.Milestone.for_number(project_key,
int(milestone_id))
if not milestone:
self.abort(404)
label_keys = milestone.labels
for label_key in label_keys:
label = label_key.get()
if label:
response_array.append(label.json_object())
else:
            # No milestone given: return every Label defined on this Project
labels = model.Label.query(ancestor=project.key)
for label in labels:
response_array.append(label.json_object())
# Send response
self.response.content_type = 'application/json'
self.response.out.write(json.dumps(response_array))
def post(self, project_id, milestone_id=None):
""" creates a Label associated with this Project. """
response_object = {}
user = users.get_current_user()
if not user:
self.abort(401)
# Try getting the associated Project
        project_key = utilities.key_for_urlsafe_id(project_id)
        if not project_key or not self.request.body:
            self.abort(400)
        project = project_key.get()
        if not (project and isinstance(project, model.Project)):
            self.abort(404)
request_object = json.loads(self.request.body)
label_id = request_object.get('label_id')
if milestone_id and label_id:
# Add an existing Label to a Milestone
label_key = utilities.key_for_urlsafe_id(label_id)
if not label_key:
self.abort(400)
milestone = model.Milestone.for_number(project_key,
int(milestone_id))
label = label_key.get()
if not milestone or not (label and isinstance(label, model.Label)):
self.abort(404)
milestone.labels.append(label_key)
milestone.put()
response_object = label.json_object()
else:
# Create a new Label
name = request_object.get('name')
color = request_object.get('color')
if not name or not color:
self.abort(400)
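            # Accept CSS-style hex colors: "#RGB" or "#RRGGBB".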
color_pattern = r'^#([A-Fa-f0-9]{6}|[A-Fa-f0-9]{3})$'
if not re.match(color_pattern, color):
self.abort(400)
if ((user.email not in project.contributors) and not
project.is_owner(user.email)):
self.abort(401)
            new_label_key = model.Label.create_label(name, color, project.key)
            new_label = new_label_key.get()
response_object = new_label.json_object()
# Send response
self.response.content_type = 'application/json'
self.response.out.write(json.dumps(response_object))
def put(self, project_id, label_id):
""" Update a Label associated with this Project. """
response_object = {}
user = users.get_current_user()
if not user:
self.abort(401)
if not project_id or not label_id or not self.request.body:
self.abort(400)
project_key = utilities.key_for_urlsafe_id(project_id)
label_key = utilities.key_for_urlsafe_id(label_id)
request_object = json.loads(self.request.body)
if (not project_key or not label_key or
(project_key != label_key.parent())):
self.abort(400)
project = project_key.get()
label = label_key.get()
if (not (project and isinstance(project, model.Project)) or not
(label and isinstance(label, model.Label))):
self.abort(404)
if ((user.email not in project.contributors) and not
project.is_owner(user.email)):
self.abort(401)
# Process optional items...
if len(request_object) > 0:
name = request_object.get('name')
if name:
label.name = name
color = request_object.get('color')
if color:
label.color = color
label.put()
response_object = label.json_object()
# Send response
self.response.content_type = 'application/json'
self.response.out.write(json.dumps(response_object))
    def delete(self, project_id, label_id, milestone_id=None):
        """ Delete a Label from this Project, or detach it from a Milestone. """
response_object = {}
user = users.get_current_user()
if not user:
self.abort(401)
# Try getting the associated Label...
if not project_id or not label_id:
self.abort(400)
project_key = utilities.key_for_urlsafe_id(project_id)
label_key = utilities.key_for_urlsafe_id(label_id)
if (not project_key or not label_key or
(project_key != label_key.parent())):
self.abort(400)
project = project_key.get()
label = label_key.get()
if (not (project and isinstance(project, model.Project)) or not
(label and isinstance(label, model.Label)) or
(project.key != label.key.parent())):
self.abort(404)
if ((user.email not in project.contributors) and not
project.is_owner(user.email)):
self.abort(401)
if milestone_id:
# Delete a label only from a Milestone's `labels` array
milestone = model.Milestone.for_number(project_key,
int(milestone_id))
if not milestone or (project.key != label.key.parent()):
self.abort(404)
if label_key in milestone.labels:
milestone.labels.remove(label_key)
milestone.put()
else:
# Delete an entire label
label.delete_label()
# Send response
self.response.content_type = 'application/json'
self.response.out.write(json.dumps(response_object))
app = webapp2.WSGIApplication([
webapp2.Route(
'/api/v2/projects',
handler=Projects,
methods=['GET', 'POST']
), webapp2.Route(
'/api/v2/projects/<project_id:([a-zA-Z0-9-_]+)>',
handler=Projects,
methods=['GET', 'PUT', 'DELETE']
), webapp2.Route(
'/api/v2/projects/<project_id:([a-zA-Z0-9-_]+)>/contributors/<contributor_email>',
handler=Contributors,
methods=['POST', 'DELETE']
), webapp2.Route(
'/api/v2/projects/<project_id:([a-zA-Z0-9-_]+)>/time-records',
handler=TimeRecords,
methods=['GET', 'POST']
), webapp2.Route(
'/api/v2/projects/<project_id:([a-zA-Z0-9-_]+)>/time-records/<time_record_id:([a-zA-Z0-9-_]+)>',
handler=TimeRecords,
methods=['GET', 'PUT', 'DELETE']
), webapp2.Route(
'/api/v2/projects/<project_id:([a-zA-Z0-9-_]+)>/<parent_type:(time-records|milestones)>/<parent_id:([a-zA-Z0-9-_]+)>/comments',
handler=Comments,
methods=['GET', 'POST']
), webapp2.Route(
'/api/v2/projects/<project_id:([a-zA-Z0-9-_]+)>/comments',
handler=Comments,
methods=['GET', 'POST']
), webapp2.Route(
'/api/v2/projects/<project_id:([a-zA-Z0-9-_]+)>/comments/<comment_id:([a-zA-Z0-9-_]+)>',
handler=Comments,
methods=['PUT', 'DELETE']
), webapp2.Route(
'/api/v2/projects/<project_id:([a-zA-Z0-9-_]+)>/milestones',
handler=Milestones,
methods=['GET', 'POST']
), webapp2.Route(
'/api/v2/projects/<project_id:([a-zA-Z0-9-_]+)>/milestones/<milestone_id:([a-zA-Z0-9-_]+)>',
handler=Milestones,
methods=['GET', 'PUT', 'DELETE']
), webapp2.Route(
'/api/v2/projects/<project_id:([a-zA-Z0-9-_]+)>/labels',
handler=Labels,
methods=['GET', 'POST']
), webapp2.Route(
'/api/v2/projects/<project_id:([a-zA-Z0-9-_]+)>/labels/<label_id:([a-zA-Z0-9-_]+)>',
handler=Labels,
methods=['PUT', 'DELETE']
), webapp2.Route(
'/api/v2/projects/<project_id:([a-zA-Z0-9-_]+)>/milestones/<milestone_id:([a-zA-Z0-9-_]+)>/labels',
handler=Labels,
methods=['GET', 'POST']
), webapp2.Route(
'/api/v2/projects/<project_id:([a-zA-Z0-9-_]+)>/milestones/<milestone_id:([a-zA-Z0-9-_]+)>/labels/<label_id:([a-zA-Z0-9-_]+)>',
handler=Labels,
methods=['DELETE']
)
])
def error_handler_unauthorized(request, response, exception):
""" HTTP/1.1 401 Unauthorized """
logging.exception(exception)
response.content_type = 'application/json'
response.set_status(401)
response.write(json.dumps({
'status': 401,
'message': 'HTTP/1.1 401 Unauthorized'
}))
def error_handler_server_error(request, response, exception):
""" HTTP/1.1 500 Internal Server Error """
logging.exception(exception)
response.content_type = 'application/json'
response.set_status(500)
response.write(json.dumps({
'status': 500,
'message': 'HTTP/1.1 500 Internal Server Error'
}))
app.error_handlers[401] = error_handler_unauthorized
app.error_handlers[500] = error_handler_server_error
|
roberthodgen/thought-jot
|
src/api_v2.py
|
Python
|
mit
| 37,593 | 0.000718 |
from setuptools import setup
from ast import literal_eval
def get_version(source='xpclr/__init__.py'):
with open(source) as sf:
for line in sf:
if line.startswith('__version__'):
return literal_eval(line.split('=')[-1].lstrip())
raise ValueError("__version__ not found")
VERSION = get_version()
DISTNAME = 'xpclr'
PACKAGE_NAME = 'xpclr'
DESCRIPTION = 'Code to compute xpclr as described in Chen 2010'
with open('README.md') as f:
LONG_DESCRIPTION = f.read()
MAINTAINER = 'Nicholas Harding'
MAINTAINER_EMAIL = 'nicholas.harding@bdi.ox.ac.uk'
URL = 'https://github.com/hardingnj/xpclr'
DOWNLOAD_URL = 'http://github.com/hardingnj/xpclr'
LICENSE = 'MIT'
# strictly speaking, xpclr requires numpy, scipy and numexpr, but numexpr
# won't install unless numpy is already installed, so leave this blank for now
# and require user to pre-install numpy, scipy and numexpr themselves
INSTALL_REQUIRES = []
CLASSIFIERS = []
def setup_package():
metadata = dict(
name=DISTNAME,
maintainer=MAINTAINER,
maintainer_email=MAINTAINER_EMAIL,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
license=LICENSE,
url=URL,
download_url=DOWNLOAD_URL,
version=VERSION,
package_dir={'': '.'},
packages=['xpclr'],
scripts=['bin/xpclr'],
classifiers=CLASSIFIERS,
install_requires=INSTALL_REQUIRES,
)
setup(**metadata)
if __name__ == '__main__':
setup_package()
|
hardingnj/xpclr
|
setup.py
|
Python
|
mit
| 1,538 | 0.0013 |
#!/usr/bin/python
import apt
import apt.progress
import apt_pkg
import logging
import re
import sys
logging.basicConfig(filename='/var/log/supervisor/rps.log',
format='%(asctime)s %(levelname)s: deb_install: %(message)s',
level=logging.INFO)
logging.getLogger().setLevel(logging.INFO)
class control_parser():
def __init__(self):
apt_pkg.init()
self.cache = apt.Cache()
self.cache.update()
self.cache.open()
def parse(self, path = 'debian/control'):
try:
tagfile = apt_pkg.TagFile(path)
for section in tagfile:
deps = section.get('Build-Depends', None)
if not deps:
continue
packages = deps.split(',')
for p in packages:
self.mark_install(p)
self.install()
except Exception as e:
print "E: %s" % e
def mark_install(self, pstr):
deps = apt_pkg.parse_depends(pstr)
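        # parse_depends returns a list of OR-groups; each group is a list
        # of (name, version, op) alternatives of which one must hold.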
have_version = False
        for or_group in deps:
            if have_version:
                break
            print pstr, or_group
            for d in or_group:
name = d[0]
version_num = d[1]
version_op = d[2]
p = self.cache[name]
if not p:
logging.error("Could not find package %s in cache", name)
continue
if len(version_num) > 0:
highest_v = None
highest_vnum = 0
for version in p.versions:
if apt_pkg.check_dep(version.version, version_op, version_num):
have_version = True
logging.info("package: %s, version: %s, priority: %s/%d",
name, version.version, version.priority, version.policy_priority)
if (version.policy_priority > highest_vnum):
highest_vnum = version.policy_priority
highest_v = version
if not have_version:
logging.error("Could not required version of the package %s, must be %s %s",
name, version_op, version_num)
# going for the next ORed version if any
continue
p.candidate = highest_v
logging.info("package %s, selected version: %s, priority: %s/%d",
name, p.candidate.version, p.candidate.priority, p.candidate.policy_priority)
logging.info("Going to install package %s", name)
p.mark_install(auto_fix=True, auto_inst=True)
have_version = True
# do not run for the subsequent ORed packages
break
if not have_version:
logging.fatal("Could not find suitable package %s", pstr)
def install(self):
self.cache.commit()
if __name__ == '__main__':
    if len(sys.argv) != 2:
        print "E: usage: %s /path/to/debian/control" % sys.argv[0]
        sys.exit(1)
cp = control_parser()
cp.parse(path = sys.argv[1])
|
bioothod/zbuilder
|
conf.d/deb_install_build_deps.py
|
Python
|
apache-2.0
| 3,249 | 0.004925 |
from django.db import models
from django.utils import timezone
import datetime
class Question(models.Model):
""" Question object model
"""
question_text = models.CharField(max_length=200)
pub_date = models.DateTimeField('date published')
def __unicode__(self): # __unicode__ on Python 2
return self.question_text
def was_published_recently(self):
return self.pub_date >= timezone.now() - datetime.timedelta(days=1)
was_published_recently.admin_order_field = 'pub_date'
was_published_recently.boolean = True
was_published_recently.short_description = 'Published recently?'
class Choice(models.Model):
""" Choice object model
"""
question = models.ForeignKey(Question)
choice_text = models.CharField(max_length=200)
votes = models.IntegerField(default=0)
def __unicode__(self): # __unicode__ on Python 2
return self.choice_text
|
devtronics/heck_site
|
polls/models.py
|
Python
|
agpl-3.0
| 953 | 0.002099 |
#!/usr/bin/python
import simplejson as json
i = open('/proc/cpuinfo')
my_text = i.readlines()
i.close()
username = ""
for line in my_text:
line = line.strip()
ar = line.split(' ')
if ar[0].startswith('Serial'):
username = "a" + ar[1]
if not username:
exit(-1)
o = open('/home/pi/.cgminer/cgminer.conf', 'w')
pools = []
pools.append({"url": "stratum+tcp://ghash.io:3333",
"user": username, "pass": "12345"})
conf = {"pools": pools,
"api-listen" : "true",
"api-port" : "4028",
"api-allow" : "W:127.0.0.1"}
txt = json.dumps(conf, sort_keys=True, indent=4 * ' ')
o.write(txt)
o.write("\n");
o.close()
|
glukolog/calc256
|
cgserial.py
|
Python
|
gpl-3.0
| 673 | 0.007429 |
from django.conf.urls.defaults import *
from django.contrib import admin
from fumblerooski.feeds import CoachesFeed
feeds = {
'coaches': CoachesFeed,
}
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/coach_totals/', "fumblerooski.college.views.admin_coach_totals"),
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r"^admin/(.*)", admin.site.root),
url(r"^blog/", include("fumblerooski.blog.urls")),
url(r"^college/", include("fumblerooski.college.urls")),
url(r"^rankings/", include("fumblerooski.rankings.urls")),
url(r"^api/", include("fumblerooski.api.urls")),
url(r"^$", "fumblerooski.college.views.homepage"),
(r'^feeds/(?P<url>.*)/$', 'django.contrib.syndication.views.feed', {'feed_dict': feeds}),
)
urlpatterns += patterns('fumblerooski.college.views',
url(r'^coaches/$', 'coach_index'),
url(r'^coaches/active/$', 'active_coaches'),
url(r'^coaches/feeds/recent_hires/$', 'recent_hires_feed'),
url(r'^coaches/detail/(?P<coach>\d+-[-a-z]+)/$', 'coach_detail', name="coach_detail"),
url(r'^coaches/detail/(?P<coach>\d+-[-a-z]+)/vs/$', 'coach_vs', name="coach_vs"),
url(r'^coaches/detail/(?P<coach>\d+-[-a-z]+)/vs/(?P<coach2>\d+-[-a-z]+)/$', 'coach_compare', name="coach_compare"),
url(r'^coaches/assistants/$', 'assistant_index'),
url(r'^coaches/common/(?P<coach>\d+-[-a-z]+)/(?P<coach2>\d+-[-a-z]+)/$', 'coach_common'),
url(r'^coaches/departures/(?P<year>\d\d\d\d)/$', 'departures'),
url(r'^coaches/hires/(?P<year>\d\d\d\d)/$', 'coaching_hires'),
)
|
dwillis/fumblerooski
|
urls.py
|
Python
|
bsd-3-clause
| 1,591 | 0.005657 |
# -*- coding: utf-8 -*-
'''
(c) Copyright 2013 Telefonica, I+D. Printed in Spain (Europe). All Rights
Reserved.
The copyright to the software program(s) is property of Telefonica I+D.
The program(s) may be used and or copied only with the express written
consent of Telefonica I+D or in accordance with the terms and conditions
stipulated in the agreement/contract under which the program(s) have
been supplied.
'''
HEADERS={'content-type': 'application/json'}
MQTT_BROKER_HOSTNAME='iotagent'
MQTT_BROKER_PORT='1883'
GW_HOSTNAME='iotagent'
GW_PORT='8002'
IOT_PORT='8080'
MANAGER_PORT='8081'
GW_SERVER_ROOT = 'http://{}:{}'.format(GW_HOSTNAME, GW_PORT)
IOT_SERVER_ROOT = 'http://{}:{}'.format(GW_HOSTNAME, IOT_PORT)
MANAGER_SERVER_ROOT = 'http://{}:{}'.format(GW_HOSTNAME, MANAGER_PORT)
CBROKER_URL='http://10.95.213.159:6500'
CBROKER_HEADER='Fiware-Service'
CBROKER_PATH_HEADER='Fiware-ServicePath'
SMPP_URL='http://sbc04:5371'
SMPP_FROM='682996050'
DEF_ENTITY_TYPE='thing'
DEF_TYPE='string'
PATH_UL20_COMMAND='/iot/ngsi/d/updateContext'
PATH_MQTT_COMMAND='/iot/ngsi/mqtt/updateContext'
PATH_UL20_SIMULATOR='/simulaClient/ul20Command'
TIMEOUT_COMMAND=10
MQTT_APIKEY='1234'
UL20_APIKEY='apikey3'
|
hilgroth/fiware-IoTAgent-Cplusplus
|
tests/e2e_tests/common/gw_configuration.py
|
Python
|
agpl-3.0
| 1,230 | 0.017073 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pytest
from unittest.mock import Mock, call
from tinyrpc.server import RPCServer
from tinyrpc.transports import ServerTransport
from tinyrpc.protocols import RPCProtocol, RPCResponse
from tinyrpc.dispatch import RPCDispatcher
CONTEXT='sapperdeflap'
RECMSG='out of receive_message'
PARMSG='out of parse_request'
SERMSG='out of serialize'
@pytest.fixture
def transport():
transport = Mock(ServerTransport)
transport.receive_message = Mock(return_value=(CONTEXT, RECMSG))
return transport
@pytest.fixture
def protocol():
protocol = Mock(RPCProtocol)
protocol.parse_request = Mock(return_value=PARMSG)
return protocol
@pytest.fixture
def response():
response = Mock(RPCResponse)
response.serialize = Mock(return_value=SERMSG)
return response
@pytest.fixture
def dispatcher(response):
dispatcher = Mock(RPCDispatcher)
dispatcher.dispatch = Mock(return_value=response)
return dispatcher
def test_handle_message(transport, protocol, dispatcher):
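    # Drive one receive/parse/dispatch/reply cycle and check that each
    # stage is handed the previous stage's output verbatim.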
server = RPCServer(transport, protocol, dispatcher)
server.receive_one_message()
transport.receive_message.assert_called()
protocol.parse_request.assert_called_with(RECMSG)
dispatcher.dispatch.assert_called_with(PARMSG, None)
dispatcher.dispatch().serialize.assert_called()
transport.send_reply.assert_called_with(CONTEXT, SERMSG)
def test_handle_message_callback(transport, protocol, dispatcher):
server = RPCServer(transport, protocol, dispatcher)
server.trace = Mock(return_value=None)
server.receive_one_message()
assert server.trace.call_args_list == [call('-->', CONTEXT, RECMSG), call('<--', CONTEXT, SERMSG)]
server.trace.assert_called()
|
mbr/tinyrpc
|
tests/test_server.py
|
Python
|
mit
| 1,748 | 0.006293 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017 European Synchrotron Radiation Facility, Grenoble, France
#
# Principal author: Wout De Nolf (wout.de_nolf@esrf.eu)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
|
woutdenolf/wdncrunch
|
wdncrunch/tests/__init__.py
|
Python
|
mit
| 1,226 | 0.000816 |
#!/usr/bin/env python
"""Grover's quantum search algorithm example."""
from sympy import pprint
from sympy.physics.quantum import qapply
from sympy.physics.quantum.qubit import IntQubit
from sympy.physics.quantum.grover import (OracleGate, superposition_basis,
WGate, grover_iteration)
def demo_vgate_app(v):
for i in range(2**v.nqubits):
print('qapply(v*IntQubit(%i, %r))' % (i, v.nqubits))
pprint(qapply(v*IntQubit(i, nqubits=v.nqubits)))
def black_box(qubits):
    return qubits == IntQubit(1, nqubits=qubits.nqubits)
def main():
print()
print('Demonstration of Grover\'s Algorithm')
print('The OracleGate or V Gate carries the unknown function f(x)')
print('> V|x> = ((-1)^f(x))|x> where f(x) = 1 when x = a (True in our case)')
print('> and 0 (False in our case) otherwise')
print()
nqubits = 2
print('nqubits = ', nqubits)
v = OracleGate(nqubits, black_box)
print('Oracle or v = OracleGate(%r, black_box)' % nqubits)
print()
psi = superposition_basis(nqubits)
print('psi:')
pprint(psi)
demo_vgate_app(v)
print('qapply(v*psi)')
pprint(qapply(v*psi))
print()
w = WGate(nqubits)
print('WGate or w = WGate(%r)' % nqubits)
print('On a 2 Qubit system like psi, 1 iteration is enough to yield |1>')
print('qapply(w*v*psi)')
pprint(qapply(w*v*psi))
print()
nqubits = 3
print('On a 3 Qubit system, it requires 2 iterations to achieve')
print('|1> with high enough probability')
psi = superposition_basis(nqubits)
print('psi:')
pprint(psi)
v = OracleGate(nqubits, black_box)
print('Oracle or v = OracleGate(%r, black_box)' % nqubits)
print()
print('iter1 = grover.grover_iteration(psi, v)')
iter1 = qapply(grover_iteration(psi, v))
pprint(iter1)
print()
print('iter2 = grover.grover_iteration(iter1, v)')
iter2 = qapply(grover_iteration(iter1, v))
pprint(iter2)
print()
if __name__ == "__main__":
main()
|
kaushik94/sympy
|
examples/advanced/grover_example.py
|
Python
|
bsd-3-clause
| 2,081 | 0.001442 |
# coding=utf-8
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import numba
from src_legacy.fourier_series.buffer.ringbuffer import Ringbuffer
@numba.vectorize(nopython=True)
def legendre_recursion(n, x, p1, p2):
if n == 0:
return 1
elif n == 1:
return x
else:
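        # Bonnet's recurrence: n*P_n(x) = (2n-1)*x*P_{n-1}(x) - (n-1)*P_{n-2}(x)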
c0 = (2*n-1)/n
c1 = (n-1)/n
return c0 * x * p1 - c1 * p2
class FastLegendreEval:
"""
    Pure float-64 class for recursive evaluation of Legendre polynomials.
"""
start_index = 0
def __init__(self, arg, max_degree):
if isinstance(arg, np.ndarray):
self.arg = arg
self.size = self.arg.size
elif isinstance(arg, float):
self.arg = arg
self.size = 1
else:
raise ValueError()
self.max_degree = max_degree
# @profile
def generator(self, skip=0):
buffer = Ringbuffer(buffer_size=3,
array_size=self.size,
dtype=float,
start_index=self.start_index,
array_size_increment=None,
array_margin=0)
deg = self.start_index
while self.max_degree is None or deg <= self.max_degree - 1:
p1 = buffer[deg - 1, :]
p2 = buffer[deg - 2, :]
arr = legendre_recursion(deg, self.arg, p1, p2) # ~73%
buffer[:] = arr # ~27%
if skip == 0:
yield deg, buffer
else:
skip -= 1
deg += 1
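# Usage sketch (illustrative; assumes the Ringbuffer import above resolves
# and that indexing the buffer at the yielded degree returns that row):
#
#     x = np.linspace(-1.0, 1.0, 101)
#     ev = FastLegendreEval(x, max_degree=5)
#     for deg, buf in ev.generator():
#         values = buf[deg, :]  # P_deg evaluated at x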
|
jaantollander/Convergence-of-Fourier-Series
|
src_legacy/fourier_series/basis_functions/legendre/fast_evaluation.py
|
Python
|
mit
| 1,726 | 0 |
# encoding: utf-8
"""
corduroy.config
Internal state
"""
from __future__ import with_statement
import os, sys
from .atoms import odict, adict, Document
# LATER: add some sort of rcfile support...
# from inspect import getouterframes, currentframe
# _,filename,_,_,_,_ = getouterframes(currentframe())[-1]
# print "from", os.path.dirname(os.path.abspath(filename))
defaults = adict({
"host":"http://127.0.0.1",
"port":5984,
"uuid_cache":50,
"types":adict({
"doc":Document,
"dict":adict
}),
"http":adict({
"max_clients":10,
"max_redirects":6,
"timeout":60*60,
"io_loop":None
})
})
try:
import simplejson as _json
except ImportError:
import json as _json
class json(object):
@classmethod
def decode(cls, string, **opts):
"""Decode the given JSON string.
:param string: the JSON string to decode
:type string: basestring
:return: the corresponding Python data structure
:rtype: object
"""
return _json.loads(string, object_hook=defaults.types.dict, **opts)
@classmethod
def encode(cls, obj, **opts):
"""Encode the given object as a JSON string.
:param obj: the Python data structure to encode
:type obj: object
:return: the corresponding JSON string
:rtype: basestring
"""
return _json.dumps(obj, allow_nan=False, ensure_ascii=False, encoding='utf-8', **opts)
|
samizdatco/corduroy
|
corduroy/config.py
|
Python
|
bsd-3-clause
| 1,593 | 0.009416 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Rackspace Hosting
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse # noqa
from django.template.defaultfilters import title # noqa
from django.utils.translation import ugettext_lazy as _ # noqa
from horizon import tables
from horizon.utils import filters
from openstack_dashboard import api
STATUS_CHOICES = (
("BUILDING", None),
("COMPLETED", True),
("DELETE_FAILED", False),
("FAILED", False),
("NEW", None),
("SAVING", None),
)
class LaunchLink(tables.LinkAction):
name = "create"
verbose_name = _("Create Backup")
url = "horizon:project:database_backups:create"
classes = ("btn-launch", "ajax-modal")
class RestoreLink(tables.LinkAction):
name = "restore"
verbose_name = _("Restore Backup")
url = "horizon:project:databases:launch"
classes = ("btn-launch", "ajax-modal")
def get_link_url(self, datam):
url = reverse(self.url)
return url + '?backup=%s' % datam.id
class DeleteBackup(tables.BatchAction):
name = "delete"
action_present = _("Delete")
action_past = _("Scheduled deletion of")
data_type_singular = _("Backup")
data_type_plural = _("Backups")
classes = ('btn-danger', 'btn-terminate')
def action(self, request, obj_id):
api.trove.backup_delete(request, obj_id)
class UpdateRow(tables.Row):
ajax = True
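    # Horizon re-polls this row via AJAX while the backup status is
    # transient (see STATUS_CHOICES above).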
def get_data(self, request, backup_id):
backup = api.trove.backup_get(request, backup_id)
try:
backup.instance = api.trove.instance_get(request,
backup.instance_id)
except Exception:
pass
return backup
def db_link(obj):
if not hasattr(obj, 'instance'):
return
if hasattr(obj.instance, 'name'):
return reverse(
'horizon:project:databases:detail',
kwargs={'instance_id': obj.instance_id})
def db_name(obj):
if hasattr(obj.instance, 'name'):
return obj.instance.name
return obj.instance_id
class BackupsTable(tables.DataTable):
name = tables.Column("name",
link=("horizon:project:database_backups:detail"),
verbose_name=_("Name"))
created = tables.Column("created", verbose_name=_("Created At"),
filters=[filters.parse_isotime])
location = tables.Column(lambda obj: _("Download"),
link=lambda obj: obj.locationRef,
verbose_name=_("Backup File"))
instance = tables.Column(db_name, link=db_link,
verbose_name=_("Database"))
status = tables.Column("status",
filters=(title, filters.replace_underscores),
verbose_name=_("Status"),
status=True,
status_choices=STATUS_CHOICES)
class Meta:
name = "backups"
verbose_name = _("Backups")
status_columns = ["status"]
row_class = UpdateRow
table_actions = (LaunchLink, DeleteBackup)
row_actions = (RestoreLink, DeleteBackup)
|
Havate/havate-openstack
|
proto-build/gui/horizon/Horizon_GUI/openstack_dashboard/dashboards/project/database_backups/tables.py
|
Python
|
apache-2.0
| 3,772 | 0 |
"""
Forum attachments models
========================
This module defines models provided by the ``forum_attachments`` application.
"""
from machina.apps.forum_conversation.forum_attachments.abstract_models import AbstractAttachment
from machina.core.db.models import model_factory
Attachment = model_factory(AbstractAttachment)
|
ellmetha/django-machina
|
machina/apps/forum_conversation/forum_attachments/models.py
|
Python
|
bsd-3-clause
| 346 | 0.00578 |
from nose.tools import eq_, ok_
from remo.base.tests import RemoTestCase
from remo.base.utils import get_date
from remo.profiles.forms import ChangeUserForm, UserStatusForm
from remo.profiles.models import UserStatus
from remo.profiles.tests import UserFactory, UserStatusFactory
class ChangeUserFormTest(RemoTestCase):
def test_change_valid_login_email(self):
"""Test change login email with a valid one."""
mentor = UserFactory.create(groups=['Mentor'], userprofile__initial_council=True)
rep = UserFactory.create(groups=['Rep'], userprofile__mentor=mentor, last_name='Doe')
data = {'first_name': rep.first_name,
'last_name': rep.last_name,
'email': rep.email}
form = ChangeUserForm(data=data, instance=rep)
ok_(form.is_valid())
def test_change_invalid_login_email(self):
"""Test change login email with an invalid one."""
mentor = UserFactory.create(groups=['Mentor'], userprofile__initial_council=True)
rep = UserFactory.create(groups=['Rep'], userprofile__mentor=mentor)
data = {'first_name': rep.first_name,
'last_name': rep.last_name,
'email': mentor.email}
form = ChangeUserForm(data=data, instance=rep)
ok_(not form.is_valid())
class UserStatusFormTests(RemoTestCase):
def test_base(self):
mentor = UserFactory.create()
user = UserFactory.create(userprofile__mentor=mentor)
start_date = get_date()
expected_date = get_date(days=1)
data = {'start_date': start_date,
'expected_date': expected_date}
form = UserStatusForm(data, instance=UserStatus(user=user))
ok_(form.is_valid())
db_obj = form.save()
eq_(db_obj.expected_date, get_date(days=1))
eq_(db_obj.user.get_full_name(), user.get_full_name())
def test_invalid_expected_date(self):
mentor = UserFactory.create()
user = UserFactory.create(userprofile__mentor=mentor)
start_date = get_date()
expected_date = get_date(weeks=15)
data = {'start_date': start_date,
'expected_date': expected_date}
form = UserStatusForm(data, instance=UserStatus(user=user))
ok_(not form.is_valid())
ok_('expected_date' in form.errors)
def test_start_date_in_the_past(self):
mentor = UserFactory.create()
user = UserFactory.create(userprofile__mentor=mentor)
start_date = get_date(-1)
expected_date = get_date(days=2)
data = {'start_date': start_date,
'expected_date': expected_date}
form = UserStatusForm(data, instance=UserStatus(user=user))
ok_(not form.is_valid())
ok_('start_date' in form.errors)
def test_expected_date_before_start_date(self):
mentor = UserFactory.create()
user = UserFactory.create(userprofile__mentor=mentor)
start_date = get_date(4)
expected_date = get_date(days=2)
data = {'start_date': start_date,
'expected_date': expected_date}
form = UserStatusForm(data, instance=UserStatus(user=user))
ok_(not form.is_valid())
ok_('expected_date' in form.errors)
    def test_remove_unavailability_status(self):
mentor = UserFactory.create()
user = UserFactory.create(userprofile__mentor=mentor)
start_date = get_date()
expected_date = get_date(days=1)
data = {'start_date': start_date,
'expected_date': expected_date}
user_status = UserStatusFactory.create(user=user,
expected_date=expected_date,
start_date=start_date)
form = UserStatusForm(data, instance=user_status)
ok_(form.is_valid())
ok_(not user_status.end_date)
db_obj = form.save()
eq_(db_obj.expected_date, get_date())
eq_(db_obj.user.get_full_name(), user.get_full_name())
ok_(db_obj.return_date)
|
mozilla/remo
|
remo/profiles/tests/test_forms.py
|
Python
|
bsd-3-clause
| 4,057 | 0.000739 |
"""Public API for Fortran parser.
Module content
--------------
"""
from __future__ import absolute_import
#Author: Pearu Peterson <pearu@cens.ioc.ee>
#Created: Oct 2006
__autodoc__ = ['get_reader', 'parse', 'walk']
from . import Fortran2003
# import all Statement classes:
from .base_classes import EndStatement, classes
from .block_statements import *
# CHAR_BIT is used to convert object bit sizes to byte sizes
from .utils import CHAR_BIT
def get_reader(input, isfree=None, isstrict=None, include_dirs = None, source_only = None,
ignore_comments = True):
""" Returns Fortran reader instance.
Parameters
----------
input : str
Specify a string or filename containing Fortran code.
isfree, isstrict : {None, bool}
Specify input Fortran format. The values are determined from the
input. If that fails then isfree=True and isstrict=False is assumed.
include_dirs : {None, list}
Specify a list of include directories. The default list (when
include_dirs=None) contains the current working directory and
the directory of ``filename``.
source_only : {None, list}
Specify a list of Fortran file names that are searched when the
``USE`` statement is encountered.
Returns
-------
reader : `FortranReader`
Notes
-----
If ``input`` is a C filename then the functions searches for comment
lines starting with ``/*f2py`` and reads following lines as PYF file
content until a line ``*/`` is found.
See also
--------
parse
"""
import os
import re
from .readfortran import FortranFileReader, FortranStringReader
if os.path.isfile(input):
name,ext = os.path.splitext(input)
if ext.lower() in ['.c']:
# get signatures from C file comments starting with `/*f2py` and ending with `*/`.
# TODO: improve parser to take line number offset making line numbers in
# parser messages correct.
            f2py_c_comments = re.compile(r'/[*]\s*f2py\s.*[*]/', re.I | re.M)
            f = open(input, 'r')
c_input = ''
for s1 in f2py_c_comments.findall(f.read()):
c_input += s1[2:-2].lstrip()[4:] + '\n'
f.close()
if isfree is None: isfree = True
if isstrict is None: isstrict = True
return parse(c_input, isfree, isstrict, include_dirs)
reader = FortranFileReader(input, include_dirs = include_dirs, source_only = source_only)
elif isinstance(input, str):
reader = FortranStringReader(input, include_dirs = include_dirs, source_only = source_only)
else:
raise TypeError('Expected string or filename input but got %s' % (type(input)))
if isfree is None: isfree = reader.isfree
if isstrict is None: isstrict = reader.isstrict
reader.set_mode(isfree, isstrict)
return reader
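# Minimal usage sketch (illustrative only; the fixed-form Fortran snippet is
# hypothetical):
#
#     reader = get_reader("      subroutine foo\n      end", isfree=False)
#     for item in reader:
#         print item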
def parse(input, isfree=None, isstrict=None, include_dirs = None, source_only = None,
ignore_comments = True, analyze=True):
""" Parse input and return Statement tree.
Parameters
----------
input : str
Specify a string or filename containing Fortran code.
isfree, isstrict : {None, bool}
Specify input Fortran format. The values are determined from the
input. If that fails then isfree=True and isstrict=False is assumed.
include_dirs : {None, list}
Specify a list of include directories. The default list (when
include_dirs=None) contains the current working directory and
the directory of ``filename``.
source_only : {None, list}
Specify a list of Fortran file names that are searched when the
``USE`` statement is encountered.
ignore_comments : bool
When True then discard all comment lines in the Fortran code.
analyze : bool
When True then apply run analyze method on the Fortran code tree.
Returns
-------
block : `fparser.api.BeginSource`
Examples
--------
>>> code = '''
... c comment
... subroutine foo(a)
... integer a
... print*, "a=",a
... end
... '''
>>> tree = parse(code,isfree=False)
>>> print tree
!BEGINSOURCE <cStringIO.StringI object at 0x1798030> mode=fix90
SUBROUTINE foo(a)
INTEGER a
PRINT *, "a=", a
END SUBROUTINE foo
>>> print `tree`
BeginSource
blocktype='beginsource'
name='<cStringIO.StringI object at 0x1798030> mode=fix90'
a=AttributeHolder:
external_subprogram=<dict with keys ['foo']>
content:
Subroutine
args=['a']
item=Line('subroutine foo(a)',(3, 3),'')
a=AttributeHolder:
variables=<dict with keys ['a']>
content:
Integer
selector=('', '')
entity_decls=['a']
item=Line('integer a',(4, 4),'')
Print
item=Line('print*, "a=",a',(5, 5),'')
EndSubroutine
blocktype='subroutine'
name='foo'
item=Line('end',(6, 6),'')
See also
--------
get_reader
"""
from .parsefortran import FortranParser
reader = get_reader(input, isfree, isstrict, include_dirs, source_only)
parser = FortranParser(reader, ignore_comments = ignore_comments)
parser.parse()
if analyze:
parser.analyze()
return parser.block
def walk(stmt, depth=-1, _initial_depth = None):
""" Generate Fortran statements by walking the stmt tree until given depth.
    For each block statement in stmt, the walk function yields a
    tuple ``(statement, depth)`` where ``depth`` is the depth of the
    tree structure for the statement.
Parameters
----------
stmt : Statement
depth : int
If depth is positive then walk in the tree until given depth.
If depth is negative then walk the whole tree.
Returns
-------
generator
Examples
--------
::
from fparser import api
source_str = '''
subroutine foo
integer i, r
do i=1,100
r = r + i
end do
end
'''
tree = api.parse(source_str)
for stmt, depth in api.walk(tree):
print depth, stmt.item
that will print::
1 line #2'subroutine foo'
2 line #3'integer i, r'
2 line #4'do i=1,100'
3 line #5'r = r + i'
2 line #6'end do'
1 line #7'end'
"""
if _initial_depth is None:
if depth==0:
return
_initial_depth = depth
if not isinstance(stmt, classes.BeginSource):
yield stmt, _initial_depth - depth
if isinstance(stmt, classes.BeginStatement):
last_stmt = stmt.content[-1]
last_index = len(stmt.content)
if isinstance(last_stmt, classes.EndStatement):
last_index -= 1
else:
last_stmt = None
if depth != 0:
for substmt in stmt.content[:last_index]:
for statement, statement_depth in walk(substmt, depth-1, _initial_depth):
yield statement, statement_depth
if last_stmt is not None:
yield last_stmt, _initial_depth - depth
|
pearu/f2py
|
fparser/api.py
|
Python
|
bsd-3-clause
| 7,543 | 0.006364 |
#!/usr/bin/env python3
'''
Make a stream emit at the pace of a slower stream
Pros:
Introduce a delay between events in an otherwise rapid stream (like range)
Cons:
    When the stream being delayed runs out of events to push, the zipped stream
    keeps pushing events built by the lambda fn passed to the zip operation.
'''
from time import sleep
from rx import Observable
# Generate an interval sequence, firing once each second
interval = Observable.interval(1000)
# 5..10
numbers = Observable.from_(range(5, 11))
# Zip two streams together so it emits at the pace of the slowest stream
source = Observable.zip(
interval,
numbers,
# Because we only push the elements of the `numbers` stream,
# As soon as it runs out of events, it will keep sending empty
# events to the subscribers
lambda _, n: n
)
sub1 = source.subscribe(
lambda v : print("Value published to observer 1: {0}".format(v)),
lambda e : print("Error! {0}".format(e)),
lambda : print("Completed!")
)
sub2 = source.subscribe(
lambda v : print("Value published to observer 2: {0}".format(v)),
lambda e : print("Error! {0}".format(e)),
lambda : print("Completed!")
)
# As noted above, we have to dispose the subscriptions before the `numbers`
# streams runs out, or the program will get stuck listening to empty events
sleep(5)
sub1.dispose()
sub2.dispose()
# => Value published to observer 1: 5
# => Value published to observer 2: 5
# => Value published to observer 1: 6
# => Value published to observer 2: 6
# => Value published to observer 2: 7
# => Value published to observer 1: 7
# => Value published to observer 2: 8
# => Value published to observer 1: 8
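# A possible variant (sketch; assumes the same RxPY API used above): cap the
# interval with take() so the zipped stream completes on its own instead of
# relying on a timed dispose():
#
#     source = Observable.zip(
#         Observable.interval(1000).take(6),
#         Observable.from_(range(5, 11)),
#         lambda _, n: n
#     )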
|
Pysellus/streaming-api-test
|
rx-tests/rx-stream-pacing.py
|
Python
|
mit
| 1,709 | 0.004096 |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
from antlr3 import *
from antlr3.compat import set, frozenset
allOrdinals = set([1, 2, 3, 4, 5])
numOrdinals = len(allOrdinals)
HIDDEN = BaseRecognizer.HIDDEN
MONTH=27
THURSDAY=23
FOURTH_OR_FIFTH=16
THIRD=13
DECEMBER=39
FROM=41
EVERY=6
WEDNESDAY=22
QUARTER=40
SATURDAY=25
SYNCHRONIZED=9
JANUARY=28
SUNDAY=26
TUESDAY=21
SEPTEMBER=36
UNKNOWN_TOKEN=45
AUGUST=35
JULY=34
MAY=32
FRIDAY=24
DIGITS=8
FEBRUARY=29
TWO_DIGIT_HOUR_TIME=43
OF=4
WS=44
EOF=-1
APRIL=31
COMMA=10
JUNE=33
OCTOBER=37
TIME=5
FIFTH=15
NOVEMBER=38
FIRST=11
DIGIT=7
FOURTH=14
MONDAY=20
HOURS=17
MARCH=30
SECOND=12
MINUTES=18
TO=42
DAY=19
tokenNames = [
"<invalid>", "<EOR>", "<DOWN>", "<UP>",
"OF", "TIME", "EVERY", "DIGIT", "DIGITS", "SYNCHRONIZED", "COMMA", "FIRST",
"SECOND", "THIRD", "FOURTH", "FIFTH", "FOURTH_OR_FIFTH", "HOURS", "MINUTES",
"DAY", "MONDAY", "TUESDAY", "WEDNESDAY", "THURSDAY", "FRIDAY", "SATURDAY",
"SUNDAY", "MONTH", "JANUARY", "FEBRUARY", "MARCH", "APRIL", "MAY", "JUNE",
"JULY", "AUGUST", "SEPTEMBER", "OCTOBER", "NOVEMBER", "DECEMBER", "QUARTER",
"FROM", "TO", "TWO_DIGIT_HOUR_TIME", "WS", "UNKNOWN_TOKEN"
]
class GrocParser(Parser):
grammarFileName = "Groc.g"
antlr_version = version_str_to_tuple("3.1.1")
antlr_version_str = "3.1.1"
tokenNames = tokenNames
def __init__(self, input, state=None):
if state is None:
state = RecognizerSharedState()
Parser.__init__(self, input, state)
self.dfa4 = self.DFA4(
self, 4,
eot = self.DFA4_eot,
eof = self.DFA4_eof,
min = self.DFA4_min,
max = self.DFA4_max,
accept = self.DFA4_accept,
special = self.DFA4_special,
transition = self.DFA4_transition
)
self.ordinal_set = set()
self.weekday_set = set()
self.month_set = set()
self.monthday_set = set()
self.time_string = ''
self.interval_mins = 0
self.period_string = ''
self.synchronized = False
self.start_time_string = ''
self.end_time_string = ''
valuesDict = {
SUNDAY: 0,
FIRST: 1,
MONDAY: 1,
JANUARY: 1,
TUESDAY: 2,
SECOND: 2,
FEBRUARY: 2,
WEDNESDAY: 3,
THIRD: 3,
MARCH: 3,
THURSDAY: 4,
FOURTH: 4,
APRIL: 4,
FRIDAY: 5,
FIFTH: 5,
MAY: 5,
SATURDAY: 6,
JUNE: 6,
JULY: 7,
AUGUST: 8,
SEPTEMBER: 9,
OCTOBER: 10,
NOVEMBER: 11,
DECEMBER: 12,
}
def ValueOf(self, token_type):
return self.valuesDict.get(token_type, -1)
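  # e.g. ValueOf(THIRD) == 3 and ValueOf(MARCH) == 3; token types absent from
  # valuesDict, such as TIME, map to -1.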
def timespec(self, ):
try:
try:
pass
alt1 = 2
LA1_0 = self.input.LA(1)
if (LA1_0 == EVERY) :
LA1_1 = self.input.LA(2)
if ((DIGIT <= LA1_1 <= DIGITS)) :
alt1 = 2
elif ((DAY <= LA1_1 <= SUNDAY)) :
alt1 = 1
else:
nvae = NoViableAltException("", 1, 1, self.input)
raise nvae
elif ((DIGIT <= LA1_0 <= DIGITS) or (FIRST <= LA1_0 <= FOURTH_OR_FIFTH)) :
alt1 = 1
else:
nvae = NoViableAltException("", 1, 0, self.input)
raise nvae
if alt1 == 1:
pass
self._state.following.append(self.FOLLOW_specifictime_in_timespec44)
self.specifictime()
self._state.following.pop()
elif alt1 == 2:
pass
self._state.following.append(self.FOLLOW_interval_in_timespec48)
self.interval()
self._state.following.pop()
self.match(self.input, EOF, self.FOLLOW_EOF_in_timespec52)
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
def specifictime(self, ):
TIME1 = None
try:
try:
pass
pass
alt4 = 2
alt4 = self.dfa4.predict(self.input)
if alt4 == 1:
pass
pass
alt2 = 2
LA2_0 = self.input.LA(1)
if (LA2_0 == EVERY or (FIRST <= LA2_0 <= FOURTH_OR_FIFTH)) :
alt2 = 1
elif ((DIGIT <= LA2_0 <= DIGITS)) :
alt2 = 2
else:
nvae = NoViableAltException("", 2, 0, self.input)
raise nvae
if alt2 == 1:
pass
pass
self._state.following.append(self.FOLLOW_ordinals_in_specifictime72)
self.ordinals()
self._state.following.pop()
self._state.following.append(self.FOLLOW_weekdays_in_specifictime74)
self.weekdays()
self._state.following.pop()
elif alt2 == 2:
pass
self._state.following.append(self.FOLLOW_monthdays_in_specifictime77)
self.monthdays()
self._state.following.pop()
self.match(self.input, OF, self.FOLLOW_OF_in_specifictime80)
alt3 = 2
LA3_0 = self.input.LA(1)
if ((MONTH <= LA3_0 <= DECEMBER)) :
alt3 = 1
elif ((FIRST <= LA3_0 <= THIRD) or LA3_0 == QUARTER) :
alt3 = 2
else:
nvae = NoViableAltException("", 3, 0, self.input)
raise nvae
if alt3 == 1:
pass
self._state.following.append(self.FOLLOW_monthspec_in_specifictime83)
self.monthspec()
self._state.following.pop()
elif alt3 == 2:
pass
self._state.following.append(self.FOLLOW_quarterspec_in_specifictime85)
self.quarterspec()
self._state.following.pop()
elif alt4 == 2:
pass
pass
self._state.following.append(self.FOLLOW_ordinals_in_specifictime101)
self.ordinals()
self._state.following.pop()
self._state.following.append(self.FOLLOW_weekdays_in_specifictime103)
self.weekdays()
self._state.following.pop()
self.month_set = set(range(1,13))
TIME1=self.match(self.input, TIME, self.FOLLOW_TIME_in_specifictime117)
self.time_string = TIME1.text
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
def interval(self, ):
intervalnum = None
period2 = None
try:
try:
pass
pass
self.match(self.input, EVERY, self.FOLLOW_EVERY_in_interval136)
intervalnum = self.input.LT(1)
if (DIGIT <= self.input.LA(1) <= DIGITS):
self.input.consume()
self._state.errorRecovery = False
else:
mse = MismatchedSetException(None, self.input)
raise mse
self.interval_mins = int(intervalnum.text)
self._state.following.append(self.FOLLOW_period_in_interval164)
period2 = self.period()
self._state.following.pop()
if ((period2 is not None) and [self.input.toString(period2.start,period2.stop)] or [None])[0] == "hours":
self.period_string = "hours"
else:
self.period_string = "minutes"
alt5 = 3
LA5_0 = self.input.LA(1)
if (LA5_0 == FROM) :
alt5 = 1
elif (LA5_0 == SYNCHRONIZED) :
alt5 = 2
if alt5 == 1:
pass
self._state.following.append(self.FOLLOW_time_range_in_interval176)
self.time_range()
self._state.following.pop()
elif alt5 == 2:
pass
pass
self.match(self.input, SYNCHRONIZED, self.FOLLOW_SYNCHRONIZED_in_interval189)
self.synchronized = True
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
def ordinals(self, ):
try:
try:
pass
alt7 = 2
LA7_0 = self.input.LA(1)
if (LA7_0 == EVERY) :
alt7 = 1
elif ((FIRST <= LA7_0 <= FOURTH_OR_FIFTH)) :
alt7 = 2
else:
nvae = NoViableAltException("", 7, 0, self.input)
raise nvae
if alt7 == 1:
pass
self.match(self.input, EVERY, self.FOLLOW_EVERY_in_ordinals218)
elif alt7 == 2:
pass
pass
self._state.following.append(self.FOLLOW_ordinal_in_ordinals226)
self.ordinal()
self._state.following.pop()
while True:
alt6 = 2
LA6_0 = self.input.LA(1)
if (LA6_0 == COMMA) :
alt6 = 1
if alt6 == 1:
pass
self.match(self.input, COMMA, self.FOLLOW_COMMA_in_ordinals229)
self._state.following.append(self.FOLLOW_ordinal_in_ordinals231)
self.ordinal()
self._state.following.pop()
else:
break
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
def ordinal(self, ):
ord = None
try:
try:
pass
ord = self.input.LT(1)
if (FIRST <= self.input.LA(1) <= FOURTH_OR_FIFTH):
self.input.consume()
self._state.errorRecovery = False
else:
mse = MismatchedSetException(None, self.input)
raise mse
self.ordinal_set.add(self.ValueOf(ord.type));
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
class period_return(ParserRuleReturnScope):
def __init__(self):
ParserRuleReturnScope.__init__(self)
def period(self, ):
retval = self.period_return()
retval.start = self.input.LT(1)
try:
try:
pass
if (HOURS <= self.input.LA(1) <= MINUTES):
self.input.consume()
self._state.errorRecovery = False
else:
mse = MismatchedSetException(None, self.input)
raise mse
retval.stop = self.input.LT(-1)
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return retval
def monthdays(self, ):
try:
try:
pass
pass
self._state.following.append(self.FOLLOW_monthday_in_monthdays314)
self.monthday()
self._state.following.pop()
while True:
alt8 = 2
LA8_0 = self.input.LA(1)
if (LA8_0 == COMMA) :
alt8 = 1
if alt8 == 1:
pass
self.match(self.input, COMMA, self.FOLLOW_COMMA_in_monthdays318)
self._state.following.append(self.FOLLOW_monthday_in_monthdays320)
self.monthday()
self._state.following.pop()
else:
break
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
def monthday(self, ):
day = None
try:
try:
pass
day = self.input.LT(1)
if (DIGIT <= self.input.LA(1) <= DIGITS):
self.input.consume()
self._state.errorRecovery = False
else:
mse = MismatchedSetException(None, self.input)
raise mse
self.monthday_set.add(int(day.text));
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
def weekdays(self, ):
try:
try:
pass
alt10 = 2
LA10_0 = self.input.LA(1)
if (LA10_0 == DAY) :
alt10 = 1
elif ((MONDAY <= LA10_0 <= SUNDAY)) :
alt10 = 2
else:
nvae = NoViableAltException("", 10, 0, self.input)
raise nvae
if alt10 == 1:
pass
self.match(self.input, DAY, self.FOLLOW_DAY_in_weekdays365)
if self.ordinal_set:
self.monthday_set = self.ordinal_set
self.ordinal_set = set()
else:
self.ordinal_set = self.ordinal_set.union(allOrdinals)
self.weekday_set = set([self.ValueOf(SUNDAY), self.ValueOf(MONDAY),
self.ValueOf(TUESDAY), self.ValueOf(WEDNESDAY),
self.ValueOf(THURSDAY), self.ValueOf(FRIDAY),
self.ValueOf(SATURDAY), self.ValueOf(SUNDAY)])
elif alt10 == 2:
pass
pass
self._state.following.append(self.FOLLOW_weekday_in_weekdays373)
self.weekday()
self._state.following.pop()
while True:
alt9 = 2
LA9_0 = self.input.LA(1)
if (LA9_0 == COMMA) :
alt9 = 1
if alt9 == 1:
pass
self.match(self.input, COMMA, self.FOLLOW_COMMA_in_weekdays376)
self._state.following.append(self.FOLLOW_weekday_in_weekdays378)
self.weekday()
self._state.following.pop()
else:
break
if not self.ordinal_set:
self.ordinal_set = self.ordinal_set.union(allOrdinals)
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
def weekday(self, ):
dayname = None
try:
try:
pass
dayname = self.input.LT(1)
if (MONDAY <= self.input.LA(1) <= SUNDAY):
self.input.consume()
self._state.errorRecovery = False
else:
mse = MismatchedSetException(None, self.input)
raise mse
self.weekday_set.add(self.ValueOf(dayname.type))
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
def monthspec(self, ):
try:
try:
pass
alt11 = 2
LA11_0 = self.input.LA(1)
if (LA11_0 == MONTH) :
alt11 = 1
elif ((JANUARY <= LA11_0 <= DECEMBER)) :
alt11 = 2
else:
nvae = NoViableAltException("", 11, 0, self.input)
raise nvae
if alt11 == 1:
pass
self.match(self.input, MONTH, self.FOLLOW_MONTH_in_monthspec459)
self.month_set = self.month_set.union(set([
self.ValueOf(JANUARY), self.ValueOf(FEBRUARY), self.ValueOf(MARCH),
self.ValueOf(APRIL), self.ValueOf(MAY), self.ValueOf(JUNE),
self.ValueOf(JULY), self.ValueOf(AUGUST), self.ValueOf(SEPTEMBER),
self.ValueOf(OCTOBER), self.ValueOf(NOVEMBER),
self.ValueOf(DECEMBER)]))
elif alt11 == 2:
pass
self._state.following.append(self.FOLLOW_months_in_monthspec469)
self.months()
self._state.following.pop()
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
def months(self, ):
try:
try:
pass
pass
self._state.following.append(self.FOLLOW_month_in_months486)
self.month()
self._state.following.pop()
while True:
alt12 = 2
LA12_0 = self.input.LA(1)
if (LA12_0 == COMMA) :
alt12 = 1
if alt12 == 1:
pass
self.match(self.input, COMMA, self.FOLLOW_COMMA_in_months489)
self._state.following.append(self.FOLLOW_month_in_months491)
self.month()
self._state.following.pop()
else:
break
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
def month(self, ):
monthname = None
try:
try:
pass
monthname = self.input.LT(1)
if (JANUARY <= self.input.LA(1) <= DECEMBER):
self.input.consume()
self._state.errorRecovery = False
else:
mse = MismatchedSetException(None, self.input)
raise mse
self.month_set.add(self.ValueOf(monthname.type));
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
def quarterspec(self, ):
try:
try:
pass
alt13 = 2
LA13_0 = self.input.LA(1)
if (LA13_0 == QUARTER) :
alt13 = 1
elif ((FIRST <= LA13_0 <= THIRD)) :
alt13 = 2
else:
nvae = NoViableAltException("", 13, 0, self.input)
raise nvae
if alt13 == 1:
pass
self.match(self.input, QUARTER, self.FOLLOW_QUARTER_in_quarterspec583)
self.month_set = self.month_set.union(set([
self.ValueOf(JANUARY), self.ValueOf(APRIL), self.ValueOf(JULY),
self.ValueOf(OCTOBER)]))
elif alt13 == 2:
pass
pass
self._state.following.append(self.FOLLOW_quarter_ordinals_in_quarterspec595)
self.quarter_ordinals()
self._state.following.pop()
self.match(self.input, MONTH, self.FOLLOW_MONTH_in_quarterspec597)
self.match(self.input, OF, self.FOLLOW_OF_in_quarterspec599)
self.match(self.input, QUARTER, self.FOLLOW_QUARTER_in_quarterspec601)
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
def quarter_ordinals(self, ):
try:
try:
pass
pass
self._state.following.append(self.FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals620)
self.month_of_quarter_ordinal()
self._state.following.pop()
while True:
alt14 = 2
LA14_0 = self.input.LA(1)
if (LA14_0 == COMMA) :
alt14 = 1
if alt14 == 1:
pass
self.match(self.input, COMMA, self.FOLLOW_COMMA_in_quarter_ordinals623)
self._state.following.append(self.FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals625)
self.month_of_quarter_ordinal()
self._state.following.pop()
else:
break
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
def month_of_quarter_ordinal(self, ):
offset = None
try:
try:
pass
offset = self.input.LT(1)
if (FIRST <= self.input.LA(1) <= THIRD):
self.input.consume()
self._state.errorRecovery = False
else:
mse = MismatchedSetException(None, self.input)
raise mse
jOffset = self.ValueOf(offset.type) - 1
self.month_set = self.month_set.union(set([
jOffset + self.ValueOf(JANUARY), jOffset + self.ValueOf(APRIL),
jOffset + self.ValueOf(JULY), jOffset + self.ValueOf(OCTOBER)]))
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
def time_range(self, ):
start_time = None
end_time = None
try:
try:
pass
pass
self.match(self.input, FROM, self.FOLLOW_FROM_in_time_range673)
pass
start_time=self.match(self.input, TIME, self.FOLLOW_TIME_in_time_range680)
self.start_time_string = start_time.text
self.match(self.input, TO, self.FOLLOW_TO_in_time_range691)
pass
end_time=self.match(self.input, TIME, self.FOLLOW_TIME_in_time_range698)
self.end_time_string = end_time.text
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
DFA4_eot = DFA.unpack(
u"\13\uffff"
)
DFA4_eof = DFA.unpack(
u"\13\uffff"
)
DFA4_min = DFA.unpack(
u"\1\6\1\23\1\12\1\uffff\2\4\1\13\1\uffff\1\24\1\12\1\4"
)
DFA4_max = DFA.unpack(
u"\1\20\2\32\1\uffff\1\5\1\12\1\20\1\uffff\2\32\1\12"
)
DFA4_accept = DFA.unpack(
u"\3\uffff\1\1\3\uffff\1\2\3\uffff"
)
DFA4_special = DFA.unpack(
u"\13\uffff"
)
DFA4_transition = [
DFA.unpack(u"\1\1\2\3\2\uffff\6\2"),
DFA.unpack(u"\1\4\7\5"),
DFA.unpack(u"\1\6\10\uffff\1\4\7\5"),
DFA.unpack(u""),
DFA.unpack(u"\1\3\1\7"),
DFA.unpack(u"\1\3\1\7\4\uffff\1\10"),
DFA.unpack(u"\6\11"),
DFA.unpack(u""),
DFA.unpack(u"\7\12"),
DFA.unpack(u"\1\6\10\uffff\1\4\7\5"),
DFA.unpack(u"\1\3\1\7\4\uffff\1\10")
]
DFA4 = DFA
FOLLOW_specifictime_in_timespec44 = frozenset([])
FOLLOW_interval_in_timespec48 = frozenset([])
FOLLOW_EOF_in_timespec52 = frozenset([1])
FOLLOW_ordinals_in_specifictime72 = frozenset([19, 20, 21, 22, 23, 24, 25, 26])
FOLLOW_weekdays_in_specifictime74 = frozenset([4])
FOLLOW_monthdays_in_specifictime77 = frozenset([4])
FOLLOW_OF_in_specifictime80 = frozenset([11, 12, 13, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40])
FOLLOW_monthspec_in_specifictime83 = frozenset([5])
FOLLOW_quarterspec_in_specifictime85 = frozenset([5])
FOLLOW_ordinals_in_specifictime101 = frozenset([19, 20, 21, 22, 23, 24, 25, 26])
FOLLOW_weekdays_in_specifictime103 = frozenset([5])
FOLLOW_TIME_in_specifictime117 = frozenset([1])
FOLLOW_EVERY_in_interval136 = frozenset([7, 8])
FOLLOW_set_in_interval146 = frozenset([17, 18])
FOLLOW_period_in_interval164 = frozenset([1, 9, 41])
FOLLOW_time_range_in_interval176 = frozenset([1])
FOLLOW_SYNCHRONIZED_in_interval189 = frozenset([1])
FOLLOW_EVERY_in_ordinals218 = frozenset([1])
FOLLOW_ordinal_in_ordinals226 = frozenset([1, 10])
FOLLOW_COMMA_in_ordinals229 = frozenset([11, 12, 13, 14, 15, 16])
FOLLOW_ordinal_in_ordinals231 = frozenset([1, 10])
FOLLOW_set_in_ordinal252 = frozenset([1])
FOLLOW_set_in_period291 = frozenset([1])
FOLLOW_monthday_in_monthdays314 = frozenset([1, 10])
FOLLOW_COMMA_in_monthdays318 = frozenset([7, 8])
FOLLOW_monthday_in_monthdays320 = frozenset([1, 10])
FOLLOW_set_in_monthday340 = frozenset([1])
FOLLOW_DAY_in_weekdays365 = frozenset([1])
FOLLOW_weekday_in_weekdays373 = frozenset([1, 10])
FOLLOW_COMMA_in_weekdays376 = frozenset([19, 20, 21, 22, 23, 24, 25, 26])
FOLLOW_weekday_in_weekdays378 = frozenset([1, 10])
FOLLOW_set_in_weekday400 = frozenset([1])
FOLLOW_MONTH_in_monthspec459 = frozenset([1])
FOLLOW_months_in_monthspec469 = frozenset([1])
FOLLOW_month_in_months486 = frozenset([1, 10])
FOLLOW_COMMA_in_months489 = frozenset([27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39])
FOLLOW_month_in_months491 = frozenset([1, 10])
FOLLOW_set_in_month510 = frozenset([1])
FOLLOW_QUARTER_in_quarterspec583 = frozenset([1])
FOLLOW_quarter_ordinals_in_quarterspec595 = frozenset([27])
FOLLOW_MONTH_in_quarterspec597 = frozenset([4])
FOLLOW_OF_in_quarterspec599 = frozenset([40])
FOLLOW_QUARTER_in_quarterspec601 = frozenset([1])
FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals620 = frozenset([1, 10])
FOLLOW_COMMA_in_quarter_ordinals623 = frozenset([11, 12, 13, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40])
FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals625 = frozenset([1, 10])
FOLLOW_set_in_month_of_quarter_ordinal644 = frozenset([1])
FOLLOW_FROM_in_time_range673 = frozenset([5])
FOLLOW_TIME_in_time_range680 = frozenset([42])
FOLLOW_TO_in_time_range691 = frozenset([5])
FOLLOW_TIME_in_time_range698 = frozenset([1])
def main(argv, stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr):
from antlr3.main import ParserMain
main = ParserMain("GrocLexer", GrocParser)
main.stdin = stdin
main.stdout = stdout
main.stderr = stderr
main.execute(argv)
if __name__ == '__main__':
main(sys.argv)
|
ProfessionalIT/maxigenios-website
|
sdk/google_appengine/google/appengine/cron/GrocParser.py
|
Python
|
mit
| 29,691 | 0.00906 |
"""Deprecated import support. Auto-generated by import_shims/generate_shims.sh."""
# pylint: disable=redefined-builtin,wrong-import-position,wildcard-import,useless-suppression,line-too-long
from import_shims.warn import warn_deprecated_import
warn_deprecated_import('contentstore.management.commands.tests.test_sync_courses', 'cms.djangoapps.contentstore.management.commands.tests.test_sync_courses')
from cms.djangoapps.contentstore.management.commands.tests.test_sync_courses import *
|
eduNEXT/edunext-platform
|
import_shims/studio/contentstore/management/commands/tests/test_sync_courses.py
|
Python
|
agpl-3.0
| 491 | 0.010183 |
from __future__ import division, print_function
from abc import ABCMeta, abstractmethod
import matplotlib as mpl
mpl.use('TkAgg')
from matplotlib.ticker import MaxNLocator, Formatter, Locator
from matplotlib.widgets import Slider, Button
import matplotlib.patches as patches
import matplotlib.pyplot as plt
from matplotlib.colors import LinearSegmentedColormap
from tadtool.tad import GenomicRegion, sub_matrix_regions, sub_data_regions, \
data_array, insulation_index, sub_vector_regions, sub_regions, \
call_tads_insulation_index, directionality_index, call_tads_directionality_index, normalised_insulation_index
import math
import copy
import numpy as np
from bisect import bisect_left
from future.utils import string_types
try:
import Tkinter as tk
import tkFileDialog as filedialog
except ImportError:
import tkinter as tk
from tkinter import filedialog
class BasePlotter(object):
__metaclass__ = ABCMeta
def __init__(self, title):
self._ax = None
self.cax = None
self.title = title
@abstractmethod
def _plot(self, region=None, **kwargs):
raise NotImplementedError("Subclasses need to override _plot function")
@abstractmethod
def plot(self, region=None, **kwargs):
raise NotImplementedError("Subclasses need to override plot function")
@property
def fig(self):
return self._ax.figure
@property
def ax(self):
if not self._ax:
_, self._ax = plt.subplots()
return self._ax
@ax.setter
def ax(self, value):
self._ax = value
class GenomeCoordFormatter(Formatter):
"""
Process axis tick labels to give nice representations
of genomic coordinates
"""
def __init__(self, chromosome, display_scale=True):
"""
:param chromosome: :class:`~kaic.data.genomic.GenomicRegion` or string
:param display_scale: Boolean
Display distance scale at bottom right
"""
if isinstance(chromosome, GenomicRegion):
self.chromosome = chromosome.chromosome
else:
self.chromosome = chromosome
self.display_scale = display_scale
def _format_val(self, x, prec_offset=0):
if x == 0:
oom_loc = 0
else:
oom_loc = int(math.floor(math.log10(abs(x))))
view_range = self.axis.axes.get_xlim()
oom_range = int(math.floor(math.log10(abs(view_range[1] - view_range[0]))))
if oom_loc >= 3:
return "{:.{prec}f}kb".format(x/1000, prec=max(0, 3 + prec_offset - oom_range))
return "{:.0f}b".format(x)
def __call__(self, x, pos=None):
"""
Return label for tick at coordinate x. Relative position of
ticks can be specified with pos. First tick gets chromosome name.
"""
s = self._format_val(x, prec_offset=1)
if pos == 0 or x == 0:
return "{}:{}".format(self.chromosome, s)
return s
def get_offset(self):
"""
Return information about the distances between
tick bars and the size of the view window.
Is called by matplotlib and displayed in lower right corner
of plots.
"""
if not self.display_scale:
return ""
view_range = self.axis.axes.get_xlim()
view_dist = abs(view_range[1] - view_range[0])
tick_dist = self.locs[2] - self.locs[1]
minor_tick_dist = tick_dist/5
minor_tick_dist_str = self._format_val(minor_tick_dist, prec_offset=2)
tick_dist_str = self._format_val(tick_dist, prec_offset=1)
view_dist_str = self._format_val(view_dist)
return "{}|{}|{}".format(minor_tick_dist_str, tick_dist_str, view_dist_str)
class GenomeCoordLocator(MaxNLocator):
"""
Choose locations of genomic coordinate ticks on the plot axis.
Behaves like default Matplotlib locator, except that it always
places a tick at the start and the end of the window.
"""
def __call__(self):
vmin, vmax = self.axis.get_view_interval()
ticks = self.tick_values(vmin, vmax)
# Make sure that first and last tick are the start
# and the end of the genomic range plotted. If next
# ticks are too close, remove them.
ticks[0] = vmin
ticks[-1] = vmax
if ticks[1] - vmin < (vmax - vmin)/(self._nbins*3):
ticks = np.delete(ticks, 1)
if vmax - ticks[-2] < (vmax - vmin)/(self._nbins*3):
ticks = np.delete(ticks, -2)
return self.raise_if_exceeds(np.array(ticks))
class MinorGenomeCoordLocator(Locator):
"""
Choose locations of minor tick marks between major
tick labels. Modification of the Matplotlib AutoMinorLocator,
except that it uses the distance between 2nd and 3rd major
    mark as reference, instead of the 1st and 2nd.
"""
def __init__(self, n):
self.ndivs = n
def __call__(self):
majorlocs = self.axis.get_majorticklocs()
try:
majorstep = majorlocs[2] - majorlocs[1]
except IndexError:
# Need at least two major ticks to find minor tick locations
# TODO: Figure out a way to still be able to display minor
# ticks without two major ticks visible. For now, just display
# no ticks at all.
majorstep = 0
if self.ndivs is None:
if majorstep == 0:
# TODO: Need a better way to figure out ndivs
ndivs = 1
else:
x = int(np.round(10 ** (np.log10(majorstep) % 1)))
if x in [1, 5, 10]:
ndivs = 5
else:
ndivs = 4
else:
ndivs = self.ndivs
minorstep = majorstep / ndivs
vmin, vmax = self.axis.get_view_interval()
if vmin > vmax:
vmin, vmax = vmax, vmin
if len(majorlocs) > 0:
t0 = majorlocs[1]
tmin = ((vmin - t0) // minorstep + 1) * minorstep
tmax = ((vmax - t0) // minorstep + 1) * minorstep
locs = np.arange(tmin, tmax, minorstep) + t0
cond = np.abs((locs - t0) % majorstep) > minorstep / 10.0
locs = locs.compress(cond)
else:
locs = []
return self.raise_if_exceeds(np.array(locs))
class BasePlotter1D(BasePlotter):
__metaclass__ = ABCMeta
def __init__(self, title):
BasePlotter.__init__(self, title=title)
def plot(self, region=None, ax=None, **kwargs):
if isinstance(region, string_types):
region = GenomicRegion.from_string(region)
if ax:
self.ax = ax
# set genome tick formatter
self.ax.xaxis.set_major_formatter(GenomeCoordFormatter(region))
self.ax.xaxis.set_major_locator(GenomeCoordLocator(nbins=5))
self.ax.xaxis.set_minor_locator(MinorGenomeCoordLocator(n=5))
self.ax.set_title(self.title)
self._plot(region, **kwargs)
self.ax.set_xlim(region.start, region.end)
return self.fig, self.ax
def prepare_normalization(norm="lin", vmin=None, vmax=None):
if isinstance(norm, mpl.colors.Normalize):
norm.vmin = vmin
norm.vmax = vmax
return norm
if norm == "log":
return mpl.colors.LogNorm(vmin=vmin, vmax=vmax)
elif norm == "lin":
return mpl.colors.Normalize(vmin=vmin, vmax=vmax)
else:
raise ValueError("'{}'' not a valid normalization method.".format(norm))
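# e.g. prepare_normalization("log", vmin=1, vmax=100) returns a
# matplotlib.colors.LogNorm(1, 100); a ready-made Normalize instance is passed
# through with only its vmin/vmax updated.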
class BasePlotterHic(object):
__metaclass__ = ABCMeta
def __init__(self, hic_matrix, regions=None, colormap='RdBu', norm="log",
vmin=None, vmax=None, show_colorbar=True, blend_masked=False):
        if regions is None:
            regions = []
            for i in range(hic_matrix.shape[0]):
                regions.append(GenomicRegion(chromosome='', start=i, end=i))
self.regions = regions
self.hic_matrix = hic_matrix
self.colormap = copy.copy(mpl.cm.get_cmap(colormap))
if blend_masked:
self.colormap.set_bad(self.colormap(0))
self._vmin = vmin
self._vmax = vmax
self.norm = prepare_normalization(norm=norm, vmin=vmin, vmax=vmax)
self.colorbar = None
self.slider = None
self.show_colorbar = show_colorbar
def add_colorbar(self, ax=None):
ax = self.cax if ax is None else ax
cmap_data = mpl.cm.ScalarMappable(norm=self.norm, cmap=self.colormap)
cmap_data.set_array([self.vmin, self.vmax])
self.colorbar = plt.colorbar(cmap_data, cax=ax, orientation="vertical")
@property
def vmin(self):
return self._vmin if self._vmin else np.nanmin(self.hic_matrix)
@property
def vmax(self):
return self._vmax if self._vmax else np.nanmax(self.hic_matrix)
class HicPlot(BasePlotter1D, BasePlotterHic):
def __init__(self, hic_matrix, regions=None, title='', colormap='viridis', max_dist=None, norm="log",
vmin=None, vmax=None, show_colorbar=True, blend_masked=False):
BasePlotter1D.__init__(self, title=title)
BasePlotterHic.__init__(self, hic_matrix, regions=regions, colormap=colormap, vmin=vmin, vmax=vmax,
show_colorbar=show_colorbar, norm=norm, blend_masked=blend_masked)
self.max_dist = max_dist
self.hicmesh = None
def _plot(self, region=None, cax=None):
if region is None:
raise ValueError("Cannot plot triangle plot for whole genome.")
hm, sr = sub_matrix_regions(self.hic_matrix, self.regions, region)
hm[np.tril_indices(hm.shape[0])] = np.nan
# Remove part of matrix further away than max_dist
if self.max_dist is not None:
for i in range(hm.shape[0]):
i_region = sr[i]
for j in range(hm.shape[1]):
j_region = sr[j]
if j_region.start-i_region.end > self.max_dist:
hm[i, j] = np.nan
hm_masked = np.ma.MaskedArray(hm, mask=np.isnan(hm))
# prepare an array of the corner coordinates of the Hic-matrix
        # Distances have to be scaled by sqrt(2), because the diagonals of the bins
        # are sqrt(2) times the bin size
sqrt2 = math.sqrt(2)
bin_coords = np.r_[[(x.start - 1) for x in sr], sr[-1].end]/sqrt2
X, Y = np.meshgrid(bin_coords, bin_coords)
        # rotate coordinate matrix 45 degrees
sin45 = math.sin(math.radians(45))
X_, Y_ = X*sin45 + Y*sin45, X*sin45 - Y*sin45
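        # (with sin45 = 1/sqrt(2) this maps the matrix diagonal X == Y onto
        # Y_ == 0; together with the earlier 1/sqrt(2) scaling of bin_coords,
        # genomic distances along the x-axis stay undistorted)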
# shift x coords to correct start coordinate and center the diagonal directly on the
# x-axis
X_ -= X_[1, 0] - (sr[0].start - 1)
Y_ -= .5*np.min(Y_) + .5*np.max(Y_)
# create plot
self.hicmesh = self.ax.pcolormesh(X_, Y_, hm_masked, cmap=self.colormap, norm=self.norm)
# set limits and aspect ratio
#self.ax.set_aspect(aspect="equal")
ylim_max = 0.5*(region.end-region.start)
if self.max_dist is not None and self.max_dist/2 < ylim_max:
ylim_max = self.max_dist/2
self.ax.set_ylim(0, ylim_max)
# remove y ticks
self.ax.set_yticks([])
# hide background patch
self.ax.patch.set_visible(False)
if self.show_colorbar:
self.add_colorbar(cax)
def set_clim(self, vmin, vmax):
self.hicmesh.set_clim(vmin=vmin, vmax=vmax)
if self.colorbar is not None:
self.colorbar.vmin = vmin
self.colorbar.vmax = vmax
self.colorbar.draw_all()
class DataArrayPlot(BasePlotter1D):
def __init__(self, data, window_sizes=None, regions=None, title='', midpoint=None,
colormap='coolwarm_r', vmax=None, current_window_size=0, log_y=True):
if regions is None:
regions = []
for i in range(data.shape[1]):
regions.append(GenomicRegion(chromosome='', start=i, end=i))
self.regions = regions
BasePlotter1D.__init__(self, title=title)
self.da = data
if window_sizes is None:
window_sizes = []
try:
l = len(data)
except TypeError:
l = data.shape[0]
for i in range(l):
window_sizes.append(i)
self.window_sizes = window_sizes
self.colormap = colormap
self.midpoint = midpoint
self.mesh = None
self.vmax = vmax
self.window_size_line = None
self.current_window_size = current_window_size
self.log_y = log_y
def _plot(self, region=None, cax=None):
da_sub, regions_sub = sub_data_regions(self.da, self.regions, region)
da_sub_masked = np.ma.MaskedArray(da_sub, mask=np.isnan(da_sub))
bin_coords = np.r_[[(x.start - 1) for x in regions_sub], regions_sub[-1].end]
x, y = np.meshgrid(bin_coords, self.window_sizes)
self.mesh = self.ax.pcolormesh(x, y, da_sub_masked, cmap=self.colormap, vmax=self.vmax)
self.colorbar = plt.colorbar(self.mesh, cax=cax, orientation="vertical")
self.window_size_line = self.ax.axhline(self.current_window_size, color='red')
if self.log_y:
self.ax.set_yscale("log")
self.ax.set_ylim((np.nanmin(self.window_sizes), np.nanmax(self.window_sizes)))
def set_clim(self, vmin, vmax):
self.mesh.set_clim(vmin=vmin, vmax=vmax)
if self.colorbar is not None:
self.colorbar.vmin = vmin
self.colorbar.vmax = vmax
self.colorbar.draw_all()
def update(self, window_size):
self.window_size_line.set_ydata(window_size)
class TADPlot(BasePlotter1D):
def __init__(self, regions, title='', color='black'):
BasePlotter1D.__init__(self, title=title)
self.regions = regions
self.color = color
self.current_region = None
def _plot(self, region=None, cax=None):
self.current_region = region
try:
sr, start_ix, end_ix = sub_regions(self.regions, region)
trans = self.ax.get_xaxis_transform()
for r in sr:
region_patch = patches.Rectangle(
(r.start, .2),
width=abs(r.end - r.start), height=.6,
transform=trans,
facecolor=self.color,
edgecolor='white',
linewidth=2.
)
self.ax.add_patch(region_patch)
except ValueError:
pass
self.ax.axis('off')
def update(self, regions):
self.regions = regions
self.ax.cla()
self.plot(region=self.current_region, ax=self.ax)
class DataLinePlot(BasePlotter1D):
def __init__(self, data, regions=None, title='', init_row=0, is_symmetric=False):
BasePlotter1D.__init__(self, title=title)
if regions is None:
regions = []
for i in range(len(data)):
regions.append(GenomicRegion(chromosome='', start=i, end=i))
self.init_row = init_row
self.data = data
self.sr = None
self.da_sub = None
self.regions = regions
self.current_region = None
self.line = None
self.current_ix = init_row
self.current_cutoff = None
self.cutoff_line = None
self.cutoff_line_mirror = None
self.is_symmetric = is_symmetric
def _new_region(self, region):
self.current_region = region
self.da_sub, self.sr = sub_data_regions(self.data, self.regions, region)
def _plot(self, region=None, cax=None):
self._new_region(region)
bin_coords = [(x.start - 1) for x in self.sr]
ds = self.da_sub[self.init_row]
self.line, = self.ax.plot(bin_coords, ds)
if not self.is_symmetric:
self.current_cutoff = (self.ax.get_ylim()[1] - self.ax.get_ylim()[0]) / 2 + self.ax.get_ylim()[0]
else:
            self.current_cutoff = self.ax.get_ylim()[1] / 2
self.ax.axhline(0.0, linestyle='dashed', color='grey')
self.cutoff_line = self.ax.axhline(self.current_cutoff, color='r')
if self.is_symmetric:
self.cutoff_line_mirror = self.ax.axhline(-1*self.current_cutoff, color='r')
self.ax.set_ylim((np.nanmin(ds), np.nanmax(ds)))
def update(self, ix=None, cutoff=None, region=None, update_canvas=True):
if region is not None:
self._new_region(region)
if ix is not None and ix != self.current_ix:
ds = self.da_sub[ix]
self.current_ix = ix
self.line.set_ydata(ds)
self.ax.set_ylim((np.nanmin(ds), np.nanmax(ds)))
if cutoff is None:
if not self.is_symmetric:
self.update(cutoff=(self.ax.get_ylim()[1]-self.ax.get_ylim()[0])/2 + self.ax.get_ylim()[0],
update_canvas=False)
else:
self.update(cutoff=self.ax.get_ylim()[1] / 2, update_canvas=False)
if update_canvas:
self.fig.canvas.draw()
if cutoff is not None and cutoff != self.current_cutoff:
if self.is_symmetric:
self.current_cutoff = abs(cutoff)
else:
self.current_cutoff = cutoff
self.cutoff_line.set_ydata(self.current_cutoff)
if self.is_symmetric:
self.cutoff_line_mirror.set_ydata(-1*self.current_cutoff)
if update_canvas:
self.fig.canvas.draw()
class TADtoolPlot(object):
def __init__(self, hic_matrix, regions=None, data=None, window_sizes=None, norm='lin', max_dist=3000000,
max_percentile=99.99, algorithm='insulation', matrix_colormap=None,
data_colormap=None, log_data=True):
self.hic_matrix = hic_matrix
if regions is None:
regions = []
for i in range(hic_matrix.shape[0]):
regions.append(GenomicRegion(chromosome='', start=i, end=i))
self.regions = regions
self.norm = norm
self.fig = None
self.max_dist = max_dist
self.algorithm = algorithm
self.svmax = None
self.min_value = np.nanmin(self.hic_matrix[np.nonzero(self.hic_matrix)])
self.min_value_data = None
self.hic_plot = None
self.tad_plot = None
self.data_plot = None
self.line_plot = None
self.sdata = None
self.data_ax = None
self.line_ax = None
self.da = None
self.ws = None
self.current_window_size = None
self.window_size_text = None
self.tad_cutoff_text = None
self.max_percentile = max_percentile
self.tad_regions = None
self.current_da_ix = None
self.button_save_tads = None
self.button_save_vector = None
self.button_save_matrix = None
self.log_data = log_data
if algorithm == 'insulation':
self.tad_algorithm = insulation_index
self.tad_calling_algorithm = call_tads_insulation_index
self.is_symmetric = False
if matrix_colormap is None:
self.matrix_colormap = LinearSegmentedColormap.from_list('myreds', ['white', 'red'])
if data_colormap is None:
self.data_plot_color = 'plasma'
elif algorithm == 'ninsulation':
self.tad_algorithm = normalised_insulation_index
self.tad_calling_algorithm = call_tads_insulation_index
self.is_symmetric = True
if matrix_colormap is None:
self.matrix_colormap = LinearSegmentedColormap.from_list('myreds', ['white', 'red'])
if data_colormap is None:
self.data_plot_color = LinearSegmentedColormap.from_list('myreds', ['blue', 'white', 'red'])
elif algorithm == 'directionality':
self.tad_algorithm = directionality_index
self.tad_calling_algorithm = call_tads_directionality_index
self.is_symmetric = True
if matrix_colormap is None:
self.matrix_colormap = LinearSegmentedColormap.from_list('myreds', ['white', 'red'])
if data_colormap is None:
self.data_plot_color = LinearSegmentedColormap.from_list('myreds', ['blue', 'white', 'red'])
if data is None:
self.da, self.ws = data_array(hic_matrix=self.hic_matrix, regions=self.regions,
tad_method=self.tad_algorithm, window_sizes=window_sizes)
else:
self.da = data
if window_sizes is None:
raise ValueError("window_sizes parameter cannot be None when providing data!")
self.ws = window_sizes
def vmax_slider_update(self, val):
self.hic_plot.set_clim(self.min_value, val)
def data_slider_update(self, val):
if self.is_symmetric:
self.data_plot.set_clim(-1*val, val)
else:
self.data_plot.set_clim(self.min_value_data, val)
def on_click_save_tads(self, event):
        tk.Tk().withdraw()  # Hide the root Tk window
save_path = filedialog.asksaveasfilename()
if save_path is not None:
with open(save_path, 'w') as o:
for region in self.tad_regions:
o.write("%s\t%d\t%d\n" % (region.chromosome, region.start-1, region.end))
def on_click_save_vector(self, event):
        tk.Tk().withdraw()  # Hide the root Tk window
save_path = filedialog.asksaveasfilename()
if save_path is not None:
da_sub = self.da[self.current_da_ix]
with open(save_path, 'w') as o:
for i, region in enumerate(self.regions):
o.write("%s\t%d\t%d\t.\t%e\n" % (region.chromosome, region.start-1, region.end, da_sub[i]))
def on_click_save_matrix(self, event):
        tk.Tk().withdraw()  # Hide the root Tk window
save_path = filedialog.asksaveasfilename()
if save_path is not None:
with open(save_path, 'w') as o:
# write regions
for i, region in enumerate(self.regions):
o.write("%s:%d-%d" % (region.chromosome, region.start-1, region.end))
if i < len(self.regions)-1:
o.write("\t")
else:
o.write("\n")
# write matrix
n_rows = self.da.shape[0]
n_cols = self.da.shape[1]
for i in range(n_rows):
window_size = self.ws[i]
o.write("%d\t" % window_size)
for j in range(n_cols):
o.write("%e" % self.da[i, j])
if j < n_cols-1:
o.write("\t")
else:
o.write("\n")
def plot(self, region=None):
# set up plotting grid
self.fig = plt.figure(figsize=(10, 10))
# main plots
grid_size = (32, 15)
hic_vmax_slider_ax = plt.subplot2grid(grid_size, (0, 0), colspan=13)
hic_ax = plt.subplot2grid(grid_size, (1, 0), rowspan=9, colspan=13)
hp_cax = plt.subplot2grid(grid_size, (1, 14), rowspan=9, colspan=1)
tad_ax = plt.subplot2grid(grid_size, (10, 0), rowspan=1, colspan=13, sharex=hic_ax)
line_ax = plt.subplot2grid(grid_size, (12, 0), rowspan=6, colspan=13, sharex=hic_ax)
line_cax = plt.subplot2grid(grid_size, (12, 13), rowspan=6, colspan=2)
data_vmax_slider_ax = plt.subplot2grid(grid_size, (19, 0), colspan=13)
data_ax = plt.subplot2grid(grid_size, (20, 0), rowspan=9, colspan=13, sharex=hic_ax)
da_cax = plt.subplot2grid(grid_size, (20, 14), rowspan=9, colspan=1)
# buttons
save_tads_ax = plt.subplot2grid(grid_size, (31, 0), rowspan=1, colspan=4)
self.button_save_tads = Button(save_tads_ax, 'Save TADs')
self.button_save_tads.on_clicked(self.on_click_save_tads)
save_vector_ax = plt.subplot2grid(grid_size, (31, 5), rowspan=1, colspan=4)
self.button_save_vector = Button(save_vector_ax, 'Save current values')
self.button_save_vector.on_clicked(self.on_click_save_vector)
save_matrix_ax = plt.subplot2grid(grid_size, (31, 10), rowspan=1, colspan=4)
self.button_save_matrix = Button(save_matrix_ax, 'Save matrix')
self.button_save_matrix.on_clicked(self.on_click_save_matrix)
# add subplot content
max_value = np.nanpercentile(self.hic_matrix, self.max_percentile)
init_value = .2*max_value
# HI-C VMAX SLIDER
self.svmax = Slider(hic_vmax_slider_ax, 'vmax', self.min_value, max_value, valinit=init_value, color='grey')
self.svmax.on_changed(self.vmax_slider_update)
# HI-C
self.hic_plot = HicPlot(self.hic_matrix, self.regions, max_dist=self.max_dist, norm=self.norm,
vmax=init_value, vmin=self.min_value, colormap=self.matrix_colormap)
self.hic_plot.plot(region, ax=hic_ax, cax=hp_cax)
# generate data array
self.min_value_data = np.nanmin(self.da[np.nonzero(self.da)])
max_value_data = np.nanpercentile(self.da, self.max_percentile)
init_value_data = .5*max_value_data
# LINE PLOT
da_ix = int(self.da.shape[0]/2)
self.current_da_ix = da_ix
self.line_plot = DataLinePlot(self.da, regions=self.regions, init_row=da_ix, is_symmetric=self.is_symmetric)
self.line_plot.plot(region, ax=line_ax)
self.line_ax = line_ax
# line info
self.current_window_size = self.ws[da_ix]
line_cax.text(.1, .8, 'Window size', fontweight='bold')
self.window_size_text = line_cax.text(.3, .6, str(self.current_window_size))
line_cax.text(.1, .4, 'TAD cutoff', fontweight='bold')
self.tad_cutoff_text = line_cax.text(.3, .2, "%.5f" % self.line_plot.current_cutoff)
line_cax.axis('off')
# TAD PLOT
self.tad_regions = self.tad_calling_algorithm(self.da[da_ix], self.line_plot.current_cutoff, self.regions)
self.tad_plot = TADPlot(self.tad_regions)
self.tad_plot.plot(region=region, ax=tad_ax)
# DATA ARRAY
self.data_plot = DataArrayPlot(self.da, self.ws, self.regions, vmax=init_value_data,
colormap=self.data_plot_color, current_window_size=self.ws[da_ix],
log_y=self.log_data)
self.data_plot.plot(region, ax=data_ax, cax=da_cax)
# DATA ARRAY SLIDER
if self.is_symmetric:
self.sdata = Slider(data_vmax_slider_ax, 'vmax', 0.0, max_value_data,
valinit=init_value_data, color='grey')
else:
self.sdata = Slider(data_vmax_slider_ax, 'vmax', self.min_value_data, max_value_data,
valinit=init_value_data, color='grey')
self.sdata.on_changed(self.data_slider_update)
self.data_slider_update(init_value_data)
# clean up
hic_ax.xaxis.set_visible(False)
line_ax.xaxis.set_visible(False)
# enable hover
self.data_ax = data_ax
cid = self.fig.canvas.mpl_connect('button_press_event', self.on_click)
return self.fig, (hic_vmax_slider_ax, hic_ax, line_ax, data_ax, hp_cax, da_cax)
def on_click(self, event):
if event.inaxes == self.data_ax or event.inaxes == self.line_ax:
if event.inaxes == self.data_ax:
ws_ix = bisect_left(self.ws, event.ydata)
self.current_window_size = self.ws[ws_ix]
self.current_da_ix = ws_ix
self.data_plot.update(window_size=self.ws[ws_ix])
self.line_plot.update(ix=ws_ix, update_canvas=False)
self.tad_cutoff_text.set_text("%.5f" % self.line_plot.current_cutoff)
self.window_size_text.set_text(str(self.current_window_size))
        elif event.inaxes == self.line_ax:
            self.line_plot.update(cutoff=abs(event.ydata), update_canvas=False)
self.tad_cutoff_text.set_text("%.5f" % self.line_plot.current_cutoff)
# update TADs
self.tad_regions = self.tad_calling_algorithm(self.da[self.current_da_ix], self.line_plot.current_cutoff,
self.regions)
self.tad_plot.update(self.tad_regions)
self.fig.canvas.draw()
|
vaquerizaslab/tadtool
|
tadtool/plot.py
|
Python
|
mit
| 28,848 | 0.002634 |
from django.db import models
from django.contrib.auth.models import User
class OrganisationType(models.Model):
type_desc = models.CharField(max_length=200)
def __unicode__(self):
return self.type_desc
class Address(models.Model):
street_address = models.CharField(max_length=100)
city = models.CharField(max_length=100)
pin = models.CharField(max_length=10)
province = models.CharField(max_length=100)
nationality = models.CharField(max_length=100)
def __unicode__(self):
return self.street_address + ',' + self.city
class HattiUser(models.Model):
user = models.OneToOneField(User)
address = models.ForeignKey(Address)
telephone = models.CharField(max_length=500)
date_joined = models.DateTimeField(auto_now_add=True)
fax = models.CharField(max_length=100)
avatar = models.CharField(max_length=100, null=True, blank=True)
tagline = models.CharField(max_length=140)
class Meta:
abstract = True
class AdminOrganisations(HattiUser):
title = models.CharField(max_length=200)
organisation_type = models.ForeignKey(OrganisationType)
def __unicode__(self):
return self.title
class Customer(HattiUser):
title = models.CharField(max_length=200, blank=True, null=True)
    is_org = models.BooleanField()
org_type = models.ForeignKey(OrganisationType)
    company = models.CharField(max_length=200)
    def __unicode__(self):
return unicode(self.user)
|
saloni10/librehatti_new
|
src/authentication/models.py
|
Python
|
gpl-2.0
| 1,479 | 0.00879 |
#! /usr/bin/env python
#
# example2_gtk.py -- Simple, configurable FITS viewer.
#
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
from __future__ import print_function
import sys, os
import logging, logging.handlers
from ginga import AstroImage
from ginga.gtkw import GtkHelp
from ginga.gtkw.ImageViewGtk import CanvasView
from ginga.canvas.CanvasObject import get_canvas_types
from ginga import colors
from ginga.misc import log
import gtk
STD_FORMAT = '%(asctime)s | %(levelname)1.1s | %(filename)s:%(lineno)d (%(funcName)s) | %(message)s'
class FitsViewer(object):
def __init__(self, logger):
self.logger = logger
self.drawcolors = colors.get_colors()
self.dc = get_canvas_types()
root = gtk.Window(gtk.WINDOW_TOPLEVEL)
root.set_title("Gtk2 CanvasView Example")
root.set_border_width(2)
root.connect("delete_event", lambda w, e: quit(w))
self.root = root
self.select = GtkHelp.FileSelection(root)
vbox = gtk.VBox(spacing=2)
fi = CanvasView(logger)
fi.enable_autocuts('on')
fi.set_autocut_params('zscale')
fi.enable_autozoom('on')
fi.set_zoom_algorithm('rate')
fi.set_zoomrate(1.4)
fi.show_pan_mark(True)
fi.set_callback('drag-drop', self.drop_file)
fi.set_callback('none-move', self.motion)
fi.set_bg(0.2, 0.2, 0.2)
fi.ui_setActive(True)
self.fitsimage = fi
bd = fi.get_bindings()
bd.enable_all(True)
# canvas that we will draw on
canvas = self.dc.DrawingCanvas()
canvas.enable_draw(True)
canvas.set_drawtype('rectangle', color='lightblue')
canvas.setSurface(fi)
self.canvas = canvas
# add canvas to view
private_canvas = fi.get_canvas()
private_canvas.register_for_cursor_drawing(fi)
private_canvas.add(canvas)
canvas.ui_setActive(True)
self.drawtypes = canvas.get_drawtypes()
self.drawtypes.sort()
# add a color bar
#fi.show_color_bar(True)
fi.show_focus_indicator(True)
# add little mode indicator that shows keyboard modal states
fi.show_mode_indicator(True, corner='ur')
w = fi.get_widget()
w.set_size_request(512, 512)
vbox.pack_start(w, fill=True, expand=True)
self.readout = gtk.Label("")
vbox.pack_start(self.readout, fill=True, expand=False)
hbox = gtk.HBox(spacing=5)
wdrawtype = GtkHelp.combo_box_new_text()
index = 0
for name in self.drawtypes:
wdrawtype.insert_text(index, name)
index += 1
index = self.drawtypes.index('rectangle')
wdrawtype.set_active(index)
wdrawtype.connect('changed', self.set_drawparams)
self.wdrawtype = wdrawtype
wdrawcolor = GtkHelp.combo_box_new_text()
index = 0
for name in self.drawcolors:
wdrawcolor.insert_text(index, name)
index += 1
index = self.drawcolors.index('lightblue')
wdrawcolor.set_active(index)
wdrawcolor.connect('changed', self.set_drawparams)
self.wdrawcolor = wdrawcolor
wfill = GtkHelp.CheckButton("Fill")
wfill.sconnect('toggled', self.set_drawparams)
self.wfill = wfill
walpha = GtkHelp.SpinButton()
adj = walpha.get_adjustment()
adj.configure(0.0, 0.0, 1.0, 0.1, 0.1, 0)
walpha.set_value(1.0)
walpha.set_digits(1)
walpha.sconnect('value-changed', self.set_drawparams)
self.walpha = walpha
wclear = gtk.Button("Clear Canvas")
wclear.connect('clicked', self.clear_canvas)
wopen = gtk.Button("Open File")
wopen.connect('clicked', self.open_file)
wquit = gtk.Button("Quit")
        wquit.connect('clicked', self.quit)
for w in (wquit, wclear, walpha, gtk.Label("Alpha:"),
wfill, wdrawcolor, wdrawtype, wopen):
hbox.pack_end(w, fill=False, expand=False)
vbox.pack_start(hbox, fill=False, expand=False)
root.add(vbox)
def get_widget(self):
return self.root
def set_drawparams(self, w):
index = self.wdrawtype.get_active()
kind = self.drawtypes[index]
index = self.wdrawcolor.get_active()
fill = self.wfill.get_active()
alpha = self.walpha.get_value()
params = { 'color': self.drawcolors[index],
'alpha': alpha,
#'cap': 'ball',
}
if kind in ('circle', 'rectangle', 'polygon', 'triangle',
'righttriangle', 'ellipse', 'square', 'box'):
params['fill'] = fill
params['fillalpha'] = alpha
self.canvas.set_drawtype(kind, **params)
def clear_canvas(self, w):
self.canvas.delete_all_objects()
def load_file(self, filepath):
image = AstroImage.AstroImage(logger=self.logger)
image.load_file(filepath)
self.fitsimage.set_image(image)
self.root.set_title(filepath)
def open_file(self, w):
self.select.popup("Open FITS file", self.load_file)
def drop_file(self, fitsimage, paths):
fileName = paths[0]
self.load_file(fileName)
def motion(self, fitsimage, button, data_x, data_y):
# Get the value under the data coordinates
try:
#value = fitsimage.get_data(data_x, data_y)
# We report the value across the pixel, even though the coords
# change halfway across the pixel
value = fitsimage.get_data(int(data_x+0.5), int(data_y+0.5))
except Exception:
value = None
fits_x, fits_y = data_x + 1, data_y + 1
# Calculate WCS RA
try:
# NOTE: image function operates on DATA space coords
image = fitsimage.get_image()
if image is None:
# No image loaded
return
ra_txt, dec_txt = image.pixtoradec(fits_x, fits_y,
format='str', coords='fits')
except Exception as e:
self.logger.warning("Bad coordinate conversion: %s" % (
str(e)))
ra_txt = 'BAD WCS'
dec_txt = 'BAD WCS'
text = "RA: %s DEC: %s X: %.2f Y: %.2f Value: %s" % (
ra_txt, dec_txt, fits_x, fits_y, value)
self.readout.set_text(text)
def quit(self, w):
gtk.main_quit()
return True
def main(options, args):
logger = log.get_logger("example2", options=options)
# Check whether user wants to use OpenCv
if options.opencv:
from ginga import trcalc
try:
trcalc.use('opencv')
except Exception as e:
logger.warning("failed to set OpenCv preference: %s" % (str(e)))
# Check whether user wants to use OpenCL
elif options.opencl:
from ginga import trcalc
try:
trcalc.use('opencl')
except Exception as e:
logger.warning("failed to set OpenCL preference: %s" % (str(e)))
fv = FitsViewer(logger)
root = fv.get_widget()
root.show_all()
if len(args) > 0:
fv.load_file(args[0])
gtk.main()
if __name__ == "__main__":
# Parse command line options with nifty optparse module
from optparse import OptionParser
usage = "usage: %prog [options] cmd [args]"
optprs = OptionParser(usage=usage, version=('%%prog'))
optprs.add_option("--debug", dest="debug", default=False, action="store_true",
help="Enter the pdb debugger on main()")
optprs.add_option("--opencv", dest="opencv", default=False,
action="store_true",
help="Use OpenCv acceleration")
optprs.add_option("--opencl", dest="opencl", default=False,
action="store_true",
help="Use OpenCL acceleration")
optprs.add_option("--profile", dest="profile", action="store_true",
default=False,
help="Run the profiler on main()")
log.addlogopts(optprs)
(options, args) = optprs.parse_args(sys.argv[1:])
# Are we debugging this?
if options.debug:
import pdb
pdb.run('main(options, args)')
# Are we profiling this?
elif options.profile:
import profile
        print("%s profile:" % sys.argv[0])
profile.run('main(options, args)')
else:
main(options, args)
# END
|
stscieisenhamer/ginga
|
ginga/examples/gtk/example2_gtk.py
|
Python
|
bsd-3-clause
| 8,631 | 0.00139 |
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
import os
from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
def load_fixture(name):
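    # Parse and memoize a fixture file: JSON content is decoded, anything else is returned verbatim.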
path = os.path.join(fixture_path, name)
if path in fixture_data:
return fixture_data[path]
with open(path) as f:
data = f.read()
try:
data = json.loads(data)
except:
pass
fixture_data[path] = data
return data
class TestMlnxosModule(ModuleTestCase):
def execute_module(self, failed=False, changed=False, commands=None, is_updates=False, sort=True, transport='cli'):
self.load_fixtures(commands, transport=transport)
if failed:
result = self.failed()
self.assertTrue(result['failed'], result)
else:
result = self.changed(changed)
self.assertEqual(result['changed'], changed, result)
if commands is not None:
if is_updates:
commands_res = result.get('updates')
else:
commands_res = result.get('commands')
if sort:
self.assertEqual(sorted(commands), sorted(commands_res), commands_res)
else:
self.assertEqual(commands, commands_res, commands_res)
return result
def failed(self):
with self.assertRaises(AnsibleFailJson) as exc:
self.module.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'], result)
return result
def changed(self, changed=False):
with self.assertRaises(AnsibleExitJson) as exc:
self.module.main()
result = exc.exception.args[0]
self.assertEqual(result['changed'], changed, result)
return result
def load_fixtures(self, commands=None, transport='cli'):
pass
|
le9i0nx/ansible
|
test/units/modules/network/mlnxos/mlnxos_module.py
|
Python
|
gpl-3.0
| 2,693 | 0.001485 |
"""Various useful tools."""
import copy
import datetime
import logging
# FIXME: temporary backward compatibility
from eaf.core import Vec3 as Point
LOG_FORMAT = (
"[%(asctime)s] %(levelname)-8s %(name)s[%(funcName)s]:%(lineno)s: "
"%(message)s"
)
"""Log message format string."""
TIME_FORMAT = "%H:%M:%S,%03d"
"""Log time format string."""
DATE_FORMAT = "%Y-%m-%d %a"
"""Initial log entry date format string."""
def setup_logger(name, debug=False, msgfmt=None, timefmt=None):
"""Setup logger with linked log file.
Do not use it for getting logger, call this once on init,
then use logging.getLogger(__name__) for getting actual logger.
:param str name: logger relative name
:param bool debug: debug mode
:param str msgfmt: message format
:param str timefmt: time format
:return: prepared logger instance
:rtype: `logging.Logger`
"""
logger = logging.getLogger(name)
logger.propagate = False
level = logging.DEBUG if debug else logging.INFO
logger.setLevel(level)
handler = logging.FileHandler("{0}.log".format(name))
handler.setLevel(level)
formatter = logging.Formatter(msgfmt or LOG_FORMAT, timefmt or TIME_FORMAT)
handler.setFormatter(formatter)
logger.addHandler(handler)
date = datetime.date.today().strftime(DATE_FORMAT)
logger.info("*** (%s) Initializing XOInvader ***", date)
return logger
def clamp(val, min_val, max_val):
"""Clamp value between boundaries."""
if max_val < min_val:
raise ValueError("max_val must be >= min_val")
return min(max(val, min_val), max_val)
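# Example: clamp(5, 0, 3) -> 3, clamp(-1, 0, 3) -> 0, clamp(2, 0, 3) -> 2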
class dotdict(dict): # pylint: disable=invalid-name
"""Container for dot elements access."""
def __init__(self, *args, **kwargs):
super(dotdict, self).__init__(*args, **kwargs)
self.__dict__ = self
self._wrap_nested()
def _wrap_nested(self):
"""Wrap nested dicts for deep dot access."""
for key, value in self.items():
if isinstance(value, dict):
self[key] = dotdict(value)
def fullcopy(self):
"""Return full copy of internal structure as dotdict.
:return :class:`xoinvader.utils.dotdict`: full copy
"""
return dotdict(copy.deepcopy(self))
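# Example: d = dotdict({'a': {'b': 1}}); d.a.b == 1 (nested dicts are wrapped too)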
class InfiniteList(list):
"""Infinite list container."""
def __init__(self, *args, **kwargs):
super(InfiniteList, self).__init__(*args, **kwargs)
self._index = 0
def select(self, index: int) -> object:
"""Set index and return selected element."""
if not len(self):
raise IndexError("List is empty")
if not (0 <= index < len(self)):
raise IndexError("Index out of bounds.")
self._index = index
return self[self._index]
def current(self) -> object:
"""Return current element."""
return self[self._index]
def next(self) -> object:
"""Select next element and return it."""
try:
self._index = (self._index + 1) % len(self)
except ZeroDivisionError:
raise IndexError("List is empty.")
return self[self._index]
def prev(self) -> object:
"""Select previous element and return it."""
try:
self._index = (self._index - 1) % len(self)
except ZeroDivisionError:
raise IndexError("List is empty.")
return self[self._index]
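# Example: items = InfiniteList(['a', 'b']); items.next() -> 'b', items.next() -> 'a' (wraps around)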
|
pankshok/xoinvader
|
xoinvader/utils.py
|
Python
|
mit
| 3,433 | 0 |
# -*- coding: utf-8 -*-
import json
from flask import jsonify
from flask import render_template, request, url_for, redirect
import time, random
#------------------------------------------------------------------------------
def get_desktop_items_data():
"""
Returns items for Desktop in JSON array:
title
"""
items = [
{'title': 'OS/2 System', 'icon': '/appmedia/imgs/system_folder.png', 'left': '0px', 'top': '40px', 'action': '/system_folder/'},
{'title': 'Information', 'icon': '/appmedia/imgs/help.png', 'left': '0px', 'top': '120px', 'action': '/appmedia/help/desktop.html'},
{'title': 'Virtual PC', 'icon': '/appmedia/imgs/system/minimized.png', 'left': '0px', 'top': '200px', 'action': '/'},
{'title': 'WebExplorer', 'icon': '/appmedia/imgs/web/explore.gif', 'left': '0px', 'top': '280px', 'action': '/webexplorer/'},
{'title': 'WIN-OS/2 Window', 'icon': '/appmedia/imgs/cmd/win_wnd.png', 'left': '0px', 'top': '360px', 'action': '/cmd/?cmd=win_wnd', 'app': 'yes'},
{'title': 'Solitaire', 'icon': '/appmedia/imgs/files/sol.jpg', 'left': '0px', 'top': '440px', 'action': 'http://www.webolog.com/online_games/solitaire/loaderwm.swf', 'app': 'yes'},
]
#return jsonify(items=items)
return json.dumps(items)
#------------------------------------------------------------------------------
def get_lanchpad_data():
return render_template("lanchpad.html")
#------------------------------------------------------------------------------
def get_window_data():
"Returns rendered window with iframe inside"
title = request.args.get("title", "")
src = request.args.get("src", "")
width = request.args.get("width", "634")
height = request.args.get("height", "450")
win_id = int(time.time())
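    # use the current timestamp as a rough unique id for the window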
template = "pm/base_window.html"
if src.find("win_") != -1:
template = "pm/win_window.html"
#title = "Program Manager"
content = {
"title": title,
"src": src,
"win_id": win_id,
"wnd_left": random.randint(120, 300),
"wnd_top": random.randint(20, 100),
"width": width,
"height": height,
}
return render_template(template, **content)
#------------------------------------------------------------------------------
def get_dialog_data():
"Returns rendered dialog"
dlg = request.args.get("dlg", "")
title = request.args.get("title", "")
win_id = int(time.time())
template = "dialogs/%s.html" % dlg
content = {
"title": title,
"dlg": dlg,
"win_id": win_id,
"wnd_left": 400,
"wnd_top": 300,
"width": 290,
"height": 150,
}
return render_template(template, **content)
#------------------------------------------------------------------------------
|
vsergeyev/os2online
|
desktop/desktop_items.py
|
Python
|
mit
| 3,022 | 0.009927 |
#!/usr/bin/env python3
from http.server import HTTPServer, CGIHTTPRequestHandler
port = 8000
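# CGIHTTPRequestHandler serves plain files and executes scripts under /cgi-bin (and /htbin)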
httpd = HTTPServer(('', port), CGIHTTPRequestHandler)
print("Starting simple_httpd on port: " + str(httpd.server_port))
httpd.serve_forever()
|
fthuin/artificial-intelligence
|
assignment3/Code/zipremise/SimpleHTTPServer.py
|
Python
|
mit
| 238 | 0 |
"""
Unittest for time.strftime
"""
import calendar
import sys
import os
import re
from test import test_support
import time
import unittest
# helper functions
def fixasctime(s):
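    # asctime() pads a single-digit day of month with a space; normalize it to a leading zero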
if s[8] == ' ':
s = s[:8] + '0' + s[9:]
return s
def escapestr(text, ampm):
"""
Escape text to deal with possible locale values that have regex
syntax while allowing regex syntax used for comparison.
"""
new_text = re.escape(text)
new_text = new_text.replace(re.escape(ampm), ampm)
new_text = new_text.replace('\%', '%')
new_text = new_text.replace('\:', ':')
new_text = new_text.replace('\?', '?')
return new_text
class StrftimeTest(unittest.TestCase):
def __init__(self, *k, **kw):
unittest.TestCase.__init__(self, *k, **kw)
def _update_variables(self, now):
# we must update the local variables on every cycle
self.gmt = time.gmtime(now)
now = time.localtime(now)
if now[3] < 12: self.ampm='(AM|am)'
else: self.ampm='(PM|pm)'
self.jan1 = time.localtime(time.mktime((now[0], 1, 1, 0, 0, 0, 0, 1, 0)))
try:
if now[8]: self.tz = time.tzname[1]
else: self.tz = time.tzname[0]
except AttributeError:
self.tz = ''
if now[3] > 12: self.clock12 = now[3] - 12
elif now[3] > 0: self.clock12 = now[3]
else: self.clock12 = 12
self.now = now
def setUp(self):
try:
import java
java.util.Locale.setDefault(java.util.Locale.US)
except ImportError:
import locale
locale.setlocale(locale.LC_TIME, 'C')
def test_strftime(self):
now = time.time()
self._update_variables(now)
self.strftest1(now)
self.strftest2(now)
if test_support.verbose:
print "Strftime test, platform: %s, Python version: %s" % \
(sys.platform, sys.version.split()[0])
for j in range(-5, 5):
for i in range(25):
arg = now + (i+j*100)*23*3603
self._update_variables(arg)
self.strftest1(arg)
self.strftest2(arg)
def strftest1(self, now):
if test_support.verbose:
print "strftime test for", time.ctime(now)
now = self.now
# Make sure any characters that could be taken as regex syntax is
# escaped in escapestr()
expectations = (
('%a', calendar.day_abbr[now[6]], 'abbreviated weekday name'),
('%A', calendar.day_name[now[6]], 'full weekday name'),
('%b', calendar.month_abbr[now[1]], 'abbreviated month name'),
('%B', calendar.month_name[now[1]], 'full month name'),
# %c see below
('%d', '%02d' % now[2], 'day of month as number (00-31)'),
('%H', '%02d' % now[3], 'hour (00-23)'),
('%I', '%02d' % self.clock12, 'hour (01-12)'),
('%j', '%03d' % now[7], 'julian day (001-366)'),
('%m', '%02d' % now[1], 'month as number (01-12)'),
('%M', '%02d' % now[4], 'minute, (00-59)'),
('%p', self.ampm, 'AM or PM as appropriate'),
('%S', '%02d' % now[5], 'seconds of current time (00-60)'),
('%U', '%02d' % ((now[7] + self.jan1[6])//7),
'week number of the year (Sun 1st)'),
('%w', '0?%d' % ((1+now[6]) % 7), 'weekday as a number (Sun 1st)'),
('%W', '%02d' % ((now[7] + (self.jan1[6] - 1)%7)//7),
'week number of the year (Mon 1st)'),
# %x see below
('%X', '%02d:%02d:%02d' % (now[3], now[4], now[5]), '%H:%M:%S'),
('%y', '%02d' % (now[0]%100), 'year without century'),
('%Y', '%d' % now[0], 'year with century'),
# %Z see below
('%%', '%', 'single percent sign'),
)
for e in expectations:
# musn't raise a value error
try:
result = time.strftime(e[0], now)
except ValueError, error:
self.fail("strftime '%s' format gave error: %s" % (e[0], error))
if re.match(escapestr(e[1], self.ampm), result):
continue
if not result or result[0] == '%':
self.fail("strftime does not support standard '%s' format (%s)"
% (e[0], e[2]))
else:
self.fail("Conflict for %s (%s): expected %s, but got %s"
% (e[0], e[2], e[1], result))
def strftest2(self, now):
nowsecs = str(long(now))[:-1]
now = self.now
nonstandard_expectations = (
# These are standard but don't have predictable output
('%c', fixasctime(time.asctime(now)), 'near-asctime() format'),
('%x', '%02d/%02d/%02d' % (now[1], now[2], (now[0]%100)),
'%m/%d/%y %H:%M:%S'),
('%Z', '%s' % self.tz, 'time zone name'),
# These are some platform specific extensions
('%D', '%02d/%02d/%02d' % (now[1], now[2], (now[0]%100)), 'mm/dd/yy'),
('%e', '%2d' % now[2], 'day of month as number, blank padded ( 0-31)'),
('%h', calendar.month_abbr[now[1]], 'abbreviated month name'),
('%k', '%2d' % now[3], 'hour, blank padded ( 0-23)'),
('%n', '\n', 'newline character'),
('%r', '%02d:%02d:%02d %s' % (self.clock12, now[4], now[5], self.ampm),
'%I:%M:%S %p'),
('%R', '%02d:%02d' % (now[3], now[4]), '%H:%M'),
('%s', nowsecs, 'seconds since the Epoch in UCT'),
('%t', '\t', 'tab character'),
('%T', '%02d:%02d:%02d' % (now[3], now[4], now[5]), '%H:%M:%S'),
('%3y', '%03d' % (now[0]%100),
'year without century rendered using fieldwidth'),
)
for e in nonstandard_expectations:
try:
result = time.strftime(e[0], now)
except ValueError, result:
msg = "Error for nonstandard '%s' format (%s): %s" % \
(e[0], e[2], str(result))
if test_support.verbose:
print msg
continue
if re.match(escapestr(e[1], self.ampm), result):
if test_support.verbose:
print "Supports nonstandard '%s' format (%s)" % (e[0], e[2])
elif not result or result[0] == '%':
if test_support.verbose:
print "Does not appear to support '%s' format (%s)" % \
(e[0], e[2])
else:
if test_support.verbose:
print "Conflict for nonstandard '%s' format (%s):" % \
(e[0], e[2])
print " Expected %s, but got %s" % (e[1], result)
def test_main():
test_support.run_unittest(StrftimeTest)
if __name__ == '__main__':
test_main()
|
mancoast/CPythonPyc_test
|
cpython/266_test_strftime.py
|
Python
|
gpl-3.0
| 6,966 | 0.004594 |
# pylint: disable=I0011,C0301
from __future__ import absolute_import, unicode_literals
import os
from setuptools import find_packages, setup
from namespaced_session import __version__
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-namespaced-session',
version=__version__,
packages=find_packages(exclude=['tests']),
include_package_data=True,
test_suite="runtests.main",
license='MIT',
description='Django app which makes it easier to work with dictionaries in sessions',
long_description=README,
url='https://github.com/ckot/django-namespaced-session/',
author='Scott Silliman',
author_email='scott.t.silliman@gmail.com',
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'Framework :: Django :: 1.7',
        'Framework :: Django :: 1.8',
        'Framework :: Django :: 1.9',
        'Framework :: Django :: 1.10',
'Intended Audience :: Developers',
'License :: OSI Approved',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7'
],
)
|
ckot/django-namespaced-session
|
setup.py
|
Python
|
mit
| 1,469 | 0.000681 |
test = {
'name': '',
'points': 1,
'suites': [
{
'cases': [
{
'code': r"""
>>> type(imdb_by_year) == tables.Table
True
>>> imdb_by_year.column('Title').take(range(3))
array(['The Kid (1921)', 'The Gold Rush (1925)', 'The General (1926)'],
dtype='<U75')
""",
'hidden': False,
'locked': False
},
],
'scored': True,
'setup': '',
'teardown': '',
'type': 'doctest'
}
]
}
|
jamesfolberth/NGC_STEM_camp_AWS
|
notebooks/data8_notebooks/lab03/tests/q3_2.py
|
Python
|
bsd-3-clause
| 530 | 0.003774 |
"""
distutils commands for riak-python-client
"""
from distutils import log
from distutils.core import Command
from distutils.errors import DistutilsOptionError
from subprocess import Popen, PIPE
from string import Template
import shutil
import re
import os.path
__all__ = ['create_bucket_types', 'setup_security', 'enable_security',
'disable_security', 'preconfigure', 'configure']
# Exception classes used by this module.
class CalledProcessError(Exception):
"""This exception is raised when a process run by check_call() or
check_output() returns a non-zero exit status.
The exit status will be stored in the returncode attribute;
check_output() will also store the output in the output attribute.
"""
def __init__(self, returncode, cmd, output=None):
self.returncode = returncode
self.cmd = cmd
self.output = output
def __str__(self):
return "Command '%s' returned non-zero exit status %d" % (self.cmd,
self
.returncode)
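# check_output was added to subprocess in Python 2.7; this is a local fallback for older interpreters.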
def check_output(*popenargs, **kwargs):
"""Run command with arguments and return its output as a byte string.
If the exit code was non-zero it raises a CalledProcessError. The
CalledProcessError object will have the return code in the returncode
attribute and output in the output attribute.
The arguments are the same as for the Popen constructor. Example:
>>> check_output(["ls", "-l", "/dev/null"])
'crw-rw-rw- 1 root root 1, 3 Oct 18 2007 /dev/null\n'
The stdout argument is not allowed as it is used internally.
To capture standard error in the result, use stderr=STDOUT.
>>> import sys
>>> check_output(["/bin/sh", "-c",
... "ls -l non_existent_file ; exit 0"],
... stderr=sys.stdout)
'ls: non_existent_file: No such file or directory\n'
"""
if 'stdout' in kwargs:
raise ValueError('stdout argument not allowed, it will be '
'overridden.')
process = Popen(stdout=PIPE, *popenargs, **kwargs)
output, unused_err = process.communicate()
retcode = process.poll()
if retcode:
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
raise CalledProcessError(retcode, cmd, output=output)
return output
try:
import simplejson as json
except ImportError:
import json
class create_bucket_types(Command):
"""
Creates bucket-types appropriate for testing. By default this will create:
* `pytest-maps` with ``{"datatype":"map"}``
* `pytest-sets` with ``{"datatype":"set"}``
* `pytest-counters` with ``{"datatype":"counter"}``
* `pytest-consistent` with ``{"consistent":true}``
* `pytest-mr`
* `pytest` with ``{"allow_mult":false}``
"""
description = "create bucket-types used in integration tests"
user_options = [
('riak-admin=', None, 'path to the riak-admin script')
]
_props = {
'pytest-maps': {'datatype': 'map'},
'pytest-sets': {'datatype': 'set'},
'pytest-counters': {'datatype': 'counter'},
'pytest-consistent': {'consistent': True},
'pytest-mr': {},
'pytest': {'allow_mult': False}
}
def initialize_options(self):
self.riak_admin = None
def finalize_options(self):
if self.riak_admin is None:
raise DistutilsOptionError("riak-admin option not set")
def run(self):
if self._check_available():
for name in self._props:
self._create_and_activate_type(name, self._props[name])
def check_output(self, *args, **kwargs):
if self.dry_run:
log.info(' '.join(args))
return bytearray()
else:
return check_output(*args, **kwargs)
def _check_available(self):
try:
self.check_btype_command("list")
return True
except CalledProcessError:
log.error("Bucket types are not supported on this Riak node!")
return False
def _create_and_activate_type(self, name, props):
# Check status of bucket-type
exists = False
active = False
try:
status = self.check_btype_command('status', name)
except CalledProcessError as e:
status = e.output
exists = ('not an existing bucket type' not in status.decode('ascii'))
active = ('is active' in status.decode('ascii'))
if exists or active:
log.info("Updating {0} bucket-type with props {1}"
.format(repr(name), repr(props)))
self.check_btype_command("update", name,
json.dumps({'props': props},
separators=(',', ':')))
else:
log.info("Creating {0} bucket-type with props {1}"
.format(repr(name), repr(props)))
self.check_btype_command("create", name,
json.dumps({'props': props},
separators=(',', ':')))
if not active:
log.info('Activating {0} bucket-type'.format(repr(name)))
self.check_btype_command("activate", name)
def check_btype_command(self, *args):
cmd = self._btype_command(*args)
return self.check_output(cmd)
def run_btype_command(self, *args):
self.spawn(self._btype_command(*args))
def _btype_command(self, *args):
cmd = [self.riak_admin, "bucket-type"]
cmd.extend(args)
return cmd
class security_commands(object):
def check_security_command(self, *args):
cmd = self._security_command(*args)
return self.check_output(cmd)
def run_security_command(self, *args):
self.spawn(self._security_command(*args))
    def _security_command(self, *args):
        cmd = [self.riak_admin, "security"]
        for elem in args:
            # Each argument may be a plain string or a list/tuple of strings;
            # extending with a bare string would split it into characters.
            if isinstance(elem, (list, tuple)):
                cmd.extend(elem)
            else:
                cmd.append(elem)
        return cmd
def check_output(self, *args, **kwargs):
if self.dry_run:
log.info(' '.join(args))
return bytearray()
else:
return check_output(*args, **kwargs)
class setup_security(Command, security_commands):
"""
Sets up security for testing. By default this will create:
* User `testuser` with password `testpassword`
* User `certuser` with password `certpass`
* Two security sources
* Permissions on
* riak_kv.get
* riak_kv.put
* riak_kv.delete
* riak_kv.index
* riak_kv.list_keys
* riak_kv.list_buckets
* riak_kv.mapreduce
* riak_core.get_bucket
* riak_core.set_bucket
* riak_core.get_bucket_type
* riak_core.set_bucket_type
* search.admin
* search.query
"""
description = "create security settings used in integration tests"
user_options = [
('riak-admin=', None, 'path to the riak-admin script'),
('username=', None, 'test user account'),
('password=', None, 'password for test user account'),
('certuser=', None, 'certificate test user account'),
('certpass=', None, 'password for certificate test user account')
]
_commands = [
"add-user $USERNAME password=$PASSWORD",
"add-source $USERNAME 127.0.0.1/32 password",
"add-user $CERTUSER password=$CERTPASS",
"add-source $CERTUSER 127.0.0.1/32 certificate"
]
_grants = {
"riak_kv.get": ["any"],
"riak_kv.put": ["any"],
"riak_kv.delete": ["any"],
"riak_kv.index": ["any"],
"riak_kv.list_keys": ["any"],
"riak_kv.list_buckets": ["any"],
"riak_kv.mapreduce": ["any"],
"riak_core.get_bucket": ["any"],
"riak_core.set_bucket": ["any"],
"riak_core.get_bucket_type": ["any"],
"riak_core.set_bucket_type": ["any"],
"search.admin": ["index", "schema"],
"search.query": ["index", "schema"]
}
def initialize_options(self):
self.riak_admin = None
self.username = None
self.password = None
self.certuser = None
self.certpass = None
def finalize_options(self):
if self.riak_admin is None:
raise DistutilsOptionError("riak-admin option not set")
if self.username is None:
self.username = 'testuser'
if self.password is None:
self.password = 'testpassword'
if self.certuser is None:
self.certuser = 'certuser'
if self.certpass is None:
self.certpass = 'certpass'
def run(self):
if self._check_available():
for cmd in self._commands:
# Replace the username and password if specified
s = Template(cmd)
newcmd = s.substitute(USERNAME=self.username,
PASSWORD=self.password,
CERTUSER=self.certuser,
CERTPASS=self.certpass)
log.info("Security command: {0}".format(repr(newcmd)))
self.run_security_command(tuple(newcmd.split(' ')))
for perm in self._grants:
self._apply_grant(perm, self._grants[perm])
def _check_available(self):
try:
self.check_security_command("status")
return True
except CalledProcessError:
log.error("Security is not supported on this Riak node!")
return False
def _apply_grant(self, perm, targets):
for target in targets:
cmd = ["grant", perm, "on", target, "to", self.username]
log.info("Granting permission {0} on {1} to {2}"
.format(repr(perm), repr(target), repr(self.username)))
self.run_security_command(cmd)
cmd = ["grant", perm, "on", target, "to", self.certuser]
log.info("Granting permission {0} on {1} to {2}"
.format(repr(perm), repr(target), repr(self.certuser)))
self.run_security_command(cmd)
class enable_security(Command, security_commands):
"""
Actually turn on security.
"""
description = "turn on security within Riak"
user_options = [
('riak-admin=', None, 'path to the riak-admin script'),
]
def initialize_options(self):
self.riak_admin = None
def finalize_options(self):
if self.riak_admin is None:
raise DistutilsOptionError("riak-admin option not set")
def run(self):
cmd = "enable"
self.run_security_command(tuple(cmd.split(' ')))
class disable_security(Command, security_commands):
"""
Actually turn off security.
"""
description = "turn off security within Riak"
user_options = [
('riak-admin=', None, 'path to the riak-admin script'),
]
def initialize_options(self):
self.riak_admin = None
def finalize_options(self):
if self.riak_admin is None:
raise DistutilsOptionError("riak-admin option not set")
def run(self):
cmd = "disable"
self.run_security_command(tuple(cmd.split(' ')))
class preconfigure(Command):
"""
Sets up security configuration.
* Update these lines in riak.conf
* storage_backend = leveldb
* search = on
* listener.protobuf.internal = 127.0.0.1:8087
* listener.http.internal = 127.0.0.1:8098
* listener.https.internal = 127.0.0.1:18098
* ssl.certfile = $pwd/tests/resources/server.crt
* ssl.keyfile = $pwd/tests/resources/server.key
* ssl.cacertfile = $pwd/tests/resources/ca.crt
* check_crl = off
"""
description = "preconfigure security settings used in integration tests"
user_options = [
('riak-conf=', None, 'path to the riak.conf file'),
('host=', None, 'IP of host running Riak'),
('pb-port=', None, 'protocol buffers port number'),
('https-port=', None, 'https port number')
]
def initialize_options(self):
self.riak_conf = None
self.host = "127.0.0.1"
self.pb_port = "8087"
self.http_port = "8098"
self.https_port = "18098"
def finalize_options(self):
if self.riak_conf is None:
raise DistutilsOptionError("riak-conf option not set")
def run(self):
self.cert_dir = os.path.dirname(os.path.realpath(__file__)) + \
"/riak/tests/resources"
self._update_riak_conf()
def _update_riak_conf(self):
http_host = self.host + ':' + self.http_port
https_host = self.host + ':' + self.https_port
pb_host = self.host + ':' + self.pb_port
self._backup_file(self.riak_conf)
f = open(self.riak_conf, 'r', buffering=1)
conf = f.read()
f.close()
conf = re.sub(r'search\s+=\s+off', r'search = on', conf)
conf = re.sub(r'##[ ]+ssl\.', r'ssl.', conf)
conf = re.sub(r'ssl.certfile\s+=\s+\S+',
r'ssl.certfile = ' + self.cert_dir + '/server.crt',
conf)
conf = re.sub(r'storage_backend\s+=\s+\S+',
r'storage_backend = leveldb',
conf)
conf = re.sub(r'ssl.keyfile\s+=\s+\S+',
r'ssl.keyfile = ' + self.cert_dir + '/server.key',
conf)
conf = re.sub(r'ssl.cacertfile\s+=\s+\S+',
r'ssl.cacertfile = ' + self.cert_dir +
'/ca.crt',
conf)
conf = re.sub(r'#*[ ]*listener.http.internal\s+=\s+\S+',
r'listener.http.internal = ' + http_host,
conf)
conf = re.sub(r'#*[ ]*listener.https.internal\s+=\s+\S+',
r'listener.https.internal = ' + https_host,
conf)
conf = re.sub(r'listener.protobuf.internal\s+=\s+\S+',
r'listener.protobuf.internal = ' + pb_host,
conf)
conf += 'check_crl = off\n'
# Older versions of OpenSSL client library need to match on the server
conf += 'tls_protocols.tlsv1 = on\n'
conf += 'tls_protocols.tlsv1.1 = on\n'
f = open(self.riak_conf, 'w', buffering=1)
f.write(conf)
f.close()
def _backup_file(self, name):
backup = name + ".bak"
if os.path.isfile(name):
shutil.copyfile(name, backup)
else:
log.info("Cannot backup missing file {0}".format(repr(name)))
class configure(Command):
"""
Sets up security configuration.
* Run setup_security and create_bucket_types
"""
description = "create bucket types and security settings for testing"
user_options = create_bucket_types.user_options + \
setup_security.user_options
def initialize_options(self):
self.riak_admin = None
self.username = None
self.password = None
def finalize_options(self):
bucket = self.distribution.get_command_obj('create_bucket_types')
bucket.riak_admin = self.riak_admin
security = self.distribution.get_command_obj('setup_security')
security.riak_admin = self.riak_admin
security.username = self.username
security.password = self.password
def run(self):
# Run all relevant sub-commands.
for cmd_name in self.get_sub_commands():
self.run_command(cmd_name)
sub_commands = [('create_bucket_types', None),
('setup_security', None)
]
|
GabrielNicolasAvellaneda/riak-python-client
|
commands.py
|
Python
|
apache-2.0
| 15,906 | 0.000063 |
# -*- coding: utf-8 -*-
# Copyright 2015-2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo.addons.connector_carepoint.unit import mapper
from .common import SetUpCarepointBase
class TestCarepointImporterMapper(SetUpCarepointBase):
def setUp(self):
super(TestCarepointImporterMapper, self).setUp()
self.Importer = mapper.CarepointImportMapper
self.model = 'carepoint.carepoint.store'
self.mock_env = self.get_carepoint_helper(
self.model
)
self.importer = self.Importer(self.mock_env)
def test_backend_id(self):
""" It should map backend_id correctly """
res = self.importer.backend_id(True)
expect = {'backend_id': self.importer.backend_record.id}
self.assertDictEqual(expect, res)
def test_company_id(self):
""" It should map company_id correctly """
res = self.importer.company_id(True)
expect = {'company_id': self.importer.backend_record.company_id.id}
self.assertDictEqual(expect, res)
|
laslabs/odoo-connector-carepoint
|
connector_carepoint/tests/test_carepoint_import_mapper.py
|
Python
|
agpl-3.0
| 1,081 | 0 |
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2005-2009 University of Washington
# See opus_core/LICENSE
from PyQt4.QtCore import Qt, QVariant, SIGNAL, QModelIndex, QAbstractItemModel
from PyQt4.QtCore import QString
from PyQt4.QtGui import QColor, QIcon, QStyle, QMessageBox
from PyQt4.Qt import qApp # For platform specific icons
from opus_gui.util.icon_library import IconLibrary
from opus_gui.abstract_manager.models.xml_item import XmlItem
from opus_gui.main.controllers.instance_handlers import update_mainwindow_savestate
# What node types we want checkboxes for
# _CHECKBOX_NODE_TYPES = ('selectable')
class XmlModel(QAbstractItemModel):
'''
A data model for a XML tree.
The model exposes a subset of the entire XML tree containing only
XML nodes that do not have the attribute "hidden" set to "True".
'''
def __init__(self, model_root_node, project = None, parent_widget = None):
'''
@param model_root_node (ElementTree.Element): Root node for this model
@param project (OpusProject): Loaded project file
@param parent_widget (QObject): Parent object for this ItemModel
'''
QAbstractItemModel.__init__(self, parent_widget)
# Root element
self._root_node = model_root_node
# Root for the subtree of visible items
self._root_item = XmlItem(self._root_node, None)
# Rebuild the (whole) tree of visible items
self.rebuild_tree()
# Optional reference to loaded project for inheritance handling.
self.project = project
# NOTE: when setting the dirty flag, make sure to use self.dirty rather than
# self.__dirty.
self.__dirty = False
# Column headers
self._headers = ['Name', 'Value']
# Index of the last inserted item
self.last_inserted_index = None
# use platform specific folder and file icons
self.folderIcon = QIcon()
self.fileIcon = QIcon()
std_icon = qApp.style().standardPixmap
self.fileIcon.addPixmap(std_icon(QStyle.SP_FileIcon))
self.folderIcon.addPixmap(std_icon(QStyle.SP_DirClosedIcon), QIcon.Normal, QIcon.Off)
def __is_dirty(self):
return self.__dirty
def __set_dirty(self, dirty):
self.__dirty = dirty
if self.project is not None:
self.project.dirty = True
dirty = property(__is_dirty, __set_dirty)
def columnCount(self, parent):
''' PyQt API Method -- See the PyQt documentation for a description '''
return len(self._headers)
def rebuild_tree(self):
''' Rebuilds the tree from the underlying XML structure '''
self._root_item.rebuild()
self.emit(SIGNAL('layoutChanged()'))
def rowCount(self, parent_index):
''' PyQt API Method -- See the PyQt documentation for a description '''
if not parent_index.isValid():
item = self._root_item
else:
item = parent_index.internalPointer()
return len(item.child_items)
def remove_node(self, node):
'''
Convenience method to remove a node without bothering with the internal model representation
@param node (Element): Node to remove.
'''
index = self.index_for_node(node)
row = index.row()
parent_index = self.parent(index)
self.removeRow(row, parent_index)
def removeRow(self, row, parent_index):
'''
Removes an object from the data model
@param row (int) row number to remove
@param parent_index (QModelIndex) index of parent element
'''
# Make sure we have a valid parent_index
if parent_index == QModelIndex():
parent_item = self._root_item
else:
parent_item = parent_index.internalPointer()
# Validate the row number
if row < 0 or row > len(parent_item.child_items):
return False
child_item = parent_item.child_item(row)
self.emit(SIGNAL("layoutAboutToBeChanged()"))
self.beginRemoveRows(parent_index, row, row)
# remove the child item from it's parent's list of children
child_item.parent_item.child_items.remove(child_item)
# handle inheritance if we are dealing with a project
reinserted_node = None
if self.project is None:
child_item.node.getparent().remove(child_item.node)
else:
reinserted_node = self.project.delete_node(child_item.node)
self.endRemoveRows()
self.emit(SIGNAL("layoutChanged()"))
if reinserted_node is not None:
self.insertRow(row, parent_index, reinserted_node, reinserting = True)
self.dirty = True
return True
def data(self, index, role):
''' PyQt API Method -- See the PyQt documentation for a description '''
if not index.isValid():
return QVariant()
node = index.internalPointer().node
# Foreground Coloring
if role == Qt.ForegroundRole:
if node.get('inherited'):
return QVariant(QColor(Qt.darkBlue))
return QVariant() # = default color
# Display
elif role == Qt.DisplayRole:
if index.column() == 0:
if node.get('type') == 'selectable':
return QVariant(node.get('return_value') or node.get('name') or node.tag)
return QVariant(node.get('name') or node.tag)
elif index.column() == 1:
if node.get('type') == "password":
return QVariant(QString("*********"))
# hide the text value for checkable nodes
elif node.tag == 'selectable' or node.get('type') == 'boolean':
return QVariant()
elif node.text:
return QVariant(node.text.strip())
return QVariant()
elif role == Qt.ToolTipRole:
if index.column() == 0 and self.project: # don't need to worry about inheritance when there is no project
if node.get('inherited'):
return QVariant('Inherited value from file: %s' % node.get('inherited'))
elif self.project.is_shadowing(node):
prototype_node = self.project.get_prototype_node(node)
return QVariant('Original value defined in file: %s' % prototype_node.get('inherited'))
else:
return QVariant('Value is defined in this file.')
# elif role == Qt.FontRole:
# if index.column() == 0:
# if node.tag == 'model':
# font = QFont()
# font.setPointSize(14)
# return QVariant(font)
# CK: Experimenting with making shadowing nodes bold to differentiate them from local nodes
# elif role == Qt.FontRole:
# f = QFont()
# if self.project is not None:
# f.setBold(self.project.is_shadowing(node))
# return QVariant(f)
# Icons
elif role == Qt.DecorationRole:
if index.column() == 0:
return QVariant(IconLibrary.icon_for_type(node.tag))
# Checkboxes
elif role == Qt.CheckStateRole and index.column() == 1:
if node.tag == 'selectable' or node.get('type') == 'boolean':
return QVariant(Qt.Checked if (node.text.strip() == 'True') else Qt.Unchecked)
# Unhandled index/role
return QVariant()
def index_for_item(self, item):
'''
Looks up a QModelIndex() for a given item.
@param item (XmlItem): item to find in the model
@return: The index (QModelIndex) for the given item.
'''
if item is self._root_item:
return QModelIndex()
parent_index = self.index_for_item(item.parent_item)
return self.index(item.row(), 0, parent_index)
def update_node(self, node):
'''
Refreshes the node by removing it and reinserting it.
'''
item = self.item_for_node(node)
if item is None:
return
parent_index = self.index_for_item(item.parent_item)
if parent_index is None:
return
row = item.row()
self.removeRow(row, parent_index)
self.insertRow(row, parent_index, node)
# CK: This is a pretty ineffective method of finding the node <-> item mapping.
# A dictionary mapping would be better.
def _item_for_node(self, parent_item, node):
'''
Depth first search for the XmlItem containing a given node.
@param parent_item (XmlItem): parent of nodes to scan.
@param node (Element): the node to locate
@return: the found node (Element) if found, None otherwise
'''
for child_item in parent_item.child_items:
if child_item.node is node:
return child_item
found_item = self._item_for_node(child_item, node)
if found_item is not None:
return found_item
return None
def item_for_node(self, node):
'''
Return the item for a given node.
@param node (Element): The node to locate.
@return: The item containing the given node (XmlItem) or None
'''
return self._item_for_node(self._root_item, node)
def index_for_node(self, node):
'''
Return the qt index for a given node.
@param node (Element): The node to locate.
@return: The item containing the given node (XmlItem) or None
'''
item = self._item_for_node(self._root_item, node)
return self.index_for_item(item)
def add_node(self, parent_node, node):
'''
Adds a child node (may contain a subtree) to a given parent node and
updates the model.
For efficient insertion of entire trees; first construct the subtree to
insert using ElementTree, and then call this method once with the root
node for it.
@param parent_node (Element): parent node
@param node (Element): node to insert
'''
parent_item = self.item_for_node(parent_node)
parent_index = self.index_for_item(parent_item)
self.insertRow(0, parent_index, node)
if self.project:
self.project.dirty = True
# TODO update comments to xml 2.0
def insert_node(self, node, parent_node):
'''
Insert a node into the XML and into the model.
This method automatically finds the qt index for the parent index so that the item can be
inserted.
@param node (Element): node to insert
@param parent_node (Element): Parent node to append @node
@return: True if the node was inserted
'''
parent_item = self.item_for_node(parent_node)
if parent_item is None:
msg = ('Tried to insert a node under <%s>, but that node is not in this XmlModel' %
parent_node.tag)
return (False, msg)
parent_index = self.index_for_item(parent_item)
if parent_index is not None:
self.insertRow(0, parent_index, node)
self.project.dirty = True
return (True, 'OK')
else:
msg = ('Tried to insert a node under <%s>, but could not find its index.' %
parent_node.tag)
return (False, msg)
def flags(self, index):
''' PyQt API Method -- See the PyQt documentation for a description '''
if not index.isValid():
return None
node = index.internalPointer().node
is_checkbox_node = node.tag == 'selectable' or node.get('type') == 'boolean'
# Inherited nodes
if node.get('inherited'):
# inherited nodes are generally only selectable and enabled, with the exception
# of checkboxes that are clickable even when they are inherited
if is_checkbox_node:
return (Qt.ItemIsEnabled | Qt.ItemIsSelectable | Qt.ItemIsUserCheckable)
return Qt.ItemIsEnabled | Qt.ItemIsSelectable
# Set flags on a per column basis
if index.column() == 0:
return Qt.ItemIsEnabled | Qt.ItemIsSelectable
elif index.column() == 1:
if is_checkbox_node:
return (Qt.ItemIsEnabled | Qt.ItemIsSelectable | Qt.ItemIsUserCheckable)
return Qt.ItemIsEnabled | Qt.ItemIsSelectable | Qt.ItemIsEditable
# Unhandled index
return QVariant()
def headerData(self, section, orientation, role):
''' PyQt API Method -- See the PyQt documentation for a description '''
if orientation == Qt.Horizontal and role == Qt.DisplayRole:
return QVariant(self._headers[section])
else:
return QVariant()
def index(self, row, column, parent_index = QModelIndex()):
''' PyQt API Method -- See the PyQt documentation for a description '''
if not parent_index.isValid():
parent_item = self._root_item
else:
parent_item = parent_index.internalPointer()
child_item = parent_item.child_item(row)
if child_item:
return self.createIndex(row, column, child_item)
else:
return QModelIndex()
def setData(self, index, value, role):
''' PyQt API Method -- See the PyQt documentation for a description '''
if not index.isValid():
return False
item = index.internalPointer()
node = item.node
is_checkbox_node = node.tag == 'selectable' or node.get('type') == 'boolean'
# only allow editing in second column
if index.column() != 1:
return False
# user clicking on a checkbox
if role == Qt.CheckStateRole and is_checkbox_node:
# ask the users if they want to make inherited nodes local first
if node.get('inherited'):
title = 'Editing inherited node'
msg = ("'%s' is inherited from a parent project. \n\n"
"Do you want to make this node part of this project "
"so that you can edit it?" % node.get('name') or node.tag)
b = (QMessageBox.Yes, QMessageBox.No)
ans = QMessageBox.question(None, title, msg, *b)
if ans == QMessageBox.Yes:
self.make_item_local(item)
else:
return False
del title, msg, b, ans # Clean up namespace
if value.toInt()[0] == Qt.Checked:
value = QVariant('True')
else:
value = QVariant('False')
# convert the value to a string and set the nodes text value
value = value.toString()
changed_value = node.text != value
if changed_value:
node.text = str(value) # avoid QString's in the xml
self.dirty = True
s = SIGNAL("dataChanged(const QModelIndex &, const QModelIndex &)")
self.emit(s, index, index)
return True
def make_item_local(self, item):
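        # Promote an inherited node into the local project tree so it can be edited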
if not self.project: return
self.project.make_local(item.node)
def insertRow(self, row, parent_index, node, reinserting = False):
'''
Insert a row into the data model
@param row (int): row to insert into.
@param parent_index (QModelIndex): index of parent item
@param node (Element): node to insert
@param reinserting (bool): if True; assume that the project has already reinserted the node
and just insert it into the internal model. Also skip making it local after inserting.
@return: True if the sibling was inserted, False otherwise
'''
if row < 0 or row > self.rowCount(parent_index):
return False
self.emit(SIGNAL("layoutAboutToBeChanged()"))
self.beginInsertRows(parent_index, row, row)
# Get a valid parent_item
if parent_index == QModelIndex():
parent_item = self._root_item
else:
parent_item = parent_index.internalPointer()
parent_node = parent_item.node
if self.project is None: # no inheritance simple insert into tree
            parent_node.insert(row, node)
            inserted_node = node  # keep a reference for the XmlItem created below
else:
# when dealing with a project and inheritance we have two cases --
# either insertRow is inserting a node that already exists in the project
# (reinserting is True) or we are inserting a new node.
if not reinserting:
inserted_node = self.project.insert_node(node, parent_node, row)
if inserted_node is None:
# raise RuntimeError('Could not insert node into model')
print 'WARNING: Could not insert %s:%s' % (node.tag, node.get('name'))
return False
self.project.make_local(inserted_node)
else:
inserted_node = node
new_item = XmlItem(inserted_node, parent_item)
new_item.rebuild()
parent_item.child_items.insert(row, new_item)
self.endInsertRows()
self.emit(SIGNAL("layoutChanged()"))
# If the item was created we store it so that XmlViews can access it
self.last_inserted_index = self.index(row, 0, parent_index) if new_item else None
update_mainwindow_savestate()
return True
def insert_sibling(self, node, sibling_index):
'''
Create and insert a sibling node.
@param node (Element): node for the new item
@param sibling_index (QModelIndex): index for the sibling item
@return: True if the sibling was inserted, False otherwise
'''
parent_index = self.parent(sibling_index)
return self.insertRow(sibling_index.row(), parent_index, node)
def parent(self, index):
''' PyQt API Method -- See the PyQt documentation for a description '''
if not index.isValid():
return QModelIndex()
parent_item = index.internalPointer().parent_item
parent_node = index.internalPointer().parent_item.node
if parent_item is self._root_item:
return QModelIndex()
parent_ind = self.createIndex(parent_item.row(), 0, parent_item)
parent_ind.node = parent_node
return parent_ind
    # TODO consider doing this with lxml's insert(current index - 1) instead; the current
    # implementation gives me a headache (and I think it's broken)
def move_up(self, index, view=None):
'''
Moves the specified item up one step
@param index (QModelIndex): index for the item to move
@return index (QModelIndex): index of the new position
'''
if not index.isValid() or index.row() == 0:
return index
parent_item = self.parent(index).internalPointer()
row = index.row()
if view:
this_item_expanded = view.isExpanded(self.index(row, 0, self.parent(index)))
above_item_expanded = view.isExpanded(self.index(row-1, 0, self.parent(index)))
this_item = parent_item.child_items.pop(row)
above_item = parent_item.child_items.pop(row - 1)
parent_item.child_items.insert(row - 1, this_item)
parent_item.child_items.insert(row, above_item)
self.make_item_local(this_item)
self.make_item_local(above_item)
self.emit(SIGNAL('layoutChanged()'))
self.dirty = True
if view:
view.setExpanded(self.index(row, 0, self.parent(index)), above_item_expanded)
view.setExpanded(self.index(row-1, 0, self.parent(index)), this_item_expanded)
return self.index(row - 1, 0, self.parent(index))
def move_down(self, index, view=None):
'''
Moves the specified item down one step
@param index (QModelIndex): index for the item to move
@return index (QModelIndex): index of the new position
'''
if not index.isValid() or index.row() >= (self.rowCount(self.parent(index)) - 1):
return index
parent_item = self.parent(index).internalPointer()
row = index.row()
if view:
this_item_expanded = view.isExpanded(self.index(row, 0, self.parent(index)))
below_item_expanded = view.isExpanded(self.index(row+1, 0, self.parent(index)))
this_item = parent_item.child_items.pop(row)
below_item = parent_item.child_items.pop(row)
parent_item.child_items.insert(row, below_item)
parent_item.child_items.insert(row + 1, this_item)
self.make_item_local(this_item)
self.make_item_local(below_item)
self.emit(SIGNAL('layoutChanged()'))
self.dirty = True
if view:
view.setExpanded(self.index(row+1, 0, self.parent(index)), this_item_expanded)
view.setExpanded(self.index(row, 0, self.parent(index)), below_item_expanded)
return self.index(row + 1, 0, self.parent(index))
def root_node(self):
'''
Get a reference to this model's root node
@return: The models root node (Element)
'''
return self._root_node
def root_item(self):
'''
Get a reference to this model's root item
@return: The models root item (XmlItem)
'''
return self._root_item
|
christianurich/VIBe2UrbanSim
|
3rdparty/opus/src/opus_gui/abstract_manager/models/xml_model.py
|
Python
|
gpl-2.0
| 21,520 | 0.002556 |
from setuptools import setup
setup(
name='quotequail',
version='0.2.3',
url='http://github.com/closeio/quotequail',
license='MIT',
author='Thomas Steinacher',
author_email='engineering@close.io',
maintainer='Thomas Steinacher',
maintainer_email='engineering@close.io',
description='A library that identifies quoted text in plain text and HTML email messages.',
long_description=__doc__,
packages=[
'quotequail',
],
test_suite='tests',
tests_require=['lxml'],
platforms='any',
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Topic :: Communications :: Email',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
elasticsales/quotequail
|
setup.py
|
Python
|
mit
| 1,101 | 0.000908 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import time
class CameraClass(object):
    '''
    Camera wrapper for the turret; exposes target-visibility checks.
    '''
def __init__(self):
super(CameraClass, self).__init__()
def visible_target(self):
'''
Returns true if target is visible
'''
return True
if __name__ == '__main__':
try:
from picamera import PiCamera
camera = PiCamera()
try:
camera.start_preview()
time.sleep(10)
camera.stop_preview()
finally:
camera.close()
except ImportError:
pass
|
twistedretard/LaserSimulatedSecurityTurret
|
src/turret/camera.py
|
Python
|
mit
| 620 | 0.001613 |
import sys
import socket
from PyQt5.QtWidgets import QApplication
from qt_DisplayWindow import DisplayWindow
from Server import Server
def main(camID):
hostname = socket.gethostname()
ip_address = socket.gethostbyname_ex(hostname)[2][-1]
print(hostname, ip_address)
port = 12349
app = QApplication(sys.argv)
server = Server(ip_address, port)
# set up main display window
display = DisplayWindow(camID, server.get_state)
display.show()
# connect server -> display slots
server.selfie.connect(display.selfie)
server.email.connect(display.email)
server.status.connect(display.show_msg)
server.start()
ret = app.exec_()
server.join()
sys.exit(ret)
if __name__ == '__main__':
main(0)
|
natedileas/ImageRIT
|
Server/qt_main.py
|
Python
|
gpl-3.0
| 762 | 0.002625 |
import os
from pathlib import Path
from PIL import Image
import pyconfig
import pydice
class ImageNotSupported(Exception):
pass
class BeardedDie:
def __init__(self, die):
self.die = die
        # Point pydice's Die at our images directory so to_image() can find the faces
if pyconfig.get('dicebeard.images_path'):
pydice.dice.Die.images_path = Path(
pyconfig.get('dicebeard.images_path'))
else:
pydice.dice.Die.images_path = Path(
os.path.dirname(__file__)) / 'images'
def __getattr__(self, attr):
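        # Delegate any attribute we don't define ourselves to the wrapped pydice die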
return getattr(self.die, attr)
def to_image(self):
        '''Emits a PIL.Image of the die if possible'''
die_image_path = (self.images_path /
'd{}'.format(self.faces.stop-1) /
'{}.png'.format(self.result))
try:
return Image.open(str(die_image_path))
except FileNotFoundError:
raise ImageNotSupported(
'{} is not currently supported.'.format(self.name))
|
nasfarley88/dicebeard
|
python/dicebeard/skb_roll/beardeddie.py
|
Python
|
unlicense
| 1,060 | 0 |
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Group.auth'
db.add_column('people_group', 'auth', self.gf('django.db.models.fields.BooleanField')(default=True), keep_default=False)
def backwards(self, orm):
# Deleting field 'Group.auth'
db.delete_column('people_group', 'auth')
models = {
'people.address': {
'Meta': {'ordering': "('address1',)", 'object_name': 'Address'},
'address1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'address2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'atype': ('django.db.models.fields.CharField', [], {'default': "'ns'", 'max_length': '10'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'household': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.Household']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notes': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
'zipcode': ('django.db.models.fields.CharField', [], {'max_length': '25'})
},
'people.group': {
'Meta': {'ordering': "('name',)", 'object_name': 'Group'},
'auth': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'desc': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'gtype': ('django.db.models.fields.CharField', [], {'default': "'general'", 'max_length': '10'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'people.groupadmin': {
'Meta': {'ordering': "('group__name', 'person__lname', 'person__fname')", 'object_name': 'GroupAdmin'},
'can_send': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.Person']"})
},
'people.household': {
'Meta': {'ordering': "('name',)", 'object_name': 'Household'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'anniversary': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'barcode': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'first_visit': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'image_temp': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.TempImage']", 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'ns'", 'max_length': '10'})
},
'people.person': {
'Meta': {'ordering': "('lname', 'fname')", 'object_name': 'Person'},
'alerts': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'allergies': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'bdate': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'ddate': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'fname': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
'gender': ('django.db.models.fields.CharField', [], {'default': "'ns'", 'max_length': '10'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['people.Group']", 'null': 'True', 'blank': 'True'}),
'household': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.Household']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'image_temp': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.TempImage']", 'null': 'True', 'blank': 'True'}),
'lname': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
'mname': ('django.db.models.fields.CharField', [], {'max_length': '150', 'null': 'True', 'blank': 'True'}),
'role': ('django.db.models.fields.CharField', [], {'default': "'ns'", 'max_length': '10'})
},
'people.phone': {
'Meta': {'ordering': "('person__lname', 'person__fname', 'number')", 'object_name': 'Phone'},
'alerts': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'number': ('django.contrib.localflavor.us.models.PhoneNumberField', [], {'max_length': '20'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.Person']"}),
'type1': ('django.db.models.fields.CharField', [], {'default': "'ns'", 'max_length': '10'}),
'type2': ('django.db.models.fields.CharField', [], {'default': "'ns'", 'max_length': '10'})
},
'people.tempimage': {
'Meta': {'ordering': "('-ts',)", 'object_name': 'TempImage'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'ts': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
}
}
complete_apps = ['people']
|
pizzapanther/Church-Source
|
churchsource/people/migrations/0013_auto__add_field_group_auth.py
|
Python
|
gpl-3.0
| 6,849 | 0.008906 |
from base.models import Colaborador
def get_create_colaborador_by_user(user):
try:
colab = Colaborador.objects.get(user__username=user.username)
except Colaborador.DoesNotExist:
colab = Colaborador(
user=user,
matricula=72000+user.id,
cpf=72000+user.id,
)
colab.save()
return colab
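# Minimal usage sketch (hypothetical user data): the helper is idempotent --
# it returns the existing Colaborador for a user, or creates one whose
# matricula and cpf are derived from the user id.
#
# from django.contrib.auth.models import User
# user = User.objects.get(username='jsilva')
# colab = get_create_colaborador_by_user(user)
# assert colab.matricula == 72000 + user.id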
|
anselmobd/fo2
|
src/base/queries/models.py
|
Python
|
mit
| 391 | 0 |
import json
import copy
from util.json_request import JsonResponse
from django.http import HttpResponseBadRequest
from django.contrib.auth.decorators import login_required
from django.views.decorators.http import require_http_methods
from django_future.csrf import ensure_csrf_cookie
from edxmako.shortcuts import render_to_response
from django.http import HttpResponseNotFound
from django.core.exceptions import PermissionDenied
from opaque_keys.edx.keys import CourseKey
from xmodule.modulestore.django import modulestore
from contentstore.utils import reverse_course_url
from .access import has_course_access
from xmodule.course_module import CourseDescriptor
from django.utils.translation import ugettext
__all__ = ['checklists_handler']
# pylint: disable=unused-argument
@require_http_methods(("GET", "POST", "PUT"))
@login_required
@ensure_csrf_cookie
def checklists_handler(request, course_key_string, checklist_index=None):
"""
The restful handler for checklists.
GET
html: return html page for all checklists
json: return json representing all checklists. checklist_index is not supported for GET at this time.
POST or PUT
json: updates the checked state for items within a particular checklist. checklist_index is required.
"""
course_key = CourseKey.from_string(course_key_string)
if not has_course_access(request.user, course_key):
raise PermissionDenied()
course_module = modulestore().get_course(course_key)
json_request = 'application/json' in request.META.get('HTTP_ACCEPT', 'application/json')
if request.method == 'GET':
# If course was created before checklists were introduced, copy them over
# from the template.
if not course_module.checklists:
course_module.checklists = CourseDescriptor.checklists.default
modulestore().update_item(course_module, request.user.id)
expanded_checklists = expand_all_action_urls(course_module)
if json_request:
return JsonResponse(expanded_checklists)
else:
handler_url = reverse_course_url('checklists_handler', course_key)
return render_to_response('checklists.html',
{
'handler_url': handler_url,
# context_course is used by analytics
'context_course': course_module,
'checklists': expanded_checklists
})
elif json_request:
# Can now assume POST or PUT because GET handled above.
if checklist_index is not None and 0 <= int(checklist_index) < len(course_module.checklists):
index = int(checklist_index)
persisted_checklist = course_module.checklists[index]
modified_checklist = json.loads(request.body)
# Only thing the user can modify is the "checked" state.
# We don't want to persist what comes back from the client because it will
# include the expanded action URLs (which are non-portable).
for item_index, item in enumerate(modified_checklist.get('items')):
persisted_checklist['items'][item_index]['is_checked'] = item['is_checked']
# seeming noop which triggers kvs to record that the metadata is
# not default
course_module.checklists = course_module.checklists
course_module.save()
modulestore().update_item(course_module, request.user.id)
expanded_checklist = expand_checklist_action_url(course_module, persisted_checklist)
return JsonResponse(localize_checklist_text(expanded_checklist))
else:
return HttpResponseBadRequest(
("Could not save checklist state because the checklist index "
"was out of range or unspecified."),
content_type="text/plain"
)
else:
return HttpResponseNotFound()
def expand_all_action_urls(course_module):
"""
Gets the checklists out of the course module and expands their action urls.
Returns a copy of the checklists with modified urls, without modifying the persisted version
of the checklists.
"""
expanded_checklists = []
for checklist in course_module.checklists:
expanded_checklists.append(localize_checklist_text(expand_checklist_action_url(course_module, checklist)))
return expanded_checklists
def expand_checklist_action_url(course_module, checklist):
"""
Expands the action URLs for a given checklist and returns the modified version.
    The method makes a copy of the input checklist and does not modify the input argument.
"""
expanded_checklist = copy.deepcopy(checklist)
urlconf_map = {
"ManageUsers": "course_team_handler",
"CourseOutline": "course_handler",
"SettingsDetails": "settings_handler",
"SettingsGrading": "grading_handler",
}
for item in expanded_checklist.get('items'):
action_url = item.get('action_url')
if action_url in urlconf_map:
item['action_url'] = reverse_course_url(urlconf_map[action_url], course_module.id)
return expanded_checklist
def localize_checklist_text(checklist):
"""
Localize texts for a given checklist and returns the modified version.
The method does an in-place operation so the input checklist is modified directly.
"""
# Localize checklist name
checklist['short_description'] = ugettext(checklist['short_description'])
# Localize checklist items
for item in checklist.get('items'):
item['short_description'] = ugettext(item['short_description'])
item['long_description'] = ugettext(item['long_description'])
item['action_text'] = ugettext(item['action_text']) if item['action_text'] != "" else u""
return checklist
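# Illustrative sketch (hypothetical checklist data) of the expansion helpers
# above: action URLs are stored as portable keys until expand_checklist_action_url
# reverses them into concrete course URLs, leaving the input untouched.
#
# checklist = {
#     'short_description': 'Getting Started',
#     'items': [{'action_url': 'ManageUsers', 'action_text': 'Edit Team',
#                'short_description': 'Add staff', 'long_description': '...',
#                'is_checked': False}],
# }
# expanded = expand_checklist_action_url(course_module, checklist)
# # expanded['items'][0]['action_url'] is now the reversed course_team_handler
# # URL, while `checklist` itself is unchanged (deepcopy above).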
|
LICEF/edx-platform
|
cms/djangoapps/contentstore/views/checklist.py
|
Python
|
agpl-3.0
| 6,004 | 0.002665 |
import numpy as np
import sys
R = np.eye(int(sys.argv[2]))
np.savetxt(sys.argv[1]+'/R.txt', R)
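# Usage sketch (hypothetical paths): `python gen_id.py /tmp 4` writes a 4x4
# identity matrix to /tmp/R.txt via np.savetxt.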
|
chocjy/randomized-quantile-regression-solvers
|
hadoop/src/gen_id.py
|
Python
|
apache-2.0
| 97 | 0.010309 |
from symbol.builder import FasterRcnn as Detector
from models.dcn.builder import DCNResNetC4 as Backbone
from symbol.builder import Neck
from symbol.builder import RpnHead
from symbol.builder import RoiAlign as RoiExtractor
from symbol.builder import BboxC5V1Head as BboxHead
from mxnext.complicate import normalizer_factory
def get_config(is_train):
class General:
log_frequency = 10
name = __name__.rsplit("/")[-1].rsplit(".")[-1]
batch_image = 2 if is_train else 1
fp16 = False
class KvstoreParam:
kvstore = "local"
batch_image = General.batch_image
gpus = [0, 1, 2, 3, 4, 5, 6, 7]
fp16 = General.fp16
class NormalizeParam:
# normalizer = normalizer_factory(type="syncbn", ndev=len(KvstoreParam.gpus))
normalizer = normalizer_factory(type="fixbn")
class BackboneParam:
fp16 = General.fp16
normalizer = NormalizeParam.normalizer
depth = 50
num_c3_block = 4
num_c4_block = 6
class NeckParam:
fp16 = General.fp16
normalizer = NormalizeParam.normalizer
class RpnParam:
fp16 = General.fp16
normalizer = NormalizeParam.normalizer
batch_image = General.batch_image
class anchor_generate:
scale = (2, 4, 8, 16, 32)
ratio = (0.5, 1.0, 2.0)
stride = 16
image_anchor = 256
class head:
conv_channel = 512
mean = (0, 0, 0, 0)
std = (1, 1, 1, 1)
class proposal:
pre_nms_top_n = 12000 if is_train else 6000
post_nms_top_n = 2000 if is_train else 300
nms_thr = 0.7
min_bbox_side = 0
class subsample_proposal:
proposal_wo_gt = False
image_roi = 512
fg_fraction = 0.25
fg_thr = 0.5
bg_thr_hi = 0.5
bg_thr_lo = 0.0
class bbox_target:
num_reg_class = 2
class_agnostic = True
weight = (1.0, 1.0, 1.0, 1.0)
mean = (0.0, 0.0, 0.0, 0.0)
std = (0.1, 0.1, 0.2, 0.2)
class BboxParam:
fp16 = General.fp16
normalizer = NormalizeParam.normalizer
num_class = 1 + 80
image_roi = 512
batch_image = General.batch_image
class regress_target:
class_agnostic = True
mean = (0.0, 0.0, 0.0, 0.0)
std = (0.1, 0.1, 0.2, 0.2)
class RoiParam:
fp16 = General.fp16
normalizer = NormalizeParam.normalizer
out_size = 7
stride = 16
class DatasetParam:
if is_train:
image_set = ("coco_train2017", )
else:
image_set = ("coco_val2017", )
backbone = Backbone(BackboneParam)
neck = Neck(NeckParam)
rpn_head = RpnHead(RpnParam)
roi_extractor = RoiExtractor(RoiParam)
bbox_head = BboxHead(BboxParam)
detector = Detector()
if is_train:
train_sym = detector.get_train_symbol(backbone, neck, rpn_head, roi_extractor, bbox_head)
rpn_test_sym = None
test_sym = None
else:
train_sym = None
rpn_test_sym = detector.get_rpn_test_symbol(backbone, neck, rpn_head)
test_sym = detector.get_test_symbol(backbone, neck, rpn_head, roi_extractor, bbox_head)
class ModelParam:
train_symbol = train_sym
test_symbol = test_sym
rpn_test_symbol = rpn_test_sym
from_scratch = False
random = True
memonger = False
memonger_until = "stage3_unit21_plus"
class pretrain:
prefix = "pretrain_model/resnet%s_v1b" % BackboneParam.depth
epoch = 0
fixed_param = ["conv0", "stage1", "gamma", "beta"]
class OptimizeParam:
class optimizer:
type = "sgd"
lr = 0.01 / 8 * len(KvstoreParam.gpus) * KvstoreParam.batch_image
momentum = 0.9
wd = 0.0001
clip_gradient = 35
class schedule:
begin_epoch = 0
end_epoch = 6
lr_iter = [60000 * 16 // (len(KvstoreParam.gpus) * KvstoreParam.batch_image),
80000 * 16 // (len(KvstoreParam.gpus) * KvstoreParam.batch_image)]
class warmup:
type = "gradual"
lr = 0.0
iter = 1000
class TestParam:
min_det_score = 0.05
max_det_per_image = 100
process_roidb = lambda x: x
process_output = lambda x, y: x
class model:
prefix = "experiments/{}/checkpoint".format(General.name)
epoch = OptimizeParam.schedule.end_epoch
class nms:
type = "nms"
thr = 0.5
class coco:
annotation = "data/coco/annotations/instances_minival2014.json"
# data processing
class NormParam:
mean = tuple(i * 255 for i in (0.485, 0.456, 0.406)) # RGB order
std = tuple(i * 255 for i in (0.229, 0.224, 0.225))
class ResizeParam:
short = 800
long = 1200 if is_train else 2000
class PadParam:
short = 800
long = 1200
max_num_gt = 100
class AnchorTarget2DParam:
class generate:
short = 800 // 16
long = 1200 // 16
stride = 16
scales = (2, 4, 8, 16, 32)
aspects = (0.5, 1.0, 2.0)
class assign:
allowed_border = 0
pos_thr = 0.7
neg_thr = 0.3
min_pos_thr = 0.0
class sample:
image_anchor = 256
pos_fraction = 0.5
class RenameParam:
mapping = dict(image="data")
from core.detection_input import ReadRoiRecord, Resize2DImageBbox, \
ConvertImageFromHwcToChw, Flip2DImageBbox, Pad2DImageBbox, \
RenameRecord, AnchorTarget2D, Norm2DImage
if is_train:
transform = [
ReadRoiRecord(None),
Norm2DImage(NormParam),
Resize2DImageBbox(ResizeParam),
Flip2DImageBbox(),
Pad2DImageBbox(PadParam),
ConvertImageFromHwcToChw(),
AnchorTarget2D(AnchorTarget2DParam),
RenameRecord(RenameParam.mapping)
]
data_name = ["data", "im_info", "gt_bbox"]
label_name = ["rpn_cls_label", "rpn_reg_target", "rpn_reg_weight"]
else:
transform = [
ReadRoiRecord(None),
Norm2DImage(NormParam),
Resize2DImageBbox(ResizeParam),
ConvertImageFromHwcToChw(),
RenameRecord(RenameParam.mapping)
]
data_name = ["data", "im_info", "im_id", "rec_id"]
label_name = []
import core.detection_metric as metric
rpn_acc_metric = metric.AccWithIgnore(
"RpnAcc",
["rpn_cls_loss_output"],
["rpn_cls_label"]
)
rpn_l1_metric = metric.L1(
"RpnL1",
["rpn_reg_loss_output"],
["rpn_cls_label"]
)
# for bbox, the label is generated in network so it is an output
box_acc_metric = metric.AccWithIgnore(
"RcnnAcc",
["bbox_cls_loss_output", "bbox_label_blockgrad_output"],
[]
)
box_l1_metric = metric.L1(
"RcnnL1",
["bbox_reg_loss_output", "bbox_label_blockgrad_output"],
[]
)
metric_list = [rpn_acc_metric, rpn_l1_metric, box_acc_metric, box_l1_metric]
return General, KvstoreParam, RpnParam, RoiParam, BboxParam, DatasetParam, \
ModelParam, OptimizeParam, TestParam, \
transform, data_name, label_name, metric_list
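# Minimal usage sketch (assuming the imports above resolve): the returned
# tuple wires up one training or test run.
#
# (General, KvstoreParam, RpnParam, RoiParam, BboxParam, DatasetParam,
#  ModelParam, OptimizeParam, TestParam,
#  transform, data_name, label_name, metric_list) = get_config(is_train=True)
# print(General.name, OptimizeParam.schedule.end_epoch)  # config name, 6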
|
TuSimple/simpledet
|
config/dcn/faster_dcnv2_r50v1bc4_c5_512roi_1x.py
|
Python
|
apache-2.0
| 7,639 | 0.004451 |
# -*- coding: utf-8 -*-
"""
:copyright: 2005-2008 by The PIDA Project
:license: GPL 2 or later (see README/COPYING/LICENSE)
"""
import gtk
from pygtkhelpers.delegates import SlaveView
# locale
from pida.core.locale import Locale
locale = Locale('pida')
_ = locale.gettext
class PidaView(SlaveView):
# Set this to make your views memorable.
key = None
icon_name = gtk.STOCK_INFO
label_text = _('Pida View')
pane = None
def create_ui(self):
"""Create the user interface here"""
def create_tab_label_icon(self):
return gtk.image_new_from_stock(self.icon_name, gtk.ICON_SIZE_MENU)
def get_parent_window(self):
return self.toplevel.get_parent_window()
parent_window = property(get_parent_window)
def on_remove_attempt(self, pane):
return not self.can_be_closed()
def can_be_closed(self):
return False
gladefile = None
def __init__(self, service, title=None, icon=None, *args, **kw):
if not self.builder_file:
self.builder_file = self.gladefile
self.svc = service
self.label_text = title or self.label_text
self.icon_name = icon or self.icon_name
if self.key:
pass
#self.toplevel.set_name(self.key.replace(".", "_"))
super(PidaView, self).__init__()
def get_toplevel(self):
return self.widget
toplevel = property(get_toplevel)
def add_main_widget(self, widget, *args, **kw):
self.widget.pack_start(widget, *args, **kw)
class WindowConfig(object):
"""
    WindowConfig objects are used to register
    a window in the windows service so that it
    can get proper shortcuts.
"""
key = None
label_text = ""
description = ""
default_shortcut = ""
action = None
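# Illustrative subclass (hypothetical service): concrete views override the
# class attributes and create_ui; WindowConfig subclasses only declare the
# metadata the windows service needs for shortcut registration.
#
# class MyToolView(PidaView):
#     key = 'mytool.view'
#     label_text = _('My Tool')
#     def create_ui(self):
#         self.add_main_widget(gtk.Label('hello'))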
|
fermat618/pida
|
pida/ui/views.py
|
Python
|
gpl-2.0
| 1,810 | 0.001105 |
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Notebook front-end to TensorFlow.
When you run this binary, you'll see something like below, which indicates
the serving URL of the notebook:
The IPython Notebook is running at: http://127.0.0.1:8888/
Press "Shift+Enter" to execute a cell
Press "Enter" on a cell to go into edit mode.
Press "Escape" to go back into command mode and use arrow keys to navigate.
Press "a" in command mode to insert cell above or "b" to insert cell below.
Your root notebooks directory is FLAGS.notebook_dir
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import socket
import sys
# pylint: disable=g-import-not-at-top
# Official recommended way of turning on fast protocol buffers as of 10/21/14
os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "cpp"
os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION"] = "2"
from tensorflow.python.platform import app
from tensorflow.python.platform import flags
FLAGS = flags.FLAGS
flags.DEFINE_string(
"password", None,
"Password to require. If set, the server will allow public access."
" Only used if notebook config file does not exist.")
flags.DEFINE_string("notebook_dir", "experimental/brain/notebooks",
"root location where to store notebooks")
ORIG_ARGV = sys.argv
# Main notebook process calls itself with argv[1]="kernel" to start kernel
# subprocesses.
IS_KERNEL = len(sys.argv) > 1 and sys.argv[1] == "kernel"
def main(unused_argv):
sys.argv = ORIG_ARGV
if not IS_KERNEL:
# Drop all flags.
sys.argv = [sys.argv[0]]
# NOTE(sadovsky): For some reason, putting this import at the top level
# breaks inline plotting. It's probably a bug in the stone-age version of
# matplotlib.
from IPython.html.notebookapp import NotebookApp # pylint: disable=g-import-not-at-top
notebookapp = NotebookApp.instance()
notebookapp.open_browser = True
# password functionality adopted from quality/ranklab/main/tools/notebook.py
# add options to run with "password"
if FLAGS.password:
from IPython.lib import passwd # pylint: disable=g-import-not-at-top
notebookapp.ip = "0.0.0.0"
notebookapp.password = passwd(FLAGS.password)
else:
print("\nNo password specified; Notebook server will only be available"
" on the local machine.\n")
notebookapp.initialize(argv=["--notebook-dir", FLAGS.notebook_dir])
if notebookapp.ip == "0.0.0.0":
proto = "https" if notebookapp.certfile else "http"
url = "%s://%s:%d%s" % (proto, socket.gethostname(), notebookapp.port,
notebookapp.base_project_url)
print("\nNotebook server will be publicly available at: %s\n" % url)
notebookapp.start()
return
# Drop the --flagfile flag so that notebook doesn't complain about an
# "unrecognized alias" when parsing sys.argv.
sys.argv = ([sys.argv[0]] +
[z for z in sys.argv[1:] if not z.startswith("--flagfile")])
from IPython.kernel.zmq.kernelapp import IPKernelApp # pylint: disable=g-import-not-at-top
kernelapp = IPKernelApp.instance()
kernelapp.initialize()
# Enable inline plotting. Equivalent to running "%matplotlib inline".
ipshell = kernelapp.shell
ipshell.enable_matplotlib("inline")
kernelapp.start()
if __name__ == "__main__":
# When the user starts the main notebook process, we don't touch sys.argv.
# When the main process launches kernel subprocesses, it writes all flags
# to a tmpfile and sets --flagfile to that tmpfile, so for kernel
# subprocesses here we drop all flags *except* --flagfile, then call
# app.run(), and then (in main) restore all flags before starting the
# kernel app.
if IS_KERNEL:
# Drop everything except --flagfile.
sys.argv = ([sys.argv[0]] +
[x for x in sys.argv[1:] if x.startswith("--flagfile")])
app.run()
|
DailyActie/Surrogate-Model
|
01-codes/tensorflow-master/tensorflow/python/client/notebook.py
|
Python
|
mit
| 4,766 | 0.002098 |
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class REvd(RPackage):
"""evd: Functions for Extreme Value Distributions"""
homepage = "https://cloud.r-project.org/package=evd"
url = "https://cloud.r-project.org/src/contrib/evd_2.3-3.tar.gz"
list_url = "https://cloud.r-project.org/src/contrib/Archive/evd"
version('2.3-3', sha256='2fc5ef2e0c3a2a9392425ddd45914445497433d90fb80b8c363877baee4559b4')
|
iulian787/spack
|
var/spack/repos/builtin/packages/r-evd/package.py
|
Python
|
lgpl-2.1
| 597 | 0.00335 |
# ----------------------------------------------------------------------------
# Copyright (c) 2013--, scikit-bio development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
import unittest
import inspect
import warnings
from skbio.util._decorator import classproperty, overrides
from skbio.util._decorator import (stable, experimental, deprecated,
_state_decorator)
from skbio.util._exception import OverrideError
class TestOverrides(unittest.TestCase):
def test_raises_when_missing(self):
class A(object):
pass
with self.assertRaises(OverrideError):
class B(A):
@overrides(A)
def test(self):
pass
def test_doc_inherited(self):
class A(object):
def test(self):
"""Docstring"""
pass
class B(A):
@overrides(A)
def test(self):
pass
self.assertEqual(B.test.__doc__, "Docstring")
def test_doc_not_inherited(self):
class A(object):
def test(self):
"""Docstring"""
pass
class B(A):
@overrides(A)
def test(self):
"""Different"""
pass
self.assertEqual(B.test.__doc__, "Different")
class TestClassProperty(unittest.TestCase):
def test_getter_only(self):
class Foo(object):
_foo = 42
@classproperty
def foo(cls):
return cls._foo
# class-level getter
self.assertEqual(Foo.foo, 42)
# instance-level getter
f = Foo()
self.assertEqual(f.foo, 42)
with self.assertRaises(AttributeError):
f.foo = 4242
class TestStabilityState(unittest.TestCase):
# the indentation spacing gets weird, so I'm defining the
# input doc string explicitly and adding it after function
    # definition
_test_docstring = (" Add 42, or something else, to x.\n"
"\n"
" Parameters\n"
" ----------\n"
" x : int, x\n"
" y : int, optional\n")
class TestBase(TestStabilityState):
def test_get_indentation_level(self):
c = _state_decorator()
self.assertEqual(c._get_indentation_level([]), 0)
self.assertEqual(
c._get_indentation_level([], default_no_existing_docstring=3), 3)
self.assertEqual(c._get_indentation_level([""]), 4)
self.assertEqual(
c._get_indentation_level([""], default_existing_docstring=3), 3)
in_ = (["summary"])
self.assertEqual(c._get_indentation_level(in_), 4)
in_ = (["summary", "", "", " ", "", " ", ""])
self.assertEqual(c._get_indentation_level(in_), 4)
in_ = (["summary", " More indentation", " Less indentation"])
self.assertEqual(c._get_indentation_level(in_), 5)
def test_update_docstring(self):
c = _state_decorator()
in_ = None
exp = ("""State: Test!!""")
self.assertEqual(c._update_docstring(in_, "Test!!"), exp)
in_ = """"""
exp = ("""\n\n State: Test!!""")
self.assertEqual(c._update_docstring(in_, "Test!!"), exp)
in_ = ("""Short summary\n\n Parameters\n\n----------\n """
"""x : int\n""")
exp = ("""Short summary\n\n State: Test!!\n\n"""
""" Parameters\n\n----------\n x : int\n""")
self.assertEqual(c._update_docstring(in_, "Test!!"), exp)
in_ = ("""Short summary\n\n Parameters\n\n----------\n """
"""x : int\n""")
exp = ("""Short summary\n\n State: Test!!\n\n"""
""" Parameters\n\n----------\n x : int\n""")
self.assertEqual(c._update_docstring(in_, "Test!!"), exp)
in_ = ("""Short summary\n\n Parameters\n\n----------\n """
"""x : int\n""")
exp = ("""Short summary\n\n State: Test!!Test!!Test!!Test!!Test!!"""
"""Test!!Test!!Test!!Test!!Test!!Test!!Te\n st!!T"""
"""est!!Test!!Test!!Test!!Test!!Test!!Test!!Test!!\n\n"""
""" Parameters\n\n----------\n x : int\n""")
self.assertEqual(c._update_docstring(in_, "Test!!"*20), exp)
class TestStable(TestStabilityState):
def _get_f(self, as_of):
def f(x, y=42):
return x + y
f.__doc__ = self._test_docstring
f = stable(as_of=as_of)(f)
return f
def test_function_output(self):
f = self._get_f('0.1.0')
self.assertEqual(f(1), 43)
def test_function_docstring(self):
f = self._get_f('0.1.0')
e1 = (" Add 42, or something else, to x.\n\n"
" State: Stable as of 0.1.0.\n\n"
" Parameters")
self.assertTrue(f.__doc__.startswith(e1))
f = self._get_f('0.1.1')
e1 = (" Add 42, or something else, to x.\n\n"
" State: Stable as of 0.1.1.\n\n"
" Parameters")
self.assertTrue(f.__doc__.startswith(e1))
def test_function_signature(self):
f = self._get_f('0.1.0')
expected = inspect.ArgSpec(
args=['x', 'y'], varargs=None, keywords=None, defaults=(42,))
self.assertEqual(inspect.getargspec(f), expected)
self.assertEqual(f.__name__, 'f')
def test_missing_kwarg(self):
self.assertRaises(ValueError, stable)
self.assertRaises(ValueError, stable, '0.1.0')
class TestExperimental(TestStabilityState):
def _get_f(self, as_of):
def f(x, y=42):
return x + y
f.__doc__ = self._test_docstring
f = experimental(as_of=as_of)(f)
return f
def test_function_output(self):
f = self._get_f('0.1.0')
self.assertEqual(f(1), 43)
def test_function_docstring(self):
f = self._get_f('0.1.0')
e1 = (" Add 42, or something else, to x.\n\n"
" State: Experimental as of 0.1.0.\n\n"
" Parameters")
self.assertTrue(f.__doc__.startswith(e1))
f = self._get_f('0.1.1')
e1 = (" Add 42, or something else, to x.\n\n"
" State: Experimental as of 0.1.1.\n\n"
" Parameters")
self.assertTrue(f.__doc__.startswith(e1))
def test_function_signature(self):
f = self._get_f('0.1.0')
expected = inspect.ArgSpec(
args=['x', 'y'], varargs=None, keywords=None, defaults=(42,))
self.assertEqual(inspect.getargspec(f), expected)
self.assertEqual(f.__name__, 'f')
def test_missing_kwarg(self):
self.assertRaises(ValueError, experimental)
self.assertRaises(ValueError, experimental, '0.1.0')
class TestDeprecated(TestStabilityState):
def _get_f(self, as_of, until, reason):
def f(x, y=42):
return x + y
f.__doc__ = self._test_docstring
f = deprecated(as_of=as_of, until=until, reason=reason)(f)
return f
def test_function_output(self):
f = self._get_f('0.1.0', until='0.1.4',
reason='You should now use skbio.g().')
self.assertEqual(f(1), 43)
def test_deprecation_warning(self):
f = self._get_f('0.1.0', until='0.1.4',
reason='You should now use skbio.g().')
# adapted from SO example here: http://stackoverflow.com/a/3892301
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
f(1)
self.assertTrue(issubclass(w[0].category, DeprecationWarning))
expected_str = "is deprecated as of scikit-bio version 0.1.0"
self.assertTrue(expected_str in str(w[0].message))
def test_function_docstring(self):
f = self._get_f('0.1.0', until='0.1.4',
reason='You should now use skbio.g().')
e1 = (" Add 42, or something else, to x.\n\n"
" .. note:: Deprecated as of 0.1.0 for "
"removal in 0.1.4. You should now use\n"
" skbio.g().\n\n"
" Parameters")
self.assertTrue(f.__doc__.startswith(e1))
f = self._get_f('0.1.1', until='0.1.5',
reason='You should now use skbio.h().')
e1 = (" Add 42, or something else, to x.\n\n"
" .. note:: Deprecated as of 0.1.1 for "
"removal in 0.1.5. You should now use\n"
" skbio.h().\n\n"
" Parameters")
self.assertTrue(f.__doc__.startswith(e1))
def test_function_signature(self):
f = self._get_f('0.1.0', until='0.1.4',
reason='You should now use skbio.g().')
expected = inspect.ArgSpec(
args=['x', 'y'], varargs=None, keywords=None, defaults=(42,))
self.assertEqual(inspect.getargspec(f), expected)
self.assertEqual(f.__name__, 'f')
def test_missing_kwarg(self):
self.assertRaises(ValueError, deprecated)
self.assertRaises(ValueError, deprecated, '0.1.0')
self.assertRaises(ValueError, deprecated, as_of='0.1.0')
self.assertRaises(ValueError, deprecated, as_of='0.1.0', until='0.1.4')
if __name__ == '__main__':
unittest.main()
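# Usage sketch of the decorators under test (hypothetical versions/reason):
#
# @deprecated(as_of='0.4.0', until='0.5.0', reason='Use skbio.new_f instead.')
# def old_f(x):
#     """Do something."""
#     return x
#
# Calling old_f emits a DeprecationWarning, and its docstring gains a
# ".. note:: Deprecated as of 0.4.0 for removal in 0.5.0 ..." block.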
|
demis001/scikit-bio
|
skbio/util/tests/test_decorator.py
|
Python
|
bsd-3-clause
| 9,694 | 0.000103 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import django_castle
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
version = django_castle.__version__
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
os.system('python setup.py bdist_wheel upload')
sys.exit()
if sys.argv[-1] == 'tag':
print("Tagging the version on github:")
os.system("git tag -a %s -m 'version %s'" % (version, version))
os.system("git push --tags")
sys.exit()
readme = open('README.rst').read()
history = open('HISTORY.rst').read().replace('.. :changelog:', '')
setup(
name='django-castle',
version=version,
description="""A django integration for the castle.io service""",
long_description=readme + '\n\n' + history,
author='Jens Alm',
author_email='jens.alm@prorenata.se',
url='https://github.com/ulmus/django-castle',
packages=[
'django_castle',
],
include_package_data=True,
install_requires=[
],
license="BSD",
zip_safe=False,
keywords='django-castle',
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
|
ProReNata/django-castle
|
setup.py
|
Python
|
bsd-3-clause
| 1,596 | 0 |
# The MIT License
#
# Copyright (c) 2008 James Piechota
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import sys
import os.path
# Node definition
class WReader:
def __init__(self):
self._fullName = ""
self._path = ""
self._maxInfluences = 0
self.deformers = []
self.weights = []
def name(self):
return self._fullName
def read( self, fullName ):
'''Load skin weights from a Massive .w (weights) file'''
try:
if not os.path.isfile(fullName):
return
self._fullName = fullName
self._path = os.path.dirname( fullName )
fileHandle = open(self._fullName, "r")
deformers = []
tokens = []
weights = []
maxInfluences = 0
for line in fileHandle:
tokens = line.strip().split()
if tokens:
if tokens[0][0] == "#":
# Comment
continue
elif tokens[0] == "deformer":
id = int(tokens[1])
numDeformers = len(self.deformers)
if id >= numDeformers:
self.deformers.extend([ "" ] * (id - numDeformers + 1))
self.deformers[id] = tokens[2]
else:
# TODO: see if storing 0s for joints that have
# no influence is a problem. Storing the influences
# sparsely may make applying the weights later more
# complex
#
numTokens = len(tokens)
vtx = int(tokens[0][:-1])
influences = [0] * len(self.deformers)
count = 0
for i in range(1, numTokens, 2):
influences[int(tokens[i])] = float(tokens[i+1])
count += 1
# keep track of the maximum number of influences on a
# given vertex so we can use it to optimize the skin
# deformers later
#
if count > self._maxInfluences:
self._maxInfluences = count
self.weights.append(influences)
fileHandle.close()
except:
print >> sys.stderr, "Error reading Weights file: %s" % self._fullName
raise
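# Minimal usage sketch (hypothetical file path):
#
# if __name__ == '__main__':
#     reader = WReader()
#     reader.read('/path/to/skin.w')
#     print "%d deformers, %d vertices, max %d influences per vertex" % (
#         len(reader.deformers), len(reader.weights), reader._maxInfluences)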
|
redpawfx/massiveImporter
|
python/ns/bridge/io/WReader.py
|
Python
|
mit
| 2,920 | 0.037329 |
# Generated by Django 2.2.11 on 2020-11-09 17:00
import daphne_context.utils
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('daphne_context', '0010_userinformation_mycroft_connection'),
]
operations = [
migrations.RemoveField(
model_name='userinformation',
name='mycroft_session',
),
migrations.CreateModel(
name='MycroftUser',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('mycroft_session', models.CharField(default=daphne_context.utils.generate_mycroft_session, max_length=9)),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
|
seakers/daphne_brain
|
daphne_context/migrations/0011_auto_20201109_1100.py
|
Python
|
mit
| 1,011 | 0.002967 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Archive',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('label', models.CharField(help_text=b'Short label to identify an archive', max_length=10)),
('name', models.CharField(help_text=b'repository name (subarea) in EAD to identify finding aids associated with this archive', max_length=255)),
('svn', models.URLField(help_text=b'URL to subversion repository containing EAD for this archive', verbose_name=b'Subversion Repository')),
('slug', models.SlugField(help_text=b'shorthand id\n (auto-generated from label; do not modify after initial archive definition)')),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Deleted',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('eadid', models.CharField(unique=True, max_length=50, verbose_name=b'EAD Identifier')),
('title', models.CharField(max_length=200)),
('date', models.DateTimeField(auto_now_add=True, verbose_name=b'Date removed')),
('note', models.CharField(help_text=b'Optional: Enter the reason this document is being deleted. These comments will be displayed to anyone who had the finding aid bookmarked and returns after it is gone.', max_length=400, blank=True)),
],
options={
'verbose_name': 'Deleted Record',
},
bases=(models.Model,),
),
]
|
emory-libraries/findingaids
|
findingaids/fa/migrations/0001_initial.py
|
Python
|
apache-2.0
| 1,937 | 0.004646 |
import logging
import inspect
import numpy as np
from pybar.analysis.analyze_raw_data import AnalyzeRawData
from pybar.fei4.register_utils import invert_pixel_mask, make_xtalk_mask, make_pixel_mask
from pybar.fei4_run_base import Fei4RunBase
from pybar.fei4.register_utils import scan_loop
from pybar.run_manager import RunManager
from pybar.analysis.plotting.plotting import plot_occupancy
class CrosstalkScan(Fei4RunBase):
'''Crosstalk Scan
Implementation of a crosstalk scan. Injection in long edge pixels (row - 1, row + 1).
    Crosstalk exists when a threshold higher than 0 can be measured (s-curve fit successful).
'''
_default_run_conf = {
"broadcast_commands": True,
"threaded_scan": False,
"mask_steps": 6, # number of injections per PlsrDAC step
"n_injections": 100, # number of injections per PlsrDAC step
"scan_parameters": [('PlsrDAC', [None, 800])], # the PlsrDAC range
"step_size": 10, # step size of the PlsrDAC during scan
"use_enable_mask": False, # if True, use Enable mask during scan, if False, all pixels will be enabled
"enable_shift_masks": ["Enable"], # enable masks shifted during scan
"disable_shift_masks": [], # disable masks shifted during scan
"xtalk_shift_mask": ["C_High", "C_Low"], # crosstalk mask derived from enable_shift_masks
"pulser_dac_correction": False # PlsrDAC correction for each double column
}
def configure(self):
commands = []
commands.extend(self.register.get_commands("ConfMode"))
# C_Low
if "C_Low".lower() in map(lambda x: x.lower(), self.enable_shift_masks):
self.register.set_pixel_register_value('C_Low', 1)
commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=True, name='C_Low'))
else:
self.register.set_pixel_register_value('C_Low', 0)
commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=True, name='C_Low'))
# C_High
if "C_High".lower() in map(lambda x: x.lower(), self.enable_shift_masks):
self.register.set_pixel_register_value('C_High', 1)
commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=True, name='C_High'))
else:
self.register.set_pixel_register_value('C_High', 0)
commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=True, name='C_High'))
commands.extend(self.register.get_commands("RunMode"))
self.register_utils.send_commands(commands)
def scan(self):
scan_parameter_range = [0, (2 ** self.register.global_registers['PlsrDAC']['bitlength'])]
if self.scan_parameters.PlsrDAC[0]:
scan_parameter_range[0] = self.scan_parameters.PlsrDAC[0]
if self.scan_parameters.PlsrDAC[1]:
scan_parameter_range[1] = self.scan_parameters.PlsrDAC[1]
scan_parameter_range = range(scan_parameter_range[0], scan_parameter_range[1] + 1, self.step_size)
logging.info("Scanning %s from %d to %d", 'PlsrDAC', scan_parameter_range[0], scan_parameter_range[-1])
def set_xtalk_mask():
frame = inspect.currentframe()
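            # inspect the caller's frame (presumably scan_loop's locals) so the
            # xtalk mask is only rewritten on the first injection of each mask step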
if frame.f_back.f_locals['index'] == 0:
mask = make_pixel_mask(steps=self.mask_steps, shift=frame.f_back.f_locals['mask_step'])
mask = make_xtalk_mask(mask)
map(lambda mask_name: self.register.set_pixel_register_value(mask_name, mask), self.disable_shift_masks)
commands = []
commands.append(self.register.get_commands("ConfMode")[0])
commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=True, name=self.xtalk_shift_mask, joint_write=True))
commands.append(self.register.get_commands("RunMode")[0])
self.register_utils.send_commands(commands, concatenate=True)
for scan_parameter_value in scan_parameter_range:
if self.stop_run.is_set():
break
commands = []
commands.extend(self.register.get_commands("ConfMode"))
self.register.set_global_register_value('PlsrDAC', scan_parameter_value)
commands.extend(self.register.get_commands("WrRegister", name=['PlsrDAC']))
self.register_utils.send_commands(commands)
with self.readout(PlsrDAC=scan_parameter_value):
cal_lvl1_command = self.register.get_commands("CAL")[0] + self.register.get_commands("zeros", length=40)[0] + self.register.get_commands("LV1")[0]
scan_loop(self, cal_lvl1_command, repeat_command=self.n_injections, use_delay=True, mask_steps=self.mask_steps, enable_mask_steps=None, enable_double_columns=None, same_mask_for_all_dc=False, fast_dc_loop=False, bol_function=set_xtalk_mask, eol_function=None, digital_injection=False, enable_shift_masks=self.enable_shift_masks, disable_shift_masks=self.disable_shift_masks, restore_shift_masks=False, mask=invert_pixel_mask(self.register.get_pixel_register_value('Enable')) if self.use_enable_mask else None, double_column_correction=self.pulser_dac_correction)
def analyze(self):
with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as analyze_raw_data:
analyze_raw_data.create_tot_hist = False
analyze_raw_data.create_fitted_threshold_hists = True
analyze_raw_data.create_threshold_mask = True
analyze_raw_data.n_injections = 100
analyze_raw_data.interpreter.set_warning_output(False) # so far the data structure in a threshold scan was always bad, too many warnings given
analyze_raw_data.interpret_word_table()
analyze_raw_data.interpreter.print_summary()
analyze_raw_data.plot_histograms()
thr_hist = analyze_raw_data.out_file_h5.root.HistThresholdFitted[:, :].T
xtalk_mask = np.zeros(shape=thr_hist.shape, dtype=np.dtype('>u1'))
xtalk_mask[thr_hist > 0.0] = 1
plot_occupancy(xtalk_mask.T, title='Crosstalk', z_max=1, filename=analyze_raw_data.output_pdf)
if __name__ == "__main__":
with RunManager('configuration.yaml') as runmngr:
runmngr.run_run(CrosstalkScan)
|
SiLab-Bonn/pyBAR
|
pybar/scans/scan_crosstalk.py
|
Python
|
bsd-3-clause
| 6,325 | 0.004111 |
#!/usr/bin/env python
"""
Parse a file and write output to another.
"""
from optparse import OptionParser
import re
from collections import OrderedDict
parser = OptionParser()
parser.add_option("-i", "--input", dest="input_filepath", help="input filepath")
parser.add_option("-o", "--output", dest="output_filepath", help="output filepath")
(options, args) = parser.parse_args()
#print options
#print args
input_filepath = options.input_filepath
output_filepath = options.output_filepath
lines = {}
pattern_key = re.compile(r'ednKey="(.*?)"')
pattern_value = re.compile(r'ednvalue="(.*?)"')
with open(input_filepath, 'r') as input_file:
for line in input_file:
line = line.strip()
key = pattern_key.search(line)
value = pattern_value.search(line)
if (key and value):
lines[key.group(1)] = value.group(1)
ordered_lines = OrderedDict(sorted(lines.items(), key = lambda t: int(t[0])))
with open(output_filepath, 'w') as output_file:
for line in ordered_lines.items():
#output_file.write('%s,%s\n' % (line[0], line[1]))
output_file.write("{0} => __( '{1}', 'ev' ),\n".format(line[0], line[1]))
print "Completed"
|
alexisbellido/programming-in-python
|
parse_file.py
|
Python
|
bsd-3-clause
| 1,192 | 0.00755 |
def f(m,n):
ans = 1
while (m - n >= 0):
(ans,m) = (ans*2,m-n)
return(ans)
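# Closed form (assuming m >= 0 and n > 0): the loop doubles ans once for each
# whole multiple of n that fits in m, so f(m, n) == 2 ** (m // n).
# Example: f(10, 3) == 8, f(2, 3) == 1.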
|
selvagit/experiments
|
nptel/nptel_programming_data_structure/week_1/q3.py
|
Python
|
gpl-3.0
| 97 | 0.082474 |
"""Test that resize event works correctly.
Expected behaviour:
One window will be opened. Resize the window and ensure that the
dimensions printed to the terminal are correct. You should see
a green border inside the window but no red.
Close the window or press ESC to end the test.
"""
import unittest
from pyglet import window
from tests.interactive.window import window_util
class EVENT_RESIZE(unittest.TestCase):
def on_resize(self, width, height):
print('Window resized to %dx%d.' % (width, height))
def test_resize(self):
w = window.Window(200, 200, resizable=True)
w.push_handlers(self)
while not w.has_exit:
w.dispatch_events()
window_util.draw_client_border(w)
w.flip()
w.close()
|
bitcraft/pyglet
|
tests/interactive/window/event_resize.py
|
Python
|
bsd-3-clause
| 801 | 0 |
from zope.interface import Interface
class IUWOshThemeLayer(Interface):
"""
Marker interface that defines a browser layer
"""
|
uwosh/uwosh.themebase
|
uwosh/themebase/browser/interfaces.py
|
Python
|
gpl-2.0
| 138 | 0.014493 |
# Copyright 2006 James Tauber and contributors
# Copyright (C) 2009 Luke Kenneth Casson Leighton <lkcl@lkcl.net>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pyjamas import DOM
from pyjamas import Factory
from pyjamas import Window
from pyjamas.ui import Applier
def setStyleName(element, style, add):
oldStyle = DOM.getAttribute(element, "className")
if oldStyle is None:
oldStyle = ""
idx = oldStyle.find(style)
# Calculate matching index
lastPos = len(oldStyle)
while idx != -1:
if idx == 0 or (oldStyle[idx - 1] == " "):
last = idx + len(style)
if (last == lastPos) or ((last < lastPos) and (oldStyle[last] == " ")):
break
idx = oldStyle.find(style, idx + 1)
if add:
if idx == -1:
DOM.setAttribute(element, "className", oldStyle + " " + style)
else:
if idx != -1:
if idx == 0:
begin = ''
else:
begin = oldStyle[:idx-1]
end = oldStyle[idx + len(style):]
DOM.setAttribute(element, "className", begin + end)
class UIObject(Applier):
_props = [ ("visible", "Visibility", "Visible", None),
("element", "Element", "Element", None),
("stylename", "Style name", "StyleName", None),
("width", "Width", "Width", None),
("height", "Height", "Height", None),
("size", "Size", "Size", None),
("title", "Title", "Title", None),
("zindex", "Z Index", "zIndex", None),
]
@classmethod
def _getProps(self):
return Applier._getProps() + self._props
def __init__(self, **kwargs):
        # do not initialise element here to None, whatever you do.
# there are circumstances where UIObject.__init__ is the last
# thing that is done in derived classes, where self.setElement
# will _already_ have been called.
Applier.__init__(self, **kwargs)
def getAbsoluteLeft(self):
return DOM.getAbsoluteLeft(self.getElement())
def getAbsoluteTop(self):
return DOM.getAbsoluteTop(self.getElement())
def getElement(self):
"""Get the DOM element associated with the UIObject, if any"""
return self.element
def getOffsetHeight(self):
return DOM.getIntAttribute(self.element, "offsetHeight")
def getOffsetWidth(self):
return DOM.getIntAttribute(self.element, "offsetWidth")
def getStyleName(self):
return DOM.getAttribute(self.element, "className")
def getStylePrimaryName(self):
"""Return with the first className if there are multiples"""
fullClassName = self.getStyleName()
if fullClassName: return fullClassName.split()[0]
def getStyleAttribute(self, attribute):
""" can be called with two forms:
getStyleAttribute(self, attr) - returns value
getStyleAttribute(self, (attr1,attr2,...)) - returns dictionary
of attr:value pairs
"""
if isinstance(attribute, basestring):
return DOM.getStyleAttribute(self.getElement(), attribute)
# if attribute is not a string, assume it is iterable,
# and return the multi-attribute form
el = self.getElement()
result = {}
for attr in attribute:
result[attr] = DOM.getStyleAttribute(el,attr)
return result
def getTitle(self):
return DOM.getAttribute(self.element, "title")
def setElement(self, element):
"""Set the DOM element associated with the UIObject."""
self.element = element
def setHeight(self, height):
"""Set the height of the element associated with this UIObject. The
        value should be given as a CSS value, such as 100px, 30%, or 50pc
"""
if height is None:
height = ""
DOM.setStyleAttribute(self.element, "height", str(height))
def getHeight(self):
return DOM.getStyleAttribute(self.element, "height")
def setPixelSize(self, width, height):
"""Set the width and height of the element associated with this UIObject
in pixels. Width and height should be numbers.
"""
if width >= 0:
self.setWidth("%dpx" % width)
if height >= 0:
self.setHeight("%dpx" % height)
def setSize(self, width, height):
"""Set the width and height of the element associated with this
UIObject. The values should be given as a CSS value,
        such as 100px, 30%, or 50pc
"""
self.setWidth(width)
self.setHeight(height)
def addStyleName(self, style):
"""Append a style to the element associated with this UIObject.
This is a CSS class name. It will be added after any
already-assigned CSS class for the element.
"""
self.setStyleName(self.element, style, True)
def addStyleDependentName(self, styleSuffix):
"""Adds a secondary or dependent style name to this element.
For example if the primary stylename is gwt-TextBox,
self.addStyleDependentName("readonly") will return
gwt-TextBox-readonly.
"""
self.addStyleName(self.getStylePrimaryName()+"-"+styleSuffix)
def removeStyleName(self, style):
"""Remove a style from the element associated with this UIObject. This is
a CSS class name."""
self.setStyleName(self.element, style, False)
def removeStyleDependentName(self, styleSuffix):
"""Remove a dependent style name by specifying the style name's suffix.
"""
self.removeStyleName(self.getStylePrimaryName()+"-"+styleSuffix)
# also callable as: setStyleName(self, style)
def setStyleName(self, element, style=None, add=True):
"""When called with a single argument, this replaces all the CSS
classes associated with this UIObject's element with the given
parameter. Otherwise, this is assumed to be a worker function
for addStyleName and removeStyleName.
"""
# emulate setStyleName(self, style)
if style is not None:
setStyleName(element, style, add)
return
style = element
DOM.setAttribute(self.element, "className", style)
def setStyleAttribute(self, attribute, value=None):
""" can be called with two forms:
single attr: setStyleAttribute(self, attr, value)
multi attr: setStyleAttribute(self, {attr1:val1, attr2:val2, ...})
"""
if value is not None: # assume single attr form
DOM.setStyleAttribute(self.getElement(), attribute, value)
return
# assume multi value form
el = self.getElement()
for attr, val in attribute.items():
DOM.setStyleAttribute(el, attr, val)
def setTitle(self, title):
DOM.setAttribute(self.element, "title", title)
def setWidth(self, width):
"""Set the width of the element associated with this UIObject. The
        value should be given as a CSS value, such as 100px, 30%, or 50pc
"""
if width is None:
width = ""
DOM.setStyleAttribute(self.element, "width", str(width))
def getWidth(self):
return DOM.getStyleAttribute(self.element, "width")
def sinkEvents(self, eventBitsToAdd):
"""Request that the given events be delivered to the event handler
for this element. The event bits passed are added (using inclusive
OR) to the events already "sunk" for the element associated with
the UIObject. The event bits are a combination of values from
class L{Event}.
"""
if self.element:
DOM.sinkEvents(self.getElement(),
eventBitsToAdd | DOM.getEventsSunk(self.getElement()))
def setzIndex(self, index):
DOM.setIntStyleAttribute(self.element, "zIndex", index)
def isVisible(self, element=None):
""" XXX DEPRECATED - use getVisible
"""
return self.getVisible(element)
def getVisible(self, element=None):
"""Determine whether this element is currently visible, by checking
the CSS property 'display'
"""
if not element:
element = self.element
try: # yuk!
return element.style.display != "none"
except AttributeError: # not been set (yet?)
return True
# also callable as: setVisible(visible)
def setVisible(self, element, visible=None):
"""Set whether this element is visible or not. If a single parameter
is given, the self.element is used. This modifies the CSS
property 'display', which means that an invisible element not
only is not drawn, but doesn't occupy any space on the page.
"""
if visible is None:
visible = element
element = self.element
if visible:
DOM.setStyleAttribute(element, 'display', "")
else:
DOM.setStyleAttribute(element, 'display', "none")
def unsinkEvents(self, eventBitsToRemove):
"""Reverse the operation of sinkEvents. See L{UIObject.sinkevents}.
"""
DOM.sinkEvents(self.getElement(),
~eventBitsToRemove & DOM.getEventsSunk(self.getElement()))
Factory.registerClass('pyjamas.ui.UIObject', 'UIObject', UIObject)
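# Illustrative sketch of the dual-form setters above (hypothetical widget):
#
# w = UIObject()
# w.setElement(DOM.createDiv())
# w.setStyleName('gwt-TextBox')        # single-arg form: replaces all classes
# w.addStyleDependentName('readonly')  # adds 'gwt-TextBox-readonly'
# w.setStyleAttribute({'width': '100px', 'height': '30px'})  # multi-attr form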
|
anandology/pyjamas
|
library/gwt/ui/UIObject.py
|
Python
|
apache-2.0
| 10,085 | 0.002776 |
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
from __future__ import division, print_function, unicode_literals, \
absolute_import
import os
import unittest
from pymatgen.io.lammps.sets import LammpsInputSet
__author__ = 'Kiran Mathew'
__email__ = 'kmathew@lbl.gov'
test_dir = os.path.join(os.path.dirname(__file__), "..", "..", "..", "..",
"test_files", "lammps")
class TestLammpsInputSet(unittest.TestCase):
def setUp(self):
template_file = os.path.join(test_dir, "in.peptide.template")
data_file = os.path.join(test_dir, "data.peptide")
self.data_filename = "test_data.peptide"
self.input_filename = "test_input.peptide"
self.settings = {
"pair_style": "lj/charmm/coul/long 8.0 10.0 10.0",
"kspace_style": "pppm 0.0001",
"fix_1": "1 all nvt temp 275.0 275.0 100.0 tchain 1",
"fix_2": "2 all shake 0.0001 10 100 b 4 6 8 10 12 14 18 a 31"
}
self.lammps_input_set = LammpsInputSet.from_file(
"test", template_file, self.settings, lammps_data=data_file,
data_filename=self.data_filename)
def test_input(self):
self.assertEqual(self.lammps_input_set.lammps_input.settings["data_file"],
self.data_filename)
for k, v in self.settings.items():
self.assertEqual(self.lammps_input_set.lammps_input.settings[k], v)
def test_write_input_set(self):
self.lammps_input_set.write_input(self.input_filename)
self.assertTrue(os.path.exists(self.input_filename))
self.assertTrue(os.path.exists(self.data_filename))
os.remove(self.input_filename)
os.remove(self.data_filename)
# now change both input and data filenames
self.lammps_input_set.write_input("xxxx.input", "yyy.data")
self.assertTrue(os.path.exists("xxxx.input"))
self.assertTrue(os.path.exists("yyy.data"))
os.remove("xxxx.input")
os.remove("yyy.data")
if __name__ == "__main__":
unittest.main()
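# The pattern exercised above, as a standalone sketch (paths hypothetical):
#
# lis = LammpsInputSet.from_file(
#     "peptide", "in.peptide.template",
#     {"pair_style": "lj/charmm/coul/long 8.0 10.0 10.0"},
#     lammps_data="data.peptide", data_filename="data.peptide")
# lis.write_input("in.peptide")  # writes the input file and the data file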
|
johnson1228/pymatgen
|
pymatgen/io/lammps/tests/test_sets.py
|
Python
|
mit
| 2,130 | 0.000469 |
from csv import DictReader
import os
from rest_framework import status
from rest_framework.viewsets import ViewSet
from rest_framework.exceptions import NotFound
from rest_framework.response import Response
import odatagym_app.settings as ods
import logging
logger = logging.getLogger('odata_gym')
class DatasetsHandler(ViewSet):
def get(self, request, dataset_folder, dataset_name, format=None):
DELIMITERS_MAP = {
'c': ',',
'sc': ';',
'sp': ' '
}
dataset_path = os.path.join(ods.DATASETS_DIR, dataset_folder, dataset_name)
print dataset_path
if os.path.exists(dataset_path):
print request.query_params
delimiter = request.GET.get('file_delimiter', 'c')
print 'Delimiter is %s' % delimiter
with open(dataset_path) as dataset:
reader = DictReader(dataset, delimiter=DELIMITERS_MAP[delimiter])
data = [x for x in reader]
return Response(data, status=status.HTTP_200_OK)
else:
raise NotFound('There is no dataset %s for %s' % (dataset_name, dataset_folder))
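# Example request (hypothetical dataset): GET .../gyms/members.csv?file_delimiter=sc
# reads DATASETS_DIR/gyms/members.csv with ';' as the delimiter and returns the
# rows as a JSON list of {column: value} dicts.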
|
lucalianas/opendata_gym
|
odatagym_app/datasets_handler/views.py
|
Python
|
mit
| 1,158 | 0.002591 |
#!/usr/bin/env python
import re
import os
import time
import sys
import unittest
import ConfigParser
from setuptools import setup, Command
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
class SQLiteTest(Command):
"""
Run the tests on SQLite
"""
description = "Run tests on SQLite"
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
if self.distribution.tests_require:
self.distribution.fetch_build_eggs(self.distribution.tests_require)
os.environ['TRYTOND_DATABASE_URI'] = 'sqlite://'
os.environ['DB_NAME'] = ':memory:'
from tests import suite
test_result = unittest.TextTestRunner(verbosity=3).run(suite())
if test_result.wasSuccessful():
sys.exit(0)
sys.exit(-1)
class PostgresTest(Command):
"""
Run the tests on Postgres.
"""
description = "Run tests on Postgresql"
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
if self.distribution.tests_require:
self.distribution.fetch_build_eggs(self.distribution.tests_require)
os.environ['TRYTOND_DATABASE_URI'] = 'postgresql://'
os.environ['DB_NAME'] = 'test_' + str(int(time.time()))
from tests import suite
test_result = unittest.TextTestRunner(verbosity=3).run(suite())
if test_result.wasSuccessful():
sys.exit(0)
sys.exit(-1)
config = ConfigParser.ConfigParser()
config.readfp(open('tryton.cfg'))
info = dict(config.items('tryton'))
for key in ('depends', 'extras_depend', 'xml'):
if key in info:
info[key] = info[key].strip().splitlines()
major_version, minor_version, _ = info.get('version', '0.0.1').split('.', 2)
major_version = int(major_version)
minor_version = int(minor_version)
requires = []
MODULE2PREFIX = {
'report_webkit': 'openlabs'
}
MODULE = "waiting_customer_shipment_report"
PREFIX = "fio"
for dep in info.get('depends', []):
if not re.match(r'(ir|res|webdav)(\W|$)', dep):
requires.append(
'%s_%s >= %s.%s, < %s.%s' % (
MODULE2PREFIX.get(dep, 'trytond'), dep,
major_version, minor_version, major_version,
minor_version + 1
)
)
requires.append(
'trytond >= %s.%s, < %s.%s' % (
major_version, minor_version, major_version, minor_version + 1
)
)
setup(
name='%s_%s' % (PREFIX, MODULE),
version=info.get('version', '0.0.1'),
description="",
author="Fulfil.IO Inc., Openlabs Technologies and Consulting (P) Ltd.",
author_email='info@fulfil.io',
url='http://www.fulfil.io/',
package_dir={'trytond.modules.%s' % MODULE: '.'},
packages=[
'trytond.modules.%s' % MODULE,
'trytond.modules.%s.tests' % MODULE,
],
package_data={
'trytond.modules.%s' % MODULE: info.get('xml', []) +
info.get('translation', []) +
['tryton.cfg', 'locale/*.po', 'tests/*.rst', 'reports/*.odt'] +
['view/*.xml', 'reports/*.html', 'reports/css/bootstrap/css/*'] +
['reports/css/bootstrap/fonts/*', 'reports/css/font-awesome/css/*'] +
['reports/css/font-awesome/fonts/*', 'reports/js/*.js']
},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Plugins',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Tryton',
'Topic :: Office/Business',
],
long_description=open('README.rst').read(),
license='BSD',
install_requires=requires,
zip_safe=False,
entry_points="""
[trytond.modules]
%s = trytond.modules.%s
""" % (MODULE, MODULE),
test_suite='tests',
test_loader='trytond.test_loader:Loader',
cmdclass={
'test': SQLiteTest,
'test_on_postgres': PostgresTest,
}
)
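# Example of the version pinning computed above (hypothetical tryton.cfg with
# version 3.4.0 and a 'sale' dependency): requires would contain
# 'trytond_sale >= 3.4, < 3.5' and 'trytond >= 3.4, < 3.5'.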
|
fulfilio/trytond-waiting-customer-shipment-report
|
setup.py
|
Python
|
bsd-3-clause
| 4,152 | 0 |
#!/usr/bin/env python
import os
import sys
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings.prod')
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
monovertex/ygorganizer
|
manage.py
|
Python
|
mit
| 247 | 0 |
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2012 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
This module contains functions and methods to authenticate with OAuth1
providers.
"""
__revision__ = \
"$Id$"
from invenio.containerutils import get_substructure
from invenio.dbquery import run_sql
from invenio.external_authentication import ExternalAuth
class ExternalOAuth1(ExternalAuth):
"""
    Contains methods to authenticate with an OAuth1 provider.
"""
@staticmethod
def __init_req(req):
req.g['oauth1_provider_name'] = ''
req.g['oauth1_debug'] = 0
req.g['oauth1_msg'] = ''
req.g['oauth1_debug_msg'] = ''
req.g['oauth1_response'] = None
def auth_user(self, username, password, req=None):
"""
        Tries to find the email and identity of the user from the OAuth1
        provider. If it doesn't find them, returns (None, None).
@param username: Isn't used in this function
@type username: str
@param password: Isn't used in this function
@type password: str
@param req: request
@type req: invenio.webinterface_handler_wsgi.SimulatedModPythonRequest
@rtype: str|NoneType, str|NoneType
"""
from invenio.access_control_config import CFG_OAUTH1_CONFIGURATIONS
from invenio.access_control_config import CFG_OAUTH1_PROVIDERS
from invenio.webinterface_handler import wash_urlargd
from rauth.service import OAuth1Service
self.__init_req(req)
args = wash_urlargd(req.form, {'provider': (str, ''),
'login_method': (str, ''),
'oauth_token': (str, ''),
'oauth_verifier': (str, ''),
'denied': (str, '')
})
provider_name = req.g['oauth1_provider_name'] = args['provider']
        if provider_name not in CFG_OAUTH1_PROVIDERS:
req.g['oauth1_msg'] = 22
return None, None
# Load the configurations to construct OAuth1 service
config = CFG_OAUTH1_CONFIGURATIONS[args['provider']]
req.g['oauth1_debug'] = config.get('debug', 0)
if not args['oauth_token']:
# In case of an error, display corresponding message
if args['denied']:
req.g['oauth1_msg'] = 21
return None, None
else:
req.g['oauth1_msg'] = 22
return None, None
provider = OAuth1Service(
name = req.g['oauth1_provider_name'],
consumer_key = config['consumer_key'],
consumer_secret = config['consumer_secret'],
request_token_url = config['request_token_url'],
access_token_url = config['access_token_url'],
authorize_url = config['authorize_url'],
header_auth = True)
# Get the request token secret from database and exchange it with the
# access token.
query = """SELECT secret FROM oauth1_storage WHERE token = %s"""
params = (args['oauth_token'],)
try:
# If the request token is already used, return
request_token_secret = run_sql(query, params)[0][0]
except IndexError:
req.g['oauth1_msg'] = 22
return None, None
response = provider.get_access_token(
'GET',
request_token = args['oauth_token'],
request_token_secret = request_token_secret,
params = {
'oauth_verifier': args['oauth_verifier']
}
)
if req.g['oauth1_debug']:
req.g['oauth1_debug_msg'] = str(response.content) + "<br/>"
# Some providers send the identity and access token together.
email, identity = self._get_user_email_and_id(response.content, req)
        if not identity and 'request_url' in config:
            # For some providers, reaching the user profile requires a request
            # to a specific URL.
params = config.get('request_parameters', {})
response = provider.get(config['request_url'],
params = params,
access_token = response.content['oauth_token'],
access_token_secret = response.content['oauth_token_secret']
)
            if req.g['oauth1_debug']:
req.g['oauth1_debug_msg'] += str(response.content) + "<br/>"
email, identity = self._get_user_email_and_id(response.content, req)
if identity:
# If identity is found, add the name of the provider at the
# beginning of the identity because different providers may have
            # different users with the same id.
identity = "%s:%s" % (req.g['oauth1_provider_name'], identity)
else:
req.g['oauth1_msg'] = 23
# Delete the token saved in the database since it is useless now.
query = """
DELETE FROM oauth1_storage
WHERE token=%s
OR date_creation < DATE_SUB(NOW(), INTERVAL 1 HOUR)
"""
params = (args['oauth_token'],)
run_sql(query, params)
if req.g['oauth1_debug']:
req.g['oauth1_msg'] = "<code>%s</code>" % req.g['oauth1_debug_msg'].replace("\n", "<br/>")
return None, None
return email, identity
def fetch_user_nickname(self, username, password=None, req=None):
"""
        Fetches the nickname of the user from the OAuth1 provider. If it
        doesn't find any, returns None.
        The username and password parameters aren't used; they exist only
        because this class is derived from ExternalAuth.
        @param username: Isn't used in this function
        @type username: str
        @param password: Isn't used in this function
        @type password: str
        @param req: request
        @type req: invenio.webinterface_handler_wsgi.SimulatedModPythonRequest
@rtype: str or NoneType
"""
from invenio.access_control_config import CFG_OAUTH1_CONFIGURATIONS
        if req.g['oauth1_provider_name']:
            config = CFG_OAUTH1_CONFIGURATIONS[req.g['oauth1_provider_name']]
            path = config.get('nickname')
            if path:
                return get_substructure(req.g['oauth1_response'], path)
        return None
def _get_user_email_and_id(self, container, req):
"""
        Returns the external identity and email address together. Since the
        identity is essential for OAuth1 authentication, it returns
        (None, None) when the external identity cannot be found.
@param container: container which contains email and id
@type container: list|dict
@rtype str|NoneType, str|NoneType
"""
from invenio.access_control_config import CFG_OAUTH1_CONFIGURATIONS
identity = None
email = None
        config = CFG_OAUTH1_CONFIGURATIONS[req.g['oauth1_provider_name']]
        if 'id' in config:
            identity = get_substructure(container, config['id'])
        if identity and 'email' in config:
            email = get_substructure(container, config['email'])
req.g['oauth1_response'] = container
return email, identity
@staticmethod
def get_msg(req):
return req.g['oauth1_msg']
|
Panos512/invenio
|
modules/webaccess/lib/external_authentication_oauth1.py
|
Python
|
gpl-2.0
| 8,849 | 0.007232 |
# All nodes are of the form [path1, child1, path2, child2]
# or <value>
from ethereum import utils
from ethereum.db import EphemDB, ListeningDB
import rlp, sys
import copy
hashfunc = utils.sha3
HASHLEN = 32
# 0100000101010111010000110100100101001001 -> ASCII
def decode_bin(x):
return ''.join([chr(int(x[i:i+8], 2)) for i in range(0, len(x), 8)])
# ASCII -> 0100000101010111010000110100100101001001
def encode_bin(x):
o = ''
for c in x:
c = ord(c)
p = ''
for i in range(8):
p = str(c % 2) + p
c /= 2
o += p
return o
# Encodes a binary list [0,1,0,1,1,0] of any length into bytes
def encode_bin_path(li):
if li == []:
return ''
b = ''.join([str(x) for x in li])
b2 = '0' * ((4 - len(b)) % 4) + b
prefix = ['00', '01', '10', '11'][len(b) % 4]
if len(b2) % 8 == 4:
return decode_bin('00' + prefix + b2)
else:
return decode_bin('100000' + prefix + b2)
# Decodes bytes into a binary list
def decode_bin_path(p):
if p == '':
return []
p = encode_bin(p)
if p[0] == '1':
p = p[4:]
assert p[0:2] == '00'
L = ['00', '01', '10', '11'].index(p[2:4])
p = p[4+((4 - L) % 4):]
return [(1 if x == '1' else 0) for x in p]
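# Round-trip sketch (hypothetical helper, not part of the original module):
# the codecs above should invert each other exactly.
def _bin_path_roundtrip_demo():
    assert decode_bin(encode_bin('ASCII')) == 'ASCII'
    for li in ([], [1], [0, 1, 1, 0, 1], [1, 0] * 5):
        assert decode_bin_path(encode_bin_path(li)) == li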
# Get a node from a database if needed
def dbget(node, db):
if len(node) == HASHLEN:
return rlp.decode(db.get(node))
return node
# Place a node into a database if needed
def dbput(node, db):
r = rlp.encode(node)
if len(r) == HASHLEN or len(r) > HASHLEN * 2:
h = hashfunc(r)
db.put(h, r)
return h
return node
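# Indirection sketch (hypothetical helper, not part of the original file):
# nodes whose RLP encoding is longer than 2 * HASHLEN are stored under
# their 32-byte hash, and dbget transparently resolves either form.
def _db_indirection_demo():
    db = EphemDB()
    node = ['x' * 40, 'y' * 40]  # RLP encoding is 84 bytes, so dbput hashes it
    ref = dbput(node, db)
    assert len(ref) == HASHLEN
    assert dbget(ref, db) == node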
# Get a value from a tree
def get(node, db, key):
node = dbget(node, db)
if key == []:
return node[0]
elif len(node) == 1 or len(node) == 0:
return ''
else:
sub = dbget(node[key[0]], db)
if len(sub) == 2:
subpath, subnode = sub
else:
subpath, subnode = '', sub[0]
subpath = decode_bin_path(subpath)
if key[1:len(subpath)+1] != subpath:
return ''
return get(subnode, db, key[len(subpath)+1:])
# Get length of shared prefix of inputs
def get_shared_length(l1, l2):
i = 0
while i < len(l1) and i < len(l2) and l1[i] == l2[i]:
i += 1
return i
# Replace ['', v] with [v] and compact nodes into hashes
# if needed
def contract_node(n, db):
if len(n[0]) == 2 and n[0][0] == '':
n[0] = [n[0][1]]
if len(n[1]) == 2 and n[1][0] == '':
n[1] = [n[1][1]]
if len(n[0]) != 32:
n[0] = dbput(n[0], db)
if len(n[1]) != 32:
n[1] = dbput(n[1], db)
return dbput(n, db)
# Update a trie
def update(node, db, key, val):
node = dbget(node, db)
# Unfortunately this particular design does not allow
# a node to have one child, so at the root for empty
# tries we need to add two dummy children
if node == '':
node = [dbput([encode_bin_path([]), ''], db),
dbput([encode_bin_path([1]), ''], db)]
if key == []:
node = [val]
elif len(node) == 1:
raise Exception("DB must be prefix-free")
else:
assert len(node) == 2, node
sub = dbget(node[key[0]], db)
if len(sub) == 2:
_subpath, subnode = sub
else:
_subpath, subnode = '', sub[0]
subpath = decode_bin_path(_subpath)
sl = get_shared_length(subpath, key[1:])
if sl == len(subpath):
node[key[0]] = [_subpath, update(subnode, db, key[sl+1:], val)]
else:
subpath_next = subpath[sl]
n = [0, 0]
n[subpath_next] = [encode_bin_path(subpath[sl+1:]), subnode]
n[(1 - subpath_next)] = [encode_bin_path(key[sl+2:]), [val]]
n = contract_node(n, db)
node[key[0]] = dbput([encode_bin_path(subpath[:sl]), n], db)
return contract_node(node, db)
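# End-to-end sketch (hypothetical helper, not part of the original file):
# insert a single key into an empty trie and read the value back. Keys are
# lists of bits, produced by encode_bin as in test() below.
def _trie_roundtrip_demo():
    db = EphemDB()
    key = [int(b) for b in encode_bin(rlp.encode('k1'))]
    root = update('', db, key, 'v1')
    assert get(root, db, key) == 'v1'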
# Compression algorithm specialized for merkle proof databases
# The idea is similar to standard compression algorithms, where
# you replace an instance of a repeat with a pointer to the repeat,
# except that here you replace an instance of a hash of a value
# with a short pointer to that value. This is useful since merkle branches
# usually include nodes which contain hashes of each other
magic = '\xff\x39'
def compress_db(db):
out = []
values = db.kv.values()
keys = [hashfunc(x) for x in values]
assert len(keys) < 65300
for v in values:
o = ''
pos = 0
while pos < len(v):
done = False
if v[pos:pos+2] == magic:
o += magic + magic
done = True
pos += 2
for i, k in enumerate(keys):
if v[pos:].startswith(k):
o += magic + chr(i // 256) + chr(i % 256)
done = True
pos += len(k)
break
if not done:
o += v[pos]
pos += 1
out.append(o)
return rlp.encode(out)
def decompress_db(ins):
ins = rlp.decode(ins)
vals = [None] * len(ins)
def decipher(i):
if vals[i] is None:
v = ins[i]
o = ''
pos = 0
while pos < len(v):
if v[pos:pos+2] == magic:
if v[pos+2:pos+4] == magic:
o += magic
else:
ind = ord(v[pos+2]) * 256 + ord(v[pos+3])
o += hashfunc(decipher(ind))
pos += 4
else:
o += v[pos]
pos += 1
vals[i] = o
return vals[i]
for i in range(len(ins)):
decipher(i)
o = EphemDB()
for v in vals:
o.put(hashfunc(v), v)
return o
# Convert a merkle branch directly into RLP (ie. remove
# the hashing indirection). As it turns out, this is a
# really compact way to represent a branch
def compress_branch(db, root):
o = dbget(copy.copy(root), db)
def evaluate_node(x):
for i in range(len(x)):
if len(x[i]) == HASHLEN and x[i] in db.kv:
x[i] = evaluate_node(dbget(x[i], db))
elif isinstance(x, list):
x[i] = evaluate_node(x[i])
return x
o2 = rlp.encode(evaluate_node(o))
return o2
def decompress_branch(branch):
branch = rlp.decode(branch)
db = EphemDB()
def evaluate_node(x):
if isinstance(x, list):
x = [evaluate_node(n) for n in x]
x = dbput(x, db)
return x
evaluate_node(branch)
return db
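# Round-trip sketch (hypothetical helper, not part of the original file):
# both codecs should preserve the answer to the original query, mirroring
# the assertions in test() below.
def _compression_roundtrip_demo(db, root, key):
    assert decompress_db(compress_db(db)).kv == db.kv
    branch_db = decompress_branch(compress_branch(db, root))
    return get(root, branch_db, key)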
# Test with n nodes and m branch picks
def test(n, m=100):
assert m <= n
db = EphemDB()
x = ''
for i in range(n):
k = hashfunc(str(i))
v = hashfunc('v'+str(i))
x = update(x, db, [int(a) for a in encode_bin(rlp.encode(k))], v)
print(x)
    print(sum([len(val) for key, val in db.kv.items()]))
l1 = ListeningDB(db)
o = 0
p = 0
q = 0
ecks = x
for i in range(m):
x = copy.deepcopy(ecks)
k = hashfunc(str(i))
v = hashfunc('v'+str(i))
l2 = ListeningDB(l1)
v2 = get(x, l2, [int(a) for a in encode_bin(rlp.encode(k))])
assert v == v2
o += sum([len(val) for key, val in l2.kv.items()])
cdb = compress_db(l2)
p += len(cdb)
assert decompress_db(cdb).kv == l2.kv
cbr = compress_branch(l2, x)
q += len(cbr)
dbranch = decompress_branch(cbr)
assert v == get(x, dbranch, [int(a) for a in encode_bin(rlp.encode(k))])
# for k in l2.kv:
# assert k in dbranch.kv
o = {
'total_db_size': sum([len(val) for key, val in l1.kv.items()]),
        'avg_proof_size': (o // min(n, m)),
'avg_compressed_proof_size': (p // min(n, m)),
'avg_branch_size': (q // min(n, m)),
'compressed_db_size': len(compress_db(l1))
}
return o
|
EthereumWebhooks/blockhooks
|
lib/ethereum/tests/bintrie.py
|
Python
|
apache-2.0
| 8,042 | 0.000373 |
# Copyright 2010 Ramon Xuriguera
#
# This file is part of BibtexIndexMaker.
#
# BibtexIndexMaker is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# BibtexIndexMaker is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BibtexIndexMaker. If not, see <http://www.gnu.org/licenses/>.
from PyQt4 import QtCore, QtGui #@UnresolvedImport
from bibim.gui.ui.ui_file_chooser import Ui_FileChooser
from bibim.gui.ui.ui_new_collection_dialog import Ui_NewWrapperCollection
class FileChooser(QtGui.QWidget):
DIR = 0
FILE = 1
pathChanged = QtCore.pyqtSignal()
def __init__(self):
super(FileChooser, self).__init__()
# Setup ui
self.ui = Ui_FileChooser()
self.ui.setupUi(self)
self.path = QtCore.QString()
self.mode = self.DIR
# Connect signals and slots
#self.connect(self.ui.browseButton, QtCore.SIGNAL('clicked()'), self.chooseFile)
self.ui.browseButton.clicked.connect(self.chooseFile)
def get_path(self):
return self.__path
def set_path(self, value):
self.__path = value
self.pathChanged.emit()
path = QtCore.pyqtProperty(QtCore.QString, get_path, set_path)
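    # Usage sketch (hypothetical caller): because 'path' is a pyqtProperty
    # whose setter emits pathChanged, listeners can react to any selection:
    #
    #     chooser = FileChooser()
    #     chooser.pathChanged.connect(lambda: handle(chooser.path))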
@QtCore.pyqtSlot()
def chooseFile(self):
if self.mode == self.DIR:
self.path = QtGui.QFileDialog.getExistingDirectory(self)
else:
self.path = QtGui.QFileDialog.getOpenFileName(self)
if self.path:
self.ui.pathLine.setText(self.path)
class LogsTextEdit(QtGui.QTextEdit):
colors = {'DEBUG':QtGui.QColor(100, 100, 100),
'INFO':QtGui.QColor(0, 0, 0),
'WARNING':QtGui.QColor(222, 145, 2),
'ERROR':QtGui.QColor(191, 21, 43),
'CRITICAL':QtGui.QColor(191, 21, 43)}
def __init__(self, parent):
QtGui.QTextEdit.__init__(self, parent)
self.setReadOnly(True)
@QtCore.pyqtSlot(QtCore.QString, QtCore.QString)
def updateText(self, message, level='INFO'):
self.setTextColor(self.colors[str(level)])
self.append(message)
class WrapperCollectionBox(QtGui.QDialog):
def __init__(self, parent=None):
super(WrapperCollectionBox, self).__init__()
self.ui = Ui_NewWrapperCollection()
self.ui.setupUi(self)
self.setModal(True)
# OK Button disabled until both url and field are not empty
self.ok_button = self.ui.buttonBox.button(QtGui.QDialogButtonBox.Ok)
self.ok_button.setEnabled(False)
self.ui.urlLine.textChanged.connect(self._enable_ok_button)
self.ui.fieldLine.textChanged.connect(self._enable_ok_button)
def _enable_ok_button(self):
if not (self.ui.urlLine.text() and self.ui.fieldLine.text()):
self.ok_button.setEnabled(False)
else:
self.ok_button.setEnabled(True)
class ConfirmMessageBox(QtGui.QMessageBox):
def __init__(self, parent=None):
super(ConfirmMessageBox, self).__init__(parent)
self.setModal(True)
self.setStandardButtons(QtGui.QMessageBox.Ok |
QtGui.QMessageBox.Cancel)
self.setDefaultButton(QtGui.QMessageBox.Cancel)
self.setIcon(QtGui.QMessageBox.Question)
|
rxuriguera/bibtexIndexMaker
|
src/bibim/gui/custom_widgets.py
|
Python
|
gpl-3.0
| 3,769 | 0.006898 |
#!/usr/bin/python
import urllib2
import json, csv
import subprocess
import sys
import platform
import getopt
all_flag = False
download_flag = False
filename=None
offcore_events=[]
try:
    opts, args = getopt.getopt(sys.argv[1:], 'af:d', ['all', 'file=', 'download'])
for o, a in opts:
if o in ('-a','--all'):
all_flag=True
if o in ('-f','--file'):
filename=a
if o in ('-d','--download'):
download_flag=True
except getopt.GetoptError, err:
print("parse error: %s\n" %(str(err)))
exit(-2)
if filename == None:
map_file_raw=urllib2.urlopen('https://download.01.org/perfmon/mapfile.csv')
map_dict = csv.DictReader(map_file_raw)
map_file = []
core_path = ''
offcore_path = ''
while True:
try:
map_file.append(map_dict.next())
except StopIteration:
break
if platform.system() == 'CYGWIN_NT-6.1':
p = subprocess.Popen(['./pcm-core.exe -c'],stdout=subprocess.PIPE,shell=True)
elif platform.system() == 'Windows':
p = subprocess.Popen(['pcm-core.exe -c'],stdout=subprocess.PIPE,shell=True)
else:
p = subprocess.Popen(['./pcm-core.x -c'],stdout=subprocess.PIPE,shell=True)
(output, err) = p.communicate()
p_status = p.wait()
for model in map_file:
if model['Family-model'] in output:
if(model['EventType'] == 'core'):
core_path = model['Filename']
elif(model['EventType'] == 'offcore'):
offcore_path = model['Filename']
print (model)
if core_path != '':
json_core_data=urllib2.urlopen('https://download.01.org/perfmon'+core_path)
core_events=json.load(json_core_data)
if(download_flag == True):
with open(core_path.split('/')[-1],'w') as outfile:
json.dump(core_events, outfile, sort_keys=True, indent=4)
else:
print ('no core event found for %s CPU, program abort...' % (output))
exit(-1)
if offcore_path != '':
json_offcore_data=urllib2.urlopen('https://download.01.org/perfmon'+offcore_path)
offcore_events=json.load(json_offcore_data)
if(download_flag == True):
with open(offcore_path.split('/')[-1],'w') as outfile:
json.dump(offcore_events, outfile, sort_keys=True, indent=4)
else:
core_events=json.load(open(filename))
if all_flag == True:
for event in core_events+offcore_events:
if event.has_key('EventName') and event.has_key('BriefDescription'):
print (event['EventName']+':'+event['BriefDescription'])
sys.exit(0)
name=raw_input("Event to query (empty enter to quit):")
while(name != ''):
for event in core_events+offcore_events:
if event.has_key('EventName') and name.lower() in event['EventName'].lower():
print (event['EventName']+':'+event['BriefDescription'])
for ev_code in event['EventCode'].split(', '):
print ('cpu/umask=%s,event=%s,name=%s%s%s%s%s/' % (
event['UMask'], ev_code, event['EventName'],
(',offcore_rsp=%s' % (event['MSRValue'])) if event['MSRValue'] != '0' else '',
(',inv=%s' % (event['Invert'])) if event['Invert'] != '0' else '',
(',any=%s' % (event['AnyThread'])) if event['AnyThread'] != '0' else '',
(',edge') if event['EdgeDetect'] != '0' else ''))
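    # Example of an emitted perf event string (illustrative field values
    # only, not taken from a real event table):
    #   cpu/umask=0x01,event=0x3c,name=SOME_EVENT.NAME/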
name=raw_input("Event to query (empty enter to quit):")
|
jcmcclurg/serverpower
|
utilities/intel_pcm/pmu-query.py
|
Python
|
gpl-2.0
| 3,641 | 0.014556 |
from __future__ import absolute_import
from django.core.urlresolvers import reverse
from django.template.response import TemplateResponse
from django.test import TestCase
from django.test.utils import override_settings
from .models import Action
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class AdminCustomUrlsTest(TestCase):
fixtures = ['users.json', 'actions.json']
def setUp(self):
self.client.login(username='super', password='secret')
def tearDown(self):
self.client.logout()
def testBasicAddGet(self):
"""
A smoke test to ensure GET on the add_view works.
"""
response = self.client.get('/custom_urls/admin/admin_custom_urls/action/!add/')
self.assertIsInstance(response, TemplateResponse)
self.assertEqual(response.status_code, 200)
def testAddWithGETArgs(self):
response = self.client.get('/custom_urls/admin/admin_custom_urls/action/!add/', {'name': 'My Action'})
self.assertEqual(response.status_code, 200)
self.assertTrue(
'value="My Action"' in response.content,
"Couldn't find an input with the right value in the response."
)
def testBasicAddPost(self):
"""
A smoke test to ensure POST on add_view works.
"""
post_data = {
'_popup': u'1',
"name": u'Action added through a popup',
"description": u"Description of added action",
}
response = self.client.post('/custom_urls/admin/admin_custom_urls/action/!add/', post_data)
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'dismissAddAnotherPopup')
self.assertContains(response, 'Action added through a popup')
def testAdminUrlsNoClash(self):
"""
Test that some admin URLs work correctly. The model has a CharField
PK and the add_view URL has been customized.
"""
# Should get the change_view for model instance with PK 'add', not show
# the add_view
response = self.client.get('/custom_urls/admin/admin_custom_urls/action/add/')
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Change action')
# Ditto, but use reverse() to build the URL
path = reverse('admin:%s_action_change' % Action._meta.app_label,
args=('add',))
response = self.client.get(path)
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Change action')
# Should correctly get the change_view for the model instance with the
# funny-looking PK
path = reverse('admin:%s_action_change' % Action._meta.app_label,
args=("path/to/html/document.html",))
response = self.client.get(path)
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Change action')
self.assertContains(response, 'value="path/to/html/document.html"')
|
lzw120/django
|
tests/regressiontests/admin_custom_urls/tests.py
|
Python
|
bsd-3-clause
| 3,056 | 0.002291 |