text (stringlengths 6-947k) | repo_name (stringlengths 5-100) | path (stringlengths 4-231) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int64 6-947k) | score (float64 0-0.34) |
---|---|---|---|---|---|---|
from .formSubmission import FormSubmission
from django.contrib.auth.models import User
from django.db import models
from django.template.defaultfilters import slugify
class Log(models.Model):
"""
Form Submission Log Database Model
Attributes:
* owner - user submitting the message
* submission - form submission associated
* timestamp - time of submission entry
* private - display to non-owners?
* message - log entry
* mtype - type of log entry
* 1 - user message (default)
* 2 - system action
* 3 - form status change
* 4 - attached file
* file - attached file entry
"""
owner = models.ForeignKey(User, blank=True, null=True)
submission = models.ForeignKey(FormSubmission)
timestamp = models.DateTimeField(auto_now_add=True)
private = models.BooleanField(default=False)
message = models.TextField(blank=True)
mtype = models.IntegerField(default=1)
file = models.FileField(upload_to='private/constellation_forms/log_files/')
class Meta:
db_table = "form_log"
ordering = ("timestamp",)
@property
def extension(self):
return self.file.name.split(".")[-1]
@property
def content_type(self):
if self.extension == "pdf":
return "application/pdf"
if self.extension == "txt":
return "text/plain"
if self.extension == "png":
return "image/png"
if self.extension == "jpeg" or self.extension == "jpg":
return "image/jpeg"
if self.extension == "gif":
return "image/gif"
return "application/force-download"
@property
def file_name(self):
return slugify("{0}_{1}_{2}".format(self.submission.form.name, self.pk,
self.owner.username)) + "." + \
self.extension
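# Illustration (editor's sketch, not part of the original model): for a log entry with
# pk=7 and owner "jdoe" on a form named "Vacation Request", with "report.pdf" attached,
# `extension` is "pdf", `content_type` is "application/pdf", and `file_name` yields
# "vacation-request_7_jdoe.pdf" (Django's slugify lowercases and keeps underscores).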
| ConstellationApps/Forms | constellation_forms/models/log.py | Python | isc | 1,879 | 0 |
"""Utilities for working with data structures.
Version Added:
2.1
"""
from __future__ import unicode_literals
from collections import OrderedDict
from django_evolution.compat import six
def filter_dup_list_items(items):
"""Return list items with duplicates filtered out.
The order of items will be preserved, but only the first occurrence of
any given item will remain in the list.
Version Added:
2.1
Args:
items (list):
The list of items.
Returns:
list:
The resulting de-duplicated list of items.
"""
return list(six.iterkeys(OrderedDict(
(item, True)
for item in items
)))
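# Example (editor's illustration, not in the original module):
# filter_dup_list_items(['a', 'b', 'a', 'c', 'b']) == ['a', 'b', 'c']
# Only the first occurrence of each item is kept, preserving order.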
def merge_dicts(dest, source):
"""Merge two dictionaries together.
This will recursively merge a source dictionary into a destination
dictionary with the following rules:
* Any keys in the source that aren't in the destination will be placed
directly into the destination (using the same instance of the value, not
a copy).
* Any lists that are in both the source and destination will be combined
by appending the source list to the destination list (this will not
recurse into lists).
* Any dictionaries that are in both the source and destination will be
merged using this function.
* Any keys that exist in both dictionaries with values that are neither
lists nor dictionaries will result in a :py:exc:`TypeError`.
Version Added:
2.1
Args:
dest (dict):
The destination dictionary to merge into.
source (dict):
The source dictionary to merge into the destination.
Raises:
TypeError:
A key was present in both dictionaries with a type that could not
be merged.
"""
for key, value in six.iteritems(source):
if key in dest:
if isinstance(value, list):
if not isinstance(dest[key], list):
raise TypeError(
'Cannot merge a list into a %r for key "%s".'
% (type(dest[key]), key))
dest[key] += value
elif isinstance(value, dict):
if not isinstance(dest[key], dict):
raise TypeError(
'Cannot merge a dictionary into a %r for key "%s".'
% (type(dest[key]), key))
merge_dicts(dest[key], value)
else:
raise TypeError(
'Key "%s" was not an expected type (found %r) '
'when merging dictionaries.'
% (key, type(value)))
else:
dest[key] = value
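# Example (editor's illustration, not in the original module):
# dest = {'apps': ['a'], 'opts': {'debug': False}}
# merge_dicts(dest, {'apps': ['b'], 'opts': {'verbose': True}, 'name': 'demo'})
# dest == {'apps': ['a', 'b'], 'opts': {'debug': False, 'verbose': True}, 'name': 'demo'}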
| beanbaginc/django-evolution | django_evolution/utils/datastructures.py | Python | bsd-3-clause | 2,717 | 0 |
# -*- coding: utf-8 -*-
'''
Copyright (c) 2015 Heidelberg University Library
Distributed under the GNU GPL v3. For full terms see the file
LICENSE.md
'''
from ompannouncements import Announcements
def index():
a = Announcements(myconf, db, locale)
news_list = a.create_announcement_list()
return locals()
| UB-Heidelberg/UBHD-OMPArthistorikum | controllers/home.py | Python | gpl-3.0 | 318 | 0.006289 |
import os
from functools import reduce, lru_cache
import logging
import re
import subprocess
from randrctl import DISPLAY, XAUTHORITY
from randrctl.exception import XrandrException, ParseException
from randrctl.model import Profile, Viewport, XrandrConnection, Display
logger = logging.getLogger(__name__)
class Xrandr:
"""
Interface for xrandr application. Provides methods for calling xrandr operating with python objects such as
randrctl.profile.Profile
"""
EXECUTABLE = "/usr/bin/xrandr"
OUTPUT_KEY = "--output"
MODE_KEY = "--mode"
POS_KEY = "--pos"
ROTATE_KEY = "--rotate"
PANNING_KEY = "--panning"
RATE_KEY = "--rate"
SCALE_KEY = "--scale"
PRIMARY_KEY = "--primary"
CRTC_KEY = "--crtc"
QUERY_KEY = "-q"
VERBOSE_KEY = "--verbose"
OFF_KEY = "--off"
OUTPUT_DETAILS_REGEX = re.compile(
r'(?P<primary>primary )?(?P<geometry>[\dx\+]+) (?:(?P<rotate>\w+) )?.*?(?:panning (?P<panning>[\dx\+]+))?$')
MODE_REGEX = re.compile(r"(\d+x\d+)\+(\d+\+\d+)")
CURRENT_MODE_REGEX = re.compile(r"\s*(\S+)\s+([0-9\.]+)(.*$)")
def __init__(self, display: str, xauthority: str):
env = dict(os.environ)
if display:
env[DISPLAY] = display
if xauthority:
env[XAUTHORITY] = xauthority
self.env = env
def apply(self, profile: Profile):
"""
Apply given profile by calling xrandr
"""
logger.debug("Applying profile %s", profile.name)
args = self._compose_mode_args(profile, self.get_all_outputs())
self._xrandr(*args)
@lru_cache()
def _xrandr(self, *args):
"""
Perform call to xrandr executable with passed arguments.
Returns subprocess.Popen object
"""
args = list(args)
logger.debug("Calling xrandr with args %s", args)
args.insert(0, self.EXECUTABLE)
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False, env=self.env)
err = p.stderr.readlines()
if err:
# close descriptors
p.stderr.close()
p.stdout.close()
err_str = ''.join(map(lambda x: x.decode(), err)).strip()
raise XrandrException(err_str, args)
out = list(map(lambda x: x.decode(), p.stdout.readlines()))
if out:
out.pop(0) # remove first line. It describes Screen
return out
def _compose_mode_args(self, profile: Profile, xrandr_connections: list):
"""
Composes list of arguments to xrandr to apply profile settings and disable the other outputs
"""
args = []
active_names = []
for name, o in profile.outputs.items():
active_names.append(name)
args.append(self.OUTPUT_KEY)
args.append(name)
args.append(self.MODE_KEY)
args.append(o.mode)
args.append(self.POS_KEY)
args.append(o.pos)
args.append(self.ROTATE_KEY)
args.append(o.rotate)
args.append(self.PANNING_KEY)
args.append(o.panning)
args.append(self.SCALE_KEY)
args.append(o.scale)
if o.rate:
args.append(self.RATE_KEY)
args.append(str(o.rate))
if name == profile.primary:
args.append(self.PRIMARY_KEY)
if o.crtc is not None:
args.append(self.CRTC_KEY)
args.append(str(o.crtc))
# turn off the others
for c in xrandr_connections:
if active_names.count(c.name) == 0:
args.append(self.OUTPUT_KEY)
args.append(c.name)
args.append(self.OFF_KEY)
return args
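# Illustration (editor's sketch with hypothetical values, not in the original source):
# for a profile whose only output LVDS1 runs 1366x768 at position 0x0, with HDMI1 unused,
# the composed argument list resembles:
# ['--output', 'LVDS1', '--mode', '1366x768', '--pos', '0x0', '--rotate', 'normal',
#  '--panning', '0x0', '--scale', '1x1', '--primary', '--output', 'HDMI1', '--off']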
def get_all_outputs(self):
"""
Query xrandr for all supported outputs.
Performs call to xrandr with -q key and parses output.
Returns list of outputs with some properties missing (only name and status are guaranteed)
"""
outputs = []
items = self._xrandr(self.QUERY_KEY)
items = self._group_query_result(items)
logger.debug("Detected total %d outputs", len(items))
crtcs = self._get_verbose_fields('CRTC')
for i in items:
o = self._parse_xrandr_connection(i)
o.crtc = int(crtcs[o.name]) if o.name in crtcs and len(crtcs[o.name]) else None
outputs.append(o)
return outputs
def get_connected_outputs(self):
"""
Query xrandr and return list of connected outputs.
Performs call to xrandr with -q and --verbose keys.
Returns list of connected outputs with all properties set
"""
outputs = list(filter(lambda o: o.display is not None, self.get_all_outputs()))
edids = self._get_verbose_fields('EDID')
for o in outputs:
o.display.edid = edids[o.name]
if logger.isEnabledFor(logging.DEBUG):
logger.debug("Connected outputs: %s", list(map(lambda o: o.name, outputs)))
return outputs
def _get_verbose_fields(self, field):
"""
Get particular field of all connected displays.
Return dictionary of {"connection_name": field_value}
"""
ret = dict()
items = self._xrandr(self.QUERY_KEY, self.VERBOSE_KEY)
items = self._group_query_result(items)
items = filter(lambda x: x[0].find(' connected') > 0, items)
for i in items:
name_idx = i[0].find(' ')
name = i[0][:name_idx]
ret[name] = self._field_from_query_item(i, field)
return ret
def _field_from_query_item(self, item_lines: list, field: str):
"""
Extracts display field from xrandr --verbose output
"""
val = ''
indent = ''
in_field = False
lines_collected = 0
for i, line in enumerate(item_lines):
m = re.match(r'(\s+)(.*):\s*(.*)$', line)
if m and m.group(2).lower() == field.lower():
indent = m.group(1)
in_field = True
val = m.group(3).strip()
elif in_field and m and (len(indent) >= len(m.group(1)) or m.group(1) == indent):
return val
elif in_field and not line.startswith(indent):
return val
elif in_field:
val += line.strip()
lines_collected += 1
if field == 'EDID' and lines_collected >= 8:
return val
return val
def _parse_xrandr_connection(self, item_lines: list):
"""
Creates XrandrConnection from lines returned by xrandr --query.
Example:
LVDS1 connected primary 1366x768+0+312 (normal left inverted right x axis y axis) 277mm x 156mm
1366x768 60.02*+
1024x768 60.00
"""
connection_info = item_lines[0]
name, status, state = connection_info.split(' ', 2)
if status != 'connected':
# We are not connected, do not parse the rest.
return XrandrConnection(name)
# We are connected; parse the connected display.
display = self._parse_display(item_lines[1:])
if not display.is_on():
# inactive output
return XrandrConnection(name, display)
parsed = self.OUTPUT_DETAILS_REGEX.match(state)
if parsed is None:
raise ParseException(name, status, state)
primary = parsed.group('primary') is not None
rotate = parsed.group('rotate')
panning = parsed.group('panning')
geometry = parsed.group('geometry')
size, pos = self._parse_geometry(geometry)
is_rotated = rotate in ['left', 'right']
if is_rotated:
size = 'x'.join(size.split('x')[::-1])
scale = '1x1'
if size != display.mode:
dw, dh = map(lambda s: int(s), display.mode.split('x'))
vw, vh = map(lambda s: int(s), size.split('x'))
sw, sh = vw / dw, vh / dh
if is_rotated:
sw, sh = sh, sw
scale = "{}x{}".format(sw, sh)
viewport = Viewport(size, pos, rotate, panning, scale)
return XrandrConnection(name, display, viewport, primary)
def _parse_display(self, lines: list):
supported_modes = []
preferred_mode = None
current_mode = None
current_rate = None
for mode_line in lines:
mode_line = mode_line.strip()
(mode, rate, extra) = self.CURRENT_MODE_REGEX.match(mode_line).groups()
current = (extra.find("*") >= 0)
preferred = (extra.find("+") >= 0)
supported_modes.append(mode)
if current:
current_mode = mode
current_rate = rate
if preferred:
preferred_mode = mode
return Display(supported_modes, preferred_mode, current_mode, current_rate)
def _group_query_result(self, query_result: list):
"""
Group input list of lines such that every line starting with a non-whitespace character is a start of a
group, and every subsequent line starting with whitespace is a member of that group.
:param query_result: list of lines
:return: list of lists of lines
"""
def group_fn(result, line):
# reduce() seeds the accumulator with the first line (a plain str); handle that case first.
if type(result) is str:
if line.startswith(' ') or line.startswith('\t'):
return [[result, line]]
else:
return [[result], [line]]
else:
if line.startswith(' ') or line.startswith('\t'):
last = result[len(result) - 1]
last.append(line)
return result
else:
result.append([line])
return result
# TODO rewrite in imperative code
grouped = reduce(lambda result, line: group_fn(result, line), query_result)
return grouped
def _parse_geometry(self, s: str):
"""
Parses geometry string (i.e. 1111x2222+333+444) into tuple (widthxheight, leftxtop)
"""
match = self.MODE_REGEX.match(s)
mode = match.group(1)
pos = match.group(2).replace('+', 'x')
return mode, pos
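# Example (editor's note, not in the original source):
# _parse_geometry('1366x768+0+312') returns ('1366x768', '0x312').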
| edio/randrctl | randrctl/xrandr.py | Python | gpl-3.0 | 10,440 | 0.002969 |
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from builtins import *
import json
import bson.json_util as bju
import emission.core.get_database as edb
import argparse
import emission.core.wrapper.user as ecwu
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("timeline_filename",
help="the name of the file that contains the json representation of the timeline")
parser.add_argument("user_email",
help="specify the user email to load the data as")
parser.add_argument("-r", "--retain", action="store_true",
help="specify whether the entries should overwrite existing ones (default) or create new ones")
parser.add_argument("-v", "--verbose", type=int,
help="after how many lines we should print a status message.")
args = parser.parse_args()
fn = args.timeline_filename
print(fn)
print("Loading file " + fn)
tsdb = edb.get_timeseries_db()
user = ecwu.User.register(args.user_email)
override_uuid = user.uuid
print("After registration, %s -> %s" % (args.user_email, override_uuid))
entries = json.load(open(fn), object_hook = bju.object_hook)
for i, entry in enumerate(entries):
entry["user_id"] = override_uuid
if not args.retain:
del entry["_id"]
if args.verbose is not None and i % args.verbose == 0:
print("About to save %s" % entry)
tsdb.save(entry)
| sunil07t/e-mission-server | bin/debug/load_timeline_for_day_and_user.py | Python | bsd-3-clause | 1,612 | 0.008685 |
# coding: utf-8
""" This file is where things are stuffed away. Probably you don't ever need to alter these definitions.
"""
import sys
import os.path
import uuid
import dateutil.parser
import datetime
from bs4 import BeautifulSoup
from urllib.parse import urlparse, urljoin
import gzip
import requests
import json
# internal
import _privatekeys as privatekeys
i = 0 # global iterator
def writeFile(file, content):
"""Writes a file at given location
Attributes: file for location, content for the file's contents
"""
f = open(file, 'w')
f.write(content)
f.close()
def delete_file(file):
os.remove(file)
def getUniqueId(length=5):
return str(uuid.uuid1()).replace('-', '')[:length]
def getKey(item):
return item[0]
def fetchUrlsFromSitemap(url, limit=None):
"""Given a URL of a sitemap or sitemapindex the contained URLs are returned as a list with tuples. Optional to limit the age of URLs.
Attributes: url (string), limit (datetime)
"""
# Documentation for sitemaps - https://www.sitemaps.org
found_urls = list()
sitemap = httpRequestGetContent(url)
global i
if limit is not None:
limit = dateutil.parser.parse(limit).replace(tzinfo=None) # converts to same format
if ('<sitemapindex' in str(sitemap)): # is the sitemap itself an index of sitemaps
sitemap_content = BeautifulSoup(sitemap, "html.parser")
for url in sitemap_content.findAll("loc"):
print("Siteindex found. Including URL:s from sitemap: '{0}'".format(url.text))
# fetching sitemap
sitemap_from_index = httpRequestGetContent(url.text)
sitemap_iteration = BeautifulSoup(sitemap_from_index, "html.parser")
for lvl1_url in sitemap_iteration.findAll("url"):
date = None
if (".pdf" not in lvl1_url.text.lower()) and (
".jpg" not in lvl1_url.text.lower()) and (
".mp4" not in lvl1_url.text.lower()) and (
".mp3" not in lvl1_url.text.lower()) and (
".txt" not in lvl1_url.text.lower()) and (
".png" not in lvl1_url.text.lower()) and (
".gif" not in lvl1_url.text.lower()) and (
".svg" not in lvl1_url.text.lower()) and (
".eps" not in lvl1_url.text.lower()) and (
".doc" not in lvl1_url.text.lower()) and (
".docx" not in lvl1_url.text.lower()) and (
".xls" not in lvl1_url.text.lower()) and (
".js" not in lvl1_url.text.lower()) and (
".css" not in lvl1_url.text.lower()) and (
".xlsx" not in lvl1_url.text.lower()) and (
".ttf" not in lvl1_url.text.lower()) and (
".eot" not in lvl1_url.text.lower()) and (
".bak" not in lvl1_url.text.lower()) and (
".woff" not in lvl1_url.text.lower()) and (
"javascript:" not in lvl1_url.text.lower()) and (
"tel:" not in lvl1_url.text.lower()) and (
"mailto:" not in lvl1_url.text.lower()) and (
"#" not in lvl1_url.text.lower()):
if lvl1_url.lastmod is not None:
date = dateutil.parser.parse(lvl1_url.lastmod.string).replace(tzinfo=None)
if date is not None and (limit is None or date > limit):
date_and_url = (lvl1_url.lastmod.string, lvl1_url.loc.string)
found_urls.append(
date_and_url) # if date (lastmod) is missing the URL will not be checked
print(
'Found {0} URLs from multiple sitemaps in the siteindex you provided.'.format(
len(found_urls)))
return sorted(found_urls, key=getKey, reverse=True)
else:
soup = BeautifulSoup(sitemap, "html.parser")
for url in soup.findAll("url"):
date = None
if url.lastmod is not None:
date = dateutil.parser.parse(url.lastmod.string).replace(tzinfo=None)
if date is not None and (limit is None or date > limit):
date_and_url = (url.lastmod.string, url.loc.string)
found_urls.append(
date_and_url) # if date (lastmod) is missing the URL will not be checked
print('Found {0} URLs in the sitemap you provided.'.format(len(found_urls)))
return sorted(found_urls, key=getKey, reverse=True)
def fetchUrlsFromPage(url, num_limit=None, local_only=True):
"""Given a URL contained URLs are returned as a list with tuples. Optional to number of URLs and if to only include URLs within the local website.
Attributes: url (string), num_limit (integer), local_only (bool)
"""
main_url = urlparse(url)
found_urls = list()
page = httpRequestGetContent(url)
soup = BeautifulSoup(page, "html.parser")
i = 0
for the_url in soup.find_all('a', href=True):
if (".pdf" not in the_url['href'].lower()) and (
".jpg" not in the_url['href'].lower()) and (
".mp4" not in the_url['href'].lower()) and (
".mp3" not in the_url['href'].lower()) and (
".txt" not in the_url['href'].lower()) and (
".png" not in the_url['href'].lower()) and (
".gif" not in the_url['href'].lower()) and (
".svg" not in the_url['href'].lower()) and (
".eps" not in the_url['href'].lower()) and (
".doc" not in the_url['href'].lower()) and (
".docx" not in the_url['href'].lower()) and (
".xls" not in the_url['href'].lower()) and (
".js" not in the_url['href'].lower()) and (
".css" not in the_url['href'].lower()) and (
".xlsx" not in the_url['href'].lower()) and (
".ttf" not in the_url['href'].lower()) and (
".eot" not in the_url['href'].lower()) and (
".bak" not in the_url['href'].lower()) and (
".woff" not in the_url['href'].lower()) and (
"javascript:" not in the_url['href'].lower()) and (
"tel:" not in the_url['href'].lower()) and (
"callto:" not in the_url['href'].lower()) and (
"mailto:" not in the_url['href'].lower()) and (
"#" not in the_url['href'].lower()):
found_url = urlparse(the_url['href'])
is_local = len(found_url.netloc) == 0 or found_url.netloc == main_url.netloc
if not local_only or is_local:
if len(found_url.netloc) == 0:
found_url = urljoin(url, found_url.geturl())
if found_url not in found_urls:  # making the entries unique
found_urls.append(found_url)
i += 1
if num_limit is not None:
found_urls = found_urls[:num_limit]
print('Found {0} URLs on the page you provided, returning {1} of them.'.format(i, len(found_urls)))
return found_urls[:num_limit]
def getGzipedContentFromUrl(url):
"""
Fetching a gziped file from Internet, unpacks it and returns its contents.
"""
unique_id = getUniqueId(5)
file_name = 'tmp/file-{0}.gz'.format(unique_id)
try:
r = requests.get(url, stream=True)
with open(file_name, 'wb') as fd:
for chunk in r.iter_content(chunk_size=128):
fd.write(chunk)
with gzip.open(file_name, 'rb') as f:
file_content = f.read()
return file_content
except requests.exceptions.SSLError:
if 'http://' in url: # trying the same URL over SSL/TLS
return getGzipedContentFromUrl(url.replace('http://', 'https://'))
else:
return None
except:
print(
'Error! Unfortunately the request for URL "{0}" either timed out or failed for other reason(s).\nMessage:\n{1}'.format(
url, sys.exc_info()[0]))
return None
def httpRequestGetContent(url):
"""Trying to fetch the response content
Attributes: url, as for the URL to fetch
"""
if '.gz' in url or '.gzip' in url:
# the url indicates that it is compressed using Gzip
return getGzipedContentFromUrl(url)
timeout_in_seconds = 30
try:
a = requests.get(url, timeout=timeout_in_seconds)
return a.text
except requests.exceptions.SSLError:
if 'http://' in url: # trying the same URL over SSL/TLS
print('Info: Trying SSL before giving up.')
return httpRequestGetContent(url.replace('http://', 'https://'))
except requests.exceptions.ConnectionError:
print(
'Connection error! Unfortunately the request for URL "{0}" failed.\nMessage:\n{1}'.format(url, sys.exc_info()[0]))
pass
except:
print(
'Error! Unfortunately the request for URL "{0}" either timed out or failed for other reason(s). The timeout is set to {1} seconds.\nMessage:\n{2}'.format(url, timeout_in_seconds, sys.exc_info()[0]))
pass
def is_sitemap(content):
"""Check a string to see if its content is a sitemap or siteindex.
Attributes: content (string)
"""
if 'http://www.sitemaps.org/schemas/sitemap/' in content or '<sitemapindex' in content:
return True
return False
"""
If this file is executed directly, call a definition below; mostly for testing purposes.
"""
if __name__ == '__main__':
# fetchUrlsFromSitemap('http://webbstrategiforalla.se/sitemap.xml')
# tmp = fetchUrlsFromSitemap('http://www.varberg.se/sitemap.xml', '2017-02-17T06:19:00+01:00')
# print(len(tmp))
# for bla in tmp:
# print('{0} lastmod for {1}'.format(bla[0], bla[1]))
for url in fetchUrlsFromPage('https://www.arbetsformedlingen.se/', 20):
print(url)
# httpRequestGetContent('http://vgregion.se')
| Vastra-Gotalandsregionen/verifierad.nu | helper.py | Python | mit | 10,385 | 0.003467 |
""" Models for representing top-level plot objects.
"""
from __future__ import absolute_import
from six import string_types
from ..enums import Location
from ..mixins import LineProps, TextProps
from ..plot_object import PlotObject
from ..properties import Bool, Int, String, Color, Enum, Auto, Instance, Either, List, Dict, Include
from ..query import find
from ..util.string import nice_join
from .glyphs import Glyph
from .ranges import Range, Range1d
from .renderers import Renderer, GlyphRenderer
from .sources import DataSource, ColumnDataSource
from .tools import Tool, ToolEvents
from .widget import Widget
def _select_helper(args, kwargs):
"""
Allow flexible selector syntax.
Returns:
a dict
"""
if len(args) > 1:
raise TypeError("select accepts at most ONE positional argument.")
if len(args) > 0 and len(kwargs) > 0:
raise TypeError("select accepts EITHER a positional argument, OR keyword arguments (not both).")
if len(args) == 0 and len(kwargs) == 0:
raise TypeError("select requires EITHER a positional argument, OR keyword arguments.")
if args:
arg = args[0]
if isinstance(arg, dict):
selector = arg
elif isinstance(arg, string_types):
selector = dict(name=arg)
elif issubclass(arg, PlotObject):
selector = {"type" : arg}
else:
raise RuntimeError("Selector must be a dictionary, string or plot object.")
else:
selector = kwargs
return selector
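# Illustration (editor's note, not in the original source; HoverTool as in the examples below):
# _select_helper(("mycircle",), {}) -> {'name': 'mycircle'}
# _select_helper((), {'name': 'foo', 'type': HoverTool}) -> {'name': 'foo', 'type': HoverTool}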
class PlotContext(PlotObject):
""" A container for multiple plot objects.
``PlotContext`` objects are a source of confusion. Their purpose
is to collect together different top-level objects (e.g., ``Plot``
or layout widgets). The reason for this is that different plots may
need to share ranges or data sources between them. A ``PlotContext``
is a container in which such sharing can occur between the contained
objects.
"""
children = List(Instance(PlotObject), help="""
A list of top level objects in this ``PlotContext`` container.
""")
# TODO (bev) : is this used anywhere?
class PlotList(PlotContext):
# just like plot context, except plot context has special meaning
# everywhere, so plotlist is the generic one
pass
class Plot(Widget):
""" Model representing a plot, containing glyphs, guides, annotations.
"""
def __init__(self, **kwargs):
if "tool_events" not in kwargs:
kwargs["tool_events"] = ToolEvents()
super(Plot, self).__init__(**kwargs)
def select(self, *args, **kwargs):
''' Query this object and all of its references for objects that
match the given selector.
There are a few different ways to call the ``select`` method.
The most general is to supply a JSON-like query dictionary as the
single argument or as keyword arguments:
Args:
selector (JSON-like) : a query selector dictionary
Keyword Arguments:
kwargs : query dict key/values as keyword arguments
For convenience, queries on just names can be made by supplying
the ``name`` string as the single parameter:
Args:
name (str) : the name to query on
Also queries on just type can be made simply by supplying the
``PlotObject`` subclass as the single parameter:
Args:
type (PlotObject) : the type to query on
Returns:
seq[PlotObject]
Examples:
.. code-block:: python
# These two are equivalent
p.select({"type": HoverTool})
p.select(HoverTool)
# These two are also equivalent
p.select({"name": "mycircle"})
p.select("mycircle")
# Keyword arguments can be supplied in place of selector dict
p.select({"name": "foo", "type": HoverTool})
p.select(name="foo", type=HoverTool)
'''
selector = _select_helper(args, kwargs)
# Want to pass selector that is a dictionary
from ..plotting_helpers import _list_attr_splat
return _list_attr_splat(find(self.references(), selector, {'plot': self}))
def row(self, row, gridplot):
''' Return whether this plot is in a given row of a GridPlot.
Args:
row (int) : index of the row to test
gridplot (GridPlot) : the GridPlot to check
Returns:
bool
'''
return self in gridplot.row(row)
def column(self, col, gridplot):
''' Return whether this plot is in a given column of a GridPlot.
Args:
col (int) : index of the column to test
gridplot (GridPlot) : the GridPlot to check
Returns:
bool
'''
return self in gridplot.column(col)
def add_layout(self, obj, place='center'):
''' Adds an object to the plot in a specified place.
Args:
obj (Renderer) : the object to add to the Plot
place (str, optional) : where to add the object (default: 'center')
Valid places are: 'left', 'right', 'above', 'below', 'center'.
Returns:
None
'''
valid_places = ['left', 'right', 'above', 'below', 'center']
if place not in valid_places:
raise ValueError(
"Invalid place '%s' specified. Valid place values are: %s" % (place, nice_join(valid_places))
)
if hasattr(obj, 'plot'):
if obj.plot is not None:
raise ValueError("object to be added already has 'plot' attribute set")
obj.plot = self
self.renderers.append(obj)
if place != 'center':
getattr(self, place).append(obj)
def add_tools(self, *tools):
''' Adds tools to the plot.
Args:
*tools (Tool) : the tools to add to the Plot
Returns:
None
'''
if not all(isinstance(tool, Tool) for tool in tools):
raise ValueError("All arguments to add_tool must be Tool subclasses.")
for tool in tools:
if tool.plot is not None:
raise ValueError("tool %s to be added already has 'plot' attribute set" % tool)
tool.plot = self
self.tools.append(tool)
def add_glyph(self, source_or_glyph, glyph=None, **kw):
''' Adds a glyph to the plot with associated data sources and ranges.
This function will take care of creating and configuring a Glyph object,
and then adding it to the plot's list of renderers.
Args:
source (DataSource) : a data source for the glyphs to all use
glyph (Glyph) : the glyph to add to the Plot
Keyword Arguments:
Any additional keyword arguments are passed on as-is to the
Glyph initializer.
Returns:
glyph : Glyph
'''
if glyph is not None:
source = source_or_glyph
else:
source, glyph = ColumnDataSource(), source_or_glyph
if not isinstance(source, DataSource):
raise ValueError("'source' argument to add_glyph() must be DataSource subclass")
if not isinstance(glyph, Glyph):
raise ValueError("'glyph' argument to add_glyph() must be Glyph subclass")
g = GlyphRenderer(data_source=source, glyph=glyph, **kw)
self.renderers.append(g)
return g
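# Usage illustration (editor's note, not part of the original source; ``Circle`` stands
# in for any Glyph subclass and would need its own import):
# plot.add_glyph(ColumnDataSource(data=dict(x=[1], y=[2])), Circle(x='x', y='y'))
# plot.add_glyph(Circle(x='x', y='y'))  # an empty ColumnDataSource is created for it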
x_range = Instance(Range, help="""
The (default) data range of the horizontal dimension of the plot.
""")
y_range = Instance(Range, help="""
The (default) data range of the vertical dimension of the plot.
""")
x_mapper_type = Either(Auto, String, help="""
What kind of mapper to use to convert x-coordinates in data space
into x-coordinates in screen space.
Typically this can be determined automatically, but this property
can be useful to, e.g., show datetime values as floating point
"seconds since epoch" instead of formatted dates.
""")
y_mapper_type = Either(Auto, String, help="""
What kind of mapper to use to convert y-coordinates in data space
into y-coordinates in screen space.
Typically this can be determined automatically, but this property
can be useful to, e.g., show datetime values as floating point
"seconds since epoch" instead of formatted dates
""")
extra_x_ranges = Dict(String, Instance(Range1d), help="""
Additional named ranges to make available for mapping x-coordinates.
This is useful for adding additional axes.
""")
extra_y_ranges = Dict(String, Instance(Range), help="""
Additional named ranges to make available for mapping y-coordinates.
This is useful for adding additional axes.
""")
title = String('', help="""
A title for the plot.
""")
title_props = Include(TextProps, help="""
The %s for the plot title.
""")
outline_props = Include(LineProps, help="""
The %s for the plot border outline.
""")
renderers = List(Instance(Renderer), help="""
A list of all renderers for this plot, including guides and annotations
in addition to glyphs and markers.
This property can be manipulated by hand, but the ``add_glyph`` and
``add_layout`` methods are recommended to help make sure all necessary
setup is performed.
""")
tools = List(Instance(Tool), help="""
A list of tools to add to the plot.
""")
tool_events = Instance(ToolEvents, help="""
A ToolEvents object to share and report tool events.
""")
left = List(Instance(Renderer), help="""
A list of renderers to occupy the area to the left of the plot.
""")
right = List(Instance(Renderer), help="""
A list of renderers to occupy the area to the right of the plot.
""")
above = List(Instance(Renderer), help="""
A list of renderers to occupy the area above the plot.
""")
below = List(Instance(Renderer), help="""
A list of renderers to occupy the area below the plot.
""")
toolbar_location = Enum(Location, help="""
Where the toolbar will be located. If set to None, no toolbar
will be attached to the plot.
""")
logo = Enum("normal", "grey", help="""
What version of the Bokeh logo to display on the toolbar. If
set to None, no logo will be displayed.
""")
plot_height = Int(600, help="""
Total height of the entire plot (including any axes, titles,
border padding, etc.)
.. note::
This corresponds directly to the height of the HTML
canvas that will be used.
""")
plot_width = Int(600, help="""
Total width of the entire plot (including any axes, titles,
border padding, etc.)
.. note::
This corresponds directly to the width of the HTML
canvas that will be used.
""")
background_fill = Color("white", help="""
""")
border_fill = Color("white", help="""
""")
min_border_top = Int(50, help="""
Minimum size in pixels of the padding region above the top of the
central plot region.
.. note::
This is a *minimum*. The padding region may expand as needed to
accommodate titles or axes, etc.
""")
min_border_bottom = Int(50, help="""
Minimum size in pixels of the padding region below the bottom of
the central plot region.
.. note::
This is a *minimum*. The padding region may expand as needed to
accommodate titles or axes, etc.
""")
min_border_left = Int(50, help="""
Minimum size in pixels of the padding region to the left of
the central plot region.
.. note::
This is a *minimum*. The padding region may expand as needed to
accommodate titles or axes, etc.
""")
min_border_right = Int(50, help="""
Minimum size in pixels of the padding region to the right of
the central plot region.
.. note::
This is a *minimum*. The padding region may expand as needed to
accommodate titles or axes, etc.
""")
min_border = Int(50, help="""
A convenience property to set all the ``min_X_border`` properties
to the same value. If an individual border property is explicitly set,
it will override ``min_border``.
""")
h_symmetry = Bool(True, help="""
Whether the total horizontal padding on both sides of the plot will
be made equal (the left or right padding amount, whichever is larger).
""")
v_symmetry = Bool(False, help="""
Whether the total vertical padding on both sides of the plot will
be made equal (the top or bottom padding amount, whichever is larger).
""")
lod_factor = Int(10, help="""
Decimation factor to use when applying level-of-detail decimation.
""")
lod_threshold = Int(2000, help="""
A number of data points, above which level-of-detail downsampling may
be performed by glyph renderers. Set to ``None`` to disable any
level-of-detail downsampling.
""")
lod_interval = Int(300, help="""
Interval (in ms) during which an interactive tool event will enable
level-of-detail downsampling.
""")
lod_timeout = Int(500, help="""
Timeout (in ms) for checking whether interactive tool events are still
occurring. Once level-of-detail mode is enabled, a check is made every
``lod_timeout`` ms. If no interactive tool events have happened,
level-of-detail mode is disabled.
""")
class GridPlot(Plot):
""" A 2D grid of plots rendered on separate canvases in an HTML table.
"""
children = List(List(Instance(Plot)), help="""
An array of plots to display in a grid, given as a list of lists of
Plot objects. To leave a position in the grid empty, pass None for
that position in the ``children`` list.
""")
border_space = Int(0, help="""
Distance (in pixels) between adjacent plots.
""")
def select(self, *args, **kwargs):
''' Query this object and all of its references for objects that
match the given selector. See Plot.select for detailed usage information.
Returns:
seq[PlotObject]
'''
selector = _select_helper(args, kwargs)
# Want to pass selector that is a dictionary
from ..plotting_helpers import _list_attr_splat
return _list_attr_splat(find(self.references(), selector, {'gridplot': self}))
def column(self, col):
''' Return a given column of plots from this GridPlot.
Args:
col (int) : index of the column to return
Returns:
seq[Plot] : column of plots
'''
try:
return [row[col] for row in self.children]
except:
return []
def row(self, row):
''' Return a given row of plots from this GridPlot.
Args:
row (int) : index of the row to return
Returns:
seq[Plot] : row of plots
'''
try:
return self.children[row]
except:
return []
| birdsarah/bokeh | bokeh/models/plots.py | Python | bsd-3-clause | 15,281 | 0.001701 |
import unittest
from locust.util.timespan import parse_timespan
from locust.util.rounding import proper_round
class TestParseTimespan(unittest.TestCase):
def test_parse_timespan_invalid_values(self):
self.assertRaises(ValueError, parse_timespan, None)
self.assertRaises(ValueError, parse_timespan, "")
self.assertRaises(ValueError, parse_timespan, "q")
def test_parse_timespan(self):
self.assertEqual(7, parse_timespan("7"))
self.assertEqual(7, parse_timespan("7s"))
self.assertEqual(60, parse_timespan("1m"))
self.assertEqual(7200, parse_timespan("2h"))
self.assertEqual(3787, parse_timespan("1h3m7s"))
class TestRounding(unittest.TestCase):
def test_rounding_down(self):
self.assertEqual(1, proper_round(1.499999999))
self.assertEqual(5, proper_round(5.499999999))
self.assertEqual(2, proper_round(2.05))
self.assertEqual(3, proper_round(3.05))
def test_rounding_up(self):
self.assertEqual(2, proper_round(1.5))
self.assertEqual(3, proper_round(2.5))
self.assertEqual(4, proper_round(3.5))
self.assertEqual(5, proper_round(4.5))
self.assertEqual(6, proper_round(5.5))
| heyman/locust | locust/test/test_util.py | Python | mit | 1,232 | 0.000812 |
import socket
import sys
def set_keepalive(sock, interval=1, probes=5):
sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, interval)
if hasattr(socket, 'TCP_KEEPCNT'):
sock.setsockopt(socket.SOL_TCP, socket.TCP_KEEPCNT, probes)
if hasattr(socket, 'TCP_KEEPIDLE'):
sock.setsockopt(socket.SOL_TCP, socket.TCP_KEEPIDLE, interval)
if hasattr(socket, 'TCP_KEEPINTVL'):
sock.setsockopt(socket.SOL_TCP, socket.TCP_KEEPINTVL, interval)
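# Editor's note (not in the original script): set_keepalive() turns on SO_KEEPALIVE and,
# where the platform exposes them, tunes the probe count (TCP_KEEPCNT), the idle time
# before probing starts (TCP_KEEPIDLE) and the interval between probes (TCP_KEEPINTVL).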
s = socket.socket()
s.bind(('', 0))
print s.getsockname()
set_keepalive(s)
s.listen(1)
while True:
csock, addr = s.accept()
set_keepalive(csock)
print csock.recv(512)
| dw/scratch | tcp_ka2.py | Python | mit | 658 | 0 |
# -*- coding: utf-8 -*-
# Copyright (c) Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
API Issues to work out:
- MatrixTransform and STTransform both have 'scale' and 'translate'
attributes, but they are used in very different ways. It would be nice
to keep this consistent, but how?
- Need a transform.map_rect function that returns the bounding rectangle of
a rect after transformation. Non-linear transforms might need to work
harder at this, but we can provide a default implementation that
works by mapping a selection of points across a grid within the original
rect.
"""
from __future__ import division
from ..shaders import Function
from ...util.event import EventEmitter
class BaseTransform(object):
"""
BaseTransform is a base class that defines a pair of complementary
coordinate mapping functions in both python and GLSL.
All BaseTransform subclasses define map() and imap() methods that map
an object through the forward or inverse transformation, respectively.
The two class variables glsl_map and glsl_imap are instances of
shaders.Function that define the forward- and inverse-mapping GLSL
function code.
Optionally, an inverse() method returns a new transform performing the
inverse mapping.
Note that although all classes should define both map() and imap(), it
is not necessarily the case that imap(map(x)) == x; there may be instances
where the inverse mapping is ambiguous or otherwise meaningless.
"""
glsl_map = None # Must be GLSL code
glsl_imap = None
# Flags used to describe the transformation. Subclasses should define each
# as True or False.
# (usually used for making optimization decisions)
# If True, then for any 3 colinear points, the
# transformed points will also be colinear.
Linear = None
# The transformation's effect on one axis is independent
# of the input position along any other axis.
Orthogonal = None
# If True, then the distance between two points is the
# same as the distance between the transformed points.
NonScaling = None
# Scale factors are applied equally to all axes.
Isometric = None
def __init__(self):
self._inverse = None
self._dynamic = False
self.changed = EventEmitter(source=self, type='transform_changed')
if self.glsl_map is not None:
self._shader_map = Function(self.glsl_map)
if self.glsl_imap is not None:
self._shader_imap = Function(self.glsl_imap)
def map(self, obj):
"""
Return *obj* mapped through the forward transformation.
Parameters
----------
obj : tuple (x,y) or (x,y,z)
array with shape (..., 2) or (..., 3)
"""
raise NotImplementedError()
def imap(self, obj):
"""
Return *obj* mapped through the inverse transformation.
Parameters
----------
obj : tuple (x,y) or (x,y,z)
array with shape (..., 2) or (..., 3)
"""
raise NotImplementedError()
@property
def inverse(self):
""" The inverse of this transform.
"""
if self._inverse is None:
self._inverse = InverseTransform(self)
return self._inverse
@property
def dynamic(self):
"""Boolean flag that indicates whether this transform is expected to
change frequently.
Transforms that are flagged as dynamic will not be collapsed in
``ChainTransform.simplified``. This allows changes to the transform
to propagate through the chain without requiring the chain to be
re-simplified.
"""
return self._dynamic
@dynamic.setter
def dynamic(self, d):
self._dynamic = d
def shader_map(self):
"""
Return a shader Function that accepts only a single vec4 argument
and defines new attributes / uniforms supplying the Function with
any static input.
"""
return self._shader_map
def shader_imap(self):
"""
see shader_map.
"""
return self._shader_imap
def _shader_object(self):
""" This method allows transforms to be assigned directly to shader
template variables.
Example::
code = 'void main() { gl_Position = $transform($position); }'
func = shaders.Function(code)
tr = STTransform()
func['transform'] = tr # use tr's forward mapping for $function
"""
return self.shader_map()
def update(self, *args):
"""
Called to inform any listeners that this transform has changed.
"""
self.changed(*args)
def __mul__(self, tr):
"""
Transform multiplication returns a new transform that is equivalent to
the two operands performed in series.
By default, multiplying two Transforms `A * B` will return
ChainTransform([A, B]). Subclasses may redefine this operation to
return more optimized results.
To ensure that both operands have a chance to simplify the operation,
all subclasses should follow the same procedure. For `A * B`:
1. A.__mul__(B) attempts to generate an optimized transform product.
2. If that fails, it must:
* return super(A).__mul__(B) OR
* return NotImplemented if the superclass would return an
invalid result.
3. When BaseTransform.__mul__(A, B) is called, it returns
NotImplemented, which causes B.__rmul__(A) to be invoked.
4. B.__rmul__(A) attempts to generate an optimized transform product.
5. If that fails, it must:
* return super(B).__rmul__(A) OR
* return ChainTransform([B, A]) if the superclass would return
an invalid result.
6. When BaseTransform.__rmul__(B, A) is called, ChainTransform([A, B])
is returned.
"""
# switch to __rmul__ attempts.
# Don't use the "return NotImplemted" trick, because that won't work if
# self and tr are of the same type.
return tr.__rmul__(self)
def __rmul__(self, tr):
return ChainTransform([tr, self])
def __repr__(self):
return "<%s at 0x%x>" % (self.__class__.__name__, id(self))
def __del__(self):
# we can remove ourselves from *all* events in this situation.
self.changed.disconnect()
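# Illustration (editor's note, not in the original module): for two BaseTransform
# subclasses A and B with no optimized product, `a * b` (a = A(), b = B()) falls through
# to b.__rmul__(a) and therefore yields ChainTransform([a, b]).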
class InverseTransform(BaseTransform):
def __init__(self, transform):
BaseTransform.__init__(self)
self._inverse = transform
self.map = transform.imap
self.imap = transform.map
@property
def Linear(self):
return self._inverse.Linear
@property
def Orthogonal(self):
return self._inverse.Orthogonal
@property
def NonScaling(self):
return self._inverse.NonScaling
@property
def Isometric(self):
return self._inverse.Isometric
@property
def shader_map(self):
return self._inverse.shader_imap
@property
def shader_imap(self):
return self._inverse.shader_map
def __repr__(self):
return ("<Inverse of %r>" % repr(self._inverse))
# import here to avoid import cycle; needed for BaseTransform.__mul__.
from .chain import ChainTransform # noqa
| Eric89GXL/vispy | vispy/visuals/transforms/base_transform.py | Python | bsd-3-clause | 7,578 | 0.001715 |
import unittest
from flumine import config
class ConfigTest(unittest.TestCase):
def test_init(self):
self.assertFalse(config.simulated)
self.assertTrue(config.simulated_strategy_isolation)
self.assertIsInstance(config.customer_strategy_ref, str)
self.assertIsInstance(config.process_id, int)
self.assertIsNone(config.current_time)
self.assertFalse(config.raise_errors)
self.assertEqual(config.max_execution_workers, 32)
self.assertFalse(config.async_place_orders)
self.assertEqual(config.place_latency, 0.120)
self.assertEqual(config.cancel_latency, 0.170)
self.assertEqual(config.update_latency, 0.150)
self.assertEqual(config.replace_latency, 0.280)
self.assertEqual(config.order_sep, "-")
self.assertEqual(config.execution_retry_attempts, 10)
| liampauling/flumine | tests/test_config.py | Python | mit | 865 | 0 |
import numpy
import math
def mkRamp(*args):
''' mkRamp(SIZE, DIRECTION, SLOPE, INTERCEPT, ORIGIN)
Compute a matrix of dimension SIZE (a [Y X] 2-vector, or a scalar)
containing samples of a ramp function, with given gradient DIRECTION
(radians, CW from X-axis, default = 0), SLOPE (per pixel, default =
1), and a value of INTERCEPT (default = 0) at the ORIGIN (default =
(size+1)/2, [1 1] = upper left). All but the first argument are
optional '''
if len(args) == 0:
print("mkRamp(SIZE, DIRECTION, SLOPE, INTERCEPT, ORIGIN)")
print("first argument is required")
exit(1)
else:
sz = args[0]
if isinstance(sz, int):
sz = (sz, sz)
elif not isinstance(sz, tuple):
print("first argument must be a two element tuple or an integer")
exit(1)
# OPTIONAL args:
if len(args) > 1:
direction = args[1]
else:
direction = 0
if len(args) > 2:
slope = args[2]
else:
slope = 1
if len(args) > 3:
intercept = args[3]
else:
intercept = 0
if len(args) > 4:
origin = args[4]
else:
origin = (float(sz[0] - 1) / 2.0, float(sz[1] - 1) / 2.0)
#--------------------------
xinc = slope * math.cos(direction)
yinc = slope * math.sin(direction)
[xramp, yramp] = numpy.meshgrid(xinc * (numpy.array(list(range(sz[1]))) - origin[1]),
yinc * (numpy.array(list(range(sz[0]))) - origin[0]))
res = intercept + xramp + yramp
return res
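# Example (editor's illustration, not in the original module):
# mkRamp(5) returns a 5x5 array whose every row is [-2., -1., 0., 1., 2.], i.e. a
# unit-slope ramp along the x-axis centred on the middle pixel.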
| tochikuji/pyPyrTools | pyrtools/mkRamp.py | Python | mit | 1,617 | 0.002474 |
from subprocess import *
import gzip
import string
import os
import time
import ApplePythonReporter
class ApplePythonReport:
vendorId = YOUR_VENDOR_ID
userId = 'YOUR_ITUNES_CONNECT_ACCOUNT_MAIL'
password = 'ITUNES_CONNECT_PASSWORD'
account = 'ACCOUNT_ID'
mode = 'Robot.XML'
dateType = 'Daily'
eventIndex = 1
activeSubscriberIndex = 16
quantityIndex = 25
subscribers = 0
cancellations = 0
activeSubscribers = 0
maxAttempts = 5
def __init__(self, reportDate):
self.DownloadSubscriptionEventReport(reportDate)
self.DownloadSubscriptionReport(reportDate)
self.FetchSubscriptionEventData(reportDate)
self.FetchSubscriptionData(reportDate)
self.CleanUp(reportDate)
def DownloadSubscriptionEventReport(self, date):
print 'Downloading Apple Financial Report for Subscriptions (' + date + ')..'
credentials = (self.userId, self.password, self.account, self.mode)
command = 'Sales.getReport, {0},SubscriptionEvent,Summary,{1},{2}'.format(self.vendorId, self.dateType, date)
try:
ApplePythonReporter.output_result(ApplePythonReporter.post_request(ApplePythonReporter.ENDPOINT_SALES,
credentials, command))
except Exception:
pass
#return iter(p.stdout.readline, b'')
def DownloadSubscriptionReport(self, date):
print 'Downloading Apple Financial Report for Active Users (' + date + ')..'
credentials = (self.userId, self.password, self.account, self.mode)
command = 'Sales.getReport, {0},Subscription,Summary,{1},{2}'.format(self.vendorId, self.dateType, date)
try:
ApplePythonReporter.output_result(ApplePythonReporter.post_request(ApplePythonReporter.ENDPOINT_SALES,
credentials, command))
except:
pass
#return iter(p.stdout.readline, b'')
#Uncompress and extract needed values (cancellations and new subscribers)
def FetchSubscriptionEventData(self, date):
fileName = 'Subscription_Event_'+self.vendorId+'_' + date + '.txt'
attempts = 0
while not os.path.isfile(fileName):
if(attempts >= self.maxAttempts):
break
attempts += 1
time.sleep(1)
if os.path.isfile(fileName):
print 'Fetching SubscriptionEvents..'
with open(fileName, 'rb') as inF:
text = inF.read().splitlines()
for row in text[1:]:
line = string.split(row, '\t')
# print line[self.eventIndex].__str__()
if line[0].__str__().endswith(date[-2:]):
if line[self.eventIndex] == 'Cancel':
self.cancellations += int(line[self.quantityIndex])
if line[self.eventIndex] == 'Subscribe':
self.subscribers += int(line[self.quantityIndex])
else:
print 'SubscriptionEvent: There were no sales for the date specified'
# Uncompress and extract needed values (active users)
def FetchSubscriptionData(self, date):
fileName = 'Subscription_'+self.vendorId+'_' + date + '.txt'
attempts = 0
while not os.path.isfile(fileName):
if (attempts >= self.maxAttempts):
break
attempts += 1
time.sleep(1)
if os.path.isfile(fileName):
print 'Fetching Subscriptions..'
with open(fileName, 'rb') as inF:
text = inF.read().splitlines()
for row in text[1:]:
line = string.split(row, '\t')
# print line[0].__str__()
self.activeSubscribers += int(line[self.activeSubscriberIndex])
else:
print 'Subscription: There were no sales for the date specified'
def CleanUp(self, date):
if os.path.isfile('Subscription_'+self.vendorId.__str__() +'_' + date + '.txt'):
os.remove('Subscription_'+self.vendorId.__str__()+'_' + date + '.txt')
else:
print 'Subscription_'+self.vendorId.__str__()+'_' + date + '.txt doesnt exist: Maybe there were no Sales at the specified date'
if os.path.isfile('Subscription_Event_'+self.vendorId.__str__()+'_' + date + '.txt'):
os.remove('Subscription_Event_'+self.vendorId.__str__()+'_' + date + '.txt')
else:
print 'Subscription_Event_'+self.vendorId.__str__()+'_' + date + '.txt doesnt exist: Maybe there were no Sales at the specified date'
| Acimaz/Google_Apple_Financial_Reporter | AppleReporter.py | Python | mit | 4,732 | 0.005283 |
#!/usr/bin/env python
from __future__ import (
unicode_literals,
absolute_import,
print_function,
division,
)
import aaf2
import traceback
import subprocess
import json
import os
import datetime
import sys
import tempfile
import shutil
import time
import fractions
from aaf2 import auid
from pprint import pprint
FFMPEG_EXEC = "ffmpeg"
FFPROBE_EXEC = "ffprobe"
Audio_Profiles = aaf2.audio.pcm_profiles
Video_Profiles = aaf2.video.dnx_profiles
# FFMPEG_EXEC = "/Users/mark/Dev/ffmpeg/ffmpeg_g"
# FFPROBE_EXEC = "/Users/mark/Dev/ffmpeg/ffprobe_g"
def probe(path, show_packets=False):
cmd = [FFPROBE_EXEC, '-of','json','-show_format','-show_streams', path]
if show_packets:
cmd.extend(['-show_packets',])
print(subprocess.list2cmdline(cmd))
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout,stderr = p.communicate()
if p.returncode != 0:
raise subprocess.CalledProcessError(p.returncode, subprocess.list2cmdline(cmd), stderr)
return json.loads(stdout)
def timecode_to_seconds(time_string):
try:
return float(time_string)
except:
pass
for format in ("%H:%M:%S.%f", "%H:%M:%S", "%M:%S.%f","%M:%S"):
try:
t = datetime.datetime.strptime(time_string, format)
seconds = 0
if t.minute:
seconds += 60*t.minute
if t.hour:
seconds += 60 * 60 * t.hour
seconds += t.second
seconds += float(t.strftime(".%f"))
return seconds
except:
#print traceback.format_exc()
pass
raise ValueError("invalid time format: %s" % time_string)
def seconds_to_timecode(seconds):
format = "%S.%f"
t = datetime.timedelta(seconds=float(seconds))
return str(t)
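# Examples (editor's illustration, not in the original script):
# timecode_to_seconds("90") == 90.0 and timecode_to_seconds("01:02:03.5") == 3723.5;
# seconds_to_timecode(3723.5) == "1:02:03.500000" (str() of a datetime.timedelta).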
def has_alpha(stream):
if stream['pix_fmt'] in ('yuva444p10le','rgba'):
return True
return False
def conform_media(path,
output_dir,
start=None,
end=None,
duration=None,
width=None,
height=None,
frame_rate=None,
video_profile_name=None,
audio_profile_name=None,
ignore_alpha=False):
if not video_profile_name:
video_profile_name = 'dnx_1080p_36_23.97'
if not audio_profile_name:
audio_profile_name = 'pcm_48000_s16le'
video_profile = Video_Profiles[video_profile_name]
audio_profile = Audio_Profiles[audio_profile_name]
format = probe(path)
out_files = []
cmd = [FFMPEG_EXEC,'-y', '-nostdin']
# cmd.extend(['-loglevel', 'debug'])
if end:
duration = timecode_to_seconds(end) - timecode_to_seconds(start)
duration = seconds_to_timecode(duration)
end = None
if start:
start_seconds = timecode_to_seconds(start)
fast_start = max(0,int(start_seconds-30))
if fast_start:
start = seconds_to_timecode(start_seconds - fast_start)
cmd.extend(['-ss', seconds_to_timecode(fast_start)])
frame_rate = video_profile['frame_rate']
pix_fmt = video_profile['pix_fmt']
bitrate = video_profile['bitrate']
dnxhd_profile = video_profile.get("video_profile", None)
if format['format']['format_name'] == "image2":
frame_rate = frame_rate or "24000/1001"
cmd.extend([ '-framerate', frame_rate])
cmd.extend(['-i', path,])
if video_profile['size']:
width, height = video_profile['size']
else:
width = None
height = None
interlaced = video_profile['interlaced']
#sample_rate =44100
sample_rate = audio_profile['sample_rate']
for stream in format['streams']:
#pprint(stream)
stream_index = stream['index']
if stream['codec_type'] == 'video':
out_meta = {}
# pprint(stream)
alpha = has_alpha(stream)
passes = 1
if alpha and not ignore_alpha:
passes = 2
for i in range(passes):
if i == 1:
cmd.extend(['-an', '-f', 'rawvideo', '-pix_fmt', 'gray'])
if frame_rate:
cmd.extend(['-r', frame_rate])
else:
cmd.extend(['-an','-vcodec', 'dnxhd', '-pix_fmt', pix_fmt])
if dnxhd_profile:
cmd.extend(['-profile:v', dnxhd_profile])
if bitrate:
cmd.extend(['-vb', '%dM' % bitrate])
if frame_rate:
cmd.extend(['-r', frame_rate])
if not start is None:
cmd.extend(['-ss', str(start)])
if not duration is None:
cmd.extend(['-t', str(duration)])
vfilter = []
if i == 1:
vfilter.append("alphaextract")
if width and height:
out_width = width
out_height = height
input_width = stream['width']
input_height = stream['height']
max_width = width
max_height = height
scale = min(max_width/ float(input_width), max_height/float(input_height) )
scale_width = int(input_width*scale)
scale_height = int(input_height*scale)
padding_ofs_x = (max_width - scale_width)//2
padding_ofs_y = (max_height - scale_height)//2
vfilter.append("scale=%d:%d,pad=%d:%d:%d:%d" % (scale_width,scale_height,
max_width,max_height, padding_ofs_x,padding_ofs_y))
else:
out_width = stream['width']
out_height = stream['height']
if vfilter:
cmd.extend(['-vf', ','.join(vfilter)])
# cmd.extend(['-s', "%dx%d" % (width, height)])
if i == 1:
out_file = os.path.join(output_dir, 'out_%d.alpha' % (stream_index))
out_meta['path_alpha'] = out_file
else:
out_rate = frame_rate or str(stream['avg_frame_rate'])
out_file = os.path.join(output_dir, 'out_%d.dnxhd' % (stream_index))
out_meta = {'path':out_file, 'frame_rate':out_rate, 'type': 'video', 'profile':video_profile_name}
out_meta['width'] = out_width
out_meta['height'] = out_height
cmd.extend([out_file])
#pprint(stream)
print("USING FRAMREATE", out_rate, str(stream['avg_frame_rate']))
out_files.append(out_meta)
elif stream['codec_type'] == 'audio':
input_sample_rate = int(stream['sample_rate'])
channels = stream['channels']
cmd.extend(['-vn', '-acodec', 'pcm_s16le', '-ar', str(sample_rate)])
# afilter = ['-af', "aresample=async=1:first_pts=0"]
# cmd.extend(afilter)
if not start is None:
cmd.extend(['-ss', str(start)])
if not duration is None:
cmd.extend(['-t', str(duration)])
out_file = os.path.join(output_dir, 'out_%d_%d_%d.wav' % (stream_index, sample_rate, channels))
cmd.extend([out_file])
out_files.append({'path':out_file, 'sample_rate':sample_rate, 'channels':channels,'type': 'audio'})
print(subprocess.list2cmdline(cmd))
subprocess.check_call(cmd)
return out_files
def create_matte_key_definition(f):
opdef = f.create.OperationDef(auid.AUID("0c864774-e428-3b2d-8115-1c736806191a"), 'MatteKey_2')
opdef['IsTimeWarp'].value = False
opdef['OperationCategory'].value = 'OperationCategory_Effect'
opdef['NumberInputs'].value = 3
opdef['Bypass'].value = 2
opdef.media_kind = "picture"
f.dictionary.register_def(opdef)
return opdef
def import_video_essence(f, mastermob, stream, compmob=None, tapemob=None):
tape = None
edit_rate = stream['frame_rate']
if tapemob:
timecode_fps= int(round(float(fractions.Fraction(edit_rate))))
start_time = timecode_fps * 60 * 60
tape = tapemob.create_source_clip(1, start=start_time)
alpha_path = stream.get("path_alpha", None)
color_slot = mastermob.import_dnxhd_essence(stream['path'], edit_rate, tape=tape)
if alpha_path:
pixel_layout = [{u'Code': u'CompAlpha', u'Size': 8}]
width = stream['width']
height = stream['height']
source_mob = f.create.SourceMob()
f.content.mobs.append(source_mob)
if tapemob:
tape = tapemob.create_source_clip(1, start=start_time)
source_slot = source_mob.import_rawvideo_essence(alpha_path, edit_rate, width, height, pixel_layout, tape=tape)
length = source_slot.segment.length
essence_group = f.create.EssenceGroup()
alpha_slot = mastermob.create_picture_slot(edit_rate)
alpha_slot.segment = essence_group
source_clip = source_mob.create_source_clip(source_slot.slot_id)
source_clip.length = length
essence_group['Choices'].append(source_clip)
essence_group.length = length
opdef = create_matte_key_definition(f)
slot = compmob.create_picture_slot(edit_rate)
op_group = f.create.OperationGroup(opdef)
slot.segment = op_group
scope = f.create.ScopeReference()
scope['RelativeScope'].value = 1
scope['RelativeSlot'].value = 1
scope.length = length
sequence = f.create.Sequence(length=length)
sequence.components.append(scope)
op_group.segments.append(sequence)
op_group.segments.append(mastermob.create_source_clip(color_slot.slot_id, length=length))
op_group.segments.append(mastermob.create_source_clip(alpha_slot.slot_id, length=length))
def create_aaf(path, media_streams, mobname, tape_name=None, start_timecode=None):
with aaf2.open(path, 'w') as f:
mastermob = f.create.MasterMob(mobname)
f.content.mobs.append(mastermob)
edit_rate = None
for stream in media_streams:
if stream['type'] == 'video':
edit_rate =fractions.Fraction(stream['frame_rate'])
break
alpha = False
compmob = None
for stream in media_streams:
if stream.get('path_alpha', False):
alpha = True
compmob = f.create.CompositionMob(mastermob.name)
compmob.usage = 'Usage_Template'
f.content.mobs.append(compmob)
# this hides the mastermob in avid bin
mastermob['AppCode'].value = 1
mastermob.usage = "Usage_LowerLevel"
break
tapemob = None
timecode_fps= int(round(float(edit_rate)))
if tape_name:
tapemob = f.create.SourceMob()
tapemob.create_tape_slots(tape_name, edit_rate, timecode_fps)
f.content.mobs.append(tapemob)
for stream in media_streams:
if stream['type'] == 'video':
print("importing video...")
start = time.time()
import_video_essence(f, mastermob, stream, compmob, tapemob)
print("imported video in %f secs" % (time.time()- start))
for stream in media_streams:
if stream['type'] == 'audio':
print("importing audio...")
start = time.time()
sample_rate = stream['sample_rate']
slot = mastermob.import_audio_essence(stream['path'], edit_rate)
if compmob:
sound_slot = compmob.create_sound_slot(edit_rate)
sound_slot.segment = mastermob.create_source_clip(slot.slot_id, length = slot.segment.length)
print("imported audio in %f secs" % (time.time()- start))
if __name__ == "__main__":
from optparse import OptionParser
usage = "usage: %prog [options] output_aaf_file media_file"
parser = OptionParser(usage=usage)
parser.add_option('-s', '--start', type="string", dest="start",default=None,
help = "start recording at, in timecode or seconds")
parser.add_option('-e', '--end', type="string", dest='end',default=None,
help = "end recording at in timecode or seconds")
parser.add_option('-d', '--duration', type="string", dest='duration',default=None,
help = "record duration in timecode or seconds")
parser.add_option('--tape', type="string", dest="tape_name",default=None,
help = "tape name")
parser.add_option('--start_timecode', type="string", dest="start_timecode", default=None,
help = "start timecode [default 01:00:00:00]")
parser.add_option('--ignore_alpha', action='store_true', dest="ignore_alpha", default=False,
help = "ignore alpha channel if present")
parser.add_option("-v", '--video-profile', type='string', dest = 'video_profile', default="dnx_1080p_36_23.97",
help = "encoding profile for video [default: 1080p_36_23.97]")
parser.add_option("-a", '--audio-profile', type='string', dest = 'audio_profile',default='pcm_48000_s16le',
                      help = 'encoding profile for audio [default: pcm_48000_s16le]')
parser.add_option("--size", type='string', dest='size', default=None,
help = "video resolution for dnxhr [default: src size]")
parser.add_option("--framerate", type='string', dest='framerate',
help = "video framerate for dnxhr [default: use src rate]")
parser.add_option('--list-profiles', dest='list_profiles',
action="store_true",default=False,
help = "lists profiles")
(options, args) = parser.parse_args()
if options.list_profiles:
titles = ['Audio Profile', 'Sample Rate', 'Sample Fmt']
row_format ="{:<25}{:<15}{:<15}"
print("")
print(row_format.format( *titles))
print("")
for key,value in sorted(Audio_Profiles.items()):
print(row_format.format(key, value['sample_rate'], value['sample_format']))
titles = ['Video Profile', "Size", 'Frame Rate', "Bitrate", "Pix Fmt", "Codec"]
row_format ="{:<25}{:<15}{:<15}{:<10}{:<12}{:<10}"
print("")
print(row_format.format( *titles))
print("")
for key, value in sorted(Video_Profiles.items()):
codec = 'dnxhd'
if key.startswith("dnxhr"):
codec = 'dnxhr'
print(row_format.format(key, value['size'],
value['frame_rate'], value['bitrate'], value['pix_fmt'], codec))
sys.exit()
if len(args) < 2:
parser.error("not enough args")
details = probe(args[1])
#if not os.path.exists(args[1]):
#parser.error("No such file or directory: %s" % args[1])
if options.end and options.duration:
parser.error("Can only use --duration or --end not both")
print(options.audio_profile)
    if options.audio_profile not in Audio_Profiles:
parser.error("No such audio profile: %s" % options.audio_profile)
    if options.video_profile.lower() not in Video_Profiles:
parser.error("No such video profile: %s" % options.video_profile)
aaf_file = args[0]
# tempdir = os.path.join(os.getcwd(), 'samples', 'convert')
# if not os.path.exists(tempdir):
# os.makedirs(tempdir)
tempdir = tempfile.mkdtemp("-aaf_import")
print(tempdir)
media_streams = []
width = None
height = None
if options.size and options.video_profile.lower().startswith("dnxhr"):
width,height = options.size.split("x")
width = int(width)
height = int(height)
try:
for src in args[1:]:
media_streams.extend(conform_media(src,
output_dir=tempdir,
start=options.start,
end=options.end,
duration=options.duration,
width=width,
height=height,
frame_rate=options.framerate,
video_profile_name = options.video_profile.lower(),
audio_profile_name = options.audio_profile.lower(),
ignore_alpha = options.ignore_alpha)
)
except:
print(traceback.format_exc())
shutil.rmtree(tempdir)
parser.error("error conforming media")
try:
basename = os.path.basename(args[1])
name,ext = os.path.splitext(basename)
if details['format']['format_name'] == 'image2':
name, padding = os.path.splitext(name)
create_aaf(aaf_file, media_streams, name, options.tape_name, options.start_timecode)
finally:
pass
shutil.rmtree(tempdir)
|
markreidvfx/pyaaf2
|
examples/import_media.py
|
Python
|
mit
| 17,403 | 0.008045 |
import numpy as np
def extrapolate(xs_name):
"""Extrapolate cross section based on thermal salt expansion feedback.
Extrapolates cross section data at 900 K to 1500 K at 50 K intervals
based on the thermal salt expansion feedback formula from [1]. Writes
the extrapolated data back into the .txt cross section files in the
Moltres-compatible format.
Parameters
----------
    xs_name : str
        Name of the cross section to be extrapolated.
Returns
-------
None
References
----------
[1] Tiberga et al., "Results from a multi-physics nurmerical benchmark for
codes dedicated to molten salt fast reactors," Annals of Nuclear Energy,
vol. 142, July 2020, 107428.
"""
rho_900 = 2.0e3 # Density at 900 K [kg m-3]
alpha = 2.0e-4 # Thermal expansion coeff [K-1]
input_file = "benchmark_" + xs_name + ".txt"
# Setup temperature values to extrapolate to
temp = np.linspace(950, 1500, 12)
# Read cross section data at 900K
f = open(input_file, 'r+')
lines = f.readlines()
data_900 = list(lines[0].split())
f.close()
# Setup space separated data to be written back into txt
s = " "
xs = [s.join(data_900) + "\n"]
for i in range(len(temp)):
# Calculate density at temp[i]
rho = rho_900 * (1 - alpha * (temp[i]-900))
# Apply extrapolation formula at temp[i]
data_next = [0, ] * len(data_900)
data_next[0] = str(temp[i])
        for j in range(1, len(data_900)):
            if xs_name == "DIFFCOEF":
                data = float(data_900[j]) / rho * rho_900
                data_next[j] = '{:0.5e}'.format(data)
            else:
                data = float(data_900[j]) * rho / rho_900
                data_next[j] = '{:0.5e}'.format(data)
data_next = s.join(data_next) + "\n"
xs.append(data_next)
# Write cross section data into txt file
h = open(input_file, 'w')
h.writelines(xs)
h.close()
return
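# Worked example of the scaling above (illustrative numbers only): at
# T = 1200 K, rho = 2.0e3 * (1 - 2.0e-4 * (1200 - 900)) = 1.88e3 kg m-3, so a
# cross section of 1.00000e-01 at 900 K becomes
# 1.00000e-01 * 1.88e3 / 2.0e3 = 9.40000e-02, while a diffusion coefficient
# scales inversely: 1.00000e-01 * 2.0e3 / 1.88e3 ~ 1.06383e-01.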
def main():
"""Runs extrapolate() for the relevant cross sections.
"""
xs_names = ["DIFFCOEF", "FISS", "NSF", "REMXS", "SP0"]
for i in xs_names:
extrapolate(i)
return
if __name__ == "__main__":
main()
|
arfc/moltres
|
property_file_dir/cnrs-benchmark/feedback.py
|
Python
|
lgpl-2.1
| 2,292 | 0 |
#!/usr/bin/env python3
# Copyright (c) 2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test logic for setting nMaxTipAge on command line.
Nodes don't consider themselves out of "initial block download" as long as
their best known block header time is more than nMaxTipAge in the past.
"""
import time
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
DEFAULT_MAX_TIP_AGE = 24 * 60 * 60
class MaxTipAgeTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
def test_maxtipage(self, maxtipage, set_parameter=True):
node_miner = self.nodes[0]
node_ibd = self.nodes[1]
self.restart_node(1, [f'-maxtipage={maxtipage}'] if set_parameter else None)
self.connect_nodes(0, 1)
# tips older than maximum age -> stay in IBD
cur_time = int(time.time())
node_ibd.setmocktime(cur_time)
for delta in [5, 4, 3, 2, 1]:
node_miner.setmocktime(cur_time - maxtipage - delta)
self.generate(node_miner, 1)
assert_equal(node_ibd.getblockchaininfo()['initialblockdownload'], True)
# tip within maximum age -> leave IBD
node_miner.setmocktime(cur_time - maxtipage)
self.generate(node_miner, 1)
assert_equal(node_ibd.getblockchaininfo()['initialblockdownload'], False)
def run_test(self):
self.log.info("Test IBD with maximum tip age of 24 hours (default).")
self.test_maxtipage(DEFAULT_MAX_TIP_AGE, set_parameter=False)
for hours in [20, 10, 5, 2, 1]:
maxtipage = hours * 60 * 60
self.log.info(f"Test IBD with maximum tip age of {hours} hours (-maxtipage={maxtipage}).")
self.test_maxtipage(maxtipage)
if __name__ == '__main__':
MaxTipAgeTest().main()
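# Rough illustration of the condition exercised above (not the actual Core
# implementation): a node stays in IBD while best_header_time < now - maxtipage.
# With -maxtipage=7200, a tip stamped now - 7201 keeps the node in IBD, while
# one stamped exactly now - 7200 lets it leave, matching test_maxtipage().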
|
particl/particl-core
|
test/functional/feature_maxtipage.py
|
Python
|
mit
| 1,997 | 0.002003 |
import pytest
from dateutil.parser import parse
from django import forms
from adhocracy4.forms.fields import DateTimeField
class DateTimeForm(forms.Form):
date = DateTimeField(
time_format='%H:%M',
required=False,
require_all_fields=False,
)
@pytest.mark.django_db
def test_datetimefield_valid(user):
data = {'date_0': '2023-01-01', 'date_1': '12:30'}
form = DateTimeForm(data=data)
assert form.is_valid()
assert form.cleaned_data['date'] == \
parse('2023-01-01 12:30:00 UTC')
@pytest.mark.django_db
def test_datetimefield_invalid(user):
data = {'date_0': 'not a date', 'date_1': '12:30'}
form = DateTimeForm(data=data)
assert not form.is_valid()
@pytest.mark.django_db
def test_datetimefield_empty_none(user):
data = {'date_0': '', 'date_1': ''}
form = DateTimeForm(data=data)
assert form.is_valid()
assert form.cleaned_data['date'] is None
@pytest.mark.django_db
def test_datetimefield_default_time(user):
data = {'date_0': '2023-01-01', 'date_1': ''}
form = DateTimeForm(data=data)
assert form.is_valid()
assert form.cleaned_data['date'] == \
parse('2023-01-01 00:00:00 UTC')
|
liqd/adhocracy4
|
tests/forms/test_forms.py
|
Python
|
agpl-3.0
| 1,198 | 0 |
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ddt
import mock
from oslo_utils import timeutils
import webob
from manila.api.v1 import share_types as types
from manila.api.views import types as views_types
from manila.common import constants
from manila import exception
from manila import policy
from manila.share import share_types
from manila import test
from manila.tests.api import fakes
def stub_share_type(id):
specs = {
"key1": "value1",
"key2": "value2",
"key3": "value3",
"key4": "value4",
"key5": "value5",
constants.ExtraSpecs.DRIVER_HANDLES_SHARE_SERVERS: "true",
}
return dict(
id=id,
name='share_type_%s' % str(id),
extra_specs=specs,
required_extra_specs={
constants.ExtraSpecs.DRIVER_HANDLES_SHARE_SERVERS: "true",
}
)
def return_share_types_get_all_types(context, search_opts=None):
return dict(
share_type_1=stub_share_type(1),
share_type_2=stub_share_type(2),
share_type_3=stub_share_type(3)
)
def return_empty_share_types_get_all_types(context, search_opts=None):
return {}
def return_share_types_get_share_type(context, id=1):
if id == "777":
raise exception.ShareTypeNotFound(share_type_id=id)
return stub_share_type(int(id))
def return_share_types_get_by_name(context, name):
if name == "777":
raise exception.ShareTypeNotFoundByName(share_type_name=name)
return stub_share_type(int(name.split("_")[2]))
@ddt.ddt
class ShareTypesApiTest(test.TestCase):
def setUp(self):
super(ShareTypesApiTest, self).setUp()
self.controller = types.ShareTypesController()
self.mock_object(policy, 'check_policy',
mock.Mock(return_value=True))
@ddt.data(True, False)
def test_share_types_index(self, admin):
self.mock_object(share_types, 'get_all_types',
return_share_types_get_all_types)
req = fakes.HTTPRequest.blank('/v2/fake/types',
use_admin_context=admin)
res_dict = self.controller.index(req)
self.assertEqual(3, len(res_dict['share_types']))
expected_names = ['share_type_1', 'share_type_2', 'share_type_3']
actual_names = map(lambda e: e['name'], res_dict['share_types'])
self.assertEqual(set(actual_names), set(expected_names))
for entry in res_dict['share_types']:
if admin:
self.assertEqual('value1', entry['extra_specs'].get('key1'))
else:
self.assertIsNone(entry['extra_specs'].get('key1'))
self.assertTrue('required_extra_specs' in entry)
required_extra_spec = entry['required_extra_specs'].get(
constants.ExtraSpecs.DRIVER_HANDLES_SHARE_SERVERS, '')
self.assertEqual('true', required_extra_spec)
policy.check_policy.assert_called_once_with(
req.environ['manila.context'], types.RESOURCE_NAME, 'index')
def test_share_types_index_no_data(self):
self.mock_object(share_types, 'get_all_types',
return_empty_share_types_get_all_types)
req = fakes.HTTPRequest.blank('/v2/fake/types')
res_dict = self.controller.index(req)
self.assertEqual(0, len(res_dict['share_types']))
policy.check_policy.assert_called_once_with(
req.environ['manila.context'], types.RESOURCE_NAME, 'index')
def test_share_types_show(self):
self.mock_object(share_types, 'get_share_type',
return_share_types_get_share_type)
req = fakes.HTTPRequest.blank('/v2/fake/types/1')
res_dict = self.controller.show(req, 1)
self.assertEqual(2, len(res_dict))
self.assertEqual('1', res_dict['share_type']['id'])
self.assertEqual('share_type_1', res_dict['share_type']['name'])
policy.check_policy.assert_called_once_with(
req.environ['manila.context'], types.RESOURCE_NAME, 'show')
def test_share_types_show_not_found(self):
self.mock_object(share_types, 'get_share_type',
return_share_types_get_share_type)
req = fakes.HTTPRequest.blank('/v2/fake/types/777')
self.assertRaises(webob.exc.HTTPNotFound, self.controller.show,
req, '777')
policy.check_policy.assert_called_once_with(
req.environ['manila.context'], types.RESOURCE_NAME, 'show')
def test_share_types_default(self):
self.mock_object(share_types, 'get_default_share_type',
return_share_types_get_share_type)
req = fakes.HTTPRequest.blank('/v2/fake/types/default')
res_dict = self.controller.default(req)
self.assertEqual(2, len(res_dict))
self.assertEqual('1', res_dict['share_type']['id'])
self.assertEqual('share_type_1', res_dict['share_type']['name'])
policy.check_policy.assert_called_once_with(
req.environ['manila.context'], types.RESOURCE_NAME, 'default')
def test_share_types_default_not_found(self):
self.mock_object(share_types, 'get_default_share_type',
mock.Mock(side_effect=exception.ShareTypeNotFound(
share_type_id="fake")))
req = fakes.HTTPRequest.blank('/v2/fake/types/default')
self.assertRaises(webob.exc.HTTPNotFound, self.controller.default, req)
policy.check_policy.assert_called_once_with(
req.environ['manila.context'], types.RESOURCE_NAME, 'default')
def test_view_builder_show(self):
view_builder = views_types.ViewBuilder()
now = timeutils.isotime()
raw_share_type = dict(
name='new_type',
deleted=False,
created_at=now,
updated_at=now,
extra_specs={},
deleted_at=None,
required_extra_specs={},
id=42,
)
request = fakes.HTTPRequest.blank("/v2")
output = view_builder.show(request, raw_share_type)
self.assertIn('share_type', output)
expected_share_type = dict(
name='new_type',
extra_specs={},
required_extra_specs={},
id=42,
)
self.assertDictMatch(output['share_type'], expected_share_type)
def test_view_builder_list(self):
view_builder = views_types.ViewBuilder()
now = timeutils.isotime()
raw_share_types = []
for i in range(0, 10):
raw_share_types.append(
dict(
name='new_type',
deleted=False,
created_at=now,
updated_at=now,
extra_specs={},
required_extra_specs={},
deleted_at=None,
id=42 + i
)
)
request = fakes.HTTPRequest.blank("/v2")
output = view_builder.index(request, raw_share_types)
self.assertIn('share_types', output)
for i in range(0, 10):
expected_share_type = dict(
name='new_type',
extra_specs={},
required_extra_specs={},
id=42 + i
)
self.assertDictMatch(output['share_types'][i],
expected_share_type)
@ddt.data(None, True, 'true', 'false', 'all')
def test_parse_is_public_valid(self, value):
result = self.controller._parse_is_public(value)
self.assertTrue(result in (True, False, None))
def test_parse_is_public_invalid(self):
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._parse_is_public,
'fakefakefake')
|
redhat-openstack/manila
|
manila/tests/api/v1/test_share_types.py
|
Python
|
apache-2.0
| 8,424 | 0 |
# Copyright 2018-present Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import os
from artificialproject.field_generators import (
GenerationFailedException,
StringGenerator,
)
from artificialproject.random import weighted_choice
class FilePathGenerator:
BUILD_FILE_NAME = "BUCK"
def __init__(self):
self._component_generator = StringGenerator()
self._file_samples = collections.defaultdict(
lambda: collections.defaultdict(set)
)
self._file_samples_dirty = False
self._package_depths = collections.Counter()
self._file_depths_in_package = collections.Counter()
self._sizes_by_depth = collections.defaultdict(collections.Counter)
self._sizes_by_depth_in_package = collections.defaultdict(collections.Counter)
self._build_file_sizes = collections.Counter()
self._root = {}
self._package_paths = {}
self._available_directories = {}
self._last_package_path = None
self._last_package_remaining_targets = None
def analyze_project_data(self, project_data):
dir_entries = collections.defaultdict(set)
build_file_entries = collections.defaultdict(set)
for target_data in project_data.values():
base_path = target_data["buck.base_path"]
build_file_entries[base_path].add(target_data["name"])
components = self._split_path_into_components(base_path)
# TODO(jakubzika): Targets in the root of the repo are ignored
# because _generate_path does not handle depth == 0.
if components:
self._package_depths.update([len(components)])
for component in components:
self._component_generator.add_string_sample(component)
for i, name in enumerate(components):
prefix = components[:i]
dir_entries[tuple(prefix)].add(name)
for base_path, names in build_file_entries.items():
self._build_file_sizes.update([len(names)])
for path, entries in dir_entries.items():
self._sizes_by_depth[len(path)].update([len(entries)])
def add_package_file_sample(self, package_path, relative_path):
components = self._split_path_into_components(relative_path)
self._file_depths_in_package.update([len(components)])
for i, name in enumerate(components):
prefix = components[:i]
self._file_samples[package_path][tuple(prefix)].add(name)
self._file_samples_dirty = True
def generate_package_path(self):
if self._last_package_path is not None:
path = self._last_package_path
self._last_package_remaining_targets -= 1
if self._last_package_remaining_targets <= 0:
self._last_package_path = None
return path
depth = weighted_choice(self._package_depths)
path, parent_dir = self._generate_path(
"//", self._root, depth, self._sizes_by_depth, self._component_generator
)
directory = {self.BUILD_FILE_NAME.lower(): None}
parent_dir[os.path.basename(path).lower()] = directory
self._last_package_path = path
self._last_package_remaining_targets = (
weighted_choice(self._build_file_sizes) - 1
)
return path
def generate_path_in_package(
self, package_path, depth, component_generator, extension
):
if depth == 0:
return ""
if self._file_samples_dirty:
self._sizes_by_depth_in_package.clear()
for dir_entries in self._file_samples.values():
for path, entries in dir_entries.items():
self._sizes_by_depth_in_package[len(path)].update([len(entries)])
self._file_samples_dirty = False
root = self._root
components = self._split_path_into_components(package_path)
for component in components:
root = root[component.lower()]
path, parent_dir = self._generate_path(
package_path,
root,
depth,
self._sizes_by_depth_in_package,
component_generator,
extension,
)
parent_dir[os.path.basename(path).lower()] = None
return path
def register_path(self, path):
directory = self._root
existed = True
for component in self._split_path_into_components(path):
if component not in directory:
directory[component] = {}
existed = False
directory = directory[component]
if directory is None:
raise GenerationFailedException()
if existed:
raise GenerationFailedException()
def _split_path_into_components(self, path):
components = []
while path:
path, component = os.path.split(path)
components.append(component)
return components[::-1]
def _generate_path(
self,
package_key,
root,
depth,
sizes_by_depth,
component_generator,
extension=None,
):
assert depth >= 1
parent_path, parent_dir = self._generate_parent(
package_key, root, depth - 1, sizes_by_depth, component_generator
)
name = self._generate_name(parent_dir, component_generator, extension)
return os.path.join(parent_path, name), parent_dir
def _generate_parent(
self, package_key, root, depth, sizes_by_depth, component_generator
):
if depth == 0:
return "", root
key = (package_key, depth)
value = self._available_directories.get(key)
if value is not None:
key_found = True
path, directory, size = value
else:
key_found = False
parent_path, parent_dir = self._generate_parent(
package_key, root, depth - 1, sizes_by_depth, component_generator
)
name = self._generate_name(parent_dir, component_generator)
path = os.path.join(parent_path, name)
directory = {}
parent_dir[name.lower()] = directory
size = weighted_choice(sizes_by_depth[depth])
size -= 1
if size > 0:
self._available_directories[key] = (path, directory, size)
elif key_found:
del self._available_directories[key]
return path, directory
def _generate_name(self, directory, generator, extension=None):
for i in range(1000):
name = generator.generate_string()
if extension is not None:
name += extension
if (
name.lower() not in directory
and name.lower() != self.BUILD_FILE_NAME.lower()
):
return name
raise GenerationFailedException()
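# A minimal usage sketch (the sample target below is hypothetical and only
# shows the fields analyze_project_data() reads; the third argument of
# generate_path_in_package() is normally a per-extension StringGenerator):
#   gen = FilePathGenerator()
#   gen.analyze_project_data({
#       '//foo/bar:baz': {'buck.base_path': 'foo/bar', 'name': 'baz'},
#   })
#   package = gen.generate_package_path()
#   src = gen.generate_path_in_package(package, 2, gen._component_generator, '.java')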
|
brettwooldridge/buck
|
scripts/artificialproject/file_path_generator.py
|
Python
|
apache-2.0
| 7,481 | 0.000535 |
data = [
b'\x04\x0e\x04\x01\x05 \x00',
b'\x04\x0e\x04\x01\x0b \x00',
b'\x04\x0e\x04\x01\x0c \x00',
b'\x04>+\x02\x01\x03\x01\x97\xe7/s\x18b\x1f\x1e\xff\x06\x00\x01\t \x02[=cdI\xb9kQl\x977W\xc2V?\xa2k\xe7\x1c\xf4\x9d\xd7\x85\xc9',
b'\x04>\x1a\x02\x01\x00\x01\x07\xbb\xd8!p\\\x0e\x02\x01\x06\n\xffL\x00\x10\x05\x0b\x1c\xfd\xf3\xc6\xad',
b'\x04>\x0c\x02\x01\x04\x01\x07\xbb\xd8!p\\\x00\xae',
b'\x04>\x1a\x02\x01\x00\x01\xd1e\xa9\x85\x0bI\x0e\x02\x01\x1a\n\xffL\x00\x10\x05\x03\x18\x9bF\x86\xa7',
b'\x04>(\x02\x01\x02\x01\xc9\x9b1\xca\x82i\x1c\x1b\xff\xff\xff\xbe\xacH%>Yr$Dc\xb9\xb8\x03?\xfa\xb5\x81\x04\x00{\x01A\xbc\x00\xb2',
b'\x04>\x0c\x02\x01\x04\x01\xc9\x9b1\xca\x82i\x00\xb3',
b'\x04>\x1e\x02\x01\x00\x01\x1bQm\xb7Qd\x12\x02\x01\x1a\x02\n\x0c\x0b\xffL\x00\x10\x06\x03\x1e\xa0\xdeI?\xac',
b'\x04>\x0c\x02\x01\x04\x01\x1bQm\xb7Qd\x00\xad',
b"\x04>'\x02\x01\x02\x01\n\t9\x1b\xf6y\x1b\x1a\xffL\x00\x02\x15j\xb1|\x17\xf4{MA\x806Rj\xee\xd2/s\x01\x16\x03h\xbf\xb6",
b'\x04>\x0c\x02\x01\x04\x01\n\t9\x1b\xf6y\x00\xb6',
b"\x04>\x1f\x02\x01\x02\x01\x9c\xa0\xd0L'P\x13\x03\x03\xaa\xfe\x0e\x16\xaa\xfe\x10\xbd\x01firstuk\x01\xb4",
b"\x04>\x0c\x02\x01\x04\x01\x9c\xa0\xd0L'P\x00\xb4",
b'\x04>(\x02\x01\x02\x01\xb9\xf6\x0f\xfd\xe2\\\x1c\x03\x03\x9f\xfe\x17\x16\x9f\xfe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xab',
b'\x04>\x0c\x02\x01\x04\x01\xd1e\xa9\x85\x0bI\x00\xa8',
b'\x04>\x16\x02\x01\x04\x01\xb9\xf6\x0f\xfd\xe2\\\n\t\xff\xe0\x00\x01z\xca\x86\xa1\xca\xaa',
b'\x04>(\x02\x01\x03\x00k\xa0\xd0.\x04\xf8\x1c\x1b\xffu\x00B\x04\x01\x80\xac\xf8\x04.\xd0\xa0k\xfa\x04.\xd0\xa0j\x01\x17@\x00\x00\x00\x00\xa8',
b'\x04>\x1a\x02\x01\x00\x01\xc7\xaf\x92\x15!b\x0e\x02\x01\x1a\n\xffL\x00\x10\x05\x13\x1c\x0c\xb2G\xa4',
b'\x04>\x0c\x02\x01\x04\x01\xc7\xaf\x92\x15!b\x00\xa5',
b'\x04>&\x02\x01\x02\x01\xf9u\xa8r\x14r\x1a\x03\x03\xaa\xfe\x15\x16\xaa\xfe\x00\xbf\xfb5\xfd\r\x17i\x1dd\xaa\x90\xab\xcd\xef\t\x87e\xc0',
b'\x04\x0e\x04\x01\x0c \x00',
b'\x04\x0f\x04\x00\x01\x01\x04',
b'\x04\x0e\x04\x01\x05 \x00',
b'\x04\x0e\x04\x01\x0b \x00',
b'\x04\x0e\x04\x01\x0c \x00',
b'\x04>+\x02\x01\x03\x01\x97\xe7/s\x18b\x1f\x1e\xff\x06\x00\x01\t \x02[=cdI\xb9kQl\x977W\xc2V?\xa2k\xe7\x1c\xf4\x9d\xd7\x85\xc9',
b'\x04>(\x02\x01\x02\x01\xc9\x9b1\xca\x82i\x1c\x1b\xff\xff\xff\xbe\xacH%>Yr$Dc\xb9\xb8\x03?\xfa\xb5\x81\x04\x00{\x01A\xbc\x00\xb1',
b'\x04>\x0c\x02\x01\x04\x01\xc9\x9b1\xca\x82i\x00\xb1',
b'\x04>\x1a\x02\x01\x00\x01\xd1e\xa9\x85\x0bI\x0e\x02\x01\x1a\n\xffL\x00\x10\x05\x03\x18\x9bF\x86\xab',
b'\x04>&\x02\x01\x02\x01\xf9u\xa8r\x14r\x1a\x03\x03\xaa\xfe\x15\x16\xaa\xfe\x00\xbf\xfb5\xfd\r\x17i\x1dd\xaa\x90\xab\xcd\xef\t\x87e\xb9',
b'\x04>\x0c\x02\x01\x04\x01\xd1e\xa9\x85\x0bI\x00\xac',
b'\x04>\x1e\x02\x01\x00\x01\x1bQm\xb7Qd\x12\x02\x01\x1a\x02\n\x0c\x0b\xffL\x00\x10\x06\x03\x1e\xa0\xdeI?\xae',
b'\x04>\x0c\x02\x01\x04\x01\x1bQm\xb7Qd\x00\xac',
b'\x04>(\x02\x01\x02\x01\xb9\xf6\x0f\xfd\xe2\\\x1c\x03\x03\x9f\xfe\x17\x16\x9f\xfe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xaf',
b'\x04>\x16\x02\x01\x04\x01\xb9\xf6\x0f\xfd\xe2\\\n\t\xff\xe0\x00\x01z\xca\x86\xa1\xca\xb0',
b"\x04>\x1f\x02\x01\x02\x01\x9c\xa0\xd0L'P\x13\x03\x03\xaa\xfe\x0e\x16\xaa\xfe\x10\xbd\x01firstuk\x01\xb2",
b"\x04>\x0c\x02\x01\x04\x01\x9c\xa0\xd0L'P\x00\xb1"
b"\x04>'\x02\x01\x02\x01\n\t9\x1b\xf6y\x1b\x1a\xffL\x00\x02\x15j\xb1|\x17\xf4{MA\x806Rj\xee\xd2/s\x01\x16\x03h\xbf\xb6"
b'\x04>\x0c\x02\x01\x04\x01\n\t9\x1b\xf6y\x00\xb5',
b'\x04>\x1a\x02\x01\x00\x01\x07\xbb\xd8!p\\\x0e\x02\x01\x06\n\xffL\x00\x10\x05\x0b\x1c\xfd\xf3\xc6\xa8',
b'\x04>)\x02\x01\x03\x01\xbeC\xe75\x82\xde\x1d\x02\x01\x06\x03\x03\xaa\xfe\x15\x16\xaa\xfe\x10\xf6\x03ukbaz.github.io\xbb',
b'\x04>\x0c\x02\x01\x04\x01\x07\xbb\xd8!p\\\x00\xae',
]
beacon_only = [
b"\x04>\x1f\x02\x01\x02\x01\x9c\xa0\xd0L'P\x13\x03\x03\xaa\xfe\x0e\x16\xaa\xfe\x10\xbd\x01firstuk\x01\xb4",
b"\x04>\x1f\x02\x01\x02\x01\x9c\xa0\xd0L'P\x13\x03\x03\xaa\xfe\x0e\x16\xaa\xfe\x10\xbd\x01firstuk\x01\xb2",
b'\x04>)\x02\x01\x03\x01\xbeC\xe75\x82\xde\x1d\x02\x01\x06\x03\x03\xaa\xfe\x15\x16\xaa\xfe\x10\xf6\x03ukbaz.github.io\xbd',
b'\x04>)\x02\x01\x03\x01\xbeC\xe75\x82\xde\x1d\x02\x01\x06\x03\x03\xaa\xfe\x15\x16\xaa\xfe\x10\xf6\x03ukbaz.github.io\xbb',
b'\x04>&\x02\x01\x02\x01\xf9u\xa8r\x14r\x1a\x03\x03\xaa\xfe\x15\x16\xaa\xfe\x00\xbf\xfb5\xfd\r\x17i\x1dd\xaa\x90\xab\xcd\xef\t\x87e\xc0',
b'\x04>&\x02\x01\x02\x01\xf9u\xa8r\x14r\x1a\x03\x03\xaa\xfe\x15\x16\xaa\xfe\x00\xbf\xfb5\xfd\r\x17i\x1dd\xaa\x90\xab\xcd\xef\t\x87e\xbc',
b"\x04>'\x02\x01\x02\x01\n\t9\x1b\xf6y\x1b\x1a\xffL\x00\x02\x15j\xb1|\x17\xf4{MA\x806Rj\xee\xd2/s\x01\x16\x03h\xbf\xb6",
b"\x04>'\x02\x01\x02\x01\n\t9\x1b\xf6y\x1b\x1a\xffL\x00\x02\x15j\xb1|\x17\xf4{MA\x806Rj\xee\xd2/s\x01\x16\x03h\xbf\xb6",
b'\x04>(\x02\x01\x02\x01\xc9\x9b1\xca\x82i\x1c\x1b\xff\xff\xff\xbe\xacH%>Yr$Dc\xb9\xb8\x03?\xfa\xb5\x81\x04\x00{\x01A\xbc\x00\xb2',
b'\x04>(\x02\x01\x02\x01\xc9\x9b1\xca\x82i\x1c\x1b\xff\xff\xff\xbe\xacH%>Yr$Dc\xb9\xb8\x03?\xfa\xb5\x81\x04\x00{\x01A\xbc\x00\xb1',
]
bytes_only = [
    # Eddystone URL with secondary URL encoding (e.g. \x01 = .org); see the parsing sketch after this list
b'\x04\x3e\x1f\x02\x01\x02\x01\x9c\xa0\xd0\x4c\x27\x50\x13\x03\x03\xaa\xfe\x0e\x16\xaa\xfe\x10\xbd\x01\x66\x69\x72\x73\x74\x75\x6b\x01\xb4',
b'\x04\x3e\x1f\x02\x01\x02\x01\x9c\xa0\xd0\x4c\x27\x50\x13\x03\x03\xaa\xfe\x0e\x16\xaa\xfe\x10\xbd\x01\x66\x69\x72\x73\x74\x75\x6b\x01\xb2',
# Eddystone URL without secondary URL encoding
b'\x04\x3e\x29\x02\x01\x03\x01\xbe\x43\xe7\x35\x82\xde\x1d\x02\x01\x06\x03\x03\xaa\xfe\x15\x16\xaa\xfe\x10\xf6\x03\x75\x6b\x62\x61\x7a\x2e\x67\x69\x74\x68\x75\x62\x2e\x69\x6f\xbd',
b'\x04\x3e\x29\x02\x01\x03\x01\xbe\x43\xe7\x35\x82\xde\x1d\x02\x01\x06\x03\x03\xaa\xfe\x15\x16\xaa\xfe\x10\xf6\x03\x75\x6b\x62\x61\x7a\x2e\x67\x69\x74\x68\x75\x62\x2e\x69\x6f\xbb',
# Eddystone UID
b'\x04\x3e\x26\x02\x01\x02\x01\xf9u\xa8r\x14r\x1a\x03\x03\xaa\xfe\x15\x16\xaa\xfe\x00\xbf\xfb\x35\xfd\r\x17\x69\x1d\x64\xaa\x90\xab\xcd\xef\t\x87\x65\xc0',
b'\x04\x3e\x26\x02\x01\x02\x01\xf9u\xa8r\x14r\x1a\x03\x03\xaa\xfe\x15\x16\xaa\xfe\x00\xbf\xfb\x35\xfd\r\x17\x69\x1d\x64\xaa\x90\xab\xcd\xef\t\x87\x64\xbc',
# iBeacon
b'\x04\x3e\x27\x02\x01\x02\x01\x0a\x09\x39\x1b\xf6\x79\x1b\x1a\xff\x4c\x00\x02\x15\x6a\xb1\x7c\x17\xf4\x7b\x4d\x41\x80\x36\x52\x6a\xee\xd2\x2f\x73\x01\x16\x03\x68\xbf\xb6',
b'\x04\x3e\x27\x02\x01\x02\x01\x0a\x09\x39\x1b\xf6\x79\x1b\x1a\xff\x4c\x00\x02\x15\x6a\xb1\x7c\x17\xf4\x7b\x4d\x41\x80\x36\x52\x6a\xee\xd2\x2f\x73\x01\x16\x03\x68\xbf\xb6',
# Alt Beacon
b'\x04\x3e\x28\x02\x01\x02\x01\xc9\x9b\x31\xca\x82\x69\x1c\x1b\xff\xff\xff\xbe\xac\x48\x25\x3e\x59\x72\x24\x44\x63\xb9\xb8\x03\x3f\xfa\xb5\x81\x04\x00\x7b\x01\x41\xbc\x00\xb2',
b'\x04\x3e\x28\x02\x01\x02\x01\xc9\x9b\x31\xca\x82\x69\x1c\x1b\xff\xff\xff\xbe\xac\x48\x25\x3e\x59\x72\x24\x44\x63\xb9\xb8\x03\x3f\xfa\xb5\x81\x04\x00\x7b\x01\x41\xbc\x00\xb1',
]
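# A hedged sketch of how one of these HCI LE Advertising Report captures can
# be unpacked (offsets follow the standard HCI event layout; parse_adv_report
# is illustrative only and assumes Python 3 bytes indexing):
def parse_adv_report(pkt):
    # 0x04 = HCI event packet, 0x3e = LE Meta event, subevent 0x02 = adv report
    assert pkt[0] == 0x04 and pkt[1] == 0x3e and pkt[3] == 0x02
    addr = ':'.join('%02X' % b for b in reversed(pkt[7:13]))  # little-endian BD_ADDR
    data_len = pkt[13]
    ad_data = pkt[14:14 + data_len]
    rssi = pkt[14 + data_len] - 256 if pkt[14 + data_len] > 127 else pkt[14 + data_len]
    return addr, ad_data, rssi
# e.g. parse_adv_report(bytes_only[0]) -> ('50:27:4C:D0:A0:9C', <19 AD bytes>, -76)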
more_beacons = [
b'\x04\x0e\x04\x01\x05 \x00',
b'\x04\x0e\x04\x01\x0b \x00',
b'\x04\x0e\x04\x01\x0c \x00',
b'\x04>\x1e\x02\x01\x00\x01\x95\xfdu\xa2>N\x12\x02\x01\x1a\x02\n\x0c\x0b\xffL\x00\x10\x06\x03\x1e\x86\xdc\xb98\xac',
b'\x04>\x0c\x02\x01\x04\x01\x95\xfdu\xa2>N\x00\xab',
b'\x04>(\x02\x01\x03\x00iX\xf1\xf4\xc3\x00\x1c\x1b\xffu\x00B\x04\x01\x80`\x00\xc3\xf4\xf1Xi\x02\xc3\xf4\xf1Xh\x01\x00\x00\x00\x00\x00\x00\xa7',
b'\x04>\x1f\x02\x01\x02\x01^\x89\xf3\x12\xfa_\x13\x03\x03\xaa\xfe\x0e\x16\xaa\xfe\x10\xbd\x01firstuk\x01\xb9',
b'\x04>\x0c\x02\x01\x04\x01^\x89\xf3\x12\xfa_\x00\xb9',
b"\x04>'\x02\x01\x02\x01\x8eml\xe2\x83b\x1b\x1a\xffL\x00\x02\x15j\xb1|\x17{\x00MA\x806Rj\xee\xd2/s\x01\x16\x03h\xbf\xb5",
b'\x04>\x0c\x02\x01\x04\x01\x8eml\xe2\x83b\x00\xb5',
b'\x04>\x1e\x02\x01\x00\x01W\xc32c!K\x12\x02\x01\x1a\x02\n\x0c\x0b\xffL\x00\x10\x06G\x1d\x96[\x97\x80\xac',
b'\x04>\x0c\x02\x01\x04\x01W\xc32c!K\x00\xac',
b'\x04>\x1a\x02\x01\x00\x01\x07\xbb\xd8!p\\\x0e\x02\x01\x06\n\xffL\x00\x10\x05\x01\x10\xfd\xf3\xc6\xa4',
b'\x04>&\x02\x01\x02\x01t\x85\xcb\xb2H\\\x1a\x03\x03\xaa\xfe\x15\x16\xaa\xfe\x00\xbf\xfb5\xfd\r\x17i\x1dd\xaa\x90\xab\xcd\xef\t\x87e\xb5',
b'\x04>\x0c\x02\x01\x04\x01t\x85\xcb\xb2H\\\x00\xb4',
b'\x04>(\x02\x01\x02\x01%\xae\x8a\x05S~\x1c\x03\x03\x9f\xfe\x17\x16\x9f\xfe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xae',
b'\x04>\x16\x02\x01\x04\x01%\xae\x8a\x05S~\n\t\xff\xe0\x00\x01z\xca\x86\xa1\xca\xab',
b'\x04>(\x02\x01\x02\x01p\x95\xcc,\x1fN\x1c\x1b\xff\xff\xff\xbe\xacH%>Yr$Dc\xb9\xb8\x03?\xfa\xb5\x81\x04\x00{\x01A\xbc\x00\xb9',
b'\x04>\x0c\x02\x01\x04\x01p\x95\xcc,\x1fN\x00\xb9',
b'\x04>+\x02\x01\x00\x01\xe0b\xe1\xf7v\xdc\x1f\x02\x01\x06\x11\xff3\x01\x1bd\x0e\x10\x00`\x00\xe8\x02\x0f(]\x01\x00\t\tDC76F7E1\xb0',
b'\x04>)\x02\x01\x04\x01\xe0b\xe1\xf7v\xdc\x1d\x1c\xff3\x01(e(L(*\x00\x00\x00\xf0\x02(\x00\xcf\x02\x04\x00\xe4\x02\x10\x00\x00\x00\x00\x00\xaf',
b'\x04>\x0c\x02\x01\x04\x01\x07\xbb\xd8!p\\\x00\xad',
b'\x04>)\x02\x01\x03\x01\xbeC\xe75\x82\xde\x1d\x02\x01\x06\x03\x03\xaa\xfe\x15\x16\xaa\xfe\x00\xf6\x00\x00\x00\x00resaec\x00\x00\x00\x00\x00\x0f\xb9',
b'\x04>\x1a\x02\x01\x00\x01\xc8\xd4\xf9\xf46w\x0e\x02\x01\x1a\n\xffL\x00\x10\x05\x03\x18mQ\xeb\xa9',
b'\x04>\x0c\x02\x01\x04\x01\xc8\xd4\xf9\xf46w\x00\xaa',
b'\x04>\x1d\x02\x01\x00\x01\x15\xc1\xedPH\x7f\x11\x02\x01\x1a\x02\n\x0c\n\xffL\x00\x10\x05\x03\x1c\xeeQ<\xa3',
b'\x04>\x0c\x02\x01\x04\x01\x15\xc1\xedPH\x7f\x00\xa4',
b'\x04>\x1d\x02\x01\x00\x01\x93\x0c6\x94\xf3Q\x11\x02\x01\x1a\x02\n\x0c\n\xffL\x00\x10\x05\x04\x1c\x1df\x9a\xa0',
b'\x04>\x0c\x02\x01\x04\x01\x93\x0c6\x94\xf3Q\x00\xa2',
b'\x04>\x1d\x02\x01\x00\x01\x95|\xedj^V\x11\x02\x01\x1a\x02\n\x0c\n\xffL\x00\x10\x05\x13\x18}\xdf\x0c\xa7',
b'\x04>\x0c\x02\x01\x04\x01\x95|\xedj^V\x00\xa7',
b'\x04>(\x02\x01\x03\x00k\xa0\xd0.\x04\xf8\x1c\x1b\xffu\x00B\x04\x01\x80\xac\xf8\x04.\xd0\xa0k\xfa\x04.\xd0\xa0j\x01\x17@\x00\x00\x00\x00\x9e',
b'\x04>\x1e\x02\x01\x00\x01[a\xc6\x83\xc9a\x12\x02\x01\x1a\x02\n\x0c\x0b\xffL\x00\x10\x06\x02\x1e\x96g\x0e0\x9f',
b'\x04>\x0c\x02\x01\x04\x01[a\xc6\x83\xc9a\x00\x9f',
b'\x04>\x1a\x02\x01\x00\x01\xec[Z\xf2\x1fr\x0e\x02\x01\x1a\n\xffL\x00\x10\x05\x03\x1c\xa4\xcb\xb3\xa3',
b'\x04>\x0c\x02\x01\x04\x01\xec[Z\xf2\x1fr\x00\xa4',
b'\x04>\x1d\x02\x01\x00\x01\xa7\x1d\xfed\xd5l\x11\x02\x01\x1a\x02\n\x0c\n\xffL\x00\x10\x05G\x1cz\xc0\xb2\xa1',
b'\x04>\x0c\x02\x01\x04\x01\xa7\x1d\xfed\xd5l\x00\xa2',
b'\x04>\x1a\x02\x01\x00\x01\xb1C\x93\xb2\xfad\x0e\x02\x01\x1a\n\xffL\x00\x10\x05\x13\x18\xe8\xdc\xbf\x9e',
b'\x04>+\x02\x01\x03\x01\xbb5z\xa0\xabY\x1f\x1e\xffL\x00\x07\x19\x01\x02 \x0bV\x8f\x00\x00\x00\xf8\xe40\x97\xc3\x8c\xad\xb21\xbf\x85\x0b\x14\x03\n\xc8\xa2',
b'\x04>\x1e\x02\x01\x00\x01\xfcQ\xbf\xf8\xbei\x12\x02\x01\x1a\x02\n\x08\x0b\xffL\x00\x10\x06!\x1a\x03\x86:\xeb\xa0',
b'\x04>\x0c\x02\x01\x04\x01\xb1C\x93\xb2\xfad\x00\xae',
b'\x04\x0e\x04\x01\x0c \x00',
b'\x04\x0f\x04\x00\x01\x01\x04',
b'\x04\x0e\x04\x01\x05 \x00',
b'\x04\x0e\x04\x01\x0b \x00',
b'\x04\x0e\x04\x01\x0c \x00',
b'\x04>(\x02\x01\x02\x01p\x95\xcc,\x1fN\x1c\x1b\xff\xff\xff\xbe\xacH%>Yr$Dc\xb9\xb8\x03?\xfa\xb5\x81\x04\x00{\x01A\xbc\x00\xb8',
b'\x04>\x0c\x02\x01\x04\x01p\x95\xcc,\x1fN\x00\xb8',
b'\x04>&\x02\x01\x02\x01t\x85\xcb\xb2H\\\x1a\x03\x03\xaa\xfe\x15\x16\xaa\xfe\x00\xbf\xfb5\xfd\r\x17i\x1dd\xaa\x90\xab\xcd\xef\t\x87e\xba',
b'\x04>\x0c\x02\x01\x04\x01t\x85\xcb\xb2H\\\x00\xba',
b'\x04>\x1a\x02\x01\x00\x01\x07\xbb\xd8!p\\\x0e\x02\x01\x06\n\xffL\x00\x10\x05\x01\x10\xfd\xf3\xc6\xac',
b'\x04>\x0c\x02\x01\x04\x01\x07\xbb\xd8!p\\\x00\xad',
b'\x04>\x1e\x02\x01\x00\x01\x95\xfdu\xa2>N\x12\x02\x01\x1a\x02\n\x0c\x0b\xffL\x00\x10\x06\x03\x1e\x86\xdc\xb98\xae',
b'\x04>\x0c\x02\x01\x04\x01\x95\xfdu\xa2>N\x00\xae',
b'\x04>+\x02\x01\x00\x01\xe0b\xe1\xf7v\xdc\x1f\x02\x01\x06\x11\xff3\x01\x1bd\x0e\x10\x00`\x00\xe8\x02\x0f(]\x01\x00\t\tDC76F7E1\xb0',
b'\x04>)\x02\x01\x04\x01\xe0b\xe1\xf7v\xdc\x1d\x1c\xff3\x01(e(L(*\x00\x00\x00\xf0\x02(\x00\xcf\x02\x04\x00\xe4\x02\x10\x00\x00\x00\x00\x00\xaf',
b'\x04>\x1f\x02\x01\x02\x01^\x89\xf3\x12\xfa_\x13\x03\x03\xaa\xfe\x0e\x16\xaa\xfe\x10\xbd\x01firstuk\x01\xb8',
b'\x04>\x0c\x02\x01\x04\x01^\x89\xf3\x12\xfa_\x00\xb8',
b"\x04>'\x02\x01\x02\x01\x8eml\xe2\x83b\x1b\x1a\xffL\x00\x02\x15j\xb1|\x17\xf4{MA\x806Rj\xee\xd2/s\x01\x16\x03h\xbf\xb9",
b'\x04>\x0c\x02\x01\x04\x01\x8eml\xe2\x83b\x00\xb9',
b'\x04>)\x02\x01\x03\x01\xbeC\xe75\x82\xde\x1d\x02\x01\x06\x03\x03\xaa\xfe\x15\x16\xaa\xfe\x00\xf6\x00\x00\x00\x00resaec\x00\x00\x00\x00\x00\x0f\xae',
b'\x04>(\x02\x01\x02\x01%\xae\x8a\x05S~\x1c\x03\x03\x9f\xfe\x17\x16\x9f\xfe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xa9',
b'\x04>\x16\x02\x01\x04\x01%\xae\x8a\x05S~\n\t\xff\xe0\x00\x01z\xca\x86\xa1\xca\xaa',
b'\x04>\x1e\x02\x01\x00\x01W\xc32c!K\x12\x02\x01\x1a\x02\n\x0c\x0b\xffL\x00\x10\x06G\x1d\x96[\x97\x80\xaa',
b'\x04>\x0c\x02\x01\x04\x01W\xc32c!K\x00\xab',
b'\x04>\x1a\x02\x01\x00\x01\xb1C\x93\xb2\xfad\x0e\x02\x01\x1a\n\xffL\x00\x10\x05\x13\x18\xe8\xdc\xbf\xae',
b'\x04>\x0c\x02\x01\x04\x01\xb1C\x93\xb2\xfad\x00\xae',
b'\x04>\x1d\x02\x01\x00\x01\xc2\x0e&N\xd7C\x11\x02\x01\x1a\x02\n\x0c\n\xffL\x00\x10\x05\x01\x18N\xa5\xcb\xa1',
b'\x04>\x0c\x02\x01\x04\x01\xc2\x0e&N\xd7C\x00\x9f',
b'\x04>*\x02\x01\x00\x01|\xd7\xf7w\x0c\xf1\x1e\x02\x01\x06\x11\x06\xbaV\x89\xa6\xfa\xbf\xa2\xbd\x01F}n\x00\xfb\xab\xad\x08\x16\n\x18\x16\x04s\x83\x03\xa8',
b'\x04>\x19\x02\x01\x04\x01|\xd7\xf7w\x0c\xf1\r\t\tCharge 2\x02\n\x05\xa8',
b'\x04>\x1d\x02\x01\x00\x01\xa7\x1d\xfed\xd5l\x11\x02\x01\x1a\x02\n\x0c\n\xffL\x00\x10\x05G\x1cz\xc0\xb2\xa1',
b'\x04>\x1a\x02\x01\x00\x01\xec[Z\xf2\x1fr\x0e\x02\x01\x1a\n\xffL\x00\x10\x05\x03\x1c\xa4\xcb\xb3\xa4',
b'\x04>\x0c\x02\x01\x04\x01\xec[Z\xf2\x1fr\x00\xa5',
b'\x04>\x1a\x02\x01\x00\x01\xc8\xd4\xf9\xf46w\x0e\x02\x01\x1a\n\xffL\x00\x10\x05\x03\x18mQ\xeb\xa9',
b'\x04>\x0c\x02\x01\x04\x01\xc8\xd4\xf9\xf46w\x00\xa9',
b'\x04>\x1d\x02\x01\x00\x01\x95|\xedj^V\x11\x02\x01\x1a\x02\n\x0c\n\xffL\x00\x10\x05\x13\x18}\xdf\x0c\xa7',
b'\x04>\x0c\x02\x01\x04\x01\x95|\xedj^V\x00\xa7',
b'\x04>\x1d\x02\x01\x00\x01\x93\x0c6\x94\xf3Q\x11\x02\x01\x1a\x02\n\x0c\n\xffL\x00\x10\x05\x04\x1c\x1df\x9a\xa4',
b'\x04>\x0c\x02\x01\x04\x01\x93\x0c6\x94\xf3Q\x00\xa3',
b'\x04>(\x02\x01\x03\x00iX\xf1\xf4\xc3\x00\x1c\x1b\xffu\x00B\x04\x01\x80`\x00\xc3\xf4\xf1Xi\x02\xc3\xf4\xf1Xh\x01\x00\x00\x00\x00\x00\x00\xa7',
b'\x04>\x1d\x02\x01\x00\x01\x15\xc1\xedPH\x7f\x11\x02\x01\x1a\x02\n\x0c\n\xffL\x00\x10\x05\x03\x1c\xeeQ<\xa1',
b'\x04>\x0c\x02\x01\x04\x01\x15\xc1\xedPH\x7f\x00\xa3',
b'\x04>+\x02\x01\x03\x01\xbb5z\xa0\xabY\x1f\x1e\xffL\x00\x07\x19\x01\x02 \x0bV\x8f\x00\x00\x00\xf8\xe40\x97\xc3\x8c\xad\xb21\xbf\x85\x0b\x14\x03\n\xc8\xa2',
b'\x04>\x1e\x02\x01\x00\x01[a\xc6\x83\xc9a\x12\x02\x01\x1a\x02\n\x0c\x0b\xffL\x00\x10\x06\x02\x1e\x96g\x0e0\x9d',
b'\x04>(\x02\x01\x03\x00k\xa0\xd0.\x04\xf8\x1c\x1b\xffu\x00B\x04\x01\x80\xac\xf8k\x00\xfa\x04.\xd0\xa0j\x01\x17@\x00\x00\x00\x00\xa0\x04>\x1e',
b'\x04>\x0c\x02\x01\x04\x01[a\xc6\x83\xc9a\x00\xa5',
b'\x04\x0e\x04\x01\x0c \x00',
b'\x04\x0f\x04\x00\x01\x01\x04',
b'\x04\x0e\x04\x01\x05 \x00',
b'\x04\x0e\x04\x01\x0b \x00',
b'\x04\x0e\x04\x01\x0c \x00',
b'\x04>\x1a\x02\x01\x00\x01\xec[Z\xf2\x1fr\x0e\x02\x01\x1a\n\xffL\x00\x10\x05\x03\x1c\xa4\xcb\xb3\xa7',
b'\x04>\x0c\x02\x01\x04\x01\xec[Z\xf2\x1fr\x00\xa5',
b'\x04>\x1d\x02\x01\x00\x01\x95|\xedj^V\x11\x02\x01\x1a\x02\n\x0c\n\xffL\x00\x10\x05\x13\x18}\xdf\x0c\xa4',
b'\x04>\x0c\x02\x01\x04\x01\x95|\xedj^V\x00\xa4',
b'\x04>+\x02\x01\x03\x01\xbb5z\xa0\xabY\x1f\x1e\xffL\x00\x07\x19\x01\x02 \x0bU\x8f\x00\x00\x00\xe7p\xdc\xa2\x0fO\x8c\xc6.\xf3\xac\x07\xab\xc1\xf1\x06\xac',
b'\x04>\x1e\x02\x01\x00\x01W\xc32c!K\x12\x02\x01\x1a\x02\n\x0c\x0b\xffL\x00\x10\x06K\x1d\x96[\x97\x80\xb6',
b'\x04>\x0c\x02\x01\x04\x01W\xc32c!K\x00\xb6',
b'\x04>\x1d\x02\x01\x00\x01\xc2\x0e&N\xd7C\x11\x02\x01\x1a\x02\n\x0c\n\xffL\x00\x10\x05\x01\x18N\xa5\xcb\xaa',
b'\x04>(\x02\x01\x03\x00k\xa0\xd0.\x04\xf8\x1c\x1b\xffu\x00B\x04\x01\x80\xac\xf8\xd0\x00\xa0k\xfa\x04.\xd0\xa0j\x01\x17@\x00\x00\x00\x00\xa0\x04',
b'\x04>\x1a\x02\x01\x00\x01\x07\xbb\xd8!p\\\x0e\x02\x01\x06\n\xffL\x00\x10\x05\x01\x10\xfd\xf3\xc6\xae',
b'\x04>\x0c\x02\x01\x04\x01\x07\xbb\xd8!p\\\x00\xae',
b'\x04>\x1d\x02\x01\x00\x01\x15\xc1\xedPH\x7f\x11\x02\x01\x1a\x02\n\x0c\n\xffL\x00\x1c\x00\xeeQ<\x9f\x04>',
b'\x04>)\x02\x01\x03\x01\xbeC\xe75\x82\xde\x1d\x02\x01\x06\x03\x03\xaa\xfe\x15\x16\xaa\xfe\x00\xf6\x00\x00\x00\x00resaec\x00\x00\x00\x00\x00\x0f\xb7',
b'\x04>\x1e\x02\x01\x00\x01[a\xc6\x83\xc9a\x12\x02\x01\x1a\x02\n\x0c\x0b\xffL\x00\x10\x06\x02\x1e\x96g\x0e0\xa1',
b'\x04>\x0c\x02\x01\x04\x01[a\xc6\x83\xc9a\x00\xa3',
b'\x04>\x1e\x02\x01\x00\x01\x95\xfdu\xa2>N\x12\x02\x01\x1a\x02\n\x0c\x0b\xffL\x00\x10\x06\x03\x1e\x86\xdc\xb98\xac',
b'\x04>\x0c\x02\x01\x04\x01\x95\xfdu\xa2>N\x00\xad',
b'\x04>\x1d\x02\x01\x00\x01\x93\x0c6\x94\xf3Q\x11\x02\x01\x1a\x02\n\x0c\n\xffL\x00\x04\x00>\x0c\x02\x01\x04\x01',
b'\x04>(\x02\x01\x02\x01%\xae\x8a\x05S~\x1c\x03\x03\x9f\xfe\x17\x16\x9f\xfe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xaf',
b'\x04>\x16\x02\x01\x04\x01%\xae\x8a\x05S~\n\t\xff\xe0\x00\x01z\xca\x86\xa1\xca\xb0',
b'\x04>\x1e\x02\x01\x00\x01\xfcQ\xbf\xf8\xbei\x12\x02\x01\x1a\x02\n\x08\x0b\xffL\x00:\x00\xeb\x9f\x04>\x1f\x02\x01',
b'\x04>\x0c\x02\x01\x04\x01^\x89\xf3\x12\xfa_\x00\xb5'
]
more_beacons_list = [
bytearray([0x04, 0x0e, 0x04, 0x01, 0x05, 0x20, 0x00]),
bytearray([0x04, 0x0e, 0x04, 0x01, 0x0b, 0x20, 0x00]),
bytearray([0x04, 0x0e, 0x04, 0x01, 0x0c, 0x20, 0x00]),
bytearray(
[0x04, 0x3e, 0x1e, 0x02, 0x01, 0x00, 0x01, 0x95, 0xfd, 0x75, 0xa2, 0x3e, 0x4e, 0x12, 0x02, 0x01, 0x1a, 0x02,
0x0a, 0x0c, 0x0b, 0xff, 0x4c, 0x00, 0x10, 0x06, 0x03, 0x1e, 0x86, 0xdc, 0xb9, 0x38, 0xac]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0x95, 0xfd, 0x75, 0xa2, 0x3e, 0x4e, 0x00, 0xab]),
bytearray(
[0x04, 0x3e, 0x28, 0x02, 0x01, 0x03, 0x00, 0x69, 0x58, 0xf1, 0xf4, 0xc3, 0x00, 0x1c, 0x1b, 0xff, 0x75, 0x00,
0x42, 0x04, 0x01, 0x80, 0x60, 0x00, 0xc3, 0xf4, 0xf1, 0x58, 0x69, 0x02, 0xc3, 0xf4, 0xf1, 0x58, 0x68, 0x01,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xa7]),
bytearray(
[0x04, 0x3e, 0x1f, 0x02, 0x01, 0x02, 0x01, 0x5e, 0x89, 0xf3, 0x12, 0xfa, 0x5f, 0x13, 0x03, 0x03, 0xaa, 0xfe,
0x0e, 0x16, 0xaa, 0xfe, 0x10, 0xbd, 0x01, 0x66, 0x69, 0x72, 0x73, 0x74, 0x75, 0x6b, 0x01, 0xb9]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0x5e, 0x89, 0xf3, 0x12, 0xfa, 0x5f, 0x00, 0xb9]),
bytearray(
[0x04, 0x3e, 0x27, 0x02, 0x01, 0x02, 0x01, 0x8e, 0x6d, 0x6c, 0xe2, 0x83, 0x62, 0x1b, 0x1a, 0xff, 0x4c, 0x00,
0x02, 0x15, 0x6a, 0xb1, 0x7c, 0x17, 0x7b, 0x00, 0x4d, 0x41, 0x80, 0x36, 0x52, 0x6a, 0xee, 0xd2, 0x2f, 0x73,
0x01, 0x16, 0x03, 0x68, 0xbf, 0xb5]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0x8e, 0x6d, 0x6c, 0xe2, 0x83, 0x62, 0x00, 0xb5]),
bytearray(
[0x04, 0x3e, 0x1e, 0x02, 0x01, 0x00, 0x01, 0x57, 0xc3, 0x32, 0x63, 0x21, 0x4b, 0x12, 0x02, 0x01, 0x1a, 0x02,
0x0a, 0x0c, 0x0b, 0xff, 0x4c, 0x00, 0x10, 0x06, 0x47, 0x1d, 0x96, 0x5b, 0x97, 0x80, 0xac]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0x57, 0xc3, 0x32, 0x63, 0x21, 0x4b, 0x00, 0xac]),
bytearray(
[0x04, 0x3e, 0x1a, 0x02, 0x01, 0x00, 0x01, 0x07, 0xbb, 0xd8, 0x21, 0x70, 0x5c, 0x0e, 0x02, 0x01, 0x06, 0x0a,
0xff, 0x4c, 0x00, 0x10, 0x05, 0x01, 0x10, 0xfd, 0xf3, 0xc6, 0xa4]),
bytearray(
[0x04, 0x3e, 0x26, 0x02, 0x01, 0x02, 0x01, 0x74, 0x85, 0xcb, 0xb2, 0x48, 0x5c, 0x1a, 0x03, 0x03, 0xaa, 0xfe,
0x15, 0x16, 0xaa, 0xfe, 0x00, 0xbf, 0xfb, 0x35, 0xfd, 0x0d, 0x17, 0x69, 0x1d, 0x64, 0xaa, 0x90, 0xab, 0xcd,
0xef, 0x09, 0x87, 0x65, 0xb5]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0x74, 0x85, 0xcb, 0xb2, 0x48, 0x5c, 0x00, 0xb4]),
bytearray(
[0x04, 0x3e, 0x28, 0x02, 0x01, 0x02, 0x01, 0x25, 0xae, 0x8a, 0x05, 0x53, 0x7e, 0x1c, 0x03, 0x03, 0x9f, 0xfe,
0x17, 0x16, 0x9f, 0xfe, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xae]),
bytearray(
[0x04, 0x3e, 0x16, 0x02, 0x01, 0x04, 0x01, 0x25, 0xae, 0x8a, 0x05, 0x53, 0x7e, 0x0a, 0x09, 0xff, 0xe0, 0x00,
0x01, 0x7a, 0xca, 0x86, 0xa1, 0xca, 0xab]),
bytearray(
[0x04, 0x3e, 0x28, 0x02, 0x01, 0x02, 0x01, 0x70, 0x95, 0xcc, 0x2c, 0x1f, 0x4e, 0x1c, 0x1b, 0xff, 0xff, 0xff,
0xbe, 0xac, 0x48, 0x25, 0x3e, 0x59, 0x72, 0x24, 0x44, 0x63, 0xb9, 0xb8, 0x03, 0x3f, 0xfa, 0xb5, 0x81, 0x04,
0x00, 0x7b, 0x01, 0x41, 0xbc, 0x00, 0xb9]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0x70, 0x95, 0xcc, 0x2c, 0x1f, 0x4e, 0x00, 0xb9]),
bytearray(
[0x04, 0x3e, 0x2b, 0x02, 0x01, 0x00, 0x01, 0xe0, 0x62, 0xe1, 0xf7, 0x76, 0xdc, 0x1f, 0x02, 0x01, 0x06, 0x11,
0xff, 0x33, 0x01, 0x1b, 0x64, 0x0e, 0x10, 0x00, 0x60, 0x00, 0xe8, 0x02, 0x0f, 0x28, 0x5d, 0x01, 0x00, 0x09,
0x09, 0x44, 0x43, 0x37, 0x36, 0x46, 0x37, 0x45, 0x31, 0xb0]),
bytearray(
[0x04, 0x3e, 0x29, 0x02, 0x01, 0x04, 0x01, 0xe0, 0x62, 0xe1, 0xf7, 0x76, 0xdc, 0x1d, 0x1c, 0xff, 0x33, 0x01,
0x28, 0x65, 0x28, 0x4c, 0x28, 0x2a, 0x00, 0x00, 0x00, 0xf0, 0x02, 0x28, 0x00, 0xcf, 0x02, 0x04, 0x00, 0xe4,
0x02, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0xaf]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0x07, 0xbb, 0xd8, 0x21, 0x70, 0x5c, 0x00, 0xad]),
bytearray(
[0x04, 0x3e, 0x29, 0x02, 0x01, 0x03, 0x01, 0xbe, 0x43, 0xe7, 0x35, 0x82, 0xde, 0x1d, 0x02, 0x01, 0x06, 0x03,
0x03, 0xaa, 0xfe, 0x15, 0x16, 0xaa, 0xfe, 0x00, 0xf6, 0x00, 0x00, 0x00, 0x00, 0x72, 0x65, 0x73, 0x61, 0x65,
0x63, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0f, 0xb9]),
bytearray(
[0x04, 0x3e, 0x1a, 0x02, 0x01, 0x00, 0x01, 0xc8, 0xd4, 0xf9, 0xf4, 0x36, 0x77, 0x0e, 0x02, 0x01, 0x1a, 0x0a,
0xff, 0x4c, 0x00, 0x10, 0x05, 0x03, 0x18, 0x6d, 0x51, 0xeb, 0xa9]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0xc8, 0xd4, 0xf9, 0xf4, 0x36, 0x77, 0x00, 0xaa]),
bytearray(
[0x04, 0x3e, 0x1d, 0x02, 0x01, 0x00, 0x01, 0x15, 0xc1, 0xed, 0x50, 0x48, 0x7f, 0x11, 0x02, 0x01, 0x1a, 0x02,
0x0a, 0x0c, 0x0a, 0xff, 0x4c, 0x00, 0x10, 0x05, 0x03, 0x1c, 0xee, 0x51, 0x3c, 0xa3]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0x15, 0xc1, 0xed, 0x50, 0x48, 0x7f, 0x00, 0xa4]),
bytearray(
[0x04, 0x3e, 0x1d, 0x02, 0x01, 0x00, 0x01, 0x93, 0x0c, 0x36, 0x94, 0xf3, 0x51, 0x11, 0x02, 0x01, 0x1a, 0x02,
0x0a, 0x0c, 0x0a, 0xff, 0x4c, 0x00, 0x10, 0x05, 0x04, 0x1c, 0x1d, 0x66, 0x9a, 0xa0]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0x93, 0x0c, 0x36, 0x94, 0xf3, 0x51, 0x00, 0xa2]),
bytearray(
[0x04, 0x3e, 0x1d, 0x02, 0x01, 0x00, 0x01, 0x95, 0x7c, 0xed, 0x6a, 0x5e, 0x56, 0x11, 0x02, 0x01, 0x1a, 0x02,
0x0a, 0x0c, 0x0a, 0xff, 0x4c, 0x00, 0x10, 0x05, 0x13, 0x18, 0x7d, 0xdf, 0x0c, 0xa7]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0x95, 0x7c, 0xed, 0x6a, 0x5e, 0x56, 0x00, 0xa7]),
bytearray(
[0x04, 0x3e, 0x28, 0x02, 0x01, 0x03, 0x00, 0x6b, 0xa0, 0xd0, 0x2e, 0x04, 0xf8, 0x1c, 0x1b, 0xff, 0x75, 0x00,
0x42, 0x04, 0x01, 0x80, 0xac, 0xf8, 0x04, 0x2e, 0xd0, 0xa0, 0x6b, 0xfa, 0x04, 0x2e, 0xd0, 0xa0, 0x6a, 0x01,
0x17, 0x40, 0x00, 0x00, 0x00, 0x00, 0x9e]),
bytearray(
[0x04, 0x3e, 0x1e, 0x02, 0x01, 0x00, 0x01, 0x5b, 0x61, 0xc6, 0x83, 0xc9, 0x61, 0x12, 0x02, 0x01, 0x1a, 0x02,
0x0a, 0x0c, 0x0b, 0xff, 0x4c, 0x00, 0x10, 0x06, 0x02, 0x1e, 0x96, 0x67, 0x0e, 0x30, 0x9f]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0x5b, 0x61, 0xc6, 0x83, 0xc9, 0x61, 0x00, 0x9f]),
bytearray(
[0x04, 0x3e, 0x1a, 0x02, 0x01, 0x00, 0x01, 0xec, 0x5b, 0x5a, 0xf2, 0x1f, 0x72, 0x0e, 0x02, 0x01, 0x1a, 0x0a,
0xff, 0x4c, 0x00, 0x10, 0x05, 0x03, 0x1c, 0xa4, 0xcb, 0xb3, 0xa3]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0xec, 0x5b, 0x5a, 0xf2, 0x1f, 0x72, 0x00, 0xa4]),
bytearray(
[0x04, 0x3e, 0x1d, 0x02, 0x01, 0x00, 0x01, 0xa7, 0x1d, 0xfe, 0x64, 0xd5, 0x6c, 0x11, 0x02, 0x01, 0x1a, 0x02,
0x0a, 0x0c, 0x0a, 0xff, 0x4c, 0x00, 0x10, 0x05, 0x47, 0x1c, 0x7a, 0xc0, 0xb2, 0xa1]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0xa7, 0x1d, 0xfe, 0x64, 0xd5, 0x6c, 0x00, 0xa2]),
bytearray(
[0x04, 0x3e, 0x1a, 0x02, 0x01, 0x00, 0x01, 0xb1, 0x43, 0x93, 0xb2, 0xfa, 0x64, 0x0e, 0x02, 0x01, 0x1a, 0x0a,
0xff, 0x4c, 0x00, 0x10, 0x05, 0x13, 0x18, 0xe8, 0xdc, 0xbf, 0x9e]),
bytearray(
[0x04, 0x3e, 0x2b, 0x02, 0x01, 0x03, 0x01, 0xbb, 0x35, 0x7a, 0xa0, 0xab, 0x59, 0x1f, 0x1e, 0xff, 0x4c, 0x00,
0x07, 0x19, 0x01, 0x02, 0x20, 0x0b, 0x56, 0x8f, 0x00, 0x00, 0x00, 0xf8, 0xe4, 0x30, 0x97, 0xc3, 0x8c, 0xad,
0xb2, 0x31, 0xbf, 0x85, 0x0b, 0x14, 0x03, 0x0a, 0xc8, 0xa2]),
bytearray(
[0x04, 0x3e, 0x1e, 0x02, 0x01, 0x00, 0x01, 0xfc, 0x51, 0xbf, 0xf8, 0xbe, 0x69, 0x12, 0x02, 0x01, 0x1a, 0x02,
0x0a, 0x08, 0x0b, 0xff, 0x4c, 0x00, 0x10, 0x06, 0x21, 0x1a, 0x03, 0x86, 0x3a, 0xeb, 0xa0]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0xb1, 0x43, 0x93, 0xb2, 0xfa, 0x64, 0x00, 0xae]),
bytearray([0x04, 0x0e, 0x04, 0x01, 0x0c, 0x20, 0x00]),
bytearray([0x04, 0x0f, 0x04, 0x00, 0x01, 0x01, 0x04]),
bytearray([0x04, 0x0e, 0x04, 0x01, 0x05, 0x20, 0x00]),
bytearray([0x04, 0x0e, 0x04, 0x01, 0x0b, 0x20, 0x00]),
bytearray([0x04, 0x0e, 0x04, 0x01, 0x0c, 0x20, 0x00]),
bytearray(
[0x04, 0x3e, 0x28, 0x02, 0x01, 0x02, 0x01, 0x70, 0x95, 0xcc, 0x2c, 0x1f, 0x4e, 0x1c, 0x1b, 0xff, 0xff, 0xff,
0xbe, 0xac, 0x48, 0x25, 0x3e, 0x59, 0x72, 0x24, 0x44, 0x63, 0xb9, 0xb8, 0x03, 0x3f, 0xfa, 0xb5, 0x81, 0x04,
0x00, 0x7b, 0x01, 0x41, 0xbc, 0x00, 0xb8]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0x70, 0x95, 0xcc, 0x2c, 0x1f, 0x4e, 0x00, 0xb8]),
bytearray(
[0x04, 0x3e, 0x26, 0x02, 0x01, 0x02, 0x01, 0x74, 0x85, 0xcb, 0xb2, 0x48, 0x5c, 0x1a, 0x03, 0x03, 0xaa, 0xfe,
0x15, 0x16, 0xaa, 0xfe, 0x00, 0xbf, 0xfb, 0x35, 0xfd, 0x0d, 0x17, 0x69, 0x1d, 0x64, 0xaa, 0x90, 0xab, 0xcd,
0xef, 0x09, 0x87, 0x65, 0xba]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0x74, 0x85, 0xcb, 0xb2, 0x48, 0x5c, 0x00, 0xba]),
bytearray(
[0x04, 0x3e, 0x1a, 0x02, 0x01, 0x00, 0x01, 0x07, 0xbb, 0xd8, 0x21, 0x70, 0x5c, 0x0e, 0x02, 0x01, 0x06, 0x0a,
0xff, 0x4c, 0x00, 0x10, 0x05, 0x01, 0x10, 0xfd, 0xf3, 0xc6, 0xac]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0x07, 0xbb, 0xd8, 0x21, 0x70, 0x5c, 0x00, 0xad]),
bytearray(
[0x04, 0x3e, 0x1e, 0x02, 0x01, 0x00, 0x01, 0x95, 0xfd, 0x75, 0xa2, 0x3e, 0x4e, 0x12, 0x02, 0x01, 0x1a, 0x02,
0x0a, 0x0c, 0x0b, 0xff, 0x4c, 0x00, 0x10, 0x06, 0x03, 0x1e, 0x86, 0xdc, 0xb9, 0x38, 0xae]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0x95, 0xfd, 0x75, 0xa2, 0x3e, 0x4e, 0x00, 0xae]),
bytearray(
[0x04, 0x3e, 0x2b, 0x02, 0x01, 0x00, 0x01, 0xe0, 0x62, 0xe1, 0xf7, 0x76, 0xdc, 0x1f, 0x02, 0x01, 0x06, 0x11,
0xff, 0x33, 0x01, 0x1b, 0x64, 0x0e, 0x10, 0x00, 0x60, 0x00, 0xe8, 0x02, 0x0f, 0x28, 0x5d, 0x01, 0x00, 0x09,
0x09, 0x44, 0x43, 0x37, 0x36, 0x46, 0x37, 0x45, 0x31, 0xb0]),
bytearray(
[0x04, 0x3e, 0x29, 0x02, 0x01, 0x04, 0x01, 0xe0, 0x62, 0xe1, 0xf7, 0x76, 0xdc, 0x1d, 0x1c, 0xff, 0x33, 0x01,
0x28, 0x65, 0x28, 0x4c, 0x28, 0x2a, 0x00, 0x00, 0x00, 0xf0, 0x02, 0x28, 0x00, 0xcf, 0x02, 0x04, 0x00, 0xe4,
0x02, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0xaf]),
bytearray(
[0x04, 0x3e, 0x1f, 0x02, 0x01, 0x02, 0x01, 0x5e, 0x89, 0xf3, 0x12, 0xfa, 0x5f, 0x13, 0x03, 0x03, 0xaa, 0xfe,
0x0e, 0x16, 0xaa, 0xfe, 0x10, 0xbd, 0x01, 0x66, 0x69, 0x72, 0x73, 0x74, 0x75, 0x6b, 0x01, 0xb8]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0x5e, 0x89, 0xf3, 0x12, 0xfa, 0x5f, 0x00, 0xb8]),
bytearray(
[0x04, 0x3e, 0x27, 0x02, 0x01, 0x02, 0x01, 0x8e, 0x6d, 0x6c, 0xe2, 0x83, 0x62, 0x1b, 0x1a, 0xff, 0x4c, 0x00,
0x02, 0x15, 0x6a, 0xb1, 0x7c, 0x17, 0xf4, 0x7b, 0x4d, 0x41, 0x80, 0x36, 0x52, 0x6a, 0xee, 0xd2, 0x2f, 0x73,
0x01, 0x16, 0x03, 0x68, 0xbf, 0xb9]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0x8e, 0x6d, 0x6c, 0xe2, 0x83, 0x62, 0x00, 0xb9]),
bytearray(
[0x04, 0x3e, 0x29, 0x02, 0x01, 0x03, 0x01, 0xbe, 0x43, 0xe7, 0x35, 0x82, 0xde, 0x1d, 0x02, 0x01, 0x06, 0x03,
0x03, 0xaa, 0xfe, 0x15, 0x16, 0xaa, 0xfe, 0x00, 0xf6, 0x00, 0x00, 0x00, 0x00, 0x72, 0x65, 0x73, 0x61, 0x65,
0x63, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0f, 0xae]),
bytearray(
[0x04, 0x3e, 0x28, 0x02, 0x01, 0x02, 0x01, 0x25, 0xae, 0x8a, 0x05, 0x53, 0x7e, 0x1c, 0x03, 0x03, 0x9f, 0xfe,
0x17, 0x16, 0x9f, 0xfe, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xa9]),
bytearray(
[0x04, 0x3e, 0x16, 0x02, 0x01, 0x04, 0x01, 0x25, 0xae, 0x8a, 0x05, 0x53, 0x7e, 0x0a, 0x09, 0xff, 0xe0, 0x00,
0x01, 0x7a, 0xca, 0x86, 0xa1, 0xca, 0xaa]),
bytearray(
[0x04, 0x3e, 0x1e, 0x02, 0x01, 0x00, 0x01, 0x57, 0xc3, 0x32, 0x63, 0x21, 0x4b, 0x12, 0x02, 0x01, 0x1a, 0x02,
0x0a, 0x0c, 0x0b, 0xff, 0x4c, 0x00, 0x10, 0x06, 0x47, 0x1d, 0x96, 0x5b, 0x97, 0x80, 0xaa]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0x57, 0xc3, 0x32, 0x63, 0x21, 0x4b, 0x00, 0xab]),
bytearray(
[0x04, 0x3e, 0x1a, 0x02, 0x01, 0x00, 0x01, 0xb1, 0x43, 0x93, 0xb2, 0xfa, 0x64, 0x0e, 0x02, 0x01, 0x1a, 0x0a,
0xff, 0x4c, 0x00, 0x10, 0x05, 0x13, 0x18, 0xe8, 0xdc, 0xbf, 0xae]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0xb1, 0x43, 0x93, 0xb2, 0xfa, 0x64, 0x00, 0xae]),
bytearray(
[0x04, 0x3e, 0x1d, 0x02, 0x01, 0x00, 0x01, 0xc2, 0x0e, 0x26, 0x4e, 0xd7, 0x43, 0x11, 0x02, 0x01, 0x1a, 0x02,
0x0a, 0x0c, 0x0a, 0xff, 0x4c, 0x00, 0x10, 0x05, 0x01, 0x18, 0x4e, 0xa5, 0xcb, 0xa1]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0xc2, 0x0e, 0x26, 0x4e, 0xd7, 0x43, 0x00, 0x9f]),
bytearray(
[0x04, 0x3e, 0x2a, 0x02, 0x01, 0x00, 0x01, 0x7c, 0xd7, 0xf7, 0x77, 0x0c, 0xf1, 0x1e, 0x02, 0x01, 0x06, 0x11,
0x06, 0xba, 0x56, 0x89, 0xa6, 0xfa, 0xbf, 0xa2, 0xbd, 0x01, 0x46, 0x7d, 0x6e, 0x00, 0xfb, 0xab, 0xad, 0x08,
0x16, 0x0a, 0x18, 0x16, 0x04, 0x73, 0x83, 0x03, 0xa8]),
bytearray(
[0x04, 0x3e, 0x19, 0x02, 0x01, 0x04, 0x01, 0x7c, 0xd7, 0xf7, 0x77, 0x0c, 0xf1, 0x0d, 0x09, 0x09, 0x43, 0x68,
0x61, 0x72, 0x67, 0x65, 0x20, 0x32, 0x02, 0x0a, 0x05, 0xa8]),
bytearray(
[0x04, 0x3e, 0x1d, 0x02, 0x01, 0x00, 0x01, 0xa7, 0x1d, 0xfe, 0x64, 0xd5, 0x6c, 0x11, 0x02, 0x01, 0x1a, 0x02,
0x0a, 0x0c, 0x0a, 0xff, 0x4c, 0x00, 0x10, 0x05, 0x47, 0x1c, 0x7a, 0xc0, 0xb2, 0xa1]),
bytearray(
[0x04, 0x3e, 0x1a, 0x02, 0x01, 0x00, 0x01, 0xec, 0x5b, 0x5a, 0xf2, 0x1f, 0x72, 0x0e, 0x02, 0x01, 0x1a, 0x0a,
0xff, 0x4c, 0x00, 0x10, 0x05, 0x03, 0x1c, 0xa4, 0xcb, 0xb3, 0xa4]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0xec, 0x5b, 0x5a, 0xf2, 0x1f, 0x72, 0x00, 0xa5]),
bytearray(
[0x04, 0x3e, 0x1a, 0x02, 0x01, 0x00, 0x01, 0xc8, 0xd4, 0xf9, 0xf4, 0x36, 0x77, 0x0e, 0x02, 0x01, 0x1a, 0x0a,
0xff, 0x4c, 0x00, 0x10, 0x05, 0x03, 0x18, 0x6d, 0x51, 0xeb, 0xa9]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0xc8, 0xd4, 0xf9, 0xf4, 0x36, 0x77, 0x00, 0xa9]),
bytearray(
[0x04, 0x3e, 0x1d, 0x02, 0x01, 0x00, 0x01, 0x95, 0x7c, 0xed, 0x6a, 0x5e, 0x56, 0x11, 0x02, 0x01, 0x1a, 0x02,
0x0a, 0x0c, 0x0a, 0xff, 0x4c, 0x00, 0x10, 0x05, 0x13, 0x18, 0x7d, 0xdf, 0x0c, 0xa7]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0x95, 0x7c, 0xed, 0x6a, 0x5e, 0x56, 0x00, 0xa7]),
bytearray(
[0x04, 0x3e, 0x1d, 0x02, 0x01, 0x00, 0x01, 0x93, 0x0c, 0x36, 0x94, 0xf3, 0x51, 0x11, 0x02, 0x01, 0x1a, 0x02,
0x0a, 0x0c, 0x0a, 0xff, 0x4c, 0x00, 0x10, 0x05, 0x04, 0x1c, 0x1d, 0x66, 0x9a, 0xa4]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0x93, 0x0c, 0x36, 0x94, 0xf3, 0x51, 0x00, 0xa3]),
bytearray(
[0x04, 0x3e, 0x28, 0x02, 0x01, 0x03, 0x00, 0x69, 0x58, 0xf1, 0xf4, 0xc3, 0x00, 0x1c, 0x1b, 0xff, 0x75, 0x00,
0x42, 0x04, 0x01, 0x80, 0x60, 0x00, 0xc3, 0xf4, 0xf1, 0x58, 0x69, 0x02, 0xc3, 0xf4, 0xf1, 0x58, 0x68, 0x01,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xa7]),
bytearray(
[0x04, 0x3e, 0x1d, 0x02, 0x01, 0x00, 0x01, 0x15, 0xc1, 0xed, 0x50, 0x48, 0x7f, 0x11, 0x02, 0x01, 0x1a, 0x02,
0x0a, 0x0c, 0x0a, 0xff, 0x4c, 0x00, 0x10, 0x05, 0x03, 0x1c, 0xee, 0x51, 0x3c, 0xa1]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0x15, 0xc1, 0xed, 0x50, 0x48, 0x7f, 0x00, 0xa3]),
bytearray(
[0x04, 0x3e, 0x2b, 0x02, 0x01, 0x03, 0x01, 0xbb, 0x35, 0x7a, 0xa0, 0xab, 0x59, 0x1f, 0x1e, 0xff, 0x4c, 0x00,
0x07, 0x19, 0x01, 0x02, 0x20, 0x0b, 0x56, 0x8f, 0x00, 0x00, 0x00, 0xf8, 0xe4, 0x30, 0x97, 0xc3, 0x8c, 0xad,
0xb2, 0x31, 0xbf, 0x85, 0x0b, 0x14, 0x03, 0x0a, 0xc8, 0xa2]),
bytearray(
[0x04, 0x3e, 0x1e, 0x02, 0x01, 0x00, 0x01, 0x5b, 0x61, 0xc6, 0x83, 0xc9, 0x61, 0x12, 0x02, 0x01, 0x1a, 0x02,
0x0a, 0x0c, 0x0b, 0xff, 0x4c, 0x00, 0x10, 0x06, 0x02, 0x1e, 0x96, 0x67, 0x0e, 0x30, 0x9d]),
bytearray(
[0x04, 0x3e, 0x28, 0x02, 0x01, 0x03, 0x00, 0x6b, 0xa0, 0xd0, 0x2e, 0x04, 0xf8, 0x1c, 0x1b, 0xff, 0x75, 0x00,
0x42, 0x04, 0x01, 0x80, 0xac, 0xf8, 0x6b, 0x00, 0xfa, 0x04, 0x2e, 0xd0, 0xa0, 0x6a, 0x01, 0x17, 0x40, 0x00,
0x00, 0x00, 0x00, 0xa0, 0x04, 0x3e, 0x1e]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0x5b, 0x61, 0xc6, 0x83, 0xc9, 0x61, 0x00, 0xa5]),
bytearray([0x04, 0x0e, 0x04, 0x01, 0x0c, 0x20, 0x00]),
bytearray([0x04, 0x0f, 0x04, 0x00, 0x01, 0x01, 0x04]),
bytearray([0x04, 0x0e, 0x04, 0x01, 0x05, 0x20, 0x00]),
bytearray([0x04, 0x0e, 0x04, 0x01, 0x0b, 0x20, 0x00]),
bytearray([0x04, 0x0e, 0x04, 0x01, 0x0c, 0x20, 0x00]),
bytearray(
[0x04, 0x3e, 0x1a, 0x02, 0x01, 0x00, 0x01, 0xec, 0x5b, 0x5a, 0xf2, 0x1f, 0x72, 0x0e, 0x02, 0x01, 0x1a, 0x0a,
0xff, 0x4c, 0x00, 0x10, 0x05, 0x03, 0x1c, 0xa4, 0xcb, 0xb3, 0xa7]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0xec, 0x5b, 0x5a, 0xf2, 0x1f, 0x72, 0x00, 0xa5]),
bytearray(
[0x04, 0x3e, 0x1d, 0x02, 0x01, 0x00, 0x01, 0x95, 0x7c, 0xed, 0x6a, 0x5e, 0x56, 0x11, 0x02, 0x01, 0x1a, 0x02,
0x0a, 0x0c, 0x0a, 0xff, 0x4c, 0x00, 0x10, 0x05, 0x13, 0x18, 0x7d, 0xdf, 0x0c, 0xa4]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0x95, 0x7c, 0xed, 0x6a, 0x5e, 0x56, 0x00, 0xa4]),
bytearray(
[0x04, 0x3e, 0x2b, 0x02, 0x01, 0x03, 0x01, 0xbb, 0x35, 0x7a, 0xa0, 0xab, 0x59, 0x1f, 0x1e, 0xff, 0x4c, 0x00,
0x07, 0x19, 0x01, 0x02, 0x20, 0x0b, 0x55, 0x8f, 0x00, 0x00, 0x00, 0xe7, 0x70, 0xdc, 0xa2, 0x0f, 0x4f, 0x8c,
0xc6, 0x2e, 0xf3, 0xac, 0x07, 0xab, 0xc1, 0xf1, 0x06, 0xac]),
bytearray(
[0x04, 0x3e, 0x1e, 0x02, 0x01, 0x00, 0x01, 0x57, 0xc3, 0x32, 0x63, 0x21, 0x4b, 0x12, 0x02, 0x01, 0x1a, 0x02,
0x0a, 0x0c, 0x0b, 0xff, 0x4c, 0x00, 0x10, 0x06, 0x4b, 0x1d, 0x96, 0x5b, 0x97, 0x80, 0xb6]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0x57, 0xc3, 0x32, 0x63, 0x21, 0x4b, 0x00, 0xb6]),
bytearray(
[0x04, 0x3e, 0x1d, 0x02, 0x01, 0x00, 0x01, 0xc2, 0x0e, 0x26, 0x4e, 0xd7, 0x43, 0x11, 0x02, 0x01, 0x1a, 0x02,
0x0a, 0x0c, 0x0a, 0xff, 0x4c, 0x00, 0x10, 0x05, 0x01, 0x18, 0x4e, 0xa5, 0xcb, 0xaa]),
bytearray(
[0x04, 0x3e, 0x28, 0x02, 0x01, 0x03, 0x00, 0x6b, 0xa0, 0xd0, 0x2e, 0x04, 0xf8, 0x1c, 0x1b, 0xff, 0x75, 0x00,
0x42, 0x04, 0x01, 0x80, 0xac, 0xf8, 0xd0, 0x00, 0xa0, 0x6b, 0xfa, 0x04, 0x2e, 0xd0, 0xa0, 0x6a, 0x01, 0x17,
0x40, 0x00, 0x00, 0x00, 0x00, 0xa0, 0x04]),
bytearray(
[0x04, 0x3e, 0x1a, 0x02, 0x01, 0x00, 0x01, 0x07, 0xbb, 0xd8, 0x21, 0x70, 0x5c, 0x0e, 0x02, 0x01, 0x06, 0x0a,
0xff, 0x4c, 0x00, 0x10, 0x05, 0x01, 0x10, 0xfd, 0xf3, 0xc6, 0xae]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0x07, 0xbb, 0xd8, 0x21, 0x70, 0x5c, 0x00, 0xae]),
bytearray(
[0x04, 0x3e, 0x1d, 0x02, 0x01, 0x00, 0x01, 0x15, 0xc1, 0xed, 0x50, 0x48, 0x7f, 0x11, 0x02, 0x01, 0x1a, 0x02,
0x0a, 0x0c, 0x0a, 0xff, 0x4c, 0x00, 0x1c, 0x00, 0xee, 0x51, 0x3c, 0x9f, 0x04, 0x3e]),
bytearray(
[0x04, 0x3e, 0x29, 0x02, 0x01, 0x03, 0x01, 0xbe, 0x43, 0xe7, 0x35, 0x82, 0xde, 0x1d, 0x02, 0x01, 0x06, 0x03,
0x03, 0xaa, 0xfe, 0x15, 0x16, 0xaa, 0xfe, 0x00, 0xf6, 0x00, 0x00, 0x00, 0x00, 0x72, 0x65, 0x73, 0x61, 0x65,
0x63, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0f, 0xb7]),
bytearray(
[0x04, 0x3e, 0x1e, 0x02, 0x01, 0x00, 0x01, 0x5b, 0x61, 0xc6, 0x83, 0xc9, 0x61, 0x12, 0x02, 0x01, 0x1a, 0x02,
0x0a, 0x0c, 0x0b, 0xff, 0x4c, 0x00, 0x10, 0x06, 0x02, 0x1e, 0x96, 0x67, 0x0e, 0x30, 0xa1]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0x5b, 0x61, 0xc6, 0x83, 0xc9, 0x61, 0x00, 0xa3]),
bytearray(
[0x04, 0x3e, 0x1e, 0x02, 0x01, 0x00, 0x01, 0x95, 0xfd, 0x75, 0xa2, 0x3e, 0x4e, 0x12, 0x02, 0x01, 0x1a, 0x02,
0x0a, 0x0c, 0x0b, 0xff, 0x4c, 0x00, 0x10, 0x06, 0x03, 0x1e, 0x86, 0xdc, 0xb9, 0x38, 0xac]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0x95, 0xfd, 0x75, 0xa2, 0x3e, 0x4e, 0x00, 0xad]),
bytearray(
[0x04, 0x3e, 0x1d, 0x02, 0x01, 0x00, 0x01, 0x93, 0x0c, 0x36, 0x94, 0xf3, 0x51, 0x11, 0x02, 0x01, 0x1a, 0x02,
0x0a, 0x0c, 0x0a, 0xff, 0x4c, 0x00, 0x04, 0x00, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01]),
bytearray(
[0x04, 0x3e, 0x28, 0x02, 0x01, 0x02, 0x01, 0x25, 0xae, 0x8a, 0x05, 0x53, 0x7e, 0x1c, 0x03, 0x03, 0x9f, 0xfe,
0x17, 0x16, 0x9f, 0xfe, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xaf]),
bytearray(
[0x04, 0x3e, 0x16, 0x02, 0x01, 0x04, 0x01, 0x25, 0xae, 0x8a, 0x05, 0x53, 0x7e, 0x0a, 0x09, 0xff, 0xe0, 0x00,
0x01, 0x7a, 0xca, 0x86, 0xa1, 0xca, 0xb0]),
bytearray(
[0x04, 0x3e, 0x1e, 0x02, 0x01, 0x00, 0x01, 0xfc, 0x51, 0xbf, 0xf8, 0xbe, 0x69, 0x12, 0x02, 0x01, 0x1a, 0x02,
0x0a, 0x08, 0x0b, 0xff, 0x4c, 0x00, 0x3a, 0x00, 0xeb, 0x9f, 0x04, 0x3e, 0x1f, 0x02, 0x01]),
bytearray([0x04, 0x3e, 0x0c, 0x02, 0x01, 0x04, 0x01, 0x5e, 0x89, 0xf3, 0x12, 0xfa, 0x5f, 0x00, 0xb5])
]
|
ukBaz/ble_beacon
|
tests/data/pkt_capture.py
|
Python
|
gpl-2.0
| 39,109 | 0.006648 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
try:
import torch
except ImportError:
pass # soft dep
from ray.rllib.models.action_dist import ActionDistribution
from ray.rllib.utils.annotations import override
class TorchDistributionWrapper(ActionDistribution):
"""Wrapper class for torch.distributions."""
@override(ActionDistribution)
def logp(self, actions):
return self.dist.log_prob(actions)
@override(ActionDistribution)
def entropy(self):
return self.dist.entropy()
@override(ActionDistribution)
def kl(self, other):
return torch.distributions.kl.kl_divergence(self.dist, other)
@override(ActionDistribution)
def sample(self):
return self.dist.sample()
class TorchCategorical(TorchDistributionWrapper):
"""Wrapper class for PyTorch Categorical distribution."""
@override(ActionDistribution)
def __init__(self, inputs):
self.dist = torch.distributions.categorical.Categorical(logits=inputs)
class TorchDiagGaussian(TorchDistributionWrapper):
"""Wrapper class for PyTorch Normal distribution."""
@override(ActionDistribution)
def __init__(self, inputs):
mean, log_std = torch.chunk(inputs, 2, dim=1)
self.dist = torch.distributions.normal.Normal(mean, torch.exp(log_std))
@override(TorchDistributionWrapper)
def logp(self, actions):
return TorchDistributionWrapper.logp(self, actions).sum(-1)
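# Minimal usage sketch (illustrative only, not part of RLlib; assumes torch is
# installed and that `inputs` are batched logits):
#
#   logits = torch.tensor([[0.5, 1.0, -0.3]])
#   dist = TorchCategorical(logits)
#   action = dist.sample()    # tensor of sampled action indices, shape (1,)
#   logp = dist.logp(action)  # log-probability of each sampled action
#   ent = dist.entropy()      # per-distribution entropy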
|
atumanov/ray
|
python/ray/rllib/models/torch_action_dist.py
|
Python
|
apache-2.0
| 1,516 | 0 |
# Copyright (c) 2015, Matt Layman
"""Tests for tappy"""
from tap.tests.testcase import TestCase # NOQA
|
cans/tappy-pkg
|
tap/tests/__init__.py
|
Python
|
bsd-2-clause
| 105 | 0 |
"""
.. module: lemur.auth.views
:platform: Unix
:copyright: (c) 2015 by Netflix Inc., see AUTHORS for more
:license: Apache, see LICENSE for more details.
.. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
"""
import jwt
import base64
import requests
from flask import g, Blueprint, current_app
from flask.ext.restful import reqparse, Resource, Api
from flask.ext.principal import Identity, identity_changed
from lemur.common.utils import get_psuedo_random_string
from lemur.users import service as user_service
from lemur.roles import service as role_service
from lemur.auth.service import create_token, fetch_token_header, get_rsa_public_key
mod = Blueprint('auth', __name__)
api = Api(mod)
class Login(Resource):
"""
Provides an endpoint for Lemur's basic authentication. It takes a username and password
combination and returns a JWT token.
    This token is required for each API request and must be provided in the Authorization header of the request.
::
Authorization:Bearer <token>
    Tokens have a set expiration date. You can inspect the token expiration by base64 decoding the token and inspecting
    its contents.
    .. note:: It is recommended that the token expiration be fairly short lived (hours, not days). This will largely depend \
        on your use cases. It is important to note that there is currently no built-in method to revoke a user's token \
        and force re-authentication.
"""
def __init__(self):
self.reqparse = reqparse.RequestParser()
super(Login, self).__init__()
def post(self):
"""
.. http:post:: /auth/login
Login with username:password
**Example request**:
.. sourcecode:: http
POST /auth/login HTTP/1.1
Host: example.com
Accept: application/json, text/javascript
{
"username": "test",
"password": "test"
}
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: text/javascript
{
"token": "12343243243"
}
:arg username: username
:arg password: password
:statuscode 401: invalid credentials
:statuscode 200: no error
"""
self.reqparse.add_argument('username', type=str, required=True, location='json')
self.reqparse.add_argument('password', type=str, required=True, location='json')
args = self.reqparse.parse_args()
if '@' in args['username']:
user = user_service.get_by_email(args['username'])
else:
user = user_service.get_by_username(args['username'])
if user and user.check_password(args['password']):
# Tell Flask-Principal the identity changed
identity_changed.send(current_app._get_current_object(),
identity=Identity(user.id))
return dict(token=create_token(user))
return dict(message='The supplied credentials are invalid'), 401
def get(self):
return {'username': g.current_user.username, 'roles': [r.name for r in g.current_user.roles]}
class Ping(Resource):
"""
This class serves as an example of how one might implement an SSO provider for use with Lemur. In
    this example we use an OpenIDConnect authentication flow, which is essentially OAuth2 underneath. If you have an
    OAuth2 provider you want to use with Lemur, there would be two steps:
    1. Define your own class that inherits from :class:`flask.ext.restful.Resource` and create the HTTP methods the \
    provider uses for its callbacks.
2. Add or change the Lemur AngularJS Configuration to point to your new provider
"""
def __init__(self):
self.reqparse = reqparse.RequestParser()
super(Ping, self).__init__()
def post(self):
self.reqparse.add_argument('clientId', type=str, required=True, location='json')
self.reqparse.add_argument('redirectUri', type=str, required=True, location='json')
self.reqparse.add_argument('code', type=str, required=True, location='json')
args = self.reqparse.parse_args()
# take the information we have received from the provider to create a new request
params = {
'client_id': args['clientId'],
'grant_type': 'authorization_code',
'scope': 'openid email profile address',
'redirect_uri': args['redirectUri'],
'code': args['code']
}
# you can either discover these dynamically or simply configure them
access_token_url = current_app.config.get('PING_ACCESS_TOKEN_URL')
user_api_url = current_app.config.get('PING_USER_API_URL')
        # the secret and clientId will be given to you when you sign up with the provider
basic = base64.b64encode('{0}:{1}'.format(args['clientId'], current_app.config.get("PING_SECRET")))
headers = {'Authorization': 'Basic {0}'.format(basic)}
# exchange authorization code for access token.
r = requests.post(access_token_url, headers=headers, params=params)
id_token = r.json()['id_token']
access_token = r.json()['access_token']
# fetch token public key
header_data = fetch_token_header(id_token)
jwks_url = current_app.config.get('PING_JWKS_URL')
# retrieve the key material as specified by the token header
r = requests.get(jwks_url)
for key in r.json()['keys']:
if key['kid'] == header_data['kid']:
secret = get_rsa_public_key(key['n'], key['e'])
algo = header_data['alg']
break
else:
return dict(message='Key not found'), 403
# validate your token based on the key it was signed with
try:
jwt.decode(id_token, secret, algorithms=[algo], audience=args['clientId'])
except jwt.DecodeError:
return dict(message='Token is invalid'), 403
except jwt.ExpiredSignatureError:
return dict(message='Token has expired'), 403
except jwt.InvalidTokenError:
return dict(message='Token is invalid'), 403
user_params = dict(access_token=access_token, schema='profile')
# retrieve information about the current user.
r = requests.get(user_api_url, params=user_params)
profile = r.json()
user = user_service.get_by_email(profile['email'])
# update their google 'roles'
roles = []
for group in profile['googleGroups']:
role = role_service.get_by_name(group)
if not role:
role = role_service.create(group, description='This is a google group based role created by Lemur')
roles.append(role)
# if we get an sso user create them an account
# we still pick a random password in case sso is down
if not user:
# every user is an operator (tied to a default role)
if current_app.config.get('LEMUR_DEFAULT_ROLE'):
v = role_service.get_by_name(current_app.config.get('LEMUR_DEFAULT_ROLE'))
if v:
roles.append(v)
user = user_service.create(
profile['email'],
get_psuedo_random_string(),
profile['email'],
True,
profile.get('thumbnailPhotoUrl'),
roles
)
else:
# we add 'lemur' specific roles, so they do not get marked as removed
for ur in user.roles:
if ur.authority_id:
roles.append(ur)
# update any changes to the user
user_service.update(
user.id,
profile['email'],
profile['email'],
True,
                profile.get('thumbnailPhotoUrl'), # in case profile isn't google+ enabled
roles
)
# Tell Flask-Principal the identity changed
identity_changed.send(current_app._get_current_object(), identity=Identity(user.id))
return dict(token=create_token(user))
api.add_resource(Login, '/auth/login', endpoint='login')
api.add_resource(Ping, '/auth/ping', endpoint='ping')
|
rhoml/lemur
|
lemur/auth/views.py
|
Python
|
apache-2.0
| 8,442 | 0.002961 |
"""
pluginconf.d configuration file - Files
=======================================
Shared mappers for parsing and extracting data from
``/etc/yum/pluginconf.d/*.conf`` files. Parsers contained
in this module are:
PluginConfD - files ``/etc/yum/pluginconf.d/*.conf``
---------------------------------------------------
PluginConfDIni - files ``/etc/yum/pluginconf.d/*.conf``
-------------------------------------------------------
"""
from insights.core import IniConfigFile, LegacyItemAccess, Parser
from insights.core.plugins import parser
from insights.parsers import get_active_lines
from insights.specs import Specs
from insights.util import deprecated
@parser(Specs.pluginconf_d)
class PluginConfD(LegacyItemAccess, Parser):
"""
.. warning::
This parser is deprecated, please use
:py:class:`insights.parsers.pluginconf_d.PluginConfDIni` instead
Class to parse configuration file under ``pluginconf.d``
Sample configuration::
[main]
enabled = 0
gpgcheck = 1
timeout = 120
# You can specify options per channel, e.g.:
#
#[rhel-i386-server-5]
#enabled = 1
#
#[some-unsigned-custom-channel]
#gpgcheck = 0
"""
def parse_content(self, content):
deprecated(PluginConfD, "Deprecated. Use 'PluginConfDIni' instead.")
plugin_dict = {}
section_dict = {}
key = None
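        # Walk the active (non-comment, non-blank) lines: '[section]' starts a new
        # section dict, 'key = value' records a setting, and any other line is
        # appended to the most recent key as a comma-joined continuation value.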
for line in get_active_lines(content):
if line.startswith('['):
section_dict = {}
plugin_dict[line[1:-1]] = section_dict
elif '=' in line:
key, _, value = line.partition("=")
key = key.strip()
section_dict[key] = value.strip()
else:
if key:
section_dict[key] = ','.join([section_dict[key], line])
self.data = plugin_dict
def __iter__(self):
for sec in self.data:
yield sec
@parser(Specs.pluginconf_d)
class PluginConfDIni(IniConfigFile):
"""
Read yum plugin config files, in INI format, using the standard INI file
parser class.
Sample configuration::
[main]
enabled = 0
gpgcheck = 1
timeout = 120
# You can specify options per channel, e.g.:
#
#[rhel-i386-server-5]
#enabled = 1
#
#[some-unsigned-custom-channel]
#gpgcheck = 0
[test]
test_multiline_config = http://example.com/repos/test/
http://mirror_example.com/repos/test/
Examples:
>>> type(conf)
<class 'insights.parsers.pluginconf_d.PluginConfDIni'>
>>> conf.sections()
['main', 'test']
>>> conf.has_option('main', 'gpgcheck')
True
>>> conf.get("main", "enabled")
'0'
>>> conf.getint("main", "timeout")
120
>>> conf.getboolean("main", "enabled")
False
>>> conf.get("test", "test_multiline_config")
'http://example.com/repos/test/ http://mirror_example.com/repos/test/'
"""
pass
|
RedHatInsights/insights-core
|
insights/parsers/pluginconf_d.py
|
Python
|
apache-2.0
| 3,141 | 0 |
import collections
class Solution:
def numSimilarGroups(self, A):
UF = {}
for i in range(len(A)): UF[i] = i
def find(x):
if x != UF[x]:
UF[x] = find(UF[x])
return UF[x]
def union(x, y):
UF.setdefault(x, x)
UF.setdefault(y, y)
UF[find(x)] = find(y)
def match(s1, s2):
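            # Similar if the strings differ in at most two positions; since the inputs
            # are anagrams of each other, two mismatches correspond to a single swap.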
i = 0
j = -1
while i<len(s1):
if s1[i] != s2[i]:
if j == -1: j = i
else: break
i += 1
return s1[i+1:] == s2[i+1:]
N, W = len(A), len(A[0])
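        # Two strategies, chosen by input shape:
        # - few long words (N < W*W): test every pair directly with match(), ~O(N^2 * W)
        # - many short words: index every single-swap variant of each word and union
        #   words that share a variant, ~O(N * W^2)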
if N < W*W:
for i in range(len(A)): UF[i] = i
for i in range(len(A)):
for j in range(i+1, len(A)):
if match(A[i], A[j]):
union(i, j)
else:
d = collections.defaultdict(set)
for idx, w in enumerate(A):
lw = list(w)
for i in range(W):
for j in range(i+1, W):
lw[i], lw[j] = lw[j], lw[i]
d["".join(lw)].add(idx)
lw[i], lw[j] = lw[j], lw[i]
for i, w in enumerate(A):
for j in d[w]:
union(i, j)
return len({find(x) for x in UF})
print(Solution().numSimilarGroups(["tars","rats","arts","star"]))
|
zuun77/givemegoogletshirts
|
leetcode/python/839_similar-string-groups.py
|
Python
|
apache-2.0
| 1,451 | 0.009649 |
# -*- coding: utf-8 -*-
from queue.producer import Producer
from queue.consumer import Consumer
from queue.bloom_filter import BloomFilter
class Dytt:
    @staticmethod
    def main():
        for i in range(15):
            # Producer().start()
            Consumer().start()
if __name__ == '__main__':
    Dytt.main()
|
zoucaitou/azeroth-spider
|
azeroth_spider/dytt.py
|
Python
|
mit
| 310 | 0.006452 |
"""
Helper Methods
"""
import six
def _get_key(key_or_id, key_cls):
"""
Helper method to get a course/usage key either from a string or a key_cls,
where the key_cls (CourseKey or UsageKey) will simply be returned.
"""
return (
key_cls.from_string(key_or_id)
if isinstance(key_or_id, six.string_types)
else key_or_id
)
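# Illustrative usage (a sketch; assumes the opaque_keys package that edx-platform
# depends on, and a hypothetical course id):
#
#   from opaque_keys.edx.keys import CourseKey
#   course_key = _get_key('course-v1:OrgX+CS101+2019', CourseKey)
#   assert _get_key(course_key, CourseKey) is course_key  # keys pass through unchanged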
|
ESOedX/edx-platform
|
lms/djangoapps/utils.py
|
Python
|
agpl-3.0
| 368 | 0 |
# -*- coding:utf-8 -*-
import logging
import warnings
from flypwd.config import config
with warnings.catch_warnings():
warnings.simplefilter("ignore")
from Crypto.PublicKey import RSA
from Crypto.Cipher import PKCS1_v1_5
log = logging.getLogger(__name__)
def check_key(keyfile):
"""
checks the RSA key file
raises ValueError if not valid
"""
with open(keyfile, 'r') as f:
return RSA.importKey(f.read(), passphrase="")
def gen_key():
return RSA.generate(config.getint('keys', 'dimension'))
def encrypt_with_pub(pwd, pub):
cipher = PKCS1_v1_5.new(pub)
return cipher.encrypt(pwd.encode('utf-8'))
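# Illustrative round trip (a sketch; assumes PyCrypto is available and the config
# supplies a key size such as 2048 bits):
#
#   key = gen_key()
#   ciphertext = encrypt_with_pub("s3cret", key.publickey())
#   plaintext = PKCS1_v1_5.new(key).decrypt(ciphertext, None)  # None as failure sentinel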
|
giupo/flypwd
|
flypwd/keys.py
|
Python
|
bsd-3-clause
| 658 | 0.00304 |
#!/usr/bin/env python2
"""
Copyright (c) 2015 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from myhdl import *
import os
import wb
import mcb
module = 'wb_mcb_32'
srcs = []
srcs.append("../rtl/%s.v" % module)
srcs.append("test_%s.v" % module)
src = ' '.join(srcs)
build_cmd = "iverilog -o test_%s.vvp %s" % (module, src)
def dut_wb_mcb_32(clk,
rst,
current_test,
wb_adr_i,
wb_dat_i,
wb_dat_o,
wb_we_i,
wb_sel_i,
wb_stb_i,
wb_ack_o,
wb_cyc_i,
mcb_cmd_clk,
mcb_cmd_en,
mcb_cmd_instr,
mcb_cmd_bl,
mcb_cmd_byte_addr,
mcb_cmd_empty,
mcb_cmd_full,
mcb_wr_clk,
mcb_wr_en,
mcb_wr_mask,
mcb_wr_data,
mcb_wr_empty,
mcb_wr_full,
mcb_wr_underrun,
mcb_wr_count,
mcb_wr_error,
mcb_rd_clk,
mcb_rd_en,
mcb_rd_data,
mcb_rd_empty,
mcb_rd_full,
mcb_rd_overflow,
mcb_rd_count,
mcb_rd_error):
if os.system(build_cmd):
raise Exception("Error running build command")
return Cosimulation("vvp -m myhdl test_%s.vvp -lxt2" % module,
clk=clk,
rst=rst,
current_test=current_test,
wb_adr_i=wb_adr_i,
wb_dat_i=wb_dat_i,
wb_dat_o=wb_dat_o,
wb_we_i=wb_we_i,
wb_sel_i=wb_sel_i,
wb_stb_i=wb_stb_i,
wb_ack_o=wb_ack_o,
wb_cyc_i=wb_cyc_i,
mcb_cmd_clk=mcb_cmd_clk,
mcb_cmd_en=mcb_cmd_en,
mcb_cmd_instr=mcb_cmd_instr,
mcb_cmd_bl=mcb_cmd_bl,
mcb_cmd_byte_addr=mcb_cmd_byte_addr,
mcb_cmd_empty=mcb_cmd_empty,
mcb_cmd_full=mcb_cmd_full,
mcb_wr_clk=mcb_wr_clk,
mcb_wr_en=mcb_wr_en,
mcb_wr_mask=mcb_wr_mask,
mcb_wr_data=mcb_wr_data,
mcb_wr_empty=mcb_wr_empty,
mcb_wr_full=mcb_wr_full,
mcb_wr_underrun=mcb_wr_underrun,
mcb_wr_count=mcb_wr_count,
mcb_wr_error=mcb_wr_error,
mcb_rd_clk=mcb_rd_clk,
mcb_rd_en=mcb_rd_en,
mcb_rd_data=mcb_rd_data,
mcb_rd_empty=mcb_rd_empty,
mcb_rd_full=mcb_rd_full,
mcb_rd_overflow=mcb_rd_overflow,
mcb_rd_count=mcb_rd_count,
mcb_rd_error=mcb_rd_error)
def bench():
# Parameters
# Inputs
clk = Signal(bool(0))
rst = Signal(bool(0))
current_test = Signal(intbv(0)[8:])
wb_adr_i = Signal(intbv(0)[32:])
wb_dat_i = Signal(intbv(0)[32:])
wb_we_i = Signal(bool(0))
wb_sel_i = Signal(intbv(0)[4:])
wb_stb_i = Signal(bool(0))
wb_cyc_i = Signal(bool(0))
mcb_cmd_empty = Signal(bool(0))
mcb_cmd_full = Signal(bool(0))
mcb_wr_empty = Signal(bool(0))
mcb_wr_full = Signal(bool(0))
mcb_wr_underrun = Signal(bool(0))
mcb_wr_count = Signal(intbv(0)[7:])
mcb_wr_error = Signal(bool(0))
mcb_rd_data = Signal(intbv(0)[32:])
mcb_rd_empty = Signal(bool(0))
mcb_rd_full = Signal(bool(0))
mcb_rd_overflow = Signal(bool(0))
mcb_rd_count = Signal(intbv(0)[7:])
mcb_rd_error = Signal(bool(0))
# Outputs
wb_dat_o = Signal(intbv(0)[32:])
wb_ack_o = Signal(bool(0))
mcb_cmd_clk = Signal(bool(0))
mcb_cmd_en = Signal(bool(0))
mcb_cmd_instr = Signal(intbv(0)[3:])
mcb_cmd_bl = Signal(intbv(0)[6:])
mcb_cmd_byte_addr = Signal(intbv(0)[32:])
mcb_wr_clk = Signal(bool(0))
mcb_wr_en = Signal(bool(0))
mcb_wr_mask = Signal(intbv(0)[4:])
mcb_wr_data = Signal(intbv(0)[32:])
mcb_rd_clk = Signal(bool(0))
mcb_rd_en = Signal(bool(1))
# WB master
wbm_inst = wb.WBMaster()
wbm_logic = wbm_inst.create_logic(clk,
adr_o=wb_adr_i,
dat_i=wb_dat_o,
dat_o=wb_dat_i,
we_o=wb_we_i,
sel_o=wb_sel_i,
stb_o=wb_stb_i,
ack_i=wb_ack_o,
cyc_o=wb_cyc_i,
name='master')
# MCB model
mcb_inst = mcb.MCB(2**16)
mcb_controller = mcb_inst.create_controller(clk, rst)
mcb_port0 = mcb_inst.create_readwrite_port(cmd_clk=mcb_cmd_clk,
cmd_en=mcb_cmd_en,
cmd_instr=mcb_cmd_instr,
cmd_bl=mcb_cmd_bl,
cmd_byte_addr=mcb_cmd_byte_addr,
cmd_empty=mcb_cmd_empty,
cmd_full=mcb_cmd_full,
wr_clk=mcb_wr_clk,
wr_en=mcb_wr_en,
wr_mask=mcb_wr_mask,
wr_data=mcb_wr_data,
wr_empty=mcb_wr_empty,
wr_full=mcb_wr_full,
wr_underrun=mcb_wr_underrun,
wr_count=mcb_wr_count,
wr_error=mcb_wr_error,
rd_clk=mcb_rd_clk,
rd_en=mcb_rd_en,
rd_data=mcb_rd_data,
rd_empty=mcb_rd_empty,
rd_full=mcb_rd_full,
rd_overflow=mcb_rd_overflow,
rd_count=mcb_rd_count,
rd_error=mcb_rd_error,
name='port0')
# DUT
dut = dut_wb_mcb_32(clk,
rst,
current_test,
wb_adr_i,
wb_dat_i,
wb_dat_o,
wb_we_i,
wb_sel_i,
wb_stb_i,
wb_ack_o,
wb_cyc_i,
mcb_cmd_clk,
mcb_cmd_en,
mcb_cmd_instr,
mcb_cmd_bl,
mcb_cmd_byte_addr,
mcb_cmd_empty,
mcb_cmd_full,
mcb_wr_clk,
mcb_wr_en,
mcb_wr_mask,
mcb_wr_data,
mcb_wr_empty,
mcb_wr_full,
mcb_wr_underrun,
mcb_wr_count,
mcb_wr_error,
mcb_rd_clk,
mcb_rd_en,
mcb_rd_data,
mcb_rd_empty,
mcb_rd_full,
mcb_rd_overflow,
mcb_rd_count,
mcb_rd_error)
@always(delay(4))
def clkgen():
clk.next = not clk
@instance
def check():
yield delay(100)
yield clk.posedge
rst.next = 1
yield clk.posedge
rst.next = 0
yield clk.posedge
yield delay(100)
yield clk.posedge
yield clk.posedge
print("test 1: write")
current_test.next = 1
wbm_inst.init_write(4, '\x11\x22\x33\x44')
yield wbm_inst.wait()
yield clk.posedge
data = mcb_inst.read_mem(0, 32)
for i in range(0, len(data), 16):
print(" ".join("{:02x}".format(ord(c)) for c in data[i:i+16]))
assert mcb_inst.read_mem(4,4) == '\x11\x22\x33\x44'
yield delay(100)
yield clk.posedge
print("test 2: read")
current_test.next = 2
wbm_inst.init_read(4, 4)
yield wbm_inst.wait()
yield clk.posedge
data = wbm_inst.get_read_data()
assert data[0] == 4
assert data[1] == '\x11\x22\x33\x44'
yield delay(100)
yield clk.posedge
print("test 3: various writes")
current_test.next = 3
for length in range(1,8):
for offset in range(4):
wbm_inst.init_write(256*(16*offset+length)+offset, '\x11\x22\x33\x44\x55\x66\x77\x88'[0:length])
yield wbm_inst.wait()
yield clk.posedge
data = mcb_inst.read_mem(256*(16*offset+length), 32)
for i in range(0, len(data), 16):
print(" ".join("{:02x}".format(ord(c)) for c in data[i:i+16]))
assert mcb_inst.read_mem(256*(16*offset+length)+offset,length) == '\x11\x22\x33\x44\x55\x66\x77\x88'[0:length]
yield delay(100)
yield clk.posedge
print("test 4: various reads")
current_test.next = 4
for length in range(1,8):
for offset in range(4):
wbm_inst.init_read(256*(16*offset+length)+offset, length)
yield wbm_inst.wait()
yield clk.posedge
data = wbm_inst.get_read_data()
assert data[0] == 256*(16*offset+length)+offset
assert data[1] == '\x11\x22\x33\x44\x55\x66\x77\x88'[0:length]
yield delay(100)
raise StopSimulation
return dut, wbm_logic, mcb_controller, mcb_port0, clkgen, check
def test_bench():
sim = Simulation(bench())
sim.run()
if __name__ == '__main__':
print("Running test...")
test_bench()
|
alexforencich/hdg2000
|
fpga/tb/test_wb_mcb_32.py
|
Python
|
mit
| 10,990 | 0.012648 |
#### ZarcFit.py #### for interactive model fitting of spectral electrical impedance observations.
# Seogi Kang and Randy Enkin, developed starting November 2015.
# Based on ZarcFit.vi, written in LabView by Randy Enkin, Geological Survey of Canada
# Using Python version 3.4 and QT version 4.8
#
# requires files ZarcFit2015-12-01.ui, ZarcfitCalculations.py, whichsystem.py
import numpy as np
import sys, glob, os, time
from PyQt4 import QtGui, QtCore
from PyQt4.uic import loadUiType
import matplotlib
matplotlib.use('Qt4Agg')
from matplotlib.figure import Figure
import matplotlib.gridspec as gridspec
import matplotlib.pyplot as plt
from matplotlib.backends.backend_qt4agg import (
FigureCanvasQTAgg as FigureCanvas,
NavigationToolbar2QT as NavigationToolbar)
from ZarcfitCalculations import *
from whichsystem import whichsystem
matplotlib.rcParams['axes.facecolor']="white"
Ui_MainWindow, QMainWindow = loadUiType('ZarcFit2016-01-26.ui')
class PathPicker(QtGui.QWidget):
pathNameStr = None
def __init__(self, ZarcFitWindow, parent=None):
# create GUI
super(PathPicker, self).__init__()
self.setWindowTitle('path picker')
# Set the window dimensions
self.resize(300,75)
# vertical layout for widgets
self.vbox = QtGui.QVBoxLayout()
self.setLayout(self.vbox)
# Create a label which displays the path to our chosen path
self.lbl = QtGui.QLabel('No path selected')
self.vbox.addWidget(self.lbl)
# Create a push button labelled 'choose' and add it to our layout
btn = QtGui.QPushButton('Choose path', self)
self.vbox.addWidget(btn)
# Connect the clicked signal to the getPathName handler
self.connect(btn, QtCore.SIGNAL('clicked()'), self.getPathName)
# Create a push button labelled 'Return' and add it to our layout
btn1 = QtGui.QPushButton('Return to main window', self)
self.vbox.addWidget(btn1)
# Connect the clicked signal to the getPathName handler
self.connect(btn1, QtCore.SIGNAL('clicked()'), self.close)
# Connect to ZarcFitWindow
self.ZarcFitWindow = ZarcFitWindow
def getPathName(self):
"""
Handler called when 'choose path' is clicked
"""
# When you call getOpenPathName, a path picker dialog is created
# and if the user selects a path, it's path is returned, and if not
# (ie, the user cancels the operation) None is returned
fname = QtGui.QFileDialog.getExistingDirectory(self, "Select Path")
self.pathNameStr = str(fname)
if fname:
self.lbl.setText(fname)
self.ZarcFitWindow.pathNameStr = fname
self.ZarcFitWindow.lineEditPath.setText(fname)
self.ZarcFitWindow.getObsFName()
with open(scriptPath+mysys.filesep+"ZarcFit.ini", "w") as ini_file:
print(fname, file=ini_file)
else:
self.lbl.setText('No path selected')
class Main(QMainWindow, Ui_MainWindow):
fwdType = "series"
plotType = "bode"
axComplexReal = None
axComplexImag = None
obsFName = None
nFreq = None
freqIndLow = None
freqIndHigh = None
frequencyOrig = None
obsOrig = None
t0 = None
forcePlot = False
thresholdtime = 0.2
def __init__(ZarcFitWindow, pathNameStr, zarc, obs, frequency):
super(Main, ZarcFitWindow).__init__()
ZarcFitWindow.setupUi(ZarcFitWindow)
ZarcFitWindow.zarc = zarc
ZarcFitWindow.obs = obs
        ZarcFitWindow.obsOrig = obs.copy()
        ZarcFitWindow.PathPickerWindow = PathPicker(ZarcFitWindow)
        # Set-up frequency range
        ZarcFitWindow.frequency = frequency
        ZarcFitWindow.frequencyOrig = frequency.copy()
        ZarcFitWindow.nFreq = ZarcFitWindow.frequency.size
        ZarcFitWindow.spinBoxHighFreq.setValue(0)
        ZarcFitWindow.labelHighFreq.setText("{:,}".format(ZarcFitWindow.frequencyOrig[0])+" Hz")
        ZarcFitWindow.freqIndLow = 0
        ZarcFitWindow.spinBoxLowFreq.setValue(frequencyN-1)
        ZarcFitWindow.labelLowFreq.setText("{:,}".format(ZarcFitWindow.frequencyOrig[-1])+" Hz")
        ZarcFitWindow.freqIndHigh = ZarcFitWindow.nFreq
ZarcFitWindow.initializeFigure()
ZarcFitWindow.addmplCole()
ZarcFitWindow.t0 = time.time()
# super(Main, ZarcFitWindow).__init__()
# ZarcFitWindow.setupUi(ZarcFitWindow)
# ZarcFitWindow.t0 = time.time()
# ZarcFitWindow.zarc = zarc
# ZarcFitWindow.obs = obs
# ZarcFitWindow.obsOrig = obs.copy()
# ZarcFitWindow.PathPickerWindow = PathPicker(ZarcFitWindow)
# # Set-up frequency range
# ZarcFitWindow.frequency = frequency
# ZarcFitWindow.frequencyOrig = frequency.copy()
# ZarcFitWindow.nFreq = ZarcFitWindow.frequency.size
# ZarcFitWindow.spinBoxHighFreq.setValue(0)
# ZarcFitWindow.labelHighFreq.setText("{:,}".format(ZarcFitWindow.frequencyOrig[0])+" Hz")
# ZarcFitWindow.freqIndLow = 0
# ZarcFitWindow.spinBoxLowFreq.setValue(frequencyN-1)
# ZarcFitWindow.labelLowFreq.setText("{:,}".format(ZarcFitWindow.frequencyOrig[-1])+" Hz")
# ZarcFitWindow.freqIndHigh = ZarcFitWindow.nFreq
# ZarcFitWindow.initializeFigure()
#Read in Obs files from path in ZarcFit.ini and plot first file
ZarcFitWindow.pathNameStr = pathNameStr
ZarcFitWindow.lineEditPath.setText(pathNameStr)
ZarcFitWindow.getObsFName()
# Observed Data File Events
ZarcFitWindow.actionSelect_Path.triggered.connect(ZarcFitWindow.PickPath)
ZarcFitWindow.actionSelect_Parameter_File.triggered.connect(ZarcFitWindow.SelectParameterFile)
ZarcFitWindow.actionObs_File_Type.triggered.connect(ZarcFitWindow.SelectObsFileType)
ZarcFitWindow.actionNext_Obs_File.triggered.connect(ZarcFitWindow.NextObsFile)
ZarcFitWindow.pushButtonNextFile.clicked.connect(ZarcFitWindow.NextObsFile)
ZarcFitWindow.actionPrev_Obs_File.triggered.connect(ZarcFitWindow.PrevObsFile)
ZarcFitWindow.pushButtonPrevFile.clicked.connect(ZarcFitWindow.PrevObsFile)
ZarcFitWindow.spinBoxObsFileNumber.valueChanged.connect(ZarcFitWindow.ReadObsFile)
#Model Fitting Events
ZarcFitWindow.actionF1_Fit_Spectrum_Cartesian_Cole.triggered.connect(ZarcFitWindow.FitCole)
ZarcFitWindow.pushButtonFitCole.clicked.connect(ZarcFitWindow.FitCole)
ZarcFitWindow.actionF2_Fit_Spectrum_Polar_Bode.triggered.connect(ZarcFitWindow.FitBode)
ZarcFitWindow.pushButtonFitBode.clicked.connect(ZarcFitWindow.FitBode)
#Frequency Range Events
ZarcFitWindow.spinBoxHighFreq.valueChanged.connect(ZarcFitWindow.updateHighFreq)
ZarcFitWindow.spinBoxLowFreq.valueChanged.connect(ZarcFitWindow.updateLowFreq)
ZarcFitWindow.actionF3_All_Freq_s.triggered.connect(ZarcFitWindow.AllFreqs)
ZarcFitWindow.pushButtonAllFreqs.clicked.connect(ZarcFitWindow.AllFreqs)
#Parameter File Events
ZarcFitWindow.actionF7_Read_Parameters.triggered.connect(ZarcFitWindow.ReadParameters)
ZarcFitWindow.pushButtonReadParams.clicked.connect(ZarcFitWindow.ReadParameters)
ZarcFitWindow.actionF8_Default_Start_Model.triggered.connect(ZarcFitWindow.DefaultStartModel)
ZarcFitWindow.pushButtonDefaultParams.clicked.connect(ZarcFitWindow.DefaultStartModel)
ZarcFitWindow.actionWrite_Header.triggered.connect(ZarcFitWindow.WriteHeader)
ZarcFitWindow.actionF4_Write_Fit.triggered.connect(ZarcFitWindow.WriteParam)
ZarcFitWindow.pushButtonWriteParam.clicked.connect(ZarcFitWindow.WriteParam)
ZarcFitWindow.actionOptions.triggered.connect(ZarcFitWindow.Options)
#Help Events
ZarcFitWindow.actionZarcFit_Help.triggered.connect(ZarcFitWindow.ZarcFitHelp)
ZarcFitWindow.actionAbout_ZarcFit.triggered.connect(ZarcFitWindow.AboutZarcFit)
#Connect parameter sliders
ZarcFitWindow.SliderLinf.valueChanged.connect(ZarcFitWindow.updateSldOutLinf)
ZarcFitWindow.SldOutLinf.textChanged.connect(ZarcFitWindow.updateSliderLinf)
ZarcFitWindow.SliderRinf.valueChanged.connect(ZarcFitWindow.updateSldOutRinf)
ZarcFitWindow.SldOutRinf.textChanged.connect(ZarcFitWindow.updateSliderRinf)
ZarcFitWindow.SliderRh.valueChanged.connect(ZarcFitWindow.updateSldOutRh)
ZarcFitWindow.SldOutRh.textChanged.connect(ZarcFitWindow.updateSliderRh)
ZarcFitWindow.SliderFh.valueChanged.connect(ZarcFitWindow.updateSldOutFh)
ZarcFitWindow.SldOutFh.textChanged.connect(ZarcFitWindow.updateSliderFh)
ZarcFitWindow.SliderPh.valueChanged.connect(ZarcFitWindow.updateSldOutPh)
ZarcFitWindow.SldOutPh.textChanged.connect(ZarcFitWindow.updateSliderPh)
ZarcFitWindow.SliderRm.valueChanged.connect(ZarcFitWindow.updateSldOutRm)
ZarcFitWindow.SldOutRm.textChanged.connect(ZarcFitWindow.updateSliderRm)
ZarcFitWindow.SliderFm.valueChanged.connect(ZarcFitWindow.updateSldOutFm)
ZarcFitWindow.SldOutFm.textChanged.connect(ZarcFitWindow.updateSliderFm)
ZarcFitWindow.SliderPm.valueChanged.connect(ZarcFitWindow.updateSldOutPm)
ZarcFitWindow.SldOutPm.textChanged.connect(ZarcFitWindow.updateSliderPm)
ZarcFitWindow.SliderRl.valueChanged.connect(ZarcFitWindow.updateSldOutRl)
ZarcFitWindow.SldOutRl.textChanged.connect(ZarcFitWindow.updateSliderRl)
ZarcFitWindow.SliderFl.valueChanged.connect(ZarcFitWindow.updateSldOutFl)
ZarcFitWindow.SldOutFl.textChanged.connect(ZarcFitWindow.updateSliderFl)
ZarcFitWindow.SliderPl.valueChanged.connect(ZarcFitWindow.updateSldOutPl)
ZarcFitWindow.SldOutPl.textChanged.connect(ZarcFitWindow.updateSliderPl)
ZarcFitWindow.SliderRe.valueChanged.connect(ZarcFitWindow.updateSldOutRe)
ZarcFitWindow.SldOutRe.textChanged.connect(ZarcFitWindow.updateSliderRe)
ZarcFitWindow.SliderQe.valueChanged.connect(ZarcFitWindow.updateSldOutQe)
ZarcFitWindow.SldOutQe.textChanged.connect(ZarcFitWindow.updateSliderQe)
ZarcFitWindow.SliderPef.valueChanged.connect(ZarcFitWindow.updateSldOutPef)
ZarcFitWindow.SldOutPef.textChanged.connect(ZarcFitWindow.updateSliderPef)
ZarcFitWindow.SliderPei.valueChanged.connect(ZarcFitWindow.updateSldOutPei)
ZarcFitWindow.SldOutPei.textChanged.connect(ZarcFitWindow.updateSliderPei)
#Connect QRadiobutton
ZarcFitWindow.radioButtonSerial.clicked.connect(ZarcFitWindow.updateRadiOutSerial)
ZarcFitWindow.radioButtonParallel.clicked.connect(ZarcFitWindow.updateRadiOutParallel)
ZarcFitWindow.radioButtonBodePlots.clicked.connect(ZarcFitWindow.updateRadiOutBodePlots)
ZarcFitWindow.radioButtonComplexPlots.clicked.connect(ZarcFitWindow.updateRadiOutComplexPlots)
#### Matplotlib window ####
def initializeFigure(ZarcFitWindow):
hmlFreq = np.array([ZarcFitWindow.zarc.Fh,
ZarcFitWindow.zarc.Fm,
ZarcFitWindow.zarc.Fl,])
figCole = plt.figure(figsize=(30,30), facecolor="white")
gs = gridspec.GridSpec(7, 7)
axCole = figCole.add_subplot(gs[:, :3]) #Left
axColeRT = figCole.add_subplot( gs[:3,4:]) #Right-Top
axColeRB = figCole.add_subplot(gs[4:,4:]) #Right-Bottom
if ZarcFitWindow.radioButtonSerial.isChecked():
Z = ZarcFitWindow.zarc.Zseries(ZarcFitWindow.frequency)
Zhml = ZarcFitWindow.zarc.Zseries(hmlFreq)
elif ZarcFitWindow.radioButtonParallel.isChecked():
Z = ZarcFitWindow.zarc.Zparallel(ZarcFitWindow.frequency)
Zhml = ZarcFitWindow.zarc.Zparallel(hmlFreq)
else:
Exception("Not implemented!! choose either series or parallel")
mergedZreal = np.append(np.concatenate((Z.real, obs.real)), 0.)
mergedZimag = np.append(np.concatenate((-Z.imag, -obs.imag)), 0.)
# Cole-Cole Plot: Real vs Imag
lineColeZeroImag,= axCole.plot([min(mergedZreal), max(mergedZreal)], [0., 0.],
color='salmon', linewidth=1)
lineColeZeroReal,= axCole.plot([0., 0.], [min(mergedZimag), max(mergedZimag)],
color='salmon', linewidth=1)
lineColeFh, =axCole.plot(Zhml[0].real, -Zhml[0].imag,
color='red', marker='+', markersize=20, markeredgewidth=2)
lineColeFm, =axCole.plot(Zhml[1].real, -Zhml[1].imag,
color='green', marker='+', markersize=20, markeredgewidth=2)
lineColeFl, =axCole.plot(Zhml[2].real, -Zhml[2].imag,
color='blue', marker='+', markersize=20, markeredgewidth=2)
lineCole,= axCole.plot(Z.real, -Z.imag,
color='cyan', marker='D', markersize=3, linewidth=2)
lineColeobs,= axCole.plot(obs.real, -obs.imag,
color='green', marker='s', markersize=2, linewidth=1)
# axCole.invert_yaxis()
axCole.set_xlabel("Real [kOhm]")
axCole.set_ylabel("-Imag [kOhm]")
axColexlim = axCole.set_xlim(0., mergedZreal.max())
axColeylim = axCole.set_ylim(0., mergedZimag.max())
axCole.hold (False)
if ZarcFitWindow.radioButtonBodePlots.isChecked():
lineColeRTFh, =axColeRT.plot(ZarcFitWindow.zarc.Fh, abs(Zhml[0]),
color='red', marker='+', markersize=20, markeredgewidth=2)
lineColeRTFm, =axColeRT.plot(ZarcFitWindow.zarc.Fm, abs(Zhml[1]),
color='green', marker='+', markersize=20, markeredgewidth=2)
lineColeRTFl, =axColeRT.plot(ZarcFitWindow.zarc.Fl, abs(Zhml[2]),
color='blue', marker='+', markersize=20, markeredgewidth=2)
lineColeRTobs, = axColeRT.loglog(frequency, abs(obs),
color='green', marker='s', markersize=2, linewidth=1)
lineColeRTpred, = axColeRT.loglog(frequency, abs(Z),
color='cyan', marker='D', markersize=3, linewidth=2)
lineColeRBFh, =axColeRB.plot(ZarcFitWindow.zarc.Fh, abs(np.angle(Zhml[0], deg=True)),
color='red', marker='+', markersize=20, markeredgewidth=2)
lineColeRBFm, =axColeRB.plot(ZarcFitWindow.zarc.Fm, abs(np.angle(Zhml[1], deg=True)),
color='green', marker='+', markersize=20, markeredgewidth=2)
lineColeRBFl, =axColeRB.plot(ZarcFitWindow.zarc.Fl, abs(np.angle(Zhml[2], deg=True)),
color='blue', marker='+', markersize=20, markeredgewidth=2)
lineColeRBobs,= axColeRB.loglog(frequency, abs(np.angle(obs, deg=True)),
color='green', marker='s', markersize=2, linewidth=1)
lineColeRBpred,= axColeRB.loglog(frequency, abs(np.angle(Z, deg=True)),
color='cyan', marker='D', markersize=3, linewidth=2)
axColeRT.set_ylabel("Total Impedance [Ohm]")
axColeRB.set_ylabel("abs(Phase) [deg]")
elif ZarcFitWindow.radioButtonComplexPlots.isChecked():
lineColeRTFh, =axColeRT.plot(ZarcFitWindow.zarc.Fh, Zhml[0].real,
color='red', marker='+', markersize=20, markeredgewidth=2)
lineColeRTFm, =axColeRT.plot(ZarcFitWindow.zarc.Fm, Zhml[1].real,
color='green', marker='+', markersize=20, markeredgewidth=2)
lineColeRTFl, =axColeRT.plot(ZarcFitWindow.zarc.Fl, Zhml[2].real,
color='blue', marker='+', markersize=20, markeredgewidth=2)
lineColeRTobs, = axColeRT.loglog(frequency, obs.real,
color='green', marker='s', markersize=2, linewidth=1)
lineColeRTpred, = axColeRT.loglog(frequency, Z.real,
color='cyan', marker='D', markersize=3, linewidth=2)
lineColeRBFh, =axColeRB.plot(ZarcFitWindow.zarc.Fh, abs(Zhml[0].imag),
color='red', marker='+', markersize=20, markeredgewidth=2)
lineColeRBFm, =axColeRB.plot(ZarcFitWindow.zarc.Fm, abs(Zhml[1].imag),
color='green', marker='+', markersize=20, markeredgewidth=2)
lineColeRBFl, =axColeRB.plot(ZarcFitWindow.zarc.Fl, abs(Zhml[2].imag),
color='blue', marker='+', markersize=20, markeredgewidth=2)
lineColeRBobs,= axColeRB.loglog(frequency, abs(obs.imag),
color='green', marker='s', markersize=2, linewidth=1)
lineColeRBpred,= axColeRB.loglog(frequency, abs(Z.imag),
color='cyan', marker='D', markersize=3, linewidth=2)
axColeRT.set_ylabel("Real [Ohm]")
axColeRB.set_ylabel("abs(Imag) [Ohm]")
else:
Exception("Not implemented!! choose either bode or complex")
axColeRT.invert_xaxis()
axColeRT.set_xlabel("Frequency [Hz]")
axColeRT.xaxis.set_ticks_position('none')
axColeRT.yaxis.set_ticks_position('none')
# axColeRT.legend(("Obs","Pred"), bbox_to_anchor=(1.25, 1.), fontsize = 10)
axColeRT.hold (False)
ZarcFitWindow.radioButtonBodePlots.clicked.connect(ZarcFitWindow.updateRadiOutBodePlots)
ZarcFitWindow.radioButtonComplexPlots.clicked.connect(ZarcFitWindow.updateRadiOutComplexPlots)
axColeRB.invert_xaxis()
axColeRB.set_xlabel("Frequency [Hz]")
axColeRB.xaxis.set_ticks_position('none')
axColeRB.yaxis.set_ticks_position('none')
axColeRB.hold (False)
figColebackground = figCole.canvas.copy_from_bbox(figCole.bbox)
ZarcFitWindow.figCole = figCole
ZarcFitWindow.figColebackground = figColebackground
ZarcFitWindow.axCole = axCole
ZarcFitWindow.axColeRT = axColeRT
ZarcFitWindow.axColeRB = axColeRB
ZarcFitWindow.lineColeFh = lineColeFh
ZarcFitWindow.lineColeFm = lineColeFm
ZarcFitWindow.lineColeFl = lineColeFl
ZarcFitWindow.lineColeRTFh = lineColeRTFh
ZarcFitWindow.lineColeRTFm = lineColeRTFm
ZarcFitWindow.lineColeRTFl = lineColeRTFl
ZarcFitWindow.lineColeRBFh = lineColeRBFh
ZarcFitWindow.lineColeRBFm = lineColeRBFm
ZarcFitWindow.lineColeRBFl = lineColeRBFl
ZarcFitWindow.lineCole = lineCole
ZarcFitWindow.lineColeRTpred = lineColeRTpred
ZarcFitWindow.lineColeRBpred = lineColeRBpred
ZarcFitWindow.lineColeobs = lineColeobs
ZarcFitWindow.lineColeRTobs = lineColeRTobs
ZarcFitWindow.lineColeRBobs = lineColeRBobs
def addmplCole(ZarcFitWindow):
ZarcFitWindow.canvas = FigureCanvas(ZarcFitWindow.figCole)
ZarcFitWindow.mplCole.addWidget(ZarcFitWindow.canvas)
ZarcFitWindow.canvas.draw()
ZarcFitWindow.toolbar = NavigationToolbar(ZarcFitWindow.canvas, ZarcFitWindow, coordinates=True)
ZarcFitWindow.addToolBar(ZarcFitWindow.toolbar)
def updateFigs(ZarcFitWindow):
ZarcFitWindow.t1 = time.time()
elapsedTime = ZarcFitWindow.t1-ZarcFitWindow.t0
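        # Throttle redraws: skip the expensive canvas update unless enough time has
        # elapsed since the last draw (thresholdtime) or a redraw was explicitly forced.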
if elapsedTime > ZarcFitWindow.thresholdtime or ZarcFitWindow.forcePlot:
ZarcFitWindow.t0 = ZarcFitWindow.t1
ZarcFitWindow.forcePlot = False
hmlFreq = np.array([ZarcFitWindow.zarc.Fh,
ZarcFitWindow.zarc.Fm,
ZarcFitWindow.zarc.Fl,])
ZarcFitWindow.figCole.canvas.restore_region(ZarcFitWindow.figColebackground)
if ZarcFitWindow.radioButtonSerial.isChecked():
Z = ZarcFitWindow.zarc.Zseries(ZarcFitWindow.frequency)
Zhml = ZarcFitWindow.zarc.Zseries(hmlFreq)
elif ZarcFitWindow.radioButtonParallel.isChecked():
Z = ZarcFitWindow.zarc.Zparallel(ZarcFitWindow.frequency)
Zhml = ZarcFitWindow.zarc.Zparallel(hmlFreq)
else:
Exception("Not implemented!! choose either series or parallel")
vminR, vmaxR = (np.r_[Z.real, ZarcFitWindow.obs.real]).min(), (np.r_[Z.real, ZarcFitWindow.obs.real]).max()
vminI, vmaxI = (np.r_[-Z.imag, -ZarcFitWindow.obs.imag]).min(),(np.r_[-Z.imag, -ZarcFitWindow.obs.imag]).max()
ZarcFitWindow.lineCole.set_data(Z.real, -Z.imag)
ZarcFitWindow.lineCole.axes.set_xlim(0., vmaxR*1.2)
ZarcFitWindow.lineCole.axes.set_ylim(vminI, vmaxI*1.2)
ZarcFitWindow.lineColeFh.set_data(Zhml[0].real, -Zhml[0].imag)
ZarcFitWindow.lineColeFm.set_data(Zhml[1].real, -Zhml[1].imag)
ZarcFitWindow.lineColeFl.set_data(Zhml[2].real, -Zhml[2].imag)
ZarcFitWindow.lineColeobs.set_data(ZarcFitWindow.obs.real, -ZarcFitWindow.obs.imag)
ZarcFitWindow.figCole.draw_artist(ZarcFitWindow.figCole.patch)
ZarcFitWindow.axCole.draw_artist(ZarcFitWindow.axCole.patch)
ZarcFitWindow.axCole.draw_artist(ZarcFitWindow.axCole.get_yaxis())
ZarcFitWindow.axCole.draw_artist(ZarcFitWindow.axCole.get_xaxis())
ZarcFitWindow.axCole.draw_artist(ZarcFitWindow.lineColeFh)
ZarcFitWindow.axCole.draw_artist(ZarcFitWindow.lineColeFm)
ZarcFitWindow.axCole.draw_artist(ZarcFitWindow.lineColeFl)
ZarcFitWindow.axCole.draw_artist(ZarcFitWindow.lineCole)
ZarcFitWindow.axCole.draw_artist(ZarcFitWindow.lineColeobs)
ZarcFitWindow.axCole.draw_artist(ZarcFitWindow.axCole.spines['left'])
ZarcFitWindow.axCole.draw_artist(ZarcFitWindow.axCole.spines['right'])
ZarcFitWindow.axCole.draw_artist(ZarcFitWindow.axCole.spines['bottom'])
ZarcFitWindow.axCole.draw_artist(ZarcFitWindow.axCole.spines['top'])
if ZarcFitWindow.radioButtonBodePlots.isChecked():
zpredabs = abs(Z)
zobsabs = abs(ZarcFitWindow.obs)
ZarcFitWindow.lineColeRTpred.set_data(ZarcFitWindow.frequency, zpredabs)
ZarcFitWindow.lineColeRTobs.set_data(ZarcFitWindow.frequency,zobsabs)
zpredphase = abs(np.angle(Z, deg=True))
zobsphase = abs(np.angle(ZarcFitWindow.obs, deg=True))
ZarcFitWindow.lineColeRBpred.set_data(ZarcFitWindow.frequency, zpredphase)
ZarcFitWindow.lineColeRBobs.set_data(ZarcFitWindow.frequency, zobsphase)
vminAbs, vmaxAbs = (np.r_[zpredabs, zobsabs]).min(), (np.r_[zpredabs, zobsabs]).max()
vminPhase, vmaxPhase = (np.r_[zpredphase, zobsphase]).min(), (np.r_[zpredphase, zobsphase]).max()
ZarcFitWindow.lineColeRTpred.axes.set_ylim(vminAbs*0.8, vmaxAbs*1.2)
ZarcFitWindow.lineColeRBpred.axes.set_ylim(vminPhase*0.8, vmaxPhase*1.2)
ZarcFitWindow.lineColeRTpred.axes.set_ylabel("Total Impedance [Ohm]")
ZarcFitWindow.lineColeRBpred.axes.set_ylabel("abs(Phase) [deg]")
ZarcFitWindow.lineColeRTFh.set_data(ZarcFitWindow.zarc.Fh, abs(Zhml[0]))
ZarcFitWindow.lineColeRTFm.set_data(ZarcFitWindow.zarc.Fm, abs(Zhml[1]))
ZarcFitWindow.lineColeRTFl.set_data(ZarcFitWindow.zarc.Fl, abs(Zhml[2]))
ZarcFitWindow.lineColeRBFh.set_data(ZarcFitWindow.zarc.Fh, abs(np.angle(Zhml[0], deg=True) ))
ZarcFitWindow.lineColeRBFm.set_data(ZarcFitWindow.zarc.Fm, abs(np.angle(Zhml[1], deg=True) ))
ZarcFitWindow.lineColeRBFl.set_data(ZarcFitWindow.zarc.Fl, abs(np.angle(Zhml[2], deg=True) ))
elif ZarcFitWindow.radioButtonComplexPlots.isChecked():
zpredreal = Z.real
zobsreal = ZarcFitWindow.obs.real
ZarcFitWindow.lineColeRTpred.set_data(ZarcFitWindow.frequency, zpredreal)
ZarcFitWindow.lineColeRTobs.set_data(ZarcFitWindow.frequency, zobsreal)
zpredimag = -Z.imag
zobsimag = -ZarcFitWindow.obs.imag
ZarcFitWindow.lineColeRBpred.set_data(ZarcFitWindow.frequency, zpredimag)
ZarcFitWindow.lineColeRBobs.set_data(ZarcFitWindow.frequency, zobsimag)
ZarcFitWindow.lineColeRTpred.axes.set_ylim(vminR*0.8, vmaxR*1.2)
                ZarcFitWindow.lineColeRBpred.axes.set_ylim(vminI*0.8, vmaxI*1.2)
ZarcFitWindow.lineColeRTpred.axes.set_ylabel("Real [Ohm]")
ZarcFitWindow.lineColeRBpred.axes.set_ylabel("abs(Imag) [Ohm]")
ZarcFitWindow.lineColeRTFh.set_data(ZarcFitWindow.zarc.Fh, Zhml[0].real)
ZarcFitWindow.lineColeRTFm.set_data(ZarcFitWindow.zarc.Fm, Zhml[1].real)
ZarcFitWindow.lineColeRTFl.set_data(ZarcFitWindow.zarc.Fl, Zhml[2].real)
ZarcFitWindow.lineColeRBFh.set_data(ZarcFitWindow.zarc.Fh, abs(Zhml[0].imag))
ZarcFitWindow.lineColeRBFm.set_data(ZarcFitWindow.zarc.Fm, abs(Zhml[1].imag))
ZarcFitWindow.lineColeRBFl.set_data(ZarcFitWindow.zarc.Fl, abs(Zhml[2].imag))
else:
Exception("Not implemented!! choose either bode or complex")
ZarcFitWindow.axColeRT.draw_artist(ZarcFitWindow.axColeRT.patch)
ZarcFitWindow.axColeRT.draw_artist(ZarcFitWindow.axColeRT.get_yaxis())
ZarcFitWindow.axColeRT.draw_artist(ZarcFitWindow.axColeRT.get_xaxis())
ZarcFitWindow.axColeRT.draw_artist(ZarcFitWindow.axColeRT.spines['left'])
ZarcFitWindow.axColeRT.draw_artist(ZarcFitWindow.axColeRT.spines['right'])
ZarcFitWindow.axColeRT.draw_artist(ZarcFitWindow.axColeRT.spines['bottom'])
ZarcFitWindow.axColeRT.draw_artist(ZarcFitWindow.axColeRT.spines['top'])
ZarcFitWindow.axColeRT.draw_artist(ZarcFitWindow.lineColeRTFh)
ZarcFitWindow.axColeRT.draw_artist(ZarcFitWindow.lineColeRTFm)
ZarcFitWindow.axColeRT.draw_artist(ZarcFitWindow.lineColeRTFl)
ZarcFitWindow.axColeRT.draw_artist(ZarcFitWindow.lineColeRTpred)
ZarcFitWindow.axColeRT.draw_artist(ZarcFitWindow.lineColeRTobs)
ZarcFitWindow.axColeRB.draw_artist(ZarcFitWindow.axColeRB.patch)
ZarcFitWindow.axColeRB.draw_artist(ZarcFitWindow.axColeRB.get_yaxis())
ZarcFitWindow.axColeRB.draw_artist(ZarcFitWindow.axColeRB.get_xaxis())
ZarcFitWindow.axColeRB.draw_artist(ZarcFitWindow.axColeRB.spines['left'])
ZarcFitWindow.axColeRB.draw_artist(ZarcFitWindow.axColeRB.spines['right'])
ZarcFitWindow.axColeRB.draw_artist(ZarcFitWindow.axColeRB.spines['bottom'])
ZarcFitWindow.axColeRB.draw_artist(ZarcFitWindow.axColeRB.spines['top'])
ZarcFitWindow.axColeRB.draw_artist(ZarcFitWindow.lineColeRBFh)
ZarcFitWindow.axColeRB.draw_artist(ZarcFitWindow.lineColeRBFm)
ZarcFitWindow.axColeRB.draw_artist(ZarcFitWindow.lineColeRBFl)
ZarcFitWindow.axColeRB.draw_artist(ZarcFitWindow.lineColeRBpred)
ZarcFitWindow.axColeRB.draw_artist(ZarcFitWindow.lineColeRBobs)
ZarcFitWindow.figCole.canvas.update()
#### Menus and Buttons ####
# # # Files # # #
def PickPath(ZarcFitWindow):
ZarcFitWindow.PathPickerWindow.show()
def getObsFName(ZarcFitWindow):
ZarcFitWindow.obsFName = []
ZarcFitWindow.obsData = []
os.chdir(ZarcFitWindow.pathNameStr)
# Read *.z file in the path
for file in glob.glob("*.z"):
ZarcFitWindow.obsFName.append(file)
tempObs = np.loadtxt(file, skiprows=11, delimiter=',')
ZarcFitWindow.obsData.append(tempObs)
# print (ZarcFitWindow.PathPickerWindow.pathNameStr+ZarcFitWindow.filesep+ZarcFitWindow.filesep+file)
ZarcFitWindow.obsFNameDirSize = len(ZarcFitWindow.obsFName)
# Set maximum filenumber in ui
ZarcFitWindow.horizontalSliderObsFileNumber.setMaximum(ZarcFitWindow.obsFNameDirSize-1)
ZarcFitWindow.spinBoxObsFileNumber.setMaximum(ZarcFitWindow.obsFNameDirSize-1)
ZarcFitWindow.label_LastFile.setText(str(ZarcFitWindow.obsFNameDirSize-1))
#Read in first Obs file
ZarcFitWindow.ReadObsFile(0)
ZarcFitWindow.spinBoxObsFileNumber.setValue(0)
print (ZarcFitWindow.obsFNameDirSize, ZarcFitWindow.pathNameStr)
# ZarcFitWindow.forcePlot = True
# ZarcFitWindow.updateFigs()
def ReadObsFile(ZarcFitWindow, value):
ZarcFitWindow.obs = ZarcFitWindow.obsData[value][:,4]+ZarcFitWindow.obsData[value][:,5]*1j
ZarcFitWindow.obsOrig = ZarcFitWindow.obs.copy()
ZarcFitWindow.frequency = ZarcFitWindow.obsData[value][:,0]
ZarcFitWindow.frequencyOrig = ZarcFitWindow.frequency.copy()
ZarcFitWindow.nFreq = ZarcFitWindow.frequency.size
ZarcFitWindow.spinBoxHighFreq.setValue(0)
ZarcFitWindow.labelHighFreq.setText("{:,}".format(ZarcFitWindow.frequencyOrig[0])+" Hz")
ZarcFitWindow.freqIndLow = 0
ZarcFitWindow.spinBoxLowFreq.setValue(ZarcFitWindow.nFreq-1)
ZarcFitWindow.spinBoxLowFreq.setMaximum(ZarcFitWindow.nFreq-1)
ZarcFitWindow.labelLowFreq.setText("{:,}".format(ZarcFitWindow.frequencyOrig[-1])+" Hz")
ZarcFitWindow.freqIndHigh = ZarcFitWindow.nFreq
ZarcFitWindow.forcePlot = True
ZarcFitWindow.updateFigs()
print (value, ZarcFitWindow.obsFName[value], ZarcFitWindow.nFreq, ZarcFitWindow.frequencyOrig[0],ZarcFitWindow.frequencyOrig[-1],)
ZarcFitWindow.lineEditObsFName.setText(ZarcFitWindow.obsFName[value])
def SelectParameterFile(ZarcFitWindow):
print ("SelectParameterFile")
def SelectObsFileType(ZarcFitWindow):
print ("SelectObsFileType")
def Options(ZarcFitWindow):
print ("Options")
print(scriptPath+mysys.filesep+"ZarcFit.ini")
with open(scriptPath+mysys.filesep+"ZarcFit.ini", "r") as ini_file:
pathNameStr1 = ini_file.read()
print (pathNameStr1)
def NextObsFile(ZarcFitWindow):
ZarcFitWindow.spinBoxObsFileNumber.setValue(ZarcFitWindow.spinBoxObsFileNumber.value() + 1)
def PrevObsFile(ZarcFitWindow):
ZarcFitWindow.spinBoxObsFileNumber.setValue(ZarcFitWindow.spinBoxObsFileNumber.value() - 1)
# # # Fits # # #
def updateHighFreq(ZarcFitWindow, value):
ZarcFitWindow.spinBoxHighFreq.setValue(value)
ZarcFitWindow.labelHighFreq.setText("{:,}".format(ZarcFitWindow.frequencyOrig[value])+" Hz")
ZarcFitWindow.freqIndLow = value
ZarcFitWindow.frequency = ZarcFitWindow.frequencyOrig[ZarcFitWindow.freqIndLow:ZarcFitWindow.freqIndHigh]
ZarcFitWindow.obs = ZarcFitWindow.obsOrig[ZarcFitWindow.freqIndLow:ZarcFitWindow.freqIndHigh]
ZarcFitWindow.updateFigs()
def updateLowFreq(ZarcFitWindow, value):
ZarcFitWindow.spinBoxLowFreq.setValue(value)
ZarcFitWindow.labelLowFreq.setText("{:,}".format(ZarcFitWindow.frequencyOrig[value])+" Hz")
ZarcFitWindow.freqIndHigh = value+1
ZarcFitWindow.frequency = ZarcFitWindow.frequencyOrig[ZarcFitWindow.freqIndLow:ZarcFitWindow.freqIndHigh]
ZarcFitWindow.obs = ZarcFitWindow.obsOrig[ZarcFitWindow.freqIndLow:ZarcFitWindow.freqIndHigh]
ZarcFitWindow.updateFigs()
def AllFreqs(ZarcFitWindow):
ZarcFitWindow.spinBoxHighFreq.setValue(0)
ZarcFitWindow.labelHighFreq.setText("{:,}".format(ZarcFitWindow.frequencyOrig[0])+" Hz")
ZarcFitWindow.freqIndLow = 0
        ZarcFitWindow.spinBoxLowFreq.setValue(ZarcFitWindow.nFreq-1)
        ZarcFitWindow.labelLowFreq.setText("{:,}".format(ZarcFitWindow.frequencyOrig[ZarcFitWindow.nFreq-1])+" Hz")
        ZarcFitWindow.freqIndHigh = ZarcFitWindow.nFreq
ZarcFitWindow.obs = ZarcFitWindow.obsOrig[ZarcFitWindow.freqIndLow:ZarcFitWindow.freqIndHigh]
ZarcFitWindow.updateFigs()
def ReadParameters(ZarcFitWindow):
print ("ReadParameters")
def DefaultStartModel(ZarcFitWindow):
ZarcFitWindow.zarc.Linf, ZarcFitWindow.zarc.Rinf, ZarcFitWindow.zarc.Rh, ZarcFitWindow.zarc.Fh, \
ZarcFitWindow.zarc.Ph, ZarcFitWindow.zarc.Rl, ZarcFitWindow.zarc.Fl, ZarcFitWindow.zarc.Pl, \
ZarcFitWindow.zarc.Rm, ZarcFitWindow.zarc.Fm, ZarcFitWindow.zarc.Pm, ZarcFitWindow.zarc.Re, \
ZarcFitWindow.zarc.Qe, ZarcFitWindow.zarc.Pef, ZarcFitWindow.zarc.Pei = SetDefaultParameters()
ZarcFitWindow.SldOutLinf.setText("{:.2E}".format(ZarcFitWindow.zarc.Linf))
ZarcFitWindow.SldOutRinf.setText("{:.2E}".format(ZarcFitWindow.zarc.Rinf))
ZarcFitWindow.SldOutRh.setText("{:.2E}".format(ZarcFitWindow.zarc.Rh))
ZarcFitWindow.SldOutFh.setText("{:.2E}".format(ZarcFitWindow.zarc.Fh))
ZarcFitWindow.SldOutPh.setText("{:.3f}".format(ZarcFitWindow.zarc.Ph))
ZarcFitWindow.SldOutRm.setText("{:.2E}".format(ZarcFitWindow.zarc.Rm))
ZarcFitWindow.SldOutFm.setText("{:.2E}".format(ZarcFitWindow.zarc.Fm))
ZarcFitWindow.SldOutPm.setText("{:.3f}".format(ZarcFitWindow.zarc.Pm))
ZarcFitWindow.SldOutRl.setText("{:.2E}".format(ZarcFitWindow.zarc.Rl))
ZarcFitWindow.SldOutFl.setText("{:.2E}".format(ZarcFitWindow.zarc.Fl))
ZarcFitWindow.SldOutPl.setText("{:.3f}".format(ZarcFitWindow.zarc.Pl))
ZarcFitWindow.SldOutRe.setText("{:.2E}".format(ZarcFitWindow.zarc.Re))
ZarcFitWindow.SldOutQe.setText("{:.2E}".format(ZarcFitWindow.zarc.Qe))
ZarcFitWindow.SldOutPef.setText("{:.3f}".format(ZarcFitWindow.zarc.Pef))
ZarcFitWindow.SldOutPei.setText("{:.3f}".format(ZarcFitWindow.zarc.Pei))
ZarcFitWindow.forcePlot = True
ZarcFitWindow.updateFigs()
print ("DefaultStartModel")
def FitCole(ZarcFitWindow):
print ("Fit Cole")
def FitBode(ZarcFitWindow):
print ("Fit Bode")
def WriteHeader(ZarcFitWindow):
print ("Write Header")
def WriteParam(ZarcFitWindow):
print ("Write Param")
def updateRadiOutSerial(ZarcFitWindow, value):
ZarcFitWindow.fwdType = "series"
ZarcFitWindow.updateFigs()
def updateRadiOutParallel(ZarcFitWindow, value):
ZarcFitWindow.fwdType = "parallel"
ZarcFitWindow.updateFigs()
def updateRadiOutBodePlots(ZarcFitWindow, value):
ZarcFitWindow.updateFigs()
def updateRadiOutComplexPlots(ZarcFitWindow, value):
ZarcFitWindow.updateFigs()
# # # Help # # #
def ZarcFitHelp(ZarcFitWindow):
print ("ZarcFitHelp")
def AboutZarcFit(ZarcFitWindow):
print ("AboutZarcFit")
#### Update Sliders ####
def updateSldOutLinf(ZarcFitWindow, value):
Linf = 10**(value/1000.)
ZarcFitWindow.SldOutLinf.setText("{:.2E}".format(Linf))
ZarcFitWindow.zarc.Linf = Linf
# ZarcFitWindow.updateFigs()
def updateSliderLinf(ZarcFitWindow, value):
Linf = float(value)
ZarcFitWindow.SliderLinf.setValue(int(np.log10(Linf)*1000.))
ZarcFitWindow.zarc.Linf = Linf
ZarcFitWindow.updateFigs()
def updateSldOutRinf(ZarcFitWindow, value):
Rinf = 10**(value/1000.)
ZarcFitWindow.SldOutRinf.setText("{:.2E}".format(Rinf))
ZarcFitWindow.zarc.Rinf = Rinf
# ZarcFitWindow.updateFigs()
def updateSliderRinf(ZarcFitWindow, value):
Rinf = float(value)
ZarcFitWindow.SliderRinf.setValue(int(np.log10(Rinf)*1000.))
ZarcFitWindow.zarc.Rinf = Rinf
ZarcFitWindow.updateFigs()
def updateSldOutRh(ZarcFitWindow, value):
Rh = 10**(value/1000.)
ZarcFitWindow.SldOutRh.setText("{:.2E}".format(Rh))
ZarcFitWindow.zarc.Rh = Rh
# ZarcFitWindow.updateFigs()
def updateSliderRh(ZarcFitWindow, value):
Rh = float(value)
ZarcFitWindow.SliderRh.setValue(int(np.log10(Rh)*1000.))
ZarcFitWindow.zarc.Rh = Rh
ZarcFitWindow.updateFigs()
def updateSldOutFh(ZarcFitWindow, value):
Fh = 10**(value/1000.)
ZarcFitWindow.SldOutFh.setText("{:.2E}".format(Fh))
ZarcFitWindow.zarc.Fh = Fh
# ZarcFitWindow.updateFigs()
def updateSliderFh(ZarcFitWindow, value):
Fh = float(value)
ZarcFitWindow.SliderFh.setValue(int(np.log10(Fh)*1000.))
ZarcFitWindow.zarc.Fh = Fh
ZarcFitWindow.updateFigs()
def updateSldOutPh(ZarcFitWindow, value):
Ph = value/1000.
ZarcFitWindow.SldOutPh.setText("{:.3f}".format(Ph))
ZarcFitWindow.zarc.Ph = Ph
# ZarcFitWindow.updateFigs()
def updateSliderPh(ZarcFitWindow, value):
Ph = float(value)
ZarcFitWindow.SliderPh.setValue(Ph*1000)
ZarcFitWindow.zarc.Ph = Ph
ZarcFitWindow.updateFigs()
def updateSldOutRm(ZarcFitWindow, value):
Rm = 10**(value/1000.)
ZarcFitWindow.SldOutRm.setText("{:.2E}".format(Rm))
ZarcFitWindow.zarc.Rm = Rm
# ZarcFitWindow.updateFigs()
def updateSliderRm(ZarcFitWindow, value):
Rm = float(value)
ZarcFitWindow.SliderRm.setValue(int(np.log10(Rm)*1000.))
ZarcFitWindow.zarc.Rm = Rm
ZarcFitWindow.updateFigs()
def updateSldOutFm(ZarcFitWindow, value):
Fm = 10**(value/1000.)
ZarcFitWindow.SldOutFm.setText("{:.2E}".format(Fm))
ZarcFitWindow.zarc.Fm = Fm
# ZarcFitWindow.updateFigs()
def updateSliderFm(ZarcFitWindow, value):
Fm = float(value)
ZarcFitWindow.SliderFm.setValue(int(np.log10(Fm)*1000.))
ZarcFitWindow.zarc.Fm = Fm
ZarcFitWindow.updateFigs()
def updateSldOutPm(ZarcFitWindow, value):
Pm = value/1000.
ZarcFitWindow.SldOutPm.setText("{:.3f}".format(Pm))
ZarcFitWindow.zarc.Pm = Pm
# ZarcFitWindow.updateFigs()
def updateSliderPm(ZarcFitWindow, value):
Pm = float(value)
ZarcFitWindow.SliderPm.setValue(Pm*1000)
ZarcFitWindow.zarc.Pm = Pm
ZarcFitWindow.updateFigs()
def updateSldOutRl(ZarcFitWindow, value):
Rl = 10**(value/1000.)
ZarcFitWindow.SldOutRl.setText("{:.2E}".format(Rl))
ZarcFitWindow.zarc.Rl = Rl
# ZarcFitWindow.updateFigs()
def updateSliderRl(ZarcFitWindow, value):
Rl = float(value)
ZarcFitWindow.SliderRl.setValue(int(np.log10(Rl)*1000.))
ZarcFitWindow.zarc.Rl = Rl
ZarcFitWindow.updateFigs()
def updateSldOutFl(ZarcFitWindow, value):
Fl = 10**(value/1000.)
ZarcFitWindow.SldOutFl.setText("{:.2E}".format(Fl))
ZarcFitWindow.zarc.Fl = Fl
# ZarcFitWindow.updateFigs()
def updateSliderFl(ZarcFitWindow, value):
Fl = float(value)
ZarcFitWindow.SliderFl.setValue(int(np.log10(Fl)*1000.))
ZarcFitWindow.zarc.Fl = Fl
ZarcFitWindow.updateFigs()
def updateSldOutPl(ZarcFitWindow, value):
Pl = value/1000.
ZarcFitWindow.SldOutPl.setText("{:.3f}".format(Pl))
ZarcFitWindow.zarc.Pl = Pl
# ZarcFitWindow.updateFigs()
def updateSliderPl(ZarcFitWindow, value):
Pl = float(value)
ZarcFitWindow.SliderPl.setValue(Pl*1000)
ZarcFitWindow.zarc.Pl = Pl
ZarcFitWindow.updateFigs()
def updateSldOutRe(ZarcFitWindow, value):
Re = 10**(value/1000.)
ZarcFitWindow.SldOutRe.setText("{:.2E}".format(Re))
ZarcFitWindow.zarc.Re = Re
# ZarcFitWindow.updateFigs()
def updateSliderRe(ZarcFitWindow, value):
Re = float(value)
ZarcFitWindow.SliderRe.setValue(int(np.log10(Re)*1000.))
ZarcFitWindow.zarc.Re = Re
# ZarcFitWindow.updateFigs()
def updateSldOutQe(ZarcFitWindow, value):
Qe = 10**(value/1000.)
ZarcFitWindow.SldOutQe.setText("{:.2E}".format(Qe))
ZarcFitWindow.zarc.Qe = Qe
# ZarcFitWindow.updateFigs()
def updateSliderQe(ZarcFitWindow, value):
Qe = float(value)
ZarcFitWindow.SliderQe.setValue(int(np.log10(Qe)*1000.))
ZarcFitWindow.zarc.Qe = Qe
ZarcFitWindow.updateFigs()
def updateSldOutPef(ZarcFitWindow, value):
Pef = value/1000.
ZarcFitWindow.SldOutPef.setText("{:.3f}".format(Pef))
ZarcFitWindow.zarc.Pef = Pef
# ZarcFitWindow.updateFigs()
def updateSliderPef(ZarcFitWindow, value):
Pef = float(value)
ZarcFitWindow.SliderPef.setValue(Pef*1000)
ZarcFitWindow.zarc.Pef = Pef
ZarcFitWindow.updateFigs()
def updateSldOutPei(ZarcFitWindow, value):
Pei = value/1000.
ZarcFitWindow.SldOutPei.setText("{:.3f}".format(Pei))
ZarcFitWindow.zarc.Pei = Pei
# ZarcFitWindow.updateFigs()
def updateSliderPei(ZarcFitWindow, value):
Pei = float(value)
ZarcFitWindow.SliderPei.setValue(Pei*1000)
ZarcFitWindow.zarc.Pei = Pei
ZarcFitWindow.updateFigs()
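# Note on the slider callbacks above: the Qt sliders hold integer positions,
# which are mapped to physical parameter values as 10**(position/1000.) for the
# log-scaled quantities (Linf, Rinf, Rh, Fh, Rm, Fm, Rl, Fl, Re, Qe) and as
# position/1000. for the dimensionless exponents (Ph, Pm, Pl, Pef, Pei).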
def SetDefaultParameters():
Linf = 1.E-4
Rinf = 1.E4
Rh = 1.E5
Fh = 1e5
Ph = 0.8
Rm = 1e-1
Fm = 1e-1
Pm = 0.5
Rl = 1.E4
Fl = 1.e1
Pl = 0.5
Re = 1.E10
Qe = 1.E-4
Pef = 0.5
Pei = 0.05
return Linf, Rinf, Rh, Fh, Ph, Rl, Fl, Pl, Rm, Fm, Pm, Re, Qe, Pef, Pei
###############################################################################
###############################################################################
if __name__ == '__main__':
#Read initial data
mysys = whichsystem()
mysys.run()
scriptPath = os.getcwd()
print(scriptPath+mysys.filesep+"ZarcFit.ini")
with open(scriptPath+mysys.filesep+"ZarcFit.ini", "r") as ini_file:
pathNameStr = ini_file.read()
pathNameStr = pathNameStr.rstrip('\n')
################ The following lines are unnecessary, as Main now starts by reading the pathNameStr directory.
path = "../data/HVC2014_10Grenon/"
fnameobs = "BC13867-A 2014-10-23.z"
pathobs = path+fnameobs
temp = np.loadtxt(pathobs, skiprows=11, delimiter=",")
obs = temp[:,4]+1j*temp[:,5]
frequency = temp[:,0].copy()
frequencyN = len(frequency)
zarc = ZarcfitCalculations(obs, frequency)
Linf, Rinf, Rh, Fh, Ph, Rl, Fl, Pl, Rm, Fm, Pm, Re, Qe, Pef, Pei = SetDefaultParameters()
zarc.SetParametersSeries(Linf, Rinf, Rh, Fh, Ph, Rl, Fl, Pl, Rm, Fm, Pm, Re, Qe, Pef, Pei)
# Start the ZarcFit GUI
app = QtGui.QApplication(sys.argv)
main = Main(pathNameStr, zarc, obs, frequency)
# main.addmplCole() ####### Moved to ZarcFitWindow.__init__
main.show()
sys.exit(app.exec_())
|
sgkang/PhysPropIP
|
codes/ZarcFit2016-01-26.py
|
Python
|
mit
| 44,212 | 0.011626 |
# vim: ts=4:sw=4:expandtab
# -*- coding: UTF-8 -*-
# BleachBit
# Copyright (C) 2008-2015 Andrew Ziem
# http://bleachbit.sourceforge.net
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Preferences dialog
"""
import gtk
import os
import sys
import traceback
from Common import _, _p, online_update_notification_enabled
from Options import options
import GuiBasic
if 'nt' == os.name:
import Windows
if 'posix' == os.name:
import Unix
LOCATIONS_WHITELIST = 1
LOCATIONS_CUSTOM = 2
class PreferencesDialog:
"""Present the preferences dialog and save changes"""
def __init__(self, parent, cb_refresh_operations):
self.cb_refresh_operations = cb_refresh_operations
self.parent = parent
self.dialog = gtk.Dialog(title=_("Preferences"),
parent=parent,
flags=gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT)
self.dialog.set_default_size(300, 200)
notebook = gtk.Notebook()
notebook.append_page(self.__general_page(), gtk.Label(_("General")))
notebook.append_page(self.__locations_page(
LOCATIONS_CUSTOM), gtk.Label(_("Custom")))
notebook.append_page(self.__drives_page(), gtk.Label(_("Drives")))
if 'posix' == os.name:
notebook.append_page(
self.__languages_page(), gtk.Label(_("Languages")))
notebook.append_page(self.__locations_page(
LOCATIONS_WHITELIST), gtk.Label(_("Whitelist")))
self.dialog.vbox.pack_start(notebook, True)
self.dialog.add_button(gtk.STOCK_CLOSE, gtk.RESPONSE_CLOSE)
def __toggle_callback(self, cell, path):
"""Callback function to toggle option"""
options.toggle(path)
if online_update_notification_enabled:
self.cb_beta.set_sensitive(options.get('check_online_updates'))
if 'nt' == os.name:
self.cb_winapp2.set_sensitive(
options.get('check_online_updates'))
if 'auto_hide' == path:
self.cb_refresh_operations()
if 'auto_start' == path:
if 'nt' == os.name:
swc = Windows.start_with_computer
if 'posix' == os.name:
swc = Unix.start_with_computer
try:
swc(options.get(path))
except:
traceback.print_exc()
dlg = gtk.MessageDialog(self.parent,
type=gtk.MESSAGE_ERROR,
buttons=gtk.BUTTONS_OK,
message_format=str(sys.exc_info()[1]))
dlg.run()
dlg.destroy()
def __general_page(self):
"""Return a widget containing the general page"""
if 'nt' == os.name:
swcc = Windows.start_with_computer_check
if 'posix' == os.name:
swcc = Unix.start_with_computer_check
options.set('auto_start', swcc())
vbox = gtk.VBox()
if online_update_notification_enabled:
cb_updates = gtk.CheckButton(
_("Check periodically for software updates via the Internet"))
cb_updates.set_active(options.get('check_online_updates'))
cb_updates.connect(
'toggled', self.__toggle_callback, 'check_online_updates')
cb_updates.set_tooltip_text(
_("If an update is found, you will be given the option to view information about it. Then, you may manually download and install the update."))
vbox.pack_start(cb_updates, False)
updates_box = gtk.VBox()
updates_box.set_border_width(10)
self.cb_beta = gtk.CheckButton(_("Check for new beta releases"))
self.cb_beta.set_active(options.get('check_beta'))
self.cb_beta.set_sensitive(options.get('check_online_updates'))
self.cb_beta.connect(
'toggled', self.__toggle_callback, 'check_beta')
updates_box.pack_start(self.cb_beta, False)
if 'nt' == os.name:
self.cb_winapp2 = gtk.CheckButton(
_("Download and update cleaners from community (winapp2.ini)"))
self.cb_winapp2.set_active(options.get('update_winapp2'))
self.cb_winapp2.set_sensitive(
options.get('check_online_updates'))
self.cb_winapp2.connect(
'toggled', self.__toggle_callback, 'update_winapp2')
updates_box.pack_start(self.cb_winapp2, False)
vbox.pack_start(updates_box, False)
# TRANSLATORS: This means to hide cleaners which would do
# nothing. For example, if Firefox were never used on
# this system, this option would hide Firefox to simplify
# the list of cleaners.
cb_auto_hide = gtk.CheckButton(_("Hide irrelevant cleaners"))
cb_auto_hide.set_active(options.get('auto_hide'))
cb_auto_hide.connect('toggled', self.__toggle_callback, 'auto_hide')
vbox.pack_start(cb_auto_hide, False)
# TRANSLATORS: Overwriting is the same as shredding. It is a way
# to prevent recovery of the data. You could also translate
# 'Shred files to prevent recovery.'
cb_shred = gtk.CheckButton(_("Overwrite files to hide contents"))
cb_shred.set_active(options.get('shred'))
cb_shred.connect('toggled', self.__toggle_callback, 'shred')
cb_shred.set_tooltip_text(
_("Overwriting is ineffective on some file systems and with certain BleachBit operations. Overwriting is significantly slower."))
vbox.pack_start(cb_shred, False)
cb_start = gtk.CheckButton(_("Start BleachBit with computer"))
cb_start.set_active(options.get('auto_start'))
cb_start.connect('toggled', self.__toggle_callback, 'auto_start')
vbox.pack_start(cb_start, False)
# Close the application after cleaning is complete.
cb_exit = gtk.CheckButton(_("Exit after cleaning"))
cb_exit.set_active(options.get('exit_done'))
cb_exit.connect('toggled', self.__toggle_callback, 'exit_done')
vbox.pack_start(cb_exit, False)
# Disable delete confirmation message.
cb_popup = gtk.CheckButton(_("Confirm before delete"))
cb_popup.set_active(options.get('delete_confirmation'))
cb_popup.connect(
'toggled', self.__toggle_callback, 'delete_confirmation')
vbox.pack_start(cb_popup, False)
return vbox
def __drives_page(self):
"""Return widget containing the drives page"""
def add_drive_cb(button):
"""Callback for adding a drive"""
title = _("Choose a folder")
pathname = GuiBasic.browse_folder(self.parent, title,
multiple=False, stock_button=gtk.STOCK_ADD)
if pathname:
liststore.append([pathname])
pathnames.append(pathname)
options.set_list('shred_drives', pathnames)
def remove_drive_cb(button):
"""Callback for removing a drive"""
treeselection = treeview.get_selection()
(model, _iter) = treeselection.get_selected()
if None == _iter:
# nothing selected
return
pathname = model[_iter][0]
liststore.remove(_iter)
pathnames.remove(pathname)
options.set_list('shred_drives', pathnames)
vbox = gtk.VBox()
# TRANSLATORS: 'free' means 'unallocated'
notice = gtk.Label(
_("Choose a writable folder for each drive for which to overwrite free space."))
notice.set_line_wrap(True)
vbox.pack_start(notice, False)
liststore = gtk.ListStore(str)
pathnames = options.get_list('shred_drives')
if pathnames:
pathnames = sorted(pathnames)
if not pathnames:
pathnames = []
for pathname in pathnames:
liststore.append([pathname])
treeview = gtk.TreeView(model=liststore)
crt = gtk.CellRendererText()
tvc = gtk.TreeViewColumn(None, crt, text=0)
treeview.append_column(tvc)
vbox.pack_start(treeview)
# TRANSLATORS: In the preferences dialog, this button adds a path to
# the list of paths
button_add = gtk.Button(_p('button', 'Add'))
button_add.connect("clicked", add_drive_cb)
# TRANSLATORS: In the preferences dialog, this button removes a path
# from the list of paths
button_remove = gtk.Button(_p('button', 'Remove'))
button_remove.connect("clicked", remove_drive_cb)
button_box = gtk.HButtonBox()
button_box.set_layout(gtk.BUTTONBOX_START)
button_box.pack_start(button_add)
button_box.pack_start(button_remove)
vbox.pack_start(button_box, False)
return vbox
def __languages_page(self):
"""Return widget containing the languages page"""
def preserve_toggled_cb(cell, path, liststore):
"""Callback for toggling the 'preserve' column"""
__iter = liststore.get_iter_from_string(path)
value = not liststore.get_value(__iter, 0)
liststore.set(__iter, 0, value)
langid = liststore[path][1]
options.set_language(langid, value)
vbox = gtk.VBox()
notice = gtk.Label(
_("All languages will be deleted except those checked."))
vbox.pack_start(notice, False)
# populate data
liststore = gtk.ListStore('gboolean', str, str)
for lang, native in sorted(Unix.Locales.native_locale_names.items()):
liststore.append([(options.get_language(lang)), lang, native])
# create treeview
treeview = gtk.TreeView(liststore)
# create column views
self.renderer0 = gtk.CellRendererToggle()
self.renderer0.set_property('activatable', True)
self.renderer0.connect('toggled', preserve_toggled_cb, liststore)
self.column0 = gtk.TreeViewColumn(
_("Preserve"), self.renderer0, active=0)
treeview.append_column(self.column0)
self.renderer1 = gtk.CellRendererText()
self.column1 = gtk.TreeViewColumn(_("Code"), self.renderer1, text=1)
treeview.append_column(self.column1)
self.renderer2 = gtk.CellRendererText()
self.column2 = gtk.TreeViewColumn(_("Name"), self.renderer2, text=2)
treeview.append_column(self.column2)
treeview.set_search_column(2)
# finish
swindow = gtk.ScrolledWindow()
swindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
swindow.set_size_request(300, 200)
swindow.add(treeview)
vbox.pack_start(swindow)
return vbox
def __locations_page(self, page_type):
"""Return a widget containing a list of files and folders"""
def add_whitelist_file_cb(button):
"""Callback for adding a file"""
title = _("Choose a file")
pathname = GuiBasic.browse_file(self.parent, title)
if pathname:
for this_pathname in pathnames:
if pathname == this_pathname[1]:
print "warning: '%s' already exists in whitelist" % pathname
return
liststore.append([_('File'), pathname])
pathnames.append(['file', pathname])
options.set_whitelist_paths(pathnames)
def add_whitelist_folder_cb(button):
"""Callback for adding a folder"""
title = _("Choose a folder")
pathname = GuiBasic.browse_folder(self.parent, title,
multiple=False, stock_button=gtk.STOCK_ADD)
if pathname:
for this_pathname in pathnames:
if pathname == this_pathname[1]:
print "warning: '%s' already exists in whitelist" % pathname
return
liststore.append([_('Folder'), pathname])
pathnames.append(['folder', pathname])
options.set_whitelist_paths(pathnames)
def remove_whitelist_path_cb(button):
"""Callback for removing a path"""
treeselection = treeview.get_selection()
(model, _iter) = treeselection.get_selected()
if None == _iter:
# nothing selected
return
pathname = model[_iter][1]
liststore.remove(_iter)
for this_pathname in pathnames:
if this_pathname[1] == pathname:
pathnames.remove(this_pathname)
options.set_whitelist_paths(pathnames)
def add_custom_file_cb(button):
"""Callback for adding a file"""
title = _("Choose a file")
pathname = GuiBasic.browse_file(self.parent, title)
if pathname:
for this_pathname in pathnames:
if pathname == this_pathname[1]:
print "warning: '%s' already exists in whitelist" % pathname
return
liststore.append([_('File'), pathname])
pathnames.append(['file', pathname])
options.set_custom_paths(pathnames)
def add_custom_folder_cb(button):
"""Callback for adding a folder"""
title = _("Choose a folder")
pathname = GuiBasic.browse_folder(self.parent, title,
multiple=False, stock_button=gtk.STOCK_ADD)
if pathname:
for this_pathname in pathnames:
if pathname == this_pathname[1]:
print "warning: '%s' already exists in whitelist" % pathname
return
liststore.append([_('Folder'), pathname])
pathnames.append(['folder', pathname])
options.set_custom_paths(pathnames)
def remove_custom_path_cb(button):
"""Callback for removing a path"""
treeselection = treeview.get_selection()
(model, _iter) = treeselection.get_selected()
if None == _iter:
# nothing selected
return
pathname = model[_iter][1]
liststore.remove(_iter)
for this_pathname in pathnames:
if this_pathname[1] == pathname:
pathnames.remove(this_pathname)
options.set_custom_paths(pathnames)
vbox = gtk.VBox()
# load data
if LOCATIONS_WHITELIST == page_type:
pathnames = options.get_whitelist_paths()
elif LOCATIONS_CUSTOM == page_type:
pathnames = options.get_custom_paths()
liststore = gtk.ListStore(str, str)
for paths in pathnames:
type_code = paths[0]
type_str = None
if type_code == 'file':
type_str = _('File')
elif type_code == 'folder':
type_str = _('Folder')
else:
raise RuntimeError("Invalid type code: '%s'" % type_code)
path = paths[1]
liststore.append([type_str, path])
if LOCATIONS_WHITELIST == page_type:
# TRANSLATORS: "Paths" is used generically to refer to both files
# and folders
notice = gtk.Label(
_("Theses paths will not be deleted or modified."))
elif LOCATIONS_CUSTOM == page_type:
notice = gtk.Label(
_("These locations can be selected for deletion."))
vbox.pack_start(notice, False)
# create treeview
treeview = gtk.TreeView(liststore)
# create column views
self.renderer0 = gtk.CellRendererText()
self.column0 = gtk.TreeViewColumn(_("Type"), self.renderer0, text=0)
treeview.append_column(self.column0)
self.renderer1 = gtk.CellRendererText()
# TRANSLATORS: In the tree view "Path" is used generically to refer to a
# file, a folder, or a pattern describing either
self.column1 = gtk.TreeViewColumn(_("Path"), self.renderer1, text=1)
treeview.append_column(self.column1)
treeview.set_search_column(1)
# finish tree view
swindow = gtk.ScrolledWindow()
swindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
swindow.set_size_request(300, 200)
swindow.add(treeview)
vbox.pack_start(swindow)
# buttons that modify the list
button_add_file = gtk.Button(_p('button', 'Add file'))
if LOCATIONS_WHITELIST == page_type:
button_add_file.connect("clicked", add_whitelist_file_cb)
elif LOCATIONS_CUSTOM == page_type:
button_add_file.connect("clicked", add_custom_file_cb)
button_add_folder = gtk.Button(_p('button', 'Add folder'))
if LOCATIONS_WHITELIST == page_type:
button_add_folder.connect("clicked", add_whitelist_folder_cb)
elif LOCATIONS_CUSTOM == page_type:
button_add_folder.connect("clicked", add_custom_folder_cb)
button_remove = gtk.Button(_p('button', 'Remove'))
if LOCATIONS_WHITELIST == page_type:
button_remove.connect("clicked", remove_whitelist_path_cb)
elif LOCATIONS_CUSTOM == page_type:
button_remove.connect("clicked", remove_custom_path_cb)
button_box = gtk.HButtonBox()
button_box.set_layout(gtk.BUTTONBOX_START)
button_box.pack_start(button_add_file)
button_box.pack_start(button_add_folder)
button_box.pack_start(button_remove)
vbox.pack_start(button_box, False)
# return page
return vbox
def run(self):
"""Run the dialog"""
self.dialog.show_all()
self.dialog.run()
self.dialog.destroy()
|
uudiin/bleachbit
|
bleachbit/GuiPreferences.py
|
Python
|
gpl-3.0
| 18,737 | 0.000907 |
from dgs2.discogs_client.exceptions import HTTPError
from dgs2.discogs_client.utils import parse_timestamp, update_qs, omit_none
class SimpleFieldDescriptor(object):
"""
An attribute that determines its value using the object's fetch() method.
If transform is a callable, the value will be passed through transform when
read. Useful for strings that should be ints, parsing timestamps, etc.
Shorthand for:
@property
def foo(self):
return self.fetch('foo')
"""
def __init__(self, name, writable=False, transform=None):
self.name = name
self.writable = writable
self.transform = transform
def __get__(self, instance, owner):
if instance is None:
return self
value = instance.fetch(self.name)
if self.transform:
value = self.transform(value)
return value
def __set__(self, instance, value):
if self.writable:
instance.changes[self.name] = value
return
raise AttributeError("can't set attribute")
class ObjectFieldDescriptor(object):
"""
An attribute that determines its value using the object's fetch() method,
and passes the resulting value through an APIObject.
If optional = True, the value will be None (rather than an APIObject
instance) if the key is missing from the response.
If as_id = True, the value is treated as an ID for the new APIObject rather
than a partial dict of the APIObject.
Shorthand for:
@property
def baz(self):
return BazClass(self.client, self.fetch('baz'))
"""
def __init__(self, name, class_name, optional=False, as_id=False):
self.name = name
self.class_name = class_name
self.optional = optional
self.as_id = as_id
def __get__(self, instance, owner):
if instance is None:
return self
wrapper_class = CLASS_MAP[self.class_name.lower()]
response_dict = instance.fetch(self.name)
if self.optional and not response_dict:
return None
if self.as_id:
# Response_dict wasn't really a dict. Make it so.
response_dict = {'id': response_dict}
return wrapper_class(instance.client, response_dict)
def __set__(self, instance, value):
raise AttributeError("can't set attribute")
class ListFieldDescriptor(object):
"""
An attribute that determines its value using the object's fetch() method,
and passes each item in the resulting list through an APIObject.
Shorthand for:
@property
def bar(self):
return [BarClass(self.client, d) for d in self.fetch('bar', [])]
"""
def __init__(self, name, class_name):
self.name = name
self.class_name = class_name
def __get__(self, instance, owner):
if instance is None:
return self
wrapper_class = CLASS_MAP[self.class_name.lower()]
return [wrapper_class(instance.client, d) for d in instance.fetch(self.name, [])]
def __set__(self, instance, value):
raise AttributeError("can't set attribute")
class ObjectCollectionDescriptor(object):
"""
An attribute that determines its value by fetching a URL to a paginated
list of related objects, and passes each item in the resulting list through
an APIObject.
Shorthand for:
@property
def frozzes(self):
return PaginatedList(self.client, self.fetch('frozzes_url'), 'frozzes', FrozClass)
"""
def __init__(self, name, class_name, url_key=None, list_class=None):
self.name = name
self.class_name = class_name
if url_key is None:
url_key = name + '_url'
self.url_key = url_key
if list_class is None:
list_class = PaginatedList
self.list_class = list_class
def __get__(self, instance, owner):
if instance is None:
return self
wrapper_class = CLASS_MAP[self.class_name.lower()]
return self.list_class(instance.client, instance.fetch(self.url_key), self.name, wrapper_class)
def __set__(self, instance, value):
raise AttributeError("can't set attribute")
class Field(object):
"""
A placeholder for a descriptor. Is transformed into a descriptor by the
APIObjectMeta metaclass when the APIObject classes are created.
"""
_descriptor_class = None
def __init__(self, *args, **kwargs):
self.key = kwargs.pop('key', None)
self.args = args
self.kwargs = kwargs
def to_descriptor(self, attr_name):
return self._descriptor_class(self.key or attr_name, *self.args, **self.kwargs)
class SimpleField(Field):
"""A field that just returns the value of a given JSON key."""
_descriptor_class = SimpleFieldDescriptor
class ListField(Field):
"""A field that returns a list of APIObjects."""
_descriptor_class = ListFieldDescriptor
class ObjectField(Field):
"""A field that returns a single APIObject."""
_descriptor_class = ObjectFieldDescriptor
class ObjectCollection(Field):
"""A field that returns a paginated list of APIObjects."""
_descriptor_class = ObjectCollectionDescriptor
class APIObjectMeta(type):
def __new__(cls, name, bases, dict_):
for k, v in dict_.iteritems():
if isinstance(v, Field):
dict_[k] = v.to_descriptor(k)
return super(APIObjectMeta, cls).__new__(cls, name, bases, dict_)
class APIObject(object):
__metaclass__ = APIObjectMeta
class PrimaryAPIObject(APIObject):
"""A first-order API object that has a canonical endpoint of its own."""
def __init__(self, client, dict_):
self.data = dict_
self.client = client
self._known_invalid_keys = []
self.changes = {}
def __eq__(self, other):
if isinstance(other, self.__class__):
return self.id == other.id
return NotImplemented
def __ne__(self, other):
equal = self.__eq__(other)
return NotImplemented if equal is NotImplemented else not equal
def refresh(self):
if self.data.get('resource_url'):
data = self.client._get(self.data['resource_url'])
self.data.update(data)
self.changes = {}
def save(self):
if self.data.get('resource_url'):
# TODO: This should be PATCH
self.client._post(self.data['resource_url'], self.changes)
# Refresh the object, in case there were side-effects
self.refresh()
def delete(self):
if self.data.get('resource_url'):
self.client._delete(self.data['resource_url'])
def fetch(self, key, default=None):
if key in self._known_invalid_keys:
return default
try:
# First, look in the cache of pending changes
return self.changes[key]
except KeyError:
pass
try:
# Next, look in the potentially incomplete local cache
return self.data[key]
except KeyError:
pass
# Now refresh the object from its resource_url.
# The key might exist but not be in our cache.
self.refresh()
try:
return self.data[key]
except:
self._known_invalid_keys.append(key)
return default
# This is terribly cheesy, but makes the client API more consistent
class SecondaryAPIObject(APIObject):
"""
An object that wraps parts of a response and doesn't have its own
endpoint.
"""
def __init__(self, client, dict_):
self.client = client
self.data = dict_
def fetch(self, key, default=None):
return self.data.get(key, default)
class BasePaginatedResponse(object):
"""Base class for lists of objects spread across many URLs."""
def __init__(self, client, url):
self.client = client
self.url = url
self._num_pages = None
self._num_items = None
self._pages = {}
self._per_page = 50
self._list_key = 'items'
self._sort_key = None
self._sort_order = 'asc'
self._filters = {}
@property
def per_page(self):
return self._per_page
@per_page.setter
def per_page(self, value):
self._per_page = value
self._invalidate()
def _invalidate(self):
self._pages = {}
self._num_pages = None
self._num_items = None
def _load_pagination_info(self):
data = self.client._get(self._url_for_page(1))
self._num_pages = data['pagination']['pages']
self._num_items = data['pagination']['items']
def _url_for_page(self, page):
base_qs = {
'page': page,
'per_page': self._per_page,
}
if self._sort_key is not None:
base_qs.update({
'sort': self._sort_key,
'sort_order': self._sort_order,
})
base_qs.update(self._filters)
return update_qs(self.url, base_qs)
def sort(self, key, order='asc'):
if not order in ('asc', 'desc'):
raise ValueError("Order must be one of 'asc', 'desc'")
self._sort_key = key
self._sort_order = order
self._invalidate()
return self
def filter(self, **kwargs):
self._filters = kwargs
self._invalidate()
return self
@property
def pages(self):
if self._num_pages is None:
self._load_pagination_info()
return self._num_pages
@property
def count(self):
if self._num_items is None:
self._load_pagination_info()
return self._num_items
def page(self, index):
if not index in self._pages:
data = self.client._get(self._url_for_page(index))
self._pages[index] = [
self._transform(item) for item in data[self._list_key]
]
return self._pages[index]
def _transform(self, item):
return item
def __getitem__(self, index):
page_index = index / self.per_page + 1
offset = index % self.per_page
try:
page = self.page(page_index)
except HTTPError, e:
if e.status_code == 404:
raise IndexError(e.msg)
else:
raise
return page[offset]
def __len__(self):
return self.count
def __iter__(self):
for i in xrange(1, self.pages + 1):
page = self.page(i)
for item in page:
yield item
class PaginatedList(BasePaginatedResponse):
"""A paginated list of objects of a particular class."""
def __init__(self, client, url, key, class_):
super(PaginatedList, self).__init__(client, url)
self._list_key = key
self.class_ = class_
def _transform(self, item):
return self.class_(self.client, item)
class Wantlist(PaginatedList):
def add(self, release, notes=None, notes_public=None, rating=None):
release_id = release.id if isinstance(release, Release) else release
data = {
'release_id': release_id,
'notes': notes,
'notes_public': notes_public,
'rating': rating,
}
self.client._put(self.url + '/' + str(release_id), omit_none(data))
self._invalidate()
def remove(self, release):
release_id = release.id if isinstance(release, Release) else release
self.client._delete(self.url + '/' + str(release_id))
self._invalidate()
class OrderMessagesList(PaginatedList):
def add(self, message=None, status=None, email_buyer=True, email_seller=False):
data = {
'message': message,
'status': status,
'email_buyer': email_buyer,
'email_seller': email_seller,
}
self.client._post(self.url, omit_none(data))
self._invalidate()
class MixedPaginatedList(BasePaginatedResponse):
"""A paginated list of objects identified by their type parameter."""
def __init__(self, client, url, key):
super(MixedPaginatedList, self).__init__(client, url)
self._list_key = key
def _transform(self, item):
# In some cases, we want to map the 'title' key we get back in search
# results to 'name'. This way, you can repr() a page of search results
# without making 50 requests.
if item['type'] in ('label', 'artist'):
item['name'] = item['title']
return CLASS_MAP[item['type']](self.client, item)
class Artist(PrimaryAPIObject):
id = SimpleField()
name = SimpleField()
real_name = SimpleField(key='realname')
profile = SimpleField()
data_quality = SimpleField()
name_variations = SimpleField(key='namevariations')
url = SimpleField('uri')
urls = SimpleField()
aliases = ListField('Artist')
members = ListField('Artist')
groups = ListField('Artist')
def __init__(self, client, dict_):
super(Artist, self).__init__(client, dict_)
self.data['resource_url'] = client._base_url + '/artists/%d' % dict_['id']
@property
def releases(self):
return MixedPaginatedList(self.client, self.fetch('releases_url'), 'releases')
def __repr__(self):
return '<Artist %r %r>' % (self.id, self.name)
class Release(PrimaryAPIObject):
id = SimpleField()
title = SimpleField()
year = SimpleField()
thumb = SimpleField()
data_quality = SimpleField()
status = SimpleField()
genres = SimpleField()
country = SimpleField()
notes = SimpleField()
formats = SimpleField()
url = SimpleField('uri')
videos = ListField('Video')
tracklist = ListField('Track')
artists = ListField('Artist')
credits = ListField('Artist', key='extraartists')
labels = ListField('Label')
companies = ListField('Label')
def __init__(self, client, dict_):
super(Release, self).__init__(client, dict_)
self.data['resource_url'] = client._base_url + '/releases/%d' % dict_['id']
@property
def master(self):
master_id = self.fetch('master_id')
if master_id:
return Master(self.client, {'id': master_id})
else:
return None
def __repr__(self):
return '<Release %r %r>' % (self.id, self.title)
class Master(PrimaryAPIObject):
id = SimpleField()
title = SimpleField()
data_quality = SimpleField()
styles = SimpleField()
genres = SimpleField()
images = SimpleField()
url = SimpleField('uri')
videos = ListField('Video')
tracklist = ListField('Track')
main_release = ObjectField('Release', as_id=True)
versions = ObjectCollection('Release')
def __init__(self, client, dict_):
super(Master, self).__init__(client, dict_)
self.data['resource_url'] = client._base_url + '/masters/%d' % dict_['id']
def __repr__(self):
return '<Master %r %r>' % (self.id, self.title)
class Label(PrimaryAPIObject):
id = SimpleField()
name = SimpleField()
profile = SimpleField()
urls = SimpleField()
images = SimpleField()
contact_info = SimpleField()
data_quality = SimpleField()
url = SimpleField('uri')
sublabels = ListField('Label')
parent_label = ObjectField('Label', optional=True)
releases = ObjectCollection('Release')
def __init__(self, client, dict_):
super(Label, self).__init__(client, dict_)
self.data['resource_url'] = client._base_url + '/labels/%d' % dict_['id']
def __repr__(self):
return '<Label %r %r>' % (self.id, self.name)
class User(PrimaryAPIObject):
id = SimpleField()
username = SimpleField()
releases_contributed = SimpleField()
num_collection = SimpleField()
num_wantlist = SimpleField()
num_lists = SimpleField()
rank = SimpleField()
rating_avg = SimpleField()
url = SimpleField('uri')
name = SimpleField(writable=True)
profile = SimpleField(writable=True)
location = SimpleField(writable=True)
home_page = SimpleField(writable=True)
registered = SimpleField(transform=parse_timestamp)
inventory = ObjectCollection('Listing', key='listings', url_key='inventory_url')
wantlist = ObjectCollection('WantlistItem', key='wants', url_key='wantlist_url', list_class=Wantlist)
def __init__(self, client, dict_):
super(User, self).__init__(client, dict_)
self.data['resource_url'] = client._base_url + '/users/%s' % dict_['username']
@property
def orders(self):
return PaginatedList(self.client, self.client._base_url + '/marketplace/orders', 'orders', Order)
@property
def collection_folders(self):
resp = self.client._get(self.fetch('collection_folders_url'))
return [CollectionFolder(self.client, d) for d in resp['folders']]
def __repr__(self):
return '<User %r %r>' % (self.id, self.username)
class WantlistItem(PrimaryAPIObject):
id = SimpleField()
rating = SimpleField(writable=True)
notes = SimpleField(writable=True)
notes_public = SimpleField(writable=True)
release = ObjectField('Release', key='basic_information')
def __init__(self, client, dict_):
super(WantlistItem, self).__init__(client, dict_)
def __repr__(self):
return '<WantlistItem %r %r>' % (self.id, self.release.title)
# TODO: folder_id should be a Folder object; needs folder_url
# TODO: notes should be first-order (somehow); needs resource_url
class CollectionItemInstance(PrimaryAPIObject):
id = SimpleField()
rating = SimpleField()
folder_id = SimpleField()
notes = SimpleField()
release = ObjectField('Release', key='basic_information')
def __init__(self, client, dict_):
super(CollectionItemInstance, self).__init__(client, dict_)
def __repr__(self):
return '<CollectionItemInstance %r %r>' % (self.id, self.release.title)
class CollectionFolder(PrimaryAPIObject):
id = SimpleField()
name = SimpleField()
count = SimpleField()
def __init__(self, client, dict_):
super(CollectionFolder, self).__init__(client, dict_)
@property
def releases(self):
# TODO: Needs releases_url
return PaginatedList(self.client, self.fetch('resource_url') + '/releases', 'releases', CollectionItemInstance)
def __repr__(self):
return '<CollectionFolder %r %r>' % (self.id, self.name)
class Listing(PrimaryAPIObject):
id = SimpleField()
status = SimpleField()
allow_offers = SimpleField()
condition = SimpleField()
sleeve_condition = SimpleField()
ships_from = SimpleField()
comments = SimpleField()
audio = SimpleField()
url = SimpleField('uri')
price = ObjectField('Price')
release = ObjectField('Release')
seller = ObjectField('User')
posted = SimpleField(transform=parse_timestamp)
def __init__(self, client, dict_):
super(Listing, self).__init__(client, dict_)
self.data['resource_url'] = client._base_url + '/marketplace/listings/%d' % dict_['id']
def __repr__(self):
return '<Listing %r %r>' % (self.id, self.release.data['description'])
class Order(PrimaryAPIObject):
id = SimpleField()
next_status = SimpleField()
shipping_address = SimpleField()
additional_instructions = SimpleField()
url = SimpleField('uri')
status = SimpleField(writable=True)
fee = ObjectField('Price')
buyer = ObjectField('User')
seller = ObjectField('User')
created = SimpleField(transform=parse_timestamp)
last_activity = SimpleField(transform=parse_timestamp)
messages = ObjectCollection('OrderMessage', list_class=OrderMessagesList)
items = ListField('Listing')
def __init__(self, client, dict_):
super(Order, self).__init__(client, dict_)
self.data['resource_url'] = client._base_url + '/marketplace/orders/%s' % dict_['id']
# Setting shipping is a little weird -- you can't change the
# currency, and you use the 'shipping' key instead of 'value'
@property
def shipping(self):
return Price(self.client, self.fetch('shipping'))
@shipping.setter
def shipping(self, value):
self.changes['shipping'] = value
def __repr__(self):
return '<Order %r>' % self.id
class OrderMessage(SecondaryAPIObject):
subject = SimpleField()
message = SimpleField()
to = ObjectField('User')
order = ObjectField('Order')
timestamp = SimpleField(transform=parse_timestamp)
def __repr__(self):
return '<OrderMessage to:%r>' % self.to.username
class Track(SecondaryAPIObject):
duration = SimpleField()
position = SimpleField()
title = SimpleField()
artists = ListField('Artist')
credits = ListField('Artist', key='extraartists')
def __repr__(self):
return '<Track %r %r>' % (self.position, self.title)
class Price(SecondaryAPIObject):
currency = SimpleField()
value = SimpleField()
def __repr__(self):
return '<Price %r %r>' % (self.value, self.currency)
class Video(SecondaryAPIObject):
duration = SimpleField()
embed = SimpleField()
title = SimpleField()
description = SimpleField()
url = SimpleField('uri')
def __repr__(self):
return '<Video %r>' % (self.title)
CLASS_MAP = {
'artist': Artist,
'release': Release,
'master': Master,
'label': Label,
'price': Price,
'video': Video,
'track': Track,
'user': User,
'order': Order,
'listing': Listing,
'wantlistitem': WantlistItem,
'ordermessage': OrderMessage,
}
|
hzlf/openbroadcast
|
website/tools/dgs2/discogs_client/models.py
|
Python
|
gpl-3.0
| 21,790 | 0.000964 |
#!/usr/bin/env python
"""
The LibVMI Library is an introspection library that simplifies access to
memory in a target virtual machine or in a file containing a dump of
a system's physical memory. LibVMI is based on the XenAccess Library.
Copyright 2011 Sandia Corporation. Under the terms of Contract
DE-AC04-94AL85000 with Sandia Corporation, the U.S. Government
retains certain rights in this software.
Author: Bryan D. Payne (bdpayne@acm.org)
This file is part of LibVMI.
LibVMI is free software: you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your
option) any later version.
LibVMI is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
License for more details.
You should have received a copy of the GNU Lesser General Public License
along with LibVMI. If not, see <http://www.gnu.org/licenses/>.
"""
import pyvmi
import sys
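# Walk the kernel's linked list of processes: resolve the list head from the
# PsInitialSystemProcess symbol, follow the task-list pointers using the
# win_tasks offset, and read each entry's image name and PID through the
# win_pname and win_pid offsets supplied by the LibVMI configuration.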
def get_processes(vmi):
tasks_offset = vmi.get_offset("win_tasks")
name_offset = vmi.get_offset("win_pname") - tasks_offset
pid_offset = vmi.get_offset("win_pid") - tasks_offset
list_head = vmi.read_addr_ksym("PsInitialSystemProcess")
next_process = vmi.read_addr_va(list_head + tasks_offset, 0)
list_head = next_process
while True:
procname = vmi.read_str_va(next_process + name_offset, 0)
pid = vmi.read_32_va(next_process + pid_offset, 0)
next_process = vmi.read_addr_va(next_process, 0)
if (pid < 1<<16):
yield pid, procname
if (list_head == next_process):
break
def main(argv):
vmi = pyvmi.init(argv[1], "complete")
for pid, procname in get_processes(vmi):
print "[%5d] %s" % (pid, procname)
if __name__ == "__main__":
main(sys.argv)
|
jie-lin/libvmi
|
tools/pyvmi/examples/process-list.py
|
Python
|
gpl-3.0
| 1,982 | 0.001009 |
# Function to stack raster bands.
import numpy as np
from osgeo import gdal
def stack_bands(filenames):
"""Returns a 3D array containing all band data from all files."""
bands = []
for fn in filenames:
ds = gdal.Open(fn)
for i in range(1, ds.RasterCount + 1):
bands.append(ds.GetRasterBand(i).ReadAsArray())
return np.dstack(bands)
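# Example usage (hypothetical file names; any set of rasters with matching
# row/column dimensions would work):
#   data = stack_bands(['band1.tif', 'band2.tif', 'band3.tif'])
#   print(data.shape)   # (rows, columns, total number of bands)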
|
cgarrard/osgeopy-code
|
Chapter12/listing12_1.py
|
Python
|
mit
| 378 | 0.002646 |
#!/usr/bin/env python3
import os
import sys
import copy
import re
import time
import datetime
from urllib.request import urlopen
import numpy as np
import nltk
from bs4 import BeautifulSoup  # used by get_soup_with_repeat() below
from nltk.stem.wordnet import WordNetLemmatizer
from nltk.stem.porter import PorterStemmer
import json
import torch
import torch.autograd as autograd
from torch.autograd import Variable  # used by model_eval() below
import torch.nn as nn
import torch.nn.functional as F
# training with SGLD with annealing and save models
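# The inner loop below performs a stochastic gradient Langevin dynamics step:
#   theta <- theta - (epsilon/2) * grad(loss) + N(0, epsilon / t)
# where the step size epsilon = lr * epoch**(-1/3) decays over epochs and the
# temperature t is incremented once per epoch, so the injected Gaussian noise
# is annealed away as training proceeds. Every fifth epoch the current weights
# are saved as a thinning sample for later Bayesian model averaging.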
def train(X_train, y_train, X_valid, y_valid, X_test, y_test, model, args):
model.train()
batch = args.batch_size
parameters = [parameter for parameter in model.parameters()]
set_scale = [parameter.data.std().item() for parameter in model.parameters()]
set_scale = [scale / max(set_scale) for scale in set_scale] # normalize
for epoch in range(1, args.epochs+1):
corrects = 0
epsilon = args.lr * ((epoch * 1.0) ** (-0.333)) # optimal decay rate
for idx in range(int(X_train.shape[0]/batch) + 1):
feature = torch.LongTensor(X_train[(idx*batch):(idx*batch+batch),])
target = torch.LongTensor(y_train[(idx*batch):(idx*batch+batch)])
if args.cuda:
feature, target = feature.cuda(), target.cuda()
logit = model(feature)
loss = F.cross_entropy(logit, target)
model.zero_grad()
loss.backward()
for layer_no, param in enumerate(model.parameters()):
if args.static and layer_no == 0: # fixed embedding layer cannot update
continue
# by default I assume you train the models using GPU
noise = torch.cuda.FloatTensor(param.data.size()).normal_() * np.sqrt(epsilon / args.t)
#noise = torch.cuda.FloatTensor(param.data.size()).normal_() * set_scale[layer_no]
parameters[layer_no].data += (- epsilon / 2 * param.grad + noise)
corrects += (torch.max(logit, 1)[1].view(target.size()).data == target.data).sum().item()
accuracy = 100.0 * corrects / batch / (idx + 1)
sys.stdout.write('\rEpoch[{}] Batch[{}] - loss: {:.4f} acc: {:.2f}%({}/{}) temperature: {}'.format(
epoch, idx, loss.item(), accuracy, corrects, batch * (idx + 1), int(args.t)))
args.t = args.t + 1 # annealing
if epoch % 5 != 0:
continue
'''
try:
set_scale = [parameter.grad.data.std().item() for parameter in model.parameters()]
set_scale = [scale / max(set_scale) for scale in set_scale] # normalize
except:
set_scale = [parameter.data.std().item() for parameter in model.parameters()]
set_scale = [scale / max(set_scale) for scale in set_scale] # normalize
'''
save(model, args.save_dir, epoch)
print()
eval(X_valid, y_valid, model, 'Validation', args)
eval(X_test, y_test, model, 'Testing ', args)
def eval(X, y, model, term, args):
model.eval()
corrects, TP, avg_loss = 0, 0, 0
correct_part, total_part = {0.2:0, 0.4:0}, {0.2:1e-16, 0.4:1e-16}
batch = args.batch_size
for idx in range(int(X.shape[0]/batch) + 1):
feature = torch.LongTensor(X[(idx*batch):(idx*batch+batch),])
target = torch.LongTensor(y[(idx*batch):(idx*batch+batch)])
if args.cuda:
feature, target = feature.cuda(), target.cuda()
logit = model(feature)
loss = F.cross_entropy(logit, target, size_average=False)
avg_loss += loss.data.item()
predictor = torch.exp(logit[:, 1]) / (torch.exp(logit[:, 0]) + torch.exp(logit[:, 1]))
for xnum in range(1, 3):
thres = round(0.2 * xnum, 1)
idx_thres = (predictor > 0.5 + thres) + (predictor < 0.5 - thres)
correct_part[thres] += (torch.max(logit, 1)[1][idx_thres] == target.data[idx_thres]).sum().item()
total_part[thres] += idx_thres.sum().item()
corrects += (torch.max(logit, 1)[1] == target.data).sum().item()
TP += (((torch.max(logit, 1)[1] == target.data).int() + (torch.max(logit, 1)[1]).int()) == 2).sum().item()
size = y.shape[0]
avg_loss /= size
accuracy = 100.0 * corrects / size
# TP, TN: True Positive/True Negative
print(' {} - loss: {:.4f} acc: {:.2f}%({}/{}) {:.2f}%({}/{}) {:.2f}%({}/{}) TP/TN: ({}/{}) \n'.format(term,
avg_loss, accuracy, corrects, size, 100.0 * correct_part[0.2] / total_part[0.2], correct_part[0.2], int(total_part[0.2]),
100.0 * correct_part[0.4] / total_part[0.4], correct_part[0.4], int(total_part[0.4]), TP, corrects - TP))
return accuracy
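# Bayesian model averaging: the same bookkeeping as eval(), except that the
# loss, accuracy and threshold statistics are averaged over every saved
# thinning sample (model snapshot) rather than computed from a single network.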
def bma_eval(X, y, mymodels, term, args):
corrects, TP, avg_loss = 0, 0, 0
correct_part, total_part = {0.2:0, 0.4:0}, {0.2:1e-16,0.4:1e-16}
batch = args.batch_size
for model in mymodels:
model.eval()
for idx in range(int(X.shape[0]/batch) + 1):
feature = torch.LongTensor(X[(idx*batch):(idx*batch+batch),])
target = torch.LongTensor(y[(idx*batch):(idx*batch+batch)])
if args.cuda:
feature, target = feature.cuda(), target.cuda()
logit = model(feature)
loss = F.cross_entropy(logit, target, size_average=False)
avg_loss += loss.data.item() / (len(mymodels) * 1.0)
predictor = torch.exp(logit[:, 1]) / (torch.exp(logit[:, 0]) + torch.exp(logit[:, 1]))
for xnum in range(1, 3):
thres = round(0.2 * xnum, 1)
idx_thres = (predictor > 0.5 + thres) + (predictor < 0.5 - thres)
correct_part[thres] += (torch.max(logit, 1)[1][idx_thres] == target.data[idx_thres]).sum().item() / (len(mymodels) * 1.0)
total_part[thres] += idx_thres.sum().item() / (len(mymodels) * 1.0)
corrects += (torch.max(logit, 1)[1] == target.data).sum().item() / (len(mymodels) * 1.0)
TP += (((torch.max(logit, 1)[1] == target.data).int() + (torch.max(logit, 1)[1]).int()) == 2).sum().item()
size = y.shape[0]
avg_loss /= size
accuracy = 100.0 * corrects / size
TP = TP * 1.0 / (len(mymodels) * 1.0)
print(' {} - loss: {:.4f} acc: {:.2f}%({}/{}) {:.2f}%({}/{}) {:.2f}%({}/{}) TP/TN: ({}/{}) \n'.format(term,
avg_loss, accuracy, corrects, size, 100.0 * correct_part[0.2] / total_part[0.2], correct_part[0.2], int(total_part[0.2]),
100.0 * correct_part[0.4] / total_part[0.4], correct_part[0.4], int(total_part[0.4]), TP, corrects - TP))
return accuracy
def predictor_preprocess(cnn, args):
# load trained thinning samples (Bayesian CNN models) from input/models/
mymodels = []
for num, each_model in enumerate(os.listdir(args.save_dir)):
print(args.save_dir + each_model)
if args.cuda:
cnn.load_state_dict(torch.load(args.save_dir + each_model))
else:
cnn.load_state_dict(torch.load(args.save_dir + each_model, map_location=lambda storage, loc: storage))
mymodels.append(copy.deepcopy(cnn))
if num > 30: # in case memory overloads
break
with open('./input/word2idx', 'r') as file:
word2idx = json.load(file)
stopWords = set()
with open('./input/stopWords') as file:
for word in file:
stopWords.add(word.strip())
return(mymodels, word2idx, stopWords)
def predict(sentence, mymodels, word2idx, stopWords, args):
tokens = tokenize_news(sentence, stopWords)
tokens = [word2idx[t] if t in word2idx else word2idx['UNKNOWN'] for t in tokens]
if len(tokens) < 5 or tokens == [word2idx['UNKNOWN']] * len(tokens): # tokens cannot be too short or unknown
signal = 'Unknown'
else:
feature = torch.LongTensor([tokens])
logits = []
for model in mymodels:
model.eval()
if args.cuda:
feature = feature.cuda()
logit = model(feature)
predictor = torch.exp(logit[:, 1]) / (torch.exp(logit[:, 0]) + torch.exp(logit[:, 1]))
logits.append(predictor.item())
signal = signals(np.mean(logits))
return(signal)
def daily_predict(cnn, args):
mymodels, word2idx, stopWords = predictor_preprocess(cnn, args)
output = './input/news/' + args.date[:4] + '/news_' + args.date + '.csv'
fout = open(output + '_bak', 'w')
with open(output) as f:
for num, line in enumerate(f):
line = line.strip().split(',')
if len(line) == 6:
ticker, name, day, headline, body, newsType = line
elif len(line) == 7:
ticker, name, day, headline, body, newsType, signal = line
else:
continue
#if newsType != 'topStory': # newsType: [topStory, normal]
# signal = 'Unknown'
content = headline + ' ' + body
signal = predict(content, mymodels, word2idx, stopWords, args)
fout.write(','.join([ticker, name, day, headline, body, newsType, signal]) + '\n')
fout.close()
print('change file name')
print('mv ' + output + '_bak ' + output)
os.system('mv ' + output + '_bak ' + output)
def save(model, save_dir, steps):
if not os.path.isdir(save_dir):
os.makedirs(save_dir)
save_path = '{}/model_{}.pt'.format(save_dir,steps)
torch.save(model.state_dict(), save_path)
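# Map the (model-averaged) probability of a positive move onto a discrete
# trading signal: within +/-0.05 of 0.5 -> Unknown, beyond +/-0.4 -> a strong
# signal, otherwise a plain Buy/Sell.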
def signals(digit):
strong_signal = 0.4
unknown_thres = 0.05
if digit > 0.5 + strong_signal:
return('Strong Buy')
elif digit > 0.5 + unknown_thres:
return('Buy')
elif digit > 0.5 - unknown_thres:
return('Unknown')
elif digit > 0.5 - strong_signal:
return('Sell')
else:
return('Strong Sell')
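# Pad (with ones, on the left) or truncate the 2-D sentence matrix along its
# second axis so that exactly keepNum columns remain, then flatten the result
# into a fixed-length 1-D feature vector.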
def padding(sentencesVec, keepNum):
shape = sentencesVec.shape[0]
ownLen = sentencesVec.shape[1]
if ownLen < keepNum:
return np.hstack((np.ones([shape, keepNum-ownLen]), sentencesVec)).flatten()
else:
return sentencesVec[:, -keepNum:].flatten()
def dateGenerator(numdays): # generate N days until now, eg [20151231, 20151230]
base = datetime.datetime.today()
date_list = [base - datetime.timedelta(days=x) for x in range(0, numdays)]
for i in range(len(date_list)):
date_list[i] = date_list[i].strftime("%Y%m%d")
return set(date_list)
def generate_past_n_days(numdays):
"""Generate N days until now, e.g., [20151231, 20151230]."""
base = datetime.datetime.today()
date_range = [base - datetime.timedelta(days=x) for x in range(0, numdays)]
return [x.strftime("%Y%m%d") for x in date_range]
def unify_word(word): # went -> go, apples -> apple, BIG -> big
"""unify verb tense and noun singular"""
ADJ, ADJ_SAT, ADV, NOUN, VERB = 'a', 's', 'r', 'n', 'v'
for wt in [ADJ, ADJ_SAT, ADV, NOUN, VERB]:
try:
word = WordNetLemmatizer().lemmatize(word, pos=wt)
except:
pass
return word.lower()
def digit_filter(word):
check = re.match(r'\d*\.?\d*', word).group()
if check == "":
return word
else:
return ""
def unify_word_meaning(word):
if word in ["bigger-than-expected", "higher-than-expected", "better-than-expected", "stronger-than-expected"]:
return "better"
elif word in ["smaller-than-expected", "lower-than-expected", "weaker-than-expected", "worse-than-expected"]:
return "lower"
elif word in ["no", "not", "n't"]:
return "not"
else:
return word
def get_soup_with_repeat(url, repeat_times=3, verbose=True):
for i in range(repeat_times): # repeat in case of http failure
try:
time.sleep(np.random.poisson(3))
response = urlopen(url)
data = response.read().decode('utf-8')
return BeautifulSoup(data, "lxml")
except Exception as e:
if i == 0:
print(e)
if verbose:
print('retry...')
continue
def tokenize_news(headline, stopWords):
tokens = nltk.word_tokenize(headline) #+ nltk.word_tokenize(body)
tokens = list(map(unify_word, tokens))
tokens = list(map(unify_word, tokens)) # applied twice: some words are not fully normalized on the first pass
tokens = list(map(digit_filter, tokens))
tokens = list(map(unify_word_meaning, tokens))
tokens = [t for t in tokens if t not in stopWords and t != ""]
return(tokens)
def value2int(y, clusters=2):
label = np.copy(y)
label[y < np.percentile(y, 100 / clusters)] = 0
for i in range(1, clusters):
label[y > np.percentile(y, 100 * i / clusters)] = i
return label
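# Buckets a continuous target into `clusters` integer class labels by
# percentile; e.g. with clusters=2, values strictly below the median become 0
# and values strictly above it become 1 (values falling exactly on a boundary
# are left unchanged).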
def value2int_simple(y):
label = np.copy(y)
label[y < 0] = 0
label[y >= 0] = 1
return label
def model_eval(net, data_loader, if_print=1):
net.eval()
correct = 0
total = 0
for cnt, (images, labels) in enumerate(data_loader):
images, labels = Variable(images), Variable(labels)
if torch.cuda.is_available():
images, labels = images.cuda(), labels.cuda()
outputs = net.forward(images)
prediction = outputs.data.max(1)[1]
correct += prediction.eq(labels.data).sum().item()
print('\nTest set: Accuracy: {:0.2f}%'.format(100.0 * correct / len(data_loader.dataset)))
|
WayneDW/Sentiment-Analysis-in-Event-Driven-Stock-Price-Movement-Prediction
|
util.py
|
Python
|
mit
| 13,305 | 0.006238 |
# Copyright 2012 OpenStack Foundation
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011,2012 Akira YOSHIYAMA <akirayoshiyama@gmail.com>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# This source code is based ./auth_token.py and ./ec2_token.py.
# See them for their copyright.
"""
S3 TOKEN MIDDLEWARE
This WSGI component:
* Get a request from the swift3 middleware with an S3 Authorization
access key.
* Validate s3 token in Keystone.
* Transform the account name to AUTH_%(tenant_name).
"""
import logging
from oslo.serialization import jsonutils
import requests
import six
from six.moves import urllib
import webob
PROTOCOL_NAME = 'S3 Token Authentication'
# TODO(kun): remove this after oslo merges it.
def split_path(path, minsegs=1, maxsegs=None, rest_with_last=False):
"""Validate and split the given HTTP request path.
**Examples**::
['a'] = split_path('/a')
['a', None] = split_path('/a', 1, 2)
['a', 'c'] = split_path('/a/c', 1, 2)
['a', 'c', 'o/r'] = split_path('/a/c/o/r', 1, 3, True)
:param path: HTTP Request path to be split
:param minsegs: Minimum number of segments to be extracted
:param maxsegs: Maximum number of segments to be extracted
:param rest_with_last: If True, trailing data will be returned as part
of last segment. If False, and there is
trailing data, raises ValueError.
:returns: list of segments with a length of maxsegs (non-existent
segments will return as None)
:raises: ValueError if given an invalid path
"""
if not maxsegs:
maxsegs = minsegs
if minsegs > maxsegs:
raise ValueError('minsegs > maxsegs: %d > %d' % (minsegs, maxsegs))
if rest_with_last:
segs = path.split('/', maxsegs)
minsegs += 1
maxsegs += 1
count = len(segs)
if (segs[0] or count < minsegs or count > maxsegs or
'' in segs[1:minsegs]):
raise ValueError('Invalid path: %s' % urllib.parse.quote(path))
else:
minsegs += 1
maxsegs += 1
segs = path.split('/', maxsegs)
count = len(segs)
if (segs[0] or count < minsegs or count > maxsegs + 1 or
'' in segs[1:minsegs] or
(count == maxsegs + 1 and segs[maxsegs])):
raise ValueError('Invalid path: %s' % urllib.parse.quote(path))
segs = segs[1:maxsegs]
segs.extend([None] * (maxsegs - 1 - len(segs)))
return segs
class ServiceError(Exception):
pass
class S3Token(object):
"""Auth Middleware that handles S3 authenticating client calls."""
def __init__(self, app, conf):
"""Common initialization code."""
self.app = app
self.logger = logging.getLogger(conf.get('log_name', __name__))
self.logger.debug('Starting the %s component', PROTOCOL_NAME)
self.logger.warning(
'This middleware module is deprecated as of v0.11.0 in favor of '
'keystonemiddleware.s3_token - please update your WSGI pipeline '
'to reference the new middleware package.')
self.reseller_prefix = conf.get('reseller_prefix', 'AUTH_')
# where to find the auth service (we use this to validate tokens)
auth_host = conf.get('auth_host')
auth_port = int(conf.get('auth_port', 35357))
auth_protocol = conf.get('auth_protocol', 'https')
self.request_uri = '%s://%s:%s' % (auth_protocol, auth_host, auth_port)
# SSL
insecure = conf.get('insecure', False)
cert_file = conf.get('certfile')
key_file = conf.get('keyfile')
if insecure:
self.verify = False
elif cert_file and key_file:
self.verify = (cert_file, key_file)
elif cert_file:
self.verify = cert_file
else:
self.verify = None
def deny_request(self, code):
error_table = {
'AccessDenied': (401, 'Access denied'),
'InvalidURI': (400, 'Could not parse the specified URI'),
}
resp = webob.Response(content_type='text/xml')
resp.status = error_table[code][0]
error_msg = ('<?xml version="1.0" encoding="UTF-8"?>\r\n'
'<Error>\r\n <Code>%s</Code>\r\n '
'<Message>%s</Message>\r\n</Error>\r\n' %
(code, error_table[code][1]))
if six.PY3:
error_msg = error_msg.encode()
resp.body = error_msg
return resp
def _json_request(self, creds_json):
headers = {'Content-Type': 'application/json'}
try:
response = requests.post('%s/v2.0/s3tokens' % self.request_uri,
headers=headers, data=creds_json,
verify=self.verify)
except requests.exceptions.RequestException as e:
self.logger.info('HTTP connection exception: %s', e)
resp = self.deny_request('InvalidURI')
raise ServiceError(resp)
if response.status_code < 200 or response.status_code >= 300:
self.logger.debug('Keystone reply error: status=%s reason=%s',
response.status_code, response.reason)
resp = self.deny_request('AccessDenied')
raise ServiceError(resp)
return response
def __call__(self, environ, start_response):
"""Handle incoming request. authenticate and send downstream."""
req = webob.Request(environ)
self.logger.debug('Calling S3Token middleware.')
try:
parts = split_path(req.path, 1, 4, True)
version, account, container, obj = parts
except ValueError:
msg = 'Not a path query, skipping.'
self.logger.debug(msg)
return self.app(environ, start_response)
# Read request signature and access id.
if 'Authorization' not in req.headers:
msg = 'No Authorization header. skipping.'
self.logger.debug(msg)
return self.app(environ, start_response)
token = req.headers.get('X-Auth-Token',
req.headers.get('X-Storage-Token'))
if not token:
msg = 'You did not specify an auth or a storage token. skipping.'
self.logger.debug(msg)
return self.app(environ, start_response)
auth_header = req.headers['Authorization']
try:
access, signature = auth_header.split(' ')[-1].rsplit(':', 1)
except ValueError:
msg = 'You have an invalid Authorization header: %s'
self.logger.debug(msg, auth_header)
return self.deny_request('InvalidURI')(environ, start_response)
# NOTE(chmou): This handles the special case with nova when the
# s3_affix_tenant option is set. We force the connection to a
# different account than the one that was authenticated. Before
# anyone worries about security: we still connect with the
# username/token supplied by the user, but instead of its own
# account we direct it to another one. Normally, if that user
# lacks the reseller right this will simply fail, but since the
# reseller account can connect to every account it is allowed
# through by the swift_auth middleware.
force_tenant = None
if ':' in access:
access, force_tenant = access.split(':')
# Authenticate request.
creds = {'credentials': {'access': access,
'token': token,
'signature': signature}}
creds_json = jsonutils.dumps(creds)
self.logger.debug('Connecting to Keystone sending this JSON: %s',
creds_json)
# NOTE(vish): We could save a call to keystone by having
# keystone return token, tenant, user, and roles
# from this call.
#
# NOTE(chmou): We still have the same problem: we would need to
# change token_auth to detect that we have already
# authenticated, skip the second query, and simply
# pass the request through to swiftauth in that case.
try:
resp = self._json_request(creds_json)
except ServiceError as e:
resp = e.args[0]
msg = 'Received error, exiting middleware with error: %s'
self.logger.debug(msg, resp.status_code)
return resp(environ, start_response)
self.logger.debug('Keystone Reply: Status: %d, Output: %s',
resp.status_code, resp.content)
try:
identity_info = resp.json()
token_id = str(identity_info['access']['token']['id'])
tenant = identity_info['access']['token']['tenant']
except (ValueError, KeyError):
error = 'Error on keystone reply: %d %s'
self.logger.debug(error, resp.status_code, resp.content)
return self.deny_request('InvalidURI')(environ, start_response)
req.headers['X-Auth-Token'] = token_id
tenant_to_connect = force_tenant or tenant['id']
self.logger.debug('Connecting with tenant: %s', tenant_to_connect)
new_tenant_name = '%s%s' % (self.reseller_prefix, tenant_to_connect)
environ['PATH_INFO'] = environ['PATH_INFO'].replace(account,
new_tenant_name)
return self.app(environ, start_response)
def filter_factory(global_conf, **local_conf):
"""Returns a WSGI filter app for use with paste.deploy."""
conf = global_conf.copy()
conf.update(local_conf)
def auth_filter(app):
return S3Token(app, conf)
return auth_filter
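# A minimal sketch of how this filter might be wired into a Swift proxy pipeline
# via paste.deploy; the pipeline layout and option names below are illustrative
# assumptions, not taken from this module:
#
#     [pipeline:main]
#     pipeline = catch_errors s3token authtoken keystoneauth proxy-server
#
#     [filter:s3token]
#     paste.filter_factory = keystoneclient.middleware.s3_token:filter_factory
#     auth_host = keystone.example.com
#     auth_port = 35357
#     auth_protocol = https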
|
alexpilotti/python-keystoneclient
|
keystoneclient/middleware/s3_token.py
|
Python
|
apache-2.0
| 10,573 | 0 |
# coding: utf-8
from __future__ import absolute_import
from datetime import date, datetime # noqa: F401
from typing import List, Dict # noqa: F401
from tapi_server.models.base_model_ import Model
from tapi_server.models.tapi_oam_meg_ref import TapiOamMegRef # noqa: F401,E501
from tapi_server import util
class TapiOamMipRef(Model):
"""NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
Do not edit the class manually.
"""
def __init__(self, meg_uuid=None, mip_local_id=None): # noqa: E501
"""TapiOamMipRef - a model defined in OpenAPI
:param meg_uuid: The meg_uuid of this TapiOamMipRef. # noqa: E501
:type meg_uuid: str
:param mip_local_id: The mip_local_id of this TapiOamMipRef. # noqa: E501
:type mip_local_id: str
"""
self.openapi_types = {
'meg_uuid': str,
'mip_local_id': str
}
self.attribute_map = {
'meg_uuid': 'meg-uuid',
'mip_local_id': 'mip-local-id'
}
self._meg_uuid = meg_uuid
self._mip_local_id = mip_local_id
@classmethod
def from_dict(cls, dikt) -> 'TapiOamMipRef':
"""Returns the dict as a model
:param dikt: A dict.
:type: dict
:return: The tapi.oam.MipRef of this TapiOamMipRef. # noqa: E501
:rtype: TapiOamMipRef
"""
return util.deserialize_model(dikt, cls)
@property
def meg_uuid(self):
"""Gets the meg_uuid of this TapiOamMipRef.
none # noqa: E501
:return: The meg_uuid of this TapiOamMipRef.
:rtype: str
"""
return self._meg_uuid
@meg_uuid.setter
def meg_uuid(self, meg_uuid):
"""Sets the meg_uuid of this TapiOamMipRef.
none # noqa: E501
:param meg_uuid: The meg_uuid of this TapiOamMipRef.
:type meg_uuid: str
"""
self._meg_uuid = meg_uuid
@property
def mip_local_id(self):
"""Gets the mip_local_id of this TapiOamMipRef.
none # noqa: E501
:return: The mip_local_id of this TapiOamMipRef.
:rtype: str
"""
return self._mip_local_id
@mip_local_id.setter
def mip_local_id(self, mip_local_id):
"""Sets the mip_local_id of this TapiOamMipRef.
none # noqa: E501
:param mip_local_id: The mip_local_id of this TapiOamMipRef.
:type mip_local_id: str
"""
self._mip_local_id = mip_local_id
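# A minimal usage sketch (assumed, not part of the generated model): build an
# instance from a plain dict keyed by the names in attribute_map, assuming
# util.deserialize_model honours that mapping.
#
#     ref = TapiOamMipRef.from_dict({'meg-uuid': 'meg-0001', 'mip-local-id': 'mip-1'})
#     print(ref.meg_uuid, ref.mip_local_id)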
|
karthik-sethuraman/ONFOpenTransport
|
RI/flask_server/tapi_server/models/tapi_oam_mip_ref.py
|
Python
|
apache-2.0
| 2,530 | 0.000395 |
#!/usr/bin/python
import yaml
import pprint
import os
import pdb
import re
import cgi
import codecs
import sys
import cgitb
cgitb.enable()
if (sys.stdout.encoding is None):
print >> sys.stderr, "please set python env PYTHONIOENCODING=UTF-8, example: export PYTHONIOENCODING=UTF-8, when write to stdout."
exit(1)
specsFile = open('../source/syntax.yml')
specs = yaml.safe_load(specsFile)
specsFile.close()
htmlTest = re.compile("(?i)<\/?\w+((\s+\w+(\s*=\s*(?:\".*?\"|'.*?'|[^'\">\s]+))?)+\s*|\s*)\/?>")
def paragraphy(text):
if htmlTest.match(text):
return text
else:
return "\t\t<p>" + cgi.escape(text).strip() + "\n\t\t</p>\n"
def loadExample(example):
if ("annotation" in example):
result = paragraphy(example["annotation"])
else:
result = ""
result = result + "\t\t<div class=\"code2\">\n\t\t\t<p>Example</p>\n\t\t\t<pre>\n"
if type(example) is str or type(example) is unicode:
result = result + cgi.escape(example).strip()
elif type(example) is dict:
if ("example" in example):
result += cgi.escape(example["example"]).strip()
else:
raise ValueError("every entry must contain an example element")
else:
raise ValueError("unrecognized type for example data")
result = result + "\n</pre>\n\t\t</div>"
return result
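# Illustrative sketch of the shape of a syntax.yml entry this script consumes;
# the entry name and values below are invented, only the keys (doc, syntax,
# assoc, example/examples, notes, see) come from the handling code:
#
#     STATEMENT:
#         doc: A single statement.
#         syntax: EXPRESSION ";"
#         example:
#             annotation: Assign five to x.
#             example: x <- 5;
#         see: [EXPRESSION]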
indexPageContents = "<meta charset='utf-8'/>\n<meta name=\"viewport\" content=\"width=device-width, initial-scale=0.6\">\n<html>\n\t<head>\n\t\t<title>Syntax Listing - Plange</title>\n\t\t<link rel=StyleSheet href='../css/general.css' type='text/css' />\n\t</head>\n\t<body>\n\t\t<?php require('../header.php') ?>\n\n\n\t\t<p>This page is generated from the <a href='/source/syntax.yml'>syntax specification</a>. Tags, identities, and all {IC} (optional whitespace and comment regions) are filtered from the syntax display to improve readability. See the specification for the full grammar specification. The root production of the grammar is \"STATEMENT_SCOPE\".</p>\n\t\t<h2>Subpage Listing</h2>\n\t\t<table>\n"
names = specs.keys()
names.sort()
regexs = {name: re.compile("\\b" + name + "\\b") for name in names}
stripRegex = re.compile("(\\$)|(%[_\w0-9]+)|(\\{IC\\})")
openParenSpaceRegex = re.compile("\\( ")
openBraceSpaceRegex = re.compile("\\{ ")
openBracketSpaceREgex = re.compile("\\[ ")
spaceCloseParenRegex = re.compile(" \\)")
spaceCloseBraceRegex = re.compile(" \\}")
spaceCloseBracketRegex = re.compile(" \\]")
def simplifySyntaxString(syntax):
syntax = cgi.escape(syntax).strip()
syntax = stripRegex.sub("", syntax)
syntax = openParenSpaceRegex.sub("(", syntax)
syntax = openBraceSpaceRegex.sub("{", syntax)
syntax = openBracketSpaceREgex.sub("[", syntax)
syntax = spaceCloseParenRegex.sub(")", syntax)
syntax = spaceCloseBraceRegex.sub("}", syntax)
syntax = spaceCloseBracketRegex.sub("]", syntax)
return syntax
def simplifySyntaxStringAddAnchors(syntax):
syntax = simplifySyntaxString(syntax)
for refName in names:
if refName == name:
continue
syntax = regexs[refName].sub("<a href=\"/documentation/syntax.php#" + refName + "\">" + refName + "</a>", syntax)
return syntax
def simplifySyntaxStringAddLinks(syntax):
syntax = simplifySyntaxString(syntax)
for refName in names:
if refName == name:
continue
syntax = regexs[refName].sub("<a href=\"/documentation/syntax.php?name=" + refName + "\">" + refName + "</a>", syntax)
return syntax
if len(sys.argv) == 1: # output the table for the syntax listing page
print "\n\t\t<table>\n"
for name in names:
details = specs[name]
syntaxString = simplifySyntaxStringAddAnchors(details["syntax"])
print "\t\t\t<tr>\n"
print "\t\t\t\t<td><a id=\"" + name + "\" href=\"/documentation/syntax.php?name=" + name + "\">" + name + "</a></td>\n"
if "doc" in details:
print "\t\t\t\t<td>" + details["doc"].strip() + "</td>\n"
else:
print "\t\t\t\t<td>no doc string</td>\n"
print "\t\t\t\t<td>" + syntaxString + "</td>\n"
print "\t\t\t</tr>\n"
print "\t\t</table>\n"
else:
name = sys.argv[1]
details = specs[name]
if "doc" in details:
print "\t\t<p>" + details["doc"].strip() + "</p>\n\n"
if "syntax" in details:
syntaxString = simplifySyntaxStringAddLinks(details["syntax"])
title = "syntax"
if "assoc" in details:
title = title + " (associativity: " + details["assoc"] + ")"
print "\t\t<div class=\"syntax\">\n\t\t\t<p>" + title + "</p>\n\t\t\t<div>" + syntaxString + "</div>\n\t\t</div>\n"
else:
raise ValueError("every entry must contain a syntax element")
if "example" in details:
print loadExample(details["example"])
if "examples" in details:
for example in details["examples"]:
print loadExample(example)
if "notes" in details:
print "\t\t<h2>Notes</h2>\n\t\t" + paragraphy(details["notes"])
if "see" in details:
print "\t\t<p>See:"
for i in details["see"]:
print " <a href=\"syntax.php?name=" + i + "\">" + i + "</a>"
print "\n\t\t</p>\n"
|
coder0xff/Plange
|
documentation/syntax-cgi.py
|
Python
|
bsd-3-clause
| 5,936 | 0.012298 |
# Lint as: python3
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Documentation control decorators."""
from typing import Iterable, Optional, TypeVar
T = TypeVar("T")
_DEPRECATED = "_tf_docs_deprecated"
def set_deprecated(obj: T) -> T:
"""Explicitly tag an object as deprecated for the doc generator."""
setattr(obj, _DEPRECATED, None)
return obj
def is_deprecated(obj) -> bool:
return hasattr(obj, _DEPRECATED)
_INHERITABLE_HEADER = "_tf_docs_inheritable_header"
def inheritable_header(text: str):
def _wrapped(cls):
setattr(cls, _INHERITABLE_HEADER, text)
return cls
return _wrapped
def get_inheritable_header(cls) -> Optional[str]:
return getattr(cls, _INHERITABLE_HEADER, None)
_NO_SEARCH_HINTS = "_tf_docs_no_search_hints"
def hide_from_search(obj: T) -> T:
"""Marks an object so metadata search hints will not be included on it's page.
The page is set to "noindex" to hide it from search.
Note: This only makes sense to apply to functions, classes and modules.
Constants, and methods do not get their own pages.
Args:
obj: the object to hide.
Returns:
The object.
"""
setattr(obj, _NO_SEARCH_HINTS, None)
return obj
def should_hide_from_search(obj) -> bool:
"""Returns true if metadata search hints should not be included."""
return hasattr(obj, _NO_SEARCH_HINTS)
_CUSTOM_PAGE_BUILDER_CLS = "_tf_docs_custom_page_builder_cls"
def set_custom_page_builder_cls(obj, cls):
"""Replace most of the generated page with custom content."""
setattr(obj, _CUSTOM_PAGE_BUILDER_CLS, cls)
def get_custom_page_builder_cls(obj):
"""Gets custom page content if available."""
return getattr(obj, _CUSTOM_PAGE_BUILDER_CLS, None)
_DO_NOT_DOC = "_tf_docs_do_not_document"
def do_not_generate_docs(obj: T) -> T:
"""A decorator: Do not generate docs for this object.
For example the following classes:
```
class Parent(object):
def method1(self):
pass
def method2(self):
pass
class Child(Parent):
def method1(self):
pass
def method2(self):
pass
```
Produce the following api_docs:
```
/Parent.md
# method1
# method2
/Child.md
# method1
# method2
```
This decorator allows you to skip classes or methods:
```
@do_not_generate_docs
class Parent(object):
def method1(self):
pass
def method2(self):
pass
class Child(Parent):
@do_not_generate_docs
def method1(self):
pass
def method2(self):
pass
```
This will only produce the following docs:
```
/Child.md
# method2
```
Note: This is implemented by adding a hidden attribute on the object, so it
cannot be used on objects which do not allow new attributes to be added. So
this decorator must go *below* `@property`, `@classmethod`,
or `@staticmethod`:
```
class Example(object):
@property
@do_not_generate_docs
def x(self):
return self._x
```
Args:
obj: The object to hide from the generated docs.
Returns:
obj
"""
setattr(obj, _DO_NOT_DOC, None)
return obj
_DO_NOT_DOC_INHERITABLE = "_tf_docs_do_not_doc_inheritable"
def do_not_doc_inheritable(obj: T) -> T:
"""A decorator: Do not generate docs for this method.
This version of the decorator is "inherited" by subclasses. No docs will be
  generated for the decorated method in any subclass, even if the sub-class
  overrides the method.
For example, to ensure that `method1` is **never documented** use this
decorator on the base-class:
```
class Parent(object):
@do_not_doc_inheritable
def method1(self):
pass
def method2(self):
pass
class Child(Parent):
def method1(self):
pass
def method2(self):
pass
```
This will produce the following docs:
```
/Parent.md
# method2
/Child.md
# method2
```
  When generating docs for a class's attributes, the `__mro__` is searched and
the attribute will be skipped if this decorator is detected on the attribute
on any class in the `__mro__`.
Note: This is implemented by adding a hidden attribute on the object, so it
cannot be used on objects which do not allow new attributes to be added. So
this decorator must go *below* `@property`, `@classmethod`,
or `@staticmethod`:
```
class Example(object):
@property
@do_not_doc_inheritable
def x(self):
return self._x
```
Args:
obj: The class-attribute to hide from the generated docs.
Returns:
obj
"""
setattr(obj, _DO_NOT_DOC_INHERITABLE, None)
return obj
_FOR_SUBCLASS_IMPLEMENTERS = "_tf_docs_tools_for_subclass_implementers"
def for_subclass_implementers(obj: T) -> T:
"""A decorator: Only generate docs for this method in the defining class.
  Also group this method's docs with any `@abstractmethod` in the class's docs.
  No docs will be generated for this class attribute in sub-classes.
The canonical use case for this is `tf.keras.layers.Layer.call`: It's a
public method, essential for anyone implementing a subclass, but it should
never be called directly.
Works on method, or other class-attributes.
  When generating docs for a class's attributes, the `__mro__` is searched and
the attribute will be skipped if this decorator is detected on the attribute
on any **parent** class in the `__mro__`.
For example:
```
class Parent(object):
@for_subclass_implementers
def method1(self):
pass
def method2(self):
pass
class Child1(Parent):
def method1(self):
pass
def method2(self):
pass
class Child2(Parent):
def method1(self):
pass
def method2(self):
pass
```
This will produce the following docs:
```
/Parent.md
# method1
# method2
/Child1.md
# method2
/Child2.md
# method2
```
Note: This is implemented by adding a hidden attribute on the object, so it
cannot be used on objects which do not allow new attributes to be added. So
this decorator must go *below* `@property`, `@classmethod`,
or `@staticmethod`:
```
class Example(object):
@property
@for_subclass_implementers
def x(self):
return self._x
```
Args:
obj: The class-attribute to hide from the generated docs.
Returns:
obj
"""
setattr(obj, _FOR_SUBCLASS_IMPLEMENTERS, None)
return obj
do_not_doc_in_subclasses = for_subclass_implementers
_DOC_PRIVATE = "_tf_docs_doc_private"
def doc_private(obj: T) -> T:
"""A decorator: Generates docs for private methods/functions.
For example:
```
class Try:
@doc_controls.doc_private
def _private(self):
...
```
  As a rule of thumb, private (beginning with `_`) methods/functions are
  not documented.
  This decorator allows you to force-document a private method/function.
Args:
obj: The class-attribute to hide from the generated docs.
Returns:
obj
"""
setattr(obj, _DOC_PRIVATE, None)
return obj
def should_doc_private(obj) -> bool:
return hasattr(obj, _DOC_PRIVATE)
_DOC_IN_CURRENT_AND_SUBCLASSES = "_tf_docs_doc_in_current_and_subclasses"
def doc_in_current_and_subclasses(obj: T) -> T:
"""Overrides `do_not_doc_in_subclasses` decorator.
If this decorator is set on a child class's method whose parent's method
  contains `do_not_doc_in_subclasses`, then that will be overridden and the
  child method will get documented. All classes inheriting from the child will
also document that method.
For example:
```
class Parent:
@do_not_doc_in_subclasses
def method1(self):
pass
def method2(self):
pass
class Child1(Parent):
@doc_in_current_and_subclasses
def method1(self):
pass
def method2(self):
pass
class Child2(Parent):
def method1(self):
pass
def method2(self):
pass
class Child11(Child1):
pass
```
This will produce the following docs:
```
/Parent.md
# method1
# method2
/Child1.md
# method1
# method2
/Child2.md
# method2
/Child11.md
# method1
# method2
```
Args:
obj: The class-attribute to hide from the generated docs.
Returns:
obj
"""
setattr(obj, _DOC_IN_CURRENT_AND_SUBCLASSES, None)
return obj
def should_skip(obj) -> bool:
"""Returns true if docs generation should be skipped for this object.
Checks for the `do_not_generate_docs` or `do_not_doc_inheritable` decorators.
Args:
obj: The object to document, or skip.
Returns:
True if the object should be skipped
"""
if isinstance(obj, type):
# For classes, only skip if the attribute is set on _this_ class.
if _DO_NOT_DOC in obj.__dict__:
return True
else:
return False
# Unwrap fget if the object is a property
if isinstance(obj, property):
obj = obj.fget
return hasattr(obj, _DO_NOT_DOC) or hasattr(obj, _DO_NOT_DOC_INHERITABLE)
def _unwrap_func(obj):
# Unwrap fget if the object is a property or static method or classmethod.
if isinstance(obj, property):
return obj.fget
if isinstance(obj, (classmethod, staticmethod)):
return obj.__func__
return obj
def _cls_attr_has_tag(cls, attr, tag):
"""Check if a class attribute `attr` is decorated with `dec`."""
# Use __dict__, it doesn't go up the __mro__ like getattr.
obj = cls.__dict__.get(attr, None)
if obj is None:
return False
obj = _unwrap_func(obj)
if isinstance(obj, type):
# The attribute is a class. Check __dict__ to see if the attribute is set
# on _this_ class, not its parents.
if tag in obj.__dict__:
return True
else:
return False
return hasattr(obj, tag)
def should_skip_class_attr(cls, name):
"""Returns true if docs should be skipped for this class attribute.
Args:
cls: The class the attribute belongs to.
name: The name of the attribute.
Returns:
True if the attribute should be skipped.
"""
# Get the object with standard lookup, from the nearest
# defining parent.
try:
obj = getattr(cls, name)
except AttributeError:
# This can fail for a variety of reasons. Always skip if `getattr` fails.
return True
# Unwrap fget if the object is a property
obj = _unwrap_func(obj)
# Skip if the object is decorated with `do_not_generate_docs` or
# `do_not_doc_inheritable`
if should_skip(obj):
return True
classes = getattr(cls, "__mro__", [cls])
# Find where all the decorators turn docs on and off.
# All these lists contain `(level, skip)` pairs.
for_subclass_levels = [
# The [1:] is because `for_subclass_implementers` turns off docs
# one level down (and you don't want to consider level -1).
(i, True)
for (i, mro_cls) in enumerate(classes[1:])
if _cls_attr_has_tag(mro_cls, name, _FOR_SUBCLASS_IMPLEMENTERS)
]
not_below_levels = [
(i, True)
for (i, mro_cls) in enumerate(classes)
if _cls_attr_has_tag(mro_cls, name, _DO_NOT_DOC_INHERITABLE)
]
doc_below_levels = [
(i, False)
for (i, mro_cls) in enumerate(classes)
if _cls_attr_has_tag(mro_cls, name, _DOC_IN_CURRENT_AND_SUBCLASSES)
]
all_levels = not_below_levels + for_subclass_levels + doc_below_levels
if all_levels:
# Find the lowest `(level, skip)` pair, and return `skip`
return min(all_levels)[1]
# No decorators --> don't skip
return False
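# Worked example of the level resolution above (classes are hypothetical): if
# Parent.method is tagged with `for_subclass_implementers` and Child overrides
# it, Parent contributes a (0, True) entry via the classes[1:] enumeration, so
# the attribute is skipped for Child; if Child also applies
# `doc_in_current_and_subclasses`, its (0, False) entry wins the min() and the
# method is documented again.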
def decorate_all_class_attributes(decorator, cls, skip: Iterable[str]):
"""Applies `decorator` to every attribute defined in `cls`.
Args:
decorator: The decorator to apply.
cls: The class to apply the decorator to.
skip: A collection of attribute names that the decorator should not be
aplied to.
"""
skip = frozenset(skip)
class_contents = list(cls.__dict__.items())
for name, obj in class_contents:
if name in skip:
continue
# Otherwise, exclude from documentation.
if isinstance(obj, property):
obj = obj.fget
if isinstance(obj, (staticmethod, classmethod)):
obj = obj.__func__
try:
decorator(obj)
except AttributeError:
pass
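# A minimal usage sketch (the class and skip list are hypothetical): hide every
# attribute a class defines from the generated docs except the ones in `skip`.
#
#     class Internal:
#         def helper(self): ...
#         def keep_me(self): ...
#
#     decorate_all_class_attributes(do_not_generate_docs, Internal, skip=["keep_me"])
#     assert should_skip(Internal.helper) and not should_skip(Internal.keep_me)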
|
tensorflow/docs
|
tools/tensorflow_docs/api_generator/doc_controls.py
|
Python
|
apache-2.0
| 12,723 | 0.006445 |
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
from structure import __version__
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
# the setup
setup(
name='structure',
version=__version__,
    description='A demonstration of PyPI.',
# long_description=read('README'),
url='https://github.com/kengz/structure',
author='kengz',
author_email='kengzwl@gmail.com',
license='MIT',
keywords='example pypi tutorial',
packages=find_packages(exclude=('docs', 'tests', 'env', 'index.py')),
include_package_data=True,
install_requires=[
],
extras_require={
'dev': [],
'docs': [],
'testing': [],
},
classifiers=[],
)
|
kengz/python-structure
|
setup.py
|
Python
|
mit
| 976 | 0.004098 |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Local File system implementation for accessing files on disk."""
from __future__ import absolute_import
import glob
import os
import shutil
from apache_beam.io.filesystem import BeamIOError
from apache_beam.io.filesystem import CompressedFile
from apache_beam.io.filesystem import CompressionTypes
from apache_beam.io.filesystem import FileMetadata
from apache_beam.io.filesystem import FileSystem
from apache_beam.io.filesystem import MatchResult
class LocalFileSystem(FileSystem):
"""A Local ``FileSystem`` implementation for accessing files on disk.
"""
def mkdirs(self, path):
"""Recursively create directories for the provided path.
Args:
path: string path of the directory structure that should be created
Raises:
IOError if leaf directory already exists.
"""
try:
os.makedirs(path)
except OSError as err:
raise IOError(err)
def match(self, patterns, limits=None):
"""Find all matching paths to the pattern provided.
Args:
patterns: list of string for the file path pattern to match against
limits: list of maximum number of responses that need to be fetched
Returns: list of ``MatchResult`` objects.
Raises:
``BeamIOError`` if any of the pattern match operations fail
"""
if limits is None:
limits = [None] * len(patterns)
else:
err_msg = "Patterns and limits should be equal in length"
assert len(patterns) == len(limits), err_msg
def _match(pattern, limit):
"""Find all matching paths to the pattern provided.
"""
files = glob.glob(pattern)
metadata = [FileMetadata(f, os.path.getsize(f)) for f in files[:limit]]
return MatchResult(pattern, metadata)
exceptions = {}
result = []
for pattern, limit in zip(patterns, limits):
try:
result.append(_match(pattern, limit))
except Exception as e: # pylint: disable=broad-except
exceptions[pattern] = e
if exceptions:
raise BeamIOError("Match operation failed", exceptions)
return result
def _path_open(self, path, mode, mime_type='application/octet-stream',
compression_type=CompressionTypes.AUTO):
"""Helper functions to open a file in the provided mode.
"""
compression_type = FileSystem._get_compression_type(path, compression_type)
raw_file = open(path, mode)
if compression_type == CompressionTypes.UNCOMPRESSED:
return raw_file
else:
return CompressedFile(raw_file, compression_type=compression_type)
def create(self, path, mime_type='application/octet-stream',
compression_type=CompressionTypes.AUTO):
"""Returns a write channel for the given file path.
Args:
path: string path of the file object to be written to the system
mime_type: MIME type to specify the type of content in the file object
compression_type: Type of compression to be used for this object
Returns: file handle with a close function for the user to use
"""
return self._path_open(path, 'wb', mime_type, compression_type)
def open(self, path, mime_type='application/octet-stream',
compression_type=CompressionTypes.AUTO):
"""Returns a read channel for the given file path.
Args:
path: string path of the file object to be written to the system
mime_type: MIME type to specify the type of content in the file object
compression_type: Type of compression to be used for this object
Returns: file handle with a close function for the user to use
"""
return self._path_open(path, 'rb', mime_type, compression_type)
def copy(self, source_file_names, destination_file_names):
"""Recursively copy the file tree from the source to the destination
Args:
source_file_names: list of source file objects that needs to be copied
destination_file_names: list of destination of the new object
Raises:
``BeamIOError`` if any of the copy operations fail
"""
err_msg = ("source_file_names and destination_file_names should "
"be equal in length")
assert len(source_file_names) == len(destination_file_names), err_msg
def _copy_path(source, destination):
"""Recursively copy the file tree from the source to the destination
"""
try:
if os.path.exists(destination):
if os.path.isdir(destination):
shutil.rmtree(destination)
else:
os.remove(destination)
if os.path.isdir(source):
shutil.copytree(source, destination)
else:
shutil.copy2(source, destination)
except OSError as err:
raise IOError(err)
exceptions = {}
for source, destination in zip(source_file_names, destination_file_names):
try:
_copy_path(source, destination)
except Exception as e: # pylint: disable=broad-except
exceptions[(source, destination)] = e
if exceptions:
raise BeamIOError("Copy operation failed", exceptions)
def rename(self, source_file_names, destination_file_names):
"""Rename the files at the source list to the destination list.
Source and destination lists should be of the same size.
Args:
source_file_names: List of file paths that need to be moved
destination_file_names: List of destination_file_names for the files
Raises:
``BeamIOError`` if any of the rename operations fail
"""
err_msg = ("source_file_names and destination_file_names should "
"be equal in length")
assert len(source_file_names) == len(destination_file_names), err_msg
def _rename_file(source, destination):
"""Rename a single file object"""
try:
os.rename(source, destination)
except OSError as err:
raise IOError(err)
exceptions = {}
for source, destination in zip(source_file_names, destination_file_names):
try:
_rename_file(source, destination)
except Exception as e: # pylint: disable=broad-except
exceptions[(source, destination)] = e
if exceptions:
raise BeamIOError("Rename operation failed", exceptions)
def exists(self, path):
"""Check if the provided path exists on the FileSystem.
Args:
path: string path that needs to be checked.
Returns: boolean flag indicating if path exists
"""
return os.path.exists(path)
def delete(self, paths):
"""Deletes files or directories at the provided paths.
Directories will be deleted recursively.
Args:
paths: list of paths that give the file objects to be deleted
Raises:
``BeamIOError`` if any of the delete operations fail
"""
def _delete_path(path):
"""Recursively delete the file or directory at the provided path.
"""
try:
if os.path.isdir(path):
shutil.rmtree(path)
else:
os.remove(path)
except OSError as err:
raise IOError(err)
exceptions = {}
for path in paths:
try:
_delete_path(path)
except Exception as e: # pylint: disable=broad-except
exceptions[path] = e
if exceptions:
raise BeamIOError("Delete operation failed", exceptions)
|
amitsela/incubator-beam
|
sdks/python/apache_beam/io/localfilesystem.py
|
Python
|
apache-2.0
| 8,015 | 0.005989 |
# -*- coding: UTF-8 -*-
# ..#######.########.#######.##....#..######..######.########....###...########.#######.########..######.
# .##.....#.##.....#.##......###...#.##....#.##....#.##.....#...##.##..##.....#.##......##.....#.##....##
# .##.....#.##.....#.##......####..#.##......##......##.....#..##...##.##.....#.##......##.....#.##......
# .##.....#.########.######..##.##.#..######.##......########.##.....#.########.######..########..######.
# .##.....#.##.......##......##..###.......#.##......##...##..########.##.......##......##...##........##
# .##.....#.##.......##......##...##.##....#.##....#.##....##.##.....#.##.......##......##....##.##....##
# ..#######.##.......#######.##....#..######..######.##.....#.##.....#.##.......#######.##.....#..######.
#######################################################################
# ----------------------------------------------------------------------------
# "THE BEER-WARE LICENSE" (Revision 42):
# @Daddy_Blamo wrote this file. As long as you retain this notice you
# can do whatever you want with this stuff. If we meet some day, and you think
# this stuff is worth it, you can buy me a beer in return. - Muad'Dib
# ----------------------------------------------------------------------------
#######################################################################
# Addon Name: Placenta
# Addon id: plugin.video.placenta
# Addon Provider: Mr.Blamo
import base64
import json
import re
import urllib
import urlparse
from openscrapers.modules import cleantitle
from openscrapers.modules import client
from openscrapers.modules import directstream
from openscrapers.modules import dom_parser
from openscrapers.modules import source_utils
class source:
def __init__(self):
self.priority = 1
self.language = ['de']
self.domains = ['tata.to']
self.base_link = 'http://tata.to'
self.search_link = '/filme?suche=%s&type=alle'
self.ajax_link = '/ajax/stream/%s'
def movie(self, imdb, title, localtitle, aliases, year):
try:
url = self.__search_movie(imdb, year)
return url if url else None
except:
return
def tvshow(self, imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year):
try:
url = {'imdb': imdb, 'tvdb': tvdb, 'tvshowtitle': tvshowtitle, 'localtvshowtitle': localtvshowtitle,
'aliases': aliases, 'year': year}
url = urllib.urlencode(url)
return url
except:
return
def episode(self, url, imdb, tvdb, title, premiered, season, episode):
try:
if not url:
return
data = urlparse.parse_qs(url)
data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
tvshowtitle = data['tvshowtitle']
localtvshowtitle = data['localtvshowtitle']
aliases = source_utils.aliases_to_array(eval(data['aliases']))
year = re.findall('(\d{4})', premiered)
year = year[0] if year else data['year']
url = self.__search([localtvshowtitle] + aliases, year, season, episode)
if not url and tvshowtitle != localtvshowtitle:
url = self.__search([tvshowtitle] + aliases, year, season, episode)
return url
except:
return
def sources(self, url, hostDict, hostprDict):
sources = []
try:
if not url:
return sources
ref = urlparse.urljoin(self.base_link, url)
url = urlparse.urljoin(self.base_link, self.ajax_link % re.findall('-(\w+)$', ref)[0])
headers = {'Referer': ref, 'User-Agent': client.randomagent()}
result = client.request(url, headers=headers, post='')
result = base64.decodestring(result)
result = json.loads(result).get('playinfo', [])
if isinstance(result, basestring):
result = result.replace('embed.html', 'index.m3u8')
base_url = re.sub('index\.m3u8\?token=[\w\-]+[^/$]*', '', result)
r = client.request(result, headers=headers)
r = [(i[0], i[1]) for i in
re.findall('#EXT-X-STREAM-INF:.*?RESOLUTION=\d+x(\d+)[^\n]+\n([^\n]+)', r, re.DOTALL) if i]
r = [(source_utils.label_to_quality(i[0]), i[1] + source_utils.append_headers(headers)) for i in r]
r = [{'quality': i[0], 'url': base_url + i[1]} for i in r]
for i in r: sources.append(
{'source': 'CDN', 'quality': i['quality'], 'language': 'de', 'url': i['url'], 'direct': True,
'debridonly': False})
elif result:
result = [i.get('link_mp4') for i in result]
result = [i for i in result if i]
for i in result:
try:
sources.append(
{'source': 'gvideo', 'quality': directstream.googletag(i)[0]['quality'], 'language': 'de',
'url': i, 'direct': True, 'debridonly': False})
except:
pass
return sources
except:
return
def resolve(self, url):
return url
def __search_movie(self, imdb, year):
try:
query = urlparse.urljoin(self.base_link, self.search_link % imdb)
y = ['%s' % str(year), '%s' % str(int(year) + 1), '%s' % str(int(year) - 1), '0']
r = client.request(query)
r = dom_parser.parse_dom(r, 'div', attrs={'class': 'container'})
r = dom_parser.parse_dom(r, 'div', attrs={'class': 'ml-item-content'})
r = [(dom_parser.parse_dom(i, 'a', attrs={'class': 'ml-image'}, req='href'),
dom_parser.parse_dom(i, 'ul', attrs={'class': 'item-params'})) for i in r]
r = [(i[0][0].attrs['href'], re.findall('calendar.+?>.+?(\d{4})', ''.join([x.content for x in i[1]]))) for i
in r if i[0] and i[1]]
r = [(i[0], i[1][0] if len(i[1]) > 0 else '0') for i in r]
r = sorted(r, key=lambda i: int(i[1]), reverse=True) # with year > no year
r = [i[0] for i in r if i[1] in y][0]
return source_utils.strip_domain(r)
except:
return
def __search(self, titles, year, season=0, episode=False):
try:
query = self.search_link % (urllib.quote_plus(cleantitle.query(titles[0])))
query = urlparse.urljoin(self.base_link, query)
t = [cleantitle.get(i) for i in set(titles) if i]
y = ['%s' % str(year), '%s' % str(int(year) + 1), '%s' % str(int(year) - 1), '0']
r = client.request(query)
r = dom_parser.parse_dom(r, 'div', attrs={'class': 'container'})
r = dom_parser.parse_dom(r, 'div', attrs={'class': 'ml-item-content'})
f = []
for i in r:
_url = dom_parser.parse_dom(i, 'a', attrs={'class': 'ml-image'}, req='href')[0].attrs['href']
_title = re.sub('<.+?>|</.+?>', '', dom_parser.parse_dom(i, 'h6')[0].content).strip()
try:
_title = re.search('(.*?)\s(?:staf+el|s)\s*(\d+)', _title, re.I).group(1)
except:
pass
_season = '0'
_year = re.findall('calendar.+?>.+?(\d{4})', ''.join(
[x.content for x in dom_parser.parse_dom(i, 'ul', attrs={'class': 'item-params'})]))
_year = _year[0] if len(_year) > 0 else '0'
if season > 0:
s = dom_parser.parse_dom(i, 'span', attrs={'class': 'season-label'})
s = dom_parser.parse_dom(s, 'span', attrs={'class': 'el-num'})
if s: _season = s[0].content.strip()
if cleantitle.get(_title) in t and _year in y and int(_season) == int(season):
f.append((_url, _year))
r = f
r = sorted(r, key=lambda i: int(i[1]), reverse=True) # with year > no year
r = [i[0] for i in r if r[0]][0]
url = source_utils.strip_domain(r)
if episode:
r = client.request(urlparse.urljoin(self.base_link, url))
r = dom_parser.parse_dom(r, 'div', attrs={'class': 'season-list'})
r = dom_parser.parse_dom(r, 'li')
r = dom_parser.parse_dom(r, 'a', req='href')
r = [(i.attrs['href'], i.content) for i in r]
r = [i[0] for i in r if i[1] and int(i[1]) == int(episode)][0]
url = source_utils.strip_domain(r)
return url
except:
return
|
repotvsupertuga/tvsupertuga.repository
|
script.module.openscrapers/lib/openscrapers/sources_openscrapers/de/tata.py
|
Python
|
gpl-2.0
| 8,804 | 0.005793 |
"""Support for the Hive devices."""
import logging
from pyhiveapi import Pyhiveapi
import voluptuous as vol
from homeassistant.const import (
CONF_PASSWORD, CONF_SCAN_INTERVAL, CONF_USERNAME)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.discovery import load_platform
_LOGGER = logging.getLogger(__name__)
DOMAIN = 'hive'
DATA_HIVE = 'data_hive'
DEVICETYPES = {
'binary_sensor': 'device_list_binary_sensor',
'climate': 'device_list_climate',
'water_heater': 'device_list_water_heater',
'light': 'device_list_light',
'switch': 'device_list_plug',
'sensor': 'device_list_sensor',
}
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.Schema({
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Optional(CONF_SCAN_INTERVAL, default=2): cv.positive_int,
})
}, extra=vol.ALLOW_EXTRA)
class HiveSession:
"""Initiate Hive Session Class."""
entities = []
core = None
heating = None
hotwater = None
light = None
sensor = None
switch = None
weather = None
attributes = None
def setup(hass, config):
"""Set up the Hive Component."""
session = HiveSession()
session.core = Pyhiveapi()
username = config[DOMAIN][CONF_USERNAME]
password = config[DOMAIN][CONF_PASSWORD]
update_interval = config[DOMAIN][CONF_SCAN_INTERVAL]
devicelist = session.core.initialise_api(
username, password, update_interval)
if devicelist is None:
_LOGGER.error("Hive API initialization failed")
return False
session.sensor = Pyhiveapi.Sensor()
session.heating = Pyhiveapi.Heating()
session.hotwater = Pyhiveapi.Hotwater()
session.light = Pyhiveapi.Light()
session.switch = Pyhiveapi.Switch()
session.weather = Pyhiveapi.Weather()
session.attributes = Pyhiveapi.Attributes()
hass.data[DATA_HIVE] = session
for ha_type, hive_type in DEVICETYPES.items():
for key, devices in devicelist.items():
if key == hive_type:
for hivedevice in devices:
load_platform(hass, ha_type, DOMAIN, hivedevice, config)
return True
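# A minimal configuration sketch for this component (values are placeholders);
# the YAML keys mirror CONFIG_SCHEMA above:
#
#     hive:
#       username: someone@example.com
#       password: !secret hive_password
#       scan_interval: 2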
|
jabesq/home-assistant
|
homeassistant/components/hive/__init__.py
|
Python
|
apache-2.0
| 2,196 | 0 |
import pickle
from deap import tools
from stats import record
logbook = tools.Logbook()
logbook.record(gen=0, evals=30, **record)
print(logbook)
gen, avg = logbook.select("gen", "avg")
pickle.dump(logbook, open("logbook.pkl", "w"))
# Cleaning the pickle file ...
import os
os.remove("logbook.pkl")
logbook.header = "gen", "avg", "spam"
print(logbook)
print(logbook.stream)
logbook.record(gen=1, evals=15, **record)
print(logbook.stream)
from multistats import record
logbook = tools.Logbook()
logbook.record(gen=0, evals=30, **record)
logbook.header = "gen", "evals", "fitness", "size"
logbook.chapters["fitness"].header = "min", "avg", "max"
logbook.chapters["size"].header = "min", "avg", "max"
print(logbook)
gen = logbook.select("gen")
fit_mins = logbook.chapters["fitness"].select("min")
size_avgs = logbook.chapters["size"].select("avg")
import matplotlib.pyplot as plt
fig, ax1 = plt.subplots()
line1 = ax1.plot(gen, fit_mins, "b-", label="Minimum Fitness")
ax1.set_xlabel("Generation")
ax1.set_ylabel("Fitness", color="b")
for tl in ax1.get_yticklabels():
tl.set_color("b")
ax2 = ax1.twinx()
line2 = ax2.plot(gen, size_avgs, "r-", label="Average Size")
ax2.set_ylabel("Size", color="r")
for tl in ax2.get_yticklabels():
tl.set_color("r")
lns = line1 + line2
labs = [l.get_label() for l in lns]
ax1.legend(lns, labs, loc="center right")
plt.show()
|
marcioweck/PSSLib
|
reference/deap/doc/code/tutorials/part_3/logbook.py
|
Python
|
lgpl-3.0
| 1,381 | 0.003621 |
# Created By: Virgil Dupras
# Created On: 2008-08-12
# Copyright 2015 Hardcoded Software (http://www.hardcoded.net)
#
# This software is licensed under the "GPLv3" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.gnu.org/licenses/gpl-3.0.html
from ..testutil import CallLogger, eq_
from ..gui.table import Table, GUITable, Row
class TestRow(Row):
def __init__(self, table, index, is_new=False):
Row.__init__(self, table)
self.is_new = is_new
self._index = index
def load(self):
pass
def save(self):
self.is_new = False
@property
def index(self):
return self._index
class TestGUITable(GUITable):
def __init__(self, rowcount):
GUITable.__init__(self)
self.view = CallLogger()
self.rowcount = rowcount
self.updated_rows = None
def _do_add(self):
return TestRow(self, len(self), is_new=True), len(self)
def _is_edited_new(self):
return self.edited is not None and self.edited.is_new
def _fill(self):
for i in range(self.rowcount):
self.append(TestRow(self, i))
def _update_selection(self):
self.updated_rows = self.selected_rows[:]
def table_with_footer():
table = Table()
table.append(TestRow(table, 0))
footer = TestRow(table, 1)
table.footer = footer
return table, footer
def table_with_header():
table = Table()
table.append(TestRow(table, 1))
header = TestRow(table, 0)
table.header = header
return table, header
#--- Tests
def test_allow_edit_when_attr_is_property_with_fset():
# When a row has a property that has a fset, by default, make that cell editable.
class TestRow(Row):
@property
def foo(self):
pass
@property
def bar(self):
pass
@bar.setter
def bar(self, value):
pass
row = TestRow(Table())
assert row.can_edit_cell('bar')
assert not row.can_edit_cell('foo')
assert not row.can_edit_cell('baz') # doesn't exist, can't edit
def test_can_edit_prop_has_priority_over_fset_checks():
    # When a row has a can_edit_* property, it's the result of that property that is used, not the
# result of a fset check.
class TestRow(Row):
@property
def bar(self):
pass
@bar.setter
def bar(self, value):
pass
can_edit_bar = False
row = TestRow(Table())
assert not row.can_edit_cell('bar')
def test_in():
# When a table is in a list, doing "in list" with another instance returns false, even if
# they're the same as lists.
table = Table()
some_list = [table]
assert Table() not in some_list
def test_footer_del_all():
# Removing all rows doesn't crash when doing the footer check.
table, footer = table_with_footer()
del table[:]
assert table.footer is None
def test_footer_del_row():
# Removing the footer row sets it to None
table, footer = table_with_footer()
del table[-1]
assert table.footer is None
eq_(len(table), 1)
def test_footer_is_appened_to_table():
# A footer is appended at the table's bottom
table, footer = table_with_footer()
eq_(len(table), 2)
assert table[1] is footer
def test_footer_remove():
# remove() on footer sets it to None
table, footer = table_with_footer()
table.remove(footer)
assert table.footer is None
def test_footer_replaces_old_footer():
table, footer = table_with_footer()
other = Row(table)
table.footer = other
assert table.footer is other
eq_(len(table), 2)
assert table[1] is other
def test_footer_rows_and_row_count():
# rows() and row_count() ignore footer.
table, footer = table_with_footer()
eq_(table.row_count, 1)
eq_(table.rows, table[:-1])
def test_footer_setting_to_none_removes_old_one():
table, footer = table_with_footer()
table.footer = None
assert table.footer is None
eq_(len(table), 1)
def test_footer_stays_there_on_append():
# Appending another row puts it above the footer
table, footer = table_with_footer()
table.append(Row(table))
eq_(len(table), 3)
assert table[2] is footer
def test_footer_stays_there_on_insert():
# Inserting another row puts it above the footer
table, footer = table_with_footer()
table.insert(3, Row(table))
eq_(len(table), 3)
assert table[2] is footer
def test_header_del_all():
# Removing all rows doesn't crash when doing the header check.
table, header = table_with_header()
del table[:]
assert table.header is None
def test_header_del_row():
# Removing the header row sets it to None
table, header = table_with_header()
del table[0]
assert table.header is None
eq_(len(table), 1)
def test_header_is_inserted_in_table():
# A header is inserted at the table's top
table, header = table_with_header()
eq_(len(table), 2)
assert table[0] is header
def test_header_remove():
# remove() on header sets it to None
table, header = table_with_header()
table.remove(header)
assert table.header is None
def test_header_replaces_old_header():
table, header = table_with_header()
other = Row(table)
table.header = other
assert table.header is other
eq_(len(table), 2)
assert table[0] is other
def test_header_rows_and_row_count():
# rows() and row_count() ignore header.
table, header = table_with_header()
eq_(table.row_count, 1)
eq_(table.rows, table[1:])
def test_header_setting_to_none_removes_old_one():
table, header = table_with_header()
table.header = None
assert table.header is None
eq_(len(table), 1)
def test_header_stays_there_on_insert():
# Inserting another row at the top puts it below the header
table, header = table_with_header()
table.insert(0, Row(table))
eq_(len(table), 3)
assert table[0] is header
def test_refresh_view_on_refresh():
# If refresh_view is not False, we refresh the table's view on refresh()
table = TestGUITable(1)
table.refresh()
table.view.check_gui_calls(['refresh'])
table.view.clear_calls()
table.refresh(refresh_view=False)
table.view.check_gui_calls([])
def test_restore_selection():
# By default, after a refresh, selection goes on the last row
table = TestGUITable(10)
table.refresh()
eq_(table.selected_indexes, [9])
def test_restore_selection_after_cancel_edits():
# _restore_selection() is called after cancel_edits(). Previously, only _update_selection would
# be called.
class MyTable(TestGUITable):
def _restore_selection(self, previous_selection):
self.selected_indexes = [6]
table = MyTable(10)
table.refresh()
table.add()
table.cancel_edits()
eq_(table.selected_indexes, [6])
def test_restore_selection_with_previous_selection():
# By default, we try to restore the selection that was there before a refresh
table = TestGUITable(10)
table.refresh()
table.selected_indexes = [2, 4]
table.refresh()
eq_(table.selected_indexes, [2, 4])
def test_restore_selection_custom():
    # After _fill() is called, the virtual _restore_selection() is called so that it's possible for a
# GUITable subclass to customize its post-refresh selection behavior.
class MyTable(TestGUITable):
def _restore_selection(self, previous_selection):
self.selected_indexes = [6]
table = MyTable(10)
table.refresh()
eq_(table.selected_indexes, [6])
def test_row_cell_value():
# *_cell_value() correctly mangles attrnames that are Python reserved words.
row = Row(Table())
row.from_ = 'foo'
eq_(row.get_cell_value('from'), 'foo')
row.set_cell_value('from', 'bar')
eq_(row.get_cell_value('from'), 'bar')
def test_sort_table_also_tries_attributes_without_underscores():
    # When determining a sort key, after having unsuccessfully tried the attribute with the
    # underscore, try the one without it.
table = Table()
row1 = Row(table)
row1._foo = 'a' # underscored attr must be checked first
row1.foo = 'b'
row1.bar = 'c'
row2 = Row(table)
row2._foo = 'b'
row2.foo = 'a'
row2.bar = 'b'
table.append(row1)
table.append(row2)
table.sort_by('foo')
assert table[0] is row1
assert table[1] is row2
table.sort_by('bar')
assert table[0] is row2
assert table[1] is row1
def test_sort_table_updates_selection():
table = TestGUITable(10)
table.refresh()
table.select([2, 4])
table.sort_by('index', desc=True)
# Now, the updated rows should be 7 and 5
eq_(len(table.updated_rows), 2)
r1, r2 = table.updated_rows
eq_(r1.index, 7)
eq_(r2.index, 5)
def test_sort_table_with_footer():
# Sorting a table with a footer keeps it at the bottom
table, footer = table_with_footer()
table.sort_by('index', desc=True)
assert table[-1] is footer
def test_sort_table_with_header():
# Sorting a table with a header keeps it at the top
table, header = table_with_header()
table.sort_by('index', desc=True)
assert table[0] is header
|
stuckj/dupeguru
|
hscommon/tests/table_test.py
|
Python
|
gpl-3.0
| 9,340 | 0.006852 |
from django.conf import settings
from django.conf.urls.defaults import handler500, handler404, patterns, include, \
url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/', include(admin.site.urls)),
url(r'^jsi18n/(?P<packages>\S+?)/$', 'django.views.i18n.javascript_catalog'),
url(r'^media/cms/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.CMS_MEDIA_ROOT, 'show_indexes': True}),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
url(r'^', include('cms.test_utils.project.second_cms_urls_for_apphook_tests')),
)
|
hzlf/openbroadcast
|
website/cms/test_utils/project/second_urls_for_apphook_tests.py
|
Python
|
gpl-3.0
| 696 | 0.005747 |
from nanoplay import PayloadProtocol, ControlProtocol, Player, CustomServer
|
nanonyme/nanoplay
|
nanoplay/__init__.py
|
Python
|
mit
| 76 | 0 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "eksi.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
hanakamer/eskisozluk-clone
|
App/eksi/manage.py
|
Python
|
gpl-2.0
| 247 | 0 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
import email
import mimetypes
from email.MIMEMultipart import MIMEMultipart
from email.MIMEText import MIMEText
from email.MIMEImage import MIMEImage
import smtplib
from time import sleep
def sendEmail(authInfo, fromAdd, toAdd, subject, plainText, htmlText):
strFrom = fromAdd
strTo = ', '.join(toAdd)
server = authInfo.get('server')
user = authInfo.get('user')
passwd = authInfo.get('password')
if not (server and user and passwd) :
print 'incomplete login info, exit now'
return
    # Set up the root (multipart) message
msgRoot = MIMEMultipart('related')
msgRoot['Subject'] = subject
msgRoot['From'] = strFrom
msgRoot['To'] = strTo
msgRoot.preamble = 'This is a multi-part message in MIME format.'
# Encapsulate the plain and HTML versions of the message body in an
# 'alternative' part, so message agents can decide which they want to display.
msgAlternative = MIMEMultipart('alternative')
msgRoot.attach(msgAlternative)
    # Set the plain-text part
#msgText = MIMEText(plainText, 'plain', 'GB18030')
msgText = MIMEText(plainText, 'plain', 'utf-8')
msgAlternative.attach(msgText)
    # Set the HTML part
#msgText = MIMEText(htmlText, 'html', 'GB18030')
msgText = MIMEText(htmlText, 'html', 'utf-8')
msgAlternative.attach(msgText)
    # Set the inline image part (kept disabled)
#fp = open('test.jpg', 'rb')
#msgImage = MIMEImage(fp.read())
#fp.close()
#msgImage.add_header('Content-ID', '<image1>')
#msgRoot.attach(msgImage)
    # Send the mail
smtp = smtplib.SMTP()
    # Set the debug level as needed:
    # 1 - enable protocol logging
    #smtp.set_debuglevel(1)
    # 0 - disable protocol logging
smtp.set_debuglevel(0)
smtp.connect(server)
smtp.login(user, passwd)
smtp.sendmail(strFrom, strTo, msgRoot.as_string())
smtp.quit()
return
def sendmail(_title,_content,_toUserList,_html=None):
#(authInfo, fromAdd, toAdd, subject, plainText, htmlText):
print "start to send mail start"
authInfo = {}
authInfo['server'] = 'smtp.exmail.qq.com'
authInfo['user'] = 'm@localhost.com'
authInfo['password'] = '123'
fromAdd = 'm@localhost.com'
#toAdd = ["root@localhost.com"]
subject = 'search exception category'
if(_title):
subject =_title
plainText = _content
    #plainText = 'Server abnormal status alert'
htmlText = _html
for t in _toUserList:
#print t
tarr=[]
tarr.append(t)
sendEmail(authInfo, fromAdd, tarr, subject, plainText, htmlText)
sleep(2)
print 'send mail success.'
"""
by zcl at 2016.6.15
"""
def rendar_table(title,notice,rhead_list,rdata_list):
html ="""
<p class="section">{0}</p>
<p class="section">{1}</p>
<table cellpadding="5" cellspacing="0" border="1" bordercolor="#04B4AE" style="text-align: center; font-family: Arial; border-collapse: collapse; width: auto;">
<tbody>
<tr>
<td colspan="{2}"><div>{0}</div></td>
</tr>
<tr>
""".format(title,notice,str(len(rhead_list)))
for rhead in rhead_list:
rhead = rhead.encode('utf8')
tmp = """<th style="background-color: #04B4AE; color: #ffffff">{0}</th>
""".format(str(rhead))
html+=tmp
html+="</tr>"
for o in rdata_list:
line_html=''
line_html+="<tr>"
for key in rhead_list:
val = o[key]
key = key.encode('utf8')
line_html+="<td>"+str(val)+"</td>"
line_html+="</tr>"
html+=line_html
html+="""
</tbody>
</table>
<hr>
"""
return html
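# A minimal usage sketch (data and recipients are hypothetical): build an HTML
# table with rendar_table and send it as the HTML part of a mail.
#
#     rows = [{u'name': u'api', u'errors': 3}, {u'name': u'web', u'errors': 0}]
#     html = rendar_table('Daily report', 'errors per service',
#                         [u'name', u'errors'], rows)
#     sendmail('Daily report', 'see the HTML version', ['root@localhost.com'], html)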
if __name__ == '__main__' :
toUserList = ['root@localhost.com']
sendmail('test','sorry to disturb, this mail is just for test',toUserList)
    #sendmail('[Hot industry statistics]'+title,'',toUserList,html.encode('utf8'))
#sendEmail(authInfo, fromAdd, toAdd, subject, plainText, htmlText)
|
zhaochl/python-utils
|
utils/mail_util.py
|
Python
|
apache-2.0
| 3,970 | 0.013465 |
# !usr/bin/env python2
# -*- coding: utf-8 -*-
#
# Licensed under a 3-clause BSD license.
#
# @Author: Brian Cherinka
# @Date: 2017-06-20 16:36:37
# @Last modified by: Brian Cherinka
# @Last Modified time: 2017-11-13 15:16:57
from __future__ import print_function, division, absolute_import
from marvin.utils.general.images import getImagesByList, getImagesByPlate, getRandomImages, getDir3d, showImage
from marvin.tests.conftest import Galaxy, tempafile
from marvin.tests import marvin_test_if
from marvin.core.exceptions import MarvinError, MarvinUserWarning
import pytest
import os
import warnings
try:
from sdss_access import RsyncAccess, AccessError
except ImportError:
Path = None
RsyncAccess = None
imagelist = ['8485-1901', '7443-12701', '7443-1901']
newgals = ['7495-1901']
@pytest.fixture(scope='function')
def rsync(mode):
''' fixture to create generic rsync object '''
rsync = RsyncAccess(label='marvin_getlist', verbose=False)
if mode != 'local':
rsync.remote()
yield rsync
rsync.reset()
rsync = None
localredux = os.getenv('MANGA_SPECTRO_REDUX')
remoteredux = 'https://sdss@dtn01.sdss.org/sas/mangawork/manga/spectro/redux'
remoteurl = 'https://data.sdss.org/sas/mangawork/manga/spectro/redux'
bases = [localredux, remoteredux, remoteurl]
rmodes = ['full', 'url']
@pytest.fixture()
def base(mode, asurl):
if asurl is False:
return localredux
else:
if mode != 'local':
return remoteredux
else:
return remoteurl
@pytest.fixture(scope='session', params=newgals)
def newgalaxy(request, maindb, get_params, set_sasurl):
release, bintype, template = get_params
gal = Galaxy(request.param)
gal.set_params(bintype=bintype, template=template)
gal.set_filepaths()
yield gal
@pytest.fixture()
def get_cube(newgalaxy, rsync):
if not os.path.isfile(newgalaxy.cubepath):
rsync.add('mangacube', **newgalaxy.access_kwargs)
rsync.set_stream()
rsync.commit()
yield newgalaxy
@pytest.fixture(params=rmodes)
def asurl(request):
if request.param == 'full':
return False
elif request.param == 'url':
return True
@pytest.fixture()
def make_paths(request, rsync, mode, asurl, release):
inputs = request.param if hasattr(request, 'param') else None
rmode = 'url' if asurl else 'full'
fullpaths = []
inputs = inputs if inputs else imagelist
for plateifu in inputs:
gal = Galaxy(plateifu)
gal.set_params(release=release)
gal.set_filepaths()
if mode == 'local':
path = rsync.__getattribute__(rmode)('mangaimage', **gal.access_kwargs)
fullpaths.append(path)
else:
rsync.add('mangaimage', **gal.access_kwargs)
rsync.set_stream()
path = rsync.get_urls() if asurl else rsync.get_paths()
fullpaths.extend(path)
return fullpaths
class TestImagesGetDir3d(object):
@pytest.mark.parametrize('expval', [('stack')])
def test_getdir3d(self, galaxy, expval, mode, db):
dir3d = getDir3d(galaxy.plateifu, mode=mode, release=galaxy.release)
assert expval == dir3d
@pytest.mark.parametrize('expval', [('stack')])
def test_getdir3d_plate(self, galaxy, expval, mode, db):
dir3d = getDir3d(galaxy.plate, mode=mode, release=galaxy.release)
assert expval == dir3d
@pytest.mark.xfail()
@pytest.mark.timeout(40)
class TestImagesByList(object):
@pytest.mark.parametrize('imglist, mode, errmsg',
[('7495-1901', 'local', 'Input must be of type list or Numpy array'),
(['nogoodid'], 'local', 'Input must be of type plate-ifu or mangaid'),
(imagelist, 'notvalidmode', 'Mode must be either auto, local, or remote')],
ids=['notlist', 'badid', 'badmode'])
def test_failures(self, imglist, mode, errmsg, release):
with pytest.raises(AssertionError) as cm:
image = getImagesByList(imglist, mode=mode, release=release)
assert cm.type == AssertionError
assert errmsg in str(cm.value)
def test_get_imagelist(self, make_paths, mode, asurl, release):
images = getImagesByList(imagelist, mode=mode, as_url=asurl, release=release)
assert set(make_paths) == set(images)
# @pytest.mark.parametrize('make_paths', [(['7495-1901'])], indirect=True, ids=['newplateifu'])
# def test_download(self, monkeymanga, temp_scratch, get_cube):
# imgpath = tempafile(get_cube.imgpath, temp_scratch)
# #assert os.path.isfile(get_cube.imgpath) is False
# assert imgpath.check(file=0) is True
# image = getImagesByList([get_cube.plateifu], mode='remote', as_url=True, download=True, release=get_cube.release)
# #assert os.path.isfile(get_cube.imgpath) is True
# assert imgpath.check(file=1) is True
# assert image is None
# @pytest.mark.parametrize('make_paths', [(['7495-1901'])], indirect=True, ids=['newplateifu'])
# def test_download_fails(self, monkeymanga, temp_scratch, get_cube):
# imgpath = tempafile(get_cube.imgpath, temp_scratch)
# assert imgpath.check(file=0) is True
# errmsg = 'Download not available when in local mode'
# with warnings.catch_warnings(record=True) as cm:
# warnings.simplefilter('always')
# image = getImagesByList([get_cube.plateifu], mode='local', as_url=True, download=True)
# assert cm[-1].category is MarvinUserWarning
# assert errmsg in str(cm[-1].message)
class TestImagesByPlate(object):
@pytest.mark.parametrize('plateid, mode, errmsg',
[('8485abcd', 'local', 'Plateid must be a numeric integer value'),
(None, 'notvalidmode', 'Mode must be either auto, local, or remote')],
ids=['badid', 'badmode'])
def test_failures(self, galaxy, plateid, mode, errmsg):
plateid = plateid if plateid else galaxy.plate
with pytest.raises(AssertionError) as cm:
image = getImagesByPlate(plateid, mode=mode, release=galaxy.release)
assert cm.type == AssertionError
assert errmsg in str(cm.value)
@pytest.mark.parametrize('make_paths, plate', [(['8485-1901'], '8485')], indirect=['make_paths'], ids=['plateifu'])
def test_get_imageplate(self, make_paths, plate, mode, asurl, release):
images = getImagesByPlate(plate, mode=mode, as_url=asurl, release=release)
assert make_paths[0] in images
# @pytest.mark.parametrize('make_paths', [(['7495-1901'])], indirect=True, ids=['newplateifu'])
# def test_download(self, monkeymanga, temp_scratch, get_cube):
# imgpath = tempafile(get_cube.imgpath, temp_scratch)
# assert imgpath.check(file=0) is True
# image = getImagesByPlate(get_cube.plate, mode='remote', as_url=True, download=True)
# assert imgpath.check(file=1) is True
# assert image is None
# def test_get_images_download_local_fail(self, monkeymanga, temp_scratch, get_cube):
# imgpath = tempafile(get_cube.imgpath, temp_scratch)
# assert imgpath.check(file=0) is True
# errmsg = 'Download not available when in local mode'
# with warnings.catch_warnings(record=True) as cm:
# warnings.simplefilter("always")
# image = getImagesByPlate(self.new_plate, mode='local', as_url=True, download=True)
# self.assertIs(cm[-1].category, MarvinUserWarning)
# self.assertIn(errmsg, str(cm[-1].message))
class TestRandomImages(object):
@pytest.mark.parametrize('mode, errmsg',
[('notvalidmode', 'Mode must be either auto, local, or remote')],
ids=['badmode'])
def test_failures(self, mode, errmsg, release):
with pytest.raises(AssertionError) as cm:
image = getRandomImages(mode=mode, release=release)
assert cm.type == AssertionError
assert errmsg in str(cm.value)
@pytest.mark.parametrize('num', [(10), (5)], ids=['num10', 'num5'])
def test_get_image_random(self, base, num, mode, asurl, release):
images = getRandomImages(num=num, mode=mode, as_url=asurl, release=release)
assert images is not None
assert num == len(images)
assert isinstance(images, list) is True
assert base in images[0]
class TestShowImage(object):
def _assert_image(self, galaxy, image):
assert image is not None
assert image.size == (562, 562)
assert image.format == 'PNG'
assert str(galaxy.plate) in image.filename
assert galaxy.ifu in image.filename
@pytest.mark.parametrize('return_image', [(True), (False)], ids=['returnyes', 'returnno'])
def test_show_image(self, galaxy, mode, return_image):
image = showImage(plateifu=galaxy.plateifu, mode=mode, release=galaxy.release, return_image=return_image, show_image=False)
if return_image:
self._assert_image(galaxy, image)
else:
assert image is None
return image
@pytest.mark.parametrize('param, error, errmsg',
[({'mode': 'notvalidmode'}, AssertionError, 'Mode must be either auto, local, or remote'),
({}, AssertionError, 'A filepath or plateifu must be specified!'),
({'plateifu': '8485-1905'}, MarvinError, 'Error: remote filepath'),
({'path': '/tmp/image.png'}, MarvinError, 'Error: local filepath /tmp/image.png does not exist.'),
({'path': ['/tmp/image.png', '/tmp/image1.png']}, MarvinError, 'showImage currently only works on a single input at a time')],
ids=['badmode', 'noinput', 'badplateifu', 'badfile', 'toomany'])
def test_failures(self, param, error, errmsg, release):
if 'mode' not in param:
param.update({'mode': 'auto'})
with pytest.raises(error) as cm:
image = showImage(release=release, **param)
assert cm.type == error
assert errmsg in str(cm.value)
def test_withpaths(self, galaxy, mode):
if mode != 'local':
galaxy.set_params()
galaxy.set_filepaths(pathtype='url')
image = showImage(path=galaxy.imgpath, mode=mode, return_image=True, show_image=False)
self._assert_image(galaxy, image)
@pytest.mark.parametrize('base, mode, errmsg',
[(localredux, 'remote', 'Local path not allowed in remote mode'),
(remoteurl, 'local', 'Remote url path not allowed in local mode')],
ids=['remoteuserdir', 'localhttp'])
def test_path_fails_wrongmodes(self, base, galaxy, mode, errmsg):
path = os.path.join(base, galaxy.get_location(galaxy.imgpath))
with pytest.raises(MarvinError) as cm:
image = showImage(release=galaxy.release, path=path, mode=mode)
assert cm.type == MarvinError
assert errmsg in str(cm.value)
|
albireox/marvin
|
python/marvin/tests/utils/test_images.py
|
Python
|
bsd-3-clause
| 11,203 | 0.002856 |
# To change this license header, choose License Headers in Project Properties.
# To change this template file, choose Tools | Templates
# and open the template in the editor.
|
tzuria/Shift-It-Easy
|
webApp/shift-it-easy-2015/web/pages/__init__.py
|
Python
|
mit
| 177 | 0.00565 |
#!/usr/bin/python
# Copyright (c) 2016 Thomas Stringer, <tomstr@microsoft.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_loadbalancer
version_added: "2.4"
short_description: Manage Azure load balancers.
description:
- Create, update and delete Azure load balancers
options:
resource_group:
description:
- Name of a resource group where the load balancer exists or will be created.
required: true
name:
description:
- Name of the load balancer.
required: true
state:
description:
- Assert the state of the load balancer. Use C(present) to create/update a load balancer, or
C(absent) to delete one.
default: present
choices:
- absent
- present
location:
description:
- Valid azure location. Defaults to location of the resource group.
sku:
description:
            - The load balancer SKU.
choices:
- Basic
- Standard
version_added: 2.6
frontend_ip_configurations:
description: List of frontend IPs to be used
suboptions:
name:
description: Name of the frontend ip configuration.
required: True
public_ip_address:
                description: Name of an existing public IP address object in the current resource group to associate with this frontend IP configuration.
private_ip_address:
                description: The private IP address of the frontend IP configuration.
version_added: 2.6
private_ip_allocation_method:
description: The Private IP allocation method.
choices:
- Static
- Dynamic
version_added: 2.6
subnet:
description:
- The reference of the subnet resource.
- Should be an existing subnet's resource id.
version_added: 2.6
version_added: 2.5
backend_address_pools:
description: List of backend address pools
suboptions:
name:
description: Name of the backend address pool.
required: True
version_added: 2.5
probes:
description: List of probe definitions used to check endpoint health.
suboptions:
name:
description: Name of the probe.
required: True
port:
description: Probe port for communicating the probe. Possible values range from 1 to 65535, inclusive.
required: True
protocol:
description:
- The protocol of the end point to be probed.
- If 'Tcp' is specified, a received ACK is required for the probe to be successful.
- If 'Http' is specified, a 200 OK response from the specified URL is required for the probe to be successful.
choices:
- Tcp
- Http
default: Tcp
interval:
description:
- The interval, in seconds, for how frequently to probe the endpoint for health status.
- Slightly less than half the allocated timeout period, which allows two full probes before taking the instance out of rotation.
- The default value is 15, the minimum value is 5.
default: 15
fail_count:
description:
                    - The number of probes that, when unanswered, will result in stopping further traffic from being delivered to the endpoint.
                    - This value allows endpoints to be taken out of rotation faster or slower than the typical times used in Azure.
default: 3
aliases:
- number_of_probes
request_path:
description:
- The URI used for requesting health status from the VM.
- Path is required if a protocol is set to http. Otherwise, it is not allowed.
version_added: 2.5
inbound_nat_pools:
description:
- Defines an external port range for inbound NAT to a single backend port on NICs associated with a load balancer.
- Inbound NAT rules are created automatically for each NIC associated with the Load Balancer using an external port from this range.
- Defining an Inbound NAT pool on your Load Balancer is mutually exclusive with defining inbound Nat rules.
- Inbound NAT pools are referenced from virtual machine scale sets.
- NICs that are associated with individual virtual machines cannot reference an inbound NAT pool.
- They have to reference individual inbound NAT rules.
suboptions:
name:
description: Name of the inbound NAT pool.
required: True
frontend_ip_configuration_name:
description: A reference to frontend IP addresses.
required: True
protocol:
description: IP protocol for the NAT pool
choices:
- Tcp
- Udp
- All
default: Tcp
frontend_port_range_start:
description:
- The first port in the range of external ports that will be used to provide inbound NAT to NICs associated with the load balancer.
- Acceptable values range between 1 and 65534.
required: True
frontend_port_range_end:
description:
- The last port in the range of external ports that will be used to provide inbound NAT to NICs associated with the load balancer.
- Acceptable values range between 1 and 65535.
required: True
backend_port:
description:
- The port used for internal connections on the endpoint.
- Acceptable values are between 1 and 65535.
version_added: 2.5
load_balancing_rules:
description:
            - Object collection representing the load balancing rules of the load balancer.
suboptions:
name:
description: name of the load balancing rule.
required: True
frontend_ip_configuration:
description: A reference to frontend IP addresses.
required: True
backend_address_pool:
description: A reference to a pool of DIPs. Inbound traffic is randomly load balanced across IPs in the backend IPs.
required: True
probe:
description: The name of the load balancer probe this rule should use for health checks.
required: True
protocol:
description: IP protocol for the load balancing rule.
choices:
- Tcp
- Udp
- All
default: Tcp
load_distribution:
description:
- The session persistence policy for this rule; C(Default) is no persistence.
choices:
- Default
- SourceIP
- SourceIPProtocol
default: Default
frontend_port:
description:
- The port for the external endpoint.
- Frontend port numbers must be unique across all rules within the load balancer.
- Acceptable values are between 0 and 65534.
- Note that value 0 enables "Any Port"
backend_port:
description:
- The port used for internal connections on the endpoint.
- Acceptable values are between 0 and 65535.
- Note that value 0 enables "Any Port"
idle_timeout:
description:
- The timeout for the TCP idle connection.
- The value can be set between 4 and 30 minutes.
- The default value is 4 minutes.
- This element is only used when the protocol is set to TCP.
enable_floating_ip:
description:
- Configures SNAT for the VMs in the backend pool to use the publicIP address specified in the frontend of the load balancing rule.
version_added: 2.5
public_ip_address_name:
description:
            - (deprecated) Name of an existing public IP address object to associate with the load balancer.
- This option has been deprecated, and will be removed in 2.9. Use I(frontend_ip_configurations) instead.
aliases:
- public_ip_address
- public_ip_name
- public_ip
required: false
probe_port:
description:
- (deprecated) The port that the health probe will use.
- This option has been deprecated, and will be removed in 2.9. Use I(probes) instead.
required: false
probe_protocol:
description:
- (deprecated) The protocol to use for the health probe.
- This option has been deprecated, and will be removed in 2.9. Use I(probes) instead.
required: false
choices:
- Tcp
- Http
probe_interval:
description:
- (deprecated) Time (in seconds) between endpoint health probes.
- This option has been deprecated, and will be removed in 2.9. Use I(probes) instead.
default: 15
required: false
probe_fail_count:
description:
- (deprecated) The amount of probe failures for the load balancer to make a health determination.
- This option has been deprecated, and will be removed in 2.9. Use I(probes) instead.
default: 3
required: false
probe_request_path:
description:
- (deprecated) The URL that an HTTP probe will use (only relevant if probe_protocol is set to Http).
- This option has been deprecated, and will be removed in 2.9. Use I(probes) instead.
required: false
protocol:
description:
- (deprecated) The protocol (TCP or UDP) that the load balancer will use.
- This option has been deprecated, and will be removed in 2.9. Use I(load_balancing_rules) instead.
required: false
choices:
- Tcp
- Udp
load_distribution:
description:
- (deprecated) The type of load distribution that the load balancer will employ.
- This option has been deprecated, and will be removed in 2.9. Use I(load_balancing_rules) instead.
required: false
choices:
- Default
- SourceIP
- SourceIPProtocol
frontend_port:
description:
- (deprecated) Frontend port that will be exposed for the load balancer.
- This option has been deprecated, and will be removed in 2.9. Use I(load_balancing_rules) instead.
required: false
backend_port:
description:
- (deprecated) Backend port that will be exposed for the load balancer.
- This option has been deprecated, and will be removed in 2.9. Use I(load_balancing_rules) instead.
required: false
idle_timeout:
description:
- (deprecated) Timeout for TCP idle connection in minutes.
- This option has been deprecated, and will be removed in 2.9. Use I(load_balancing_rules) instead.
default: 4
required: false
natpool_frontend_port_start:
description:
- (deprecated) Start of the port range for a NAT pool.
- This option has been deprecated, and will be removed in 2.9. Use I(inbound_nat_pools) instead.
required: false
natpool_frontend_port_end:
description:
- (deprecated) End of the port range for a NAT pool.
- This option has been deprecated, and will be removed in 2.9. Use I(inbound_nat_pools) instead.
required: false
natpool_backend_port:
description:
- (deprecated) Backend port used by the NAT pool.
- This option has been deprecated, and will be removed in 2.9. Use I(inbound_nat_pools) instead.
required: false
natpool_protocol:
description:
- (deprecated) The protocol for the NAT pool.
- This option has been deprecated, and will be removed in 2.9. Use I(inbound_nat_pools) instead.
required: false
extends_documentation_fragment:
- azure
- azure_tags
author:
- "Thomas Stringer (@tstringer)"
- "Yuwei Zhou (@yuwzho)"
'''
EXAMPLES = '''
- name: create load balancer
azure_rm_loadbalancer:
resource_group: testrg
name: testloadbalancer1
frontend_ip_configurations:
- name: frontendipconf0
public_ip_address: testpip
backend_address_pools:
- name: backendaddrpool0
probes:
- name: prob0
port: 80
inbound_nat_pools:
- name: inboundnatpool0
frontend_ip_configuration_name: frontendipconf0
protocol: Tcp
frontend_port_range_start: 80
frontend_port_range_end: 81
backend_port: 8080
load_balancing_rules:
- name: lbrbalancingrule0
frontend_ip_configuration: frontendipconf0
backend_address_pool: backendaddrpool0
frontend_port: 80
backend_port: 80
probe: prob0
'''
RETURN = '''
state:
description: Current state of the load balancer
returned: always
type: dict
changed:
description: Whether or not the resource has changed
returned: always
type: bool
'''
import random
from ansible.module_utils.azure_rm_common import AzureRMModuleBase, format_resource_id
try:
from msrestazure.tools import parse_resource_id
from msrestazure.azure_exceptions import CloudError
except ImportError:
# This is handled in azure_rm_common
pass
frontend_ip_configuration_spec = dict(
name=dict(
type='str',
required=True
),
public_ip_address=dict(
type='str'
),
private_ip_address=dict(
type='str'
),
private_ip_allocation_method=dict(
type='str'
),
subnet=dict(
type='str'
)
)
backend_address_pool_spec = dict(
name=dict(
type='str',
required=True
)
)
probes_spec = dict(
name=dict(
type='str',
required=True
),
port=dict(
type='int',
required=True
),
protocol=dict(
type='str',
choices=['Tcp', 'Http'],
default='Tcp'
),
interval=dict(
type='int',
default=15
),
fail_count=dict(
type='int',
default=3,
aliases=['number_of_probes']
),
request_path=dict(
type='str'
)
)
inbound_nat_pool_spec = dict(
name=dict(
type='str',
required=True
),
frontend_ip_configuration_name=dict(
type='str',
required=True
),
protocol=dict(
type='str',
choices=['Tcp', 'Udp', 'All'],
default='Tcp'
),
frontend_port_range_start=dict(
type='int',
required=True
),
frontend_port_range_end=dict(
type='int',
required=True
),
backend_port=dict(
type='int',
required=True
)
)
load_balancing_rule_spec = dict(
name=dict(
type='str',
required=True
),
frontend_ip_configuration=dict(
type='str',
required=True
),
backend_address_pool=dict(
type='str',
required=True
),
probe=dict(
type='str',
required=True
),
protocol=dict(
type='str',
choices=['Tcp', 'Udp', 'All'],
default='Tcp'
),
load_distribution=dict(
type='str',
choices=['Default', 'SourceIP', 'SourceIPProtocol'],
default='Default'
),
frontend_port=dict(
type='int',
required=True
),
backend_port=dict(
type='int'
),
idle_timeout=dict(
type='int',
default=4
),
enable_floating_ip=dict(
type='bool'
)
)
class AzureRMLoadBalancer(AzureRMModuleBase):
"""Configuration class for an Azure RM load balancer resource"""
def __init__(self):
self.module_args = dict(
resource_group=dict(
type='str',
required=True
),
name=dict(
type='str',
required=True
),
state=dict(
type='str',
default='present',
choices=['present', 'absent']
),
location=dict(
type='str'
),
sku=dict(
type='str',
choices=['Basic', 'Standard']
),
frontend_ip_configurations=dict(
type='list',
elements='dict',
options=frontend_ip_configuration_spec
),
backend_address_pools=dict(
type='list',
elements='dict',
options=backend_address_pool_spec
),
probes=dict(
type='list',
elements='dict',
options=probes_spec
),
inbound_nat_pools=dict(
type='list',
elements='dict',
options=inbound_nat_pool_spec
),
load_balancing_rules=dict(
type='list',
elements='dict',
options=load_balancing_rule_spec
),
public_ip_address_name=dict(
type='str',
aliases=['public_ip_address', 'public_ip_name', 'public_ip']
),
probe_port=dict(
type='int'
),
probe_protocol=dict(
type='str',
choices=['Tcp', 'Http']
),
probe_interval=dict(
type='int',
default=15
),
probe_fail_count=dict(
type='int',
default=3
),
probe_request_path=dict(
type='str'
),
protocol=dict(
type='str',
choices=['Tcp', 'Udp']
),
load_distribution=dict(
type='str',
choices=['Default', 'SourceIP', 'SourceIPProtocol']
),
frontend_port=dict(
type='int'
),
backend_port=dict(
type='int'
),
idle_timeout=dict(
type='int',
default=4
),
natpool_frontend_port_start=dict(
type='int'
),
natpool_frontend_port_end=dict(
type='int'
),
natpool_backend_port=dict(
type='int'
),
natpool_protocol=dict(
type='str'
)
)
self.resource_group = None
self.name = None
self.location = None
self.sku = None
self.frontend_ip_configurations = None
self.backend_address_pools = None
self.probes = None
self.inbound_nat_pools = None
self.load_balancing_rules = None
self.public_ip_address_name = None
self.state = None
self.probe_port = None
self.probe_protocol = None
self.probe_interval = None
self.probe_fail_count = None
self.probe_request_path = None
self.protocol = None
self.load_distribution = None
self.frontend_port = None
self.backend_port = None
self.idle_timeout = None
self.natpool_frontend_port_start = None
self.natpool_frontend_port_end = None
self.natpool_backend_port = None
self.natpool_protocol = None
self.tags = None
self.results = dict(changed=False, state=dict())
super(AzureRMLoadBalancer, self).__init__(
derived_arg_spec=self.module_args,
supports_check_mode=True
)
def exec_module(self, **kwargs):
"""Main module execution method"""
for key in list(self.module_args.keys()) + ['tags']:
setattr(self, key, kwargs[key])
changed = False
resource_group = self.get_resource_group(self.resource_group)
if not self.location:
self.location = resource_group.location
load_balancer = self.get_load_balancer()
if self.state == 'present':
# compatible parameters
if not self.frontend_ip_configurations and not self.backend_address_pools and not self.probes and not self.inbound_nat_pools:
self.deprecate('Discrete load balancer config settings are deprecated and will be removed.'
' Use frontend_ip_configurations, backend_address_pools, probes, inbound_nat_pools lists instead.', version='2.9')
frontend_ip_name = 'frontendip0'
backend_address_pool_name = 'backendaddrp0'
prob_name = 'prob0'
inbound_nat_pool_name = 'inboundnatp0'
lb_rule_name = 'lbr'
self.frontend_ip_configurations = [dict(
name=frontend_ip_name,
public_ip_address=self.public_ip_address_name
)]
self.backend_address_pools = [dict(
name=backend_address_pool_name
)]
self.probes = [dict(
name=prob_name,
port=self.probe_port,
protocol=self.probe_protocol,
interval=self.probe_interval,
fail_count=self.probe_fail_count,
request_path=self.probe_request_path
)] if self.probe_protocol else None
self.inbound_nat_pools = [dict(
name=inbound_nat_pool_name,
frontend_ip_configuration_name=frontend_ip_name,
protocol=self.natpool_protocol,
frontend_port_range_start=self.natpool_frontend_port_start,
frontend_port_range_end=self.natpool_frontend_port_end,
backend_port=self.natpool_backend_port
)] if self.natpool_protocol else None
self.load_balancing_rules = [dict(
name=lb_rule_name,
frontend_ip_configuration=frontend_ip_name,
backend_address_pool=backend_address_pool_name,
probe=prob_name,
protocol=self.protocol,
load_distribution=self.load_distribution,
frontend_port=self.frontend_port,
backend_port=self.backend_port,
idle_timeout=self.idle_timeout,
enable_floating_ip=False
)] if self.protocol else None
if load_balancer:
                # update comparison is not implemented yet (NIE); an existing
                # load balancer is treated as unchanged
changed = False
else:
changed = True
elif self.state == 'absent' and load_balancer:
changed = True
self.results['state'] = load_balancer_to_dict(load_balancer)
if 'tags' in self.results['state']:
update_tags, self.results['state']['tags'] = self.update_tags(self.results['state']['tags'])
if update_tags:
changed = True
else:
if self.tags:
changed = True
self.results['changed'] = changed
if self.state == 'present' and changed:
# create or update
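            # Build the SDK model objects from the option dicts; names of related
            # sub-resources (frontend ip configurations, backend pools, probes) are
            # expanded to full ARM resource ids via the *_id() helpers defined below.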
frontend_ip_configurations_param = [self.network_models.FrontendIPConfiguration(
name=item.get('name'),
public_ip_address=self.get_public_ip_address_instance(item.get('public_ip_address')) if item.get('public_ip_address') else None,
private_ip_address=item.get('private_ip_address'),
private_ip_allocation_method=item.get('private_ip_allocation_method'),
subnet=self.network_models.Subnet(id=item.get('subnet')) if item.get('subnet') else None
) for item in self.frontend_ip_configurations] if self.frontend_ip_configurations else None
backend_address_pools_param = [self.network_models.BackendAddressPool(
name=item.get('name')
) for item in self.backend_address_pools] if self.backend_address_pools else None
probes_param = [self.network_models.Probe(
name=item.get('name'),
port=item.get('port'),
protocol=item.get('protocol'),
interval_in_seconds=item.get('interval'),
request_path=item.get('request_path'),
number_of_probes=item.get('fail_count')
) for item in self.probes] if self.probes else None
inbound_nat_pools_param = [self.network_models.InboundNatPool(
name=item.get('name'),
frontend_ip_configuration=self.network_models.SubResource(
frontend_ip_configuration_id(
self.subscription_id,
self.resource_group,
self.name,
item.get('frontend_ip_configuration_name'))),
protocol=item.get('protocol'),
frontend_port_range_start=item.get('frontend_port_range_start'),
frontend_port_range_end=item.get('frontend_port_range_end'),
backend_port=item.get('backend_port')
) for item in self.inbound_nat_pools] if self.inbound_nat_pools else None
load_balancing_rules_param = [self.network_models.LoadBalancingRule(
name=item.get('name'),
frontend_ip_configuration=self.network_models.SubResource(
frontend_ip_configuration_id(
self.subscription_id,
self.resource_group,
self.name,
item.get('frontend_ip_configuration')
)
),
backend_address_pool=self.network_models.SubResource(
backend_address_pool_id(
self.subscription_id,
self.resource_group,
self.name,
item.get('backend_address_pool')
)
),
probe=self.network_models.SubResource(
probe_id(
self.subscription_id,
self.resource_group,
self.name,
item.get('probe')
)
),
protocol=item.get('protocol'),
load_distribution=item.get('load_distribution'),
frontend_port=item.get('frontend_port'),
backend_port=item.get('backend_port'),
idle_timeout_in_minutes=item.get('idle_timeout'),
enable_floating_ip=item.get('enable_floating_ip')
) for item in self.load_balancing_rules] if self.load_balancing_rules else None
param = self.network_models.LoadBalancer(
sku=self.network_models.LoadBalancerSku(self.sku) if self.sku else None,
location=self.location,
tags=self.tags,
frontend_ip_configurations=frontend_ip_configurations_param,
backend_address_pools=backend_address_pools_param,
probes=probes_param,
inbound_nat_pools=inbound_nat_pools_param,
load_balancing_rules=load_balancing_rules_param
)
self.results['state'] = self.create_or_update_load_balancer(param)
elif self.state == 'absent' and changed:
self.delete_load_balancer()
self.results['state'] = None
return self.results
def get_public_ip_address_instance(self, id):
"""Get a reference to the public ip address resource"""
self.log('Fetching public ip address {}'.format(id))
resource_id = format_resource_id(id, self.subscription_id, 'Microsoft.Network', 'publicIPAddresses', self.resource_group)
return self.network_models.PublicIPAddress(id=resource_id)
def get_load_balancer(self):
"""Get a load balancer"""
self.log('Fetching loadbalancer {0}'.format(self.name))
try:
return self.network_client.load_balancers.get(self.resource_group, self.name)
except CloudError:
return None
def delete_load_balancer(self):
"""Delete a load balancer"""
self.log('Deleting loadbalancer {0}'.format(self.name))
try:
poller = self.network_client.load_balancers.delete(self.resource_group, self.name)
return self.get_poller_result(poller)
except CloudError as exc:
self.fail("Error deleting loadbalancer {0} - {1}".format(self.name, str(exc)))
def create_or_update_load_balancer(self, param):
try:
poller = self.network_client.load_balancers.create_or_update(self.resource_group, self.name, param)
new_lb = self.get_poller_result(poller)
return load_balancer_to_dict(new_lb)
except CloudError as exc:
self.fail("Error creating or updating load balancer {0} - {1}".format(self.name, str(exc)))
def load_balancer_to_dict(load_balancer):
"""Seralialize a LoadBalancer object to a dict"""
if not load_balancer:
return dict()
result = dict(
id=load_balancer.id,
name=load_balancer.name,
location=load_balancer.location,
sku=load_balancer.sku.name,
tags=load_balancer.tags,
provisioning_state=load_balancer.provisioning_state,
etag=load_balancer.etag,
frontend_ip_configurations=[],
backend_address_pools=[],
load_balancing_rules=[],
probes=[],
inbound_nat_rules=[],
inbound_nat_pools=[],
outbound_nat_rules=[]
)
if load_balancer.frontend_ip_configurations:
result['frontend_ip_configurations'] = [dict(
id=_.id,
name=_.name,
etag=_.etag,
provisioning_state=_.provisioning_state,
private_ip_address=_.private_ip_address,
private_ip_allocation_method=_.private_ip_allocation_method,
subnet=dict(
id=_.subnet.id,
name=_.subnet.name,
address_prefix=_.subnet.address_prefix
) if _.subnet else None,
public_ip_address=dict(
id=_.public_ip_address.id,
location=_.public_ip_address.location,
public_ip_allocation_method=_.public_ip_address.public_ip_allocation_method,
ip_address=_.public_ip_address.ip_address
) if _.public_ip_address else None
) for _ in load_balancer.frontend_ip_configurations]
if load_balancer.backend_address_pools:
result['backend_address_pools'] = [dict(
id=_.id,
name=_.name,
provisioning_state=_.provisioning_state,
etag=_.etag
) for _ in load_balancer.backend_address_pools]
if load_balancer.load_balancing_rules:
result['load_balancing_rules'] = [dict(
id=_.id,
name=_.name,
protocol=_.protocol,
frontend_ip_configuration_id=_.frontend_ip_configuration.id,
backend_address_pool_id=_.backend_address_pool.id,
probe_id=_.probe.id,
load_distribution=_.load_distribution,
frontend_port=_.frontend_port,
backend_port=_.backend_port,
idle_timeout_in_minutes=_.idle_timeout_in_minutes,
enable_floating_ip=_.enable_floating_ip,
provisioning_state=_.provisioning_state,
etag=_.etag
) for _ in load_balancer.load_balancing_rules]
if load_balancer.probes:
result['probes'] = [dict(
id=_.id,
name=_.name,
protocol=_.protocol,
port=_.port,
interval_in_seconds=_.interval_in_seconds,
number_of_probes=_.number_of_probes,
request_path=_.request_path,
provisioning_state=_.provisioning_state
) for _ in load_balancer.probes]
if load_balancer.inbound_nat_rules:
result['inbound_nat_rules'] = [dict(
id=_.id,
name=_.name,
frontend_ip_configuration_id=_.frontend_ip_configuration.id,
protocol=_.protocol,
frontend_port=_.frontend_port,
backend_port=_.backend_port,
idle_timeout_in_minutes=_.idle_timeout_in_minutes,
enable_floating_point_ip=_.enable_floating_point_ip if hasattr(_, 'enable_floating_point_ip') else False,
provisioning_state=_.provisioning_state,
etag=_.etag
) for _ in load_balancer.inbound_nat_rules]
if load_balancer.inbound_nat_pools:
result['inbound_nat_pools'] = [dict(
id=_.id,
name=_.name,
frontend_ip_configuration_id=_.frontend_ip_configuration.id,
protocol=_.protocol,
frontend_port_range_start=_.frontend_port_range_start,
frontend_port_range_end=_.frontend_port_range_end,
backend_port=_.backend_port,
provisioning_state=_.provisioning_state,
etag=_.etag
) for _ in load_balancer.inbound_nat_pools]
if load_balancer.outbound_nat_rules:
result['outbound_nat_rules'] = [dict(
id=_.id,
name=_.name,
allocated_outbound_ports=_.allocated_outbound_ports,
frontend_ip_configuration_id=_.frontend_ip_configuration.id,
backend_address_pool=_.backend_address_pool.id,
provisioning_state=_.provisioning_state,
etag=_.etag
) for _ in load_balancer.outbound_nat_rules]
return result
def frontend_ip_configuration_id(subscription_id, resource_group_name, load_balancer_name, name):
"""Generate the id for a frontend ip configuration"""
return '/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Network/loadBalancers/{}/frontendIPConfigurations/{}'.format(
subscription_id,
resource_group_name,
load_balancer_name,
name
)
def backend_address_pool_id(subscription_id, resource_group_name, load_balancer_name, name):
"""Generate the id for a backend address pool"""
return '/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Network/loadBalancers/{}/backendAddressPools/{}'.format(
subscription_id,
resource_group_name,
load_balancer_name,
name
)
def probe_id(subscription_id, resource_group_name, load_balancer_name, name):
"""Generate the id for a probe"""
return '/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Network/loadBalancers/{}/probes/{}'.format(
subscription_id,
resource_group_name,
load_balancer_name,
name
)
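# For illustration only (hypothetical subscription id and resource names), the
# helpers above produce fully qualified ARM ids such as:
#   probe_id('xxxx', 'testrg', 'testloadbalancer1', 'prob0') ->
#   '/subscriptions/xxxx/resourceGroups/testrg/providers/Microsoft.Network/loadBalancers/testloadbalancer1/probes/prob0'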
def main():
"""Main execution"""
AzureRMLoadBalancer()
if __name__ == '__main__':
main()
|
caphrim007/ansible
|
lib/ansible/modules/cloud/azure/azure_rm_loadbalancer.py
|
Python
|
gpl-3.0
| 36,330 | 0.002175 |
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Helper class for creating an InstanceGroup object.
"""
from vm_network_migration.modules.instance_group_modules.regional_managed_instance_group import RegionalManagedInstanceGroup
from vm_network_migration.modules.instance_group_modules.unmanaged_instance_group import UnmanagedInstanceGroup
from vm_network_migration.modules.instance_group_modules.zonal_managed_instance_group import ZonalManagedInstanceGroup
from vm_network_migration.modules.instance_group_modules.instance_group import InstanceGroup
from vm_network_migration.utils import initializer
class InstanceGroupHelper:
@initializer
def __init__(self, compute, project, instance_group_name,
region, zone, network, subnetwork, preserve_instance_ip=False):
""" Initialize an instance group helper object
Args:
compute: google compute engine
project: project ID
instance_group_name: name of the instance group
region: region of the instance group
            zone: zone of the instance group
            network: target network for the migration
            subnetwork: target subnetwork for the migration
            preserve_instance_ip: only valid for an unmanaged instance group
"""
def build_instance_group(self) -> InstanceGroup:
""" Build an object which is an instance of the InstanceGroup's subclass
"""
# try to build a zonal instance group
try:
instance_group_configs = self.get_instance_group_in_zone()
except Exception:
# It is not a single zone instance group
pass
else:
if 'Instance Group Manager' not in instance_group_configs[
'description']:
return UnmanagedInstanceGroup(self.compute, self.project,
self.instance_group_name,
self.network,
self.subnetwork,
self.preserve_instance_ip,
self.zone)
else:
return ZonalManagedInstanceGroup(self.compute,
self.project,
self.instance_group_name,
self.network,
self.subnetwork,
self.preserve_instance_ip,
self.zone)
# try to build a regional instance group
try:
self.get_instance_group_in_region()
except Exception as e:
raise e
else:
return RegionalManagedInstanceGroup(self.compute, self.project,
self.instance_group_name,
self.network,
self.subnetwork,
self.preserve_instance_ip,
self.region)
def get_instance_group_in_zone(self) -> dict:
""" Get a zonal instance group's configurations
Returns: instance group's configurations
"""
return self.compute.instanceGroups().get(
project=self.project,
zone=self.zone,
instanceGroup=self.instance_group_name).execute()
def get_instance_group_in_region(self) -> dict:
""" Get a regional instance group's configurations
Returns: instance group's configurations
"""
return self.compute.regionInstanceGroups().get(
project=self.project,
region=self.region,
instanceGroup=self.instance_group_name).execute()
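# --- Illustrative usage sketch (not part of the original module) ---
# A minimal, hedged example of how this helper might be driven; the project,
# zone and network names are placeholders, and the compute client is assumed
# to come from googleapiclient.discovery with credentials already configured:
#
#     from googleapiclient import discovery
#
#     compute = discovery.build('compute', 'v1')
#     helper = InstanceGroupHelper(compute, 'my-project', 'my-instance-group',
#                                  'us-central1', 'us-central1-a',
#                                  'target-network', 'target-subnetwork')
#     instance_group = helper.build_instance_group()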
|
googleinterns/vm-network-migration
|
vm_network_migration/module_helpers/instance_group_helper.py
|
Python
|
apache-2.0
| 4,376 | 0.0016 |
# The Nexus software is licensed under the BSD 2-Clause license.
#
# You should have recieved a copy of this license with the software.
# If you did not, you can find one at the following link.
#
# http://opensource.org/licenses/bsd-license.php
from core.plugins import ProtocolPlugin
from core.decorators import *
from core.constants import *
class FetchPlugin(ProtocolPlugin):
commands = {
"respawn": "commandRespawn",
}
@player_list
@mod_only
@only_username_command
def commandRespawn(self, username, fromloc, rankoverride):
"/respawn username - Mod\nRespawns the user."
if username in self.client.factory.usernames:
self.client.factory.usernames[username].respawn()
else:
self.client.sendServerMessage("%s is not on the server." % username)
return
self.client.factory.usernames[username].sendServerMessage("You have been respawned by %s." % self.client.username)
self.client.sendServerMessage("%s respawned." % username)
|
TheArchives/Nexus
|
core/plugins/respawn.py
|
Python
|
bsd-2-clause
| 1,072 | 0.012127 |
from .default import default
import os
import re
class image_png(default):
def __init__(self, key, stat):
default.__init__(self, key, stat)
self.data = {}
def compile(self, prop):
if not os.path.exists(prop['value']):
print("Image '{}' not found.".format(prop['value']))
else:
# Convert SVG to PNG
            m = re.search(r"\.svg$", prop['value'])
if m:
from wand.image import Image
from wand.color import Color
from wand.api import library
dest = self.stat['icons_dir'] + "/" + prop['value'].replace('/', '_') + ".png"
print("svg icon detected. converting '{0}' to '{1}'".format(prop['value'], dest))
with Image() as img:
with Color('transparent') as bg_color:
library.MagickSetBackgroundColor(img.wand, bg_color.resource)
img.read(blob=open(prop['value'], 'rb').read())
dest_img = img.make_blob('png32')
with open(dest, 'wb') as out:
out.write(dest_img)
return repr(dest)
return repr(prop['value'])
def stat_value(self, prop):
if prop['value'] is None:
return prop['value']
if os.path.exists(prop['value']):
from wand.image import Image
img = Image(filename=prop['value'])
self.data[prop['value']] = img.size
if not prop['key'] in self.stat['global_data']:
self.stat['global_data'][prop['key']] = {}
self.stat['global_data'][prop['key']][prop['value']] = img.size
return prop['value']
def get_global_data(self):
self.stat.property_values(self.key)
return self.data
|
plepe/pgmapcss
|
pgmapcss/types/image_png.py
|
Python
|
agpl-3.0
| 1,840 | 0.002717 |
# -*- coding: utf-8 -*-
"""
Created on Sun Mar 10 10:43:53 2019
@author: Heathro
Description: Reduces a VCF file to the meta section and
up to 1000 lines for each chromosome, for testing and
debugging purposes.
"""
# Open files to read from and write to
vcfpath = open("D:/MG_GAP/Ali_w_767.vcf", "rU")
testvcf = open("REDUCED_ali.vcf", "w")
# Keep track of chromosome number so we can get one of each
temp_chrom = 0
counter = 0
for line_index, line in enumerate(vcfpath):
# Found a chromosome line
if line[0:8] == "sNNffold":
column = line.split('\t')
first_col = column[0].split('_')
current_chrom = first_col[1]
# Write up to 1000 lines of each chromosome
if current_chrom == temp_chrom:
counter = counter + 1
if counter < 1000:
testvcf.write(line)
# If a new chromosome, write a line, start counter at 0
elif current_chrom != temp_chrom:
counter = 0
temp_chrom = current_chrom
testvcf.write(line)
# Include the meta lines and header line
else:
testvcf.write(line)
testvcf.close()
vcfpath.close()
|
davidfarr/mg-gap
|
mg-gap/mg-gap-py/mg-gap/test_files/reduceVCF.py
|
Python
|
mit
| 1,210 | 0.005785 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""@package docstring
Yowsup connector for wxpyWha (a simple wxWidgets GUI wrapper atop yowsup).
Uses WhaLayer to build the Yowsup stack.
This is based on code from the yowsup echo example, the yowsup cli and pywhatsapp.
"""
SECONDS_RECONNECT_DELAY = 10
import sys
# from echo stack
from yowsup.stacks import YowStackBuilder
from yowsup.layers.auth import AuthError
from yowsup.layers.network import YowNetworkLayer
# from cli stack
try:
from yowsup.layers.axolotl.props import PROP_IDENTITY_AUTOTRUST #tgalal
except ImportError as ie:
sys.stderr.write("WARNING: PROP_IDENTITY_AUTOTRUST could not be imported from yowsup.layers.axolotl.props. Using hardcoded value instead.\n")
PROP_IDENTITY_AUTOTRUST = "org.openwhatsapp.yowsup.prop.axolotl.INDENTITY_AUTOTRUST" #as done by jlguardi
# from cli layer
from yowsup.layers import YowLayerEvent
# from http://stackoverflow.com/questions/3702675/how-to-print-the-full-traceback-without-halting-the-program
import traceback
# from https://github.com/tgalal/yowsup/issues/1069
import logging
try:
import queue
except ImportError: # python2 compatibility
import Queue as queue
from whalayer import WhaLayer
class WhaClient(object):
def __init__(self, credentials, encryptionEnabled = True):
stackBuilder = YowStackBuilder()
self.stack = stackBuilder\
.pushDefaultLayers(encryptionEnabled)\
.push(WhaLayer)\
.build()
self.stack.setCredentials(credentials)
self.stack.setProp(PROP_IDENTITY_AUTOTRUST, True) #not in jlguardi
self.wantReconnect = True
self.abortReconnectWait = queue.Queue()
def setYowsupEventHandler(self, handler):
interface = self.stack.getLayerInterface(WhaLayer)
interface.enventHandler = handler
def sendMessage(self, outgoingMessage):
interface = self.stack.getLayerInterface(WhaLayer)
interface.sendMessage(outgoingMessage)
def disconnect(self):
interface = self.stack.getLayerInterface(WhaLayer)
interface.disconnect()
def start(self):
logging.basicConfig(level=logging.WARNING)
while (self.wantReconnect):
self.stack.broadcastEvent(YowLayerEvent(YowNetworkLayer.EVENT_STATE_CONNECT))
try:
self.stack.loop()
except AuthError as e:
sys.stderr.write("Authentication Error\n")
except KeyboardInterrupt:
# This is only relevant if this is the main module
# TODO: disconnect cleanly
print("\nExit")
sys.exit(0)
except: # catch *all* exceptions
sys.stderr.write("Unhandled exception.\n")
traceback.print_exc()
# TODO: regard connection state in the GUI
sys.stderr.write("Yowsup WhaClient exited.\nYOU ARE NOW DISCONNECTED.\n")
if (self.wantReconnect):
sys.stderr.write("Auto-reconnect enabled. Waiting up to %d seconds before reconnecting...\n"%(SECONDS_RECONNECT_DELAY))
try:
self.abortReconnectWait.get(timeout=SECONDS_RECONNECT_DELAY)
except queue.Empty:
pass
def setEnableReconnect(self, b = True):
self.wantReconnect = b
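        # Putting an item on the queue wakes start() immediately if it is
        # currently waiting out the reconnect delay.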
self.abortReconnectWait.put(b)
if __name__ == "__main__":
client = WhaClient(("login","base64passwd"))
client.start()
|
hoehermann/wxpyWha
|
whastack.py
|
Python
|
gpl-3.0
| 3,522 | 0.010789 |
from sqlalchemy.orm import joinedload
from datetime import datetime
from changes.api.base import APIView
from changes.api.build_index import execute_build
from changes.config import db
from changes.constants import Result, Status
from changes.models import Build, Job, JobStep, ItemStat
class BuildRestartAPIView(APIView):
def post(self, build_id):
build = Build.query.options(
joinedload('project', innerjoin=True),
joinedload('author'),
joinedload('source').joinedload('revision'),
).get(build_id)
if build is None:
return '', 404
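        # only builds that have already finished can be restarted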
if build.status != Status.finished:
return '', 400
        # ItemStat doesn't cascade by itself
stat_ids = [build.id]
job_ids = [
j[0] for j in
db.session.query(Job.id).filter(Job.build_id == build.id)
]
if job_ids:
step_ids = [
s[0] for s in
db.session.query(JobStep.id).filter(JobStep.job_id.in_(job_ids))
]
stat_ids.extend(job_ids)
stat_ids.extend(step_ids)
if stat_ids:
ItemStat.query.filter(
ItemStat.item_id.in_(stat_ids),
).delete(synchronize_session=False)
# remove any existing job data
# TODO(dcramer): this is potentially fairly slow with cascades
Job.query.filter(
Job.build_id == build.id
).delete(synchronize_session=False)
build.date_started = datetime.utcnow()
build.date_modified = build.date_started
build.date_finished = None
build.duration = None
build.status = Status.queued
build.result = Result.unknown
db.session.add(build)
execute_build(build=build)
return self.respond(build)
|
alex/changes
|
changes/api/build_restart.py
|
Python
|
apache-2.0
| 1,841 | 0.000543 |
"""Print all records in the pickle for the specified test"""
import sys
import argparse
from autocms.core import (load_configuration, load_records)
def main():
"""Print all records corresponding to test given as an argument"""
    parser = argparse.ArgumentParser(description='Print all records for the specified test.')
parser.add_argument('testname', help='test directory')
parser.add_argument('-c', '--configfile', type=str,
default='autocms.cfg',
help='AutoCMS configuration file name')
args = parser.parse_args()
config = load_configuration(args.configfile)
records = load_records(args.testname,config)
for job in records:
print str(job)+'\n'
return 0
if __name__ == '__main__':
status = main()
sys.exit(status)
|
appeltel/AutoCMS
|
print_records.py
|
Python
|
mit
| 801 | 0.002497 |
# /usr/bin/env python
'''
Written by Kong Xiaolu and CBIG under MIT license:
https://github.com/ThomasYeoLab/CBIG/blob/master/LICENSE.md
'''
import os
import numpy as np
import torch
import CBIG_pMFM_basic_functions as fc
def CBIG_mfm_test_desikan_main(gpu_index=0):
'''
    This function implements the testing process of the mean field model.
    The objective function is the summation of the FC correlation cost and the FCD KS statistics cost.
    Args:
        gpu_index: index of the gpu used for optimization
    Note:
        input_path (validation results) and output_path (test results) are set
        inside the function rather than passed as arguments.
Returns:
None
'''
input_path = '../output/rsfcpc2_rsfc/validation/'
output_path = '../output/rsfcpc2_rsfc/test/'
if not os.path.isdir(output_path):
os.makedirs(output_path)
torch.cuda.set_device(gpu_index)
torch.cuda.manual_seed(1)
n_set = 100
n_dup = 10
n_node = 68
vali_raw_all = np.zeros((3 * n_node + 1 + 8, 1))
print('Get data')
for i in range(1, 11):
load_file = 'random_seed_' + str(i) + '.csv'
load_path = os.path.join(input_path, load_file)
xmin = fc.csv_matrix_read(load_path)
index_mat = np.zeros((2, xmin.shape[1]))
index_mat[0, :] = i
index_mat[1, :] = np.arange(xmin.shape[1])
xmin = np.concatenate((index_mat, xmin), axis=0)
vali_raw_all = np.concatenate((vali_raw_all, xmin), axis=1)
vali_raw_all = vali_raw_all[:, 1:]
vali_index = np.argsort(vali_raw_all[7, :])
vali_sort_all = vali_raw_all[:, vali_index]
vali_sel_num = 10
i = 0
vali_sel = np.zeros((vali_raw_all.shape[0], vali_sel_num))
p = 0
p_set = np.zeros(vali_sel_num)
print('select data')
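    # Walk through the cost-sorted candidates and keep the 10 best parameter sets
    # whose three regional parameter blocks are not all correlated above 0.98 with
    # any already-selected set, so the chosen sets are not near-duplicates.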
while i < vali_sel_num and p < vali_raw_all.shape[1]:
corr_t = np.zeros(vali_sel_num, dtype=bool)
corr_tr = np.zeros((vali_sel_num, 3))
for j in range(vali_sel_num):
w_corr = np.corrcoef(vali_sel[8:8 + n_node, j:j + 1].T,
vali_sort_all[8:8 + n_node, p:p + 1].T)
i_corr = np.corrcoef(
vali_sel[8 + n_node:8 + 2 * n_node, j:j + 1].T,
vali_sort_all[8 + n_node:8 + 2 * n_node, p:p + 1].T)
s_corr = np.corrcoef(vali_sel[9 + 2 * n_node:, j:j + 1].T,
vali_sort_all[9 + 2 * n_node:, p:p + 1].T)
corr_tr[j, 0] = w_corr[0, 1]
corr_tr[j, 1] = i_corr[0, 1]
corr_tr[j, 2] = s_corr[0, 1]
for k in range(vali_sel_num):
corr_t[k] = (corr_tr[k, :] > 0.98).all()
if not corr_t.any():
vali_sel[:, i] = vali_sort_all[:, p]
p_set[i] = p
i += 1
p += 1
result_save = np.zeros((3 * n_node + 1 + 11, vali_sel_num))
result_save[0:8, :] = vali_sel[0:8, :]
result_save[11:, :] = vali_sel[8:, :]
print('Start testing')
for j in range(vali_sel_num):
test_cost = np.zeros((3, n_set))
for k in range(1):
arx = np.tile(vali_sel[8:, j:j + 1], [1, n_set])
total_cost, fc_cost, fcd_cost = fc.CBIG_combined_cost_test(
arx, n_dup)
test_cost[0, n_set * k:n_set * (k + 1)] = fc_cost
test_cost[1, n_set * k:n_set * (k + 1)] = fcd_cost
test_cost[2, n_set * k:n_set * (k + 1)] = total_cost
test_file = os.path.join(output_path,
'test_num_' + str(j + 1) + '.csv')
np.savetxt(test_file, test_cost, delimiter=',')
result_save[8, j] = np.nanmean(test_cost[0, :])
result_save[9, j] = np.nanmean(test_cost[1, :])
result_save[10, j] = np.nanmean(test_cost[2, :])
print('**************** finish top ' + str(j + 1) +
' test ****************')
test_file_all = os.path.join(output_path, 'test_all.csv')
np.savetxt(test_file_all, result_save, delimiter=',')
if __name__ == '__main__':
CBIG_mfm_test_desikan_main(gpu_index=0)
|
ThomasYeoLab/CBIG
|
stable_projects/fMRI_dynamics/Kong2021_pMFM/part2_pMFM_control_analysis/Primary_gradients/scripts/CBIG_pMFM_step33_test_GradPC2Grad.py
|
Python
|
mit
| 4,115 | 0.000486 |
#!/usr/bin/env python3
# Uses the wikipedia module to define words on the command line
import wikipedia
import sys
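# Example invocation (illustrative; assumes the file is saved as wikipedia_define.py):
#   python3 wikipedia_define.py python linux -full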
sys.argv.pop(0)
for word in sys.argv:
try:
if word[0] != '-':
if '-full' in sys.argv:
print(wikipedia.summary(word))
else:
print(wikipedia.summary(word, sentences=1))
except:
print("* Unknown word: " + word)
|
dendory/scripts
|
wikipedia_define.py
|
Python
|
mit
| 347 | 0.028818 |
import os
import logging
# standardize use of logging module in fs-drift
def start_log(prefix, verbosity=0):
log = logging.getLogger(prefix)
if os.getenv('LOGLEVEL_DEBUG') != None or verbosity != 0:
log.setLevel(logging.DEBUG)
else:
log.setLevel(logging.INFO)
log_format = prefix + ' %(asctime)s - %(levelname)s - %(message)s'
formatter = logging.Formatter(log_format)
h = logging.StreamHandler()
h.setFormatter(formatter)
h.setLevel(logging.INFO)
log.addHandler(h)
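    # the console handler is capped at INFO; the file handler below sets no level
    # of its own, so DEBUG records reach the log file whenever the logger allows them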
h2 = logging.FileHandler('/var/tmp/fsd.%s.log' % prefix)
h2.setFormatter(formatter)
log.addHandler(h2)
log.info('starting log')
return log
# assumptions:
# - there is only 1 FileHandler associated with logger
# - you don't want to change loglevel of StreamHandler
def change_loglevel(logger, loglevel):
for h in logger.handlers:
if isinstance(h, logging.FileHandler):
logger.info('changing log level of FileHandler to %s' % loglevel)
h.setLevel(loglevel)
if __name__ == '__main__':
log = start_log('fsd_log_test')
log.error('level %s', 'error')
log.warn('level %s', 'warn')
log.info('level %s', 'info')
log.debug('level %s', 'debug')
change_loglevel(log, logging.DEBUG)
log.debug('level %s', 'debug - should see this one in the log file /var/tmp/fsd.fsd_log_test.log')
change_loglevel(log, logging.INFO)
log.debug('level %s', 'debug - should NOT see this one there')
|
bengland2/fsstress
|
fsd_log.py
|
Python
|
apache-2.0
| 1,486 | 0.004038 |
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from functools import partial
from inspect import getmembers, isfunction
from unittest import mock
from unittest.mock import ANY, PropertyMock
import pytest
import torch
from torch.utils.data import DataLoader
from pytorch_lightning import __version__, Callback, LightningDataModule, LightningModule, Trainer
from tests.helpers import BoringDataModule, BoringModel, RandomDataset
from tests.helpers.runif import RunIf
@pytest.mark.parametrize("max_steps", [1, 2, 3])
def test_on_before_zero_grad_called(tmpdir, max_steps):
class CurrentTestModel(BoringModel):
on_before_zero_grad_called = 0
def on_before_zero_grad(self, optimizer):
self.on_before_zero_grad_called += 1
model = CurrentTestModel()
trainer = Trainer(default_root_dir=tmpdir, max_steps=max_steps, max_epochs=2)
assert 0 == model.on_before_zero_grad_called
trainer.fit(model)
assert max_steps == model.on_before_zero_grad_called
model.on_before_zero_grad_called = 0
trainer.test(model)
assert 0 == model.on_before_zero_grad_called
def test_training_epoch_end_metrics_collection(tmpdir):
"""Test that progress bar metrics also get collected at the end of an epoch."""
num_epochs = 3
class CurrentModel(BoringModel):
def training_step(self, *args, **kwargs):
output = super().training_step(*args, **kwargs)
self.log_dict({"step_metric": torch.tensor(-1), "shared_metric": 100}, logger=False, prog_bar=True)
return output
def training_epoch_end(self, outputs):
epoch = self.current_epoch
# both scalar tensors and Python numbers are accepted
self.log_dict(
{f"epoch_metric_{epoch}": torch.tensor(epoch), "shared_metric": 111}, logger=False, prog_bar=True
)
model = CurrentModel()
trainer = Trainer(max_epochs=num_epochs, default_root_dir=tmpdir, overfit_batches=2)
trainer.fit(model)
assert trainer.state.finished, f"Training failed with {trainer.state}"
metrics = trainer.progress_bar_dict
# metrics added in training step should be unchanged by epoch end method
assert metrics["step_metric"] == -1
# a metric shared in both methods gets overwritten by epoch_end
assert metrics["shared_metric"] == 111
# metrics are kept after each epoch
for i in range(num_epochs):
assert metrics[f"epoch_metric_{i}"] == i
def test_training_epoch_end_metrics_collection_on_override(tmpdir):
"""Test that batch end metrics are collected when training_epoch_end is overridden at the end of an epoch."""
class OverriddenModel(BoringModel):
def __init__(self):
super().__init__()
self.len_outputs = 0
def on_train_epoch_start(self):
self.num_train_batches = 0
def training_epoch_end(self, outputs):
self.len_outputs = len(outputs)
def on_train_batch_end(self, outputs, batch, batch_idx, dataloader_idx):
self.num_train_batches += 1
class NotOverriddenModel(BoringModel):
def on_train_epoch_start(self):
self.num_train_batches = 0
def on_train_batch_end(self, outputs, batch, batch_idx, dataloader_idx):
self.num_train_batches += 1
overridden_model = OverriddenModel()
not_overridden_model = NotOverriddenModel()
not_overridden_model.training_epoch_end = None
trainer = Trainer(max_epochs=1, default_root_dir=tmpdir, overfit_batches=2)
trainer.fit(overridden_model)
assert overridden_model.len_outputs == overridden_model.num_train_batches
@RunIf(min_gpus=1)
@mock.patch("pytorch_lightning.accelerators.accelerator.Accelerator.lightning_module", new_callable=PropertyMock)
def test_apply_batch_transfer_handler(model_getter_mock):
expected_device = torch.device("cuda", 0)
class CustomBatch:
def __init__(self, data):
self.samples = data[0]
self.targets = data[1]
class CurrentTestModel(BoringModel):
rank = 0
transfer_batch_to_device_hook_rank = None
on_before_batch_transfer_hook_rank = None
on_after_batch_transfer_hook_rank = None
def on_before_batch_transfer(self, batch, dataloader_idx):
assert dataloader_idx == 0
self.on_before_batch_transfer_hook_rank = self.rank
self.rank += 1
batch.samples += 1
return batch
def on_after_batch_transfer(self, batch, dataloader_idx):
assert dataloader_idx == 0
assert batch.samples.device == batch.targets.device == expected_device
self.on_after_batch_transfer_hook_rank = self.rank
self.rank += 1
batch.targets *= 2
return batch
def transfer_batch_to_device(self, batch, device, dataloader_idx):
assert dataloader_idx == 0
self.transfer_batch_to_device_hook_rank = self.rank
self.rank += 1
batch.samples = batch.samples.to(device)
batch.targets = batch.targets.to(device)
return batch
model = CurrentTestModel()
batch = CustomBatch((torch.zeros(5, 32), torch.ones(5, 1, dtype=torch.long)))
trainer = Trainer(gpus=1)
# running .fit() would require us to implement custom data loaders, we mock the model reference instead
model_getter_mock.return_value = model
batch_gpu = trainer.accelerator.batch_to_device(batch, expected_device)
assert model.on_before_batch_transfer_hook_rank == 0
assert model.transfer_batch_to_device_hook_rank == 1
assert model.on_after_batch_transfer_hook_rank == 2
assert batch_gpu.samples.device == batch_gpu.targets.device == expected_device
assert torch.allclose(batch_gpu.samples.cpu(), torch.ones(5, 32))
assert torch.allclose(batch_gpu.targets.cpu(), torch.ones(5, 1, dtype=torch.long) * 2)
@RunIf(min_gpus=2, special=True)
def test_transfer_batch_hook_ddp(tmpdir):
"""
Test custom data are properly moved to the right device using ddp
"""
class CustomBatch:
def __init__(self, data):
self.samples = data[0]
def to(self, device, **kwargs):
self.samples = self.samples.to(device, **kwargs)
return self
def collate_fn(batch):
return CustomBatch(batch)
class TestModel(BoringModel):
def training_step(self, batch, batch_idx):
assert batch.samples.device == self.device
assert isinstance(batch_idx, int)
def train_dataloader(self):
return torch.utils.data.DataLoader(RandomDataset(32, 64), collate_fn=collate_fn)
model = TestModel()
model.validation_step = None
model.training_epoch_end = None
trainer = Trainer(
default_root_dir=tmpdir,
limit_train_batches=2,
limit_val_batches=0,
max_epochs=1,
weights_summary=None,
accelerator="ddp",
gpus=2,
)
trainer.fit(model)
def get_members(cls):
return {h for h, _ in getmembers(cls, predicate=isfunction) if not h.startswith("_")}
class HookedCallback(Callback):
def __init__(self, called):
def call(hook, fn, *args, **kwargs):
out = fn(*args, **kwargs)
d = {"name": f"Callback.{hook}"}
if args:
d["args"] = args
if kwargs:
d["kwargs"] = kwargs
called.append(d)
return out
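        # Wrap every public Callback hook so each invocation (and its arguments)
        # is appended to the shared `called` list.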
for h in get_members(Callback):
attr = getattr(self, h)
setattr(self, h, partial(call, h, attr))
def on_save_checkpoint(*args, **kwargs):
return {"foo": True}
class HookedModel(BoringModel):
def __init__(self, called):
super().__init__()
pl_module_hooks = get_members(LightningModule)
# remove non-hooks
pl_module_hooks.difference_update({"optimizers"})
# remove most `nn.Module` hooks
module_hooks = get_members(torch.nn.Module)
module_hooks.difference_update({"forward", "zero_grad", "train"})
pl_module_hooks.difference_update(module_hooks)
def call(hook, fn, *args, **kwargs):
out = fn(*args, **kwargs)
d = {"name": hook}
if args:
d["args"] = args
elif hook == "train":
# DeepSpeed calls `train(mode)` but we do not. Standardize
# https://github.com/microsoft/DeepSpeed/pull/571
d["args"] = (True,)
if kwargs:
d["kwargs"] = kwargs
called.append(d)
return out
for h in pl_module_hooks:
attr = getattr(self, h)
setattr(self, h, partial(call, h, attr))
def validation_epoch_end(self, *args, **kwargs):
# `BoringModel` does not have a return for `validation_step_end` so this would fail
pass
def test_epoch_end(self, *args, **kwargs):
# `BoringModel` does not have a return for `test_step_end` so this would fail
pass
def _train_batch(self, *args, **kwargs):
if self.automatic_optimization:
return self._auto_train_batch(*args, **kwargs)
return self._manual_train_batch(*args, **kwargs)
@staticmethod
def _auto_train_batch(trainer, model, batches, device=torch.device("cpu"), current_epoch=0, **kwargs):
using_native_amp = kwargs.get("amp_backend") == "native"
using_deepspeed = kwargs.get("plugins") == "deepspeed"
out = []
on_before_optimizer_step = [
dict(name="Callback.on_before_optimizer_step", args=(trainer, model, ANY, 0)),
dict(name="on_before_optimizer_step", args=(ANY, 0)),
]
for i in range(batches):
out.extend(
[
dict(name="on_before_batch_transfer", args=(ANY, 0)),
dict(name="transfer_batch_to_device", args=(ANY, device, 0)),
dict(name="on_after_batch_transfer", args=(ANY, 0)),
# TODO: `on_batch_{start,end}`
dict(name="Callback.on_batch_start", args=(trainer, model)),
dict(name="Callback.on_train_batch_start", args=(trainer, model, ANY, i, 0)),
dict(name="on_train_batch_start", args=(ANY, i, 0)),
# these are before the training step because
# they are not part of the `training_step_and_backward` closure, however,
# with native amp, the closure is run first and then the optimizer step.
*(on_before_optimizer_step if not using_native_amp else []),
dict(name="forward", args=(ANY,)),
dict(name="training_step", args=(ANY, i)),
dict(name="training_step_end", args=(dict(loss=ANY),)),
dict(name="Callback.on_before_zero_grad", args=(trainer, model, ANY)),
dict(name="on_before_zero_grad", args=(ANY,)),
dict(name="optimizer_zero_grad", args=(current_epoch, i, ANY, 0)),
dict(name="Callback.on_before_backward", args=(trainer, model, ANY)),
dict(name="on_before_backward", args=(ANY,)),
# DeepSpeed handles backward internally
*([dict(name="backward", args=(ANY, ANY, 0))] if not using_deepspeed else []),
dict(name="Callback.on_after_backward", args=(trainer, model)),
dict(name="on_after_backward"),
*(on_before_optimizer_step if using_native_amp else []),
dict(
name="optimizer_step",
args=(current_epoch, i, ANY, 0, ANY),
kwargs=dict(on_tpu=False, using_lbfgs=False, using_native_amp=using_native_amp),
),
dict(name="Callback.on_train_batch_end", args=(trainer, model, dict(loss=ANY), ANY, i, 0)),
dict(name="on_train_batch_end", args=(dict(loss=ANY), ANY, i, 0)),
dict(name="Callback.on_batch_end", args=(trainer, model)),
]
)
return out
@staticmethod
def _manual_train_batch(trainer, model, batches, device=torch.device("cpu"), **kwargs):
using_deepspeed = kwargs.get("plugins") == "deepspeed"
out = []
for i in range(batches):
out.extend(
[
dict(name="on_before_batch_transfer", args=(ANY, 0)),
dict(name="transfer_batch_to_device", args=(ANY, device, 0)),
dict(name="on_after_batch_transfer", args=(ANY, 0)),
# TODO: `on_batch_{start,end}`
dict(name="Callback.on_batch_start", args=(trainer, model)),
dict(name="Callback.on_train_batch_start", args=(trainer, model, ANY, i, 0)),
dict(name="on_train_batch_start", args=(ANY, i, 0)),
dict(name="forward", args=(ANY,)),
dict(name="Callback.on_before_backward", args=(trainer, model, ANY)),
dict(name="on_before_backward", args=(ANY,)),
# DeepSpeed handles backward internally
*([dict(name="backward", args=(ANY, None, None))] if not using_deepspeed else []),
dict(name="Callback.on_after_backward", args=(trainer, model)),
dict(name="on_after_backward"),
# `manual_backward` calls the previous 3
dict(name="manual_backward", args=(ANY,)),
dict(name="Callback.on_before_optimizer_step", args=(trainer, model, ANY, 0)),
dict(name="on_before_optimizer_step", args=(ANY, 0)),
dict(name="training_step", args=(ANY, i)),
dict(name="training_step_end", args=(dict(loss=ANY),)),
dict(name="Callback.on_train_batch_end", args=(trainer, model, dict(loss=ANY), ANY, i, 0)),
dict(name="on_train_batch_end", args=(dict(loss=ANY), ANY, i, 0)),
dict(name="Callback.on_batch_end", args=(trainer, model)),
]
)
return out
@staticmethod
def _eval_epoch(fn, trainer, model, batches, key, device=torch.device("cpu")):
outputs = {key: ANY}
return [
dict(name="Callback.on_epoch_start", args=(trainer, model)),
dict(name="on_epoch_start"),
dict(name=f"Callback.on_{fn}_epoch_start", args=(trainer, model)),
dict(name=f"on_{fn}_epoch_start"),
*HookedModel._eval_batch(fn, trainer, model, batches, key, device=device),
dict(name=f"{fn}_epoch_end", args=([outputs] * batches,)),
dict(name=f"Callback.on_{fn}_epoch_end", args=(trainer, model)),
dict(name=f"on_{fn}_epoch_end"),
dict(name="Callback.on_epoch_end", args=(trainer, model)),
dict(name="on_epoch_end"),
]
@staticmethod
def _eval_batch(fn, trainer, model, batches, key, device=torch.device("cpu")):
out = []
outputs = {key: ANY}
for i in range(batches):
out.extend(
[
dict(name="on_before_batch_transfer", args=(ANY, 0)),
dict(name="transfer_batch_to_device", args=(ANY, device, 0)),
dict(name="on_after_batch_transfer", args=(ANY, 0)),
# TODO: `{,Callback}.on_batch_{start,end}`
dict(name=f"Callback.on_{fn}_batch_start", args=(trainer, model, ANY, i, 0)),
dict(name=f"on_{fn}_batch_start", args=(ANY, i, 0)),
dict(name="forward", args=(ANY,)),
dict(name=f"{fn}_step", args=(ANY, i)),
dict(name=f"{fn}_step_end", args=(outputs,)),
dict(name=f"Callback.on_{fn}_batch_end", args=(trainer, model, outputs, ANY, i, 0)),
dict(name=f"on_{fn}_batch_end", args=(outputs, ANY, i, 0)),
]
)
return out
@staticmethod
def _predict_batch(trainer, model, batches):
out = []
for i in range(batches):
out.extend(
[
dict(name="on_before_batch_transfer", args=(ANY, 0)),
dict(name="transfer_batch_to_device", args=(ANY, torch.device("cpu"), 0)),
dict(name="on_after_batch_transfer", args=(ANY, 0)),
# TODO: `{,Callback}.on_batch_{start,end}`
dict(name="Callback.on_predict_batch_start", args=(trainer, model, ANY, i, 0)),
dict(name="on_predict_batch_start", args=(ANY, i, 0)),
dict(name="forward", args=(ANY,)),
dict(name="predict_step", args=(ANY, i)),
# TODO: `predict_step_end`
dict(name="Callback.on_predict_batch_end", args=(trainer, model, ANY, ANY, i, 0)),
dict(name="on_predict_batch_end", args=(ANY, ANY, i, 0)),
]
)
return out
@pytest.mark.parametrize(
"kwargs",
[
{},
# these precision plugins modify the optimization flow, so testing them explicitly
pytest.param(dict(gpus=1, precision=16, plugins="deepspeed"), marks=RunIf(deepspeed=True, min_gpus=1)),
pytest.param(dict(gpus=1, precision=16, amp_backend="native"), marks=RunIf(amp_native=True, min_gpus=1)),
pytest.param(dict(gpus=1, precision=16, amp_backend="apex"), marks=RunIf(amp_apex=True, min_gpus=1)),
],
)
@pytest.mark.parametrize("automatic_optimization", (True, False))
def test_trainer_model_hook_system_fit(tmpdir, kwargs, automatic_optimization):
called = []
class TestModel(HookedModel):
def __init__(self, *args):
super().__init__(*args)
self.automatic_optimization = automatic_optimization
def training_step(self, batch, batch_idx):
if self.automatic_optimization:
return super().training_step(batch, batch_idx)
loss = self.step(batch[0])
opt = self.optimizers()
opt.zero_grad()
self.manual_backward(loss)
opt.step()
return {"loss": loss}
model = TestModel(called)
callback = HookedCallback(called)
train_batches = 2
val_batches = 2
trainer = Trainer(
default_root_dir=tmpdir,
max_epochs=1,
limit_train_batches=train_batches,
limit_val_batches=val_batches,
progress_bar_refresh_rate=0,
weights_summary=None,
callbacks=[callback],
**kwargs,
)
assert called == [
dict(name="Callback.on_init_start", args=(trainer,)),
dict(name="Callback.on_init_end", args=(trainer,)),
]
trainer.fit(model)
saved_ckpt = {
"callbacks": ANY,
"epoch": 1,
"global_step": train_batches,
"lr_schedulers": ANY,
"optimizer_states": ANY,
"pytorch-lightning_version": __version__,
"state_dict": ANY,
}
if kwargs.get("amp_backend") == "native":
saved_ckpt["native_amp_scaling_state"] = ANY
elif kwargs.get("amp_backend") == "apex":
saved_ckpt["amp_scaling_state"] = ANY
device = torch.device("cuda:0" if "gpus" in kwargs else "cpu")
expected = [
dict(name="Callback.on_init_start", args=(trainer,)),
dict(name="Callback.on_init_end", args=(trainer,)),
dict(name="prepare_data"),
dict(name="configure_callbacks"),
dict(name="Callback.on_before_accelerator_backend_setup", args=(trainer, model)),
# DeepSpeed needs the batch size to figure out throughput logging
*([dict(name="train_dataloader")] if kwargs.get("plugins") == "deepspeed" else []),
dict(name="Callback.setup", args=(trainer, model), kwargs=dict(stage="fit")),
dict(name="setup", kwargs=dict(stage="fit")),
dict(name="configure_sharded_model"),
dict(name="Callback.on_configure_sharded_model", args=(trainer, model)),
# DeepSpeed skips initializing optimizers here as they are handled via config
*([dict(name="configure_optimizers")] if kwargs.get("plugins") != "deepspeed" else []),
dict(name="Callback.on_fit_start", args=(trainer, model)),
dict(name="on_fit_start"),
# TODO: explore whether DeepSpeed can have the same flow for optimizers
# DeepSpeed did not find any optimizer in the config so they are loaded here
*([dict(name="configure_optimizers")] if kwargs.get("plugins") == "deepspeed" else []),
dict(name="Callback.on_pretrain_routine_start", args=(trainer, model)),
dict(name="on_pretrain_routine_start"),
dict(name="Callback.on_pretrain_routine_end", args=(trainer, model)),
dict(name="on_pretrain_routine_end"),
dict(name="Callback.on_sanity_check_start", args=(trainer, model)),
dict(name="on_val_dataloader"),
dict(name="val_dataloader"),
dict(name="train", args=(False,)),
dict(name="on_validation_model_eval"),
dict(name="zero_grad"),
dict(name="Callback.on_validation_start", args=(trainer, model)),
dict(name="on_validation_start"),
*model._eval_epoch("validation", trainer, model, val_batches, "x", device=device),
dict(name="Callback.on_validation_end", args=(trainer, model)),
dict(name="on_validation_end"),
dict(name="train", args=(True,)),
dict(name="on_validation_model_train"),
dict(name="Callback.on_sanity_check_end", args=(trainer, model)),
# duplicate `train` because `_run_train` calls it again in case validation wasn't run
dict(name="train", args=(True,)),
dict(name="on_train_dataloader"),
dict(name="train_dataloader"),
dict(name="Callback.on_train_start", args=(trainer, model)),
dict(name="on_train_start"),
dict(name="Callback.on_epoch_start", args=(trainer, model)),
dict(name="on_epoch_start"),
dict(name="Callback.on_train_epoch_start", args=(trainer, model)),
dict(name="on_train_epoch_start"),
*model._train_batch(trainer, model, train_batches, device=device, **kwargs),
dict(name="train", args=(False,)),
dict(name="on_validation_model_eval"),
dict(name="zero_grad"),
dict(name="Callback.on_validation_start", args=(trainer, model)),
dict(name="on_validation_start"),
*model._eval_epoch("validation", trainer, model, val_batches, "x", device=device),
dict(name="Callback.on_validation_end", args=(trainer, model)),
dict(name="on_validation_end"),
dict(name="train", args=(True,)),
dict(name="on_validation_model_train"),
dict(name="training_epoch_end", args=([dict(loss=ANY)] * train_batches,)),
dict(name="Callback.on_train_epoch_end", args=(trainer, model)),
# `ModelCheckpoint.save_checkpoint` is called here from `Callback.on_train_epoch_end`
dict(name="Callback.on_save_checkpoint", args=(trainer, model, saved_ckpt)),
dict(name="on_save_checkpoint", args=(saved_ckpt,)),
dict(name="on_train_epoch_end"),
dict(name="Callback.on_epoch_end", args=(trainer, model)),
dict(name="on_epoch_end"),
dict(name="Callback.on_train_end", args=(trainer, model)),
dict(name="on_train_end"),
dict(name="Callback.on_fit_end", args=(trainer, model)),
dict(name="on_fit_end"),
dict(name="Callback.teardown", args=(trainer, model), kwargs=dict(stage="fit")),
dict(name="teardown", kwargs=dict(stage="fit")),
]
assert called == expected
def test_trainer_model_hook_system_fit_no_val_and_resume(tmpdir):
# initial training to get a checkpoint
model = BoringModel()
trainer = Trainer(
default_root_dir=tmpdir,
max_steps=1,
limit_val_batches=0,
progress_bar_refresh_rate=0,
weights_summary=None,
callbacks=[HookedCallback([])],
)
trainer.fit(model)
best_model_path = trainer.checkpoint_callback.best_model_path
# resume from checkpoint with HookedModel
called = []
model = HookedModel(called)
callback = HookedCallback(called)
train_batches = 2
trainer = Trainer(
default_root_dir=tmpdir,
# already performed 1 step, now resuming to do an additional 2
max_steps=(1 + train_batches),
limit_val_batches=0,
progress_bar_refresh_rate=0,
weights_summary=None,
resume_from_checkpoint=best_model_path,
callbacks=[callback],
)
assert called == [
dict(name="Callback.on_init_start", args=(trainer,)),
dict(name="Callback.on_init_end", args=(trainer,)),
]
trainer.fit(model)
saved_ckpt = {
"callbacks": ANY,
"epoch": 2, # TODO: wrong saved epoch
"global_step": (1 + train_batches),
"lr_schedulers": ANY,
"optimizer_states": ANY,
"pytorch-lightning_version": __version__,
"state_dict": ANY,
}
expected = [
dict(name="Callback.on_init_start", args=(trainer,)),
dict(name="Callback.on_init_end", args=(trainer,)),
dict(name="prepare_data"),
dict(name="configure_callbacks"),
dict(name="Callback.on_before_accelerator_backend_setup", args=(trainer, model)),
dict(name="Callback.setup", args=(trainer, model), kwargs=dict(stage="fit")),
dict(name="setup", kwargs=dict(stage="fit")),
dict(
name="on_load_checkpoint",
args=(
{
"callbacks": ANY,
"epoch": 1,
"global_step": 1,
"lr_schedulers": ANY,
"optimizer_states": ANY,
"pytorch-lightning_version": __version__,
"state_dict": ANY,
},
),
),
dict(name="Callback.on_load_checkpoint", args=(trainer, model, {"foo": True})),
dict(name="configure_sharded_model"),
dict(name="Callback.on_configure_sharded_model", args=(trainer, model)),
dict(name="configure_optimizers"),
dict(name="Callback.on_fit_start", args=(trainer, model)),
dict(name="on_fit_start"),
dict(name="Callback.on_pretrain_routine_start", args=(trainer, model)),
dict(name="on_pretrain_routine_start"),
dict(name="Callback.on_pretrain_routine_end", args=(trainer, model)),
dict(name="on_pretrain_routine_end"),
dict(name="train", args=(True,)),
dict(name="on_train_dataloader"),
dict(name="train_dataloader"),
# even though no validation runs, we initialize the val dataloader for properties like `num_val_batches`
dict(name="on_val_dataloader"),
dict(name="val_dataloader"),
dict(name="Callback.on_train_start", args=(trainer, model)),
dict(name="on_train_start"),
dict(name="Callback.on_epoch_start", args=(trainer, model)),
dict(name="on_epoch_start"),
dict(name="Callback.on_train_epoch_start", args=(trainer, model)),
dict(name="on_train_epoch_start"),
# TODO: wrong current epoch after reload
*model._train_batch(trainer, model, train_batches, current_epoch=1),
dict(name="training_epoch_end", args=([dict(loss=ANY)] * train_batches,)),
dict(name="Callback.on_train_epoch_end", args=(trainer, model)),
dict(name="Callback.on_save_checkpoint", args=(trainer, model, saved_ckpt)),
dict(name="on_save_checkpoint", args=(saved_ckpt,)),
dict(name="on_train_epoch_end"),
dict(name="Callback.on_epoch_end", args=(trainer, model)),
dict(name="on_epoch_end"),
dict(name="Callback.on_train_end", args=(trainer, model)),
dict(name="on_train_end"),
dict(name="Callback.on_fit_end", args=(trainer, model)),
dict(name="on_fit_end"),
dict(name="Callback.teardown", args=(trainer, model), kwargs=dict(stage="fit")),
dict(name="teardown", kwargs=dict(stage="fit")),
]
assert called == expected
@pytest.mark.parametrize("batches", (0, 2))
@pytest.mark.parametrize(
["verb", "noun", "dataloader", "key"], [("validate", "validation", "val", "x"), ("test", "test", "test", "y")]
)
def test_trainer_model_hook_system_eval(tmpdir, batches, verb, noun, dataloader, key):
called = []
model = HookedModel(called)
callback = HookedCallback(called)
trainer = Trainer(
default_root_dir=tmpdir,
max_epochs=1,
limit_val_batches=batches,
limit_test_batches=batches,
progress_bar_refresh_rate=0,
weights_summary=None,
callbacks=[callback],
)
assert called == [
dict(name="Callback.on_init_start", args=(trainer,)),
dict(name="Callback.on_init_end", args=(trainer,)),
]
fn = getattr(trainer, verb)
fn(model, verbose=False)
hooks = [
dict(name="train", args=(False,)),
dict(name=f"on_{noun}_model_eval"),
dict(name="zero_grad"),
dict(name=f"Callback.on_{noun}_start", args=(trainer, model)),
dict(name=f"on_{noun}_start"),
*model._eval_epoch(noun, trainer, model, batches, key),
dict(name=f"Callback.on_{noun}_end", args=(trainer, model)),
dict(name=f"on_{noun}_end"),
dict(name="train", args=(True,)),
dict(name=f"on_{noun}_model_train"),
]
expected = [
dict(name="Callback.on_init_start", args=(trainer,)),
dict(name="Callback.on_init_end", args=(trainer,)),
dict(name="prepare_data"),
dict(name="configure_callbacks"),
dict(name="Callback.on_before_accelerator_backend_setup", args=(trainer, model)),
dict(name="Callback.setup", args=(trainer, model), kwargs=dict(stage=verb)),
dict(name="setup", kwargs=dict(stage=verb)),
dict(name="configure_sharded_model"),
dict(name="Callback.on_configure_sharded_model", args=(trainer, model)),
dict(name=f"on_{dataloader}_dataloader"),
dict(name=f"{dataloader}_dataloader"),
*(hooks if batches else []),
dict(name="Callback.teardown", args=(trainer, model), kwargs=dict(stage=verb)),
dict(name="teardown", kwargs=dict(stage=verb)),
]
assert called == expected
def test_trainer_model_hook_system_predict(tmpdir):
called = []
model = HookedModel(called)
callback = HookedCallback(called)
batches = 2
trainer = Trainer(
default_root_dir=tmpdir, limit_predict_batches=batches, progress_bar_refresh_rate=0, callbacks=[callback]
)
assert called == [
dict(name="Callback.on_init_start", args=(trainer,)),
dict(name="Callback.on_init_end", args=(trainer,)),
]
trainer.predict(model)
expected = [
dict(name="Callback.on_init_start", args=(trainer,)),
dict(name="Callback.on_init_end", args=(trainer,)),
dict(name="prepare_data"),
dict(name="configure_callbacks"),
dict(name="Callback.on_before_accelerator_backend_setup", args=(trainer, model)),
dict(name="Callback.setup", args=(trainer, model), kwargs=dict(stage="predict")),
dict(name="setup", kwargs=dict(stage="predict")),
dict(name="configure_sharded_model"),
dict(name="Callback.on_configure_sharded_model", args=(trainer, model)),
dict(name="on_predict_dataloader"),
dict(name="predict_dataloader"),
dict(name="train", args=(False,)),
dict(name="on_predict_model_eval"),
dict(name="zero_grad"),
dict(name="Callback.on_predict_start", args=(trainer, model)),
dict(name="on_predict_start"),
# TODO: `{,Callback}.on_epoch_{start,end}`
dict(name="Callback.on_predict_epoch_start", args=(trainer, model)),
dict(name="on_predict_epoch_start"),
*model._predict_batch(trainer, model, batches),
# TODO: `predict_epoch_end`
dict(name="Callback.on_predict_epoch_end", args=(trainer, model, [[ANY] * batches])),
dict(name="on_predict_epoch_end", args=([[ANY] * batches],)),
dict(name="Callback.on_predict_end", args=(trainer, model)),
dict(name="on_predict_end"),
# TODO: `on_predict_model_train`
dict(name="Callback.teardown", args=(trainer, model), kwargs=dict(stage="predict")),
dict(name="teardown", kwargs=dict(stage="predict")),
]
assert called == expected
# TODO: add test for tune
def test_hooks_with_different_argument_names(tmpdir):
"""
Test that argument names can be anything in the hooks
"""
class CustomBoringModel(BoringModel):
def assert_args(self, x, batch_nb):
assert isinstance(x, torch.Tensor)
assert x.size() == (1, 32)
assert isinstance(batch_nb, int)
def training_step(self, x1, batch_nb1):
self.assert_args(x1, batch_nb1)
return super().training_step(x1, batch_nb1)
def validation_step(self, x2, batch_nb2):
self.assert_args(x2, batch_nb2)
return super().validation_step(x2, batch_nb2)
def test_step(self, x3, batch_nb3, dl_idx3):
self.assert_args(x3, batch_nb3)
assert isinstance(dl_idx3, int)
return super().test_step(x3, batch_nb3)
def predict(self, x4, batch_nb4, dl_idx4):
self.assert_args(x4, batch_nb4)
assert isinstance(dl_idx4, int)
return super().predict(x4, batch_nb4, dl_idx4)
def test_dataloader(self):
return [DataLoader(RandomDataset(32, 64)), DataLoader(RandomDataset(32, 64))]
def predict_dataloader(self):
return [DataLoader(RandomDataset(32, 64)), DataLoader(RandomDataset(32, 64))]
model = CustomBoringModel()
model.test_epoch_end = None
trainer = Trainer(default_root_dir=tmpdir, fast_dev_run=5)
trainer.fit(model)
assert trainer.state.finished, f"Training failed with {trainer.state}"
trainer.test(model)
preds = trainer.predict(model)
assert len(preds) == 2
assert all(len(x) == 5 for x in preds)
def test_trainer_datamodule_hook_system(tmpdir):
"""Test the LightningDataModule hook system."""
class HookedDataModule(BoringDataModule):
def __init__(self, called):
super().__init__()
def call(hook, fn, *args, **kwargs):
out = fn(*args, **kwargs)
d = {"name": hook}
if args:
d["args"] = args
if kwargs:
d["kwargs"] = kwargs
called.append(d)
return out
for h in get_members(LightningDataModule):
attr = getattr(self, h)
setattr(self, h, partial(call, h, attr))
model = BoringModel()
batches = 2
trainer = Trainer(
default_root_dir=tmpdir,
max_epochs=1,
limit_train_batches=batches,
limit_val_batches=batches,
limit_test_batches=batches,
limit_predict_batches=batches,
progress_bar_refresh_rate=0,
weights_summary=None,
reload_dataloaders_every_epoch=True,
)
called = []
dm = HookedDataModule(called)
trainer.fit(model, datamodule=dm)
batch_transfer = [
dict(name="on_before_batch_transfer", args=(ANY, 0)),
dict(name="transfer_batch_to_device", args=(ANY, torch.device("cpu"), 0)),
dict(name="on_after_batch_transfer", args=(ANY, 0)),
]
expected = [
dict(name="prepare_data"),
dict(name="setup", kwargs=dict(stage="fit")),
dict(name="val_dataloader"),
*batch_transfer * batches,
dict(name="train_dataloader"),
*batch_transfer * batches,
dict(name="val_dataloader"),
*batch_transfer * batches,
dict(
name="on_save_checkpoint",
args=(
{
"callbacks": ANY,
"epoch": 1,
"global_step": 2,
"lr_schedulers": ANY,
"optimizer_states": ANY,
"pytorch-lightning_version": __version__,
"state_dict": ANY,
},
),
),
dict(name="teardown", kwargs=dict(stage="fit")),
]
assert called == expected
called = []
dm = HookedDataModule(called)
trainer.validate(model, datamodule=dm, verbose=False)
expected = [
dict(name="prepare_data"),
dict(name="setup", kwargs=dict(stage="validate")),
dict(name="val_dataloader"),
*batch_transfer * batches,
dict(name="teardown", kwargs=dict(stage="validate")),
]
assert called == expected
called = []
dm = HookedDataModule(called)
trainer.test(model, datamodule=dm, verbose=False)
expected = [
dict(name="prepare_data"),
dict(name="setup", kwargs=dict(stage="test")),
dict(name="test_dataloader"),
*batch_transfer * batches,
dict(name="teardown", kwargs=dict(stage="test")),
]
assert called == expected
called = []
dm = HookedDataModule(called)
trainer.predict(model, datamodule=dm)
expected = [
dict(name="prepare_data"),
dict(name="setup", kwargs=dict(stage="predict")),
dict(name="predict_dataloader"),
*batch_transfer * batches,
dict(name="teardown", kwargs=dict(stage="predict")),
]
assert called == expected
|
williamFalcon/pytorch-lightning
|
tests/models/test_hooks.py
|
Python
|
apache-2.0
| 38,377 | 0.002528 |
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tensor utility functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import functools
import inspect
import re
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import decorator_utils
def _add_deprecated_function_notice_to_docstring(doc, date, instructions):
"""Adds a deprecation notice to a docstring for deprecated functions."""
return decorator_utils.add_notice_to_docstring(
doc, instructions,
'DEPRECATED FUNCTION',
'(deprecated)', [
'THIS FUNCTION IS DEPRECATED. It will be removed after %s.' % date,
'Instructions for updating:'])
def _add_deprecated_arg_notice_to_docstring(doc, date, instructions):
"""Adds a deprecation notice to a docstring for deprecated arguments."""
return decorator_utils.add_notice_to_docstring(
doc, instructions,
'DEPRECATED FUNCTION ARGUMENTS',
'(deprecated arguments)', [
'SOME ARGUMENTS ARE DEPRECATED. '
'They will be removed after %s.' % date,
'Instructions for updating:'])
def _validate_deprecation_args(date, instructions):
if not date:
raise ValueError('Tell us what date this will be deprecated!')
if not re.match(r'20\d\d-[01]\d-[0123]\d', date):
raise ValueError('Date must be YYYY-MM-DD.')
if not instructions:
raise ValueError('Don\'t deprecate things without conversion instructions!')
def _call_location(level=2):
"""Returns call location given level up from current call."""
stack = inspect.stack()
# Check that stack has enough elements.
if len(stack) > level:
location = stack[level]
return '%s:%d in %s.' % (location[1], location[2], location[3])
return '<unknown>'
def deprecated(date, instructions):
"""Decorator for marking functions or methods deprecated.
This decorator logs a deprecation warning whenever the decorated function is
called. It has the following format:
<function> (from <module>) is deprecated and will be removed after <date>.
Instructions for updating:
<instructions>
<function> will include the class name if it is a method.
It also edits the docstring of the function: ' (deprecated)' is appended
to the first line of the docstring and a deprecation notice is prepended
to the rest of the docstring.
Args:
date: String. The date the function is scheduled to be removed. Must be
ISO 8601 (YYYY-MM-DD).
instructions: String. Instructions on how to update code using the
deprecated function.
Returns:
Decorated function or method.
Raises:
ValueError: If date is not in ISO 8601 format, or instructions are empty.
"""
_validate_deprecation_args(date, instructions)
def deprecated_wrapper(func):
"""Deprecation wrapper."""
decorator_utils.validate_callable(func, 'deprecated')
@functools.wraps(func)
def new_func(*args, **kwargs):
logging.warning(
'From %s: %s (from %s) is deprecated and will be removed '
'after %s.\n'
'Instructions for updating:\n%s',
_call_location(), decorator_utils.get_qualified_name(func),
func.__module__, date, instructions)
return func(*args, **kwargs)
new_func.__doc__ = _add_deprecated_function_notice_to_docstring(
func.__doc__, date, instructions)
return new_func
return deprecated_wrapper
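# Illustrative usage sketch (the function below is hypothetical, not part of
# TensorFlow; it only shows how the decorator above is applied):
#
#   @deprecated('2030-01-01', 'Use the replacement op instead.')
#   def old_multiply(x, y):
#     return x * y
#
# At decoration time the docstring of `old_multiply` gains the deprecation
# notice; each call then logs a warning with the call location, the removal
# date and the update instructions.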
DeprecatedArgSpec = collections.namedtuple(
'DeprecatedArgSpec', ['position', 'has_ok_value', 'ok_value'])
def deprecated_args(date, instructions, *deprecated_arg_names_or_tuples):
"""Decorator for marking specific function arguments as deprecated.
This decorator logs a deprecation warning whenever the decorated function is
called with the deprecated argument. It has the following format:
Calling <function> (from <module>) with <arg> is deprecated and will be
removed after <date>. Instructions for updating:
<instructions>
<function> will include the class name if it is a method.
It also edits the docstring of the function: ' (deprecated arguments)' is
appended to the first line of the docstring and a deprecation notice is
prepended to the rest of the docstring.
Args:
date: String. The date the function is scheduled to be removed. Must be
ISO 8601 (YYYY-MM-DD).
instructions: String. Instructions on how to update code using the
deprecated function.
*deprecated_arg_names_or_tuples: String. or 2-Tuple(String,
[ok_vals]). The string is the deprecated argument name.
Optionally, an ok-value may be provided. If the user provided
argument equals this value, the warning is suppressed.
Returns:
Decorated function or method.
Raises:
ValueError: If date is not in ISO 8601 format, instructions are
empty, the deprecated arguments are not present in the function
signature, or the second element of a deprecated_tuple is not a
list.
"""
_validate_deprecation_args(date, instructions)
if not deprecated_arg_names_or_tuples:
raise ValueError('Specify which argument is deprecated.')
def _get_arg_names_to_ok_vals():
"""Returns a dict mapping arg_name to DeprecatedArgSpec w/o position."""
d = {}
for name_or_tuple in deprecated_arg_names_or_tuples:
if isinstance(name_or_tuple, tuple):
d[name_or_tuple[0]] = DeprecatedArgSpec(-1, True, name_or_tuple[1])
else:
d[name_or_tuple] = DeprecatedArgSpec(-1, False, None)
return d
def _get_deprecated_positional_arguments(names_to_ok_vals, arg_spec):
"""Builds a dictionary from deprecated arguments to thier spec.
Returned dict is keyed by argument name.
Each value is a DeprecatedArgSpec with the following fields:
position: The zero-based argument position of the argument
within the signature. None if the argument isn't found in
the signature.
ok_values: Values of this argument for which warning will be
suppressed.
Args:
names_to_ok_vals: dict from string arg_name to a list of values,
possibly empty, which should not elicit a warning.
arg_spec: Output from inspect.getargspec on the called function.
Returns:
Dictionary from arg_name to DeprecatedArgSpec.
"""
arg_name_to_pos = dict(
(name, pos) for (pos, name) in enumerate(arg_spec.args))
deprecated_positional_args = {}
for arg_name, spec in iter(names_to_ok_vals.items()):
if arg_name in arg_name_to_pos:
pos = arg_name_to_pos[arg_name]
deprecated_positional_args[arg_name] = DeprecatedArgSpec(
pos, spec.has_ok_value, spec.ok_value)
return deprecated_positional_args
def deprecated_wrapper(func):
"""Deprecation decorator."""
decorator_utils.validate_callable(func, 'deprecated_args')
deprecated_arg_names = _get_arg_names_to_ok_vals()
arg_spec = inspect.getargspec(func)
deprecated_positions = _get_deprecated_positional_arguments(
deprecated_arg_names, arg_spec)
is_varargs_deprecated = arg_spec.varargs in deprecated_arg_names
is_kwargs_deprecated = arg_spec.keywords in deprecated_arg_names
if (len(deprecated_positions) + is_varargs_deprecated + is_kwargs_deprecated
!= len(deprecated_arg_names_or_tuples)):
known_args = arg_spec.args + [arg_spec.varargs, arg_spec.keywords]
missing_args = [arg_name for arg_name in deprecated_arg_names
if arg_name not in known_args]
raise ValueError('The following deprecated arguments are not present '
'in the function signature: %s. '
'Found next arguments: %s.' % (missing_args, known_args))
@functools.wraps(func)
def new_func(*args, **kwargs):
"""Deprecation wrapper."""
invalid_args = []
named_args = inspect.getcallargs(func, *args, **kwargs)
for arg_name, spec in iter(deprecated_positions.items()):
if (spec.position < len(args) and
not (spec.has_ok_value and
named_args[arg_name] == spec.ok_value)):
invalid_args.append(arg_name)
if is_varargs_deprecated and len(args) > len(arg_spec.args):
invalid_args.append(arg_spec.varargs)
if is_kwargs_deprecated and kwargs:
invalid_args.append(arg_spec.keywords)
for arg_name in deprecated_arg_names:
if (arg_name in kwargs and
not (deprecated_positions[arg_name].has_ok_value and
(named_args[arg_name] ==
deprecated_positions[arg_name].ok_value))):
invalid_args.append(arg_name)
for arg_name in invalid_args:
logging.warning(
'From %s: calling %s (from %s) with %s is deprecated and will '
'be removed after %s.\nInstructions for updating:\n%s',
_call_location(), decorator_utils.get_qualified_name(func),
func.__module__, arg_name, date, instructions)
return func(*args, **kwargs)
new_func.__doc__ = _add_deprecated_arg_notice_to_docstring(
func.__doc__, date, instructions)
return new_func
return deprecated_wrapper
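# Illustrative usage sketch (hypothetical function): deprecating a single
# keyword argument. Any call that passes `fast`, positionally or by keyword,
# logs a warning carrying the date and instructions; ok-value tuples such as
# ('fast', <ok_value>) can additionally suppress the warning for one value.
#
#   @deprecated_args('2030-01-01', 'Use `precise` instead of `fast`.', 'fast')
#   def compute(x, fast=False, precise=True):
#     return x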
def deprecated_arg_values(date, instructions, **deprecated_kwargs):
"""Decorator for marking specific function argument values as deprecated.
This decorator logs a deprecation warning whenever the decorated function is
called with the deprecated argument values. It has the following format:
Calling <function> (from <module>) with <arg>=<value> is deprecated and
will be removed after <date>. Instructions for updating:
<instructions>
<function> will include the class name if it is a method.
It also edits the docstring of the function: ' (deprecated arguments)' is
appended to the first line of the docstring and a deprecation notice is
prepended to the rest of the docstring.
Args:
date: String. The date the function is scheduled to be removed. Must be
ISO 8601 (YYYY-MM-DD).
instructions: String. Instructions on how to update code using the
deprecated function.
**deprecated_kwargs: The deprecated argument values.
Returns:
Decorated function or method.
Raises:
ValueError: If date is not in ISO 8601 format, or instructions are empty.
"""
_validate_deprecation_args(date, instructions)
if not deprecated_kwargs:
raise ValueError('Specify which argument values are deprecated.')
def deprecated_wrapper(func):
"""Deprecation decorator."""
decorator_utils.validate_callable(func, 'deprecated_arg_values')
@functools.wraps(func)
def new_func(*args, **kwargs):
"""Deprecation wrapper."""
named_args = inspect.getcallargs(func, *args, **kwargs)
for arg_name, arg_value in deprecated_kwargs.items():
if arg_name in named_args and named_args[arg_name] == arg_value:
logging.warning(
'From %s: calling %s (from %s) with %s=%s is deprecated and will '
'be removed after %s.\nInstructions for updating:\n%s',
_call_location(), decorator_utils.get_qualified_name(func),
func.__module__, arg_name, arg_value, date, instructions)
return func(*args, **kwargs)
new_func.__doc__ = _add_deprecated_arg_notice_to_docstring(
func.__doc__, date, instructions)
return new_func
return deprecated_wrapper
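# Illustrative usage sketch (hypothetical function): warn only when a specific
# value is passed for an argument.
#
#   @deprecated_arg_values('2030-01-01', "Pass mode='fan_in' instead.",
#                          mode='legacy')
#   def initialize(shape, mode='fan_in'):
#     return shape
#
# `initialize((2, 2), mode='legacy')` logs a deprecation warning; any other
# value of `mode` does not.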
|
laosiaudi/tensorflow
|
tensorflow/python/util/deprecation.py
|
Python
|
apache-2.0
| 12,098 | 0.004877 |
__all__ = ["Transition"]
class Transition(object):
def __init__(self, startState, nextState, word, suffix, marked):
self.startState = startState
self.nextState = nextState
self.word = word
self.suffix = suffix
        self.marked = marked
def similarTransitions(self, transitions):
for transition in transitions:
if (self.startState == transition.startState and
self.nextState == transition.nextState):
yield transition
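# Hedged usage sketch (illustrative only; the state arguments are plain strings
# here purely to show the matching rule used by similarTransitions):
if __name__ == "__main__":
    t1 = Transition("A", "B", "kitap", "lar", False)
    t2 = Transition("A", "B", "kitaplar", "", False)
    t3 = Transition("B", "C", "kitaplar", "", False)
    # only transitions sharing both endpoints with t1 are yielded
    assert list(t1.similarTransitions([t1, t2, t3])) == [t1, t2]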
|
otuncelli/turkish-stemmer-python
|
TurkishStemmer/transitions/__init__.py
|
Python
|
apache-2.0
| 516 | 0.007782 |
from unittest import TestCase
EXAMPLES_PATH = '../examples'
SKIPPED_EXAMPLES = {472, 473, 477}
def _set_test_class():
import re
from imp import load_module, find_module, PY_SOURCE
from pathlib import Path
def _load_module(name, file, pathname, description):
try:
load_module(name, file, pathname, description)
finally:
if file:
file.close()
def make_method(module_name, module_tuple):
def _m(self):
print('Running: {}'.format(module_name))
_load_module(module_name, *module_tuple)
return _m
sols_module_name = 'solutions'
_load_module(sols_module_name, *find_module(sols_module_name, [EXAMPLES_PATH]))
pat_example = re.compile(r'\d+\. .+\.py')
attrs = {}
for i, example_path in enumerate(Path(EXAMPLES_PATH).iterdir()):
if not re.match(pat_example, example_path.name):
continue
module_name = example_path.stem
if int(module_name.split('. ')[0]) in SKIPPED_EXAMPLES:
continue
module_tuple = open(str(example_path), 'rb'), example_path.stem, ('.py', 'rb', PY_SOURCE)
func_name = module_name.replace(' ', '_').replace('.', '').lower()
func_name = 'test_' + ''.join(c for c in func_name if c.isalnum() or c == '_')
attrs[func_name] = make_method(module_name, module_tuple)
class_name = 'TestByExamples'
globals()[class_name] = type(class_name, (TestCase,), attrs)
_set_test_class()
del _set_test_class
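# Hedged illustration of the pattern used above (names are hypothetical): test
# methods can be generated in a loop and attached to a TestCase subclass built
# with type(), which is what `_set_test_class` does for the example scripts.
#
#   def make_test(n):
#       def _m(self):
#           self.assertEqual(n * 2, n + n)
#       return _m
#
#   TestGenerated = type('TestGenerated', (TestCase,),
#                        {'test_%d' % n: make_test(n) for n in range(3)})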
|
yehzhang/RapidTest
|
tests/test_by_examples.py
|
Python
|
mit
| 1,535 | 0.001954 |
# coding=utf-8
import requests
def download(url):
resp = requests.get(url) # TODO add retries
return resp.content, resp.headers
|
ahmetalpbalkan/permalinker
|
application/downloader.py
|
Python
|
apache-2.0
| 140 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2017-03-19 02:09
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('pos', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='itemingredient',
name='exclusive',
field=models.BooleanField(default=False),
),
]
|
nuxis/p0sX-server
|
p0sx/pos/migrations/0002_itemingredient_exclusive.py
|
Python
|
mit
| 448 | 0 |
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
from pyflink.table import EnvironmentSettings
from pyflink.testing.test_case_utils import PythonAPICompletenessTestCase, PyFlinkTestCase
class EnvironmentSettingsCompletenessTests(PythonAPICompletenessTestCase, PyFlinkTestCase):
"""
Tests whether the Python :class:`EnvironmentSettings` is consistent with
Java `org.apache.flink.table.api.EnvironmentSettings`.
"""
@classmethod
def python_class(cls):
return EnvironmentSettings
@classmethod
def java_class(cls):
return "org.apache.flink.table.api.EnvironmentSettings"
@classmethod
def excluded_methods(cls):
# internal interfaces, no need to expose to users.
return {'getPlanner', 'getExecutor'}
class EnvironmentSettingsBuilderCompletenessTests(PythonAPICompletenessTestCase, PyFlinkTestCase):
"""
Tests whether the Python :class:`EnvironmentSettings.Builder` is consistent with
Java `org.apache.flink.table.api.EnvironmentSettings$Builder`.
"""
@classmethod
def python_class(cls):
return EnvironmentSettings.Builder
@classmethod
def java_class(cls):
return "org.apache.flink.table.api.EnvironmentSettings$Builder"
if __name__ == '__main__':
import unittest
try:
import xmlrunner
testRunner = xmlrunner.XMLTestRunner(output='target/test-reports')
except ImportError:
testRunner = None
unittest.main(testRunner=testRunner, verbosity=2)
|
lincoln-lil/flink
|
flink-python/pyflink/table/tests/test_environment_settings_completeness.py
|
Python
|
apache-2.0
| 2,417 | 0.002482 |
import logging
logger = logging.getLogger(__name__)
class Singleton(type):
def __init__(cls, name, bases, dict):
super(Singleton, cls).__init__(name, bases, dict)
cls.instance = None
def __call__(cls, keep=True, *args, **kwargs):
logger.debug("Handle singleton instance for %s with args (keep=%s): %s, %s" % (cls, keep, args, kwargs))
if keep:
if cls.instance is None:
logger.debug("Return and keep singleton instance for %s with args (keep=%s): %s, %s" % (cls, keep, args, kwargs))
cls.instance = super(Singleton, cls).__call__(*args, **kwargs)
return cls.instance
else:
logger.debug("Return cached singleton instance for %s with args (keep=%s): %s, %s" % (cls, keep, args, kwargs))
return cls.instance
else:
logger.debug("Return new singleton instance for %s with args (keep=%s): %s, %s" % (cls, keep, args, kwargs))
return super(Singleton, cls).__call__(*args, **kwargs)
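# Hedged usage sketch: the class below is built through the metaclass directly
# so the snippet runs on both Python 2 and 3; `ExampleService` is hypothetical.
if __name__ == "__main__":
    ExampleService = Singleton("ExampleService", (object,), {})
    first = ExampleService()            # created and cached
    second = ExampleService()           # cached instance returned
    third = ExampleService(keep=False)  # fresh instance, cache left untouched
    assert first is second
    assert third is not first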
|
sahlinet/fastapp
|
fastapp/plugins/singleton.py
|
Python
|
mit
| 1,074 | 0.003724 |
from unittest import TestCase
import validictory
class TestItems(TestCase):
def test_property(self):
schema = {
"type": "object",
"properties": {
"foo": {
"default": "bar"
},
"baz": {
"type": "integer"
}
}
}
data = {'baz': 2}
result = validictory.validate(data, schema, required_by_default=False)
self.assertEqual(result, {"foo": "bar", "baz": 2})
def test_item(self):
schema = {
            'type': 'array',
'items': [
{
'type': 'any'
},
{
'type': 'string'
},
{
'default': 'baz'
},
]
}
data = ['foo', 'bar']
result = validictory.validate(data, schema, required_by_default=False)
self.assertEqual(result, ["foo", "bar", "baz"])
|
jalaziz/validictory
|
validictory/tests/test_defaults.py
|
Python
|
mit
| 1,074 | 0 |
import os
from airtng_flask.config import config_env_files
from flask import Flask
from flask_bcrypt import Bcrypt
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
db = SQLAlchemy()
bcrypt = Bcrypt()
login_manager = LoginManager()
def create_app(config_name='development', p_db=db, p_bcrypt=bcrypt, p_login_manager=login_manager):
new_app = Flask(__name__)
config_app(config_name, new_app)
new_app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
p_db.init_app(new_app)
p_bcrypt.init_app(new_app)
p_login_manager.init_app(new_app)
p_login_manager.login_view = 'register'
return new_app
def config_app(config_name, new_app):
new_app.config.from_object(config_env_files[config_name])
app = create_app()
import airtng_flask.views
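# Hedged usage sketch: since `create_app` is an application factory, additional
# apps can be built with another configuration, e.g. for tests. The 'testing'
# key is an assumption about `config_env_files`, not shown in this module.
#
#   test_app = create_app('testing')
#   with test_app.test_client() as client:
#       response = client.get('/')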
|
TwilioDevEd/airtng-flask
|
airtng_flask/__init__.py
|
Python
|
mit
| 801 | 0.002497 |
# -*- coding: utf-8 -*-
"""
Created on Wed Mar 2 10:56:34 2016
@author: jmjj (Jari Juopperi, jmjj@juopperi.org)
"""
from .main import *
|
jmjj/messages2json
|
messages2json/__init__.py
|
Python
|
mit
| 140 | 0 |
# -*- coding: utf-8 -*-
from openerp.http import request, STATIC_CACHE
from openerp.addons.web import http
import json
import io
from PIL import Image, ImageFont, ImageDraw
from openerp import tools
import cStringIO
import werkzeug.wrappers
import time
import logging
logger = logging.getLogger(__name__)
class Web_Editor(http.Controller):
#------------------------------------------------------
# Backend snippet
#------------------------------------------------------
@http.route('/web_editor/snippets', type='json', auth="user")
def snippets(self, **kwargs):
return request.env.ref('web_editor.snippets').render(None)
#------------------------------------------------------
# Backend html field
#------------------------------------------------------
@http.route('/web_editor/field/html', type='http', auth="user")
def FieldTextHtml(self, model=None, res_id=None, field=None, callback=None, **kwargs):
cr, uid, context = request.cr, request.uid, request.context
kwargs.update(
model=model,
res_id=res_id,
field=field,
datarecord=json.loads(kwargs['datarecord']),
debug='debug' in kwargs)
for k in kwargs:
if isinstance(kwargs[k], basestring) and kwargs[k].isdigit():
kwargs[k] = int(kwargs[k])
trans = dict(
lang=kwargs.get('lang', context.get('lang')),
translatable=kwargs.get('translatable'),
edit_translations=kwargs.get('edit_translations'),
editable=kwargs.get('enable_editor'))
context.update(trans)
kwargs.update(trans)
record = None
if model and kwargs.get('res_id'):
record = request.registry[model].browse(cr, uid, kwargs.get('res_id'), context)
kwargs.update(content=record and getattr(record, field) or "")
return request.render(kwargs.get("template") or "web_editor.FieldTextHtml", kwargs, uid=request.uid)
#------------------------------------------------------
# Backend html field in inline mode
#------------------------------------------------------
@http.route('/web_editor/field/html/inline', type='http', auth="user")
def FieldTextHtmlInline(self, model=None, res_id=None, field=None, callback=None, **kwargs):
kwargs['inline_mode'] = True
kwargs['dont_load_assets'] = not kwargs.get('enable_editor') and not kwargs.get('edit_translations')
return self.FieldTextHtml(model, res_id, field, callback, **kwargs)
#------------------------------------------------------
# convert font into picture
#------------------------------------------------------
@http.route([
'/web_editor/font_to_img/<icon>',
'/web_editor/font_to_img/<icon>/<color>',
'/web_editor/font_to_img/<icon>/<color>/<int:size>',
'/web_editor/font_to_img/<icon>/<color>/<int:size>/<int:alpha>',
], type='http', auth="none")
def export_icon_to_png(self, icon, color='#000', size=100, alpha=255, font='/web/static/lib/fontawesome/fonts/fontawesome-webfont.ttf'):
""" This method converts an unicode character to an image (using Font
Awesome font by default) and is used only for mass mailing because
custom fonts are not supported in mail.
:param icon : decimal encoding of unicode character
:param color : RGB code of the color
:param size : Pixels in integer
:param alpha : transparency of the image from 0 to 255
:param font : font path
:returns PNG image converted from given font
"""
# Make sure we have at least size=1
size = max(1, size)
# Initialize font
addons_path = http.addons_manifest['web']['addons_path']
font_obj = ImageFont.truetype(addons_path + font, size)
# if received character is not a number, keep old behaviour (icon is character)
icon = unichr(int(icon)) if icon.isdigit() else icon
# Determine the dimensions of the icon
image = Image.new("RGBA", (size, size), color=(0, 0, 0, 0))
draw = ImageDraw.Draw(image)
boxw, boxh = draw.textsize(icon, font=font_obj)
draw.text((0, 0), icon, font=font_obj)
left, top, right, bottom = image.getbbox()
# Create an alpha mask
imagemask = Image.new("L", (boxw, boxh), 0)
drawmask = ImageDraw.Draw(imagemask)
drawmask.text((-left, -top), icon, font=font_obj, fill=alpha)
# Create a solid color image and apply the mask
if color.startswith('rgba'):
color = color.replace('rgba', 'rgb')
color = ','.join(color.split(',')[:-1])+')'
iconimage = Image.new("RGBA", (boxw, boxh), color)
iconimage.putalpha(imagemask)
# Create output image
outimage = Image.new("RGBA", (boxw, size), (0, 0, 0, 0))
outimage.paste(iconimage, (left, top))
# output image
output = io.BytesIO()
outimage.save(output, format="PNG")
response = werkzeug.wrappers.Response()
response.mimetype = 'image/png'
response.data = output.getvalue()
response.headers['Cache-Control'] = 'public, max-age=604800'
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'GET, POST'
response.headers['Connection'] = 'close'
response.headers['Date'] = time.strftime("%a, %d-%b-%Y %T GMT", time.gmtime())
response.headers['Expires'] = time.strftime("%a, %d-%b-%Y %T GMT", time.gmtime(time.time()+604800*60))
return response
#------------------------------------------------------
# add attachment (images or link)
#------------------------------------------------------
@http.route('/web_editor/attachment/add', type='http', auth='user', methods=['POST'])
def attach(self, func, upload=None, url=None, disable_optimization=None, **kwargs):
# the upload argument doesn't allow us to access the files if more than
# one file is uploaded, as upload references the first file
# therefore we have to recover the files from the request object
Attachments = request.registry['ir.attachment'] # registry for the attachment table
uploads = []
message = None
if not upload: # no image provided, storing the link and the image name
name = url.split("/").pop() # recover filename
attachment_id = Attachments.create(request.cr, request.uid, {
'name': name,
'type': 'url',
'url': url,
'public': True,
'res_model': 'ir.ui.view',
}, request.context)
uploads += Attachments.read(request.cr, request.uid, [attachment_id], ['name', 'mimetype', 'checksum', 'url'], request.context)
else: # images provided
try:
attachment_ids = []
for c_file in request.httprequest.files.getlist('upload'):
data = c_file.read()
try:
image = Image.open(cStringIO.StringIO(data))
w, h = image.size
if w*h > 42e6: # Nokia Lumia 1020 photo resolution
raise ValueError(
u"Image size excessive, uploaded images must be smaller "
u"than 42 million pixel")
if not disable_optimization and image.format in ('PNG', 'JPEG'):
data = tools.image_save_for_web(image)
except IOError, e:
pass
attachment_id = Attachments.create(request.cr, request.uid, {
'name': c_file.filename,
'datas': data.encode('base64'),
'datas_fname': c_file.filename,
'public': True,
'res_model': 'ir.ui.view',
}, request.context)
attachment_ids.append(attachment_id)
uploads += Attachments.read(request.cr, request.uid, attachment_ids, ['name', 'mimetype', 'checksum', 'url'], request.context)
except Exception, e:
logger.exception("Failed to upload image to attachment")
message = unicode(e)
return """<script type='text/javascript'>
window.parent['%s'](%s, %s);
</script>""" % (func, json.dumps(uploads), json.dumps(message))
#------------------------------------------------------
# remove attachment (images or link)
#------------------------------------------------------
@http.route('/web_editor/attachment/remove', type='json', auth='user')
def remove(self, ids, **kwargs):
""" Removes a web-based image attachment if it is used by no view (template)
Returns a dict mapping attachments which would not be removed (if any)
mapped to the views preventing their removal
"""
cr, uid, context = request.cr, request.uid, request.context
Attachment = request.registry['ir.attachment']
Views = request.registry['ir.ui.view']
attachments_to_remove = []
# views blocking removal of the attachment
removal_blocked_by = {}
for attachment in Attachment.browse(cr, uid, ids, context=context):
# in-document URLs are html-escaped, a straight search will not
# find them
url = tools.html_escape(attachment.local_url)
ids = Views.search(cr, uid, ["|", ('arch_db', 'like', '"%s"' % url), ('arch_db', 'like', "'%s'" % url)], context=context)
if ids:
removal_blocked_by[attachment.id] = Views.read(
cr, uid, ids, ['name'], context=context)
else:
attachments_to_remove.append(attachment.id)
if attachments_to_remove:
Attachment.unlink(cr, uid, attachments_to_remove, context=context)
return removal_blocked_by
|
ChawalitK/odoo
|
addons/web_editor/controllers/main.py
|
Python
|
gpl-3.0
| 10,264 | 0.00341 |
## Progam packages
from .credit_model_classes import credit_model_base
from ...asset.Asset_data import Asset_data
from ..generator_correlated_variables import generator_correlated_variables
from ...core_math.function_optim import function_optim
from ...core_math.functions_credit import generator_matrix, exp_matrix
## Python packages
from scipy.linalg import inv, norm
from numpy import linalg as la
from scipy.optimize import minimize
import numpy as np
class JLT(credit_model_base):
"""
    The JLT model is implemented here.
Attributes:
==========
Input:
_______
1. pi_0 : initial value of the risk premium
Type : float
2. mu : long term average parameter
Type : float
3. alpha : speed of adjustment parameter
Type : float
4. recovery_rate : recorevry rate when default
Type : float
5. sigma : volatility parameter
Type : float
6. market_name : market name
Type : string
Output:
_______
1. RN_migration_matrix : risk-neutral migration matrix
Type : matrix 7x7
2. spreads : credit spreads
Type : vector of length 7
Methods:
_______
    1. getMatrixJLT
    2. add_time_horizon
    3. get_spread
    4. get_hist_transition_matrix
    5. calibrate_spread
    6. calibrate_price
    7. generate_spreads_and_matrix
"""
def __init__(self, pi_0= None, mu= None, alpha= None, sigma= None, recovery_rate= None, market_name= None):
self.time_horizon=0
self.recovery_rate = recovery_rate
# initiate
self.market_spread = None
self.eigenval_hist_gen= None
self.eigenvect_hist_gen= None
self.historical_transition_matrix = None
self.RN_migration_matrix=[]
self.spreads=[]
self.mu=mu
self.alpha=alpha
self.sigma=sigma
self.corr_matrix= None
self.fixed_seed = None
self.num_instrument = 0
self.pi_0=pi_0
self.market_name=market_name
        # could take IR_model as an input, or not... A method has also been defined to retrieve the zero-coupon rates from an IR model.
        # here, we may only need to initialize everything as empty
        # the benefit of this definition is that the object is ready to be used, which is simpler
        # and a method allows attributes to be modified or accessed from outside.
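    # getMatrixJLT below evaluates, for each eigenvalue d of the historical
    # generator, the CIR-type affine closed form
    #   E[ exp(d * integral_t^T pi_s ds) ] = exp(A(t,T) - B(t,T) * pi_t)
    # with v = sqrt(alpha**2 - 2*d*sigma**2); when sigma == 0 a deterministic
    # approximation of the integrated risk premium is used instead.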
def getMatrixJLT(self,t,T):
out = None
d = self.eigenval_hist_gen
if self.sigma !=0:
v = np.sqrt(self.alpha**2 - 2*d*self.sigma**2)
denominator = (v+self.alpha)*(np.exp(v*(T-t))-1)+2*v
A = (2*self.alpha*self.mu)/(self.sigma**2)*np.log((2*v*np.exp(0.5*(self.alpha+v)*(T-t)))/denominator)
B = - (2*d*(np.exp(v*(T-t))-1))/denominator
value = np.exp(A - B*self.risk_premium[t])
out = np.diag(value)
else:
temp = (self.risk_premium[t]+np.exp(-self.alpha*t))*(T-t) + 1/(self.alpha)*(np.exp(-self.alpha*T)-np.exp(-self.alpha*t))
value = np.exp(d*temp)
out = np.diag(value)
return out
def add_time_horizon(self,time_horizon):
"""
Method : add_time_horizon
Function : add the time horizon
Parameter :
1. time_horizon
Type : int
                Function : corresponds to the time horizon
"""
self.time_horizon = time_horizon
def get_spread(self,asset_data):
"""
Method : get_spread
Function : retrieve the spread from the pickle file
Parameter : None
"""
# read the market spread data ''of time 0''
market = asset_data.get_list_market(self.market_name)
spread_list = market.spread_list
col_index = market.col_index
row_index = market.row_index
self.market_spread = spread_list, col_index, row_index
def get_hist_transition_matrix(self, asset_data):
"""
Method : get_hist_transition_matrix
Function : retrieve the historical transition matrix from the pickle file and then deduce the generator matrix, its eigenvectors and its eigenvalues.
Parameter : None
"""
market = asset_data.get_list_market(self.market_name)
historical_transition_matrix = market.historical_transition_matrix
self.historical_transition_matrix = historical_transition_matrix
self.historical_generator_matrix = generator_matrix(self.historical_transition_matrix)
w, v = la.eig(self.historical_generator_matrix)
eigenval_hist_gen = w.real
eigenvect_hist_gen = (v.T).real
for l in range(len(eigenvect_hist_gen)):
eigenvect_hist_gen[l] = eigenvect_hist_gen[l]/norm(eigenvect_hist_gen[l])
eigenvect_hist_gen = eigenvect_hist_gen.T
self.eigenval_hist_gen= eigenval_hist_gen
self.eigenvect_hist_gen= eigenvect_hist_gen
def calibrate_spread(self, asset_data, AAA_AA):
"""
Method : calibrate_spread
Function : calibrate the model on the market data of spread
Parameter :
1. asset_data
Type : instance of Asset_data class
Function : see class Asset_data for more details.
2. AAA_AA
Type : boolean
Function : if it is true, then only spreads of AAA and AA ratings are used for the calibration
"""
market = asset_data.get_list_market(self.market_name)
if self.mu is None:
self.mu = market.JLT_mu
if self.sigma is None:
self.sigma = market.JLT_sigma
if self.alpha is None:
self.alpha = market.JLT_alpha
if self.pi_0 is None:
self.pi_0 = market.JLT_pi
if self.recovery_rate is None:
self.recovery_rate = market.recovery_rate
spread_list, col_index, row_index = self.market_spread
def f(pi_0):
return function_optim(pi_0, self.alpha, self.mu, self.sigma, self.recovery_rate,
self.eigenvect_hist_gen, self.eigenval_hist_gen,
row_index, col_index, spread_list,AAA_AA)
bds = [(0.001,None)]
res = minimize(f,x0=2, bounds=bds )
self.pi_0 = res.x[0]
return self.pi_0
def calibrate_price(self, asset_data):
"""
Method : calibrate_price
Function : calibrate the model on the market data of bonds' price
Parameter :
1. asset_data
Type : instance of Asset_data class
Function : see class Asset_data for more details.
"""
market = asset_data.get_list_market(self.market_name)
if self.mu is None:
self.mu = market.JLT_mu
if self.sigma is None:
self.sigma = market.JLT_sigma
if self.alpha is None:
self.alpha = market.JLT_alpha
if self.pi_0 is None:
self.pi_0 = market.JLT_pi
if self.recovery_rate is None:
self.recovery_rate = market.recovery_rate
spread_list, col_index, row_index = self.market_spread
def f(pi_0):
return function_optim(pi_0, self.alpha, self.mu, self.sigma,
self.recovery_rate, self.eigenvect_hist_gen, self.eigenval_hist_gen,
row_index, col_index, spread_list)
res = minimize(f,x0=2)
self.pi_0 = res.x[0]
return self.pi_0
def generate_spreads_and_matrix(self):
"""
Method : generate_spreads_and_matrix
Function : generate the spreads and risk-neutral transition matrix with parameters in the model
Parameter : None
"""
self.spreads=[]
self.RN_migration_matrix=[]
dw = generator_correlated_variables(corr_matrix = self.corr_matrix, time_horizon = self.time_horizon, fixed_seed = self.fixed_seed)
# ===================================
# Generate CIR process
# ===================================
self.risk_premium=[self.pi_0]
for time_step in range(1,self.time_horizon+1):
dpi = self.alpha*(self.mu-self.risk_premium[-1]) + self.sigma*np.sqrt(self.risk_premium[-1])*dw[2,time_step-1]
self.risk_premium.append(max(0,self.risk_premium[-1] + dpi))
for t in range(self.time_horizon+1):
            # loop over time steps
RN_generator_matrix_t = np.dot(np.dot(self.eigenvect_hist_gen, np.diag(self.risk_premium[t]*self.eigenval_hist_gen)), inv(self.eigenvect_hist_gen))
RN_migration_matrix_t = exp_matrix(RN_generator_matrix_t).astype('Float64')
self.RN_migration_matrix.append(RN_migration_matrix_t)
for t in range(self.time_horizon+1):
spread_T = []
for T in range(t+1,t+21):
spread_t_T = []
JLTmatrix = self.getMatrixJLT(t,T)
I = np.identity(len(self.eigenval_hist_gen))
RN_migration_matrix_t_T = I + np.dot(np.dot(self.eigenvect_hist_gen,(JLTmatrix-I)),inv(self.eigenvect_hist_gen))
if all(1-(1-self.recovery_rate)*RN_migration_matrix_t_T.T[-1][:-1] > 0):
spread_t_T = -1/(T-t)* np.log(1-(1-self.recovery_rate)*RN_migration_matrix_t_T.T[-1][:-1])
else:
raise ValueError('value in log not defined!')
spread_T.append(spread_t_T)
self.spreads.append(spread_T)
# self.spreads is a list with three dimensions: 1. time step; 2. maturity; 3. rating
return self.RN_migration_matrix, self.spreads
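# Illustrative usage sketch (hypothetical: the constructor signature and the name of the
# historical-calibration method are not shown here, so they are assumed):
#
#   jlt = JLT(...)                                  # market_name, alpha, mu, sigma, pi_0,
#                                                   # recovery_rate, corr_matrix, time_horizon, ...
#   jlt.calibrate_hist_generator(asset_data)        # assumed name for the method ending above
#   jlt.calibrate_spread(asset_data, AAA_AA=True)   # fits pi_0 on AAA/AA market spreads
#   RN_matrices, spreads = jlt.generate_spreads_and_matrix()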
|
jbalm/ActuarialCashFlowModel
|
esg/credit_risk/JLT.py
|
Python
|
gpl-3.0
| 10,706 | 0.014413 |
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A setup module for the GRPC Python package."""
from distutils import cygwinccompiler
from distutils import extension as _extension
from distutils import util
import os
import os.path
import pkg_resources
import platform
import re
import shlex
import shutil
import sys
import sysconfig
import setuptools
from setuptools.command import egg_info
import subprocess
from subprocess import PIPE
# Redirect the manifest template from MANIFEST.in to PYTHON-MANIFEST.in.
egg_info.manifest_maker.template = 'PYTHON-MANIFEST.in'
PY3 = sys.version_info.major == 3
PYTHON_STEM = os.path.join('src', 'python', 'grpcio')
CORE_INCLUDE = ('include', '.',)
ABSL_INCLUDE = (os.path.join('third_party', 'abseil-cpp'),)
ADDRESS_SORTING_INCLUDE = (os.path.join('third_party', 'address_sorting', 'include'),)
CARES_INCLUDE = (
os.path.join('third_party', 'cares'),
os.path.join('third_party', 'cares', 'cares'),)
if 'darwin' in sys.platform:
CARES_INCLUDE += (os.path.join('third_party', 'cares', 'config_darwin'),)
if 'freebsd' in sys.platform:
CARES_INCLUDE += (os.path.join('third_party', 'cares', 'config_freebsd'),)
if 'linux' in sys.platform:
CARES_INCLUDE += (os.path.join('third_party', 'cares', 'config_linux'),)
if 'openbsd' in sys.platform:
CARES_INCLUDE += (os.path.join('third_party', 'cares', 'config_openbsd'),)
SSL_INCLUDE = (os.path.join('third_party', 'boringssl-with-bazel', 'src', 'include'),)
UPB_INCLUDE = (os.path.join('third_party', 'upb'),)
UPB_GRPC_GENERATED_INCLUDE = (os.path.join('src', 'core', 'ext', 'upb-generated'),)
ZLIB_INCLUDE = (os.path.join('third_party', 'zlib'),)
README = os.path.join(PYTHON_STEM, 'README.rst')
# Ensure we're in the proper directory whether or not we're being used by pip.
os.chdir(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, os.path.abspath(PYTHON_STEM))
# Break import-style to ensure we can actually find our in-repo dependencies.
import _parallel_compile_patch
import _spawn_patch
import commands
import grpc_core_dependencies
import grpc_version
_parallel_compile_patch.monkeypatch_compile_maybe()
_spawn_patch.monkeypatch_spawn()
LICENSE = 'Apache License 2.0'
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'License :: OSI Approved :: Apache Software License',
]
# Environment variable to determine whether or not the Cython extension should
# *use* Cython or use the generated C files. Note that this requires the C files
# to have been generated by building first *with* Cython support. Even if this
# is set to false, if the script detects that the generated `.c` file isn't
# present, then it will still attempt to use Cython.
BUILD_WITH_CYTHON = os.environ.get('GRPC_PYTHON_BUILD_WITH_CYTHON', False)
# Export this variable to use the system installation of openssl. You need to
# have the header files installed (in /usr/include/openssl) and during
# runtime, the shared library must be installed
BUILD_WITH_SYSTEM_OPENSSL = os.environ.get('GRPC_PYTHON_BUILD_SYSTEM_OPENSSL',
False)
# Export this variable to use the system installation of zlib. You need to
# have the header files installed (in /usr/include/) and during
# runtime, the shared library must be installed
BUILD_WITH_SYSTEM_ZLIB = os.environ.get('GRPC_PYTHON_BUILD_SYSTEM_ZLIB',
False)
# Export this variable to use the system installation of cares. You need to
# have the header files installed (in /usr/include/) and during
# runtime, the shared library must be installed
BUILD_WITH_SYSTEM_CARES = os.environ.get('GRPC_PYTHON_BUILD_SYSTEM_CARES',
False)
# For local development use only: This skips building gRPC Core and its
# dependencies, including protobuf and boringssl. This allows "incremental"
# compilation by first building gRPC Core using make, then building only the
# Python/Cython layers here.
#
# Note that this requires libboringssl.a in the libs/{dbg,opt}/ directory, which
# may require configuring make to not use the system openssl implementation:
#
# make HAS_SYSTEM_OPENSSL_ALPN=0
#
# TODO(ericgribkoff) Respect the BUILD_WITH_SYSTEM_* flags alongside this option
USE_PREBUILT_GRPC_CORE = os.environ.get(
'GRPC_PYTHON_USE_PREBUILT_GRPC_CORE', False)
# If this environmental variable is set, GRPC will not try to be compatible with
# libc versions older than the one it was compiled against.
DISABLE_LIBC_COMPATIBILITY = os.environ.get('GRPC_PYTHON_DISABLE_LIBC_COMPATIBILITY', False)
# Environment variable to determine whether or not to enable coverage analysis
# in Cython modules.
ENABLE_CYTHON_TRACING = os.environ.get(
'GRPC_PYTHON_ENABLE_CYTHON_TRACING', False)
# Environment variable specifying whether or not there's interest in setting up
# documentation building.
ENABLE_DOCUMENTATION_BUILD = os.environ.get(
'GRPC_PYTHON_ENABLE_DOCUMENTATION_BUILD', False)
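# Illustrative example (not part of the build script): a from-source build that forces
# Cython and the system OpenSSL could be invoked roughly as
#   GRPC_PYTHON_BUILD_WITH_CYTHON=1 GRPC_PYTHON_BUILD_SYSTEM_OPENSSL=1 pip install .
# using the environment variables read above.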
def check_linker_need_libatomic():
"""Test if linker on system needs libatomic."""
code_test = (b'#include <atomic>\n' +
b'int main() { return std::atomic<int64_t>{}; }')
cc_test = subprocess.Popen(['cc', '-x', 'c++', '-std=c++11', '-'],
stdin=PIPE,
stdout=PIPE,
stderr=PIPE)
cc_test.communicate(input=code_test)
return cc_test.returncode != 0
# There are some situations (like on Windows) where CC, CFLAGS, and LDFLAGS are
# entirely ignored/dropped/forgotten by distutils and its Cygwin/MinGW support.
# We use these environment variables to thus get around that without locking
# ourselves in w.r.t. the multitude of operating systems this ought to build on.
# We can also use these variables as a way to inject environment-specific
# compiler/linker flags. We assume GCC-like compilers and/or MinGW as a
# reasonable default.
EXTRA_ENV_COMPILE_ARGS = os.environ.get('GRPC_PYTHON_CFLAGS', None)
EXTRA_ENV_LINK_ARGS = os.environ.get('GRPC_PYTHON_LDFLAGS', None)
if EXTRA_ENV_COMPILE_ARGS is None:
EXTRA_ENV_COMPILE_ARGS = ' -std=c++11'
if 'win32' in sys.platform:
if sys.version_info < (3, 5):
EXTRA_ENV_COMPILE_ARGS += ' -D_hypot=hypot'
# We use define flags here and don't directly add to DEFINE_MACROS below to
# ensure that the expert user/builder has a way of turning it off (via the
# envvars) without adding yet more GRPC-specific envvars.
# See https://sourceforge.net/p/mingw-w64/bugs/363/
if '32' in platform.architecture()[0]:
EXTRA_ENV_COMPILE_ARGS += ' -D_ftime=_ftime32 -D_timeb=__timeb32 -D_ftime_s=_ftime32_s'
else:
EXTRA_ENV_COMPILE_ARGS += ' -D_ftime=_ftime64 -D_timeb=__timeb64'
else:
# We need to statically link the C++ Runtime, only the C runtime is
# available dynamically
EXTRA_ENV_COMPILE_ARGS += ' /MT'
elif "linux" in sys.platform:
EXTRA_ENV_COMPILE_ARGS += ' -std=gnu99 -fvisibility=hidden -fno-wrapv -fno-exceptions'
elif "darwin" in sys.platform:
EXTRA_ENV_COMPILE_ARGS += ' -stdlib=libc++ -fvisibility=hidden -fno-wrapv -fno-exceptions'
if EXTRA_ENV_LINK_ARGS is None:
EXTRA_ENV_LINK_ARGS = ''
if "linux" in sys.platform or "darwin" in sys.platform:
EXTRA_ENV_LINK_ARGS += ' -lpthread'
if check_linker_need_libatomic():
EXTRA_ENV_LINK_ARGS += ' -latomic'
elif "win32" in sys.platform and sys.version_info < (3, 5):
msvcr = cygwinccompiler.get_msvcr()[0]
EXTRA_ENV_LINK_ARGS += (
' -static-libgcc -static-libstdc++ -mcrtdll={msvcr}'
' -static -lshlwapi'.format(msvcr=msvcr))
if "linux" in sys.platform:
EXTRA_ENV_LINK_ARGS += ' -Wl,-wrap,memcpy -static-libgcc'
EXTRA_COMPILE_ARGS = shlex.split(EXTRA_ENV_COMPILE_ARGS)
EXTRA_LINK_ARGS = shlex.split(EXTRA_ENV_LINK_ARGS)
CYTHON_EXTENSION_PACKAGE_NAMES = ()
CYTHON_EXTENSION_MODULE_NAMES = ('grpc._cython.cygrpc',)
CYTHON_HELPER_C_FILES = ()
CORE_C_FILES = tuple(grpc_core_dependencies.CORE_SOURCE_FILES)
if "win32" in sys.platform:
CORE_C_FILES = filter(lambda x: 'third_party/cares' not in x, CORE_C_FILES)
if BUILD_WITH_SYSTEM_OPENSSL:
CORE_C_FILES = filter(lambda x: 'third_party/boringssl' not in x, CORE_C_FILES)
CORE_C_FILES = filter(lambda x: 'src/boringssl' not in x, CORE_C_FILES)
SSL_INCLUDE = (os.path.join('/usr', 'include', 'openssl'),)
if BUILD_WITH_SYSTEM_ZLIB:
CORE_C_FILES = filter(lambda x: 'third_party/zlib' not in x, CORE_C_FILES)
ZLIB_INCLUDE = (os.path.join('/usr', 'include'),)
if BUILD_WITH_SYSTEM_CARES:
CORE_C_FILES = filter(lambda x: 'third_party/cares' not in x, CORE_C_FILES)
CARES_INCLUDE = (os.path.join('/usr', 'include'),)
EXTENSION_INCLUDE_DIRECTORIES = (
(PYTHON_STEM,) +
CORE_INCLUDE +
ABSL_INCLUDE +
ADDRESS_SORTING_INCLUDE +
CARES_INCLUDE +
SSL_INCLUDE +
UPB_INCLUDE +
UPB_GRPC_GENERATED_INCLUDE +
ZLIB_INCLUDE)
EXTENSION_LIBRARIES = ()
if "linux" in sys.platform:
EXTENSION_LIBRARIES += ('rt',)
if not "win32" in sys.platform:
EXTENSION_LIBRARIES += ('m',)
if "win32" in sys.platform:
EXTENSION_LIBRARIES += ('advapi32', 'ws2_32', 'dbghelp',)
if BUILD_WITH_SYSTEM_OPENSSL:
EXTENSION_LIBRARIES += ('ssl', 'crypto',)
if BUILD_WITH_SYSTEM_ZLIB:
EXTENSION_LIBRARIES += ('z',)
if BUILD_WITH_SYSTEM_CARES:
EXTENSION_LIBRARIES += ('cares',)
DEFINE_MACROS = (('OPENSSL_NO_ASM', 1), ('_WIN32_WINNT', 0x600))
if not DISABLE_LIBC_COMPATIBILITY:
DEFINE_MACROS += (('GPR_BACKWARDS_COMPATIBILITY_MODE', 1),)
if "win32" in sys.platform:
# TODO(zyc): Re-enable c-ares on x64 and x86 windows after fixing the
# ares_library_init compilation issue
DEFINE_MACROS += (('WIN32_LEAN_AND_MEAN', 1), ('CARES_STATICLIB', 1),
('GRPC_ARES', 0), ('NTDDI_VERSION', 0x06000000),
('NOMINMAX', 1),)
if '64bit' in platform.architecture()[0]:
DEFINE_MACROS += (('MS_WIN64', 1),)
elif sys.version_info >= (3, 5):
# For some reason, this is needed to get access to inet_pton/inet_ntop
# on msvc, but only for 32 bits
DEFINE_MACROS += (('NTDDI_VERSION', 0x06000000),)
else:
DEFINE_MACROS += (('HAVE_CONFIG_H', 1), ('GRPC_ENABLE_FORK_SUPPORT', 1),)
LDFLAGS = tuple(EXTRA_LINK_ARGS)
CFLAGS = tuple(EXTRA_COMPILE_ARGS)
if "linux" in sys.platform or "darwin" in sys.platform:
pymodinit_type = 'PyObject*' if PY3 else 'void'
pymodinit = 'extern "C" __attribute__((visibility ("default"))) {}'.format(pymodinit_type)
DEFINE_MACROS += (('PyMODINIT_FUNC', pymodinit),)
DEFINE_MACROS += (('GRPC_POSIX_FORK_ALLOW_PTHREAD_ATFORK', 1),)
# By default, Python3 distutils enforces compatibility of
# c plugins (.so files) with the OSX version Python3 was built with.
# For Python3.4, this is OSX 10.6, but we need Thread Local Support (__thread)
if 'darwin' in sys.platform and PY3:
mac_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
if mac_target and (pkg_resources.parse_version(mac_target) <
pkg_resources.parse_version('10.7.0')):
os.environ['MACOSX_DEPLOYMENT_TARGET'] = '10.7'
os.environ['_PYTHON_HOST_PLATFORM'] = re.sub(
r'macosx-[0-9]+\.[0-9]+-(.+)',
r'macosx-10.7-\1',
util.get_platform())
def cython_extensions_and_necessity():
cython_module_files = [os.path.join(PYTHON_STEM,
name.replace('.', '/') + '.pyx')
for name in CYTHON_EXTENSION_MODULE_NAMES]
config = os.environ.get('CONFIG', 'opt')
prefix = 'libs/' + config + '/'
if USE_PREBUILT_GRPC_CORE:
extra_objects = [prefix + 'libares.a',
prefix + 'libboringssl.a',
prefix + 'libgpr.a',
prefix + 'libgrpc.a']
core_c_files = []
else:
core_c_files = list(CORE_C_FILES)
extra_objects = []
extensions = [
_extension.Extension(
name=module_name,
sources=[module_file] + list(CYTHON_HELPER_C_FILES) + core_c_files,
include_dirs=list(EXTENSION_INCLUDE_DIRECTORIES),
libraries=list(EXTENSION_LIBRARIES),
define_macros=list(DEFINE_MACROS),
extra_objects=extra_objects,
extra_compile_args=list(CFLAGS),
extra_link_args=list(LDFLAGS),
) for (module_name, module_file) in zip(list(CYTHON_EXTENSION_MODULE_NAMES), cython_module_files)
]
need_cython = BUILD_WITH_CYTHON
if not BUILD_WITH_CYTHON:
need_cython = need_cython or not commands.check_and_update_cythonization(extensions)
# TODO: the strategy for conditional compiling and exposing the aio Cython
# dependencies will be revisited by https://github.com/grpc/grpc/issues/19728
return commands.try_cythonize(extensions, linetracing=ENABLE_CYTHON_TRACING, mandatory=BUILD_WITH_CYTHON), need_cython
CYTHON_EXTENSION_MODULES, need_cython = cython_extensions_and_necessity()
PACKAGE_DIRECTORIES = {
'': PYTHON_STEM,
}
INSTALL_REQUIRES = (
"six>=1.5.2",
"futures>=2.2.0; python_version<'3.2'",
"enum34>=1.0.4; python_version<'3.4'",
)
SETUP_REQUIRES = INSTALL_REQUIRES + (
'Sphinx~=1.8.1',
'six>=1.10',
) if ENABLE_DOCUMENTATION_BUILD else ()
try:
import Cython
except ImportError:
if BUILD_WITH_CYTHON:
sys.stderr.write(
"You requested a Cython build via GRPC_PYTHON_BUILD_WITH_CYTHON, "
"but do not have Cython installed. We won't stop you from using "
"other commands, but the extension files will fail to build.\n")
elif need_cython:
sys.stderr.write(
'We could not find Cython. Setup may take 10-20 minutes.\n')
SETUP_REQUIRES += ('cython>=0.23',)
COMMAND_CLASS = {
'doc': commands.SphinxDocumentation,
'build_project_metadata': commands.BuildProjectMetadata,
'build_py': commands.BuildPy,
'build_ext': commands.BuildExt,
'gather': commands.Gather,
}
# Ensure that package data is copied over before any commands have been run:
credentials_dir = os.path.join(PYTHON_STEM, 'grpc', '_cython', '_credentials')
try:
os.mkdir(credentials_dir)
except OSError:
pass
shutil.copyfile(os.path.join('etc', 'roots.pem'),
os.path.join(credentials_dir, 'roots.pem'))
PACKAGE_DATA = {
# Binaries that may or may not be present in the final installation, but are
# mentioned here for completeness.
'grpc._cython': [
'_credentials/roots.pem',
'_windows/grpc_c.32.python',
'_windows/grpc_c.64.python',
],
}
PACKAGES = setuptools.find_packages(PYTHON_STEM)
setuptools.setup(
name='grpcio',
version=grpc_version.VERSION,
description='HTTP/2-based RPC framework',
author='The gRPC Authors',
author_email='grpc-io@googlegroups.com',
url='https://grpc.io',
license=LICENSE,
classifiers=CLASSIFIERS,
long_description=open(README).read(),
ext_modules=CYTHON_EXTENSION_MODULES,
packages=list(PACKAGES),
package_dir=PACKAGE_DIRECTORIES,
package_data=PACKAGE_DATA,
install_requires=INSTALL_REQUIRES,
setup_requires=SETUP_REQUIRES,
cmdclass=COMMAND_CLASS,
)
|
firebase/grpc-SwiftPM
|
setup.py
|
Python
|
apache-2.0
| 15,931 | 0.006403 |
#--
# Copyright (c) 2012-2014 Net-ng.
# All rights reserved.
#
# This software is licensed under the BSD License, as described in
# the file LICENSE.txt, which you should have received as part of
# this distribution.
#--
import peak
import datetime
from nagare import presentation, security, ajax, i18n
from nagare.i18n import _, format_date
from .comp import DueDate
@peak.rules.when(ajax.py2js, (datetime.date,))
def py2js(value, h):
"""Generic method to transcode a Datetime
In:
- ``value`` -- the datetime object
- ``h`` -- the current renderer
Return:
- transcoded javascript
"""
dt = i18n.to_timezone(value)
return 'new Date("%s", "%s", "%s")' % (
dt.year, dt.month - 1, dt.day)
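# For example (illustrative, assuming the configured timezone leaves the date unchanged):
# py2js(datetime.date(2017, 3, 5), h) yields the string 'new Date("2017", "2", "5")',
# i.e. the month is shifted to JavaScript's 0-based convention.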
@peak.rules.when(ajax.py2js, (DueDate,))
def py2js(value, h):
if value.due_date:
return ajax.py2js(value.due_date, h)
return None
@presentation.render_for(DueDate)
def render_DueDate(self, h, comp, model):
return h.root
@presentation.render_for(DueDate, model='badge')
def render_DueDate_badge(self, h, *args):
"""Gallery badge for the card"""
if self.due_date:
with h.span(class_='badge'):
h << h.span(h.i(class_='icon-alarm'), ' ', self.get_days_count(), class_='label due-date ' + self.get_class(), title=format_date(self.due_date, 'full'))
return h.root
@presentation.render_for(DueDate, model='action')
def render_DueDate_button(self, h, comp, *args):
if security.has_permissions('due_date', self.card):
self._init_calendar()
id_ = h.generate_id()
if self.due_date:
classes = ['btn', 'btn-due-date', self.get_class()]
with h.a(class_=u' '.join(classes), id_=id_).action(self.calendar().toggle):
h << h.i(class_='icon-alarm duedate-icon')
h << format_date(self.due_date, 'short')
else:
with h.a(class_='btn', id_=id_).action(self.calendar().toggle):
h << h.i(class_='icon-alarm')
h << _('Due date')
h << self.calendar.on_answer(self.set_value)
return h.root
|
Net-ng/kansha
|
kansha/card_addons/due_date/view.py
|
Python
|
bsd-3-clause
| 2,112 | 0.001894 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2018, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigiq_application_fastl4_udp
short_description: Manages BIG-IQ FastL4 UDP applications
description:
- Manages BIG-IQ applications used for load balancing a UDP-based application
with a FastL4 profile.
version_added: 2.6
options:
name:
description:
- Name of the new application.
type: str
required: True
description:
description:
- Description of the application.
type: str
servers:
description:
- A list of servers that the application is hosted on.
- If you are familiar with other BIG-IP settings, you might also refer to this
list as the list of pool members.
- When creating a new application, at least one server is required.
suboptions:
address:
description:
- The IP address of the server.
type: str
required: True
port:
description:
- The port of the server.
- When creating a new application and specifying a server, if this parameter
is not provided, the default of C(8000) will be used.
type: str
default: 8000
type: list
inbound_virtual:
description:
- Settings to configure the virtual which will receive the inbound connection.
suboptions:
address:
description:
- Specifies destination IP address information to which the virtual server
sends traffic.
- This parameter is required when creating a new application.
type: str
required: True
netmask:
description:
- Specifies the netmask to associate with the given C(destination).
- This parameter is required when creating a new application.
type: str
required: True
port:
description:
- The port that the virtual listens for connections on.
- When creating a new application, if this parameter is not specified, the
default value of C(53) will be used.
type: str
default: 53
service_environment:
description:
- Specifies the name of service environment that the application will be
deployed to.
- When creating a new application, this parameter is required.
- The service environment type will be discovered by this module automatically.
Therefore, it is crucial that you maintain unique names for items in the
different service environment types.
- SSGs are not supported for this type of application.
type: str
add_analytics:
description:
- Collects statistics of the BIG-IP that the application is deployed to.
- This parameter is only relevant when specifying a C(service_environment) which
is a BIG-IP; not an SSG.
type: bool
default: no
state:
description:
- The state of the resource on the system.
- When C(present), guarantees that the resource exists with the provided attributes.
- When C(absent), removes the resource from the system.
type: str
choices:
- absent
- present
default: present
wait:
description:
- If the module should wait for the application to be created, deleted or updated.
type: bool
default: yes
extends_documentation_fragment: f5
notes:
- This module does not support updating of your application (whether deployed or not).
If you need to update the application, the recommended practice is to remove and
re-create.
- This module will not work on BIGIQ version 6.1.x or greater.
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = r'''
- name: Load balance a UDP-based application with a FastL4 profile
bigiq_application_fastl4_udp:
name: my-app
description: My description
service_environment: my-bigip-device
servers:
- address: 1.2.3.4
port: 8080
- address: 5.6.7.8
port: 8080
inbound_virtual:
name: foo
address: 2.2.2.2
netmask: 255.255.255.255
port: 53
provider:
password: secret
server: lb.mydomain.com
user: admin
state: present
delegate_to: localhost
'''
RETURN = r'''
description:
description: The new description of the application of the resource.
returned: changed
type: str
sample: My application
service_environment:
description: The environment which the service was deployed to.
returned: changed
type: str
sample: my-ssg1
inbound_virtual_destination:
description: The destination of the virtual that was created.
returned: changed
type: str
sample: 6.7.8.9
inbound_virtual_netmask:
description: The network mask of the provided inbound destination.
returned: changed
type: str
sample: 255.255.255.0
inbound_virtual_port:
description: The port the inbound virtual address listens on.
returned: changed
type: int
sample: 80
servers:
description: List of servers, and their ports, that make up the application.
type: complex
returned: changed
contains:
address:
description: The IP address of the server.
returned: changed
type: str
sample: 2.3.4.5
port:
description: The port that the server listens on.
returned: changed
type: int
sample: 8080
sample: hash/dictionary of values
'''
import time
from distutils.version import LooseVersion
from ansible.module_utils.basic import AnsibleModule
try:
from library.module_utils.network.f5.bigiq import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.ipaddress import is_valid_ip
from library.module_utils.network.f5.icontrol import bigiq_version
except ImportError:
from ansible.module_utils.network.f5.bigiq import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.ipaddress import is_valid_ip
from ansible.module_utils.network.f5.icontrol import bigiq_version
class Parameters(AnsibleF5Parameters):
api_map = {
'templateReference': 'template_reference',
'subPath': 'sub_path',
'configSetName': 'config_set_name',
'defaultDeviceReference': 'default_device_reference',
'addAnalytics': 'add_analytics'
}
api_attributes = [
'resources', 'description', 'configSetName', 'subPath', 'templateReference',
'defaultDeviceReference', 'addAnalytics'
]
returnables = [
'resources', 'description', 'config_set_name', 'sub_path', 'template_reference',
'default_device_reference', 'servers', 'inbound_virtual', 'add_analytics'
]
updatables = [
'resources', 'description', 'config_set_name', 'sub_path', 'template_reference',
'default_device_reference', 'servers', 'add_analytics'
]
class ApiParameters(Parameters):
pass
class ModuleParameters(Parameters):
@property
def http_profile(self):
return "profile_http"
@property
def config_set_name(self):
return self.name
@property
def sub_path(self):
return self.name
@property
def template_reference(self):
filter = "name+eq+'Default-f5-FastL4-UDP-lb-template'"
uri = "https://{0}:{1}/mgmt/cm/global/templates/?$filter={2}&$top=1&$select=selfLink".format(
self.client.provider['server'],
self.client.provider['server_port'],
filter
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if resp.status == 200 and response['totalItems'] == 0:
raise F5ModuleError(
"No default HTTP LB template was found."
)
elif 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp._content)
result = dict(
link=response['items'][0]['selfLink']
)
return result
@property
def default_device_reference(self):
if is_valid_ip(self.service_environment):
# An IP address was specified
filter = "address+eq+'{0}'".format(self.service_environment)
else:
# Assume a hostname was specified
filter = "hostname+eq+'{0}'".format(self.service_environment)
uri = "https://{0}:{1}/mgmt/shared/resolver/device-groups/cm-adccore-allbigipDevices/devices/?$filter={2}&$top=1&$select=selfLink".format(
self.client.provider['server'],
self.client.provider['server_port'],
filter
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if resp.status == 200 and response['totalItems'] == 0:
raise F5ModuleError(
"The specified service_environment '{0}' was found.".format(self.service_environment)
)
elif 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp._content)
result = dict(
link=response['items'][0]['selfLink']
)
return result
class Changes(Parameters):
def to_return(self):
result = {}
try:
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
except Exception:
pass
return result
class UsableChanges(Changes):
@property
def resources(self):
result = dict()
result.update(self.udp_monitor)
result.update(self.virtual)
result.update(self.pool)
result.update(self.nodes)
return result
@property
def virtual(self):
result = dict()
result['ltm:virtual:c2e739ba116f'] = [
dict(
parameters=dict(
name='virtual',
destinationAddress=self.inbound_virtual['address'],
mask=self.inbound_virtual['netmask'],
destinationPort=self.inbound_virtual.get('port', 53)
),
subcollectionResources=self.profiles
)
]
return result
@property
def profiles(self):
result = {
'profiles:53f9b3028d90': [
dict(
parameters=dict()
)
]
}
return result
@property
def pool(self):
result = dict()
result['ltm:pool:e6879775458c'] = [
dict(
parameters=dict(
name='pool_0'
),
subcollectionResources=self.pool_members
)
]
return result
@property
def pool_members(self):
result = dict()
result['members:b19842fe713a'] = []
for x in self.servers:
member = dict(
parameters=dict(
port=x.get('port', 8000),
nodeReference=dict(
link='#/resources/ltm:node:b19842fe713a/{0}'.format(x['address']),
fullPath='# {0}'.format(x['address'])
)
)
)
result['members:b19842fe713a'].append(member)
return result
@property
def udp_monitor(self):
result = dict()
result['ltm:monitor:udp:22cdcfda0a40'] = [
dict(
parameters=dict(
name='monitor-udp'
)
)
]
return result
@property
def nodes(self):
result = dict()
result['ltm:node:b19842fe713a'] = []
for x in self.servers:
tmp = dict(
parameters=dict(
name=x['address'],
address=x['address']
)
)
result['ltm:node:b19842fe713a'].append(tmp)
return result
@property
def node_addresses(self):
result = [x['address'] for x in self.servers]
return result
class ReportableChanges(Changes):
pass
class Difference(object):
def __init__(self, want, have=None):
self.want = want
self.have = have
def compare(self, param):
try:
result = getattr(self, param)
return result
except AttributeError:
return self.__default(param)
def __default(self, param):
attr1 = getattr(self.want, param)
try:
attr2 = getattr(self.have, param)
if attr1 != attr2:
return attr1
except AttributeError:
return attr1
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = F5RestClient(**self.module.params)
self.want = ModuleParameters(params=self.module.params)
self.want.client = self.client
self.have = ApiParameters()
self.changes = UsableChanges()
def _set_changed_options(self):
changed = {}
for key in Parameters.returnables:
if getattr(self.want, key) is not None:
changed[key] = getattr(self.want, key)
if changed:
self.changes = UsableChanges(params=changed)
def _update_changed_options(self):
diff = Difference(self.want, self.have)
updatables = Parameters.updatables
changed = dict()
for k in updatables:
change = diff.compare(k)
if change is None:
continue
else:
if isinstance(change, dict):
changed.update(change)
else:
changed[k] = change
if changed:
self.changes = UsableChanges(params=changed)
return True
return False
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def check_bigiq_version(self):
version = bigiq_version(self.client)
if LooseVersion(version) >= LooseVersion('6.1.0'):
raise F5ModuleError(
'Module supports only BIGIQ version 6.0.x or lower.'
)
def exec_module(self):
self.check_bigiq_version()
changed = False
result = dict()
state = self.want.state
if state == "present":
changed = self.present()
elif state == "absent":
changed = self.absent()
reportable = ReportableChanges(params=self.changes.to_return())
changes = reportable.to_return()
result.update(**changes)
result.update(dict(changed=changed))
self._announce_deprecations(result)
return result
def _announce_deprecations(self, result):
warnings = result.pop('__warnings', [])
for warning in warnings:
self.client.module.deprecate(
msg=warning['msg'],
version=warning['version']
)
def present(self):
if self.exists():
return False
else:
return self.create()
def exists(self):
uri = "https://{0}:{1}/mgmt/ap/query/v1/tenants/default/reports/AllApplicationsList?$filter=name+eq+'{2}'".format(
self.client.provider['server'],
self.client.provider['server_port'],
self.want.name
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if resp.status == 200 and 'result' in response and 'totalItems' in response['result'] and response['result']['totalItems'] == 0:
return False
return True
def remove(self):
if self.module.check_mode:
return True
self_link = self.remove_from_device()
if self.want.wait:
self.wait_for_apply_template_task(self_link)
if self.exists():
raise F5ModuleError("Failed to delete the resource.")
return True
def create(self):
if self.want.service_environment is None:
raise F5ModuleError(
"A 'service_environment' must be specified when creating a new application."
)
if self.want.servers is None:
raise F5ModuleError(
"At least one 'servers' item is needed when creating a new application."
)
if self.want.inbound_virtual is None:
raise F5ModuleError(
"An 'inbound_virtual' must be specified when creating a new application."
)
self._set_changed_options()
if self.module.check_mode:
return True
self_link = self.create_on_device()
if self.want.wait:
self.wait_for_apply_template_task(self_link)
if not self.exists():
raise F5ModuleError(
"Failed to deploy application."
)
return True
def create_on_device(self):
params = self.changes.api_params()
params['mode'] = 'CREATE'
uri = 'https://{0}:{1}/mgmt/cm/global/tasks/apply-template'.format(
self.client.provider['server'],
self.client.provider['server_port']
)
resp = self.client.api.post(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp._content)
return response['selfLink']
def absent(self):
if self.exists():
return self.remove()
return False
def remove_from_device(self):
params = dict(
configSetName=self.want.name,
mode='DELETE'
)
uri = 'https://{0}:{1}/mgmt/cm/global/tasks/apply-template'.format(
self.client.provider['server'],
self.client.provider['server_port']
)
resp = self.client.api.post(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp._content)
return response['selfLink']
def wait_for_apply_template_task(self, self_link):
host = 'https://{0}:{1}'.format(
self.client.provider['server'],
self.client.provider['server_port']
)
uri = self_link.replace('https://localhost', host)
while True:
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if response['status'] == 'FINISHED' and response.get('currentStep', None) == 'DONE':
return True
elif 'errorMessage' in response:
raise F5ModuleError(response['errorMessage'])
time.sleep(5)
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
argument_spec = dict(
name=dict(required=True),
description=dict(),
servers=dict(
type='list',
options=dict(
address=dict(required=True),
port=dict(default=8000)
)
),
inbound_virtual=dict(
type='dict',
options=dict(
address=dict(required=True),
netmask=dict(required=True),
port=dict(default=53)
)
),
service_environment=dict(),
add_analytics=dict(type='bool', default='no'),
state=dict(
default='present',
choices=['present', 'absent']
),
wait=dict(type='bool', default='yes')
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
def main():
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode
)
try:
mm = ModuleManager(module=module)
results = mm.exec_module()
module.exit_json(**results)
except F5ModuleError as ex:
module.fail_json(msg=str(ex))
if __name__ == '__main__':
main()
|
alxgu/ansible
|
lib/ansible/modules/network/f5/bigiq_application_fastl4_udp.py
|
Python
|
gpl-3.0
| 21,905 | 0.00137 |
#!/usr/bin/env python3
import os
from subprocess import check_output
from i3_lemonbar_conf import *
cwd = os.path.dirname(os.path.abspath(__file__))
lemon = "lemonbar -p -f '%s' -f '%s' -g '%s' -B '%s' -F '%s'" % (font, iconfont, geometry, color_back, color_fore)
feed = "python3 -c 'import i3_lemonbar_feeder; i3_lemonbar_feeder.run()'"
check_output('cd %s; %s | %s' % (cwd, feed, lemon), shell=True)
|
jesseops/i3-lemonbar
|
i3_lemonbar.py
|
Python
|
mit
| 370 | 0.002703 |
from __future__ import unicode_literals
from django.db import transaction
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class UserProfile(models.Model):
user = models.OneToOneField(User, unique=True, verbose_name=('user'))
phone = models.CharField(max_length=20)
USER_SOURCE = (
('LO', 'Local'),
('WB', 'Weibo'),
('QQ', 'QQ'),
)
source = models.CharField(max_length=2, choices=USER_SOURCE, default='LO')
created_date = models.DateTimeField(auto_now_add=True)
last_updated_date = models.DateTimeField(auto_now=True)
@transaction.atomic
def createUser(self):
self.user.save()
self.save()
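# Illustrative usage (field values are made up):
#
#   user = User(username='alice')
#   profile = UserProfile(user=user, phone='555-0100', source='WB')
#   profile.createUser()  # saves both the User and the UserProfile in one transaction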
|
passren/Roxd
|
member/models.py
|
Python
|
gpl-2.0
| 727 | 0.005502 |
import os, unicodedata
from django.utils.translation import ugettext_lazy as _
from django.core.files.storage import FileSystemStorage
from django.db.models.fields.files import FileField
from django.core.files.storage import default_storage
from django.conf import settings
from django.utils.safestring import mark_safe
class AdminThumbnailMixin(object):
thumbnail_options = {'size': (60, 60)}
thumbnail_image_field_name = 'image'
thumbnail_alt_field_name = None
def _thumb(self, image, options={'size': (60, 60)}, alt=None):
from easy_thumbnails.files import get_thumbnailer
media = getattr(settings, 'THUMBNAIL_MEDIA_URL', settings.MEDIA_URL)
attrs = []
try:
src = "%s%s" % (media, get_thumbnailer(image).get_thumbnail(options))
except:
src = ""
if alt is not None: attrs.append('alt="%s"' % alt)
return mark_safe('<img src="%s" %s />' % (src, " ".join(attrs)))
def thumbnail(self, obj):
kwargs = {'options': self.thumbnail_options}
if self.thumbnail_alt_field_name:
kwargs['alt'] = getattr(obj, self.thumbnail_alt_field_name)
return self._thumb(getattr(obj, self.thumbnail_image_field_name), **kwargs)
thumbnail.allow_tags = True
thumbnail.short_description = _('Thumbnail')
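# Illustrative usage in a Django admin class (the model and field names are hypothetical):
#
#   from django.contrib import admin
#
#   class GalleryAdmin(AdminThumbnailMixin, admin.ModelAdmin):
#       thumbnail_image_field_name = 'image'
#       list_display = ('title', 'thumbnail')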
def file_cleanup(sender, **kwargs):
"""
File cleanup callback used to emulate the old delete
behavior using signals. Initially django deleted linked
files when an object containing a File/ImageField was deleted.
Usage:
>>> from django.db.models.signals import post_delete
>>> post_delete.connect(file_cleanup, sender=MyModel, dispatch_uid="mymodel.file_cleanup")
"""
for fieldname in sender._meta.get_all_field_names():
try:
field = sender._meta.get_field(fieldname)
except:
field = None
if field and isinstance(field, FileField):
inst = kwargs['instance']
f = getattr(inst, fieldname)
m = inst.__class__._default_manager
if hasattr(f, 'path') and os.path.exists(f.path) \
and not m.filter(**{'%s__exact' % fieldname: getattr(inst, fieldname)})\
.exclude(pk=inst._get_pk_val()):
try:
#os.remove(f.path)
default_storage.delete(f.path)
except:
pass
class ASCIISafeFileSystemStorage(FileSystemStorage):
"""
Same as FileSystemStorage, but converts unicode characters
in file name to ASCII characters before saving the file. This
is mostly useful for the non-English world.
Usage (settings.py):
>>> DEFAULT_FILE_STORAGE = 'webcore.utils.storage.ASCIISafeFileSystemStorage'
"""
def get_valid_name(self, name):
name = unicodedata.normalize('NFKD', unicode(name.replace(' ', '_'))).encode('ascii', 'ignore')
return super(ASCIISafeFileSystemStorage, self).get_valid_name(name)
|
Krozark/django-slider
|
slider/utils.py
|
Python
|
bsd-2-clause
| 3,032 | 0.005937 |
from django.contrib.sitemaps import Sitemap
from .models import BlogEntry
class BlogEntrySitemap(Sitemap):
changefreq = "yearly"
priority = 0.6
protocol = 'https'
def items(self):
return BlogEntry.on_site.filter(is_visible=True)
def lastmod(self, item):
return item.modification
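# Illustrative wiring (the URL configuration below is hypothetical):
#
#   from django.contrib.sitemaps.views import sitemap
#
#   sitemaps = {'blog': BlogEntrySitemap}
#   urlpatterns = [url(r'^sitemap\.xml$', sitemap, {'sitemaps': sitemaps})]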
|
nim65s/MarkDownBlog
|
dmdb/sitemaps.py
|
Python
|
gpl-3.0
| 320 | 0 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (C) 2009-2014:
# Gabes Jean, naparuba@gmail.com
# Gerhard Lausser, Gerhard.Lausser@consol.de
# Gregory Starck, g.starck@gmail.com
# Hartmut Goebel, h.goebel@goebel-consult.de
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
from item import Item, Items
from shinken.util import strip_and_uniq
from shinken.property import BoolProp, IntegerProp, StringProp
from shinken.log import logger, naglog_result
_special_properties = ('service_notification_commands', 'host_notification_commands',
'service_notification_period', 'host_notification_period',
'service_notification_options', 'host_notification_options',
'host_notification_commands', 'contact_name')
_simple_way_parameters = ('service_notification_period', 'host_notification_period',
'service_notification_options', 'host_notification_options',
'service_notification_commands', 'host_notification_commands',
'min_business_impact')
class Contact(Item):
id = 1 # zero is always special in database, so we do not take risk here
my_type = 'contact'
properties = Item.properties.copy()
properties.update({
'contact_name': StringProp(fill_brok=['full_status']),
'alias': StringProp(default='none', fill_brok=['full_status']),
'contactgroups': StringProp(default='', fill_brok=['full_status']),
'host_notifications_enabled': BoolProp(default='1', fill_brok=['full_status']),
'service_notifications_enabled': BoolProp(default='1', fill_brok=['full_status']),
'host_notification_period': StringProp(fill_brok=['full_status']),
'service_notification_period': StringProp(fill_brok=['full_status']),
'host_notification_options': StringProp(fill_brok=['full_status']),
'service_notification_options': StringProp(fill_brok=['full_status']),
'host_notification_commands': StringProp(fill_brok=['full_status']),
'service_notification_commands': StringProp(fill_brok=['full_status']),
'min_business_impact': IntegerProp(default='0', fill_brok=['full_status']),
'email': StringProp(default='none', fill_brok=['full_status']),
'pager': StringProp(default='none', fill_brok=['full_status']),
'address1': StringProp(default='none', fill_brok=['full_status']),
'address2': StringProp(default='none', fill_brok=['full_status']),
'address3': StringProp(default='none', fill_brok=['full_status']),
'address4': StringProp(default='none', fill_brok=['full_status']),
'address5': StringProp(default='none', fill_brok=['full_status']),
'address6': StringProp(default='none', fill_brok=['full_status']),
'can_submit_commands': BoolProp(default='0', fill_brok=['full_status']),
'is_admin': BoolProp(default='0', fill_brok=['full_status']),
'retain_status_information': BoolProp(default='1', fill_brok=['full_status']),
'notificationways': StringProp(default='', fill_brok=['full_status']),
'password': StringProp(default='NOPASSWORDSET', fill_brok=['full_status']),
})
running_properties = Item.running_properties.copy()
running_properties.update({
'modified_attributes': IntegerProp(default=0L, fill_brok=['full_status'], retention=True),
'downtimes': StringProp(default=[], fill_brok=['full_status'], retention=True),
})
# This tab is used to transform old parameters name into new ones
# so from Nagios2 format, to Nagios3 ones.
# Or Shinken deprecated names like criticity
old_properties = {
'min_criticity': 'min_business_impact',
}
macros = {
'CONTACTNAME': 'contact_name',
'CONTACTALIAS': 'alias',
'CONTACTEMAIL': 'email',
'CONTACTPAGER': 'pager',
'CONTACTADDRESS1': 'address1',
'CONTACTADDRESS2': 'address2',
'CONTACTADDRESS3': 'address3',
'CONTACTADDRESS4': 'address4',
'CONTACTADDRESS5': 'address5',
'CONTACTADDRESS6': 'address6',
'CONTACTGROUPNAME': 'get_groupname',
'CONTACTGROUPNAMES': 'get_groupnames'
}
# For debugging purpose only (nice name)
def get_name(self):
try:
return self.contact_name
except AttributeError:
return 'UnnamedContact'
# Search for notification_options with state and if t is
# in service_notification_period
def want_service_notification(self, t, state, type, business_impact, cmd=None):
if not self.service_notifications_enabled:
return False
# If we are in downtime, we do not want notifications
for dt in self.downtimes:
if dt.is_in_effect:
return False
# Now the rest is for sub notificationways. If one is OK, we are ok
# We will filter in another phase
for nw in self.notificationways:
nw_b = nw.want_service_notification(t, state, type, business_impact, cmd)
if nw_b:
return True
# Oh... no one is ok for it? so no, sorry
return False
# Search for notification_options with state and if t is in
# host_notification_period
def want_host_notification(self, t, state, type, business_impact, cmd=None):
if not self.host_notifications_enabled:
return False
# If we are in downtime, we do not want notifications
for dt in self.downtimes:
if dt.is_in_effect:
return False
# Now it's all for sub notificationways. If one is OK, we are OK
# We will filter in another phase
for nw in self.notificationways:
nw_b = nw.want_host_notification(t, state, type, business_impact, cmd)
if nw_b:
return True
# Oh, nobody..so NO :)
return False
# Call to get our commands to launch a Notification
def get_notification_commands(self, type):
r = []
# service_notification_commands for service
notif_commands_prop = type + '_notification_commands'
for nw in self.notificationways:
r.extend(getattr(nw, notif_commands_prop))
return r
# Check if required props are set:
# contacts OR contactgroups is needed
def is_correct(self):
state = True
cls = self.__class__
# All of the above are checks in the notificationways part
for prop, entry in cls.properties.items():
if prop not in _special_properties:
if not hasattr(self, prop) and entry.required:
logger.error("[contact::%s] %s property not set", self.get_name(), prop)
state = False # Bad boy...
# There is a case where there is no nw: when no special_prop is defined
# at all!!
if self.notificationways == []:
for p in _special_properties:
if not hasattr(self, p):
logger.error("[contact::%s] %s property is missing", self.get_name(), p)
state = False
if hasattr(self, 'contact_name'):
for c in cls.illegal_object_name_chars:
if c in self.contact_name:
logger.error("[contact::%s] %s character not allowed in contact_name", self.get_name(), c)
state = False
else:
if hasattr(self, 'alias'): # take the alias if we miss the contact_name
self.contact_name = self.alias
return state
# Raise a log entry when a downtime begins
# CONTACT DOWNTIME ALERT: test_contact;STARTED; Contact has entered a period of scheduled downtime
def raise_enter_downtime_log_entry(self):
naglog_result('info', "CONTACT DOWNTIME ALERT: %s;STARTED; Contact has "
"entered a period of scheduled downtime"
% self.get_name())
# Raise a log entry when a downtime has finished
# CONTACT DOWNTIME ALERT: test_contact;STOPPED; Contact has exited from a period of scheduled downtime
def raise_exit_downtime_log_entry(self):
naglog_result('info', "CONTACT DOWNTIME ALERT: %s;STOPPED; Contact has "
"exited from a period of scheduled downtime"
% self.get_name())
# Raise a log entry when a downtime prematurely ends
# CONTACT DOWNTIME ALERT: test_contact;CANCELLED; Contact has entered a period of scheduled downtime
def raise_cancel_downtime_log_entry(self):
naglog_result('info', "CONTACT DOWNTIME ALERT: %s;CANCELLED; Scheduled "
"downtime for contact has been cancelled."
% self.get_name())
class Contacts(Items):
name_property = "contact_name"
inner_class = Contact
def linkify(self, timeperiods, commands, notificationways):
#self.linkify_with_timeperiods(timeperiods, 'service_notification_period')
#self.linkify_with_timeperiods(timeperiods, 'host_notification_period')
#self.linkify_command_list_with_commands(commands, 'service_notification_commands')
#self.linkify_command_list_with_commands(commands, 'host_notification_commands')
self.linkify_with_notificationways(notificationways)
# We've got a notificationways property with comma-separated contact names
# and we want to have a list of NotificationWay objects
def linkify_with_notificationways(self, notificationways):
for i in self:
if not hasattr(i, 'notificationways'):
continue
new_notificationways = []
for nw_name in strip_and_uniq(i.notificationways.split(',')):
nw = notificationways.find_by_name(nw_name)
if nw is not None:
new_notificationways.append(nw)
else:
err = "The 'notificationways' of the %s '%s' named '%s' is unknown!" % (i.__class__.my_type, i.get_name(), nw_name)
i.configuration_errors.append(err)
# Get the list, but first make elements uniq
i.notificationways = list(set(new_notificationways))
def late_linkify_c_by_commands(self, commands):
for i in self:
for nw in i.notificationways:
nw.late_linkify_nw_by_commands(commands)
# We look for the contacts' special properties and explode them
def explode(self, contactgroups, notificationways):
# The contactgroups property needs to be filled in to get the information
self.apply_partial_inheritance('contactgroups')
# _special properties may have come from a template, so
# import them before processing ourselves
for prop in _special_properties:
if prop == 'contact_name':
continue
self.apply_partial_inheritance(prop)
# Register ourselves into the contactgroups we are in
for c in self:
if c.is_tpl() or not (hasattr(c, 'contact_name') and hasattr(c, 'contactgroups')):
continue
for cg in c.contactgroups.split(','):
contactgroups.add_member(c.contact_name, cg.strip())
# Now create a notification way with the simple parameters of the
# contacts
for c in self:
if not c.is_tpl():
need_notificationway = False
params = {}
for p in _simple_way_parameters:
if hasattr(c, p):
need_notificationway = True
params[p] = getattr(c, p)
else: # put a default text value
# Remove the value and put a default value
setattr(c, p, '')
if need_notificationway:
#print "Create notif way with", params
cname = getattr(c, 'contact_name', getattr(c, 'alias', ''))
nw_name = cname + '_inner_notificationway'
notificationways.new_inner_member(nw_name, params)
if not hasattr(c, 'notificationways'):
c.notificationways = nw_name
else:
c.notificationways = c.notificationways + ',' + nw_name
|
h4wkmoon/shinken
|
shinken/objects/contact.py
|
Python
|
agpl-3.0
| 13,143 | 0.004337 |
import torch
from transformers import PreTrainedModel
from .custom_configuration import CustomConfig, NoSuperInitConfig
class CustomModel(PreTrainedModel):
config_class = CustomConfig
def __init__(self, config):
super().__init__(config)
self.linear = torch.nn.Linear(config.hidden_size, config.hidden_size)
def forward(self, x):
return self.linear(x)
def _init_weights(self, module):
pass
class NoSuperInitModel(PreTrainedModel):
config_class = NoSuperInitConfig
def __init__(self, config):
super().__init__(config)
self.linear = torch.nn.Linear(config.attribute, config.attribute)
def forward(self, x):
return self.linear(x)
def _init_weights(self, module):
pass
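# Illustrative usage (assumes CustomConfig exposes a `hidden_size` attribute, as used above):
#
#   config = CustomConfig(hidden_size=32)
#   model = CustomModel(config)
#   out = model(torch.rand(2, 32))   # -> tensor of shape (2, 32)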
|
huggingface/transformers
|
utils/test_module/custom_modeling.py
|
Python
|
apache-2.0
| 772 | 0 |
import unittest
try:
from unittest import mock
except ImportError:
import mock
from pi3bar.plugins.uptime import get_uptime_seconds, uptime_format, Uptime
class GetUptimeSecondsTestCase(unittest.TestCase):
def test(self):
m = mock.mock_open(read_data='5')
m.return_value.readline.return_value = '5' # py33
with mock.patch('pi3bar.plugins.uptime.open', m, create=True):
seconds = get_uptime_seconds()
self.assertEqual(5, seconds)
class UptimeFormatTestCase(unittest.TestCase):
def test_seconds(self):
s = uptime_format(5)
self.assertEqual('0:00:00:05', s)
def test_minutes(self):
s = uptime_format(3540)
self.assertEqual('0:00:59:00', s)
def test_hours(self):
s = uptime_format(49020)
self.assertEqual('0:13:37:00', s)
def test_days(self):
s = uptime_format(135420)
self.assertEqual('1:13:37:00', s)
def test_format_days_applied_to_hours(self):
s = uptime_format(135420, '%H:%M:%S')
self.assertEqual('37:37:00', s)
def test_format_hours_applied_to_minutes(self):
s = uptime_format(49020, '%M:%S')
self.assertEqual('817:00', s)
class UptimeTestCase(unittest.TestCase):
def test(self):
plugin = Uptime()
self.assertEqual('%d days %H:%M:%S up', plugin.full_format)
self.assertEqual('%dd %H:%M up', plugin.short_format)
@mock.patch('pi3bar.plugins.uptime.get_uptime_seconds')
def test_cycle(self, mock_get_uptime_seconds):
plugin = Uptime()
mock_get_uptime_seconds.return_value = 49020
plugin.cycle()
self.assertEqual('0 days 13:37:00 up', plugin.full_text)
self.assertEqual('0d 13:37 up', plugin.short_text)
|
knoppo/pi3bar
|
pi3bar/tests/plugins/test_uptime.py
|
Python
|
mit
| 1,771 | 0 |
# -*- coding: utf-8 -*-
import os
import re
try:
import simplejson as json
except ImportError:
import json
from ToolBoxAssistant.app import AppFactory
from ToolBoxAssistant.helpers import get_svn_url, readfile, find_versionned_folders, yes_no, Color
from ToolBoxAssistant.log import logger
VERSION = '0.1'
class ToolBoxAssistant(object):
"""
The main class
"""
config_basedir = os.path.join(os.path.expanduser('~'), '.tba')
tba_required_fields = ['path', 'apps']
app_required_fields = ['type', 'url', 'path']
vcs_repo_finders = {
'git': (
'.git/config',
re.compile(r'\[remote "origin"\]\s+url = (.*)$', re.M),
lambda regex, cfg: regex.search(readfile(cfg)).group(1)
),
'hg': (
'.hg/hgrc',
re.compile(r'default = (.*)$'),
lambda regex, cfg: regex.search(readfile(cfg)).group(1)
),
'svn': (
'',
re.compile(r'Repository Root: (.*)$', re.M),
get_svn_url
)
}
def __init__(self):
self.config_dir = None
def setup_config_dir(self, path):
self.config_dir = os.path.join(
self.config_basedir,
path.replace(os.path.sep, '_').strip('_')
)
if not os.path.exists(self.config_dir):
logger.debug('creating configuration folder: %s' % Color.GREEN+self.config_dir+Color.END)
os.makedirs(self.config_dir)
def load_specs(self, fpath):
"""
Loads a specifications file and checks for missing fields.
"""
with open(fpath) as ifile:
logger.debug('loading specfile: %s' % Color.GREEN+fpath+Color.END)
data = json.load(ifile)
for field in self.tba_required_fields:
if field not in data:
logger.error('missing top-level field in specs: %s' % Color.GREEN+field+Color.END)
return None
for app_name in data['apps']:
app_specs = data['apps'][app_name]
for app_field in self.app_required_fields:
if app_field not in app_specs:
logger.error('missing app field in specs: %s' % Color.GREEN+app_field+Color.END)
return None
return data
def do_sync(self, args):
"""
Synchronizes installed applications with the specfile.
"""
if (not os.path.exists(args.file)) or (not os.path.isfile(args.file)):
logger.error('file not found: %s' % Color.GREEN+args.file+Color.END)
return
specs = self.load_specs(args.file)
if specs is None:
return
self.setup_config_dir(specs['path'])
rootpath = specs['path']
for app_name in specs['apps']:
app_specs = specs['apps'][app_name]
if not app_specs['path'].startswith(os.path.sep):
app_specs['path'] = os.path.join(rootpath, app_specs['path'])
app = AppFactory.load(self, app_name, app_specs)
app.sync()
if app.is_updated:
app.build()
if args.unlisted:
for _, folder in find_versionned_folders(rootpath):
folder, app_name = os.path.split(folder)
logger.warn('found unlisted application in %s: %s' % (
folder, Color.GREEN+app_name+Color.END
))
def do_genspec(self, args):
"""
Scans the current folder for versioned applications and
creates a specfile accordingly.
"""
self.setup_config_dir(args.path)
new_specs = {
'path': args.path,
'apps': {}
}
if args.merge is not None:
new_specs = self.load_specs(args.merge)
apps_specs = new_specs['apps']
new_apps_found = False
for vcs_type, app_folder in find_versionned_folders(args.path):
app_path = app_folder[len(args.path)+1:]
if app_path not in [apps_specs[a]['path'] for a in apps_specs]:
new_apps_found = True
folder, app_name = os.path.split(app_folder)
logger.info('found%s application in %s: %s (%s)' % (
' new' if args.merge is not None else '',
folder, Color.GREEN+app_name+Color.END, vcs_type
))
cfg_file, regex, handler = self.vcs_repo_finders[vcs_type]
cfg_path = os.path.join(app_folder, cfg_file)
app_specs = {
'type': vcs_type,
'url': handler(regex, cfg_path),
'path': app_path,
}
apps_specs[app_name] = app_specs
if new_apps_found:
outfile = args.merge or args.file
if os.path.exists(outfile):
logger.warning('file already exists: %s' % Color.GREEN+outfile+Color.END)
if not yes_no('Overwrite ?'):
logger.error('operation aborted by user')
return
with open(outfile, 'w') as ofile:
json.dump(new_specs, ofile, sort_keys=True, indent=2, separators=(',', ': '))
logger.info('specfile written to %s' % Color.GREEN+outfile+Color.END)
logger.info('you may now add build information to the new specfile')
else:
logger.info('no new application found')
|
mattoufoutu/ToolBoxAssistant
|
ToolBoxAssistant/__init__.py
|
Python
|
gpl-3.0
| 5,458 | 0.001649 |
import numpy as np
import random
class ReplayBuffer:
""" Buffer for storing values over timesteps.
"""
def __init__(self):
""" Initializes the buffer.
"""
pass
def batch_sample(self, batch_size):
""" Randomly sample a batch of values from the buffer.
"""
raise NotImplementedError
def put(self, *value):
""" Put values into the replay buffer.
"""
raise NotImplementedError
class ExperienceReplay(ReplayBuffer):
"""
Experience Replay stores action, state, reward and terminal signal
for each time step.
"""
def __init__(self, state_size, action_size, capacity):
""" Creates an Experience Replay of certain capacity.
Acts like a circular buffer.
Args:
state_size: The size of the state to be stored.
action_size: The size of the action to be stored.
capacity: The capacity of the experience replay buffer.
"""
self.state_size = state_size
self.action_size = action_size
self.length = 0
self.capacity = capacity
self.actions = np.empty((self.capacity, self.action_size), dtype = np.float16)
self.states = np.empty((self.capacity, self.state_size), dtype = np.float16)
self.rewards = np.empty(self.capacity, dtype = np.float16)
self.dones = np.empty(self.capacity, dtype = bool)  # np.bool is deprecated/removed in recent NumPy
self.current_index = 0
self.staged = False
def batch_sample(self, batch_size):
""" Sample a batch of experiences from the replay.
Args:
batch_size: The number of batches to select
Returns:
s_t
a_t
r_t
s_t1
done
"""
if batch_size > self.length-3:
# we might not have enough experience
raise IOError('batch_size out of range')
idxs = []
while len(idxs) < batch_size:
while True:
# keep trying random indices
idx = random.randint(1, self.length - 1)
# don't want to grab current index since it wraps
if not (idx == self.current_index or idx == self.current_index - 1):
idxs.append(idx)
break
s_t = self.states[idxs]
s_t1 = self.states[[(x+1) for x in idxs]]
a_t = self.actions[idxs]
r_t = np.expand_dims(self.rewards[idxs], axis = 1)
done = self.dones[idxs]
'''
j = 0
print(s_t[j], s_t1[j], a_t[j], r_t[j], done[j])
j = 1
print(s_t[j], s_t1[j], a_t[j], r_t[j], done[j])
raw_input("Press Enter to continue...")
'''
return s_t, a_t, r_t, s_t1, done
def _put(self, s_t, a_t, reward, done):
self.actions[self.current_index] = a_t
self.states[self.current_index] = s_t
self.rewards[self.current_index] = reward
self.dones[self.current_index] = done
self._increment_index()
def put_act(self, s_t, a_t):
""" Puts the current state and the action taking into Experience Replay.
Args:
s_t: Current state.
a_t: Action taking at this state.
Raises:
IOError: If trying to overwrite previously staged action and state.
"""
if not self.staged:
self.actions[self.current_index] = a_t
self.states[self.current_index] = s_t
# stage to prevent double staging
self.staged = True
else:
# already staged an action and state
raise IOError('Trying to override previously staged action and state.')
def put_rew(self, reward, done):
""" Completes a staged insertion by adding reward and
terminal signal to Experience Replay
Args:
reward: Reward received in this step.
done: Bool signalling terminal step.
Raises:
IOError: If trying to complete insertion without having staged first.
"""
if(self.staged):
self.rewards[self.current_index] = reward
self.dones[self.current_index] = done
# unstage and increment index
self.staged = False
self._increment_index()
else:
# not yet staged state and action
raise IOError( 'Trying to complete unstaged insertion. Must insert action and state first.')
def unstage(self):
""" Unstages any currently staged insertion
"""
if(self.staged):
# stage to prevent double staging
self.staged = False
self.actions[self.current_index] = np.nan  # clear with NaN; assigning None to a float array would fail
self.states[self.current_index] = np.nan
def _increment_index(self):
self.current_index = (self.current_index + 1) % self.capacity
self.length = min(self.capacity-1, self.length + 1)
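# Minimal usage sketch (illustrative only; the sizes and the toy loop below are
# made up and are not part of the original module):
if __name__ == '__main__':
    # 4-dimensional states, 2-dimensional actions, room for 100 transitions
    replay = ExperienceReplay(state_size=4, action_size=2, capacity=100)
    for step in range(200):
        s_t = np.random.randn(4)
        a_t = np.random.randn(2)
        replay.put_act(s_t, a_t)                             # stage state/action
        replay.put_rew(reward=1.0, done=(step % 50 == 49))   # complete the insertion
    # sample a training batch once enough transitions are stored
    s_t, a_t, r_t, s_t1, done = replay.batch_sample(32)
    print(s_t.shape, a_t.shape, r_t.shape, done.shape)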
|
fizz-ml/pytorch-aux-reward-rl
|
replay_buffer.py
|
Python
|
mit
| 4,985 | 0.004814 |
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c), Michael DeHaan <michael.dehaan@gmail.com>, 2012-2013
# Copyright (c), Toshio Kuratomi <tkuratomi@ansible.com>, 2015
#
# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)
#
# The match_hostname function and supporting code is under the terms and
# conditions of the Python Software Foundation License. They were taken from
# the Python3 standard library and adapted for use in Python2. See comments in the
# source for which code precisely is under this License.
#
# PSF License (see licenses/PSF-license.txt or https://opensource.org/licenses/Python-2.0)
'''
The **urls** utils module offers a replacement for the urllib2 python library.
urllib2 is the python stdlib way to retrieve files from the Internet but it
lacks some security features (around verifying SSL certificates) that users
should care about in most situations. Using the functions in this module corrects
deficiencies in the urllib2 module wherever possible.
There are also third-party libraries (for instance, requests) which can be used
to replace urllib2 with a more secure library. However, all third party libraries
require that the library be installed on the managed machine. That is an extra step
for users making use of a module. If possible, avoid third party libraries by using
this code instead.
'''
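# Illustrative use from module code (a sketch; the URL below is made up, and the
# keyword arguments mirror the defaults of open_url() defined further down):
#
#   from ansible.module_utils.urls import open_url
#   resp = open_url('https://example.com/api', method='GET',
#                   validate_certs=True, timeout=10)
#   body = resp.read()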
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import atexit
import base64
import email.mime.multipart
import email.mime.nonmultipart
import email.mime.application
import email.parser
import email.utils
import functools
import mimetypes
import netrc
import os
import platform
import re
import socket
import sys
import tempfile
import traceback
from contextlib import contextmanager
try:
import email.policy
except ImportError:
# Py2
import email.generator
try:
import httplib
except ImportError:
# Python 3
import http.client as httplib
import ansible.module_utils.six.moves.http_cookiejar as cookiejar
import ansible.module_utils.six.moves.urllib.request as urllib_request
import ansible.module_utils.six.moves.urllib.error as urllib_error
from ansible.module_utils.common.collections import Mapping
from ansible.module_utils.six import PY3, string_types
from ansible.module_utils.six.moves import cStringIO
from ansible.module_utils.basic import get_distribution, missing_required_lib
from ansible.module_utils._text import to_bytes, to_native, to_text
try:
# python3
import urllib.request as urllib_request
from urllib.request import AbstractHTTPHandler, BaseHandler
except ImportError:
# python2
import urllib2 as urllib_request
from urllib2 import AbstractHTTPHandler, BaseHandler
urllib_request.HTTPRedirectHandler.http_error_308 = urllib_request.HTTPRedirectHandler.http_error_307
try:
from ansible.module_utils.six.moves.urllib.parse import urlparse, urlunparse
HAS_URLPARSE = True
except Exception:
HAS_URLPARSE = False
try:
import ssl
HAS_SSL = True
except Exception:
HAS_SSL = False
try:
# SNI Handling needs python2.7.9's SSLContext
from ssl import create_default_context, SSLContext
HAS_SSLCONTEXT = True
except ImportError:
HAS_SSLCONTEXT = False
# SNI Handling for python < 2.7.9 with urllib3 support
try:
# urllib3>=1.15
HAS_URLLIB3_SSL_WRAP_SOCKET = False
try:
from urllib3.contrib.pyopenssl import PyOpenSSLContext
except ImportError:
from requests.packages.urllib3.contrib.pyopenssl import PyOpenSSLContext
HAS_URLLIB3_PYOPENSSLCONTEXT = True
except ImportError:
# urllib3<1.15,>=1.6
HAS_URLLIB3_PYOPENSSLCONTEXT = False
try:
try:
from urllib3.contrib.pyopenssl import ssl_wrap_socket
except ImportError:
from requests.packages.urllib3.contrib.pyopenssl import ssl_wrap_socket
HAS_URLLIB3_SSL_WRAP_SOCKET = True
except ImportError:
pass
# Select a protocol that includes all secure tls protocols
# Exclude insecure ssl protocols if possible
if HAS_SSL:
# If we can't find extra tls methods, ssl.PROTOCOL_TLSv1 is sufficient
PROTOCOL = ssl.PROTOCOL_TLSv1
if not HAS_SSLCONTEXT and HAS_SSL:
try:
import ctypes
import ctypes.util
except ImportError:
# python 2.4 (likely rhel5 which doesn't have tls1.1 support in its openssl)
pass
else:
libssl_name = ctypes.util.find_library('ssl')
libssl = ctypes.CDLL(libssl_name)
for method in ('TLSv1_1_method', 'TLSv1_2_method'):
try:
libssl[method]
# Found something - we'll let openssl autonegotiate and hope
# the server has disabled sslv2 and 3. best we can do.
PROTOCOL = ssl.PROTOCOL_SSLv23
break
except AttributeError:
pass
del libssl
# The following makes it easier for us to script updates of the bundled backports.ssl_match_hostname
# The bundled backports.ssl_match_hostname should really be moved into its own file for processing
_BUNDLED_METADATA = {"pypi_name": "backports.ssl_match_hostname", "version": "3.7.0.1"}
LOADED_VERIFY_LOCATIONS = set()
HAS_MATCH_HOSTNAME = True
try:
from ssl import match_hostname, CertificateError
except ImportError:
try:
from backports.ssl_match_hostname import match_hostname, CertificateError
except ImportError:
HAS_MATCH_HOSTNAME = False
HAS_CRYPTOGRAPHY = True
try:
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.exceptions import UnsupportedAlgorithm
except ImportError:
HAS_CRYPTOGRAPHY = False
# Old import for GSSAPI authentication, this is not used in urls.py but kept for backwards compatibility.
try:
import urllib_gssapi
HAS_GSSAPI = True
except ImportError:
HAS_GSSAPI = False
GSSAPI_IMP_ERR = None
try:
import gssapi
class HTTPGSSAPIAuthHandler(BaseHandler):
""" Handles Negotiate/Kerberos support through the gssapi library. """
AUTH_HEADER_PATTERN = re.compile(r'(?:.*)\s*(Negotiate|Kerberos)\s*([^,]*),?', re.I)
handler_order = 480 # Handle before Digest authentication
def __init__(self, username=None, password=None):
self.username = username
self.password = password
self._context = None
def get_auth_value(self, headers):
auth_match = self.AUTH_HEADER_PATTERN.search(headers.get('www-authenticate', ''))
if auth_match:
return auth_match.group(1), base64.b64decode(auth_match.group(2))
def http_error_401(self, req, fp, code, msg, headers):
# If we've already attempted the auth and we've reached this again then there was a failure.
if self._context:
return
parsed = generic_urlparse(urlparse(req.get_full_url()))
auth_header = self.get_auth_value(headers)
if not auth_header:
return
auth_protocol, in_token = auth_header
username = None
if self.username:
username = gssapi.Name(self.username, name_type=gssapi.NameType.user)
if username and self.password:
if not hasattr(gssapi.raw, 'acquire_cred_with_password'):
raise NotImplementedError("Platform GSSAPI library does not support "
"gss_acquire_cred_with_password, cannot acquire GSSAPI credential with "
"explicit username and password.")
b_password = to_bytes(self.password, errors='surrogate_or_strict')
cred = gssapi.raw.acquire_cred_with_password(username, b_password, usage='initiate').creds
else:
cred = gssapi.Credentials(name=username, usage='initiate')
# Get the peer certificate for the channel binding token if possible (HTTPS). A bug on macOS causes the
# authentication to fail when the CBT is present. Just skip that platform.
cbt = None
cert = getpeercert(fp, True)
if cert and platform.system() != 'Darwin':
cert_hash = get_channel_binding_cert_hash(cert)
if cert_hash:
cbt = gssapi.raw.ChannelBindings(application_data=b"tls-server-end-point:" + cert_hash)
# TODO: We could add another option that is set to include the port in the SPN if desired in the future.
target = gssapi.Name("HTTP@%s" % parsed['hostname'], gssapi.NameType.hostbased_service)
self._context = gssapi.SecurityContext(usage="initiate", name=target, creds=cred, channel_bindings=cbt)
resp = None
while not self._context.complete:
out_token = self._context.step(in_token)
if not out_token:
break
auth_header = '%s %s' % (auth_protocol, to_native(base64.b64encode(out_token)))
req.add_unredirected_header('Authorization', auth_header)
resp = self.parent.open(req)
# The response could contain a token that the client uses to validate the server
auth_header = self.get_auth_value(resp.headers)
if not auth_header:
break
in_token = auth_header[1]
return resp
except ImportError:
GSSAPI_IMP_ERR = traceback.format_exc()
HTTPGSSAPIAuthHandler = None
if not HAS_MATCH_HOSTNAME:
# The following block of code is under the terms and conditions of the
# Python Software Foundation License
"""The match_hostname() function from Python 3.4, essential when using SSL."""
try:
# Divergence: Python-3.7+'s _ssl has this exception type but older Pythons do not
from _ssl import SSLCertVerificationError
CertificateError = SSLCertVerificationError
except ImportError:
class CertificateError(ValueError):
pass
def _dnsname_match(dn, hostname):
"""Matching according to RFC 6125, section 6.4.3
- Hostnames are compared lower case.
- For IDNA, both dn and hostname must be encoded as IDN A-label (ACE).
- Partial wildcards like 'www*.example.org', multiple wildcards, sole
wildcard or wildcards in labels other than the left-most label are not
supported and a CertificateError is raised.
- A wildcard must match at least one character.
"""
if not dn:
return False
wildcards = dn.count('*')
# speed up common case w/o wildcards
if not wildcards:
return dn.lower() == hostname.lower()
if wildcards > 1:
# Divergence .format() to percent formatting for Python < 2.6
raise CertificateError(
"too many wildcards in certificate DNS name: %s" % repr(dn))
dn_leftmost, sep, dn_remainder = dn.partition('.')
if '*' in dn_remainder:
# Only match wildcard in leftmost segment.
# Divergence .format() to percent formatting for Python < 2.6
raise CertificateError(
"wildcard can only be present in the leftmost label: "
"%s." % repr(dn))
if not sep:
# no right side
# Divergence .format() to percent formatting for Python < 2.6
raise CertificateError(
"sole wildcard without additional labels are not support: "
"%s." % repr(dn))
if dn_leftmost != '*':
# no partial wildcard matching
# Divergence .format() to percent formatting for Python < 2.6
raise CertificateError(
"partial wildcards in leftmost label are not supported: "
"%s." % repr(dn))
hostname_leftmost, sep, hostname_remainder = hostname.partition('.')
if not hostname_leftmost or not sep:
# wildcard must match at least one char
return False
return dn_remainder.lower() == hostname_remainder.lower()
def _inet_paton(ipname):
"""Try to convert an IP address to packed binary form
Supports IPv4 addresses on all platforms and IPv6 on platforms with IPv6
support.
"""
# inet_aton() also accepts strings like '1'
# Divergence: We make sure we have native string type for all python versions
try:
b_ipname = to_bytes(ipname, errors='strict')
except UnicodeError:
raise ValueError("%s must be an all-ascii string." % repr(ipname))
# Set ipname in native string format
if sys.version_info < (3,):
n_ipname = b_ipname
else:
n_ipname = ipname
if n_ipname.count('.') == 3:
try:
return socket.inet_aton(n_ipname)
# Divergence: OSError on late python3. socket.error earlier.
# Null bytes generate ValueError on python3(we want to raise
# ValueError anyway), TypeError # earlier
except (OSError, socket.error, TypeError):
pass
try:
return socket.inet_pton(socket.AF_INET6, n_ipname)
# Divergence: OSError on late python3. socket.error earlier.
# Null bytes generate ValueError on python3(we want to raise
# ValueError anyway), TypeError # earlier
except (OSError, socket.error, TypeError):
# Divergence .format() to percent formatting for Python < 2.6
raise ValueError("%s is neither an IPv4 nor an IP6 "
"address." % repr(ipname))
except AttributeError:
# AF_INET6 not available
pass
# Divergence .format() to percent formatting for Python < 2.6
raise ValueError("%s is not an IPv4 address." % repr(ipname))
def _ipaddress_match(ipname, host_ip):
"""Exact matching of IP addresses.
RFC 6125 explicitly doesn't define an algorithm for this
(section 1.7.2 - "Out of Scope").
"""
# OpenSSL may add a trailing newline to a subjectAltName's IP address
ip = _inet_paton(ipname.rstrip())
return ip == host_ip
def match_hostname(cert, hostname):
"""Verify that *cert* (in decoded format as returned by
SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
rules are followed.
The function matches IP addresses rather than dNSNames if hostname is a
valid ipaddress string. IPv4 addresses are supported on all platforms.
IPv6 addresses are supported on platforms with IPv6 support (AF_INET6
and inet_pton).
CertificateError is raised on failure. On success, the function
returns nothing.
"""
if not cert:
raise ValueError("empty or no certificate, match_hostname needs a "
"SSL socket or SSL context with either "
"CERT_OPTIONAL or CERT_REQUIRED")
try:
# Divergence: Deal with hostname as bytes
host_ip = _inet_paton(to_text(hostname, errors='strict'))
except UnicodeError:
# Divergence: Deal with hostname as byte strings.
# IP addresses should be all ascii, so we consider it not
# an IP address if this fails
host_ip = None
except ValueError:
# Not an IP address (common case)
host_ip = None
dnsnames = []
san = cert.get('subjectAltName', ())
for key, value in san:
if key == 'DNS':
if host_ip is None and _dnsname_match(value, hostname):
return
dnsnames.append(value)
elif key == 'IP Address':
if host_ip is not None and _ipaddress_match(value, host_ip):
return
dnsnames.append(value)
if not dnsnames:
# The subject is only checked when there is no dNSName entry
# in subjectAltName
for sub in cert.get('subject', ()):
for key, value in sub:
# XXX according to RFC 2818, the most specific Common Name
# must be used.
if key == 'commonName':
if _dnsname_match(value, hostname):
return
dnsnames.append(value)
if len(dnsnames) > 1:
raise CertificateError("hostname %r doesn't match either of %s" % (hostname, ', '.join(map(repr, dnsnames))))
elif len(dnsnames) == 1:
raise CertificateError("hostname %r doesn't match %r" % (hostname, dnsnames[0]))
else:
raise CertificateError("no appropriate commonName or subjectAltName fields were found")
# End of Python Software Foundation Licensed code
HAS_MATCH_HOSTNAME = True
# This is a dummy cacert provided for macOS since you need at least 1
# ca cert, regardless of validity, for Python on macOS to use the
# keychain functionality in OpenSSL for validating SSL certificates.
# See: http://mercurial.selenic.com/wiki/CACertificates#Mac_OS_X_10.6_and_higher
b_DUMMY_CA_CERT = b"""-----BEGIN CERTIFICATE-----
MIICvDCCAiWgAwIBAgIJAO8E12S7/qEpMA0GCSqGSIb3DQEBBQUAMEkxCzAJBgNV
BAYTAlVTMRcwFQYDVQQIEw5Ob3J0aCBDYXJvbGluYTEPMA0GA1UEBxMGRHVyaGFt
MRAwDgYDVQQKEwdBbnNpYmxlMB4XDTE0MDMxODIyMDAyMloXDTI0MDMxNTIyMDAy
MlowSTELMAkGA1UEBhMCVVMxFzAVBgNVBAgTDk5vcnRoIENhcm9saW5hMQ8wDQYD
VQQHEwZEdXJoYW0xEDAOBgNVBAoTB0Fuc2libGUwgZ8wDQYJKoZIhvcNAQEBBQAD
gY0AMIGJAoGBANtvpPq3IlNlRbCHhZAcP6WCzhc5RbsDqyh1zrkmLi0GwcQ3z/r9
gaWfQBYhHpobK2Tiq11TfraHeNB3/VfNImjZcGpN8Fl3MWwu7LfVkJy3gNNnxkA1
4Go0/LmIvRFHhbzgfuo9NFgjPmmab9eqXJceqZIlz2C8xA7EeG7ku0+vAgMBAAGj
gaswgagwHQYDVR0OBBYEFPnN1nPRqNDXGlCqCvdZchRNi/FaMHkGA1UdIwRyMHCA
FPnN1nPRqNDXGlCqCvdZchRNi/FaoU2kSzBJMQswCQYDVQQGEwJVUzEXMBUGA1UE
CBMOTm9ydGggQ2Fyb2xpbmExDzANBgNVBAcTBkR1cmhhbTEQMA4GA1UEChMHQW5z
aWJsZYIJAO8E12S7/qEpMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADgYEA
MUB80IR6knq9K/tY+hvPsZer6eFMzO3JGkRFBh2kn6JdMDnhYGX7AXVHGflrwNQH
qFy+aenWXsC0ZvrikFxbQnX8GVtDADtVznxOi7XzFw7JOxdsVrpXgSN0eh0aMzvV
zKPZsZ2miVGclicJHzm5q080b1p/sZtuKIEZk6vZqEg=
-----END CERTIFICATE-----
"""
#
# Exceptions
#
class ConnectionError(Exception):
"""Failed to connect to the server"""
pass
class ProxyError(ConnectionError):
"""Failure to connect because of a proxy"""
pass
class SSLValidationError(ConnectionError):
"""Failure to connect due to SSL validation failing"""
pass
class NoSSLError(SSLValidationError):
"""Needed to connect to an HTTPS url but no ssl library available to verify the certificate"""
pass
class MissingModuleError(Exception):
"""Failed to import 3rd party module required by the caller"""
def __init__(self, message, import_traceback):
super(MissingModuleError, self).__init__(message)
self.import_traceback = import_traceback
# Some environments (Google Compute Engine's CoreOS deploys) do not compile
# against openssl and thus do not have any HTTPS support.
CustomHTTPSConnection = None
CustomHTTPSHandler = None
HTTPSClientAuthHandler = None
UnixHTTPSConnection = None
if hasattr(httplib, 'HTTPSConnection') and hasattr(urllib_request, 'HTTPSHandler'):
class CustomHTTPSConnection(httplib.HTTPSConnection):
def __init__(self, *args, **kwargs):
httplib.HTTPSConnection.__init__(self, *args, **kwargs)
self.context = None
if HAS_SSLCONTEXT:
self.context = self._context
elif HAS_URLLIB3_PYOPENSSLCONTEXT:
self.context = self._context = PyOpenSSLContext(PROTOCOL)
if self.context and self.cert_file:
self.context.load_cert_chain(self.cert_file, self.key_file)
def connect(self):
"Connect to a host on a given (SSL) port."
if hasattr(self, 'source_address'):
sock = socket.create_connection((self.host, self.port), self.timeout, self.source_address)
else:
sock = socket.create_connection((self.host, self.port), self.timeout)
server_hostname = self.host
# Note: self._tunnel_host is not available on py < 2.6 but this code
# isn't used on py < 2.6 (lack of create_connection)
if self._tunnel_host:
self.sock = sock
self._tunnel()
server_hostname = self._tunnel_host
if HAS_SSLCONTEXT or HAS_URLLIB3_PYOPENSSLCONTEXT:
self.sock = self.context.wrap_socket(sock, server_hostname=server_hostname)
elif HAS_URLLIB3_SSL_WRAP_SOCKET:
self.sock = ssl_wrap_socket(sock, keyfile=self.key_file, cert_reqs=ssl.CERT_NONE, certfile=self.cert_file, ssl_version=PROTOCOL,
server_hostname=server_hostname)
else:
self.sock = ssl.wrap_socket(sock, keyfile=self.key_file, certfile=self.cert_file, ssl_version=PROTOCOL)
class CustomHTTPSHandler(urllib_request.HTTPSHandler):
def https_open(self, req):
kwargs = {}
if HAS_SSLCONTEXT:
kwargs['context'] = self._context
return self.do_open(
functools.partial(
CustomHTTPSConnection,
**kwargs
),
req
)
https_request = AbstractHTTPHandler.do_request_
class HTTPSClientAuthHandler(urllib_request.HTTPSHandler):
'''Handles client authentication via cert/key
This is a fairly lightweight extension on HTTPSHandler, and can be used
in place of HTTPSHandler
'''
def __init__(self, client_cert=None, client_key=None, unix_socket=None, **kwargs):
urllib_request.HTTPSHandler.__init__(self, **kwargs)
self.client_cert = client_cert
self.client_key = client_key
self._unix_socket = unix_socket
def https_open(self, req):
return self.do_open(self._build_https_connection, req)
def _build_https_connection(self, host, **kwargs):
kwargs.update({
'cert_file': self.client_cert,
'key_file': self.client_key,
})
try:
kwargs['context'] = self._context
except AttributeError:
pass
if self._unix_socket:
return UnixHTTPSConnection(self._unix_socket)(host, **kwargs)
return httplib.HTTPSConnection(host, **kwargs)
@contextmanager
def unix_socket_patch_httpconnection_connect():
'''Monkey patch ``httplib.HTTPConnection.connect`` to be ``UnixHTTPConnection.connect``
so that when calling ``super(UnixHTTPSConnection, self).connect()`` we get the
correct behavior of creating self.sock for the unix socket
'''
_connect = httplib.HTTPConnection.connect
httplib.HTTPConnection.connect = UnixHTTPConnection.connect
yield
httplib.HTTPConnection.connect = _connect
class UnixHTTPSConnection(httplib.HTTPSConnection):
def __init__(self, unix_socket):
self._unix_socket = unix_socket
def connect(self):
# This method exists simply to ensure we monkeypatch
# httplib.HTTPConnection.connect to call UnixHTTPConnection.connect
with unix_socket_patch_httpconnection_connect():
# Disable pylint check for the super() call. It complains about UnixHTTPSConnection
# being a NoneType because of the initial definition above, but it won't actually
# be a NoneType when this code runs
# pylint: disable=bad-super-call
super(UnixHTTPSConnection, self).connect()
def __call__(self, *args, **kwargs):
httplib.HTTPSConnection.__init__(self, *args, **kwargs)
return self
class UnixHTTPConnection(httplib.HTTPConnection):
'''Handles http requests to a unix socket file'''
def __init__(self, unix_socket):
self._unix_socket = unix_socket
def connect(self):
self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
try:
self.sock.connect(self._unix_socket)
except OSError as e:
raise OSError('Invalid Socket File (%s): %s' % (self._unix_socket, e))
if self.timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
self.sock.settimeout(self.timeout)
def __call__(self, *args, **kwargs):
httplib.HTTPConnection.__init__(self, *args, **kwargs)
return self
class UnixHTTPHandler(urllib_request.HTTPHandler):
'''Handler for Unix urls'''
def __init__(self, unix_socket, **kwargs):
urllib_request.HTTPHandler.__init__(self, **kwargs)
self._unix_socket = unix_socket
def http_open(self, req):
return self.do_open(UnixHTTPConnection(self._unix_socket), req)
class ParseResultDottedDict(dict):
'''
A dict that acts similarly to the ParseResult named tuple from urllib
'''
def __init__(self, *args, **kwargs):
super(ParseResultDottedDict, self).__init__(*args, **kwargs)
self.__dict__ = self
def as_list(self):
'''
Generate a list from this dict, that looks like the ParseResult named tuple
'''
return [self.get(k, None) for k in ('scheme', 'netloc', 'path', 'params', 'query', 'fragment')]
def generic_urlparse(parts):
'''
Returns a dictionary of url parts as parsed by urlparse,
but accounts for the fact that older versions of that
library do not support named attributes (ie. .netloc)
'''
generic_parts = ParseResultDottedDict()
if hasattr(parts, 'netloc'):
# urlparse is newer, just read the fields straight
# from the parts object
generic_parts['scheme'] = parts.scheme
generic_parts['netloc'] = parts.netloc
generic_parts['path'] = parts.path
generic_parts['params'] = parts.params
generic_parts['query'] = parts.query
generic_parts['fragment'] = parts.fragment
generic_parts['username'] = parts.username
generic_parts['password'] = parts.password
hostname = parts.hostname
if hostname and hostname[0] == '[' and '[' in parts.netloc and ']' in parts.netloc:
# Py2.6 doesn't parse IPv6 addresses correctly
hostname = parts.netloc.split(']')[0][1:].lower()
generic_parts['hostname'] = hostname
try:
port = parts.port
except ValueError:
# Py2.6 doesn't parse IPv6 addresses correctly
netloc = parts.netloc.split('@')[-1].split(']')[-1]
if ':' in netloc:
port = netloc.split(':')[1]
if port:
port = int(port)
else:
port = None
generic_parts['port'] = port
else:
# we have to use indexes, and then parse out
# the other parts not supported by indexing
generic_parts['scheme'] = parts[0]
generic_parts['netloc'] = parts[1]
generic_parts['path'] = parts[2]
generic_parts['params'] = parts[3]
generic_parts['query'] = parts[4]
generic_parts['fragment'] = parts[5]
# get the username, password, etc.
try:
netloc_re = re.compile(r'^((?:\w)+(?::(?:\w)+)?@)?([A-Za-z0-9.-]+)(:\d+)?$')
match = netloc_re.match(parts[1])
auth = match.group(1)
hostname = match.group(2)
port = match.group(3)
if port:
# the capture group for the port will include the ':',
# so remove it and convert the port to an integer
port = int(port[1:])
if auth:
# the capture group above includes the @, so remove it
# and then split it up based on the first ':' found
auth = auth[:-1]
username, password = auth.split(':', 1)
else:
username = password = None
generic_parts['username'] = username
generic_parts['password'] = password
generic_parts['hostname'] = hostname
generic_parts['port'] = port
except Exception:
generic_parts['username'] = None
generic_parts['password'] = None
generic_parts['hostname'] = parts[1]
generic_parts['port'] = None
return generic_parts
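# Illustrative result (a sketch, assuming a urlparse that exposes named attributes):
#
#   generic_urlparse(urlparse('https://user:secret@example.com:8443/x?y=1'))
#   # -> {'scheme': 'https', 'netloc': 'user:secret@example.com:8443',
#   #     'path': '/x', 'params': '', 'query': 'y=1', 'fragment': '',
#   #     'username': 'user', 'password': 'secret',
#   #     'hostname': 'example.com', 'port': 8443}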
class RequestWithMethod(urllib_request.Request):
'''
Workaround for using DELETE/PUT/etc with urllib2
Originally contained in library/net_infrastructure/dnsmadeeasy
'''
def __init__(self, url, method, data=None, headers=None, origin_req_host=None, unverifiable=True):
if headers is None:
headers = {}
self._method = method.upper()
urllib_request.Request.__init__(self, url, data, headers, origin_req_host, unverifiable)
def get_method(self):
if self._method:
return self._method
else:
return urllib_request.Request.get_method(self)
def RedirectHandlerFactory(follow_redirects=None, validate_certs=True, ca_path=None):
"""This is a class factory that closes over the value of
``follow_redirects`` so that the RedirectHandler class has access to
that value without having to use globals, and potentially cause problems
where ``open_url`` or ``fetch_url`` are used multiple times in a module.
"""
class RedirectHandler(urllib_request.HTTPRedirectHandler):
"""This is an implementation of a RedirectHandler to match the
functionality provided by httplib2. It will utilize the value of
``follow_redirects`` that is passed into ``RedirectHandlerFactory``
to determine how redirects should be handled in urllib2.
"""
def redirect_request(self, req, fp, code, msg, hdrs, newurl):
if not HAS_SSLCONTEXT:
handler = maybe_add_ssl_handler(newurl, validate_certs, ca_path=ca_path)
if handler:
urllib_request._opener.add_handler(handler)
# Preserve urllib2 compatibility
if follow_redirects == 'urllib2':
return urllib_request.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, hdrs, newurl)
# Handle disabled redirects
elif follow_redirects in ['no', 'none', False]:
raise urllib_error.HTTPError(newurl, code, msg, hdrs, fp)
method = req.get_method()
# Handle non-redirect HTTP status or invalid follow_redirects
if follow_redirects in ['all', 'yes', True]:
if code < 300 or code >= 400:
raise urllib_error.HTTPError(req.get_full_url(), code, msg, hdrs, fp)
elif follow_redirects == 'safe':
if code < 300 or code >= 400 or method not in ('GET', 'HEAD'):
raise urllib_error.HTTPError(req.get_full_url(), code, msg, hdrs, fp)
else:
raise urllib_error.HTTPError(req.get_full_url(), code, msg, hdrs, fp)
try:
# Python 2-3.3
data = req.get_data()
origin_req_host = req.get_origin_req_host()
except AttributeError:
# Python 3.4+
data = req.data
origin_req_host = req.origin_req_host
# Be lenient with URIs containing a space
newurl = newurl.replace(' ', '%20')
# Support redirects with payload and original headers
if code in (307, 308):
# Preserve payload and headers
headers = req.headers
else:
# Do not preserve payload and filter headers
data = None
headers = dict((k, v) for k, v in req.headers.items()
if k.lower() not in ("content-length", "content-type", "transfer-encoding"))
# http://tools.ietf.org/html/rfc7231#section-6.4.4
if code == 303 and method != 'HEAD':
method = 'GET'
# Do what the browsers do, despite standards...
# First, turn 302s into GETs.
if code == 302 and method != 'HEAD':
method = 'GET'
# Second, if a POST is responded to with a 301, turn it into a GET.
if code == 301 and method == 'POST':
method = 'GET'
return RequestWithMethod(newurl,
method=method,
headers=headers,
data=data,
origin_req_host=origin_req_host,
unverifiable=True,
)
return RedirectHandler
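# Illustrative wiring (a sketch): the module appends the factory result to its
# handler list before urllib_request.build_opener(*handlers), e.g.
#
#   handlers.append(RedirectHandlerFactory('safe', validate_certs=True))
#   opener = urllib_request.build_opener(*handlers)
#
# With 'safe' only GET/HEAD redirects are followed, 'all'/'yes'/True follow any
# 3xx response, 'no'/'none'/False refuse all redirects, and 'urllib2' defers to
# the stock HTTPRedirectHandler behaviour.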
def build_ssl_validation_error(hostname, port, paths, exc=None):
'''Intelligently build out the SSLValidationError based on what support
you have installed
'''
msg = [
('Failed to validate the SSL certificate for %s:%s.'
' Make sure your managed systems have a valid CA'
' certificate installed.')
]
if not HAS_SSLCONTEXT:
msg.append('If the website serving the url uses SNI you need'
' python >= 2.7.9 on your managed machine')
msg.append(' (the python executable used (%s) is version: %s)' %
(sys.executable, ''.join(sys.version.splitlines())))
if not HAS_URLLIB3_PYOPENSSLCONTEXT and not HAS_URLLIB3_SSL_WRAP_SOCKET:
msg.append('or you can install the `urllib3`, `pyOpenSSL`,'
' `ndg-httpsclient`, and `pyasn1` python modules')
msg.append('to perform SNI verification in python >= 2.6.')
msg.append('You can use validate_certs=False if you do'
' not need to confirm the server\'s identity but this is'
' unsafe and not recommended.'
' Paths checked for this platform: %s.')
if exc:
msg.append('The exception msg was: %s.' % to_native(exc))
raise SSLValidationError(' '.join(msg) % (hostname, port, ", ".join(paths)))
def atexit_remove_file(filename):
if os.path.exists(filename):
try:
os.unlink(filename)
except Exception:
# just ignore if we cannot delete, things should be ok
pass
class SSLValidationHandler(urllib_request.BaseHandler):
'''
A custom handler class for SSL validation.
Based on:
http://stackoverflow.com/questions/1087227/validate-ssl-certificates-with-python
http://techknack.net/python-urllib2-handlers/
'''
CONNECT_COMMAND = "CONNECT %s:%s HTTP/1.0\r\n"
def __init__(self, hostname, port, ca_path=None):
self.hostname = hostname
self.port = port
self.ca_path = ca_path
def get_ca_certs(self):
# tries to find a valid CA cert in one of the
# standard locations for the current distribution
ca_certs = []
cadata = bytearray()
paths_checked = []
if self.ca_path:
paths_checked = [self.ca_path]
with open(to_bytes(self.ca_path, errors='surrogate_or_strict'), 'rb') as f:
if HAS_SSLCONTEXT:
cadata.extend(
ssl.PEM_cert_to_DER_cert(
to_native(f.read(), errors='surrogate_or_strict')
)
)
else:
ca_certs.append(f.read())
return ca_certs, cadata, paths_checked
if not HAS_SSLCONTEXT:
paths_checked.append('/etc/ssl/certs')
system = to_text(platform.system(), errors='surrogate_or_strict')
# build a list of paths to check for .crt/.pem files
# based on the platform type
if system == u'Linux':
paths_checked.append('/etc/pki/ca-trust/extracted/pem')
paths_checked.append('/etc/pki/tls/certs')
paths_checked.append('/usr/share/ca-certificates/cacert.org')
elif system == u'FreeBSD':
paths_checked.append('/usr/local/share/certs')
elif system == u'OpenBSD':
paths_checked.append('/etc/ssl')
elif system == u'NetBSD':
paths_checked.append('/etc/openssl/certs')
elif system == u'SunOS':
paths_checked.append('/opt/local/etc/openssl/certs')
# fall back to a user-deployed cert in a standard
# location if the OS platform one is not available
paths_checked.append('/etc/ansible')
tmp_path = None
if not HAS_SSLCONTEXT:
tmp_fd, tmp_path = tempfile.mkstemp()
atexit.register(atexit_remove_file, tmp_path)
# Write the dummy ca cert if we are running on macOS
if system == u'Darwin':
if HAS_SSLCONTEXT:
cadata.extend(
ssl.PEM_cert_to_DER_cert(
to_native(b_DUMMY_CA_CERT, errors='surrogate_or_strict')
)
)
else:
os.write(tmp_fd, b_DUMMY_CA_CERT)
# Default Homebrew path for OpenSSL certs
paths_checked.append('/usr/local/etc/openssl')
# for all of the paths, find any .crt or .pem files
# and compile them into single temp file for use
# in the ssl check to speed up the test
for path in paths_checked:
if os.path.exists(path) and os.path.isdir(path):
dir_contents = os.listdir(path)
for f in dir_contents:
full_path = os.path.join(path, f)
if os.path.isfile(full_path) and os.path.splitext(f)[1] in ('.crt', '.pem'):
try:
if full_path not in LOADED_VERIFY_LOCATIONS:
with open(full_path, 'rb') as cert_file:
b_cert = cert_file.read()
if HAS_SSLCONTEXT:
try:
cadata.extend(
ssl.PEM_cert_to_DER_cert(
to_native(b_cert, errors='surrogate_or_strict')
)
)
except Exception:
continue
else:
os.write(tmp_fd, b_cert)
os.write(tmp_fd, b'\n')
except (OSError, IOError):
pass
if HAS_SSLCONTEXT:
default_verify_paths = ssl.get_default_verify_paths()
paths_checked[:0] = [default_verify_paths.capath]
else:
os.close(tmp_fd)
return (tmp_path, cadata, paths_checked)
def validate_proxy_response(self, response, valid_codes=None):
'''
make sure we get back a valid code from the proxy
'''
valid_codes = [200] if valid_codes is None else valid_codes
try:
(http_version, resp_code, msg) = re.match(br'(HTTP/\d\.\d) (\d\d\d) (.*)', response).groups()
if int(resp_code) not in valid_codes:
raise Exception
except Exception:
raise ProxyError('Connection to proxy failed')
def detect_no_proxy(self, url):
'''
Detect if the 'no_proxy' environment variable is set and honor those locations.
'''
env_no_proxy = os.environ.get('no_proxy')
if env_no_proxy:
env_no_proxy = env_no_proxy.split(',')
netloc = urlparse(url).netloc
for host in env_no_proxy:
if netloc.endswith(host) or netloc.split(':')[0].endswith(host):
# Our requested URL matches something in no_proxy, so don't
# use the proxy for this
return False
return True
def make_context(self, cafile, cadata):
cafile = self.ca_path or cafile
if self.ca_path:
cadata = None
else:
cadata = cadata or None
if HAS_SSLCONTEXT:
context = create_default_context(cafile=cafile)
elif HAS_URLLIB3_PYOPENSSLCONTEXT:
context = PyOpenSSLContext(PROTOCOL)
else:
raise NotImplementedError('Host libraries are too old to support creating an sslcontext')
if cafile or cadata:
context.load_verify_locations(cafile=cafile, cadata=cadata)
return context
def http_request(self, req):
tmp_ca_cert_path, cadata, paths_checked = self.get_ca_certs()
# Detect if 'no_proxy' environment variable is set and if our URL is included
use_proxy = self.detect_no_proxy(req.get_full_url())
https_proxy = os.environ.get('https_proxy')
context = None
try:
context = self.make_context(tmp_ca_cert_path, cadata)
except NotImplementedError:
# We'll make do with no context below
pass
try:
if use_proxy and https_proxy:
proxy_parts = generic_urlparse(urlparse(https_proxy))
port = proxy_parts.get('port') or 443
proxy_hostname = proxy_parts.get('hostname', None)
if proxy_hostname is None or proxy_parts.get('scheme') == '':
raise ProxyError("Failed to parse https_proxy environment variable."
" Please make sure you export https proxy as 'https_proxy=<SCHEME>://<IP_ADDRESS>:<PORT>'")
s = socket.create_connection((proxy_hostname, port))
if proxy_parts.get('scheme') == 'http':
s.sendall(to_bytes(self.CONNECT_COMMAND % (self.hostname, self.port), errors='surrogate_or_strict'))
if proxy_parts.get('username'):
credentials = "%s:%s" % (proxy_parts.get('username', ''), proxy_parts.get('password', ''))
s.sendall(b'Proxy-Authorization: Basic %s\r\n' % base64.b64encode(to_bytes(credentials, errors='surrogate_or_strict')).strip())
s.sendall(b'\r\n')
connect_result = b""
while connect_result.find(b"\r\n\r\n") <= 0:
connect_result += s.recv(4096)
# 128 kilobytes of headers should be enough for everyone.
if len(connect_result) > 131072:
raise ProxyError('Proxy sent too verbose headers. Only 128KiB allowed.')
self.validate_proxy_response(connect_result)
if context:
ssl_s = context.wrap_socket(s, server_hostname=self.hostname)
elif HAS_URLLIB3_SSL_WRAP_SOCKET:
ssl_s = ssl_wrap_socket(s, ca_certs=tmp_ca_cert_path, cert_reqs=ssl.CERT_REQUIRED, ssl_version=PROTOCOL, server_hostname=self.hostname)
else:
ssl_s = ssl.wrap_socket(s, ca_certs=tmp_ca_cert_path, cert_reqs=ssl.CERT_REQUIRED, ssl_version=PROTOCOL)
match_hostname(ssl_s.getpeercert(), self.hostname)
else:
raise ProxyError('Unsupported proxy scheme: %s. Currently ansible only supports HTTP proxies.' % proxy_parts.get('scheme'))
else:
s = socket.create_connection((self.hostname, self.port))
if context:
ssl_s = context.wrap_socket(s, server_hostname=self.hostname)
elif HAS_URLLIB3_SSL_WRAP_SOCKET:
ssl_s = ssl_wrap_socket(s, ca_certs=tmp_ca_cert_path, cert_reqs=ssl.CERT_REQUIRED, ssl_version=PROTOCOL, server_hostname=self.hostname)
else:
ssl_s = ssl.wrap_socket(s, ca_certs=tmp_ca_cert_path, cert_reqs=ssl.CERT_REQUIRED, ssl_version=PROTOCOL)
match_hostname(ssl_s.getpeercert(), self.hostname)
# close the ssl connection
# ssl_s.unwrap()
s.close()
except (ssl.SSLError, CertificateError) as e:
build_ssl_validation_error(self.hostname, self.port, paths_checked, e)
except socket.error as e:
raise ConnectionError('Failed to connect to %s at port %s: %s' % (self.hostname, self.port, to_native(e)))
return req
https_request = http_request
def maybe_add_ssl_handler(url, validate_certs, ca_path=None):
parsed = generic_urlparse(urlparse(url))
if parsed.scheme == 'https' and validate_certs:
if not HAS_SSL:
raise NoSSLError('SSL validation is not available in your version of python. You can use validate_certs=False,'
' however this is unsafe and not recommended')
# create the SSL validation handler and
# add it to the list of handlers
return SSLValidationHandler(parsed.hostname, parsed.port or 443, ca_path=ca_path)
def getpeercert(response, binary_form=False):
""" Attempt to get the peer certificate of the response from urlopen. """
# The response from urllib2.open() is different across Python 2 and 3
if PY3:
socket = response.fp.raw._sock
else:
socket = response.fp._sock.fp._sock
try:
return socket.getpeercert(binary_form)
except AttributeError:
pass # Not HTTPS
def get_channel_binding_cert_hash(certificate_der):
""" Gets the channel binding app data for a TLS connection using the peer cert. """
if not HAS_CRYPTOGRAPHY:
return
# Logic documented in RFC 5929 section 4 https://tools.ietf.org/html/rfc5929#section-4
cert = x509.load_der_x509_certificate(certificate_der, default_backend())
hash_algorithm = None
try:
hash_algorithm = cert.signature_hash_algorithm
except UnsupportedAlgorithm:
pass
# If the signature hash algorithm is unknown/unsupported or md5/sha1 we must use SHA256.
if not hash_algorithm or hash_algorithm.name in ['md5', 'sha1']:
hash_algorithm = hashes.SHA256()
digest = hashes.Hash(hash_algorithm, default_backend())
digest.update(certificate_der)
return digest.finalize()
def rfc2822_date_string(timetuple, zone='-0000'):
"""Accepts a timetuple and optional zone which defaults to ``-0000``
and returns a date string as specified by RFC 2822, e.g.:
Fri, 09 Nov 2001 01:08:47 -0000
Copied from email.utils.formatdate and modified for separate use
"""
return '%s, %02d %s %04d %02d:%02d:%02d %s' % (
['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'][timetuple[6]],
timetuple[2],
['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'][timetuple[1] - 1],
timetuple[0], timetuple[3], timetuple[4], timetuple[5],
zone)
class Request:
def __init__(self, headers=None, use_proxy=True, force=False, timeout=10, validate_certs=True,
url_username=None, url_password=None, http_agent=None, force_basic_auth=False,
follow_redirects='urllib2', client_cert=None, client_key=None, cookies=None, unix_socket=None,
ca_path=None):
"""This class works somewhat similarly to the ``Session`` class of from requests
by defining a cookiejar that an be used across requests as well as cascaded defaults that
can apply to repeated requests
For documentation of params, see ``Request.open``
>>> from ansible.module_utils.urls import Request
>>> r = Request()
>>> r.open('GET', 'http://httpbin.org/cookies/set?k1=v1').read()
'{\n "cookies": {\n "k1": "v1"\n }\n}\n'
>>> r = Request(url_username='user', url_password='passwd')
>>> r.open('GET', 'http://httpbin.org/basic-auth/user/passwd').read()
'{\n "authenticated": true, \n "user": "user"\n}\n'
>>> r = Request(headers=dict(foo='bar'))
>>> r.open('GET', 'http://httpbin.org/get', headers=dict(baz='qux')).read()
"""
self.headers = headers or {}
if not isinstance(self.headers, dict):
raise ValueError("headers must be a dict: %r" % self.headers)
self.use_proxy = use_proxy
self.force = force
self.timeout = timeout
self.validate_certs = validate_certs
self.url_username = url_username
self.url_password = url_password
self.http_agent = http_agent
self.force_basic_auth = force_basic_auth
self.follow_redirects = follow_redirects
self.client_cert = client_cert
self.client_key = client_key
self.unix_socket = unix_socket
self.ca_path = ca_path
if isinstance(cookies, cookiejar.CookieJar):
self.cookies = cookies
else:
self.cookies = cookiejar.CookieJar()
def _fallback(self, value, fallback):
if value is None:
return fallback
return value
def open(self, method, url, data=None, headers=None, use_proxy=None,
force=None, last_mod_time=None, timeout=None, validate_certs=None,
url_username=None, url_password=None, http_agent=None,
force_basic_auth=None, follow_redirects=None,
client_cert=None, client_key=None, cookies=None, use_gssapi=False,
unix_socket=None, ca_path=None, unredirected_headers=None):
"""
Sends a request via HTTP(S) or FTP using urllib2 (Python2) or urllib (Python3)
Does not require the module environment
Returns :class:`HTTPResponse` object.
:arg method: method for the request
:arg url: URL to request
:kwarg data: (optional) bytes, or file-like object to send
in the body of the request
:kwarg headers: (optional) Dictionary of HTTP Headers to send with the
request
:kwarg use_proxy: (optional) Boolean of whether or not to use proxy
:kwarg force: (optional) Boolean of whether or not to set `cache-control: no-cache` header
:kwarg last_mod_time: (optional) Datetime object to use when setting If-Modified-Since header
:kwarg timeout: (optional) How long to wait for the server to send
data before giving up, as a float
:kwarg validate_certs: (optional) Boolean that controls whether we verify
the server's TLS certificate
:kwarg url_username: (optional) String of the user to use when authenticating
:kwarg url_password: (optional) String of the password to use when authenticating
:kwarg http_agent: (optional) String of the User-Agent to use in the request
:kwarg force_basic_auth: (optional) Boolean determining if auth header should be sent in the initial request
:kwarg follow_redirects: (optional) String of urllib2, all/yes, safe, none to determine how redirects are
followed, see RedirectHandlerFactory for more information
:kwarg client_cert: (optional) PEM formatted certificate chain file to be used for SSL client authentication.
This file can also include the key as well, and if the key is included, client_key is not required
:kwarg client_key: (optional) PEM formatted file that contains your private key to be used for SSL client
authentication. If client_cert contains both the certificate and key, this option is not required
:kwarg cookies: (optional) CookieJar object to send with the
request
:kwarg use_gssapi: (optional) Use GSSAPI handler of requests.
:kwarg unix_socket: (optional) String of file system path to unix socket file to use when establishing
connection to the provided url
:kwarg ca_path: (optional) String of file system path to CA cert bundle to use
:kwarg unredirected_headers: (optional) A list of headers to not attach on a redirected request
:returns: HTTPResponse. Added in Ansible 2.9
"""
method = method.upper()
if headers is None:
headers = {}
elif not isinstance(headers, dict):
raise ValueError("headers must be a dict")
headers = dict(self.headers, **headers)
use_proxy = self._fallback(use_proxy, self.use_proxy)
force = self._fallback(force, self.force)
timeout = self._fallback(timeout, self.timeout)
validate_certs = self._fallback(validate_certs, self.validate_certs)
url_username = self._fallback(url_username, self.url_username)
url_password = self._fallback(url_password, self.url_password)
http_agent = self._fallback(http_agent, self.http_agent)
force_basic_auth = self._fallback(force_basic_auth, self.force_basic_auth)
follow_redirects = self._fallback(follow_redirects, self.follow_redirects)
client_cert = self._fallback(client_cert, self.client_cert)
client_key = self._fallback(client_key, self.client_key)
cookies = self._fallback(cookies, self.cookies)
unix_socket = self._fallback(unix_socket, self.unix_socket)
ca_path = self._fallback(ca_path, self.ca_path)
handlers = []
if unix_socket:
handlers.append(UnixHTTPHandler(unix_socket))
ssl_handler = maybe_add_ssl_handler(url, validate_certs, ca_path=ca_path)
if ssl_handler and not HAS_SSLCONTEXT:
handlers.append(ssl_handler)
parsed = generic_urlparse(urlparse(url))
if parsed.scheme != 'ftp':
username = url_username
password = url_password
if username:
netloc = parsed.netloc
elif '@' in parsed.netloc:
credentials, netloc = parsed.netloc.split('@', 1)
if ':' in credentials:
username, password = credentials.split(':', 1)
else:
username = credentials
password = ''
parsed_list = parsed.as_list()
parsed_list[1] = netloc
# reconstruct url without credentials
url = urlunparse(parsed_list)
if use_gssapi:
if HTTPGSSAPIAuthHandler:
handlers.append(HTTPGSSAPIAuthHandler(username, password))
else:
imp_err_msg = missing_required_lib('gssapi', reason='for use_gssapi=True',
url='https://pypi.org/project/gssapi/')
raise MissingModuleError(imp_err_msg, import_traceback=GSSAPI_IMP_ERR)
elif username and not force_basic_auth:
passman = urllib_request.HTTPPasswordMgrWithDefaultRealm()
# this creates a password manager
passman.add_password(None, netloc, username, password)
# because we have put None at the start it will always
# use this username/password combination for urls
# for which the requested url is a super-url
authhandler = urllib_request.HTTPBasicAuthHandler(passman)
digest_authhandler = urllib_request.HTTPDigestAuthHandler(passman)
# create the AuthHandler
handlers.append(authhandler)
handlers.append(digest_authhandler)
elif username and force_basic_auth:
headers["Authorization"] = basic_auth_header(username, password)
else:
try:
rc = netrc.netrc(os.environ.get('NETRC'))
login = rc.authenticators(parsed.hostname)
except IOError:
login = None
if login:
username, _, password = login
if username and password:
headers["Authorization"] = basic_auth_header(username, password)
if not use_proxy:
proxyhandler = urllib_request.ProxyHandler({})
handlers.append(proxyhandler)
context = None
if HAS_SSLCONTEXT and not validate_certs:
# In 2.7.9, the default context validates certificates
context = SSLContext(ssl.PROTOCOL_SSLv23)
if ssl.OP_NO_SSLv2:
context.options |= ssl.OP_NO_SSLv2
context.options |= ssl.OP_NO_SSLv3
context.verify_mode = ssl.CERT_NONE
context.check_hostname = False
handlers.append(HTTPSClientAuthHandler(client_cert=client_cert,
client_key=client_key,
context=context,
unix_socket=unix_socket))
elif client_cert or unix_socket:
handlers.append(HTTPSClientAuthHandler(client_cert=client_cert,
client_key=client_key,
unix_socket=unix_socket))
if ssl_handler and HAS_SSLCONTEXT and validate_certs:
tmp_ca_path, cadata, paths_checked = ssl_handler.get_ca_certs()
try:
context = ssl_handler.make_context(tmp_ca_path, cadata)
except NotImplementedError:
pass
# pre-2.6 versions of python cannot use the custom https
# handler, since the socket class is lacking create_connection.
# Some python builds lack HTTPS support.
if hasattr(socket, 'create_connection') and CustomHTTPSHandler:
kwargs = {}
if HAS_SSLCONTEXT:
kwargs['context'] = context
handlers.append(CustomHTTPSHandler(**kwargs))
handlers.append(RedirectHandlerFactory(follow_redirects, validate_certs, ca_path=ca_path))
# add some nicer cookie handling
if cookies is not None:
handlers.append(urllib_request.HTTPCookieProcessor(cookies))
opener = urllib_request.build_opener(*handlers)
urllib_request.install_opener(opener)
data = to_bytes(data, nonstring='passthru')
request = RequestWithMethod(url, method, data)
# add the custom agent header, to help prevent issues
# with sites that block the default urllib agent string
if http_agent:
request.add_header('User-agent', http_agent)
# Cache control
# Either we directly force a cache refresh
if force:
request.add_header('cache-control', 'no-cache')
# or we do it if the original is more recent than our copy
elif last_mod_time:
tstamp = rfc2822_date_string(last_mod_time.timetuple(), 'GMT')
request.add_header('If-Modified-Since', tstamp)
# user defined headers now, which may override things we've set above
unredirected_headers = unredirected_headers or []
for header in headers:
if header in unredirected_headers:
request.add_unredirected_header(header, headers[header])
else:
request.add_header(header, headers[header])
return urllib_request.urlopen(request, None, timeout)
def get(self, url, **kwargs):
r"""Sends a GET request. Returns :class:`HTTPResponse` object.
:arg url: URL to request
:kwarg \*\*kwargs: Optional arguments that ``open`` takes.
:returns: HTTPResponse
"""
return self.open('GET', url, **kwargs)
def options(self, url, **kwargs):
r"""Sends a OPTIONS request. Returns :class:`HTTPResponse` object.
:arg url: URL to request
:kwarg \*\*kwargs: Optional arguments that ``open`` takes.
:returns: HTTPResponse
"""
return self.open('OPTIONS', url, **kwargs)
def head(self, url, **kwargs):
r"""Sends a HEAD request. Returns :class:`HTTPResponse` object.
:arg url: URL to request
:kwarg \*\*kwargs: Optional arguments that ``open`` takes.
:returns: HTTPResponse
"""
return self.open('HEAD', url, **kwargs)
def post(self, url, data=None, **kwargs):
r"""Sends a POST request. Returns :class:`HTTPResponse` object.
:arg url: URL to request.
:kwarg data: (optional) bytes, or file-like object to send in the body of the request.
:kwarg \*\*kwargs: Optional arguments that ``open`` takes.
:returns: HTTPResponse
"""
return self.open('POST', url, data=data, **kwargs)
def put(self, url, data=None, **kwargs):
r"""Sends a PUT request. Returns :class:`HTTPResponse` object.
:arg url: URL to request.
:kwarg data: (optional) bytes, or file-like object to send in the body of the request.
:kwarg \*\*kwargs: Optional arguments that ``open`` takes.
:returns: HTTPResponse
"""
return self.open('PUT', url, data=data, **kwargs)
def patch(self, url, data=None, **kwargs):
r"""Sends a PATCH request. Returns :class:`HTTPResponse` object.
:arg url: URL to request.
:kwarg data: (optional) bytes, or file-like object to send in the body of the request.
:kwarg \*\*kwargs: Optional arguments that ``open`` takes.
:returns: HTTPResponse
"""
return self.open('PATCH', url, data=data, **kwargs)
def delete(self, url, **kwargs):
r"""Sends a DELETE request. Returns :class:`HTTPResponse` object.
:arg url: URL to request
:kwarg \*\*kwargs: Optional arguments that ``open`` takes.
:returns: HTTPResponse
"""
return self.open('DELETE', url, **kwargs)
def open_url(url, data=None, headers=None, method=None, use_proxy=True,
force=False, last_mod_time=None, timeout=10, validate_certs=True,
url_username=None, url_password=None, http_agent=None,
force_basic_auth=False, follow_redirects='urllib2',
client_cert=None, client_key=None, cookies=None,
use_gssapi=False, unix_socket=None, ca_path=None,
unredirected_headers=None):
'''
Sends a request via HTTP(S) or FTP using urllib2 (Python2) or urllib (Python3)
Does not require the module environment
'''
method = method or ('POST' if data else 'GET')
return Request().open(method, url, data=data, headers=headers, use_proxy=use_proxy,
force=force, last_mod_time=last_mod_time, timeout=timeout, validate_certs=validate_certs,
url_username=url_username, url_password=url_password, http_agent=http_agent,
force_basic_auth=force_basic_auth, follow_redirects=follow_redirects,
client_cert=client_cert, client_key=client_key, cookies=cookies,
use_gssapi=use_gssapi, unix_socket=unix_socket, ca_path=ca_path,
unredirected_headers=unredirected_headers)
def prepare_multipart(fields):
"""Takes a mapping, and prepares a multipart/form-data body
:arg fields: Mapping
:returns: tuple of (content_type, body) where ``content_type`` is
the ``multipart/form-data`` ``Content-Type`` header including
``boundary`` and ``body`` is the prepared bytestring body
Payload content from a file will be base64 encoded and will include
the appropriate ``Content-Transfer-Encoding`` and ``Content-Type``
headers.
Example:
{
"file1": {
"filename": "/bin/true",
"mime_type": "application/octet-stream"
},
"file2": {
"content": "text based file content",
"filename": "fake.txt",
"mime_type": "text/plain",
},
"text_form_field": "value"
}
"""
if not isinstance(fields, Mapping):
raise TypeError(
'Mapping is required, cannot be type %s' % fields.__class__.__name__
)
m = email.mime.multipart.MIMEMultipart('form-data')
for field, value in sorted(fields.items()):
if isinstance(value, string_types):
main_type = 'text'
sub_type = 'plain'
content = value
filename = None
elif isinstance(value, Mapping):
filename = value.get('filename')
content = value.get('content')
if not any((filename, content)):
raise ValueError('at least one of filename or content must be provided')
mime = value.get('mime_type')
if not mime:
try:
mime = mimetypes.guess_type(filename or '', strict=False)[0] or 'application/octet-stream'
except Exception:
mime = 'application/octet-stream'
main_type, sep, sub_type = mime.partition('/')
else:
raise TypeError(
'value must be a string, or mapping, cannot be type %s' % value.__class__.__name__
)
if not content and filename:
with open(to_bytes(filename, errors='surrogate_or_strict'), 'rb') as f:
part = email.mime.application.MIMEApplication(f.read())
del part['Content-Type']
part.add_header('Content-Type', '%s/%s' % (main_type, sub_type))
else:
part = email.mime.nonmultipart.MIMENonMultipart(main_type, sub_type)
part.set_payload(to_bytes(content))
part.add_header('Content-Disposition', 'form-data')
del part['MIME-Version']
part.set_param(
'name',
field,
header='Content-Disposition'
)
if filename:
part.set_param(
'filename',
to_native(os.path.basename(filename)),
header='Content-Disposition'
)
m.attach(part)
if PY3:
# Ensure headers are not split over multiple lines
# The HTTP policy also uses CRLF by default
b_data = m.as_bytes(policy=email.policy.HTTP)
else:
# Py2
# We cannot just call ``as_string`` since it provides no way
# to specify ``maxheaderlen``
fp = cStringIO() # cStringIO seems to be required here
# Ensure headers are not split over multiple lines
g = email.generator.Generator(fp, maxheaderlen=0)
g.flatten(m)
# ``fix_eols`` switches from ``\n`` to ``\r\n``
b_data = email.utils.fix_eols(fp.getvalue())
del m
headers, sep, b_content = b_data.partition(b'\r\n\r\n')
del b_data
if PY3:
parser = email.parser.BytesHeaderParser().parsebytes
else:
# Py2
parser = email.parser.HeaderParser().parsestr
return (
parser(headers)['content-type'], # Message converts to native strings
b_content
)
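# Illustrative use (a sketch; the endpoint and field name are made up):
#
#   content_type, body = prepare_multipart({'text_form_field': 'value'})
#   resp = open_url('https://example.com/upload', data=body, method='POST',
#                   headers={'Content-Type': content_type})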
#
# Module-related functions
#
def basic_auth_header(username, password):
"""Takes a username and password and returns a byte string suitable for
using as value of an Authorization header to do basic auth.
"""
return b"Basic %s" % base64.b64encode(to_bytes("%s:%s" % (username, password), errors='surrogate_or_strict'))
def url_argument_spec():
'''
Creates an argument spec that can be used with any module
that will be requesting content via urllib/urllib2
'''
return dict(
url=dict(type='str'),
force=dict(type='bool', default=False, aliases=['thirsty'],
deprecated_aliases=[dict(name='thirsty', version='2.13', collection_name='ansible.builtin')]),
http_agent=dict(type='str', default='ansible-httpget'),
use_proxy=dict(type='bool', default=True),
validate_certs=dict(type='bool', default=True),
url_username=dict(type='str'),
url_password=dict(type='str', no_log=True),
force_basic_auth=dict(type='bool', default=False),
client_cert=dict(type='path'),
client_key=dict(type='path'),
use_gssapi=dict(type='bool', default=False),
)
def fetch_url(module, url, data=None, headers=None, method=None,
use_proxy=True, force=False, last_mod_time=None, timeout=10,
use_gssapi=False, unix_socket=None, ca_path=None, cookies=None):
"""Sends a request via HTTP(S) or FTP (needs the module as parameter)
    :arg module: The AnsibleModule (used to get username, password, etc.; see below).
:arg url: The url to use.
:kwarg data: The data to be sent (in case of POST/PUT).
:kwarg headers: A dict with the request headers.
:kwarg method: "POST", "PUT", etc.
:kwarg boolean use_proxy: Default: True
:kwarg boolean force: If True: Do not get a cached copy (Default: False)
:kwarg last_mod_time: Default: None
:kwarg int timeout: Default: 10
:kwarg boolean use_gssapi: Default: False
:kwarg unix_socket: (optional) String of file system path to unix socket file to use when establishing
connection to the provided url
:kwarg ca_path: (optional) String of file system path to CA cert bundle to use
:returns: A tuple of (**response**, **info**). Use ``response.read()`` to read the data.
        The **info** contains the 'status' and other metadata. When an HttpError (status >= 400)
        occurs, ``info['body']`` contains the error response data.
    Example::
data={...}
resp, info = fetch_url(module,
"http://example.com",
data=module.jsonify(data),
headers={'Content-type': 'application/json'},
method="POST")
status_code = info["status"]
body = resp.read()
        if status_code >= 400:
body = info['body']
"""
if not HAS_URLPARSE:
module.fail_json(msg='urlparse is not installed')
# ensure we use proper tempdir
old_tempdir = tempfile.tempdir
tempfile.tempdir = module.tmpdir
# Get validate_certs from the module params
validate_certs = module.params.get('validate_certs', True)
username = module.params.get('url_username', '')
password = module.params.get('url_password', '')
http_agent = module.params.get('http_agent', 'ansible-httpget')
force_basic_auth = module.params.get('force_basic_auth', '')
follow_redirects = module.params.get('follow_redirects', 'urllib2')
client_cert = module.params.get('client_cert')
client_key = module.params.get('client_key')
use_gssapi = module.params.get('use_gssapi', use_gssapi)
if not isinstance(cookies, cookiejar.CookieJar):
cookies = cookiejar.LWPCookieJar()
r = None
info = dict(url=url, status=-1)
try:
r = open_url(url, data=data, headers=headers, method=method,
use_proxy=use_proxy, force=force, last_mod_time=last_mod_time, timeout=timeout,
validate_certs=validate_certs, url_username=username,
url_password=password, http_agent=http_agent, force_basic_auth=force_basic_auth,
follow_redirects=follow_redirects, client_cert=client_cert,
client_key=client_key, cookies=cookies, use_gssapi=use_gssapi,
unix_socket=unix_socket, ca_path=ca_path)
# Lowercase keys, to conform to py2 behavior, so that py3 and py2 are predictable
info.update(dict((k.lower(), v) for k, v in r.info().items()))
# Don't be lossy, append header values for duplicate headers
        # In Py2 there is nothing that needs to be done; Py2 does this for us
if PY3:
temp_headers = {}
for name, value in r.headers.items():
# The same as above, lower case keys to match py2 behavior, and create more consistent results
name = name.lower()
if name in temp_headers:
temp_headers[name] = ', '.join((temp_headers[name], value))
else:
temp_headers[name] = value
info.update(temp_headers)
# parse the cookies into a nice dictionary
cookie_list = []
cookie_dict = dict()
# Python sorts cookies in order of most specific (ie. longest) path first. See ``CookieJar._cookie_attrs``
# Cookies with the same path are reversed from response order.
# This code makes no assumptions about that, and accepts the order given by python
for cookie in cookies:
cookie_dict[cookie.name] = cookie.value
cookie_list.append((cookie.name, cookie.value))
info['cookies_string'] = '; '.join('%s=%s' % c for c in cookie_list)
info['cookies'] = cookie_dict
# finally update the result with a message about the fetch
info.update(dict(msg="OK (%s bytes)" % r.headers.get('Content-Length', 'unknown'), url=r.geturl(), status=r.code))
except NoSSLError as e:
distribution = get_distribution()
if distribution is not None and distribution.lower() == 'redhat':
module.fail_json(msg='%s. You can also install python-ssl from EPEL' % to_native(e), **info)
else:
module.fail_json(msg='%s' % to_native(e), **info)
except (ConnectionError, ValueError) as e:
module.fail_json(msg=to_native(e), **info)
except MissingModuleError as e:
module.fail_json(msg=to_text(e), exception=e.import_traceback)
except urllib_error.HTTPError as e:
try:
body = e.read()
except AttributeError:
body = ''
# Try to add exception info to the output but don't fail if we can't
try:
# Lowercase keys, to conform to py2 behavior, so that py3 and py2 are predictable
info.update(dict((k.lower(), v) for k, v in e.info().items()))
except Exception:
pass
info.update({'msg': to_native(e), 'body': body, 'status': e.code})
except urllib_error.URLError as e:
code = int(getattr(e, 'code', -1))
info.update(dict(msg="Request failed: %s" % to_native(e), status=code))
except socket.error as e:
info.update(dict(msg="Connection failure: %s" % to_native(e), status=-1))
except httplib.BadStatusLine as e:
info.update(dict(msg="Connection failure: connection was closed before a valid response was received: %s" % to_native(e.line), status=-1))
except Exception as e:
info.update(dict(msg="An unknown error occurred: %s" % to_native(e), status=-1),
exception=traceback.format_exc())
finally:
tempfile.tempdir = old_tempdir
return r, info
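# Illustrative sketch (not part of the original module): the usual pattern for
# a module that builds its argument spec from ``url_argument_spec`` and calls
# ``fetch_url``. The extra ``example_field`` option and the URL are invented
# for the example; assumes ``AnsibleModule`` is importable from
# ``ansible.module_utils.basic``.
def _example_fetch_url_module():
    from ansible.module_utils.basic import AnsibleModule
    argument_spec = url_argument_spec()
    argument_spec.update(example_field=dict(type='str', default='demo'))
    module = AnsibleModule(argument_spec=argument_spec)
    resp, info = fetch_url(module, 'https://example.com/api', method='GET')
    if info['status'] != 200:
        module.fail_json(msg='Request failed: %s' % info.get('msg'), status=info['status'])
    module.exit_json(changed=False, content=to_text(resp.read()))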
def fetch_file(module, url, data=None, headers=None, method=None,
use_proxy=True, force=False, last_mod_time=None, timeout=10):
'''Download and save a file via HTTP(S) or FTP (needs the module as parameter).
This is basically a wrapper around fetch_url().
    :arg module: The AnsibleModule (used to get username, password, etc.; see below).
:arg url: The url to use.
:kwarg data: The data to be sent (in case of POST/PUT).
:kwarg headers: A dict with the request headers.
:kwarg method: "POST", "PUT", etc.
:kwarg boolean use_proxy: Default: True
:kwarg boolean force: If True: Do not get a cached copy (Default: False)
:kwarg last_mod_time: Default: None
:kwarg int timeout: Default: 10
:returns: A string, the path to the downloaded file.
'''
# download file
bufsize = 65536
file_name, file_ext = os.path.splitext(str(url.rsplit('/', 1)[1]))
fetch_temp_file = tempfile.NamedTemporaryFile(dir=module.tmpdir, prefix=file_name, suffix=file_ext, delete=False)
module.add_cleanup_file(fetch_temp_file.name)
try:
rsp, info = fetch_url(module, url, data, headers, method, use_proxy, force, last_mod_time, timeout)
if not rsp:
module.fail_json(msg="Failure downloading %s, %s" % (url, info['msg']))
data = rsp.read(bufsize)
while data:
fetch_temp_file.write(data)
data = rsp.read(bufsize)
fetch_temp_file.close()
except Exception as e:
module.fail_json(msg="Failure downloading %s, %s" % (url, to_native(e)))
return fetch_temp_file.name
|
Fale/ansible
|
lib/ansible/module_utils/urls.py
|
Python
|
gpl-3.0
| 77,490 | 0.00231 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('simsoexp', '0005_schedulingpolicy_class_name'),
]
operations = [
migrations.RemoveField(
model_name='results',
name='metrics',
),
migrations.AddField(
model_name='results',
name='aborted_jobs',
field=models.IntegerField(default=0),
preserve_default=False,
),
migrations.AddField(
model_name='results',
name='jobs',
field=models.IntegerField(default=0),
preserve_default=False,
),
migrations.AddField(
model_name='results',
name='migrations',
field=models.IntegerField(default=0),
preserve_default=False,
),
migrations.AddField(
model_name='results',
name='norm_laxity',
field=models.IntegerField(default=0),
preserve_default=False,
),
migrations.AddField(
model_name='results',
name='on_schedule',
field=models.IntegerField(default=0),
preserve_default=False,
),
migrations.AddField(
model_name='results',
name='preemptions',
field=models.IntegerField(default=0),
preserve_default=False,
),
migrations.AddField(
model_name='results',
name='sys_preempt',
field=models.IntegerField(default=0),
preserve_default=False,
),
migrations.AddField(
model_name='results',
name='task_migrations',
field=models.IntegerField(default=0),
preserve_default=False,
),
migrations.AddField(
model_name='results',
name='timers',
field=models.IntegerField(default=0),
preserve_default=False,
),
]
|
Scriptopathe/simso-exp
|
simsoexp/migrations/0006_auto_20150721_1432.py
|
Python
|
bsd-2-clause
| 2,084 | 0 |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from ..extern import six
from ..extern.six.moves import zip
import warnings
import weakref
from copy import deepcopy
import numpy as np
from numpy import ma
from ..units import Unit, Quantity
from ..utils.compat import NUMPY_LT_1_8
from ..utils.console import color_print
from ..utils.metadata import MetaData
from ..utils.data_info import BaseColumnInfo, dtype_info_name
from ..extern.six.moves import range
from . import groups
from . import pprint
from .np_utils import fix_column_name
# These "shims" provide __getitem__ implementations for Column and MaskedColumn
from ._column_mixins import _ColumnGetitemShim, _MaskedColumnGetitemShim
# Create a generic TableFormatter object for use by bare columns with no
# parent table.
FORMATTER = pprint.TableFormatter()
INTEGER_TYPES = (int, long, np.integer) if six.PY2 else (int, np.integer)
class StringTruncateWarning(UserWarning):
"""
Warning class for when a string column is assigned a value
that gets truncated because the base (numpy) string length
is too short.
This does not inherit from AstropyWarning because we want to use
stacklevel=2 to show the user where the issue occurred in their code.
"""
pass
def _auto_names(n_cols):
from . import conf
return [str(conf.auto_colname).format(i) for i in range(n_cols)]
# list of one and two-dimensional comparison functions, which sometimes return
# a Column class and sometimes a plain array. Used in __array_wrap__ to ensure
# they only return plain (masked) arrays (see #1446 and #1685)
_comparison_functions = set(
[np.greater, np.greater_equal, np.less, np.less_equal,
np.not_equal, np.equal,
np.isfinite, np.isinf, np.isnan, np.sign, np.signbit])
def col_copy(col, copy_indices=True):
"""
This is a mixin-safe version of Column.copy() (with copy_data=True).
"""
if isinstance(col, BaseColumn):
return col.copy()
# The new column should have None for the parent_table ref. If the
# original parent_table weakref there at the point of copying then it
# generates an infinite recursion. Instead temporarily remove the weakref
# on the original column and restore after the copy in an exception-safe
# manner.
parent_table = col.info.parent_table
indices = col.info.indices
col.info.parent_table = None
col.info.indices = []
try:
newcol = col.copy() if hasattr(col, 'copy') else deepcopy(col)
newcol.info = col.info
newcol.info.indices = deepcopy(indices or []) if copy_indices else []
for index in newcol.info.indices:
index.replace_col(col, newcol)
finally:
col.info.parent_table = parent_table
col.info.indices = indices
return newcol
class FalseArray(np.ndarray):
def __new__(cls, shape):
obj = np.zeros(shape, dtype=np.bool).view(cls)
return obj
def __setitem__(self, item, val):
val = np.asarray(val)
if np.any(val):
raise ValueError('Cannot set any element of {0} class to True'
.format(self.__class__.__name__))
if six.PY2: # avoid falling back to ndarray.__setslice__
def __setslice__(self, start, stop, val):
self.__setitem__(slice(start, stop), val)
class ColumnInfo(BaseColumnInfo):
attrs_from_parent = BaseColumnInfo.attr_names
_supports_indexing = True
class BaseColumn(_ColumnGetitemShim, np.ndarray):
meta = MetaData()
def __new__(cls, data=None, name=None,
dtype=None, shape=(), length=0,
description=None, unit=None, format=None, meta=None,
copy=False, copy_indices=True):
if data is None:
dtype = (np.dtype(dtype).str, shape)
self_data = np.zeros(length, dtype=dtype)
elif isinstance(data, BaseColumn) and hasattr(data, '_name'):
# When unpickling a MaskedColumn, ``data`` will be a bare
# BaseColumn with none of the expected attributes. In this case
# do NOT execute this block which initializes from ``data``
# attributes.
self_data = np.array(data.data, dtype=dtype, copy=copy)
if description is None:
description = data.description
if unit is None:
unit = unit or data.unit
if format is None:
format = data.format
if meta is None:
meta = deepcopy(data.meta)
if name is None:
name = data.name
elif isinstance(data, Quantity):
if unit is None:
self_data = np.array(data, dtype=dtype, copy=copy)
unit = data.unit
else:
self_data = np.array(data.to(unit), dtype=dtype, copy=copy)
if description is None:
description = data.info.description
if format is None:
format = data.info.format
if meta is None:
meta = deepcopy(data.info.meta)
else:
self_data = np.array(data, dtype=dtype, copy=copy)
self = self_data.view(cls)
self._name = fix_column_name(name)
self.unit = unit
self.format = format
self.description = description
self.meta = meta
self._parent_table = None
self.indices = deepcopy(getattr(data, 'indices', [])) if \
copy_indices else []
for index in self.indices:
index.replace_col(data, self)
return self
@property
def data(self):
return self.view(np.ndarray)
@property
def parent_table(self):
if self._parent_table is None:
return None
else:
return self._parent_table()
@parent_table.setter
def parent_table(self, table):
if table is None:
self._parent_table = None
else:
self._parent_table = weakref.ref(table)
info = ColumnInfo()
def copy(self, order='C', data=None, copy_data=True):
"""
Return a copy of the current instance.
If ``data`` is supplied then a view (reference) of ``data`` is used,
and ``copy_data`` is ignored.
Parameters
----------
order : {'C', 'F', 'A', 'K'}, optional
Controls the memory layout of the copy. 'C' means C-order,
'F' means F-order, 'A' means 'F' if ``a`` is Fortran contiguous,
'C' otherwise. 'K' means match the layout of ``a`` as closely
as possible. (Note that this function and :func:numpy.copy are very
similar, but have different default values for their order=
arguments.) Default is 'C'.
data : array, optional
If supplied then use a view of ``data`` instead of the instance
data. This allows copying the instance attributes and meta.
copy_data : bool, optional
Make a copy of the internal numpy array instead of using a
reference. Default is True.
Returns
-------
col : Column or MaskedColumn
Copy of the current column (same type as original)
"""
if data is None:
data = self.data
if copy_data:
data = data.copy(order)
out = data.view(self.__class__)
out.__array_finalize__(self)
# for MaskedColumn, MaskedArray.__array_finalize__ also copies mask
# from self, which is not the idea here, so undo
if isinstance(self, MaskedColumn):
out._mask = data._mask
self._copy_groups(out)
return out
def __setstate__(self, state):
"""
Restore the internal state of the Column/MaskedColumn for pickling
purposes. This requires that the last element of ``state`` is a
5-tuple that has Column-specific state values.
"""
# Get the Column attributes
names = ('_name', 'unit', 'format', 'description', 'meta', 'indices')
attrs = {name: val for name, val in zip(names, state[-1])}
state = state[:-1]
# Using super(type(self), self).__setstate__() gives an infinite
# recursion. Manually call the right super class to actually set up
# the array object.
super_class = ma.MaskedArray if isinstance(self, ma.MaskedArray) else np.ndarray
super_class.__setstate__(self, state)
# Set the Column attributes
for name, val in attrs.items():
setattr(self, name, val)
self._parent_table = None
def __reduce__(self):
"""
Return a 3-tuple for pickling a Column. Use the super-class
functionality but then add in a 5-tuple of Column-specific values
that get used in __setstate__.
"""
super_class = ma.MaskedArray if isinstance(self, ma.MaskedArray) else np.ndarray
reconstruct_func, reconstruct_func_args, state = super_class.__reduce__(self)
# Define Column-specific attrs and meta that gets added to state.
column_state = (self.name, self.unit, self.format, self.description,
self.meta, self.indices)
state = state + (column_state,)
return reconstruct_func, reconstruct_func_args, state
# avoid == and != to be done based on type of subclass
# (helped solve #1446; see also __array_wrap__)
def __eq__(self, other):
return self.data.__eq__(other)
def __ne__(self, other):
return self.data.__ne__(other)
def __array_finalize__(self, obj):
# Obj will be none for direct call to Column() creator
if obj is None:
return
if six.callable(super(BaseColumn, self).__array_finalize__):
super(BaseColumn, self).__array_finalize__(obj)
# Self was created from template (e.g. obj[slice] or (obj * 2))
# or viewcast e.g. obj.view(Column). In either case we want to
# init Column attributes for self from obj if possible.
self.parent_table = None
if not hasattr(self, 'indices'): # may have been copied in __new__
self.indices = []
self._copy_attrs(obj)
def __array_wrap__(self, out_arr, context=None):
"""
__array_wrap__ is called at the end of every ufunc.
Normally, we want a Column object back and do not have to do anything
special. But there are two exceptions:
1) If the output shape is different (e.g. for reduction ufuncs
like sum() or mean()), a Column still linking to a parent_table
makes little sense, so we return the output viewed as the
column content (ndarray or MaskedArray).
           For this case, we use "[()]" to select everything and to ensure we
           convert a zero rank array to a scalar. (For some reason np.sum()
           returns a zero rank scalar array while np.mean() returns a scalar,
           so the [()] is needed for this case.)
2) When the output is created by any function that returns a boolean
we also want to consistently return an array rather than a column
(see #1446 and #1685)
"""
out_arr = super(BaseColumn, self).__array_wrap__(out_arr, context)
if (self.shape != out_arr.shape or
(isinstance(out_arr, BaseColumn) and
(context is not None and context[0] in _comparison_functions))):
return out_arr.data[()]
else:
return out_arr
@property
def name(self):
"""
The name of this column.
"""
return self._name
@name.setter
def name(self, val):
val = fix_column_name(val)
if self.parent_table is not None:
table = self.parent_table
table.columns._rename_column(self.name, val)
self._name = val
@property
def descr(self):
"""Array-interface compliant full description of the column.
This returns a 3-tuple (name, type, shape) that can always be
used in a structured array dtype definition.
"""
return (self.name, self.dtype.str, self.shape[1:])
def iter_str_vals(self):
"""
Return an iterator that yields the string-formatted values of this
column.
Returns
-------
str_vals : iterator
Column values formatted as strings
"""
# Iterate over formatted values with no max number of lines, no column
# name, no unit, and ignoring the returned header info in outs.
_pformat_col_iter = self._formatter._pformat_col_iter
for str_val in _pformat_col_iter(self, -1, show_name=False, show_unit=False,
show_dtype=False, outs={}):
yield str_val
def attrs_equal(self, col):
"""Compare the column attributes of ``col`` to this object.
The comparison attributes are: ``name``, ``unit``, ``dtype``,
``format``, ``description``, and ``meta``.
Parameters
----------
col : Column
Comparison column
Returns
-------
equal : boolean
True if all attributes are equal
"""
if not isinstance(col, BaseColumn):
raise ValueError('Comparison `col` must be a Column or '
'MaskedColumn object')
attrs = ('name', 'unit', 'dtype', 'format', 'description', 'meta')
equal = all(getattr(self, x) == getattr(col, x) for x in attrs)
return equal
@property
def _formatter(self):
return FORMATTER if (self.parent_table is None) else self.parent_table.formatter
def pformat(self, max_lines=None, show_name=True, show_unit=False, show_dtype=False,
html=False):
"""Return a list of formatted string representation of column values.
If no value of ``max_lines`` is supplied then the height of the
screen terminal is used to set ``max_lines``. If the terminal
height cannot be determined then the default will be
determined using the ``astropy.conf.max_lines`` configuration
item. If a negative value of ``max_lines`` is supplied then
there is no line limit applied.
Parameters
----------
max_lines : int
Maximum lines of output (header + data rows)
show_name : bool
Include column name (default=True)
show_unit : bool
Include a header row for unit (default=False)
show_dtype : bool
Include column dtype (default=False)
html : bool
Format the output as an HTML table (default=False)
Returns
-------
lines : list
List of lines with header and formatted column values
"""
_pformat_col = self._formatter._pformat_col
lines, outs = _pformat_col(self, max_lines, show_name=show_name,
show_unit=show_unit, show_dtype=show_dtype,
html=html)
return lines
def pprint(self, max_lines=None, show_name=True, show_unit=False, show_dtype=False):
"""Print a formatted string representation of column values.
If no value of ``max_lines`` is supplied then the height of the
screen terminal is used to set ``max_lines``. If the terminal
height cannot be determined then the default will be
determined using the ``astropy.conf.max_lines`` configuration
item. If a negative value of ``max_lines`` is supplied then
there is no line limit applied.
Parameters
----------
max_lines : int
Maximum number of values in output
show_name : bool
Include column name (default=True)
show_unit : bool
Include a header row for unit (default=False)
show_dtype : bool
            Include column dtype (default=False)
"""
_pformat_col = self._formatter._pformat_col
lines, outs = _pformat_col(self, max_lines, show_name=show_name, show_unit=show_unit,
show_dtype=show_dtype)
n_header = outs['n_header']
for i, line in enumerate(lines):
if i < n_header:
color_print(line, 'red')
else:
print(line)
def more(self, max_lines=None, show_name=True, show_unit=False):
"""Interactively browse column with a paging interface.
Supported keys::
f, <space> : forward one page
b : back one page
r : refresh same page
n : next row
p : previous row
< : go to beginning
> : go to end
q : quit browsing
h : print this help
Parameters
----------
max_lines : int
Maximum number of lines in table output
show_name : bool
Include a header row for column names (default=True)
show_unit : bool
Include a header row for unit (default=False)
"""
_more_tabcol = self._formatter._more_tabcol
_more_tabcol(self, max_lines=max_lines, show_name=show_name,
show_unit=show_unit)
@property
def unit(self):
"""
The unit associated with this column. May be a string or a
`astropy.units.UnitBase` instance.
Setting the ``unit`` property does not change the values of the
data. To perform a unit conversion, use ``convert_unit_to``.
"""
return self._unit
@unit.setter
def unit(self, unit):
if unit is None:
self._unit = None
else:
self._unit = Unit(unit, parse_strict='silent')
@unit.deleter
def unit(self):
self._unit = None
def convert_unit_to(self, new_unit, equivalencies=[]):
"""
Converts the values of the column in-place from the current
unit to the given unit.
To change the unit associated with this column without
actually changing the data values, simply set the ``unit``
property.
Parameters
----------
new_unit : str or `astropy.units.UnitBase` instance
The unit to convert to.
equivalencies : list of equivalence pairs, optional
            A list of equivalence pairs to try if the units are not
directly convertible. See :ref:`unit_equivalencies`.
Raises
------
astropy.units.UnitsError
If units are inconsistent
"""
if self.unit is None:
raise ValueError("No unit set on column")
self.data[:] = self.unit.to(
new_unit, self.data, equivalencies=equivalencies)
self.unit = new_unit
@property
def groups(self):
if not hasattr(self, '_groups'):
self._groups = groups.ColumnGroups(self)
return self._groups
def group_by(self, keys):
"""
Group this column by the specified ``keys``
This effectively splits the column into groups which correspond to
unique values of the ``keys`` grouping object. The output is a new
`Column` or `MaskedColumn` which contains a copy of this column but
sorted by row according to ``keys``.
The ``keys`` input to ``group_by`` must be a numpy array with the
same length as this column.
Parameters
----------
keys : numpy array
Key grouping object
Returns
-------
out : Column
New column with groups attribute set accordingly
"""
return groups.column_group_by(self, keys)
def _copy_groups(self, out):
"""
Copy current groups into a copy of self ``out``
"""
if self.parent_table:
if hasattr(self.parent_table, '_groups'):
out._groups = groups.ColumnGroups(out, indices=self.parent_table._groups._indices)
elif hasattr(self, '_groups'):
out._groups = groups.ColumnGroups(out, indices=self._groups._indices)
# Strip off the BaseColumn-ness for repr and str so that
# MaskedColumn.data __repr__ does not include masked_BaseColumn(data =
# [1 2], ...).
def __repr__(self):
return np.asarray(self).__repr__()
@property
def quantity(self):
"""
A view of this table column as a `~astropy.units.Quantity` object with
units given by the Column's `unit` parameter.
"""
# the Quantity initializer is used here because it correctly fails
# if the column's values are non-numeric (like strings), while .view
# will happily return a quantity with gibberish for numerical values
return Quantity(self, copy=False, dtype=self.dtype, order='A')
def to(self, unit, equivalencies=[], **kwargs):
"""
Converts this table column to a `~astropy.units.Quantity` object with
the requested units.
Parameters
----------
unit : `~astropy.units.Unit` or str
The unit to convert to (i.e., a valid argument to the
:meth:`astropy.units.Quantity.to` method).
equivalencies : list of equivalence pairs, optional
Equivalencies to use for this conversion. See
:meth:`astropy.units.Quantity.to` for more details.
Returns
-------
quantity : `~astropy.units.Quantity`
A quantity object with the contents of this column in the units
``unit``.
"""
return self.quantity.to(unit, equivalencies)
def _copy_attrs(self, obj):
"""
Copy key column attributes from ``obj`` to self
"""
for attr in ('name', 'unit', 'format', 'description'):
val = getattr(obj, attr, None)
setattr(self, attr, val)
self.meta = deepcopy(getattr(obj, 'meta', {}))
class Column(BaseColumn):
"""Define a data column for use in a Table object.
Parameters
----------
data : list, ndarray or None
Column data values
name : str
Column name and key for reference within Table
dtype : numpy.dtype compatible value
Data type for column
shape : tuple or ()
Dimensions of a single row element in the column data
length : int or 0
Number of row elements in column data
description : str or None
Full description of column
unit : str or None
Physical unit
format : str or None or function or callable
Format string for outputting column values. This can be an
"old-style" (``format % value``) or "new-style" (`str.format`)
format specification string or a function or any callable object that
accepts a single value and returns a string.
meta : dict-like or None
Meta-data associated with the column
Examples
--------
A Column can be created in two different ways:
- Provide a ``data`` value but not ``shape`` or ``length`` (which are
inferred from the data).
Examples::
col = Column(data=[1, 2], name='name') # shape=(2,)
col = Column(data=[[1, 2], [3, 4]], name='name') # shape=(2, 2)
col = Column(data=[1, 2], name='name', dtype=float)
col = Column(data=np.array([1, 2]), name='name')
col = Column(data=['hello', 'world'], name='name')
The ``dtype`` argument can be any value which is an acceptable
fixed-size data-type initializer for the numpy.dtype() method. See
`<http://docs.scipy.org/doc/numpy/reference/arrays.dtypes.html>`_.
Examples include:
- Python non-string type (float, int, bool)
- Numpy non-string type (e.g. np.float32, np.int64, np.bool)
- Numpy.dtype array-protocol type strings (e.g. 'i4', 'f8', 'S15')
      If no ``dtype`` value is provided then the type is inferred using
``np.array(data)``.
- Provide ``length`` and optionally ``shape``, but not ``data``
Examples::
col = Column(name='name', length=5)
col = Column(name='name', dtype=int, length=10, shape=(3,4))
The default ``dtype`` is ``np.float64``. The ``shape`` argument is the
array shape of a single cell in the column.
"""
def __new__(cls, data=None, name=None,
dtype=None, shape=(), length=0,
description=None, unit=None, format=None, meta=None,
copy=False, copy_indices=True):
if isinstance(data, MaskedColumn) and np.any(data.mask):
raise TypeError("Cannot convert a MaskedColumn with masked value to a Column")
self = super(Column, cls).__new__(cls, data=data, name=name, dtype=dtype,
shape=shape, length=length, description=description,
unit=unit, format=format, meta=meta,
copy=copy, copy_indices=copy_indices)
return self
def __setattr__(self, item, value):
if not isinstance(self, MaskedColumn) and item == "mask":
raise AttributeError("cannot set mask value to a column in non-masked Table")
super(Column, self).__setattr__(item, value)
if item == 'unit' and issubclass(self.dtype.type, np.number):
try:
converted = self.parent_table._convert_col_for_table(self)
except AttributeError: # Either no parent table or parent table is None
pass
else:
if converted is not self:
self.parent_table.replace_column(self.name, converted)
def _base_repr_(self, html=False):
# If scalar then just convert to correct numpy type and use numpy repr
if self.ndim == 0:
return repr(self.item())
descr_vals = [self.__class__.__name__]
unit = None if self.unit is None else str(self.unit)
shape = None if self.ndim <= 1 else self.shape[1:]
for attr, val in (('name', self.name),
('dtype', dtype_info_name(self.dtype)),
('shape', shape),
('unit', unit),
('format', self.format),
('description', self.description),
('length', len(self))):
if val is not None:
descr_vals.append('{0}={1}'.format(attr, repr(val)))
descr = '<' + ' '.join(descr_vals) + '>\n'
if html:
from ..utils.xml.writer import xml_escape
descr = xml_escape(descr)
data_lines, outs = self._formatter._pformat_col(
self, show_name=False, show_unit=False, show_length=False, html=html)
out = descr + '\n'.join(data_lines)
if six.PY2 and isinstance(out, six.text_type):
out = out.encode('utf-8')
return out
def _repr_html_(self):
return self._base_repr_(html=True)
def __repr__(self):
return self._base_repr_(html=False)
def __unicode__(self):
# If scalar then just convert to correct numpy type and use numpy repr
if self.ndim == 0:
return str(self.item())
lines, outs = self._formatter._pformat_col(self)
return '\n'.join(lines)
if not six.PY2:
__str__ = __unicode__
def __bytes__(self):
return six.text_type(self).encode('utf-8')
if six.PY2:
__str__ = __bytes__
def _check_string_truncate(self, value):
value = np.asanyarray(value, dtype=self.dtype.type)
if value.dtype.itemsize > self.dtype.itemsize:
warnings.warn('truncated right side string(s) longer than {} '
'character(s) during assignment'
.format(self.dtype.str[2:]),
StringTruncateWarning,
stacklevel=3)
def __setitem__(self, index, value):
# Issue warning for string assignment that truncates ``value``
if issubclass(self.dtype.type, np.character):
self._check_string_truncate(value)
# update indices
self.info.adjust_indices(index, value, len(self))
# Set items using a view of the underlying data, as it gives an
# order-of-magnitude speed-up. [#2994]
self.data[index] = value
if six.PY2:
# avoid falling through to ndarray.__setslice__, instead using
# self.__setitem__, which is much faster (see above). [#3020]
def __setslice__(self, start, stop, value):
self.__setitem__(slice(start, stop), value)
def insert(self, obj, values):
"""
Insert values before the given indices in the column and return
a new `~astropy.table.Column` object.
Parameters
----------
obj : int, slice or sequence of ints
Object that defines the index or indices before which ``values`` is
inserted.
values : array_like
            Value(s) to insert. If the type of ``values`` is different
            from that of the column, ``values`` is converted to the matching type.
            ``values`` should be shaped so that it can be broadcast appropriately.
Returns
-------
out : `~astropy.table.Column`
A copy of column with ``values`` and ``mask`` inserted. Note that the
insertion does not occur in-place: a new column is returned.
"""
if self.dtype.kind == 'O':
# Even if values is array-like (e.g. [1,2,3]), insert as a single
# object. Numpy.insert instead inserts each element in an array-like
# input individually.
data = np.insert(self, obj, None, axis=0)
data[obj] = values
else:
# Explicitly convert to dtype of this column. Needed because numpy 1.7
            # enforces safe casting by default, so values must be cast to this dtype
            # explicitly. This isn't the case for 1.6 or 1.8+.
values = np.asarray(values, dtype=self.dtype)
data = np.insert(self, obj, values, axis=0)
out = data.view(self.__class__)
out.__array_finalize__(self)
return out
# We do this to make the methods show up in the API docs
name = BaseColumn.name
unit = BaseColumn.unit
copy = BaseColumn.copy
more = BaseColumn.more
pprint = BaseColumn.pprint
pformat = BaseColumn.pformat
convert_unit_to = BaseColumn.convert_unit_to
quantity = BaseColumn.quantity
to = BaseColumn.to
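# Illustrative sketch (not part of the original module): exercises the unit and
# grouping API documented above using only names defined in this module; assumes
# the rest of astropy is importable at call time.
def _example_column_usage():
    col = Column(data=[1000.0, 2000.0, 2000.0], name='dist', unit='m')
    col.convert_unit_to('km')                      # values become [1., 2., 2.]
    grouped = col.group_by(np.array(['a', 'b', 'b']))
    return col, grouped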
class MaskedColumn(Column, _MaskedColumnGetitemShim, ma.MaskedArray):
"""Define a masked data column for use in a Table object.
Parameters
----------
data : list, ndarray or None
Column data values
name : str
Column name and key for reference within Table
mask : list, ndarray or None
Boolean mask for which True indicates missing or invalid data
fill_value : float, int, str or None
Value used when filling masked column elements
dtype : numpy.dtype compatible value
Data type for column
shape : tuple or ()
Dimensions of a single row element in the column data
length : int or 0
Number of row elements in column data
description : str or None
Full description of column
unit : str or None
Physical unit
format : str or None or function or callable
Format string for outputting column values. This can be an
"old-style" (``format % value``) or "new-style" (`str.format`)
format specification string or a function or any callable object that
accepts a single value and returns a string.
meta : dict-like or None
Meta-data associated with the column
Examples
--------
A MaskedColumn is similar to a Column except that it includes ``mask`` and
``fill_value`` attributes. It can be created in two different ways:
- Provide a ``data`` value but not ``shape`` or ``length`` (which are
inferred from the data).
Examples::
col = MaskedColumn(data=[1, 2], name='name')
col = MaskedColumn(data=[1, 2], name='name', mask=[True, False])
col = MaskedColumn(data=[1, 2], name='name', dtype=float, fill_value=99)
The ``mask`` argument will be cast as a boolean array and specifies
which elements are considered to be missing or invalid.
The ``dtype`` argument can be any value which is an acceptable
fixed-size data-type initializer for the numpy.dtype() method. See
`<http://docs.scipy.org/doc/numpy/reference/arrays.dtypes.html>`_.
Examples include:
- Python non-string type (float, int, bool)
- Numpy non-string type (e.g. np.float32, np.int64, np.bool)
- Numpy.dtype array-protocol type strings (e.g. 'i4', 'f8', 'S15')
      If no ``dtype`` value is provided then the type is inferred using
``np.array(data)``. When ``data`` is provided then the ``shape``
and ``length`` arguments are ignored.
- Provide ``length`` and optionally ``shape``, but not ``data``
Examples::
col = MaskedColumn(name='name', length=5)
col = MaskedColumn(name='name', dtype=int, length=10, shape=(3,4))
The default ``dtype`` is ``np.float64``. The ``shape`` argument is the
array shape of a single cell in the column.
"""
def __new__(cls, data=None, name=None, mask=None, fill_value=None,
dtype=None, shape=(), length=0,
description=None, unit=None, format=None, meta=None,
copy=False, copy_indices=True):
if mask is None and hasattr(data, 'mask'):
mask = data.mask
else:
mask = deepcopy(mask)
# Create self using MaskedArray as a wrapper class, following the example of
# class MSubArray in
# https://github.com/numpy/numpy/blob/maintenance/1.8.x/numpy/ma/tests/test_subclassing.py
# This pattern makes it so that __array_finalize__ is called as expected (e.g. #1471 and
# https://github.com/astropy/astropy/commit/ff6039e8)
# First just pass through all args and kwargs to BaseColumn, then wrap that object
# with MaskedArray.
self_data = BaseColumn(data, dtype=dtype, shape=shape, length=length, name=name,
unit=unit, format=format, description=description,
meta=meta, copy=copy, copy_indices=copy_indices)
self = ma.MaskedArray.__new__(cls, data=self_data, mask=mask)
# Note: do not set fill_value in the MaskedArray constructor because this does not
# go through the fill_value workarounds (see _fix_fill_value below).
if fill_value is None and hasattr(data, 'fill_value') and data.fill_value is not None:
# Coerce the fill_value to the correct type since `data` may be a
# different dtype than self.
fill_value = self.dtype.type(data.fill_value)
self.fill_value = fill_value
self.parent_table = None
# needs to be done here since self doesn't come from BaseColumn.__new__
for index in self.indices:
index.replace_col(self_data, self)
return self
def _fix_fill_value(self, val):
"""Fix a fill value (if needed) to work around a bug with setting the fill
value of a string array in MaskedArray with Python 3.x. See
https://github.com/numpy/numpy/pull/2733. This mimics the check in
numpy.ma.core._check_fill_value() (version < 1.8) which incorrectly sets
fill_value to a default if self.dtype.char is 'U' (which is the case for Python
3). Here we change the string to a byte string so that in Python 3 the
isinstance(val, basestring) part fails.
"""
if (NUMPY_LT_1_8 and isinstance(val, six.string_types) and
(self.dtype.char not in 'SV')):
val = val.encode()
return val
@property
def fill_value(self):
return self.get_fill_value() # defer to native ma.MaskedArray method
@fill_value.setter
def fill_value(self, val):
"""Set fill value both in the masked column view and in the parent table
if it exists. Setting one or the other alone doesn't work."""
val = self._fix_fill_value(val)
# Yet another ma bug workaround: If the value of fill_value for a string array is
# requested but not yet set then it gets created as 'N/A'. From this point onward
# any new fill_values are truncated to 3 characters. Note that this does not
# occur if the masked array is a structured array (as in the previous block that
# deals with the parent table).
#
# >>> x = ma.array(['xxxx'])
# >>> x.fill_value # fill_value now gets represented as an 'S3' array
# 'N/A'
# >>> x.fill_value='yyyy'
# >>> x.fill_value
# 'yyy'
#
# To handle this we are forced to reset a private variable first:
self._fill_value = None
self.set_fill_value(val) # defer to native ma.MaskedArray method
@property
def data(self):
out = self.view(ma.MaskedArray)
# The following is necessary because of a bug in Numpy, which was
# fixed in numpy/numpy#2703. The fix should be included in Numpy 1.8.0.
out.fill_value = self.fill_value
return out
def filled(self, fill_value=None):
"""Return a copy of self, with masked values filled with a given value.
Parameters
----------
        fill_value : scalar, optional
The value to use for invalid entries (`None` by default). If
`None`, the ``fill_value`` attribute of the array is used
instead.
Returns
-------
filled_column : Column
A copy of ``self`` with masked entries replaced by `fill_value`
(be it the function argument or the attribute of ``self``).
"""
if fill_value is None:
fill_value = self.fill_value
fill_value = self._fix_fill_value(fill_value)
data = super(MaskedColumn, self).filled(fill_value)
# Use parent table definition of Column if available
column_cls = self.parent_table.Column if (self.parent_table is not None) else Column
out = column_cls(name=self.name, data=data, unit=self.unit,
format=self.format, description=self.description,
meta=deepcopy(self.meta))
return out
def insert(self, obj, values, mask=None):
"""
Insert values along the given axis before the given indices and return
a new `~astropy.table.MaskedColumn` object.
Parameters
----------
obj : int, slice or sequence of ints
Object that defines the index or indices before which ``values`` is
inserted.
values : array_like
            Value(s) to insert. If the type of ``values`` is different
            from that of the column, ``values`` is converted to the matching type.
            ``values`` should be shaped so that it can be broadcast appropriately.
mask : boolean array_like
Mask value(s) to insert. If not supplied then False is used.
Returns
-------
out : `~astropy.table.MaskedColumn`
A copy of column with ``values`` and ``mask`` inserted. Note that the
insertion does not occur in-place: a new masked column is returned.
"""
self_ma = self.data # self viewed as MaskedArray
if self.dtype.kind == 'O':
# Even if values is array-like (e.g. [1,2,3]), insert as a single
# object. Numpy.insert instead inserts each element in an array-like
# input individually.
new_data = np.insert(self_ma.data, obj, None, axis=0)
new_data[obj] = values
else:
# Explicitly convert to dtype of this column. Needed because numpy 1.7
            # enforces safe casting by default, so values must be cast to this dtype
            # explicitly. This isn't the case for 1.6 or 1.8+.
values = np.asarray(values, dtype=self.dtype)
new_data = np.insert(self_ma.data, obj, values, axis=0)
if mask is None:
if self.dtype.kind == 'O':
mask = False
else:
mask = np.zeros(values.shape, dtype=np.bool)
new_mask = np.insert(self_ma.mask, obj, mask, axis=0)
new_ma = np.ma.array(new_data, mask=new_mask, copy=False)
out = new_ma.view(self.__class__)
out.parent_table = None
out.indices = []
out._copy_attrs(self)
return out
def _copy_attrs_slice(self, out):
# Fixes issue #3023: when calling getitem with a MaskedArray subclass
# the original object attributes are not copied.
if out.__class__ is self.__class__:
out.parent_table = None
# we need this because __getitem__ does a shallow copy of indices
if out.indices is self.indices:
out.indices = []
out._copy_attrs(self)
return out
def __setitem__(self, index, value):
# update indices
self.info.adjust_indices(index, value, len(self))
ma.MaskedArray.__setitem__(self, index, value)
# We do this to make the methods show up in the API docs
name = BaseColumn.name
copy = BaseColumn.copy
more = BaseColumn.more
pprint = BaseColumn.pprint
pformat = BaseColumn.pformat
convert_unit_to = BaseColumn.convert_unit_to
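# Illustrative sketch (not part of the original module): a basic MaskedColumn
# round trip showing ``filled`` and ``insert`` as documented above.
def _example_masked_column_usage():
    mcol = MaskedColumn(data=[1, 2, 3], name='a',
                        mask=[False, True, False], fill_value=-1)
    filled = mcol.filled()       # Column with the masked entry replaced by -1
    longer = mcol.insert(1, 99)  # new MaskedColumn with 99 inserted before index 1
    return filled, longer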
|
tbabej/astropy
|
astropy/table/column.py
|
Python
|
bsd-3-clause
| 41,990 | 0.001143 |
#!/usr/bin/env python
from .util import Spec
class Port(Spec):
STATES = [
"listening", "closed", "open",
"bound_to",
"tcp", "tcp6", "udp"
]
    def __init__(self, portnumber):
        self.portnumber = portnumber
        # Start from a closed/default state, then refresh it from netstat once.
        self.state = {
            'state': 'closed',
            'bound': False,
            'uid': None,
            'inode': None,
            'proto': None,
        }
        self.get_state()
        self.WIN = "Port %s is %%s" % self.portnumber
def get_state(self):
import os
for line in os.popen("netstat -tnle").readlines():
line = line.strip().split()
if len(line) != 8:
continue
(proto, _, _, local, foreign, state, uid, inode) = line
            if proto == 'tcp':
                (bound, port) = local.split(':')
            elif proto == 'tcp6':
                # e.g. ":::22" or "::1:8080"; everything before the last ':' is the bind address
                (bound, _, port) = local.rpartition(':')
            else:
                continue
            port = int(port)
if port == self.portnumber:
self.state = {
'state': 'listening',
'bound': bound,
'uid': uid,
'inode': inode,
'proto': proto,
}
def _make_sure(self, x, y):
if x == y:
return True
else:
return False
def sb_listening(self, *args):
if self._make_sure(self.state['state'], "listening"):
return True, "Port %s is listening" % self.portnumber
return False, "Port %s is current %s not listening" % (
self.portnumber,
self.state['state']
)
def sb_closed(self, *args):
if self._make_sure(self.state['state'], "closed"):
return True, "Port %s is closed" % self.portnumber
return False, "Port %s is current %s not closed" % (
self.portnumber, self.state['state']
)
def sb_tcp(self, *args):
if self._make_sure(self.state['proto'], "tcp"):
return True
return "Port %s is using protocol %s not TCP" % (
self.portnumber, self.state['proto']
)
def sb_udp(self, *args):
if self._make_sure(self.state['proto'], "udp"):
return True
return "Port %s is using protocol %s not udp" % (
self.portnumber, self.state['proto']
)
def sb_tcp6(self, *args):
if self._make_sure(self.state['proto'], "tcp6"):
return True
return "Port %s is using protocol %s not TCP6" % (
self.portnumber, self.state['proto']
)
def sb_bound_to(self, bound_ip):
if self._make_sure(self.state['bound'], bound_ip):
return True, "Port %s is bound to %s" % (self.portnumber, bound_ip)
return False, "The port currently bound to %s not %s" % (
self.state['bound'], bound_ip
)
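# Illustrative usage sketch (not part of the original module); the port number
# and bind address below are placeholders.
def _example_port_check():
    port = Port(22)
    listening, listen_msg = port.sb_listening()
    bound, bound_msg = port.sb_bound_to('0.0.0.0')
    return listening and bound, (listen_msg, bound_msg)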
|
daniellawrence/pyspeccheck
|
speccheck/port.py
|
Python
|
mit
| 2,928 | 0 |
import logging
import warnings
from collections import namedtuple
logger = logging.getLogger(__name__)
Field = namedtuple('Field', ('name', 'type_', 'default', 'desc', 'warn'))
class Config:
"""配置模块
用户可以在 rc 文件中配置各个选项的值
"""
def __init__(self):
object.__setattr__(self, '_fields', {})
def __getattr__(self, name):
        # tip: do not use getattr to fetch the value here, otherwise it recurses forever
if name == '_fields':
return object.__getattribute__(self, '_fields')
if name in self._fields:
try:
object.__getattribute__(self, name)
except AttributeError:
return self._fields[name].default
return object.__getattribute__(self, name)
def __setattr__(self, name, value):
if name in self._fields:
field = self._fields[name]
if field.warn is not None:
warnings.warn('Config field({}): {}'.format(name, field.warn),
stacklevel=2)
            # TODO: validate the value type
object.__setattr__(self, name, value)
else:
            logger.warning('Assigning to an undeclared config key.')
def deffield(self, name, type_=None, default=None, desc='', warn=None):
"""Define a configuration field
        :param str name: the field name. It SHOULD be capitalized unless the field
            refers to a sub-config.
        :param type_: field type.
:param default: default value for the field.
:param desc: description for the field.
:param warn: if field is deprecated, set a warn message.
"""
if name not in self._fields:
self._fields[name] = Field(name=name,
type_=type_,
default=default,
desc=desc,
warn=warn)
else:
raise ValueError('Field({}) is already defined.'.format(name))
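# Illustrative usage sketch (not part of the original module); the field name
# and values are invented for the example.
def _example_config_usage():
    config = Config()
    config.deffield('DEBUG', type_=bool, default=False, desc='enable debug logging')
    assert config.DEBUG is False   # an unset field falls back to its declared default
    config.DEBUG = True            # assignment to a declared field is accepted
    return config.DEBUG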
|
cosven/FeelUOwn
|
feeluown/config.py
|
Python
|
gpl-3.0
| 2,065 | 0.000504 |
from sklearn2sql_heroku.tests.regression import generic as reg_gen
reg_gen.test_model("XGBRegressor" , "RandomReg_500" , "db2")
|
antoinecarme/sklearn2sql_heroku
|
tests/regression/RandomReg_500/ws_RandomReg_500_XGBRegressor_db2_code_gen.py
|
Python
|
bsd-3-clause
| 130 | 0.015385 |
# https://leetcode.com/problems/valid-parentheses/
class Solution(object):
def isValid(self, s):
"""
:type s: str
:rtype: bool
"""
if not s:
return True
stack = []
        for i in range(len(s)):
            # if it's an opening bracket, we're going deeper, so push it on the stack
            if s[i] in "([{":
                stack.append(s[i])
            # otherwise it must be a closing bracket: fail if the stack is empty,
            # else pop and check that the popped opener matches the current closer
            else:
if len(stack) == 0:
return False
last = stack.pop()
if s[i] == ")" and last != "(": return False
if s[i] == "]" and last != "[": return False
if s[i] == "}" and last != "{": return False
return len(stack) == 0
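if __name__ == "__main__":
    # Illustrative checks (not part of the original submission)
    solution = Solution()
    assert solution.isValid("()[]{}")
    assert not solution.isValid("(]")
    assert not solution.isValid("([)]")
    assert solution.isValid("")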
|
young-geng/leet_code
|
problems/20_valid-parentheses/main.py
|
Python
|
mit
| 920 | 0.003261 |
import pytest
from cleo.exceptions import LogicException
from cleo.exceptions import ValueException
from cleo.io.inputs.option import Option
def test_create():
opt = Option("option")
assert "option" == opt.name
assert opt.shortcut is None
assert opt.is_flag()
assert not opt.accepts_value()
assert not opt.requires_value()
assert not opt.is_list()
assert not opt.default
def test_dashed_name():
opt = Option("--option")
assert "option" == opt.name
def test_fail_if_name_is_empty():
with pytest.raises(ValueException):
Option("")
def test_fail_if_default_value_provided_for_flag():
with pytest.raises(LogicException):
Option("option", flag=True, default="default")
def test_fail_if_wrong_default_value_for_list_option():
with pytest.raises(LogicException):
Option("option", flag=False, is_list=True, default="default")
def test_shortcut():
opt = Option("option", "o")
assert "o" == opt.shortcut
def test_dashed_shortcut():
opt = Option("option", "-o")
assert "o" == opt.shortcut
def test_multiple_shortcuts():
opt = Option("option", "-o|oo|-ooo")
assert "o|oo|ooo" == opt.shortcut
def test_fail_if_shortcut_is_empty():
with pytest.raises(ValueException):
Option("option", "")
def test_optional_value():
opt = Option("option", flag=False, requires_value=False)
assert not opt.is_flag()
assert opt.accepts_value()
assert not opt.requires_value()
assert not opt.is_list()
assert opt.default is None
def test_optional_value_with_default():
opt = Option("option", flag=False, requires_value=False, default="Default")
assert not opt.is_flag()
assert opt.accepts_value()
assert not opt.requires_value()
assert not opt.is_list()
assert opt.default == "Default"
def test_required_value():
opt = Option("option", flag=False, requires_value=True)
assert not opt.is_flag()
assert opt.accepts_value()
assert opt.requires_value()
assert not opt.is_list()
assert opt.default is None
def test_required_value_with_default():
opt = Option("option", flag=False, requires_value=True, default="Default")
assert not opt.is_flag()
assert opt.accepts_value()
assert opt.requires_value()
assert not opt.is_list()
assert "Default" == opt.default
def test_list():
opt = Option("option", flag=False, is_list=True)
assert not opt.is_flag()
assert opt.accepts_value()
assert opt.requires_value()
assert opt.is_list()
assert [] == opt.default
def test_multi_valued_with_default():
opt = Option("option", flag=False, is_list=True, default=["foo", "bar"])
assert not opt.is_flag()
assert opt.accepts_value()
assert opt.requires_value()
assert opt.is_list()
assert ["foo", "bar"] == opt.default
|
sdispater/cleo
|
tests/io/inputs/test_option.py
|
Python
|
mit
| 2,858 | 0 |
from ctypes.util import find_library
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.db.backends.sqlite3.base import (
DatabaseWrapper as SQLiteDatabaseWrapper, SQLiteCursorWrapper,
)
from .client import SpatiaLiteClient
from .features import DatabaseFeatures
from .introspection import SpatiaLiteIntrospection
from .operations import SpatiaLiteOperations
from .schema import SpatialiteSchemaEditor
class DatabaseWrapper(SQLiteDatabaseWrapper):
SchemaEditorClass = SpatialiteSchemaEditor
# Classes instantiated in __init__().
client_class = SpatiaLiteClient
features_class = DatabaseFeatures
introspection_class = SpatiaLiteIntrospection
ops_class = SpatiaLiteOperations
def __init__(self, *args, **kwargs):
# Trying to find the location of the SpatiaLite library.
# Here we are figuring out the path to the SpatiaLite library
# (`libspatialite`). If it's not in the system library path (e.g., it
# cannot be found by `ctypes.util.find_library`), then it may be set
# manually in the settings via the `SPATIALITE_LIBRARY_PATH` setting.
self.spatialite_lib = getattr(settings, 'SPATIALITE_LIBRARY_PATH',
find_library('spatialite'))
if not self.spatialite_lib:
raise ImproperlyConfigured('Unable to locate the SpatiaLite library. '
'Make sure it is in your library path, or set '
'SPATIALITE_LIBRARY_PATH in your settings.'
)
super(DatabaseWrapper, self).__init__(*args, **kwargs)
def get_new_connection(self, conn_params):
conn = super(DatabaseWrapper, self).get_new_connection(conn_params)
# Enabling extension loading on the SQLite connection.
try:
conn.enable_load_extension(True)
except AttributeError:
raise ImproperlyConfigured(
'SpatiaLite requires SQLite to be configured to allow '
'extension loading.'
)
# Loading the SpatiaLite library extension on the connection, and returning
# the created cursor.
cur = conn.cursor(factory=SQLiteCursorWrapper)
try:
cur.execute("SELECT load_extension(%s)", (self.spatialite_lib,))
except Exception as exc:
raise ImproperlyConfigured(
'Unable to load the SpatiaLite library extension "%s"' % self.spatialite_lib
) from exc
cur.close()
return conn
def prepare_database(self):
super(DatabaseWrapper, self).prepare_database()
# Check if spatial metadata have been initialized in the database
with self.cursor() as cursor:
cursor.execute("PRAGMA table_info(geometry_columns);")
if cursor.fetchall() == []:
arg = "1" if self.features.supports_initspatialmetadata_in_one_transaction else ""
cursor.execute("SELECT InitSpatialMetaData(%s)" % arg)
|
mattseymour/django
|
django/contrib/gis/db/backends/spatialite/base.py
|
Python
|
bsd-3-clause
| 3,105 | 0.001932 |
# Xlib.__init__ -- glue for Xlib package
#
# Copyright (C) 2000-2002 Peter Liljenberg <petli@ctrl-c.liu.se>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation; either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc.,
# 59 Temple Place,
# Suite 330,
# Boston, MA 02111-1307 USA
__version__ = (0, 31)
__version_extra__ = ''
__version_string__ = '.'.join(map(str, __version__)) + __version_extra__
__all__ = [
'X',
'XK',
'Xatom',
'Xcursorfont',
'Xutil',
'display',
'error',
'rdb',
# Explicitly exclude threaded, so that it isn't imported by
# from Xlib import *
]
|
python-xlib/python-xlib
|
Xlib/__init__.py
|
Python
|
lgpl-2.1
| 1,184 | 0 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2016 Bitergia
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# Authors:
# Santiago Dueñas <sduenas@bitergia.com>
# Alvaro del Castillo San Felix <acs@bitergia.com>
#
import logging
import pickle
import rq
from .common import CH_PUBSUB
logger = logging.getLogger(__name__)
class ArthurWorker(rq.Worker):
"""Worker class for Arthur"""
def __init__(self, queues, **kwargs):
super().__init__(queues, **kwargs)
self.__pubsub_channel = CH_PUBSUB
@property
def pubsub_channel(self):
return self.__pubsub_channel
@pubsub_channel.setter
def pubsub_channel(self, value):
self.__pubsub_channel = value
def perform_job(self, job, queue):
"""Custom method to execute a job and notify of its result
:param job: Job object
:param queue: the queue containing the object
"""
result = super().perform_job(job, queue)
job_status = job.get_status()
job_result = job.return_value if job_status == 'finished' else None
data = {
'job_id': job.id,
'status': job_status,
'result': job_result
}
msg = pickle.dumps(data)
self.connection.publish(self.pubsub_channel, msg)
return result
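# Illustrative sketch (not part of the original module): a consumer for the
# result notifications published by ``ArthurWorker.perform_job``; assumes
# ``redis_conn`` is a redis-py connection object.
def _example_result_listener(redis_conn):
    pubsub = redis_conn.pubsub()
    pubsub.subscribe(CH_PUBSUB)
    for message in pubsub.listen():
        if message['type'] != 'message':
            continue
        data = pickle.loads(message['data'])
        logger.info("job %s finished with status %s",
                    data['job_id'], data['status'])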
|
grimoirelab/arthur
|
arthur/worker.py
|
Python
|
gpl-3.0
| 1,980 | 0 |
from django.conf import settings
def mask_toggle(number_to_mask_or_unmask):
return int(number_to_mask_or_unmask) ^ settings.MASKING_KEY
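# Illustrative sketch (not part of the original module): XOR with the same key
# is its own inverse, so applying ``mask_toggle`` twice restores the original
# number; assumes ``settings.MASKING_KEY`` is an integer.
def _example_mask_roundtrip(value=12345):
    masked = mask_toggle(value)
    return mask_toggle(masked) == value  # True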
|
shafiquejamal/socialassistanceregistry
|
nr/nr/formulas.py
|
Python
|
bsd-3-clause
| 137 | 0.021898 |
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
import unittest
import frappe
from frappe.utils import cstr, flt, nowdate, random_string
from erpnext.hr.doctype.employee.test_employee import make_employee
from erpnext.hr.doctype.vehicle_log.vehicle_log import make_expense_claim
class TestVehicleLog(unittest.TestCase):
def setUp(self):
employee_id = frappe.db.sql("""select name from `tabEmployee` where name='testdriver@example.com'""")
self.employee_id = employee_id[0][0] if employee_id else None
if not self.employee_id:
self.employee_id = make_employee("testdriver@example.com", company="_Test Company")
self.license_plate = get_vehicle(self.employee_id)
def tearDown(self):
frappe.delete_doc("Vehicle", self.license_plate, force=1)
frappe.delete_doc("Employee", self.employee_id, force=1)
def test_make_vehicle_log_and_syncing_of_odometer_value(self):
vehicle_log = make_vehicle_log(self.license_plate, self.employee_id)
#checking value of vehicle odometer value on submit.
vehicle = frappe.get_doc("Vehicle", self.license_plate)
self.assertEqual(vehicle.last_odometer, vehicle_log.odometer)
#checking value vehicle odometer on vehicle log cancellation.
last_odometer = vehicle_log.last_odometer
current_odometer = vehicle_log.odometer
distance_travelled = current_odometer - last_odometer
vehicle_log.cancel()
vehicle.reload()
self.assertEqual(vehicle.last_odometer, current_odometer - distance_travelled)
vehicle_log.delete()
def test_vehicle_log_fuel_expense(self):
vehicle_log = make_vehicle_log(self.license_plate, self.employee_id)
expense_claim = make_expense_claim(vehicle_log.name)
fuel_expense = expense_claim.expenses[0].amount
self.assertEqual(fuel_expense, 50*500)
vehicle_log.cancel()
frappe.delete_doc("Expense Claim", expense_claim.name)
frappe.delete_doc("Vehicle Log", vehicle_log.name)
def test_vehicle_log_with_service_expenses(self):
vehicle_log = make_vehicle_log(self.license_plate, self.employee_id, with_services=True)
expense_claim = make_expense_claim(vehicle_log.name)
expenses = expense_claim.expenses[0].amount
self.assertEqual(expenses, 27000)
vehicle_log.cancel()
frappe.delete_doc("Expense Claim", expense_claim.name)
frappe.delete_doc("Vehicle Log", vehicle_log.name)
def get_vehicle(employee_id):
license_plate=random_string(10).upper()
vehicle = frappe.get_doc({
"doctype": "Vehicle",
"license_plate": cstr(license_plate),
"make": "Maruti",
"model": "PCM",
"employee": employee_id,
"last_odometer": 5000,
"acquisition_date": nowdate(),
"location": "Mumbai",
"chassis_no": "1234ABCD",
"uom": "Litre",
"vehicle_value": flt(500000)
})
try:
vehicle.insert()
except frappe.DuplicateEntryError:
pass
return license_plate
def make_vehicle_log(license_plate, employee_id, with_services=False):
vehicle_log = frappe.get_doc({
"doctype": "Vehicle Log",
"license_plate": cstr(license_plate),
"employee": employee_id,
"date": nowdate(),
"odometer": 5010,
"fuel_qty": flt(50),
"price": flt(500)
})
if with_services:
vehicle_log.append("service_detail", {
"service_item": "Oil Change",
"type": "Inspection",
"frequency": "Mileage",
"expense_amount": flt(500)
})
vehicle_log.append("service_detail", {
"service_item": "Wheels",
"type": "Change",
"frequency": "Half Yearly",
"expense_amount": flt(1500)
})
vehicle_log.save()
vehicle_log.submit()
return vehicle_log
|
mhbu50/erpnext
|
erpnext/hr/doctype/vehicle_log/test_vehicle_log.py
|
Python
|
gpl-3.0
| 3,526 | 0.025241 |