Columns: repo_name (string, 5–100 chars) | path (string, 4–231 chars) | language (string, 1 distinct value) | license (string, 15 distinct values) | size (int64, 6–947k) | score (float64, 0–0.34) | prefix (string, 0–8.16k chars) | middle (string, 3–512 chars) | suffix (string, 0–8.17k chars)

Each row below is listed as repo_name | path | language | license | size | score, followed by the row's file text (prefix + middle + suffix).
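Each row is a fill-in-the-middle example: concatenating the three text columns reconstructs the sampled source span. A minimal sketch, assuming rows are parsed into dicts keyed by the column names above:

def reassemble(row):
    # prefix + middle + suffix restores the original file excerpt
    return row["prefix"] + row["middle"] + row["suffix"]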
looker/sentry | src/sentry/lang/native/systemsymbols.py | Python | bsd-3-clause | 1,526 | 0.000655

from __future__ import absolute_import
import logging
from requests.exceptions import RequestException
from sentry import options
from sentry.http import Session
from sentry.lang.native.utils import sdk_info_to_sdk_id
MAX_ATTEMPTS = 3
logger = logging.getLogger(__name__)
def lookup_system_symbols(symbols, sdk_info=None, cpu_name=None):
"""Looks for system symbols in the configured system server if
enabled. If this failes o
|
r the server is disabled, `None` is
returned.
"""
if not options.get('symbolserver.enabled'):
return
url = '%s/lookup' % options.get('symbolserver.options')['url'].rstrip('/')
sess = Session()
symbol_query = {
'sdk_id': sdk_info_to_sdk_id(sdk_info),
'cpu_name': cpu_name,
'symbols': symbols,
}
attempts = 0
with sess:
while 1:
try:
rv = sess.post(url, json=symbol_query)
# If the symbols server does not know about the SDK at all
# it will report a 404 here. In that case just assume
# that we did not find a match and do not retry.
if rv.status_code == 404:
return None
rv.raise_for_status()
return rv.json()['symbols']
except (IOError, RequestException):
attempts += 1
if attempts > MAX_ATTEMPTS:
logger.error('Failed to contact system symbol server', exc_info=True)
return
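A minimal usage sketch for the function above; the symbol dicts and field names are illustrative assumptions, since the real payload shape is defined by the symbol server and Sentry options, not shown in this file:

# Hypothetical symbol entries; the field names here are assumptions.
symbols = [{'object_name': 'CoreFoundation', 'symbol_addr': 0x1000}]
matches = lookup_system_symbols(symbols, sdk_info=None, cpu_name='arm64')
if matches is None:
    pass  # symbol server disabled, unknown SDK, or unreachable after retries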

getodacu/eSENS-eDocument | profiles/e_confirmation/xb_request/_qdt.py | Python | mit | 3,119 | 0.003847

# ./_qdt.py
# -*- coding: utf-8 -*-
# PyXB bindings for NM:763e66503f6e9797a3b5522270417bad82c9c82c
# Generated 2015-02-11 21:35:49.975995 by PyXB version 1.2.4 using Python 2.6.9.final.0
# Namespace urn:oasis:names:specification:ubl:schema:xsd:QualifiedDataTypes-2 [xmlns:qdt]
from __future__ import unicode_literals
import pyxb
import pyxb.binding
import pyxb.binding.saxer
import io
import pyxb.utils.utility
import pyxb.utils.domutils
import sys
import pyxb.utils.six as _six
# Unique identifier for bindings created at the same time
_GenerationUID = pyxb.utils.utility.UniqueIdentifier('urn:uuid:2b2e2fd1-b225-11e4-b26c-14109fe53921')
# Version of PyXB used to generate the bindings
_PyXBVersion = '1.2.4'
# Generated bindings are not compatible across PyXB versions
if pyxb.__version__ != _PyXBVersion:
raise pyxb.PyXBVersionError(_PyXBVersion)
# Import bindings for namespaces imported into schema
# NOTE: All namespace declarations are reserved within the binding
Namespace = pyxb.namespace.NamespaceForURI('urn:oasis:names:specification:ubl:schema:xsd:QualifiedDataTypes-2', create_if_missing=True)
Namespace.configureCategories(['typeBinding', 'elementBinding'])
def CreateFromDocument (xml_text, default_namespace=None, location_base=None):
"""Parse the given XML and use the document element to create a
Python instance.
@param xml_text An XML document. This should be data (Python 2
str or Python 3 bytes), or a text (Python 2 unicode or Python 3
str) in the L{pyxb._InputEncoding} encoding.
@keyword default_namespace The L{pyxb.Namespace} instance to use as the
default namespace where there is no default namespace in scope.
If unspecified or C{None}, the namespace of the module containing
this function will be used.
@keyword location_base: An object to be recorded as the base of all
L{pyxb.utils.utility.Location} instances associated with events and
objects handled by the parser. You might pass the URI from which
the document was obtained.
"""
if pyxb.XMLStyle_saxer != pyxb._XMLStyle:
dom = pyxb.utils.domutils.StringToDOM(xml_text)
return CreateFromDOM(dom.documentElement, default_namespace=default_namespace)
if default_namespace is None:
default_namespace = Namespace.fallbackNamespace()
saxer = pyxb.binding.saxer.make_parser(fallback_namespace=default_namespace, location_base=location_base)
handler = saxer.getContentHandler()
xmld = xml_text
if isinstance(xmld, _six.text_type):
xmld = xmld.encode(pyxb._InputEncoding)
saxer.parse(io.BytesIO(xmld))
instance = handler.rootObject()
return instance
def CreateFromDOM (node, default_namespace=None):
"""Create a Python instance from the given DOM node.
The node tag must correspond to an element declaration in this module.
@deprecated: Forcing use of DOM interface is unnecessary; use L{CreateFromDocument}."""
if default_namespace is None:
default_namespace = Namespace.fallbackNamespace()
return pyxb.binding.basis.element.AnyCreateFromDOM(node, default_namespace)
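A minimal usage sketch, assuming the rest of the generated module (the element bindings for this namespace) is present, which this excerpt elides:

with open('request.xml', 'rb') as f:  # hypothetical input document
    instance = CreateFromDocument(f.read())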

sswaner/twilio-aws | resources/lambda_functions/sms_message_handler.py | Python | gpl-3.0 | 671 | 0.011923

import boto3
import logging
logger = logging.getLogger()
logger.setLevel(logging.INFO)
def lambda_handler(event, context):
logger.info("RECEIVED EVENT: %s"%( str( event )) )
params = event['params']
sid = params['MessageSid']
from_number = params['From']
to_number = params['To']
body = params['Body']
logger.info("RECEIVED MESSAGE SID: %s, FROM: %s, TO: %s, BODY: %s" % ( sid, from_number, to_number, body))
client = boto3.client('dynamodb')
client.put_item(TableName="sms_messages", Item={
"sid": {'S': sid},
"from": {'S': from_number},
"to": {'S': to_number},
"body": {'S': body}})
return ""

paihu/moebox | admin.py | Python | mit | 124 | 0

from django.contrib import admin
# Register your models here.
from .models import Uploader
admin.site.register(Uploader)

Julian/cardboard | cardboard/types.py | Python | mit | 4,144 | 0.001689

artifact = u"Artifact"
creature = u"Creature"
enchantment = u"Enchantment"
land = u"Land"
planeswalker = u"Planeswalker"
instant = u"Instant"
sorcery = u"Sorcery"
permanents = frozenset({artifact, creature, enchantment, land, planeswalker})
nonpermanents = frozenset({instant, sorcery})
all = permanents | nonpermanents
unimplemented = frozenset({u"Plane", u"Scheme", u"Tribal", u"Vanguard"})
supertypes = frozenset({u"Basic", u"Legendary", u"Ongoing", u"Snow", u"World"})
subtypes = {
artifact : frozenset({u"Contraption", u"Equipment", u"Fortification"}),
creature : frozenset({
u"Advisor", u"Ally", u"Angel", u"Anteater", u"Antelope", u"Ape",
u"Archer", u"Archon", u"Artificer", u"Assassin", u"Assembly-Worker",
u"Atog", u"Aurochs", u"Avatar", u"Badger", u"Barbarian", u"Basilisk",
u"Bat", u"Bear", u"Beast", u"Beeble", u"Berserker", u"Bird",
u"Blinkmoth", u"Boar", u"Bringer", u"Brushwagg", u"Camarid", u"Camel",
u"Caribou", u"Carrier", u"Cat", u"Centaur", u"Cephalid", u"Chimera",
u"Citizen", u"Cleric", u"Cockatrice", u"Construct", u"Coward", u"Crab",
u"Crocodile", u"Cyclops", u"Dauthi", u"Demon", u"Deserter", u"Devil",
u"Djinn", u"Dragon", u"Drake", u"Dreadnought", u"Drone", u"Druid",
u"Dryad", u"Dwarf", u"Efreet", u"Elder", u"Eldrazi", u"Elemental",
u"Elephant", u"Elf", u"Elk", u"Eye", u"Faerie", u"Ferret", u"Fish",
u"Flagbearer", u"Fox", u"Frog", u"Fungus", u"Gargoyle", u"Germ",
u"Giant", u"Gnome", u"Goat", u"Goblin", u"Golem", u"Gorgon",
u"Graveborn", u"Gremlin", u"Griffin", u"Hag", u"Harpy", u"Hellion",
u"Hippo", u"Hippogriff", u"Homarid", u"Homunculus", u"Horror",
u"Horse", u"Hound", u"Human", u"Hydra", u"Hyena", u"Illusion", u"Imp",
u"Incarnation", u"Insect", u"Jellyfish", u"Juggernaut", u"Kavu",
u"Kirin", u"Kithkin", u"Knight", u"Kobold", u"Kor", u"Kraken",
u"Lammasu", u"Leech", u"Leviathan", u"Lhurgoyf", u"Licid", u"Lizard",
u"Manticore", u"Masticore", u"Mercenary", u"Merfolk", u"Metathran",
u"Minion", u"Minotaur", u"Monger", u"Mongoose", u"Monk", u"Moonfolk",
u"Mutant", u"Myr", u"Mystic", u"Nautilus", u"Nephilim", u"Nightmare",
u"Nightstalker", u"Ninja", u"Noggle", u"Nomad", u"Octopus", u"Ogre",
u"Ooze", u"Orb", u"Orc", u"Orgg", u"Ouphe", u"Ox", u"Oyster",
u"Pegasus", u"Pentavite", u"Pest", u"Phelddagrif", u"Phoenix",
u"Pincher", u"Pirate", u"Plant", u"Praetor", u"Prism", u"Rabbit",
u"Rat", u"Rebel", u"Reflection", u"Rhino", u"Rigger", u"Rogue",
u"Salamander", u"Samurai", u"Sand", u"Saproling", u"Satyr",
u"Scarecrow", u"Scorpion", u"Scout", u"Serf", u"Serpent", u"Shade",
u"Shaman", u"Shapeshifter", u"Sheep", u"Siren", u"Skeleton", u"Slith",
u"Sliver", u"Slug", u"Snake", u"Soldier", u"Soltari", u"Spawn",
u"Specter", u"Spellshaper", u"Sphinx", u"Spider", u"Spike", u"Spirit",
u"Splinter", u"Sponge", u"Squid", u"Squirrel", u"Starfish",
u"Surrakar", u"Survivor", u"Tetravite", u"Thalakos", u"Thopter",
u"Thrull", u"Treefolk", u"Triskelavite", u"Troll", u"Turtle",
u"Unicorn", u"Vampire", u"Vedalken", u"Viashino", u"Vo
|
lver", u"Wall",
u"Warrior", u"Weird", u"Werewolf", u"Whale", u"Wizard", u"Wolf",
u"Wolverine", u"Wombat", u"Worm", u"Wraith", u"Wurm", u"Yeti",
u"Zombie", u"Zubera"
}),
enchantment : frozenset({u"Aura", u"Curse", u"Shrine"}),
instant : frozenset({u"Arcane", u"Trap"}),
u"Basic Land" : frozenset({
u"Forest", u"Island", u"Mountain", u"Plains", u"Swamp"
}),
u"Non-Basic Land" : frozenset({
u"Desert", u"Lair", u"Locus", u"Mine",
u"Power-Plant", u"Tower", u"Urza's"
}),
planeswalker : frozenset({
u"Ajani", u"Bolas", u"Chandra", u"Elspeth", u"Garruk", u"Gideon",
u"Jace", u"Karn", u"Koth", u"Liliana", u"Nissa", u"Sarkhan", u"Sorin",
u"Tezzeret", u"Venser"
}),
}
subtypes[sorcery] = subtypes[instant]
subtypes[land] = subtypes[u"Basic Land"] | subtypes[u"Non-Basic Land"]
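A quick sketch of how these sets compose:

assert creature in permanents and instant in nonpermanents
assert u"Aura" in subtypes[enchantment]
assert u"Arcane" in subtypes[sorcery]  # sorcery shares the instant subtypes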

rozap/arb | src/api/campbx.py | Python | mit | 3,615 | 0.00083

import urllib2
import base64
import simplejson as json
import logging
from urllib import urlencode
from functools import partial
log = logging.getLogger(__name__)
log_formatter = logging.Formatter('%(name)s - %(message)s')
log_handler = logging.StreamHandler()
log_handler.setFormatter(log_formatter)
log.addHandler(log_handler)
log.setLevel(logging.ERROR)
opener = urllib2.build_opener()
opener.addheaders = [('User-agent', 'Mozilla/5.0')]
urllib2.install_opener(opener)
class EndPointPartial(partial):
def __new__(cls, func, conf, _repr):
cls._repr = _repr
return super(EndPointPartial, cls).__new__(cls, func, conf)
def __repr__(self):
return unicode('<API endpoint %s>' % self._repr)
class CampBX(object):
"""Camp BX API Class"""
username = None
password = None
api_url = 'https://campbx.com/api/'
log = None
# API endpoints
# { python_call : (url_php_call, requires_auth) }
endpoints = {
'xdepth': ('xdepth', False),
'xticker': ('xticker', False),
'my_funds': ('myfunds', True),
'my_orders': ('myorders', True),
'my_margins': ('mymargins', True),
'get_btc_address': ('getbtcaddr', True),
'send_instant': ('sendinstant', True),
'send_btc': ('sendbtc', True),
'trade_cancel': ('tradecancel', True),
'trade_enter': ('tradeenter', True),
'trade_advanced': ('tradeadv', True),
}
def __init__(self, username=None, password=None):
self.username = username
self.password = password
# setup logging
self.log = log
# append all the endpoints to the class dictionary
self._create_endpoints()
def debug_mode(self, toggle):
"""
Toggle debug mode for more detailed output
obj.debug_mode(True) - Turn debug mode on
obj.debug_mode(False) - Turn debug mode off
"""
if toggle:
self.log.setLevel(logging.DEBUG)
else:
self.log.setLevel(logging.ERROR)
def _make_request(self, conf, post_params={}):
"""Make a request to the API and return data in a pythonic object"""
endpoint, requires_auth = conf
# setup the url and the request objects
url = '%s%s.php' % (self.api_url, endpoint)
log.debug('Setting url to %s' % url)
request = urllib2.Request(url)
request.add_header('User-Agent', 'Mozilla/5.0')
# tack on authentication if needed
log.debug('Post params: %s' % post_params)
if requires_auth:
post_params.update({
'user': self.username,
'pass': self.password
})
# url encode all parameters
data = urlencode(post_params)
# gimme some bitcoins!
try:
log.debug('Requesting data from %s' % url)
response = urllib2.urlopen(request, data)
return json.loads(response.read())
except urllib2.URLError, e:
log.debug('Full error: %s' % e)
if hasattr(e, 'reason'):
self.log.error('Could not reach host. Reason: %s' % e.reason)
elif hasattr(e, 'code'):
self.log.error('Could not fulfill request. Error Code: %s' % e.code)
return None
def _create_endpoints(self):
"""Create all api endpoints using self.endpoint and partial from functools"""
for k, v in self.endpoints.items():
_repr = '%s.%s' % (self.__class__.__name__, k)
self.__dict__[k] = EndPointPartial(self._make_request, v, _repr)
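A hedged usage sketch for the class above; the credentials are placeholders, and the endpoint methods exist because _create_endpoints() attaches one partial per entry in the endpoints table:

bx = CampBX('user', 'secret')  # placeholder credentials
ticker = bx.xticker()          # public endpoint, no auth required
funds = bx.my_funds()          # authenticated endpoint, sends user/pass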

adriansnetlis/bgmc16minosaur | bgmc16minosaur/Assets/Scripts/LightLOD.py | Python | gpl-2.0 | 659 | 0.018209

#This script is made by cotax
#cotax is blenderartists.org user's nickname.
#1. Place a lamp in the scene and put its energy to 0.0
#2. Connect this script to the lamp, always(true)- python
#- Add a property: energy(float) to the lamp
#- Add a property: distance(integer) to the lamp
#Set the energy and distance to your likings
from bge import logic
own = logic.getCurrentController().owner
cam = own.scene.active_camera
#get the distance and energy from the light
distance = own['distance']
energy = own['energy']
#check distance and set the energy
if own.getDistanceTo(cam) < distance:
own.energy = energy
else:
own.energy = 0.0

PaulMcMillan/2014_defcon_timing | hue/vis6.py | Python | bsd-2-clause | 1,918 | 0.001564

import matplotlib.pyplot as plt
from collections import defaultdict
from itertools import combinations
from pprint import pprint
from scipy import stats
import random
from itertools import chain
import results
def choose_points(qr_list):
return [d.total_response() - getattr(d, 'median', 0) for d in qr_list]
def check_data(data, p_threshold=0.01):
""" combinatoric KS, add hits """
data = data.sample(5001)
# data.median_filter(choose_points)
data_roundup = defaultdict(int)
for k1, k2 in combinations(data.keys(), 2):
# DON'T EVER USE A SAMPLE SIZE THAT IS A MULTIPLE OF 100
d, p = stats.ks_2samp(choose_points(data[k1]),
choose_points(data[k2]))
print k1, k2, d, p
if p < p_threshold:
data_roundup[k1] += 1
data_roundup[k2] += 1
return dict(data_roundup)
data = results.read_data(bucket=r'^/api/\w{3}(\w)\w{6}/config$',
data_dir='more_recent_data')
data_dir='more_recent_data')
pprint(check_data(data))
exit()
correct = 0
incorrect = 0
unclear = 0
shortened = []
shorten_error = 0
ANSWER = '0'
for x in range(1000):
print "Iteration: ", x
res = check_data(data)
if not res:
unclear += 1
continue
if ANSWER not in res.keys() and max(res.values()) >= 4:
pprint(res)
print "shorten error"
shorten_error += 1
if max(res.values()) >= 4 and len(res.values()) < 8:
shortened.append(8 - len(res.values()))
sri = sorted(res.items(), key=lambda x: -x[1])
pprint(sri)
if sri[0][0] == ANSWER and sri[0][1] <= sri[1][1] + 2 and sri[0][1] <= 5:
unclear += 1
elif sri[0][0] == ANSWER:
correct += 1
else:
incorrect += 1
print correct, incorrect, float(correct)/(incorrect + correct) * 100.0
print "shorten error ", shorten_error, " unclear: ", unclear
print "Shortened: ", len(shortened), shortened

a5kin/evolife | evolife.py | Python | mit | 16,479 | 0.006554

#!/usr/bin/env python
"""
EvoLife Cellular Automaton implementation using CUDA.
Rules are:
- Each living cell has its own birth/sustain ruleset and an energy level;
- A cell loses all energy if its number of neighbours is not in its sustain rule;
- A cell is born with max energy if there are exactly N neighbours with N in their birth rule;
- The same applies to living cells (re-occupation case), but only with different genomes;
- If several birth situations with different N are possible, we choose the one with the larger N;
- A newly born cell's ruleset is calculated as a crossover between the 'parent' cells' rulesets;
- If a cell is involved in breeding as a 'parent', it loses `BIRTH_COST` units of energy for each non-zero gene passed;
- This doesn't apply in the re-occupation case;
- Every turn, a cell loses `DEATH_SPEED` units of energy;
- A cell with zero energy dies;
- A cell cannot have more than `MAX_GENES` non-zero genes in its ruleset.
An additional rule: the board has torus topology.
So, if all cells initially have the B3/S23 ruleset, DEATH_SPEED = BIRTH_COST = 0, and MAX_GENES >= 3, we have exact Conway rules.
But if there were more than one ruleset initially, evolution may begin.
There are 2^18 possible rulesets, only a small fraction of which have been
studied in any detail. So, who knows what we may discover with evolutionary rules :)
CONTROLS:
Arrows move field
+/- zoom in/out
]/[ speed up/down
F toggle fullscreen
S dump board state to a file
Q/ESC quit
Prerequisites: pycuda, numpy, scipy, pygame, scikit-image
Debian: apt-get install python-pycuda python-numpy python-pygame python-scipy python-setuptools
Author: a5kin
Copyright: MIT License.
"""
import sys, time, math, colorsys, random, traceback
import pygame
from pygame.locals import *
import numpy as np
from scipy.misc import imsave
import scipy.ndimage.interpolation
from skimage import transform as tf
import importlib
import pycuda.driver as drv
import pycuda.tools
import pycuda.autoinit
from pycuda.compiler import SourceModule
import pycuda.gpuarray as gpuarray
from pycuda.elementwise import ElementwiseKernel
try:
expmod = importlib.import_module('experiments.' + sys.argv[1])
DEATH_SPEED = expmod.DEATH_SPEED
BIRTH_COST = expmod.BIRTH_COST
MAX_GENES = expmod.MAX_GENES
FIELD_WIDTH = expmod.FIELD_WIDTH
FIELD_HEIGHT = expmod.FIELD_HEIGHT
SAVE_FRAMES = expmod.SAVE_FRAMES
DOWNSCALE_FACTOR = expmod.DOWNSCALE_FACTOR
FRAME_SKIP = expmod.FRAME_SKIP
RANDOM_SEED = expmod.RANDOM_SEED
fld_init = expmod.fld_init
except:
print "No experiment preset found, loading default (big_bang)."
DEATH_SPEED = 0
BIRTH_COST = 0
MAX_GENES = 9
FIELD_WIDTH = 1280
FIELD_HEIGHT = 720
SAVE_FRAMES = False
DOWNSCALE_FACTOR = 1
FRAME_SKIP = 1
RANDOM_SEED = None
def fld_init(a):
return np.asarray([[(random.choice([0, 1]) * random.randint(0, 256*512) if (i < 100 and j < 100) else 0) for j in range(a.height)] for i in range(a.width)]).astype(np.int32)
step_gpu = ElementwiseKernel("unsigned int *fld, unsigned int *fld_new, unsigned int *seeds, unsigned int *img, int w, int h", """
int x = i / h;
int y = i % h;
// torus topology emulation
int xm1 = x - 1; if (xm1 < 0) xm1 = w + xm1;
int xp1 = x + 1; if (xp1 >= w) xp1 = xp1 - w;
int ym1 = y - 1; if (ym1 < 0) ym1 = h + ym1;
int yp1 = y + 1; if (yp1 >= h) yp1 = yp1 - h;
// cache neighbours values
uint f0 = fld[i];
uint f1 = fld[xm1 * h + ym1];
uint f2 = fld[x * h + ym1];
uint f3 = fld[xp1 * h + ym1];
uint f4 = fld[xm1 * h + y];
uint f5 = fld[xp1 * h + y];
uint f6 = fld[xm1 * h + yp1];
uint f7 = fld[x * h + yp1];
uint f8 = fld[xp1 * h + yp1];
uint img0 = img[i];
uint energy = (f0 >> 17);
// total number of neighbours
int N = EXISTS(f1) + EXISTS(f2) + EXISTS(f3) + EXISTS(f4) +
EXISTS(f5) + EXISTS(f6) + EXISTS(f7) + EXISTS(f8);
if (energy >= 0xff || f0 > 0 && (((f0 >> 8) & (1 << N)) == 0)) {
// cell is dying
fld_new[i] = 0;
img[i] = fadeout(img0, 5);
} else {
uint f00 = f0;
for (int ni = 8; ni > 0; ni--) {
// cache neighbours breeding fitnesses
int ff1 = FIT(f1, ni);
int ff2 = FIT(f2, ni);
int ff3 = FIT(f3, ni);
int ff4 = FIT(f4, ni);
int ff5 = FIT(f5, ni);
int ff6 = FIT(f6, ni);
int ff7 = FIT(f7, ni);
int ff8 = FIT(f8, ni);
if (ff1 + ff2 + ff3 + ff4 + ff5 + ff6 + ff7 + ff8 == ni) {
// neighbours able to breed, cell is born
f0 = 0;
int gene_num = 0;
// crossover breed of parents in deterministic (but heterogeneous) way
int seed = seeds[i]; //(((i * 58321) + 11113)) % 8;
int genes_count = {2};
int gene;
while (gene_num < 17) {
if (seed == 0 && ff1 && gene_num < 17) {
gene = (1 << (gene_num + seed) % 17) & f1;
f0 += gene;
gene_num++;
if (f00 == 0 && gene > 0) fld_new[xm1 * h + ym1] += ({1} << 17);
if (gene > 0) genes_count--;
if (genes_count <= 0) break;
}
if (seed > 0) seed--;
if (seed == 0 && ff2 && gene_num < 17) {
gene = (1 << (gene_num + seed) % 17) & f2;
f0 += gene;
gene_num++;
if (f00 == 0 && gene > 0) fld_new[x * h + ym1] += ({1} << 17);
if (gene > 0) genes_count--;
if (genes_count <= 0) break;
}
if (seed > 0) seed--;
if (seed == 0 && ff3 && gene_num < 17) {
gene = (1 << (gene_num + seed) % 17) & f3;
f0 += gene;
gene_num++;
if (f00 == 0 && gene > 0) fld_new[xp1 * h + ym1] += ({1} << 17);
if (gene > 0) genes_count--;
if (genes_count <= 0) break;
}
if (seed > 0) seed--;
if (seed == 0 && ff4 && gene_num < 17) {
gene = (1 << (gene_num + seed) % 17) & f4;
f0 += gene;
gene_num++;
if (f00 == 0 && gene > 0) fld_new[xm1 * h + y] += ({1} << 17);
if (gene > 0) genes_count--;
if (genes_count <= 0) break;
}
if (seed > 0) seed--;
if (seed == 0 && ff5 && gene_num < 17) {
gene = (1 << (gene_num + seed) % 17) & f5;
f0 += gene;
gene_num++;
if (f00 == 0 && gene > 0) fld_new[xp1 * h + y] += ({1} << 17);
if (gene > 0) genes_count--;
if (genes_count <= 0) break;
}
if (seed > 0) seed--;
if (seed == 0 && ff6 && gene_num < 17) {
gene = (1 << (gene_num + seed) % 17) & f6;
f0 += gene;
gene_num++;
if (f00 == 0 && gene > 0) fld_new[xm1 * h + yp1] += ({1} << 17);
if (gene > 0) genes_count--;
if (genes_count <= 0) break;
}
if (seed > 0) seed--;
if (seed == 0 && ff7 && gene_num < 17) {
gene = (1 << (gene_num + seed) % 17) & f7;
f0 += gene;
gene_num++;
if (f00 == 0 && gene > 0) fld_new[x * h + yp1] += ({1} << 17);
if (gene > 0) genes_count--;
if (genes_count <= 0) break;
}
if (see

xiangke/pycopia | mibs/pycopia/mibs/HP_SN_IGMP_MIB_OID.py | Python | lgpl-2.1 | 626 | 0.01278

# python
# This file is generated by a program (mib2py).
import HP_SN_IGMP_MIB
OIDMAP = {
'1.3.6.1.4.1.11.2.3.7.11.12.2.6.1': HP_SN_IGMP_MIB.snIgmpMIBObjects,
'1.3.6.1.4.1.11.2.3.7.11.12.2.6.1.1': HP_SN_IGMP_MIB.snIgmpQueryInterval,
'1.3.6.1.4.1.11.2.3.7.11.12.2.6.1.2': HP_SN_IGMP_MIB.snIgmpGroupMembershipTime,
'1.3.6.1.4.1.11.2.3.7.11.12.2.6.1.3.1.1': HP_SN_IGMP_MIB.snIgmpIfEntryIndex,
'1.3.6.1.4.1.11.2.3.7.11.12.2.6.1.3.1.2': HP_SN_IGMP_MIB.snIgmpIfPortNumber,
'1.3.6.1.4.1.11.2.3.7.11.12.2.6.1.3.1.3': HP_SN_IGMP_MIB.snIgmpIfGroupAddress,
'1.3.6.1.4.1.11.2.3.7.11.12.2.6.1.3.1.4': HP_SN_IGMP_MIB.snIgmpIfGroupAge,
}
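A small sketch of the typical lookup against this table:

binding = OIDMAP.get('1.3.6.1.4.1.11.2.3.7.11.12.2.6.1.1')  # OID -> MIB object
for oid in sorted(OIDMAP):
    print oid  # enumerate mapped OIDs (Python 2, matching the generated module)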

vollov/isda.ca | isda_backend/page/admin.py | Python | mit | 962 | 0.011435

from django.contrib import admin
from models import Page, TPage, Content, TContent
class PageAdmin(admin.ModelAdmin):
exclude = ['posted']
#fields = ['posted', 'title']
list_display = ('title', 'posted', 'slug')
prepopulated_fields = {'slug': ('title',)}
class TPageAdmin(admin.ModelAdmin):
list_display = ('title', 'language', 'page')
#prepopulated_fields = {'slug': ('title',)}
class ContentAdmin(admin.ModelAdmin):
exclude = ['posted']
#fields = ['posted', 'title']
list_display = ('code', 'posted', 'page')
prepopulated_fields = {'slug': ('code',)}
class TContentAdmin(admin.ModelAdmin):
#change_form_template = 'page/admin/change_form.html'
list_display = ('content', 'language', 'page')
#prepopulated_fields = {'slug': ('title',)}
admin.site.register(Page, PageAdmin)
admin.site.register(TPage, TPageAdmin)
admin.site.register(Content, ContentAdmin)
admin.site.register(TContent, TContentAdmin)

chrislit/usfm2osis | setup.py | Python | gpl-3.0 | 1,809 | 0

from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
def readfile(fn):
"""Read fn and return the contents."""
with open(path.join(here, fn), "r", encoding="utf-8") as f:
return f.read()
setup(
name="usfm2osis",
packages=find_packages(exclude=["tests*"]),
version="0.6.1",
description="Tools for converting Bibles from USFM to OSIS XML",
author="Christopher C. Little",
author_email="chrisclittle+usfm2osis@gmail.com",
url="https://github.com/chrislit/usfm2osis",
download_url="https://github.com/chrislit/usfm2osis/archive/master.zip",
keywords=["OSIS", "USFM", "Bible"],
license="GPLv3+",
zip_safe=False,
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Development Status :: 4 - Beta",
"License :: OSI Approved :: GNU General Public License v3 or later \
(GPLv3+)",
"Operating System :: OS Independent",
"Natural Language :: English",
"Intended Audience :: Religion",
"Intended Audience :: Developers",
"Topic :: Religion",
"Topic :: Text Processing :: Markup :: XML",
],
long_description="\n\n".join([readfile(f) for f in ("README.rst",)]),
# scripts=['scripts/usfm2osis', 'scripts/usfmtags'],
package_data={"usfm2osis": ["schemas/*.xsd"]},
entry_points={
"console_scripts": [
"usfm2osis = usfm2osis.scripts.usfm2osis:main",
"usfmtags = usfm2osis.scripts.us
|
fmtags:main",
]
},
)

Purg/SMQTK | python/smqtk/algorithms/nn_index/hash_index/linear.py | Python | bsd-3-clause | 3,373 | 0

import heapq
import os
import numpy
from smqtk.algorithms.nn_index.hash_index import HashIndex
from smqtk.utils.bit_utils import (
bit_vector_to_int_large,
int_to_bit_vector_large,
)
from smqtk.utils.metrics import hamming_distance
__author__ = "paul.tunison@kitware.com"
class LinearHashIndex (HashIndex):
"""
Basic linear index using heap sort (aka brute force).
"""
@classmethod
def is_usable(cls):
return True
def __init__(self, file_cache=None):
"""
Initialize linear, brute-force hash index
:param file_cache: Optional path to a file to cache our index to.
:type file_cache: str
"""
super(LinearHashIndex, self).__init__()
self.file_cache = file_cache
self.index = numpy.array([], bool)
self.load_cache()
def get_config(self):
return {
'file_cache': self.file_cache,
}
def load_cache(self):
"""
Load from file cache if we have one
"""
if self.file_cache and os.path.isfile(self.file_cache):
self.index = numpy.load(self.file_cache)
def save_cache(self):
"""
Save to file cache if configured.
"""
if self.file_cache:
numpy.save(self.file_cache, self.index)
def count(self):
return len(self.index)
def build_index(self, hashes):
"""
Build the index with the given hash codes (bit-vectors).
Subsequent calls to this method should rebuild the index, not add to
it, or raise an exception so as to protect the current index.
:raises ValueError: No data available in the given iterable.
:param hashes: Iterable of descriptor elements to build index
over.
:type hashes: collections.Iterable[numpy.ndarray[bool]]
"""
new_index = numpy.array(map(bit_vector_to_int_large, hashes))
if not new_index.size:
raise ValueError("No hashes given to index.")
self.index = new_index
self.save_cache()
def nn(self, h, n=1):
"""
Return the nearest `N` neighbors to the given hash code.
Distances are in the range [0,1] and are the percent different each
neighbor hash is from the query, based on the number of bits contained
in the query.
:param h: Hash code to compute the neighbors of. Should be the same bit
length as indexed hash codes.
:type h: numpy.ndarray[bool]
:param n: Number of nearest neighbors to find.
:type n: int
:raises ValueError: No index to query from.
:return: Tuple of nearest N hash codes and a tuple of the distance
values to those neighbors.
:rtype: (tuple[numpy.ndarray[bool]], tuple[float])
"""
super(LinearHashIndex, self).nn(h, n)
h_int = bit_vector_to_int_large(h)
bits = len(h)
#: :type: list[int|long]
near_codes = \
heapq.nsmallest(n, self.index,
lambda e: hamming_distance(h_int, e)
)
distances = map(hamming_distance, near_codes,
[h_int] * len(near_codes))
return [int_to_bit_vector_large(c, bits) for c in near_codes], \
[d / float(bits) for d in distances]
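A hedged usage sketch with toy 8-bit hash codes; the file cache is omitted, and it is assumed that HashIndex.nn() in the base class only validates its arguments:

import numpy
idx = LinearHashIndex()
codes = [numpy.array([b == '1' for b in format(v, '08b')], bool)
         for v in (0b00001111, 0b00001110, 0b11110000)]
idx.build_index(codes)
neighbors, dists = idx.nn(codes[0], n=2)  # nearest codes and [0,1] distances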

alixedi/django_authstrap | authstrap/urls.py | Python | bsd-3-clause | 1,659 | 0.002411

from django.conf.urls import patterns, url
from django.contrib.auth import views as auth_views
urlpatterns = patterns('',
url(r'^login/$',
auth_views.login,
{'template_name': 'authstrap/login.html'},
name='auth_login'),
url(r'^logout/$',
auth_views.logout,
{'template_name': 'authstrap/logout.html'},
name='auth_logout'),
url(r'^password/change/$',
auth_views.password_change,
{'template_name': 'authstrap/password_change_form.html'},
name='auth_password_change'),
url(r'^password/change/done/$',
auth_views.password_change_done,
{'template_name': 'authstrap/password_change_done.html'},
name='auth_password_change_done'),
url(r'^password/reset/$',
auth_views.password_reset,
{'template_name': 'authstrap/password_reset_form.html'},
name='auth_password_reset'),
url(r'^password/reset/confirm/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/$',
auth_views.password_reset_confirm,
{'template_name': 'authstrap/password_reset_confirm.html'},
name='auth_password_reset_confirm'),
url(r'^password/reset/complete/$',
auth_views.password_reset_complete,
{'template_name': 'authstrap/password_reset_complete.html'},
name='auth_password_reset_complete'),
url(r'^password/reset/done/$',
auth_views.password_reset_done,
{'template_name': 'authstrap/password_change_done.html'},
name='auth_password_reset_done'),
)

beeftornado/sentry | src/sentry/rules/__init__.py | Python | bsd-3-clause | 646 | 0.001548

from __future__ import absolute_import
from .base import * # NOQA
from .registry import RuleRegistry  # NOQA
def init_registry():
from sentry.constants import _SENTRY_RULES
from sentry.plugins.base import plugins
from sentry.utils.imports import import_string
from sentry.utils.safe import safe_execute
registry = RuleRegistry()
for rule in _SENTRY_RULES:
cls = import_string(rule)
registry.add(cls)
for plugin in plugins.all(version=2):
for cls in safe_execute(plugin.get_rules, _with_transaction=False) or ():
registry.add(cls)
return registry
rules = init_registry()

hufeiya/leetcode | python/88_Merge_Sorted_Array.py | Python | gpl-2.0 | 628 | 0.015924

class Solution(object):
def merge(self, nums1, m, nums2, n):
"""
:type nums1: List[int]
:type m: int
:type nums2: List[int]
:type n: int
:rtype: void Do not return anything, modify nums1 in-place instead.
"""
last,i,j = m+n-1,m-1,n-1
while i >= 0 and j >= 0:
if nums1[i] > nums2[j]:
nums1[last] = nums1[i]
last,i = last-1,i-1
else:
nums1[last] = nums2[j]
last,j = last-1,j-1
while j >= 0:
nums1[last] = nums2[j]
last,j = last-1,j-1
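A quick check of the in-place contract:

nums1 = [1, 3, 5, 0, 0, 0]  # three trailing slots reserved for nums2
Solution().merge(nums1, 3, [2, 4, 6], 3)
assert nums1 == [1, 2, 3, 4, 5, 6]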

caternuson/rpi-laser | mjpegger.py | Python | mit | 2,603 | 0.011909

#===========================================================================
# mjpegger.py
#
# Runs a MJPG stream on provided port.
#
# 2016-07-25
# Carter Nelson
#===========================================================================
import threading
import SimpleHTTPServer
import SocketServer
import io
keepStreaming = False
camera = None
resize = (640,360)
class MJPEGThread(threading.Thread):
"""Thread to server MJPEG stream."""
def __init__(self, group=None, target=None, name=None, args=(), kwargs=None):
threading.Thread.__init__(self, group=group, target=target, name=name)
global camera, resize
camera = kwargs['camera']
resize = kwargs['resize']
self.port = kwargs['port']
self.keepRunning = False
self.streamRunning = False
self.server = None
def run(self, ):
print "MJPEGThread starting"
self.server = SocketServer.TCPServer(("",self.port), MJPEGStreamHandler,
bind_and_activate=False)
self.server.allow_reuse_address = True
self.server.timeout = 0.1
self.server.server_bind()
self.server.server_activate()
self.keepRunning = True
self.streamRunning = True
while self.keepRunning:
self.server.handle_request()
self.streamRunning = False
camera.close()
self.server.server_close()
print "MJPEGThread done"
def stop(self, ):
global keepStreaming
keepStreaming = False
self.keepRunning = False
class MJPEGStreamHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
"""Handler for MJPEG stream."""
def do_GET(self, ):
print "MJPEGStreamHandler GET"
global keepStreaming
keepStreaming = True
stream = io.BytesIO()
self.send_response(200)
self.send_header('Content-type','multipart/x-mixed-replace; boundary=--picameramjpg')
self.end_headers()
for frame in camera.capture_continuous(stream, 'jpeg',
use_video_port = True,
resize = resize):
if not keepStreaming:
break
self.wfile.write("--picameramjpg")
self.send_header('Content-type','image/jpeg')
self.send_header('Content-length',len(stream.getvalue()))
self.end_headers()
self.wfile.write(stream.getvalue())
stream.seek(0)
stream.truncate()
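A hedged start/stop sketch; it assumes a picamera.PiCamera instance, which this module expects (it calls capture_continuous) but never imports itself:

import picamera
cam = picamera.PiCamera()  # assumed camera object
t = MJPEGThread(kwargs={'camera': cam, 'resize': (640, 360), 'port': 8081})
t.start()   # stream served at http://<host>:8081/
t.stop()    # stops the handler loop and closes the camera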

Centre-Alt-Rendiment-Esportiu/att | src/python/test/classes/BallTracker.py | Python | gpl-3.0 | 2,078 | 0.001444

from test.classes.ball_detector.BallDetector import BallDetector
from test.classes.ball_detector.BounceCalculator import BounceCalculator
from test.classes.ball_detector.Extrapolator import Extrapolator
from test.classes.utils.Ball import Ball
from test.classes.utils.BallHistory import BallHistory
VERTICAL_THRESHOLD = 10
class BallTracker:
def __init__(self):
self.track_history = BallHistory()
self.ball_detector = None
self.bounce_calculator = BounceCalculator()
self.extrapolator = Extrapolator()
def first_frame(self, first_frame):
self.ball_detector = BallDetector(first_frame)
def track(self, frame):
found_ball = self.ball_detector.detect(frame)
if found_ball.is_none():
found_ball = self.extrapolator.extrapolate(self.track_history)
# Remove vertical movement logic
# If we have no one to compare to, cannot detect vertical movement
if len(self.track_history) == 0 or self.track_history[-1].is_none() or found_ball.is_none():
self.track_history.update_history(found_ball)
else:
# If we have someone to compare to, look if x coordinates have changed enough
if abs(self.track_history[-1].center[0] - found_ball.center[0]) < VERTICAL_THRESHOLD:
self.track_history.update_history(Ball())
else:
self.track_history.update_history(found_ball)
return found_ball
def get_bounce(self):
bounce = self.bounce_calculator.find_bounce(self.track_history)
if not bounce.is_none():
# Bounces are detected after happening
after_bounce_ball = self.track_history[-1]
self.track_history.clear_history()
self.track_history.update_history(after_bounce_ball)
# We don't care about outside bounces
if not self.ball_detector.is_inside_table(bounce):
return Ball()
return bounce
def clear(self):
self.track_history.clear_history()
self.ball_detector.clear()

jeffmahoney/crash-python | crash/types/page.py | Python | gpl-2.0 | 10,946 | 0.001827

#!/usr/bin/python3
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
from typing import Dict, Union, TypeVar, Iterable, Callable, Tuple,\
Optional
from math import log, ceil
import gdb
import crash
from crash.util import find_member_variant
from crash.util.symbols import Types, Symvals, TypeCallbacks
from crash.util.symbols import SymbolCallbacks, MinimalSymbolCallbacks
from crash.cache.syscache import config
from crash.exceptions import DelayedAttributeError
#TODO debuginfo won't tell us, depends on version?
PAGE_MAPPING_ANON = 1
types = Types(['unsigned long', 'struct page', 'enum pageflags',
'enum zone_type', 'struct mem_section'])
symvals = Symvals(['mem_section', 'max_pfn'])
PageType = TypeVar('PageType', bound='Page')
class Page:
slab_cache_name = None
slab_page_name = None
compound_head_name = None
vmemmap_base = 0xffffea0000000000
vmemmap: gdb.Value
directmap_base = 0xffff880000000000
pageflags: Dict[str, int] = dict()
PG_tail = -1
PG_slab = -1
PG_lru = -1
setup_page_type_done = False
setup_pageflags_done = False
setup_pageflags_finish_done = False
ZONES_WIDTH = -1
NODES_WIDTH = -1
# TODO have arch provide this?
BITS_PER_LONG = -1
PAGE_SIZE = 4096
PAGE_SHIFT = 12
sparsemem = False
SECTION_SIZE_BITS = -1 # Depends on sparsemem=True
SECTIONS_PER_ROOT = -1 # Depends on SPARSEMEM_EXTREME
_is_tail: Callable[['Page'], bool]
_compound_head: Callable[['Page'], int]
@classmethod
def setup_page_type(cls, gdbtype: gdb.Type) -> None:
# TODO: should check config, but that failed to work on ppc64, hardcode
# 64k for now
if crash.current_target().arch.name() == "powerpc:common64":
cls.PAGE_SHIFT = 16
# also a config
cls.directmap_base = 0xc000000000000000
cls.sparsemem = True
cls.SECTION_SIZE_BITS = 24
cls.PAGE_SIZE = 1 << cls.PAGE_SHIFT
cls.slab_cache_name = find_member_variant(gdbtype, ['slab_cache', 'lru'])
cls.slab_page_name = find_member_variant(gdbtype, ['slab_page', 'lru'])
cls.compound_head_name = find_member_variant(gdbtype, ['compound_head', 'first_page'])
if not hasattr(cls, 'vmemmap'):
cls.vmemmap = gdb.Value(cls.vmemmap_base).cast(gdbtype.pointer())
cls.setup_page_type_done = True
if cls.setup_pageflags_done and not cls.setup_pageflags_finish_done:
cls.setup_pageflags_finish()
@classmethod
def setup_mem_section(cls, gdbtype: gdb.Type) -> None:
# TODO assumes SPARSEMEM_EXTREME
cls.SECTIONS_PER_ROOT = cls.PAGE_SIZE // gdbtype.sizeof
@classmethod
def pfn_to_page(cls, pfn: int) -> gdb.Value:
if cls.sparsemem:
section_nr = pfn >> (cls.SECTION_SIZE_BITS - cls.PAGE_SHIFT)
root_idx = section_nr // cls.SECTIONS_PER_ROOT
offset = section_nr & (cls.SECTIONS_PER_ROOT - 1)
section = symvals.mem_section[root_idx][offset]
pagemap = section["section_mem_map"] & ~3
return (pagemap.cast(types.page_type.pointer()) + pfn).dereference()
# pylint doesn't have the visibility it needs to evaluate this
# pylint: disable=unsubscriptable-object
return cls.vmemmap[pfn]
@classmethod
def setup_pageflags(cls, gdbtype: gdb.Type) -> None:
for field in gdbtype.fields():
cls.pageflags[field.name] = field.enumval
cls.setup_pageflags_done = True
if cls.setup_page_type_done and not cls.setup_pageflags_finish_done:
cls.setup_pageflags_finish()
cls.PG_slab = 1 << cls.pageflags['PG_slab']
cls.PG_lru = 1 << cls.pageflags['PG_lru']
@classmethod
def setup_vmemmap_base(cls, symbol: gdb.Symbol) -> None:
cls.vmemmap_base = int(symbol.value())
# setup_page_type() was first and used the hardcoded initial value,
# we have to update
cls.vmemmap = gdb.Value(cls.vmemmap_base).cast(types.page_type.pointer())
@classmethod
def setup_directmap_base(cls, symbol: gdb.Symbol) -> None:
cls.directmap_base = int(symbol.value())
@classmethod
def setup_zone_type(cls, gdbtype: gdb.Type) -> None:
max_nr_zones = gdbtype['__MAX_NR_ZONES'].enumval
cls.ZONES_WIDTH = int(ceil(log(max_nr_zones, 2)))
@classmethod
# pylint: disable=unused-argument
def setup_nodes_width(cls, symbol: Union[gdb.Symbol, gdb.MinSymbol]) -> None:
"""
Detect NODES_WIDTH from the in-kernel config table.
Args:
symbol: The ``kernel_config_data`` symbol or minimal symbol.
It is not used directly. It is used to determine whether
the config data should be available.
"""
# TODO: handle kernels with no space for nodes in page flags
try:
cls.NODES_WIDTH = int(config['NODES_SHIFT'])
except (KeyError, DelayedAttributeError):
# XXX
print("Unable to determine NODES_SHIFT from config, trying 8")
cls.NODES_WIDTH = 8
# piggyback on this callback because type callback doesn't seem to work
# for unsigned long
cls.BITS_PER_LONG = types.unsigned_long_type.sizeof * 8
@classmethod
def setup_pageflags_finish(cls) -> None:
cls.setup_pageflags_finish_done = True
cls._is_tail = cls.__is_tail_compound_head_bit
cls._compound_head = cls.__compound_head_uses_low_bit
if 'PG_tail' in cls.pageflags.keys():
cls.PG_tail = 1 << cls.pageflags['PG_tail']
cls._is_tail = cls.__is_tail_flag
if cls.compound_head_name == 'first_page':
cls._compound_head = cls.__compound_head_first_page
if cls.PG_tail == -1:
cls.PG_tail = 1 << cls.pageflags['PG_compound'] | 1 << cls.pageflags['PG_reclaim']
cls._is_tail = cls.__is_tail_flagcombo
@classmethod
def from_obj(cls, page: gdb.Value) -> 'Page':
pfn = (int(page.address) - Page.vmemmap_base) // types.page_type.sizeof
return Page(page, pfn)
@classmethod
def from_page_addr(cls, addr: int) -> 'Page':
page_ptr = gdb.Value(addr).cast(types.page_type.pointer())
return cls.from_obj(page_ptr.dereference())
def __init__(self, obj: gdb.Value, pfn: int) -> None:
self.gdb_obj = obj
self.address = int(obj.address)
self.pfn = pfn
self.flags = int(obj["flags"])
def __is_tail_flagcombo(self) -> bool:
return bool((self.flags & self.PG_tail) == self.PG_tail)
def __is_tail_flag(self) -> bool:
return bool(self.flags & self.PG_tail)
def __is_tail_compound_head_bit(self) -> bool:
return bool(self.gdb_obj['compound_head'] & 1)
def is_tail(self) -> bool:
return self._is_tail()
def is_slab(self) -> bool:
return bool(self.flags & self.PG_slab)
def is_lru(self) -> bool:
return bool(self.flags & self.PG_lru)
def is_anon(self) -> bool:
mapping = int(self.gdb_obj["mapping"])
return (mapping & PAGE_MAPPING_ANON) != 0
def get_slab_cache(self) -> gdb.Value:
if Page.slab_cache_name == "lru":
return self.gdb_obj["lru"]["next"]
return self.gdb_obj[Page.slab_cache_name]
def get_slab_page(self) -> gdb.Value:
if Page.slab_page_name == "lru":
return self.gdb_obj["lru"]["prev"]
return self.gdb_obj[Page.slab_page_name]
def get_nid(self) -> int:
# TODO: this only works when there are no sections (i.e. sparsemem_vmemmap)
return self.flags >> (self.BITS_PER_LONG - self.NODES_WIDTH)
def get_zid(self) -> int:
shift = self.BITS_PER_LONG - self.NODES_WIDTH - self.ZONES_WIDTH
zid = self.flags >> shift & ((1 << self.ZONES_WIDTH) - 1)
return zid
def __compound_head_first_page(self) -> int:
return int(self.gdb_obj['first_page'])
def __compound_head_uses_low_bit(self) -> int:
return int(self.gdb_obj['compound_head']) - 1
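A worked example of the flag-field arithmetic behind get_nid() and get_zid(), under assumed widths (BITS_PER_LONG=64, NODES_WIDTH=8, ZONES_WIDTH=2; the real values come from the kernel config):

BITS_PER_LONG, NODES_WIDTH, ZONES_WIDTH = 64, 8, 2  # assumed layout
flags = (3 << 56) | (1 << 54)  # node id 3 in the top 8 bits, zone id 1 below
nid = flags >> (BITS_PER_LONG - NODES_WIDTH)  # -> 3
zid = (flags >> (BITS_PER_LONG - NODES_WIDTH - ZONES_WIDTH)) \
      & ((1 << ZONES_WIDTH) - 1)  # -> 1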

arunhotra/tensorflow | tensorflow/python/kernel_tests/cholesky_op_test.py | Python | apache-2.0 | 2,904 | 0.006543

"""Tests for tensorflow.ops.tf.Cholesky."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.python.platform
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
class CholeskyOpTest(tf.test.TestCase):
def _verifyCholesky(self, x):
with self.test_session() as sess:
# Verify that LL^T == x.
if x.ndim == 2:
chol = tf.cholesky(x)
verification = tf.matmul(chol,
chol,
transpose_a=False,
transpose_b=True)
else:
chol = tf.batch_cholesky(x)
verification = tf.batch_matmul(chol, chol, adj_x=False, adj_y=True)
chol_np, verification_np = sess.run([chol, verification])
self.assertAllClose(x, verification_np)
self.assertShapeEqual(x, chol)
# Check that the cholesky is lower triangular, and has positive diagonal
# elements.
if chol_np.shape[-1] > 0:
chol_reshaped = np.reshape(chol_np, (-1, chol_np.shape[-2],
chol_np.shape[-1]))
for chol_matrix in chol_reshaped:
self.assertAllClose(chol_matrix, np.tril(chol_matrix))
self.assertTrue((np.diag(chol_matrix) > 0.0).all())
def testBasic(self):
self._verifyCholesky(np.array([[4., -1., 2.], [-1., 6., 0], [2., 0., 5.]]))
def testBatch(self):
simple_array = np.array([[[1., 0.], [0., 5.]]]) # shape (1, 2, 2)
self._verifyCholesky(simple_array)
self._verifyCholesky(np.vstack((simple_array, simple_array)))
odd_sized_array = np.array([[[4., -1., 2.], [-1., 6., 0], [2., 0., 5.]]])
self._verifyCholesky(np.vstack((odd_sized_array, odd_sized_array)))
# Generate random positive-definite matrices.
matrices = np.random.rand(10, 5, 5)
for i in xrange(10):
matrices[i] = np.dot(matrices[i].T, matrices[i])
self._verifyCholesky(matrices)
def testNonSquareMatrix(self):
with self.assertRaises(ValueError):
tf.cholesky(np.array([[1., 2., 3.], [3., 4., 5.]]))
def testWrongDimensions(self):
tensor3 = tf.constant([1., 2.])
with self.assertRaises(ValueError):
tf.cholesky(tensor3)
def testNotInvertible(self):
# The input should be invertible.
with self.test_session():
with self.assertRaisesOpError("LLT decomposition was not successful. The "
"input might not be valid."):
# All rows of the matrix below add to zero
self._verifyCholesky(np.array([[1., -1., 0.], [-1., 1., -1.], [0., -1.,
1.]]))
def testEmpty(self):
self._verifyCholesky(np.empty([0, 2, 2]))
self._verifyCholesky(np.empty([2, 0, 0]))
if __name__ == "__main__":
tf.test.main()

chkarypidis/html2PraatMan | html2PraatMan.py | Python | gpl-3.0 | 7,854 | 0.026993

# html2PraatMan - Version 1.0 - October 16, 2013
# Batch html-to-ManPages converter for Praat documentation
# Copyright (C) 2013 Charalampos Karypidis
# Email: ch.karypidis@gmail.com
# http://addictiveknowledge.blogspot.com/
##############################
##############################
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
############################################
############################################
from bs4 import BeautifulSoup
import string, os
############################################
def doubleQuotes(s, number=1):
""" ............."""
return "\""*number + s + "\""*number
############################################
def bold(s):
return "##" + s + "#"
############################################
def italics(s):
# listWords = string.split(s)
# if len(listWords) == 1:
# return "%%" + listWords[0] + "%"
# else:
# for x in range(0,len(listWords)):
# listWords[x] = "%%" + listWords[x]
# return string.join(listWords)
return "%%" + s + "%"
############################################
def monospace(s):
return "$$" + s + "$"
############################################
def subscript(s):
return "__" + s + "_"
############################################
def superscript(s):
return "^^" + s + "^"
############################################
def link(s):
target = s['href']
filenameOnly = target.split('.')[0]
extension = target.split('.')[1]
linkText = s.string
audioExtension = ['wav', 'aiff', 'aifc', 'au', 'nist', 'flac', 'mp3']
if extension == "man":
return "@@" + filenameOnly + "|" + linkText + "@"
elif extension == "praat":
if s['alt']:
args = string.split(s['alt'], "|")
for x in range(0,len(args)):
args[x] = doubleQuotes(args[x],2)
argsStr = string.join(args, " ")
return "@@\\SC" + doubleQuotes(target,2) + " " + argsStr + " " + "|" + linkText + "@"
else:
return "@@\\SC" + doubleQuotes(target,2) + " " + "|" + linkText + "@"
elif extension in audioExtension:
return "@@\\FI" + target + " " + "|" + linkText + "@"
############################################
allFiles = []
htmlList = []
for (dirpath, dirnames, filenames) in os.walk(os.getcwd()):
allFiles.extend(filenames)
for x in range(0,len(allFiles)-1):
if allFiles[x].endswith("html",len(allFiles[x])-4):
htmlList.append(allFiles[x])
for inputFilename in htmlList:
input = BeautifulSoup(open(inputFilename))
address = input.address.string
addressCleaned = address.strip()
addressComps = addressCleaned.split('\n')
intro = input.cite.string
if len(addressComps) == 3:
recordTime = addressComps[2]
else:
recordTime = '0'
intro = input.cite.string
#####################################
outputFilename = inputFilename.split('.')[0] + ".man"
output = open(outputFilename,"w")
###########################
output.write("ManPagesTextFile\n")
output.write(doubleQuotes(inputFilename.split('.')[0].capitalize()) + " " + doubleQuotes(addressComps[0]) + " " + addressComps[1] + " " + addressComps[2] + '\n')
for child in input.body:
if child.name is not None:
if child.name == "cite":
listChildren = []
for x in child:
if x.name == "b":
temp = bold(x.string)
listChildren.append(temp)
elif x.name == "a":
temp = link(x)
listChildren.append(temp)
elif x.name == "i":
temp = italics(x.string)
listChildren.append(temp)
elif x.name == "kbd":
temp = monospace(x.string)
listChildren.append(temp)
elif x.name == "sub":
temp = subscript(x.string)
listChildren.append(temp)
elif x.name == "sup":
temp = superscript(x.string)
listChildren.append(temp)
else:
listChildren.append(str(x))
output.write("<intro> " + doubleQuotes(string.join(listChildren, '')) + '\n')
elif child.name == "h1":
listChildren = []
for x in child:
if x.name == "b":
temp = bold(x.string)
listChildren.append(temp)
elif x.name == "a":
temp = link(x)
listChildren.append(temp)
elif x.name == "i":
temp = italics(x.string)
listChildren.append(temp)
elif x.name == "kbd":
temp = monospace(x.string)
listChildren.append(temp)
elif x.name == "sub":
temp = subscript(x.string)
listChildren.append(temp)
elif x.name == "sup":
temp = superscript(x.string)
listChildren.append(temp)
else:
listChildren.append(str(x))
output.write("<entry> " + doubleQuotes(string.join(listChildren, '')) + '\n')
elif child.name == "blockquote":
output.write("<definition> " + doubleQuotes(child.string) + '\n')
elif child.name == "p":
listChildren = []
for x in child:
if x.name == "b":
temp = bold(x.string)
listChildren.append(temp)
elif x.name == "a":
temp = link(x)
listChildren.append(temp)
elif x.name == "i":
temp = italics(x.string)
listChildren.append(temp)
elif x.name == "kbd":
temp = monospace(x.string)
listChildren.append(temp)
elif x.name == "sub":
temp = subscript(x.string)
listChildren.append(temp)
elif x.name == "sup":
temp = superscript(x.string)
listChildren.append(temp)
else:
listChildren.append(str(x))
output.write("<normal> " + doubleQuotes(string.join(listChildren, '')) + '\n')
elif child.name == "address":
continue
elif child.name == "ul":
if child.get('class') == ["noBullet"]:
# if child.get('class'):
for item in child.find_all("li"):
listChildren = []
for x in item:
if x.name == "b":
temp = bold(x.string)
listChildren.append(temp)
elif x.name == "a":
temp = link(x)
listChildren.append(temp)
elif x.name == "i":
temp = italics(x.string)
listChildren.append(temp)
elif x.name == "kbd":
temp = monospace(x.string)
listChildren.append(temp)
elif x.name == "sub":
temp = subscript(x.string)
listChildren.append(temp)
elif x.name == "sup":
temp = superscript(x.string)
listChildren.append(temp)
else:
listChildren.append(str(x))
output.write("<list_item> " + "\"" + string.join(listChildren, '') + "\"" + '\n')
else:
for item in child.find_all("li"):
listChildren = []
for x in item:
if x.name == "b":
temp = bold(x.string)
listChildren.append(temp)
elif x.name == "a":
temp = link(x)
listChildren.append(temp)
elif x.name == "i":
temp = italics(x.string)
listChildren.append(temp)
elif x.name == "kbd":
temp = monospace(x.string)
listChildren.append(temp)
elif x.name == "sub":
temp = subscript(x.string)
listChildren.append(temp)
elif x.name == "sup":
temp = superscript(x.string)
listChildren.append(temp)
else:
listChildren.append(str(x))
output.write("<list_item> \"\\bu " + string.join(listChildren, '') + "\"" + '\n')
elif child.name == "code":
width = child.get('width')
height = child.get('height')
output.write("<script> " + width + " " + height + " " + doubleQuotes(child.string) + '\n')
else:
continue
output.close()
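The markup helpers above map HTML inline tags to Praat ManPages notation; a quick sketch of their output:

print bold("Intro")      # -> ##Intro#
print italics("f0")      # -> %%f0%
print subscript("max")   # -> __max_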

ooici/coi-services | ion/services/dm/test/test_site_data_products.py | Python | bsd-2-clause | 20,250 | 0.007407

from unittest import skip
from ion.services.dm.test.dm_test_case import DMTestCase
from pyon.public import PRED, OT, RT
from pyon.util.log import log
from ion.services.dm.test.test_dm_end_2_end import DatasetMonitor
from ion.services.dm.utility.granule import RecordDictionaryTool
from nose.plugins.attrib import attr
import numpy as np
import calendar
from datetime import datetime
@attr(group='dm')
class TestSiteDataProducts(DMTestCase):
def create_device_site_deployment(self, dep_name="Deployment", starting=''):
from interface.objects import StreamConfiguration, StreamConfigurationType, InstrumentDevice
from interface.objects import InstrumentModel, PlatformAgent, InstrumentSite, TemporalBounds, Deployment
from interface.objects import RemotePlatformDeploymentContext
stream_conf = StreamConfiguration(stream_name="CTD 1 Parsed Stream", parameter_dictionary_name='ctd_parsed_param_dict', stream_type=StreamConfigurationType.PARSED)
pdict_id = self.dataset_management.read_parameter_dictionary_by_name(name='ctd_parsed_param_dict')
stream_def_id = self.create_stream_definition(name='CTD 1', parameter_dictionary_id=pdict_id)
data_product_id = self.create_data_product(name="DDP_1", stream_def_id=stream_def_id, stream_configuration=stream_conf)
self.activate_data_product(data_product_id)
dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(data_product_id)
stream_def = self.resource_registry.find_objects(data_product_id, PRED.hasStreamDefinition)[0][0]
param_dict = self.resource_registry.find_objects(stream_def._id, PRED.hasParameterDictionary)[0][0]
# Add data to the DataProduct
dataset_monitor = DatasetMonitor(dataset_id)
self.addCleanup(dataset_monitor.stop)
rdt = self.ph.get_rdt(stream_def._id)
rdt_ = self.ph.rdt_for_data_product(data_product_id)
self.assertEquals(rdt.fields, rdt_.fields)
rdt['time'] = [0, 1, 2, 3]
rdt['temp'] = [10, 11, 12, 13]
self.ph.publish_rdt_to_data_product(data_product_id, rdt)
self.assertTrue(dataset_monitor.wait())
# Create Device
device = InstrumentDevice(name='Device 1')
device_id = self.instrument_management.create_instrument_device(device)
self.data_acquisition_management.register_instrument(device_id)
self.data_acquisition_management.assign_data_product(device_id, data_product_id)
# Create Model
model = InstrumentModel(name='Model 1')
model_id = self.instrument_management.create_instrument_model(model)
self.instrument_management.assign_instrument_model_to_instrument_device(model_id, device_id)
# Create AgentDefinition
ad = PlatformAgent(stream_configurations=[stream_conf])
ad_id, _ = self.resource_registry.create(ad)
# Create Site
site = InstrumentSite(name='Site 1', stream_configurations=[stream_conf])
site_id, _ = self.resource_registry.create(site)
self.resource_registry.create_association(site_id, PRED.hasModel, model_id)
self.resource_registry.create_association(site_id, PRED.hasAgentDefinition, ad_id)
# TemporalBounds of the Deployment
temp_bounds = TemporalBounds(start_datetime=starting, end_datetime='')
# Create Deployment
deployment = Deployment(name=dep_name, type="RemotePlatform", context=RemotePlatformDeploymentContext(),
constraint_list=[temp_bounds])
deployment_id = self.observatory_management.create_deployment(deployment=deployment, site_id=site_id, device_id=device_id)
return site_id, device_id, dataset_id, deployment_id, param_dict, data_product_id
@attr('PRELOAD')
def test_preload_creation(self):
from interface.objects import DataProductTypeEnum
self.preload_alpha()
# IDs from Preload sheets
deployment_id = "DEP_BTST_1"
site_id = "IS_BTST_SBE37"
device_id = "ID_BTST_SBE37"
#deployment_id = "DEP_BTST_2"
#site_id = "IS_BTST_CTDSIM0"
#device_id = "ID_BTST_CTDSIM0"
deployment_obj = self.container.resource_registry.find_resources_ext(alt_id=deployment_id, alt_id_ns='PRE')[0][0]
site_obj = self.container.resource_registry.find_resources_ext(alt_id=site_id, alt_id_ns='PRE')[0][0]
device_obj = self.container.resource_registry.find_resources_ext(alt_id=device_id, alt_id_ns='PRE')[0][0]
# Check associations
self.assertEquals(self.resource_registry.find_objects(site_obj._id, PRED.hasDevice, id_only=True)[0][0], device_obj._id)
self.assertEquals(self.resource_registry.find_objects(site_obj._id, PRED.hasPrimaryDeployment, id_only=True)[0][0], deployment_obj._id)
self.assertEquals(self.resource_registry.find_objects(site_obj._id, PRED.hasDeployment, id_only=True)[0][0], deployment_obj._id)
self.assertEquals(self.resource_registry.find_objects(device_obj._id, PRED.hasPrimaryDeployment, id_only=True)[0][0], deployment_obj._id)
self.assertEquals(self.resource_registry.find_objects(device_obj._id, PRED.withinDeployment, id_only=True)[0][0], deployment_obj._id)
self.assertEquals(self.resource_registry.find_objects(device_obj._id, PRED.hasDeployment, id_only=True)[0][0], deployment_obj._id)
# stream_name to dataset_id, for lookup later
device_stream_names = {}
device_data_products, _ = self.resource_registry.find_objects(device_obj._id, PRED.hasOutputProduct)
for ddp in device_data_products:
stream_def = self.resource_registry.find_objects(ddp._id, PRED.hasStreamDefinition)[0][0]
dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(ddp._id)
device_stream_names[stream_def.name] = dataset_id
site_data_products, _ = self.resource_registry.find_objects(site_obj._id, PRED.hasOutputProduct)
for sdp in site_data_products:
self.assertEquals(sdp.category, DataProductTypeEnum.SITE)
self.assertEquals(len(sdp.dataset_windows), 1)
stream_def = self.resource_registry.find_objects(sdp._id, PRED.hasStreamDefinition)[0][0]
assert sdp.dataset_windows[0].dataset_id == device_stream_names.get(stream_def.name)
assert sdp.dataset_windows[0].bounds.start_datetime == deployment_obj.start_datetime
assert sdp.dataset_windows[0].bounds.end_datetime == deployment_obj.end_datetime
self.observatory_management.deactivate_deployment(deployment_id=deployment_obj._id)
deployment_obj = self.resource_registry.read(deployment_obj._id)
for sdp in site_data_products:
self.assertEquals(sdp.category, DataProductTypeEnum.SITE)
self.assertEquals(len(sdp.dataset_windows), 1)
stream_def = self.resource_registry.find_objects(sdp._id, PRED.hasStreamDefinition)[0][0]
assert sdp.dataset_windows[0].dataset_id == device_stream_names.get(stream_def.name)
assert sdp.dataset_windows[0].bounds.start_datetime == deployment_obj.start_datetime
assert sdp.dataset_windows[0].bounds.end_datetime == deployment_obj.end_datetime
@attr('INT')
def test_primary_deployment(self):
# First deployment
starting = str(calendar.timegm(datetime(2014, 1, 1, 0).timetuple()))
site_1_id, device_1_id, dataset_1_id, deployment_1_id, param_dict_a, data_product_1_id = self.create_device_site_deployment(dep_name="Site 1 - Device 1",
starting=starting)
self.assertEquals([], self.resource_registry.find_objects(device_1_id, PRED.hasPrimaryDeployment, id_only=True)[0])
self.assertEquals([], self.resource_registry.find_objects(site_1_id, PRED.hasPrimaryDeployment, id_only=True)[0])
self.assertEquals(
|
jdelacruz26/misccode
|
cad2xls.py
|
Python
|
bsd-3-clause
| 2,955 | 0.004061 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
## @copyright
# Software License Agreement (BSD License)
#
# Copyright (c) 2017, Jorge De La Cruz, Carmen Castano.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
__author__ = 'Jorge De La Cruz, Carmen Castano'
__copyright__ = 'Copyright (c) 2017 Jorge De La Cruz, Carmen Castano'
__license__ = 'BSD'
__maintainer__ = 'Jorge De La Cruz'
__email__ = 'delacruz@igm.rwth-aachen.de'
import sys
## Path to FreeCAD library
# change this by your FreeCAD library path
sys.path.append('/usr/lib/freecad/lib')
import FreeCAD as App
import Import
from datetime import datetime
class GetParameters:
    def __init__(self):
        self.filePath = '/home/jdelacruz/Downloads/KonzeptB_lang090715.stp'
def loadCAD(self):
print('Starting to load the CAD file, please be patient!...')
Import.open(self.filePath)
self.handler = App.ActiveDocument
self.parts = self.handler.Objects
print('CAD model loaded!')
def writeTxt(self):
f = open('data.txt','a')
print >>f, 'Name \t Label'
self.i = 0
self.size = len(self.parts)
self.names = range(self.size)
self.labels = range(self.size)
for self.part in self.parts:
self.names[self.i] = self.part.Name
self.labels[self.i] = self.part.Label
print >>f, self.part.Name+"\t"+self.part.Label
self.i += 1
f.close()
print('The txt file has been created successfully!')
if __name__ == '__main__':
data = GetParameters()
data.loadCAD()
data.writeTxt()
|
sharoonthomas/nereid-project
|
user.py
|
Python
|
gpl-3.0
| 2,674 | 0 |
# -*- coding: utf-8 -*-
"""
user
Add the employee relation ship to nereid user
:copyright: (c) 2012-2014 by Openlabs Technologies & Consulting (P) Limited
:license: GPLv3, see LICENSE for more details.
"""
from datetime import datetime
from nereid import request, jsonify, login_required, route
from trytond.pool import Pool, PoolMeta
from trytond.model import fields
__all__ = ['NereidUser']
__metaclass__ = PoolMeta
class NereidUser:
"""
Add employee
"""
__name__ = "nereid.user"
#: Allow the nereid user to be connected to an internal employee. This
#: indicates that the user is an employee and not a regular participant
employee = fields.Many2One('company.employee', 'Employee', select=True)
member_of_projects = fields.One2Many(
"project.work.member", "user", "Member of Projects"
)
def serialize(self, purpose=None):
'''
        Serialize NereidUser and return a dictionary.
'''
        result = super(NereidUser, self).serialize(purpose)
result['image'] = {
'url': self.get_profile_picture(size=20),
}
result['email'] = self.email
result['employee'] = self.employee and self.employee.id or None
result['permissions'] = [p.value for p in self.permissions]
return result
@classmethod
@route("/me", methods=["GET", "POST"])
@login_required
def profile(cls):
"""
User profile
"""
if request.method == "GET" and request.is_xhr:
user, = cls.browse([request.nereid_user.id])
return jsonify(user.serialize())
return super(NereidUser, cls).profile()
def is_admin_of_project(self, project):
"""
Check if user is admin member of the given project
:param project: Active record of project
"""
if request.nereid_user.has_permissions(['project.admin']):
return True
project = project.project
assert project.type == 'project'
for member in project.members:
if member.user == self and member.role == 'admin':
return True
return False
def hours_reported_today(self):
"""
Returns the number of hours the nereid_user has done on the
current date.
"""
Timesheet = Pool().get('timesheet.line')
if not self.employee:
return 0.00
current_date = datetime.utcnow().date()
lines = Timesheet.search([
('date', '=', current_date),
('employee', '=', self.employee.id),
])
return sum(map(lambda line: line.hours, lines))
|
mkhuthir/catkin_ws
|
src/chessbot/devel/lib/python2.7/dist-packages/nasa_r2_common_msgs/srv/_ResetTableScene.py
|
Python
|
gpl-3.0
| 6,933 | 0.016443 |
# This Python file uses the following encoding: utf-8
"""autogenerated by genpy from nasa_r2_common_msgs/ResetTableSceneRequest.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
class ResetTableSceneRequest(genpy.Message):
_md5sum = "ba4b0b221fb425ac5eaf73f71ae34971"
_type = "nasa_r2_common_msgs/ResetTableSceneRequest"
_has_header = False #flag to mark the presence of a Header object
_full_text = """bool reset
"""
__slots__ = ['reset']
_slot_types = ['bool']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
reset
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(ResetTableSceneRequest, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.reset is None:
self.reset = False
else:
self.reset = False
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
buff.write(_struct_B.pack(self.reset))
        except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(self.reset))))
        except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(self.reset))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
end = 0
start = end
end += 1
(self.reset,) = _struct_B.unpack(str[start:end])
self.reset = bool(self.reset)
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
buff.write(_struct_B.pack(self.reset))
        except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(self.reset))))
        except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(self.reset))))
    def deserialize_numpy(self, str, numpy):
        """
        unpack serialized message in str into this message instance using numpy for array types
        :param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
end = 0
start = end
end += 1
(self.reset,) = _struct_B.unpack(str[start:end])
self.reset = bool(self.reset)
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
_struct_B = struct.Struct("<B")
# This Python file uses the following encoding: utf-8
"""autogenerated by genpy from nasa_r2_common_msgs/ResetTableSceneResponse.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
class ResetTableSceneResponse(genpy.Message):
_md5sum = "eb13ac1f1354ccecb7941ee8fa2192e8"
_type = "nasa_r2_common_msgs/ResetTableSceneResponse"
_has_header = False #flag to mark the presence of a Header object
_full_text = """bool result
"""
__slots__ = ['result']
_slot_types = ['bool']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
result
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(ResetTableSceneResponse, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.result is None:
self.result = False
else:
self.result = False
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
buff.write(_struct_B.pack(self.result))
        except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(self.result))))
        except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(self.result))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
end = 0
start = end
end += 1
(self.result,) = _struct_B.unpack(str[start:end])
self.result = bool(self.result)
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
buff.write(_struct_B.pack(self.result))
        except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(self.result))))
        except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(self.result))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
end = 0
start = end
end += 1
(self.result,) = _struct_B.unpack(str[start:end])
self.result = bool(self.result)
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
_struct_B = struct.Struct("<B")
class ResetTableScene(object):
_type = 'nasa_r2_common_msgs/ResetTableScene'
_md5sum = 'c95f6c9db0edf7da4840d218c33352c7'
_request_class = ResetTableSceneRequest
_response_class = ResetTableSceneResponse
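# Illustrative client usage (not part of the generated file; the service
# name 'r2_reset_table_scene' is an assumption -- use whatever name the
# serving node actually advertises):
#
#   import rospy
#   rospy.wait_for_service('r2_reset_table_scene')
#   reset = rospy.ServiceProxy('r2_reset_table_scene', ResetTableScene)
#   response = reset(True)  # builds ResetTableSceneRequest(reset=True)
#   print(response.result)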
|
sghai/robottelo
|
tests/foreman/ui/test_discoveredhost.py
|
Python
|
gpl-3.0
| 56,249 | 0.000018 |
# -*- encoding: utf-8 -*-
"""Test class for Foreman Discovery
:Requirement: Discoveredhost
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: UI
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
import subprocess
import time
from fauxfactory import gen_string
from nailgun import entities
from robottelo.decorators import (
run_in_one_thread,
run_only_on,
skip_if_not_set,
stubbed,
tier3,
upgrade,
)
from robottelo.api.utils import configure_provisioning
from robottelo.libvirt_discovery import LibvirtGuest
from robottelo.test import UITestCase
from robottelo.ui.base import UIError
from robottelo.ui.factory import (
edit_param,
make_discoveryrule,
)
from robottelo.ui.locators import common_locators, locators, tab_locators
from robottelo.ui.session import Session
from time import sleep
@run_in_one_thread
class DiscoveryTestCase(UITestCase):
"""Implements Foreman discovery tests in UI."""
def _edit_discovery_fact_column_param(self, session, param_value):
"""
Edit the 'discovery_fact_column' parameter from settings menu.
User can populate a new column on 'Discovered Hosts' page by setting
the value of 'discovery_fact_column'
"""
tab_locator = tab_locators['settings.tab_discovered']
param_name = 'discovery_fact_column'
edit_param(
session=session,
tab_locator=tab_locator,
param_name=param_name,
value_type='input',
param_value=param_value,
)
saved_element = self.settings.get_saved_value(
tab_locator, param_name)
self.assertEqual(param_value, saved_element)
def _ping_host(self, host, timeout=60):
"""Helper to ensure given IP/hostname is reachable after reboot.
:param host: A string. The IP or hostname of host.
:param int timeout: The polling timeout in seconds.
"""
timeup = time.time() + int(timeout)
while True:
command = subprocess.Popen(
'ping -c1 {0}; echo $?'.format(host),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True
)
output = command.communicate()[0]
# Checking the return code of ping is 0
if time.time() > timeup:
return False
if int(output.split()[-1]) == 0:
return True
else:
time.sleep(5)
@classmethod
@skip_if_not_set('vlan_networking')
def setUpClass(cls):
"""Steps to Configure foreman discovery
1. Build PXE default template
2. Create Organization/Location
3. Update Global parameters to set default org and location for
discovered hosts.
4. Enable auto_provision flag to perform discovery via discovery
rules.
"""
super(DiscoveryTestCase, cls).setUpClass()
# Build PXE default template to get default PXE file
entities.ConfigTemplate().build_pxe_default()
# Create Org and location
cls.org = entities.Organization(name=gen_string('alpha')).create()
cls.org_name = cls.org.name
        cls.loc = entities.Location(
            name=gen_string('alpha'),
organization=[cls.org],
).create()
# Update default org and location params to place discovered host
cls.discovery_loc = entities.Setting().search(
query={'search': 'name="discovery_location"'})[0]
cls.discovery_loc.value = cls.loc.name
cls.discovery_loc.update({'value'})
cls.discovery_org = entities.Setting().search(
query={'search': 'name="discovery_organization"'})[0]
cls.discovery_org.value = cls.org.name
cls.discovery_org.update({'value'})
# Enable flag to auto provision discovered hosts via discovery rules
cls.discovery_auto = entities.Setting().search(
query={'search': 'name="discovery_auto"'})[0]
cls.default_discovery_auto = str(cls.discovery_auto.value)
cls.discovery_auto.value = 'True'
cls.discovery_auto.update({'value'})
cls.config_env = configure_provisioning(org=cls.org, loc=cls.loc)
@classmethod
def tearDownClass(cls):
"""Restore default 'discovery_auto' global setting's value"""
cls.discovery_auto.value = cls.default_discovery_auto
cls.discovery_auto.update({'value'})
super(DiscoveryTestCase, cls).tearDownClass()
@run_only_on('sat')
@tier3
@upgrade
def test_positive_pxe_based_discovery(self):
"""Discover a host via PXE boot by setting "proxy.type=proxy" in
PXE default
:id: 43a8857d-2f08-436e-97fb-ffec6a0c84dd
:Setup: Provisioning should be configured
:Steps: PXE boot a host/VM
:expectedresults: Host should be successfully discovered
:CaseLevel: System
"""
with Session(self) as session:
session.nav.go_to_select_org(self.org_name)
with LibvirtGuest() as pxe_host:
hostname = pxe_host.guest_name
self.assertTrue(
self.discoveredhosts.waitfordiscoveredhost(hostname)
)
self.assertIsNotNone(self.discoveredhosts.search(hostname))
@run_only_on('sat')
@tier3
@upgrade
def test_positive_pxe_less_with_dhcp_unattended(self):
"""Discover a host with dhcp via bootable discovery ISO by setting
"proxy.type=proxy" in PXE default in unattended mode.
:id: fc13167f-6fa0-4fe5-8584-7716292866ce
:Setup: Provisioning should be configured
:Steps: Boot a host/VM using modified discovery ISO.
:expectedresults: Host should be successfully discovered
:CaseLevel: System
"""
with Session(self) as session:
session.nav.go_to_select_org(self.org_name)
with LibvirtGuest(boot_iso=True) as pxe_less_host:
hostname = pxe_less_host.guest_name
self.assertTrue(
self.discoveredhosts.waitfordiscoveredhost(hostname)
)
self.assertIsNotNone(self.discoveredhosts.search(hostname))
@run_only_on('sat')
@stubbed()
@tier3
def test_positive_pxe_less_with_dhcp_semiauto(self):
"""Discover a host with dhcp via bootable discovery ISO in
semi-automated mode.
:id: 05c88618-6f15-4eb8-8501-3505160c5450
:Setup: Provisioning should be configured
:Steps: Boot a host/VM using discovery ISO
:expectedresults: Host should be successfully discovered
:caseautomation: notautomated
:CaseLevel: System
"""
@run_only_on('sat')
@stubbed()
@tier3
def test_positive_pxe_less_with_dhcp_interactively(self):
"""Discover a host with dhcp via bootable discovery ISO using
interactive TUI mode.
:id: 08780627-9ac1-4837-88eb-df673d974d05
:Setup: Provisioning should be configured
:Steps: Boot a host/VM using discovery ISO
:expectedresults: Host should be successfully discovered
:caseautomation: notautomated
:CaseLevel: System
"""
@run_only_on('sat')
@stubbed()
@tier3
def test_positive_pxe_less_without_dhcp_interactively(self):
"""Discover a host with single NIC on a network without DHCP and PXE
using ISO image in interactive TUI interface.
:id: 9703eb00-9857-4076-8b83-031a58d7c1cd
:expectedresults: Host should be discovered successfully
:caseautomation: notautomated
:CaseLevel: System
"""
@run_only_on('sat')
@stubbed()
@tier3
def test_positive_pxe_less_without_dhcp_semiauto(self):
"""Discover a host with single NIC on a network without DHCP and PXE
using ISO image in semi-automated mode.
:id: 8254a85f-21c8-4483-b453-15126762f6e5
:expectedresults: Host should be discovered successfully
:caseautomation: notautomated
|
appcelerator/entourage
|
components/services/appengine/stub/beaker/ext/memcached.py
|
Python
|
apache-2.0
| 4,037 | 0.006688 |
import sys
from beaker.container import NamespaceManager, Container
from beaker.exceptions import InvalidCacheBackendError, MissingCacheParameter
from beaker.synchronization import _threading, Synchronizer
from beaker.util import verify_directory, SyncDict
try:
import cmemcache as memcache
except ImportError:
try:
import memcache
except ImportError:
raise InvalidCacheBackendError("Memcached cache backend requires either the 'memcache' or 'cmemcache' library")
class MemcachedNamespaceManager(NamespaceManager):
clients = SyncDict(_threading.Lock(), {})
def __init__(self, namespace, url, data_dir=None, lock_dir=None, **params):
NamespaceManager.__init__(self, namespace, **params)
if lock_dir is not None:
self.lock_dir = lock_dir
elif data_dir is None:
raise MissingCacheParameter("data_dir or lock_dir is required")
else:
self.lock_dir = data_dir + "/container_mcd_lock"
verify_directory(self.lock_dir)
self.mc = MemcachedNamespaceManager.clients.get(url,
lambda: memcache.Client(url.split(';'), debug=0))
# memcached does its own locking. override our own stuff
def do_acquire_read_lock(self): pass
def do_release_read_lock(self): pass
def do_acquire_write_lock(self, wait = True): return True
def do_release_write_lock(self): pass
# override open/close to do nothing, keep memcache connection open as long
# as possible
def open(self, *args, **params):pass
def close(self, *args, **params):pass
def __getitem__(self, key):
cache_key = key.replace(' ', '\302\267')
keys = [self.namespace + '_' + cache_key, self.namespace + ':keys']
key_dict = self.mc.get_multi(keys)
if cache_key not in key_dict.get(self.namespace+':keys', {}):
raise KeyError(key)
return key_dict[self.namespace + '_' + cache_key]
def __contains__(self, key):
return self.has_key(key)
def has_key(self, key):
key = key.replace(' ', '\302\267')
keys = self.mc.get(self.namespace + ':keys') or {}
return key in keys
def __setitem__(self, key, value):
key = key.replace(' ', '\302\267')
keys = self.mc.get(self.namespace + ':keys')
if keys is None:
keys = {}
keys[key] = True
self.mc.set(self.namespace + ':keys', keys)
self.mc.set(self.namespace + "_" + key, value)
def __delitem__(self, key):
key = key.replace(' ', '\302\267')
keys = self.mc.get(self.namespace + ':keys')
try:
del keys[key]
self.mc.delete(self.namespace + "_" + key)
self.mc.set(self.namespace + ':keys', keys)
except KeyError:
raise
def do_remove(self):
keys = self.mc.get(self.namespace + ':keys')
if keys is not None:
            delete_keys = [self.namespace + '_' + x for x in keys]
delete_keys.append(self.namespace + ':keys')
self.mc.delete_multi(delete_keys)
def keys(self):
keys = self.mc.get(self.namespace + ':keys')
if keys is None:
return []
else:
            return [x.replace('\302\267', ' ') for x in keys.keys()]
class MemcachedContainer(Container):
def do_init(self, data_dir=None, lock_dir=None, **params):
self.funclock = None
def create_namespace(self, namespace, url, **params):
return MemcachedNamespaceManager(namespace, url, **params)
create_namespace = classmethod(create_namespace)
def lock_createfunc(self, wait = True):
if self.funclock is None:
self.funclock = Synchronizer(identifier =
"memcachedcontainer/funclock/%s" % self.namespacemanager.namespace,
use_files = True, lock_dir = self.namespacemanager.lock_dir)
return self.funclock.acquire_write_lock(wait)
def unlock_createfunc(self):
self.funclock.release_write_lock()
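# Minimal usage sketch (assumes a reachable memcached server and a writable
# data_dir; both values below are illustrative, not part of this module):
#
#   ns = MemcachedNamespaceManager('myns', '127.0.0.1:11211',
#                                  data_dir='/tmp/beaker')
#   ns['greeting'] = 'hello'
#   assert ns['greeting'] == 'hello'
#   del ns['greeting']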
|
Gimpneek/jobseek
|
jobseekr/cv/forms/user.py
|
Python
|
agpl-3.0
| 1,096 | 0 |
""" Forms for use with User objects """
from django import forms
from django.contrib.auth.models import User
class UserForm(forms.ModelForm):
"""
Form for django.contrib.auth.models.User
"""
class Meta:
"""
Meta data for User Form
"""
model = User
fields = ('username', 'email', 'password')
def __init__(self, *args, **kwargs):
        super(UserForm, self).__init__(*args, **kwargs)
self.fields['username'].required = True
self.fields['email'].required = True
self.fields['password'].required = True
def save(self, commit=True):
"""
Override save so creates a user using create_user method on User model
:param commit: Commit to DB or not
:return: Instance of UserForm
"""
instance = super(UserForm, self).save(commit=False)
        User.objects.create_user(
            username=self.cleaned_data.get('username'),
password=self.cleaned_data.get('password'),
email=self.cleaned_data.get('email')
)
return instance
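# Minimal usage sketch (field values are illustrative):
#
#   form = UserForm(data={'username': 'alice',
#                         'email': 'alice@example.com',
#                         'password': 's3cret'})
#   if form.is_valid():
#       form.save()  # delegates user creation to User.objects.create_user()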
|
j-windsor/cs3240-f15-team21-v2
|
post/forms.py
|
Python
|
mit
| 640 | 0.015625 |
from django import forms
from django.contrib.auth.models import User
from .models import Message
from tinymce.widgets import TinyMCE
class MessageForm(forms.Form):
recipient = forms.CharField(widget=forms.TextInput(attrs={'class':'form-control'}))
    subject = forms.CharField(widget=forms.TextInput(attrs={'class':'form-control'}))
content = forms.CharField(widget=TinyMCE(attrs={'cols': 80, 'rows': 30}))
encrypted = forms.BooleanField(required=False)
#class Meta:
# model = Message
# fields = ('recipient', 'subject', 'content', 'encrypted',)
class KeyForm(forms.Form):
pem_file = forms.FileField()
|
lokiteitor/ikol
|
test/DBtest.py
|
Python
|
gpl-2.0
| 589 | 0.018676 |
#!/usr/bin/env python2.7
import os
import sys
this_dir = os.path.dirname(os.path.abspath(__file__))
trunk_dir = os.path.split(this_dir)[0]
sys.path.insert(0,trunk_dir)
from ikol.dbregister import DataBase
from ikol import var
if os.path.exists(var.DB_PATH):
os.remove(var.DB_PATH)
DB = DataBase(var.DB_PATH)
DB.insertPlaylist("loLWOCl7nlk","test")
DB.insertPlaylist("loLWO357nlk","testb")
DB.insertVideo("KDk2341oEQQ","loLWOCl7nlk","test")
DB.insertVideo("KDktIWeoE23","loLWOCl7nlk","testb")
print DB.getAllVideosByPlaylist("loLWOCl7nlk")
print DB.getVideoById("KDk2341oEQQ")
|
klahnakoski/MySQL-to-S3
|
vendor/pyLibrary/env/pulse.py
|
Python
|
mpl-2.0
| 7,554 | 0.00331 |
# encoding: utf-8
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Author: Kyle Lahnakoski (kyle@lahnakoski.com)
#
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
import datetime
from socket import timeout as socket_timeout
from kombu import Connection, Producer, Exchange
from pytz import timezone
from mozillapulse.utils import time_to_string
from mo_logs import constants
from pyLibrary import jsons
from mo_logs.exceptions import Except, suppress_exception
from mo_logs import Log
from mo_dots import wrap, coalesce, Data, set_default
from mo_kwargs import override
from mo_threads import Thread, Lock
from mozillapulse.consumers import GenericConsumer
count_locker=Lock()
count=0
class Consumer(Thread):
@override
    def __init__(
        self,
exchange, # name of the Pulse exchange
topic, # message name pattern to subscribe to ('#' is wildcard)
        target=None,  # WILL BE CALLED WITH PULSE PAYLOADS AND ack() IF COMPLETED WITHOUT EXCEPTION
target_queue=None, # (aka self.queue) WILL BE FILLED WITH PULSE PAYLOADS
host='pulse.mozilla.org', # url to connect,
        port=5671,  # tcp port
user=None,
password=None,
vhost="/",
start=0, # USED AS STARTING POINT FOR ASSIGNING THE _meta.count ATTRIBUTE
ssl=True,
applabel=None,
heartbeat=False, # True to also get the Pulse heartbeat message
durable=False, # True to keep queue after shutdown
serializer='json',
broker_timezone='GMT',
kwargs=None
):
global count
count = coalesce(start, 0)
self.target_queue = target_queue
self.pulse_target = target
        if (target_queue is None and target is None) or (target_queue is not None and target is not None):
Log.error("Expecting a queue (for fast digesters) or a target (for slow digesters)")
Thread.__init__(self, name="Pulse consumer for " + kwargs.exchange, target=self._worker)
self.settings = kwargs
kwargs.callback = self._got_result
kwargs.user = coalesce(kwargs.user, kwargs.username)
        kwargs.applabel = coalesce(kwargs.applabel, kwargs.queue, kwargs.queue_name)
kwargs.topic = topic
self.pulse = ModifiedGenericConsumer(kwargs, connect=True, **kwargs)
self.start()
def _got_result(self, data, message):
global count
data = wrap(data)
with count_locker:
Log.note("{{count}} from {{exchange}}", count=count, exchange=self.pulse.exchange)
data._meta.count = count
data._meta.exchange = self.pulse.exchange
count += 1
if self.settings.debug:
Log.note("{{data}}", data= data)
if self.target_queue != None:
try:
self.target_queue.add(data)
message.ack()
except Exception as e:
e = Except.wrap(e)
if not self.target_queue.closed: # EXPECTED TO HAPPEN, THIS THREAD MAY HAVE BEEN AWAY FOR A WHILE
raise e
else:
try:
self.pulse_target(data)
message.ack()
except Exception as e:
Log.warning("Problem processing pulse (see `data` in structured log)", data=data, cause=e)
def _worker(self, please_stop):
def disconnect():
with suppress_exception:
self.target_queue.close()
Log.note("stop put into queue")
self.pulse.disconnect()
Log.note("pulse listener was given a disconnect()")
please_stop.on_go(disconnect)
while not please_stop:
try:
self.pulse.listen()
except Exception as e:
if not please_stop:
Log.warning("Pulse had problem (Have you set your Pulse permissions correctly?", e)
Log.note("pulse listener is done")
def __exit__(self, exc_type, exc_val, exc_tb):
Log.note("clean pulse exit")
self.please_stop.go()
with suppress_exception:
self.target_queue.close()
Log.note("stop put into queue")
try:
self.pulse.disconnect()
except Exception as e:
Log.warning("Can not disconnect during pulse exit, ignoring", e)
Thread.__exit__(self, exc_type, exc_val, exc_tb)
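# Illustrative use of the slow-digester form (exchange name and credentials
# are placeholders, not part of this module). Constructing the Consumer
# starts its listener thread immediately:
#
#   def handle(payload):
#       Log.note("message {{num}} received", num=payload._meta.count)
#
#   Consumer(
#       exchange='exchange/build/normalized',
#       topic='#',
#       target=handle,
#       user='pulse-user',
#       password='pulse-password',
#   )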
class Publisher(object):
"""
Mimic GenericPublisher https://github.com/bhearsum/mozillapulse/blob/master/mozillapulse/publishers.py
"""
@override
def __init__(
self,
exchange, # name of the Pulse exchange
host='pulse.mozilla.org', # url to connect,
port=5671, # tcp port
user=None,
password=None,
vhost="/",
start=0, # USED AS STARTING POINT FOR ASSIGNING THE _meta.count ATTRIBUTE
ssl=True,
applabel=None,
heartbeat=False, # True to also get the Pulse heartbeat message
durable=False, # True to keep queue after shutdown
serializer='json',
broker_timezone='GMT',
kwargs=None
):
self.settings = kwargs
self.connection = None
self.count = 0
def connect(self):
if not self.connection:
self.connection = Connection(
hostname=self.settings.host,
port=self.settings.port,
userid=self.settings.user,
password=self.settings.password,
virtual_host=self.settings.vhost,
ssl=self.settings.ssl
)
def disconnect(self):
if self.connection:
self.connection.release()
self.connection = None
def send(self, topic, message):
"""Publishes a pulse message to the proper exchange."""
if not message:
Log.error("Expecting a message")
message._prepare()
if not self.connection:
self.connect()
producer = Producer(
channel=self.connection,
exchange=Exchange(self.settings.exchange, type='topic'),
routing_key=topic
)
# The message is actually a simple envelope format with a payload and
# some metadata.
final_data = Data(
payload=message.data,
_meta=set_default({
'exchange': self.settings.exchange,
'routing_key': message.routing_key,
'serializer': self.settings.serializer,
'sent': time_to_string(datetime.datetime.now(timezone(self.settings.broker_timezone))),
'count': self.count
}, message.metadata)
)
producer.publish(jsons.scrub(final_data), serializer=self.settings.serializer)
self.count += 1
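# Illustrative use (the message object is a placeholder that follows the
# mozillapulse message interface used above: _prepare(), .data,
# .routing_key and .metadata):
#
#   pub = Publisher(exchange='exchange/my-app', user='me', password='secret')
#   pub.send('my.topic.key', message)
#   pub.disconnect()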
class ModifiedGenericConsumer(GenericConsumer):
def _drain_events_loop(self):
while True:
try:
self.connection.drain_events(timeout=self.timeout)
            except socket_timeout as e:
Log.warning("timeout! Restarting {{name}} pulse consumer.", name=self.exchange, cause=e)
try:
self.disconnect()
except Exception as f:
Log.warning("Problem with disconnect()", cause=f)
break
|
zbyna/plugin.video.sosac.ph
|
default.py
|
Python
|
gpl-2.0
| 2,056 | 0.000486 |
# -*- coding: UTF-8 -*-
# /*
# * Copyright (C) 2013 Libor Zoubek + jondas
# *
# *
# * This Program is free software; you can redistribute it and/or modify
# * it under the terms of the GNU General Public License as published by
# * the Free Software Foundation; either version 2, or (at your option)
# * any later version.
# *
# * This Program is distributed in the hope that it will be useful,
# * but WITHOUT ANY WARRANTY; without even the implied warranty of
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# * GNU General Public License for more details.
# *
# * You should have received a copy of the GNU General Public License
# * along with this program; see the file COPYING. If not, write to
# * the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
# * http://www.gnu.org/copyleft/gpl.html
# *
# */
import sys
import xbmcaddon
import xbmcutil
import util
from resources.lib.sosac import SosacContentProvider
from resources.lib.sutils import XBMCSosac
__scriptid__ = 'plugin.video.sosac.ph'
__scriptname__ = 'sosac.ph'
__addon__ = xbmcaddon.Addon(id=__scriptid__)
__language__ = __addon__.getLocalizedString
__set__ = __addon__.getSetting
settings = {'downloads': __set__('downloads'),
'quality': __set__('quality'),
'subs': __set__('subs') == 'true',
'add_subscribe': __set__('add_subscribe'),
'force-ch': __set__('force-ch') == 'true',
'force-sort': __set__('force-sort')}
reverse_eps = __set__('order-episodes') == '0'
force_english = __set__('force-english') == 'true'
use_memory_cache = __set__('use-memory-cache') == 'true'
util.info("URL: " + sys.argv[2])
params = util.params()
if params == {}:
xbmcutil.init_usage_reporting(__scriptid__)
util.info("Running sosac provider with params: " + str(params))
XBMCSosac(SosacContentProvider(reverse_eps=reverse_eps, force_english=force_english,
use_memory_cache=use_memory_cache), settings,
__addon__).run(params)
|
partp/gtg-services
|
GTG/gtk/backends_dialog/parameters_ui/passwordui.py
|
Python
|
gpl-3.0
| 3,286 | 0 |
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Getting Things GNOME! - a personal organizer for the GNOME desktop
# Copyright (c) 2008-2013 - Lionel Dricot & Bertrand Rousseau
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
# -----------------------------------------------------------------------------
from gi.repository import Gtk
from GTG import _
class PasswordUI(Gtk.Box):
'''Widget displaying a gtk.Label and a textbox to input a password'''
    def __init__(self, req, backend, width):
'''Creates the gtk widgets and loads the current password in the text
field
@param req: a Requester
@param backend: a backend object
@param width: the width of the Gtk.Label object
'''
super(PasswordUI, self).__init__()
self.backend = backend
self.req = req
self._populate_gtk(width)
self._load_password()
self._connect_signals()
def _populate_gtk(self, width):
'''Creates the text box and the related label
@param width: the width of the Gtk.Label object
'''
password_label = Gtk.Label(label=_("Password:"))
password_label.set_alignment(xalign=0, yalign=0.5)
password_label.set_size_request(width=width, height=-1)
self.pack_start(password_label, False, True, 0)
align = Gtk.Alignment.new(0, 0.5, 1, 0)
align.set_padding(0, 0, 10, 0)
self.pack_start(align, True, True, 0)
self.password_textbox = Gtk.Entry()
align.add(self.password_textbox)
def _load_password(self):
'''Loads the password from the backend'''
password = self.backend.get_parameters()['password']
self.password_textbox.set_invisible_char('*')
self.password_textbox.set_visibility(False)
self.password_textbox.set_text(password)
def _connect_signals(self):
'''Connects the gtk signals'''
self.password_textbox.connect('changed', self.on_password_modified)
def commit_changes(self):
'''Saves the changes to the backend parameter ('password')'''
password = self.password_textbox.get_text()
self.backend.set_parameter('password', password)
def on_password_modified(self, sender):
''' Signal callback, executed when the user edits the password.
Disables the backend. The user will re-enable it to confirm the changes
(s)he made.
@param sender: not used, only here for signal compatibility
'''
if self.backend.is_enabled() and not self.backend.is_default():
self.req.set_backend_enabled(self.backend.get_id(), False)
|
rcosnita/fantastico
|
fantastico/samples/simple_component/simple_urls.py
|
Python
|
mit
| 1,965 | 0.006107 |
'''
Copyright 2013 Cosnita Radu Viorel
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
.. codeauthor:: Radu Viorel Cosnita <radu.cosnita@gmail.com>
.. py:module:: fantastico.samples.simple_component.simple_urls
'''
from fantastico.mvc.base_controller import BaseController
from fantastico.mvc.controller_decorators import ControllerProvider, Controller
from webob.response import Response
@ControllerProvider()
class SampleUrlsController(BaseController):
'''This class provides some urls with limited functionality in order to enrich the samples from fantastico framework.'''
@Controller(url="/simple-component/foreign-component-reusage")
def refence_external_component(self, request):
        '''This method showcases external component reusage with template overriding. Take a look at the template of this
controller.'''
print(request.content_type)
content = self.load_template("/foreign_component_reusage.html")
return Response(content)
|
anhstudios/swganh
|
data/scripts/templates/object/intangible/pet/shared_3po_protocol_droid_silver.py
|
Python
|
mit
| 438 | 0.047945 |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Intangible()
result.template = "object/intangible/pet/shared_3po_protocol_droid_silver.iff"
result.attribute_template_id = -1
result.stfName("","")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
DayGitH/Python-Challenges
|
DailyProgrammer/20120330B.py
|
Python
|
mit
| 3,384 | 0.001773 |
"""
Write a program that will help you play poker by telling you what kind of hand you have.

Input:
The first line of input contains the number of test cases (no more than 20). Each test case consists of one line - five
space separated cards. Each card is represented by a two-letter (or digit) word. The first character is the rank
(A,K,Q,J,T,9,8,7,6,5,4,3 or 2), the second character is the suit (S,H,D,C standing for spades, hearts, diamonds and
clubs). The cards can be in any order (but they will not repeat).
Output:
For each test case output one line describing the type of a hand, exactly like in the list above.
"""
rank = ['A', 'K', 'Q', 'J', 'T', '9', '8', '7', '6', '5', '4', '3', '2']
suit = ['S', 'H', 'D', 'C']
def validate(val):
if len(val) != 5:
return False
for v in val:
if v[0] not in rank or v[1] not in suit:
return False
return True
def deck_sort(inp):
d = {'A': 0, 'K': 1, 'Q': 2, 'J': 3, 'T': 4, '9': 5, '8': 6, '7': 7, '6': 8, '5': 9, '4': 10, '3': 11, '2': 12}
return sorted(inp, key=lambda x: d[x[0]])
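# For instance, deck_sort(['2H', 'AS', 'TC']) returns ['AS', 'TC', '2H']:
# hands are ordered ace-high down to two before classification.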
def same_suit(inp):
for i in inp:
if not i[1] == inp[0][1]:
return False
return True
def same_rank(inp):
for i in inp:
if i[0] != inp[0][0]:
return False
return True
def consecutive(inp):
nxt = ''
for i in inp:
if not nxt:
nxt = rank.index(i[0]) + 1
elif rank[nxt] == i[0]:
nxt = rank.index(i[0]) + 1
else:
return False
return True
def test(inp):
if royal_flush(inp):
print('Royal Flush')
elif straight_flush(inp):
print('Straight Flush')
elif four_of_a_kind(inp):
print('Four of a Kind')
elif full_house(inp):
print('Full House')
elif flush(inp):
print('Flush')
elif straight(inp):
print('Straight')
elif three_of_a_kind(inp):
print('Three of a Kind')
elif two_pair(inp):
print('Two Pair')
elif one_pair(inp):
print('One Pair')
else:
print('"High" Card')
def straight_flush(inp):
return same_suit(inp) and consecutive(inp)
def royal_flush(inp):
return straight_flush(inp) and inp[0][0] == 'A'
def four_of_a_kind(inp):
return (same_rank(inp[:4])) or \
(same_rank(inp[1:]))
def full_house(inp):
return (same_rank(inp[:3]) and same_rank(inp[3:])) or \
(same_rank(inp[:2]) and same_rank(inp[2:]))
def flush(inp):
return same_suit(inp)
def straight(inp):
return consecutive(inp)
def three_of_a_kind(inp):
return (same_rank(inp[0:3])) or \
(same_rank(inp[1:4])) or \
(same_rank(inp[2:5]))
def two_pair(inp):
return (same_rank(inp[0:2]) and same_rank(inp[2:4])) or \
(same_rank(inp[0:2]) and same_rank(inp[3:5])) or \
(same_rank(inp[1:3]) and same_rank(inp[3:5]))
def one_pair(inp):
return (same_rank(inp[0:2])) or \
(same_rank(inp[1:3])) or \
(same_rank(inp[2:4])) or \
(same_rank(inp[3:5]))
if __name__ == '__main__':
number = int(input('Number of inputs: '))
print('Please enter combinations: ')
for i in range(number):
cards = input('> ').upper().split()
if validate(cards):
test(deck_sort(cards))
else:
print('invalid input')
|
joelfiddes/toposubv2
|
topoMAPP/utils/copytree.py
|
Python
|
gpl-3.0
| 724 | 0.002762 |
#!/usr/bin/env python
import os
import shutil
# def main(src, dst, symlinks=False, ignore=None):
# """Main entry point for the script."""
#     copytree(src, dst, symlinks=False, ignore=None)
def copytree(src, dst, symlinks=False, ignore=None):
for item in os.listdir(src):
s = os.path.join(src, item)
d = os.path.join(dst, item)
if os.path.isdir(s):
shutil.copytree(s, d, symlinks, ignore)
else:
shutil.copy2(s, d)
# # calling main
# if __name__ == '__main__':
# import sys
# src = sys.argv[1]
# dst = sys.argv[2]
# symlinks = sys.argv[3]
# ignore = sys.argv[4]
# main(src, dst, symlinks, ignore)
|
damngamerz/coala
|
coalib/settings/ConfigurationGathering.py
|
Python
|
agpl-3.0
| 17,309 | 0.000116 |
import os
import re
import sys
import logging
from coalib.collecting.Collectors import (
collect_all_bears_from_sections, filter_section_bears_by_languages)
from coalib.misc import Constants
from coalib.output.ConfWriter import ConfWriter
from coalib.output.printers.LOG_LEVEL import LOG_LEVEL
from coalib.parsing.CliParsing import parse_cli, check_conflicts
from coalib.parsing.ConfParser import ConfParser
from coalib.settings.Section import Section, extract_aspects_from_section
from coalib.settings.SectionFilling import fill_settings
from coalib.settings.Setting import Setting, path
from string import Template
COAFILE_OUTPUT = Template('$type \'$file\' $found!\n'
'Here\'s what you can do:\n'
'* add `--save` to generate a config file with '
'your current options\n'
'* add `-I` to suppress any use of config files\n')
def aspectize_sections(sections):
"""
Search for aspects related setting in a section, initialize it, and then
embed the aspects information as AspectList object into the section itself.
:param sections: List of section that potentially contain aspects setting.
:return: The new sections.
"""
for section_name, section in sections.items():
section.aspects = extract_aspects_from_section(section)
if section.aspects is not None and len(section.get('bears')):
logging.warning("'aspects' and 'bears' configuration is detected "
"in section '{}'. Aspect-based configuration will "
'takes priority and will overwrite any '
'explicitly listed bears'.format(section_name))
return sections
def merge_section_dicts(lower, higher):
"""
Merges the section dictionaries. The values of higher will take
precedence over the ones of lower. Lower will hold the modified dict in
the end.
:param lower: A section.
:param higher: A section which values will take precedence over the ones
from the other.
:return: The merged dict.
"""
for name in higher:
if name in lower:
lower[name].update(higher[name], ignore_defaults=True)
else:
# no deep copy needed
lower[name] = higher[name]
return lower
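# Illustration (hypothetical Section objects, not part of coala): with
# lower = {'a': s1} and higher = {'a': s2, 'b': s3},
# merge_section_dicts(lower, higher) updates s1 in place from s2 (with
# ignore_defaults=True), adds 'b' -> s3, and returns the mutated lower dict.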
def load_config_file(filename, log_printer, silent=False):
"""
Loads sections from a config file. Prints an appropriate warning if
it doesn't exist and returns a section dict containing an empty
default section in that case.
It assumes that the cli_sections are available.
:param filename: The file to load settings from.
:param log_printer: The log printer to log the warning/error to (in case).
:param silent: Whether or not to warn the user/exit if the file
doesn't exist.
:raises SystemExit: Exits when the given filename is invalid and is not the
default coafile. Only raised when ``silent`` is
``False``.
"""
filename = os.path.abspath(filename)
try:
return ConfParser().parse(filename)
except FileNotFoundError:
if not silent:
if os.path.basename(filename) == Constants.default_coafile:
log_printer.warn(COAFILE_OUTPUT
.substitute(type='Default coafile',
file=Constants.default_coafile,
found='not found'))
else:
log_printer.err(COAFILE_OUTPUT
.substitute(type='Requested coafile',
file=filename,
found='does not exist'))
sys.exit(2)
return {'default': Section('default')}
def save_sections(sections):
"""
Saves the given sections if they are to be saved.
:param sections: A section dict.
"""
default_section = sections['cli']
try:
if bool(default_section.get('save', 'false')):
conf_writer = ConfWriter(
str(default_section.get('config', Constants.default_coafile)))
else:
return
except ValueError:
conf_writer = ConfWriter(str(default_section.get('save', '.coafile')))
conf_writer.write_sections(sections)
conf_writer.close()
def warn_nonexistent_targets(targets, sections, log_printer):
"""
Prints out a warning on the given log printer for all targets that are
not existent within the given sections.
:param targets: The targets to check.
:param sections: The sections to search. (Dict.)
:param log_printer: The log printer to warn to.
"""
for target in targets:
if target not in sections:
log_printer.warn(
"The requested section '{section}' is not existent. "
'Thus it cannot be executed.'.format(section=target))
# Can't be summarized as python will evaluate conditions lazily, those
# functions have intended side effects though.
files_config_absent = warn_config_absent(sections, 'files', log_printer)
bears_config_absent = warn_config_absent(sections, 'bears', log_printer)
if files_config_absent or bears_config_absent:
raise SystemExit(2) # Invalid CLI options provided
def warn_config_absent(sections, argument, log_printer):
"""
Checks if the given argument is present somewhere in the sections and emits
a warning that code analysis can not be run without it.
:param sections: A dictionary of sections.
:param argument: The argument to check for, e.g. "files".
    :param log_printer: A log printer to emit the warning to.
:return: Returns a boolean True if the given argument
is present in the sections, else returns False.
"""
if all(argument not in section for section in sections.values()):
        log_printer.warn('coala will not run any analysis. Did you forget '
'to give the `--{}` argument?'.format(argument))
return True
return False
def load_configuration(arg_list, log_printer, arg_parser=None, args=None):
"""
Parses the CLI args and loads the config file accordingly, taking
default_coafile and the users .coarc into account.
:param arg_list: The list of CLI arguments.
:param log_printer: The LogPrinter object for logging.
:param arg_parser: An ``argparse.ArgumentParser`` instance used for
parsing the CLI arguments.
:param args: Alternative pre-parsed CLI arguments.
:return: A tuple holding (log_printer: LogPrinter, sections:
dict(str, Section), targets: list(str)). (Types
indicated after colon.)
"""
cli_sections = parse_cli(arg_list=arg_list, arg_parser=arg_parser,
args=args)
check_conflicts(cli_sections)
if (
bool(cli_sections['cli'].get('find_config', 'False')) and
str(cli_sections['cli'].get('config')) == ''):
cli_sections['cli'].add_or_create_setting(
Setting('config', re.escape(find_user_config(os.getcwd()))))
targets = []
# We don't want to store targets argument back to file, thus remove it
for item in list(cli_sections['cli'].contents.pop('targets', '')):
targets.append(item.lower())
if bool(cli_sections['cli'].get('no_config', 'False')):
sections = cli_sections
else:
base_sections = load_config_file(Constants.system_coafile, log_printer)
user_sections = load_config_file(
Constants.user_coafile,
log_printer,
silent=True)
default_config = str(base_sections['default'].get('config', '.coafile'))
user_config = str(user_sections['default'].get(
'config', default_config))
config = os.path.abspath(
str(cli_sections['cli'].get('config', user_config
|
pineapplemachine/migrates
|
test/test_reindex.py
|
Python
|
gpl-3.0
| 1,758 | 0.006826 |
import elasticsearch
import migrates
from .test_utils import callmigrates, iterate_test_data, remove_test_data
document_count = 1000
def insert_test_data(connection):
with migrates.Batch(connection, migrates.Logger()) as batch:
for i in range(0, document_count):
batch.add({
'_op_type': 'index',
'_index': 'migrates_test_reindex',
                '_type': 'test_' + str(i % 3),
'_id': str(i),
'_source': {'x': i}
})
def validate_test_data(connection, index):
docs = set()
    for document in iterate_test_data(connection, index=index):
docs.add(document['_source']['x'])
assert len(docs) == document_count
def __main__():
logger = migrates.Logger()
connection = elasticsearch.Elasticsearch()
logger.log('Removing old test data.')
remove_test_data(connection)
try:
logger.log('Inserting new test data.')
insert_test_data(connection)
logger.log('Reindexing data back into the same index.')
callmigrates('reindex migrates_test_reindex -y')
logger.log('Validating resulting data.')
validate_test_data(connection, index='migrates_test_reindex')
logger.log('Reindexing data into a different index.')
callmigrates('reindex "migrates_test_reindex=>migrates_test_reindex_2" -y')
logger.log('Validating resulting data.')
assert not connection.indices.exists('migrates_test_reindex')
validate_test_data(connection, index='migrates_test_reindex_2')
finally:
        logger.log('Cleaning up test data.')
remove_test_data(connection)
if __name__ == '__main__':
__main__()
|
anhstudios/swganh
|
data/scripts/templates/object/tangible/ship/crafted/droid_interface/shared_base_droid_interface_subcomponent_mk3.py
|
Python
|
mit
| 523 | 0.042065 |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/ship/crafted/droid_interface/shared_base_droid_interface_subcomponent_mk3.iff"
result.attribute_template_id = 8
result.stfName("space_crafting_n","base_droid_interface_subcomponent_mk3")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
miketung168/survey-dashboard
|
example_config.py
|
Python
|
mit
| 130 | 0 |
DB_CONFIG = "postgresql://user:password@localhost/db"
ROOT_DIR = "/path/to/project"
ADMIN_USER = "username"
ADMIN_PW = "password"
|
lgarren/spack
|
var/spack/repos/builtin/packages/lua-luafilesystem/package.py
|
Python
|
lgpl-2.1
| 2,512 | 0.000398 |
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class LuaLuafilesystem(Package):
"""LuaFileSystem is a Lua library developed to complement the set of
functions related to file systems offered by the standard Lua distribution.
LuaFileSystem offers a portable way to access the underlying directory
structure and file attributes.
LuaFileSystem is free software and uses the same license as Lua 5.1
"""
homepage = 'http://keplerproject.github.io/luafilesystem'
    url = 'https://github.com/keplerproject/luafilesystem/archive/v1_6_3.tar.gz'
version('1_6_3', 'bed11874cfded8b4beed7dd054127b24')
# The version constraint here comes from this post:
#
# https://www.perforce.com/blog/git-beyond-basics-using-shallow-clones
#
# where it is claimed that full shallow clone support was added @1.9
    depends_on('git@1.9.0:', type='build')
extends('lua')
def install(self, spec, prefix):
rockspec_fmt = join_path(self.stage.path,
'luafilesystem-{version.underscored}',
'rockspecs',
'luafilesystem-{version.dotted}-1.rockspec')
luarocks('--tree=' + prefix, 'install',
rockspec_fmt.format(version=self.spec.version))
|
sniemi/SamPy
|
sandbox/src1/TCSE3-3rd-examples/src/py/regex/fdmgrid.py
|
Python
|
bsd-2-clause
| 2,941 | 0.00136 |
#!/usr/bin/env python
"""interpret a comapct grid specification using regex"""
import re
# use a compact regular expression with nested OR expressions,
# and hence many groups, but name the outer (main) groups:
real_short1 = \
r'\s*(?P<lower>-?(\d+(\.\d*)?|\d*\.\d+)([eE][+\-]?\d+)?)\s*'
real_short2 = \
r'\s*(?P<upper>-?(\d+(\.\d*)?|\d*\.\d+)([eE][+\-]?\d+)?)\s*'
# regex for real interval [a,b] :
domain = r'\[' + real_short1 + ',' + real_short2 + r'\]'
# regex for integer interval [a:b] :
indices = r'\[\s*(-?\d+)\s*:\s*(-?\d+)\s*\]'
# test:
examples = ('domain=[0,10] indices=[0:11]',
'domain=[0.1,1.1]x[0,2E+00] indices=[1:21]x[1:101]',
'[0,1]x[0,2]x[-1,1.5] [1:21]x[1:11]x[-10:15]')
for ex in examples:
    print re.findall(indices, ex)
# a nested list is returned; requires nested group counting
print re.findall(domain, ex)
print
# work with compiled expressions and the groupindex dictionary to
# extract the named groups easily from the nested list that is
# returned from re.findall:
print 'work with groupindex:'
for ex in examples:
print re.findall(indices, ex)
c = re.compile(domain)
groups = c.findall(ex)
intervals = []
for i in range(len(groups)):
intervals.append(
(groups[i][c.groupindex['lower']-1],
groups[i][c.groupindex['upper']-1]))
print intervals
print
# work with non-capturing parenthesis of the form (?:pattern)
real_short1 = \
r'\s*(?P<lower>-?(?:\d+(?:\.\d*)?|\d*\.\d+)(?:[eE][+\-]?\d+)?)\s*'
real_short2 = \
r'\s*(?P<upper>-?(?:\d+(?:\.\d*)?|\d*\.\d+)(?:[eE][+\-]?\d+)?)\s*'
# regex for real interval [a,b] :
domain = r'\[' + real_short1 + ',' + real_short2 + r'\]'
print 'non-capturing groups:'
for ex in examples:
print re.findall(domain, ex)
print
# avoid parentheses, i.e., nested OR expressions:
real_sn = r'-?\d\.?\d*[Ee][+\-][0-9]+'
real_dn = r'-?\d*\.\d*'
real_in = r'-?\d+'
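# For example (illustrative matches): real_sn matches '-1.5E+03',
# real_dn matches '3.14', and real_in matches '-42'.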
real1 = \
r'\s*(?P<lower>' + real_sn + '|' + real_dn + '|' + real_in + ')\s*'
real2 = \
r'\s*(?P<upper>' + real_sn + '|' + real_dn + '|' + real_in + ')\s*'
# regex for real interval [a,b] :
domain = r'\[' + real1 + ',' + real2 + r'\]'
# regex for integer interval [a:b] :
indices = r'\[\s*(-?\d+)\s*:\s*(-?\d+)\s*\]'
print '\navoid so many parenthesis (just two groups now for each interval):'
for ex in examples:
print re.findall(indices, ex)
print re.findall(domain, ex)
print
# much simpler _working_ versions:
domain = r'\[([^,]*),([^\]]*)\]'
indices = r'\[([^:,]*):([^\]]*)\]'
print '\nsimpler regular expressions:\n', domain, indices
for ex in examples:
print re.findall(indices, ex)
print re.findall(domain, ex)
print
# these give wrong results
domain = r'\[(.*?),(.*?)\]'
indices = r'\[(.*?):(.*?)\]'
print '\nalternative; simpler regular expressions:\n', domain, indices
for ex in examples:
print re.findall(indices, ex)
print re.findall(domain, ex)
print
|
rrmelcer/swissatest-analysis
|
gui/qt4/credits.py
|
Python
|
apache-2.0
| 19,884 | 0.004024 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# -----------------------------------------------
# ----> Computer Aided Optical Analysis <----
# -----------------------------------------------
# (c) 2015 by Swissatest Testmaterialien AG
# http://www.swissatest.ch
# -----------------------------------------------
# Developed 2015 by
__author__ = 'Raoul René Melcer'
# raoul.rene.melcer@webservices-consulting.ch
# http://webservices-consulting.ch
# -----------------------------------------------
# License: Apache 2
# http://www.apache.org/licenses/LICENSE-2.0
# -----------------------------------------------
# File: credits.py
__date__ = '$03.09.2015 11:30:42$'
# https://github.com/rrmelcer/swissatest-analysis
# -----------------------------------------------
# Descripion:
# Show the application credits in GUI
# -----------------------------------------------
import logging
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_Credits(QtGui.QWidget):
def __init__(self):
QtGui.QWidget.__init__(self)
self.setupUi(self)
logging.debug('{0} module init'.format(self.__class__))
def __del__(self):
logging.debug('{0} module del'.format(self.__class__))
def setupUi(self, dia_credits):
dia_credits.setObjectName(_fromUtf8("dia_credits"))
dia_credits.resize(420, 800)
dia_credits.setMinimumSize(QtCore.QSize(420, 700))
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/doc/img/icons/icon.ico")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
dia_credits.setWindowIcon(icon)
dia_credits.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
self.gridLayout = QtGui.QGridLayout(dia_credits)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.label_4 = QtGui.QLabel(dia_credits)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.label_4.setFont(font)
self.label_4.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
        self.label_4.setOpenExternalLinks(True)
        self.label_4.setTextInteractionFlags(QtCore.Qt.LinksAccessibleByKeyboard|QtCore.Qt.LinksAccessibleByMouse|QtCore.Qt.TextBrowserInteraction|QtCore.Qt.TextSelectableByKeyboard|QtCore.Qt.TextSelectableByMouse)
self.label_4.setObjectName(_fromUtf8("label_4"))
self.gridLayout.addWidget(self.label_4, 12, 1, 1, 1)
self.line_4 = QtGui.QFrame(dia_credits)
self.line_4.setFrameShape(QtGui.QFrame.HLine)
self.line_4.setFrameShadow(QtGui.QFrame.Sunken)
self.line_4.setObjectName(_fromUtf8("line_4"))
self.gridLayout.addWidget(self.line_4, 5, 0, 1, 2)
self.label_3 = QtGui.QLabel(dia_credits)
self.label_3.setOpenExternalLinks(True)
self.label_3.setTextInteractionFlags(QtCore.Qt.LinksAccessibleByKeyboard|QtCore.Qt.LinksAccessibleByMouse|QtCore.Qt.TextBrowserInteraction|QtCore.Qt.TextSelectableByKeyboard|QtCore.Qt.TextSelectableByMouse)
self.label_3.setObjectName(_fromUtf8("label_3"))
self.gridLayout.addWidget(self.label_3, 3, 1, 1, 1)
self.label_2 = QtGui.QLabel(dia_credits)
self.label_2.setMaximumSize(QtCore.QSize(50, 50))
self.label_2.setText(_fromUtf8(""))
self.label_2.setPixmap(QtGui.QPixmap(_fromUtf8(":/logos/doc/img/logos/users.png")))
self.label_2.setScaledContents(True)
self.label_2.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.gridLayout.addWidget(self.label_2, 3, 0, 1, 1)
self.line = QtGui.QFrame(dia_credits)
self.line.setFrameShape(QtGui.QFrame.HLine)
self.line.setFrameShadow(QtGui.QFrame.Sunken)
self.line.setObjectName(_fromUtf8("line"))
self.gridLayout.addWidget(self.line, 2, 0, 1, 2)
self.gridLayout_2 = QtGui.QGridLayout()
self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2"))
spacerItem = QtGui.QSpacerItem(458, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.gridLayout_2.addItem(spacerItem, 0, 0, 1, 1)
self.btn_close_credits = QtGui.QPushButton(dia_credits)
icon = QtGui.QIcon.fromTheme(_fromUtf8("window-close"))
self.btn_close_credits.setIcon(icon)
self.btn_close_credits.setFlat(False)
self.btn_close_credits.setObjectName(_fromUtf8("btn_close_credits"))
self.gridLayout_2.addWidget(self.btn_close_credits, 0, 1, 1, 1)
self.gridLayout.addLayout(self.gridLayout_2, 15, 0, 1, 2)
self.line_2 = QtGui.QFrame(dia_credits)
self.line_2.setFrameShape(QtGui.QFrame.HLine)
self.line_2.setFrameShadow(QtGui.QFrame.Sunken)
self.line_2.setObjectName(_fromUtf8("line_2"))
self.gridLayout.addWidget(self.line_2, 8, 0, 1, 2)
self.label_8 = QtGui.QLabel(dia_credits)
self.label_8.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.label_8.setOpenExternalLinks(True)
self.label_8.setTextInteractionFlags(QtCore.Qt.LinksAccessibleByKeyboard|QtCore.Qt.LinksAccessibleByMouse|QtCore.Qt.TextBrowserInteraction|QtCore.Qt.TextSelectableByKeyboard|QtCore.Qt.TextSelectableByMouse)
self.label_8.setObjectName(_fromUtf8("label_8"))
self.gridLayout.addWidget(self.label_8, 0, 1, 1, 1)
self.label_7 = QtGui.QLabel(dia_credits)
self.label_7.setMaximumSize(QtCore.QSize(50, 50))
self.label_7.setText(_fromUtf8(""))
self.label_7.setPixmap(QtGui.QPixmap(_fromUtf8(":/logos/doc/img/logos/licence.png")))
self.label_7.setScaledContents(True)
self.label_7.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.label_7.setObjectName(_fromUtf8("label_7"))
self.gridLayout.addWidget(self.label_7, 9, 0, 1, 1)
self.label = QtGui.QLabel(dia_credits)
self.label.setFrameShadow(QtGui.QFrame.Plain)
self.label.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.label.setOpenExternalLinks(True)
self.label.setTextInteractionFlags(QtCore.Qt.LinksAccessibleByKeyboard|QtCore.Qt.LinksAccessibleByMouse|QtCore.Qt.TextBrowserInteraction|QtCore.Qt.TextSelectableByKeyboard|QtCore.Qt.TextSelectableByMouse)
self.label.setObjectName(_fromUtf8("label"))
self.gridLayout.addWidget(self.label, 6, 1, 1, 1)
self.label_9 = QtGui.QLabel(dia_credits)
self.label_9.setEnabled(True)
self.label_9.setMaximumSize(QtCore.QSize(50, 50))
self.label_9.setText(_fromUtf8(""))
self.label_9.setPixmap(QtGui.QPixmap(_fromUtf8(":/logos/doc/img/logos/applications.png")))
self.label_9.setScaledContents(True)
self.label_9.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.label_9.setObjectName(_fromUtf8("label_9"))
self.gridLayout.addWidget(self.label_9, 0, 0, 1, 1)
self.line_3 = QtGui.QFrame(dia_credits)
self.line_3.setFrameShape(QtGui.QFrame.HLine)
self.line_3.setFrameShadow(QtGui.QFrame.Sunken)
self.line_3.setObjectName(_fromUtf8("line_3"))
self.gridLayout.addWidget(self.line_3, 11, 0, 1, 2)
self.label_5 = QtGui.QLabel(dia_credits)
self.label_5.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.label_5.setObjectName(_fromUtf8("label_5"))
self.gridLayout.addWidget(self.label_5, 9, 1, 1, 1)
self.label_10 = QtGui.QLabel(dia_credits)
self.label_10.setMaximumSize(QtCore.QSize(50, 50))
self.label_10.setText(_fromUtf8(""))
self.label_10
|
beppec56/core
|
uitest/writer_tests/tdf92611.py
|
Python
|
gpl-3.0
| 647 | 0.003091 |
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
from uitest.framework import UITestCase
import time
class tdf92611(UITestCase):
    def test_launch_and_close_bibliography(self):
self.ui_test.create_doc_in_start_center("writer")
self.xUITest.executeCommand(".uno:BibliographyComponent")
time.sleep(2)
self.xUITest.executeCommand(".uno:CloseWin")
|
time.sleep(2)
self.ui_test.close_doc()
# vim: set shiftwidth=4 softtabstop=4 expandtab:
|
piyush82/icclab-rcb-web
|
virtualenv/lib/python2.7/site-packages/pip/vendor/__init__.py
|
Python
|
apache-2.0
| 264 | 0 |
"""
pip.vendor is for vendoring dependencies of pip to prevent needing pip to
depend on something external.
Files inside of pip.vendor should be considered immutable and should only be
updated to versions from upstream.
"""
from __future__ import absolute_import
|
mjsir911/pymessage
|
client.py
|
Python
|
bsd-3-clause
| 1,124 | 0.010676 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import uuid
import socket
import time
__appname__ = "pymessage"
__author__ = "Marco Sirabella, Owen Davies"
__copyright__ = ""
__credits__ = "Marco Sirabella, Owen Davies"
__license__ = "new BSD 3-Clause"
__version__ = "0.0.3"
__maintainers__ = "Marco Sirabella, Owen Davies"
__email__ = "msirabel@gmail.com, dabmancer@dread.life"
__status__ = "Prototype"
__module__ = ""
address = ('localhost', 5350)
lguid = '0'
def connect():
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(address)
sock.send((hex(uuid.getnode()) + '\n').encode() + bytes(False)) # ik this is such BAD CODE
print("sent")
sock.send(lguid.encode())
print('sent latest guid: {}'.format(lguid))
# contents = "latest guid +5: {}".format(lguid + '5')
msg = True
fullmsg = ''
while msg:
msg = sock.recv(16).decode() # low byte count for whatever reason
#print('mes rec: {}'.format(msg))
fullmsg += msg
print('received message: {}'.format(fullmsg))
sock.close()
connect()
|
HubSpot/tcollector
|
collectors/0/netstat.py
|
Python
|
gpl-3.0
| 17,157 | 0.001224 |
#!/usr/bin/python
# This file is part of tcollector.
# Copyright (C) 2011 The tcollector Authors.
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or (at your
# option) any later version. This program is distributed in the hope that it
# will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser
# General Public License for more details. You should have received a copy
# of the GNU Lesser General Public License along with this program. If not,
# see <http://www.gnu.org/licenses/>.
# Note: I spent many hours reading the Linux kernel's source code to infer the
# exact meaning of some of the obscure but useful metrics it exposes. The
# descriptions of the metrics are correct to the best of my knowledge, but it's
# not always easy to make sense of the Linux kernel's code.  Please report any
# inaccuracy you find. -- tsuna.
"""Socket allocation and network statistics for TSDB.
Metrics from /proc/net/sockstat:
- net.sockstat.num_sockets: Number of sockets allocated (only TCP).
- net.sockstat.num_timewait: Number of TCP sockets currently in
TIME_WAIT state.
- net.sockstat.sockets_inuse: Number of sockets in use (TCP/UDP/raw).
- net.sockstat.num_orphans: Number of orphan TCP sockets (not attached
to any file descriptor).
- net.sockstat.memory: Memory allocated for this socket type (in bytes).
- net.sockstat.ipfragqueues: Number of IP flows for which there are
currently fragments queued for reassembly.
Metrics from /proc/net/netstat (`netstat -s' command):
- net.stat.tcp.abort: Number of connections that the kernel had to abort.
type=memory is especially bad, the kernel had to drop a connection due to
having too many orphaned sockets. Other types are normal (e.g. timeout).
- net.stat.tcp.abort.failed: Number of times the kernel failed to abort a
connection because it didn't even have enough memory to reset it (bad).
- net.stat.tcp.congestion.recovery: Number of times the kernel detected
spurious retransmits and was able to recover part or all of the CWND.
- net.stat.tcp.delayedack: Number of delayed ACKs sent of different types.
- net.stat.tcp.failed_accept: Number of times a connection had to be dropped
after the 3WHS. reason=full_acceptq indicates that the application isn't
accepting connections fast enough. You should see SYN cookies too.
- net.stat.tcp.invalid_sack: Number of invalid SACKs we saw of diff types.
(requires Linux v2.6.24-rc1 or newer)
- net.stat.tcp.memory.pressure: Number of times a socket entered the
"memory pressure" mode (not great).
- net.stat.tcp.memory.prune: Number of times a socket had to discard
received data due to low memory conditions (bad).
- net.stat.tcp.packetloss.recovery: Number of times we recovered from packet
loss by type of recovery (e.g. fast retransmit vs SACK).
- net.stat.tcp.receive.queue.full: Number of times a received packet had to
be dropped because the socket's receive queue was full.
(requires Linux v2.6.34-rc2 or newer)
- net.stat.tcp.reording: Number of times we detected re-ordering and how.
- net.stat.tcp.syncookies: SYN cookies (both sent & received).
"""
import re
import resource
import sys
import time
from collectors.lib import utils
def main():
"""Main loop"""
sys.stdin.close()
interval = 15
page_size = resource.getpagesize()
try:
sockstat = open("/proc/net/sockstat")
netstat = open("/proc/net/netstat")
snmp = open("/proc/net/snmp")
except IOError, e:
print >>sys.stderr, "open failed: %s" % e
return 13 # Ask tcollector to not re-start us.
utils.drop_privileges()
# Note: up until v2.6.37-rc2 most of the values were 32 bits.
# The first value is pretty useless since it accounts for some
# socket types but not others. So we don't report it because it's
# more confusing than anything else and it's not well documented
# what type of sockets are or aren't included in this count.
regexp = re.compile("sockets: used \d+\n"
"TCP: inuse (?P<tcp_inuse>\d+) orphan (?P<orphans>\d+)"
" tw (?P<tw_count>\d+) alloc (?P<tcp_sockets>\d+)"
" mem (?P<tcp_pages>\d+)\n"
"UDP: inuse (?P<udp_inuse>\d+)"
# UDP memory accounting was added in v2.6.25-rc1
"(?: mem (?P<udp_pages>\d+))?\n"
# UDP-Lite (RFC 3828) was added in v2.6.20-rc2
"(?:UDPLITE: inuse (?P<udplite_inuse>\d+)\n)?"
"RAW: inuse (?P<raw_inuse>\d+)\n"
"FRAG: inuse (?P<ip_frag_nqueues>\d+)"
" memory (?P<ip_frag_mem>\d+)\n")
def print_sockstat(metric, value, tags=""): # Note: tags must start with ' '
if value is not None:
print "net.sockstat.%s %d %s%s" % (metric, ts, value, tags)
# If a line in /proc/net/{netstat,snmp} doesn't start with a word in that
# dict, we'll ignore it. We use the value to build the metric name.
known_statstypes = {
"TcpExt:": "tcp",
"IpExt:": "ip", # We don't collect anything from here for now.
"Ip:": "ip", # We don't collect anything from here for now.
"Icmp:": "icmp", # We don't collect anything from here for now.
"IcmpMsg:": "icmpmsg", # We don't collect anything from here for now.
"Tcp:": "tcp", # We don't collect anything from here for now.
"Udp:": "udp",
"UdpLite:": "udplite", # We don't collect anything from here for now.
}
# Any stat in /proc/net/{netstat,snmp} that doesn't appear in this dict will
# be ignored. If we find a match, we'll use the (metricname, tags).
tcp_stats = {
# An application wasn't able to accept a connection fast enough, so
# the kernel couldn't store an entry in the queue for this connection.
# Instead of dropping it, it sent a cookie to the client.
"SyncookiesSent": ("syncookies", "type=sent"),
# After sending a cookie, it came back to us and passed the check.
"SyncookiesRecv": ("syncookies", "type=received"),
# After sending a cookie, it came back to us but looked invalid.
"SyncookiesFailed": ("syncookies", "type=failed"),
# When a socket is using too much memory (rmem), the kernel will first
# discard any out-of-order packet that has been queued (with SACK).
"OfoPruned": ("memory.prune", "type=drop_ofo_queue"),
# If the kernel is really really desperate and cannot give more memory
# to this socket even after dropping the ofo queue, it will simply
# discard the packet it received. This is Really Bad.
"RcvPruned": ("memory.prune", "type=drop_received"),
# We waited for another packet to send an ACK, but didn't see any, so
# a timer ended up sending a delayed ACK.
"DelayedACKs": ("delayedack", "type=sent"),
# We wanted to send a delayed ACK but failed because the socket was
# locked. So the timer was reset.
"DelayedACKLocked": ("delayedack", "type=locked"),
# We sent a delayed and duplicated ACK because the remote peer
# retransmitted a packet, thinking that it didn't get to us.
"DelayedACKLost": ("delayedack", "type=lost"),
# We completed a 3WHS but couldn't put the socket on the accept queue,
# so we had to discard the connection.
"ListenOverflows": ("failed_accept", "reason=full_acceptq"),
# We couldn't accept a connection because one of: we had no route to
# the destination, we failed to allocate a socket, we failed to
# allocate a new local port bind bucket. Note: this counter
# also include all the increments made to ListenOverflows...
"ListenDrops": ("failed_accept", "reason=other"),
|
getnamo/UnrealEnginePython
|
Content/Scripts/upycmd.py
|
Python
|
mit
| 2,925 | 0.037949 |
#ue.exec('pip2.py')
import subprocess
import sys
import os
import unreal_engine as ue
import _thread as thread
#ue.log(sys.path)
_problemPaths = ['']
def NormalizePaths():
problemPaths = _problemPaths
#replace '/' to '\\'
for i in range(len(sys.path)):
currentPath = sys.path[i]
sys.path[i] = currentPath.replace('\\','/')
#find additional problem paths such as engine bin
currentPath = sys.path[i]
if('Engine' in currentPath and 'Epic Games' in currentPath):
_problemPaths.append(currentPath)
#cleanup
for path in problemPaths:
if path in sys.path:
sys.path.remove(path)
#define some convenience paths
def PythonHomePath():
for path in sys.path:
normalizedPath = AsAbsPath(path)
if ('UnrealEnginePython' in normalizedPath and
normalizedPath.endswith('Binaries/Win64')):
return path
#return sys.path[1]
return "not found"
def PythonHomeScriptsPath():
return AsAbsPath(PythonHomePath() + "/Scripts")
def PythonPluginScriptPath():
for path in sys.path:
normalizedPath = AsAbsPath(path)
if ('UnrealEnginePython' in normalizedPath and
normalizedPath.endswith('Content/Scripts')):
return path
return "not found"
def PythonProjectScriptPath():
relativePath = PythonPluginScriptPath() + "/../../../../Content/Scripts";
    return AsAbsPath(relativePath);
def AsAbsPath(path):
return os.path.abspath(path).replace('\\','/')
_PythonHomePath = PythonHomePath()
def FolderCommand(folder):
#replace backslashes
folder = folder.replace('/','\\')
changefolder = "cd /d \
|
"" + folder + "\" & "
return changefolder
#main public function
def run(process, path=_PythonHomePath, verbose=True):
#todo: change folder
fullcommand = FolderCommand(path) + process
if verbose:
ue.log("Started cmd <" + fullcommand + ">")
stdoutdata = subprocess.getstatusoutput(fullcommand)
if verbose:
ue.log("cmd Result: ")
ue.log(stdoutdata[1])
return stdoutdata[1] #return the data for dependent functions
def runStreaming(process, callback=None, path=_PythonHomePath, verbose=True):
#todo: change folder
fullcommand = FolderCommand(path) + process
if verbose:
print("Started cmd <" + fullcommand + ">")
#streaming version
    # run in text mode so readline() yields str and the '' sentinel below
    # actually terminates the loop; iterate the Popen object's stdout (the
    # original mistakenly read from the `process` command string)
    popenobj = subprocess.Popen(fullcommand, stdout=subprocess.PIPE, universal_newlines=True)
    output = ''
    for line in iter(popenobj.stdout.readline, ''):
#sys.stdout.write(line)
print(line)
output += line
if verbose:
print("cmd Result: ")
print(output)
return output #return the data for dependent functions
#convenience override
def runLogOutput(process, path=_PythonHomePath):
fullcommand = FolderCommand(path) + process
stdoutdata = subprocess.getstatusoutput(fullcommand)
ue.log(stdoutdata[1])
return stdoutdata[1]
#convenience wrappers
def dir(path=_PythonHomePath):
run('dir', path)
def ls(path=_PythonHomePath):
dir(path)
def md(folder, path=_PythonHomePath):
run('md ' + folder, path)
def mkdir(folder, path=_PythonHomePath):
md(folder, path)
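# Hedged usage sketch (not part of the original file): run a command from the
# plugin's Scripts folder and log its output inside the editor.
#   out = run('pip --version', path=PythonHomeScriptsPath())
#   ue.log(out)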
|
frasertweedale/drill
|
py/oldest_unique.py
|
Python
|
mit
| 1,106 | 0 |
class Node:
    def __init__(self, value):
self.value = value
self.next = None
self.prev = None
class OldestUnique:
    def __init__(self):
self.uniq = {}
self.seen = set()
self.head = None
self.tail = None
def feed(self, value):
if value in self.uniq:
# unlink from list but leave in uniq dict
node = self.uniq[value]
if node.prev is not None:
node.prev.next = node.next
else:
self.head = node.next
if node.next is not None:
node.next.prev = node.prev
else:
self.tail = node.prev
elif value not in self.seen:
node = Node(value)
if self.head is None:
self.tail = node
else:
node.next = self.head
self.head.prev = node
self.head = node
self.uniq[value] = node
self.seen.add(value)
def query(self):
if self.tail is not None:
return self.tail.value
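# Hedged usage sketch (not part of the original file): feed a stream of
# values, then query the oldest value that has appeared exactly once.
if __name__ == '__main__':
    ou = OldestUnique()
    for v in [3, 1, 4, 1, 5, 9, 3]:
        ou.feed(v)
    print(ou.query())  # 4 -- the 3s and 1s repeat, so 4 is the oldest unique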
|
sdrogers/ms2ldaviz
|
ms2ldaviz/basicviz/migrations/0074_auto_20180831_1206.py
|
Python
|
mit
| 670 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2018-08-31 12:06
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('basicviz', '0073_auto_20180831_1203'),
]
operations = [
migrations.AddField(
model_name='experiment',
name='csv_id_column',
field=models.CharField(blank=True, max_length=128, null=True),
),
migrations.AddField(
model_name='experiment',
name='ms2_id_field',
field=models.CharField(blank=True, max_length=128, null=True),
),
]
|
indera/barebones-flask-app
|
tests/base_test.py
|
Python
|
bsd-3-clause
| 1,403 | 0.000713 |
"""
Goal: set the environment for tests
Docs:
https://pythonhosted.org/Flask-SQLAlchemy/quickstart.html
The only things you need to know compared to plain SQLAlchemy are:
SQLAlchemy gives you access to the following things:
- all the functions and classes from sqlalchemy and sqlalchemy.orm
- a preconfigured scoped session called session
- the metadata
- the engine
- a SQLAlchemy.create_all() and SQLAlchemy.drop_al
|
l() methods to create and
drop tables according to the models
- a Model baseclass that is a configured declarative base
- The Model declarative base class behaves like a regular Python class but has
a query attribute attached that can be used to
|
query the model
- You have to commit the session, but you don't have to remove it at the end
of the request, Flask-SQLAlchemy does that for you.
"""
from flask_testing import TestCase
from app.main import app, db, mail
from app import initializer
from config import MODE_TEST
class BaseTestCase(TestCase):
""" Base class for all tests"""
def create_app(self):
""" override the default config with the test config """
initializer.do_init(app, MODE_TEST)
mail.init_app(app)
return app
def setUp(self):
""" create all tables """
db.create_all()
def tearDown(self):
""" remove all tables """
db.session.remove()
db.drop_all()
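# Hedged usage sketch (not part of the original file): a concrete test case
# inherits the app/db lifecycle from BaseTestCase; the '/' route is assumed.
#   class HomePageTest(BaseTestCase):
#       def test_home(self):
#           response = self.client.get('/')
#           self.assertEqual(response.status_code, 200)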
|
m5w/matxin-lineariser
|
matxin_lineariser/utlgrammars/lrule.py
|
Python
|
gpl-3.0
| 2,392 | 0.000836 |
from .lconfiguration import LocalConfiguration
from .printing import Printing
from .word import Word
from xml.etree import ElementTree
class LinearisationRule:
@classmethod
def deserialise(cls, grammars, def_rule_etree):
probability = float(def_rule_etree.get('p'))
        head_node_etree = def_rule_etree.find('NODE')
linearisation_rule = {}
for node_etree in head_node_etree.findall('NODE'):
linearisation_rule[int(node_etree.get('ord'))] = (
node_etree.get('si'), Word.deserialise(node_etree))
        head_node = (head_node_etree.get('si'),
Word.deserialise(head_node_etree))
dependents = list(linearisation_rule.values())
dependents.sort()
local_configuration = LocalConfiguration(head_node[0], head_node[1],
tuple(dependents))
head_node_ord = int(head_node_etree.get('ord'))
linearisation_rule[head_node_ord] = head_node
linearisation_rule = list(linearisation_rule.items())
linearisation_rule.sort()
head_node_index = linearisation_rule.index((head_node_ord, head_node))
linearisation_rule = LinearisationRule(
[value for key, value in linearisation_rule[:head_node_index]],
[value for key, value in linearisation_rule[head_node_index + 1:]])
try:
grammars.get_grammars()[local_configuration][
probability] = linearisation_rule
except (KeyError):
grammars.get_grammars()[local_configuration] = {}
grammars.get_grammars()[local_configuration][
probability] = linearisation_rule
def __init__(self, insert, append):
self.insert = insert
self.append = append
def get_insert(self):
return self.insert
def get_append(self):
return self.append
def __str__(self):
return Printing.get_module_qualname(self) + ' = {\n' + \
' insert = ' + Printing.shift_str(Printing.print_list(self.get_insert(), print_item=self.print_edge)) + '\n' + \
' append = ' + Printing.shift_str(Printing.print_list(self.get_append(), print_item=self.print_edge)) + '\n' + \
'}'
@classmethod
def print_edge(cls, edge):
return Printing.print_tuple(edge, print_item=[repr, str])
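# Hedged input sketch (not part of the original file): the element shape
# deserialise() walks. The rule element name below is an assumption; the
# 'p', 'ord' and 'si' attributes match the lookups above.
#   <DEF_RULE p="0.5">
#     <NODE ord="2" si="head" ...>      <!-- head, with its dependents -->
#       <NODE ord="1" si="det" .../>
#       <NODE ord="3" si="mod" .../>
#     </NODE>
#   </DEF_RULE>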
|
RedhawkSDR/integration-gnuhawk
|
qa/gnuradio/gr/top_block.py
|
Python
|
gpl-3.0
| 4,399 | 0.006365 |
#
# This file is protected by Copyright. Please refer to the COPYRIGHT file
# distributed with this source distribution.
#
# This file is part of GNUHAWK.
#
# GNUHAWK is free software: you can redistribute it and/or modify is under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# GNUHAWK is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
# You should have received a copy of the GNU General Public License along with
# this program.  If not, see http://www.gnu.org/licenses/.
#
from ossie.utils import sb
from ossie.utils.sandbox import local
import gnuradioStubs
import sources
import commands
import time
def _uuidgen():
return commands.getoutput('uuidgen')
def _isStubClass(comp):
return (isinstance(comp, gnuradioStubs.stream_to_vector) or
isinstance(comp, gnuradioStubs.vector_to_stream) or
str(comp.__class__).find("stream_to_streams") >= 0)
class top_block(object):
def __init__(self, fanOut=True):
self.sources = []
self.fanOut = fanOut
def __del__(self):
sb.domainless._cleanUpLaunchedComponents()
# clear out prop_helpers _enums dictionary between test runs
sb.domainless.prop_helpers._enums = {}
def stop(self):
# TODO: consider removing this
sb.stop()
def connect(self, src, dest, *next ):
# If the source is not a real object, skip this connection
if _isStubClass(src):
if len(next) > 0:
self.connect(dest, *next)
return
# If the destination is not a real object, skip both possible connections involving it
if _isStubClass(dest):
if len(next) > 0:
self.connect(src, *next)
return
src_port_name = None
if type(src) == tuple:
src, index = src
else:
# Default to the first port
index = 0
if isinstance(src, local.LocalComponent):
# Get just the uses ports from the source
uses_ports = filter(lambda x: x._direction == 'Uses', src._ports)
if len(uses_ports) > index:
src_port_name = uses_ports[index]._name
if isinstance(src, gnuradioStubs.head):
src = src.comp
if type(dest) == tuple:
dest, index = dest
else:
# Default to the first port
index = 0
if isinstance(dest, local.LocalComponent):
# Get just the provides ports from the destination
provides_ports = filter(lambda x: x._direction == 'Provides', dest._ports)
# -1 inputs; connect everybody to the same input port until existing components are
# modified to have 1 port per allowed input
if len(provides_ports) == 1:
index = 0
dest_port_name = provides_ports[index]._name
else:
dest_port_name = None
# Connect directly to the destination Component object for the stub head class.
if isinstance(dest, gnuradioStubs.head):
dest_obj = dest.comp
else:
dest_obj = dest
if isinstance(src, sources.sbSource):
# Generate a unique ID to disambiguate multiple streams into the same component.
stream_id = _uuidgen()
src.connect((dest_obj,stream_id))
if self.fanOut == True:
if src not in self.sources:
self.sources.append(src)
else:
self.sources.append(src)
else:
src.connect(dest_obj, usesPortName=src_port_name, providesPortName=dest_port_name)
# Make next pair of connections
if len(next) > 0:
self.connect(dest, *next)
def run(self):
sb.start()
for source in self.sources:
# TODO: only do this if this is an sbSource
# try statement is a little sloppy
try:
source.push()
except AttributeError:
pass
# Give blocks time to get setup
time.sleep(.1)
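# Hedged usage sketch (not part of the original file): connect() mirrors the
# GNU Radio API -- blocks chain pairwise and (block, port_index) tuples pick
# ports, while stub classes are skipped; src/filt/sink are placeholders.
#   tb = top_block()
#   tb.connect(src, (filt, 0), sink)
#   tb.run()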
|
CZ-NIC/knot
|
tests-extra/tests/dnssec/single_type_signing/test.py
|
Python
|
gpl-3.0
| 1,348 | 0.003709 |
#!/usr/bin/env python3
"""
DNSSEC Single-Type Signing Scheme, RFC 6781
"""
from dnstest.utils import *
from dnstest.test import Test
t = Test()
knot = t.server("knot")
zones = t.zone_rnd(5, dnssec=False, records=10)
t.link(zones, knot)
t.start()
# one KSK
knot.gen_key(zones[0], ksk=True, zsk=True, alg="ECDSAP256SHA256", key_len="256")
# multiple KSKs
knot.gen_key(zones[1], ksk=True, zsk=True, alg="ECDSAP384SHA384", key_len="384")
knot.gen_key(zones[1], ksk=True, zsk=True, alg="ECDSAP256SHA256", key_len="256")
# different algorithms: KSK+ZSK pair, one KSK
knot.gen_key(zones[2], ksk=True, alg="ECDSAP256SHA256", key_len="256")
knot.gen_key(zones[2], ksk=False, alg="ECDSAP256SHA256", key_len="256")
knot.gen_key(zones[2], ksk=True, zsk=True, alg="ECDSAP384SHA384", key_len="384")
# one ZSK
knot.gen_key(zones[3], ksk=False, alg="ECDSAP256SHA256", key_len="256").change_role(ksk=True, zsk=True)
for zone in zones[:-1]:
knot.dnssec(zone).enable = True
knot.dnssec(zone).single_type_signing = True
# enable automatic Single-Type signing scheme with NSEC3 on the last zone
knot.dnssec(zones[-1]).enable = True
knot.dnssec(zones[-1]).nsec3 = True
knot.dnssec(zones[-1]).single_type_signing = True
knot.gen_confile()
knot.reload()
t.sleep(7)
knot.flush(wait=True)
knot.stop()
for zone in zones:
knot.zone_verify(zone)
t.end()
|
team-xue/xue
|
xue/tutor/studentviews.py
|
Python
|
bsd-3-clause
| 2,757 | 0 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, division
from django.shortcuts import redirect, get_object_or_404
from django.db import transaction
from xue.common.decorators import quickview, limit_role
from xue.tutor.forms import StudentApplicationForm, ProjectSelectionForm
from xue.tutor.models import StudentProject, StudentApplication, TutorProject
# expiration...
PRELIMINARY_EXPIRED, SECONDARY_EXPIRED = False, False
@limit_role([0])
@quickview('tutor/stud_apply_expired.html')
def apply_expired_view(request):
return {}
@limit_role([0])
@quickview('tutor/stud_apply.html')
def apply_view(request):
is_repeat = False
try:
StudentApplication.objects.get(student=request.user)
is_repeat = True
except StudentApplication.DoesNotExist:
pass
if request.method == 'POST':
# form data
frm = StudentApplicationForm(request.POST)
if frm.is_valid():
# valid data, store it if no previous application exists
if not is_repeat:
with transaction.commit_on_success():
entry = frm.save(commit=False)
entry.student = request.user
entry.save()
return redirect('xue.tutor.views.mainpage_view')
else:
frm = StudentApplicationForm()
return {
'form': frm,
'is_repeat': is_repeat,
}
@limit_role([0])
@quickview('tutor/stud_selectproj.html')
def selectproj_view(request):
# protect against rejected applicants and other random people
dummy = get_object_or_404(
StudentApplication,
student=request.user,
status=1,
)
# verify max count
projs = list(StudentProject.objects.filter(student=request.user))
if len(projs) >= 2:
return {
'is_exceeded': True,
'projects': projs,
}
year = request.user.central_info.klass.date.year
if request.method == 'POST':
# form data
frm = ProjectSelectionForm(year, request.POST)
if frm.is_valid():
# valid data, store it
with transaction.commit_on_success():
entry = frm.save(commit=False)
entry.student = request.user
entry.save()
return redirect('xue.tutor.views.mainpage_view')
else:
        frm = ProjectSelectionForm(year)
return {
'is_exceeded': False,
'projects': projs,
'form': frm,
}
# expiration
if PRELIMINARY_EXPIRED:
    apply_view = apply_expired_view
if SECONDARY_EXPIRED:
selectproj_view = apply_expired_view
# vim:set ai et ts=4 sw=4 sts=4 fenc=utf-8:
|
kgblll/libresoft-gymkhana
|
commons/utils.py
|
Python
|
gpl-2.0
| 3,773 | 0.015119 |
# -*- coding: utf-8 -*-
# MORFEO Project
# http://morfeo-project.org
#
# Component: EzForge
#
# (C) Copyright 2004 Telefónica Investigación y Desarrollo
# S.A.Unipersonal (Telefónica I+D)
#
# Info about members and contributors of the MORFEO project
# is available at:
#
# http://morfeo-project.org/
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# If you want to use this software an plan to distribute a
# proprietary application in any way, and you are not licensing and
# distributing your source code under GPL, you probably need to
# purchase a commercial license of the product. More info about
# licensing options is available at:
#
# http://morfeo-project.org/
#
import types
from decimal import *
from django.db import models
from django.utils import simplejson
from django.core.serializers.json import DateTimeAwareJSONEncoder
from xml.dom.minidom import getDOMImplementation
def json_encode(data, ensure_ascii=False):
"""
The main issues with django's default json serializer is that properties that
had been added to a object dynamically are being ignored (and it also has
problems with some models).
"""
def _any(data):
ret = None
if type(data) is types.ListType:
ret = _list(data)
elif type(data) is types.DictType:
ret = _dict(data)
elif isinstance(data, Decimal):
# json.dumps() cant handle Decimal
ret = str(data)
elif isinstance(data, models.query.QuerySet):
# Actually its the same as a list ...
ret = _list(data)
elif isinstance(data, models.Model):
ret = _model(data)
else:
ret = data
return ret
def _model(data):
ret = {}
# If we only have a model, we only want to encode the fields.
for f in data._meta.fields:
ret[f.attname] = _any(getattr(data, f.attname))
# And additionally encode arbitrary properties that had been added.
fields = dir(data.__class__) + ret.keys()
add_ons = [k for k in dir(data) if k not in fields]
for k in add_ons:
ret[k] = _any(getattr(data, k))
return ret
def _list(data):
ret = []
for v in data:
ret.append(_any(v))
return ret
def _dict(data):
ret = {}
for k,v in data.items():
ret[k] = _any(v)
return ret
ret = _any(data)
    return simplejson.dumps(ret, cls=DateTimeAwareJSONEncoder,
                            ensure_ascii=ensure_ascii)
def get_xml_error(value):
dom = getDOMImplementation()
doc = dom.createDocument(None, "e
|
rror", None)
rootelement = doc.documentElement
text = doc.createTextNode(value)
rootelement.appendChild(text)
errormsg = doc.toxml()
doc.unlink()
return errormsg
def getInnerText (domNode, tag=None):
try:
if tag ==None:
return domNode.childNodes[0].nodeValue
else:
return domNode.getElementsByTagName(tag)[0].childNodes[0].nodeValue
except:
return None
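# Hedged usage sketch (not part of the original module): json_encode keeps
# Decimal values and dynamically added model attributes that the stock
# serializer drops, and get_xml_error wraps a message in a one-element
# XML document.
#   json_encode({'price': Decimal('9.99'), 'tags': ['a', 'b']})
#   # -> '{"price": "9.99", "tags": ["a", "b"]}'
#   get_xml_error('resource not found')
#   # -> '<?xml version="1.0" ?><error>resource not found</error>'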
|
horazont/aioxmpp
|
tests/__init__.py
|
Python
|
lgpl-3.0
| 1,184 | 0 |
########################################################################
# File name: __init__.py
# This file is part of: aioxmpp
#
# LICENSE
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/>.
#
########################################################################
from aioxmpp.e2etest import ( # NOQA
    setup_package as e2etest_setup_package,
teardown_package,
)
import warnings
def setup_package():
e2etest_setup_package()
warnings.filterwarnings(
"error",
message=".+(Stream)?ErrorCondition",
category=DeprecationWarning,
)
|
DEV3L/python-files-by-date
|
files_by_date/service/files_service.py
|
Python
|
mit
| 4,825 | 0.003731 |
import datetime
import os
import shutil
import time
from files_by_date.utils.logging_wrapper import get_logger, log_message
from files_by_date.validators.argument_validator import ArgumentValidator
logger = get_logger(name='files_service')
class FilesService:
def __init__(self):
raise NotImplementedError
@classmethod
def gather_files(cls, parent_directory, files):
for dir_name, subdir_list, file_list in os.walk(parent_directory):
if file_list:
files.extend(
['{dir_name}{os_sep}{file_name}'.format(dir_name=dir_name, os_sep=os.sep, file_name=file) for file
in file_list])
# [f'{dir_name}{os.sep}{file}' for file in file_list] # 3.6
for subdir in subdir_list:
files = cls.gather_files(subdir, files)
return files
@classmethod
def group_files_by_modified_date(cls, files):
grouped_files = {}
for file in files:
directory_tag = cls._get_directory_tag_for_file(file)
file_group = grouped_files.get(directory_tag, list())
file_group.append(file)
grouped_files[directory_tag] = file_group
return grouped_files
@classmethod
def copy_files(cls, file_groups, target_dir, force_overwrite):
if not os.path.exists(target_dir):
os.makedirs(target_dir) # TODO: not covered
total_count = Count()
for group in file_groups:
group_count = Count()
# group_dir = f'{target_dir}{os.sep}{group}' # 3.6
group_dir = '{target_dir}{os_sep}{group}'.format(target_dir=target_dir, os_sep=os.sep, group=group)
ArgumentValidator.validate_target_dir(group_dir)
if not os.path.exists(group_dir):
os.makedirs(group_dir)
# log_message(f'Created directory: {group_dir}') # 3.6
log_message('Created directory: {group_dir}'.format(group_dir=group_dir))
# log_message(f'Copying {len(file_groups[group])} files to {group_dir}') # 3.6
log_message('Moving {group_size} files to {group_dir}'.format(group_size=len(file_groups[group]),
group_dir=group_dir))
for file in file_groups[group]:
# file_path = f'{group_dir}{os.sep}{os.path.basename(file)}' # 3.6
file_path = '{group_dir}{os_sep}{file_name}'.format(group_dir=group_dir, os_sep=os.sep,
file_name=os.path.basename(file))
if force_overwrite and os.path.exists(file_path):
os.remove(file_path)
if not os.path.exists(file_path):
shutil.copy2(file, group_dir)
group_count.add_copied(count=1)
else:
group_count.add_skipped(count=1) # TODO: not covered
total_count.add_files(count=len(file_groups[group]))
total_count.add_copied(count=group_count.copied)
total_count.add_skipped(count=group_count.skipped)
            # log_message(f'Copied {group_count.copied}, skipped {group_count.skipped}')  # 3.6
            log_message('Copied {local_copied_count}, skipped {local_skipped_count}'.format(
local_copied_count=group_count.copied, local_skipped_count=group_count.skipped))
log_message(
# f'Total files count {total_count.files}, total copied {total_count.copied}, total skipped {total_count.skipped}') # 3.6
'Total files count {total_files_count}, total copied {total_copied_count}, total skipped {total_skipped_count}'.format(
total_files_count=total_count.files,
total_copied_count=total_count.copied,
total_skipped_count=total_count.skipped))
return total_count
@staticmethod
def _get_directory_tag_for_file(file):
return datetime.datetime.strptime(time.ctime(os.path.getmtime(file)), "%a %b %d %H:%M:%S %Y").strftime('%Y%m')
class Count:
def __init__(self, *, files=0, copied=0, skipped=0):
self.files = files
self.copied = copied
self.skipped = skipped
def __str__(self):
# return f'files={self.files}, copied={self.copied}, skipped={self.skipped}' # 3.6
return 'files={files}, copied={copied}, skipped={skipped}'.format(files=self.files, copied=self.copied,
skipped=self.skipped)
def add_files(self, *, count=1):
self.files += count
def add_copied(self, *, count=0):
self.copied += count
def add_skipped(self, *, count=0):
self.skipped += count
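# Hedged usage sketch (not part of the original module); the paths below are
# illustrative only.
#   files = FilesService.gather_files('/tmp/photos', [])
#   groups = FilesService.group_files_by_modified_date(files)   # {'YYYYMM': [...]}
#   totals = FilesService.copy_files(groups, '/tmp/by-month', force_overwrite=False)
#   print(totals)   # e.g. files=120, copied=118, skipped=2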
|
BigelowLab/genologics
|
genologics/descriptors.py
|
Python
|
mit
| 18,060 | 0.000388 |
"""Python interface to GenoLogics LIMS via its REST API.
Entities and their descriptors for the LIMS interface.
Per Kraulis, Science for Life Laboratory, Stockholm, Sweden.
Copyright (C) 2012 Per Kraulis
"""
from genologics.constants import nsmap
try:
from urllib.parse import urlsplit, urlparse, parse_qs, urlunparse
except ImportError:
from urlparse import urlsplit, urlparse, parse_qs, urlunparse
import datetime
import time
from xml.etree import ElementTree
import logging
logger = logging.getLogger(__name__)
class BaseDescriptor(object):
"Abstract base descriptor for an instance attribute."
def __get__(self, instance, cls):
raise NotImplementedError
class TagDescriptor(BaseDescriptor):
"""Abstract base descriptor for an instance attribute
represented by an XML element.
"""
def __init__(self, tag):
self.tag = tag
def get_node(self, instance):
if self.tag:
return instance.root.find(self.tag)
        else:
            return instance.root
class StringDescriptor(TagDescriptor):
"""An instance attribute containing a string value
represented by an XML element.
"""
    def __get__(self, instance, cls):
instance.get()
node = self.get_node(instance)
if node is None:
return None
else:
return node.text
def __set__(self, instance, value):
instance.get()
node = self.get_node(instance)
if node is None:
# create the new tag
node = ElementTree.Element(self.tag)
instance.root.append(node)
node.text = str(value)
class StringAttributeDescriptor(TagDescriptor):
"""An instance attribute containing a string value
represented by an XML attribute.
"""
def __get__(self, instance, cls):
instance.get()
return instance.root.attrib[self.tag]
def __set__(self, instance, value):
instance.get()
instance.root.attrib[self.tag] = value
class StringListDescriptor(TagDescriptor):
"""An instance attribute containing a list of strings
represented by multiple XML elements.
"""
def __get__(self, instance, cls):
instance.get()
result = []
for node in instance.root.findall(self.tag):
result.append(node.text)
return result
class StringDictionaryDescriptor(TagDescriptor):
"""An instance attribute containing a dictionary of string key/values
represented by a hierarchical XML element.
"""
def __get__(self, instance, cls):
instance.get()
result = dict()
node = instance.root.find(self.tag)
if node is not None:
for node2 in node.getchildren():
result[node2.tag] = node2.text
return result
class IntegerDescriptor(StringDescriptor):
"""An instance attribute containing an integer value
represented by an XMl element.
"""
def __get__(self, instance, cls):
text = super(IntegerDescriptor, self).__get__(instance, cls)
if text is not None:
return int(text)
class IntegerAttributeDescriptor(TagDescriptor):
"""An instance attribute containing a integer value
represented by an XML attribute.
"""
def __get__(self, instance, cls):
instance.get()
return int(instance.root.attrib[self.tag])
class BooleanDescriptor(StringDescriptor):
"""An instance attribute containing a boolean value
represented by an XMl element.
"""
def __get__(self, instance, cls):
text = super(BooleanDescriptor, self).__get__(instance, cls)
if text is not None:
return text.lower() == 'true'
def __set__(self, instance, value):
super(BooleanDescriptor, self).__set__(instance, str(value).lower())
class UdfDictionary(object):
"Dictionary-like container of UDFs, optionally within a UDT."
def _is_string(self, value):
try:
return isinstance(value, basestring)
except:
return isinstance(value, str)
def __init__(self, instance, *args, **kwargs):
self.instance = instance
self._udt = kwargs.pop('udt', False)
self.rootkeys = args
self._rootnode = None
self._update_elems()
self._prepare_lookup()
self.location = 0
@property
def rootnode(self):
if not self._rootnode:
self._rootnode = self.instance.root
for rootkey in self.rootkeys:
self._rootnode = self._rootnode.find(rootkey)
return self._rootnode
def get_udt(self):
if self._udt == True:
return None
else:
return self._udt
def set_udt(self, name):
assert isinstance(name, str)
if not self._udt:
raise AttributeError('cannot set name for a UDF dictionary')
self._udt = name
elem = self.rootnode.find(nsmap('udf:type'))
assert elem is not None
elem.set('name', name)
udt = property(get_udt, set_udt)
def _update_elems(self):
self._elems = []
if self._udt:
elem = self.rootnode.find(nsmap('udf:type'))
if elem is not None:
self._udt = elem.attrib['name']
self._elems = elem.findall(nsmap('udf:field'))
else:
tag = nsmap('udf:field')
for elem in self.rootnode.getchildren():
if elem.tag == tag:
self._elems.append(elem)
def _prepare_lookup(self):
self._lookup = dict()
for elem in self._elems:
type = elem.attrib['type'].lower()
value = elem.text
if not value:
value = None
elif type == 'numeric':
try:
value = int(value)
except ValueError:
value = float(value)
elif type == 'boolean':
value = value == 'true'
elif type == 'date':
value = datetime.date(*time.strptime(value, "%Y-%m-%d")[:3])
self._lookup[elem.attrib['name']] = value
def __contains__(self, key):
try:
self._lookup[key]
except KeyError:
return False
return True
def __getitem__(self, key):
return self._lookup[key]
def __setitem__(self, key, value):
self._lookup[key] = value
for node in self._elems:
if node.attrib['name'] != key: continue
vtype = node.attrib['type'].lower()
if value is None:
pass
elif vtype == 'string':
if not self._is_string(value):
raise TypeError('String UDF requires str or unicode value')
elif vtype == 'str':
if not self._is_string(value):
raise TypeError('String UDF requires str or unicode value')
elif vtype == 'text':
if not self._is_string(value):
raise TypeError('Text UDF requires str or unicode value')
elif vtype == 'numeric':
if not isinstance(value, (int, float)):
raise TypeError('Numeric UDF requires int or float value')
value = str(value)
elif vtype == 'boolean':
if not isinstance(value, bool):
raise TypeError('Boolean UDF requires bool value')
value = value and 'true' or 'false'
elif vtype == 'date':
if not isinstance(value, datetime.date): # Too restrictive?
raise TypeError('Date UDF requires datetime.date value')
value = str(value)
elif vtype == 'uri':
if not self._is_string(value):
raise TypeError('URI UDF requires str or punycode (unicode) value')
value = str(value)
else:
raise NotImplemented("UDF type '%s'" % vtype)
if not isinstance(value, str):
if not self._is_string(value):
                    value = str(value)
|
scotartt/commentarius
|
decommentariis/decommentariis/signals.py
|
Python
|
gpl-2.0
| 1,210 | 0.015702 |
from allauth.account.signals import email_confirmed, email_changed, email_added, email_removed, user_signed_up, user_logged_in
from django.contrib.auth.models import User, Group, Permission
from django.db.models import Q
from django.dispatch import receiver
"""intercept signals from allauth"""
@receiver(email_confirmed)
def email_confirmed_(sender, email_address, **kwargs):
"""user has confirmed the email manual
|
ly"""
# print(email_address.email + " confirmed email.")
query = {'email': email_address.email}
if email_address.primary:
user = User.objects.get(**query)
# print(str(user) + " confirmed primary email.")
group = Group.objects.get(name='AllowedCommentary')
user.groups.add(group)
@receiver(user_signed_up)
def user_signed_up_(sender, request, user, **kwargs):
"""when a user signs up"""
# print("SIGN UP " + str(user) + " signed up and kwargs=" + str(kwargs))
    social_login = kwargs.get('sociallogin', None)
if social_login:
social_account = social_login.account
if social_account:
if 'verified_email' in social_account.extra_data:
if social_account.extra_data['verified_email']:
group = Group.objects.get(name='AllowedCommentary')
user.groups.add(group)
|
Pylvax/django
|
project/starter_app/admin.py
|
Python
|
mit
| 92 | 0.01087 |
from django.contrib import admin
from .models import Message
admin.site.register(Message)
|
fabiencro/knmt
|
tests/suite1/training_management_test.py
|
Python
|
gpl-3.0
| 3,928 | 0.001527 |
#!/usr/bin/env python
"""macro_tests.py: Some macro tests"""
from __future__ import absolute_import, division, print_function, unicode_literals
__author__ = "Fabien Cromieres"
__license__ = "undecided"
__version__ = "1.0"
__email__ = "fabien.cromieres@gmail.com"
__status__ = "Development"
# import nmt_chainer.make_data as make_data
# import nmt_chainer.training_module.train as train
# import nmt_chainer.eval as eval
from nmt_chainer.__main__ import main
import os.path
import pytest
class TestTrainingManagement:
def test_checkpoint_saving(self, tmpdir, gpu):
"""
Test no error happens during checkpoint saving.
"""
test_data_dir = os.path.join(
os.path.dirname(
os.path.abspath(__file__)),
"../tests_data")
train_dir = tmpdir.mkdir("train")
data_prefix = str(train_dir.join("test1.data"))
        train_prefix = str(train_dir.join("test1.train"))
data_src_file = os.path.join(test_data_dir, "src2.txt")
data_tgt_file = os.path.join(test_data_dir, "tgt2.txt")
args = 'make_data {0} {1} {2} --dev_src {0} --dev_tgt {1}'.format(
data_src_file, data_tgt_file, data_prefix).split(' ')
main(arguments=args)
args_train = ["train", data_prefix, train_prefix] + "--max_nb
|
_iters 10 --mb_size 2 --Ei 10 --Eo 12 --Hi 30 --Ha 70 --Ho 15 --Hl 23 --save_ckpt_every 5".split(" ")
if gpu is not None:
args_train += ['--gpu', gpu]
main(arguments=args_train)
def test_config_saving(self, tmpdir, gpu):
"""
Test no error happens during checkpoint saving.
"""
test_data_dir = os.path.join(
os.path.dirname(
os.path.abspath(__file__)),
"../tests_data")
train_dir = tmpdir.mkdir("train")
data_prefix = str(train_dir.join("test1.data"))
train_prefix = str(train_dir.join("test1.train"))
data_src_file = os.path.join(test_data_dir, "src2.txt")
data_tgt_file = os.path.join(test_data_dir, "tgt2.txt")
args = 'make_data {0} {1} {2} --dev_src {0} --dev_tgt {1}'.format(
data_src_file, data_tgt_file, data_prefix).split(' ')
main(arguments=args)
args_train = ["train", data_prefix, train_prefix] + "--max_nb_iters 5 --mb_size 2 --Ei 10 --Eo 12 --Hi 30 --Ha 70 --Ho 15 --Hl 23".split(" ")
if gpu is not None:
args_train += ['--gpu', gpu]
main(arguments=args_train)
config_filename = train_prefix + ".train.config"
train_prefix_2 = train_prefix + ".2"
args_train = ["train", "--config", config_filename, "--save_prefix", train_prefix_2]
if gpu is not None:
args_train += ['--gpu', gpu]
main(arguments=args_train)
config_filename2 = train_prefix_2 + ".train.config"
import json
config1 = json.load(open(config_filename))
config2 = json.load(open(config_filename2))
def compare_dict_except(d1, d2, except_fields=None):
k_list_1 = set(d1.keys())
k_list_2 = set(d2.keys())
k_xor = (k_list_1 - k_list_2) | (k_list_2 - k_list_1)
for k_diff in k_xor:
if except_fields is None or k_diff not in except_fields:
return False
for k in k_list_1 & k_list_2:
v1 = d1[k]
if isinstance(v1, dict):
compare_result = compare_dict_except(d1[k], d2[k], except_fields=except_fields)
if not compare_result:
return False
else:
if v1 != d2[k] and (
except_fields is None or k not in except_fields):
return False
return True
assert compare_dict_except(config1, config2, except_fields="metadata save_prefix config".split())
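# Hedged note (not part of the original file): the local helper above treats
# two dicts as equal when they differ only in the excepted keys, recursing
# into nested dicts, e.g.
#   compare_dict_except({'a': 1, 'config': 'x'},
#                       {'a': 1, 'config': 'y'},
#                       except_fields=['config'])   # -> True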
|
jbernardis/repraptoolbox
|
src/Printer/printmon.py
|
Python
|
gpl-3.0
| 26,866 | 0.039902 |
import wx
import re
import os
import time
import inspect
cmdFolder = os.path.realpath(os.path.abspath(os.path.split(inspect.getfile( inspect.currentframe() ))[0]))
gcRegex = re.compile("[-]?\d+[.]?\d*")
from cnc import CNC
from reprapenums import RepRapEventEnum
from gcframe import GcFrame
from properties import PropertiesDlg
from propenums import PropertyEnum
from printstateenum import PrintState
from tools import formatElapsed
from gcsuffix import parseGCSuffix
from sdcard import SDCard
from History.history import PrintStarted, PrintCompleted
BUTTONDIM = (48, 48)
BUTTONDIMWIDE = (96, 48)
RECORD_TIMES = True
class PrintButton(wx.BitmapButton):
def __init__(self, parent, images):
self.imgPrint = images.pngPrint
self.imgRestart = images.pngRestart
wx.BitmapButton.__init__(self, parent, wx.ID_ANY, self.imgPrint, size=BUTTONDIM)
self.setPrint()
def setPrint(self):
self.SetBitmap(self.imgPrint)
self.SetToolTip("Start printing")
def setRestart(self):
self.SetBitmap(self.imgRestart)
self.SetToolTip("Restart print from the beginning")
class PauseButton(wx.BitmapButton):
def __init__(self, parent, images):
wx.BitmapButton.__init__(self, parent, wx.ID_ANY, images.pngPause, size=BUTTONDIM)
self.setPause()
def setPause(self):
self.SetToolTip("Pause printing")
def setResume(self):
self.SetToolTip("Resume print from the paused point")
class PrintMonitorDlg(wx.Frame):
def __init__(self, parent, wparent, reprap, prtName):
self.parent = parent
self.wparent = wparent
self.log = self.parent.log
self.history = wparent.history
self.reprap = reprap
self.settings = self.parent.settings
self.images = self.parent.images
self.state = PrintState.idle
self.oldState = None
self.gcodeLoaded = False
self.gcodeFile = None
self.printerName = prtName
self.layerMap = []
self.okToImport = False
self.importFile = None
self.currentLayer = 0
self.maxTool = 0
self.eUsed = [0.0, 0.0, 0.0, 0.0]
self.totalTime = 0
self.totalTimeStr = ""
self.layerTimes = []
self.layerTimeStr = []
self.layerRange = (0, 0)
self.gObj = None
self.printLayer = 0
self.printPosition = None
title = self.buildTitle()
wx.Frame.__init__(self, wparent, wx.ID_ANY, title=title)
self.Show()
ico = wx.Icon(os.path.join(cmdFolder, "images", "printmon.png"), wx.BITMAP_TYPE_PNG)
self.SetIcon(ico)
if self.settings.hassdcard:
self.sdcard = SDCard(self.parent, self, self.reprap, self.log)
else:
self.sdcard = None
self.gcf = GcFrame(self, self.gObj, self.settings)
self.stLayerText = wx.StaticText(self, wx.ID_ANY, "Layer Height: 0.00")
ht = self.gcf.GetSize().Get()[1] - BUTTONDIM[1]*2 - 20
self.slLayers = wx.Slider(
self, wx.ID_ANY, 0, 0, 1000, size=(-1, ht),
style=wx.SL_VERTICAL | wx.SL_AUTOTICKS | wx.SL_LABELS | wx.SL_INVERSE)
self.Bind(wx.EVT_SCROLL, self.onLayerScroll, self.slLayers)
self.slLayers.Enable(False)
self.cbShowMoves = wx.CheckBox(self, wx.ID_ANY, "Show moves")
self.cbShowMoves.SetValue(self.settings.showmoves)
self.Bind(wx.EVT_CHECKBOX, self.onShowMoves, self.cbShowMoves)
self.cbShowPrevious = wx.CheckBox(self, wx.ID_ANY, "Show previous layer")
self.cbShowPrevious.SetValue(self.settings.showprevious)
self.Bind(wx.EVT_CHECKBOX, self.onShowPrevious, self.cbShowPrevious)
self.cbToolPathOnly = wx.CheckBox(self, wx.ID_ANY, "Show tool paths only")
self.cbToolPathOnly.SetValue(self.settings.toolpathonly)
self.Bind(wx.EVT_CHECKBOX, self.onToolPathOnly, self.cbToolPathOnly)
self.cbSyncPrint = wx.CheckBox(self, wx.ID_ANY, "Sync with print")
self.cbSyncPrint.SetValue(True)
self.Bind(wx.EVT_CHECKBOX, self.onSyncPrint, self.cbSyncPrint)
self.bImport = wx.BitmapButton(self, wx.ID_ANY, self.images.pngImport, size=BUTTONDIM)
self.bImport.SetToolTip("Import G Code file from toolbox")
self.Bind(wx.EVT_BUTTON, self.onImport, self.bImport)
self.bImportQ = wx.BitmapButton(self, wx.ID_ANY, self.images.pngNext, size=BUTTONDIM)
self.Bind(wx.EVT_BUTTON, self.onImportFromQueue, self.bImportQ)
        self.bOpen = wx.BitmapButton(self, wx.ID_ANY, self.images.pngFileopen, size=BUTTONDIM)
self.bOpen.SetToolTip("Open a G Code file")
self.Bind(wx.EVT_BUTTON, self.onOpenFile, self.bOpen)
self.Bind(wx.EVT_CLOSE, self.onClose)
self.bPrint = PrintButton(self, self.images)
self.bPrint.Enable(False)
self.Bind(wx.EVT_BUTTON, self.onPrint, self.bPrint)
self.bPause = PauseButton(self, self.images)
self.bPause.Enable(False)
self.Bind(wx.EVT_BUTTON, self.onPause, self.bPause)
        self.bSdPrintTo = wx.BitmapButton(self, wx.ID_ANY, self.images.pngSdprintto, size=(BUTTONDIMWIDE))
self.bSdPrintTo.Enable(False)
self.Bind(wx.EVT_BUTTON, self.onSdPrintTo, self.bSdPrintTo)
self.bSdPrintFrom = wx.BitmapButton(self, wx.ID_ANY, self.images.pngSdprintfrom, size=(BUTTONDIMWIDE))
self.bSdPrintFrom.Enable(False)
self.Bind(wx.EVT_BUTTON, self.onSdPrintFrom, self.bSdPrintFrom)
self.bSdDelete = wx.BitmapButton(self, wx.ID_ANY, self.images.pngSddelete, size=(BUTTONDIM))
self.bSdDelete.Enable(False)
self.Bind(wx.EVT_BUTTON, self.onSdDelete, self.bSdDelete)
self.bUp = wx.BitmapButton(self, wx.ID_ANY, self.images.pngUp, size=BUTTONDIM)
self.bUp.SetToolTip("Move up one layer")
self.Bind(wx.EVT_BUTTON, self.onUp, self.bUp)
self.bUp.Enable(False)
self.bDown = wx.BitmapButton(self, wx.ID_ANY, self.images.pngDown, size=BUTTONDIM)
self.bDown.SetToolTip("Move down one layer")
self.Bind(wx.EVT_BUTTON, self.onDown, self.bDown)
self.bDown.Enable(False)
szGcf = wx.BoxSizer(wx.HORIZONTAL)
szGcf.AddSpacer(10)
szGcf.Add(self.gcf)
szGcf.Add(self.stLayerText, 1, wx.ALIGN_CENTER_HORIZONTAL, 1)
szGcf.AddSpacer(10)
szNav = wx.BoxSizer(wx.VERTICAL)
szNav.Add(self.bUp, 1, wx.ALIGN_CENTER_HORIZONTAL, 1)
szNav.AddSpacer(10)
szNav.Add(self.slLayers)
szNav.AddSpacer(10)
szNav.Add(self.bDown, 1, wx.ALIGN_CENTER_HORIZONTAL, 1)
szGcf.Add(szNav)
szGcf.AddSpacer(10)
szOpts = wx.BoxSizer(wx.HORIZONTAL)
szOpts.AddSpacer(10)
szOpts.Add(self.cbShowMoves)
szOpts.AddSpacer(10)
szOpts.Add(self.cbShowPrevious)
szOpts.AddSpacer(10)
szOpts.Add(self.cbToolPathOnly)
szOpts.AddSpacer(10)
szOpts.Add(self.cbSyncPrint)
szOpts.AddSpacer(10)
szBtn = wx.BoxSizer(wx.HORIZONTAL)
szBtn.AddSpacer(10)
szBtn.Add(self.bImport)
szBtn.AddSpacer(10)
szBtn.Add(self.bImportQ)
szBtn.AddSpacer(10)
szBtn.Add(self.bOpen)
szBtn.AddSpacer(20)
szBtn.Add(self.bPrint)
szBtn.AddSpacer(10)
szBtn.Add(self.bPause)
if self.sdcard:
szBtn.AddSpacer(20)
szBtn.Add(self.bSdPrintTo)
szBtn.AddSpacer(10)
szBtn.Add(self.bSdPrintFrom)
szBtn.AddSpacer(10)
szBtn.Add(self.bSdDelete)
szBtn.AddSpacer(10)
szDlg = wx.BoxSizer(wx.VERTICAL)
szDlg.AddSpacer(10)
szDlg.Add(szGcf)
szDlg.AddSpacer(10)
szDlg.Add(szOpts)
szDlg.AddSpacer(10)
szDlg.Add(szBtn)
szDlg.AddSpacer(10)
self.SetSizer(szDlg)
self.Fit()
self.Layout()
self.propDlg = PropertiesDlg(self, wparent, self.printerName)
self.propDlg.Show()
if self.settings.propposition is not None:
self.propDlg.SetPosition(self.settings.propposition)
self.enableButtonsByState()
self.reprap.registerPositionHandler(self.updatePrintPosition)
self.reprap.registerEventHandler(self.reprapEvent)
self.reprap.registerSdEventHandler(self.sdcard)
def show(self):
self.Show()
self.Raise()
self.propDlg.Show()
self.propDlg.Raise()
def setLayerText(self, ht):
if ht is None:
htv = 0.0
else:
htv = ht
self.stLayerText.SetLabel("Layer Height: %0.3f" % htv)
def getStatusReport(self):
r = self.propDlg.getStatusReport()
r["PrintStatus"] = PrintState.label[self.state]
return r
def buildTitle(self):
t = "%s print monitor" % self.printerName
if self.gcodeLoaded:
if len(self.gcodeFile) > 45:
t += " - %s" % os.path.basename(self.gcodeFile)
else:
t += " - %s" % self.gcodeFile
return t
def rememberPositions(self):
self.settings.propposition = self.propDlg.GetPosition()
def isPrinting(self):
return self.state in [PrintState.pr
|
Achint08/open-event-orga-server
|
migrations/versions/8e7f7864cb60_.py
|
Python
|
gpl-3.0
| 374 | 0.008021 |
"""empty message
Revision ID: 8e7f7864cb60
Revises: ('80a704b880db', 'adf34c11b0df')
Create Date: 2016-06-19 15:43:23.027000
"""
# revision identifiers, used by Alembic.
revision = '8e7f7864cb60'
down_revision = ('80a704b880db', 'adf34c11b0df')
from alembic import op
import sqlalchemy as sa
import sqlalchemy_utils
def upgrade():
pass
def downgrade():
pass
| |
mrknow/filmkodi
|
plugin.video.fanfilm/resources/lib/sources/segos_mv.py
|
Python
|
apache-2.0
| 5,195 | 0.009438 |
# -*- coding: utf-8 -*-
'''
FanFilm Add-on
Copyright (C) 2016 mrknow
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib,urlparse, json, base64
from resources.lib.libraries import cleantitle
from resources.lib.libraries import client
from resources.lib.libraries import control
from resources.lib import resolvers
class source:
def __init__(self):
self.base_link = 'http://segos.es'
self.search_link = '/?search=%s'
self.episode_link = '-Season-%01d-Episode-%01d'
def get_movie(self, imdb, title, year):
try:
query = self.search_link % (urllib.quote_plus(cleantitle.query2(title)))
query = urlparse.urljoin(self.base_link, query)
result = client.request(query)
title = cleantitle.movie(title)
result = client.parseDOM(result, 'div', attrs={'style':'overflow: hidden; margin-top: 15px;'})
result = [(
client.parseDOM(i, 'a', ret='href')[0],
client.parseDOM(i, 'a')[1],
str(re.findall(r"(\d{4})", client.parseDOM(i, 'a')[1])[0])) for i in result]
years = ['%s' % str(year), '%s' % str(int(year)+1), '%s' % str(int(year)-1)]
result = [i for i in result if title in cleantitle.movie(i[1])]
result = [i[0] for i in result if any(x in i[2] for x in years)][0]
try: url = re.compile('//.+?(/.+)').findall(result)[0]
except: url = result
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
control.log('Segos URL %s' % url)
return url
except:
return
def get_show(self, imdb, tvdb, tvshowtitle, year):
try:
# assumption: reuse the working search helpers from get_movie, since
# moviesearch_link and client.source are not defined in this file
query = self.search_link % (urllib.quote_plus(cleantitle.query2(tvshowtitle)))
query = urlparse.urljoin(self.base_link, query)
result = client.request(query)
tvshowtitle = cleantitle.tv(tvshowtitle)
years = ['%s' % str(year), '%s' % str(int(year)+1), '%s' % str(int(year)-1)]
result = [(client.parseDOM(i, 'a', ret='href')[0], client.parseDOM(i, 'h2', ret='title')[0], client.parseDOM(i, 'span', attrs={'itemprop': 'copyrightYear'})) for i in result]
result = [i for i in result if len(i[2]) > 0]
result = [i for i in result if tvshowtitle == cleantitle.tv(i[1])]
result = [i[0] for i in result if any(x in i[2][0] for x in years)][0]
try: url = re.compile('//.+?(/.+)').findall(result)[0]
except: url = result
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
return url
except:
return
def get_episode(self, url, imdb, tvdb, title, date, season, episode):
if url == None: return
url += self.episode_link % (int(season), int(episode))
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
return url
def get_sources(self, url, hosthdDict, hostDict, locDict):
try:
sources = []
if url == None: return sources
url = urlparse.urljoin(self.base_link, url)
result = client.request(url)
vtype = re.findall('<div class="col-lg-9 col-md-9 col-sm-9">\s.*<b>Język</b>:(.*?)\.*</div>',result)[0].strip()
q = re.findall('<div class="col-lg-9 col-md-9 col-sm-9">\s.*<b>Jakość</b>:(.*?)\.*</div>', result)[0].strip()
quality = 'SD'
if '720' in q: quality = 'HD'
if '1080' in q: quality = '1080p'
links = client.parseDOM(result, 'div', attrs={'id':'Film'})
links = [client.parseDOM(i, 'a', ret='href', attrs={'target':'_blank'})[0] for i in links]
for i in links:
try:
host = urlparse.urlparse(i).netloc
host = host.split('.')
host = host[-2]+"."+host[-1]
host = host.lower()
host = client.replaceHTMLCodes(host)
host = host.encode('utf-8')
sources.append({'source': host, 'quality': quality, 'provider': 'SEGOS', 'url': i, 'vtype':vtype})
except:
pass
return sources
except:
return sources
def resolve(self, url):
control.log('SEGOS RESOLVE URL %s' % url)
try:
url = resolvers.request(url)
return url
except:
return
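# A hypothetical usage sketch, not part of the add-on (assumes the site is
# reachable and the FanFilm libraries above are importable; the title/year
# values are illustrative only):
#
#   s = source()
#   movie_url = s.get_movie('tt0133093', 'The Matrix', '1999')
#   if movie_url:
#       for src in s.get_sources(movie_url, None, None, None):
#           print src['source'], src['quality'], src['url']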
|
SrNetoChan/QGIS
|
python/plugins/processing/algs/qgis/FindProjection.py
|
Python
|
gpl-2.0
| 6,023 | 0.00249 |
# -*- coding: utf-8 -*-
"""
***************************************************************************
FindProjection.py
-----------------
Date : February 2017
Copyright : (C) 2017 by Nyall Dawson
Email : nyall dot dawson at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Nyall Dawson'
__date__ = 'February 2017'
__copyright__ = '(C) 2017, Nyall Dawson'
import os
from qgis.core import (QgsGeometry,
QgsFeature,
QgsFeatureSink,
QgsField,
QgsFields,
QgsCoordinateReferenceSystem,
QgsCoordinateTransform,
QgsCoordinateTransformContext,
QgsWkbTypes,
QgsProcessingException,
QgsProcessingParameterFeatureSource,
QgsProcessingParameterExtent,
QgsProcessingParameterCrs,
QgsProcessingParameterFeatureSink,
QgsProcessingParameterDefinition)
from qgis.PyQt.QtCore import QVariant
from processing.algs.qgis.QgisAlgorithm import QgisAlgorithm
pluginPath = os.path.split(os.path.split(os.path.dirname(__file__))[0])[0]
class FindProjection(QgisAlgorithm):
INPUT = 'INPUT'
TARGET_AREA = 'TARGET_AREA'
TARGET_AREA_CRS = 'TARGET_AREA_CRS'
OUTPUT = 'OUTPUT'
def tags(self):
return self.tr('crs,srs,coordinate,reference,system,guess,estimate,finder,determine').split(',')
def group(self):
return self.tr('Vector general')
def groupId(self):
return 'vectorgeneral'
def __init__(self):
super().__init__()
def initAlgorithm(self, config=None):
self.addParameter(QgsProcessingParameterFeatureSource(self.INPUT,
self.tr('Input layer')))
extent_parameter = QgsProcessingParameterExtent(self.TARGET_AREA,
self.tr('Target area for layer'))
self.addParameter(extent_parameter)
# deprecated
crs_param = QgsProcessingParameterCrs(self.TARGET_AREA_CRS, 'Target area CRS', optional=True)
crs_param.setFlags(crs_param.flags() | QgsProcessingParameterDefinition.FlagHidden)
self.addParameter(crs_param)
self.addParameter(QgsProcessingParameterFeatureSink(self.OUTPUT,
self.tr('CRS candidates')))
def name(self):
return 'findprojection'
def displayName(self):
return self.tr('Find projection')
def processAlgorithm(self, parameters, context, feedback):
source = self.parameterAsSource(parameters, self.INPUT, context)
if source is None:
raise QgsProcessingException(self.invalidSourceError(parameters, self.INPUT))
extent = self.parameterAsExtent(parameters, self.TARGET_AREA, context)
target_crs = self.parameterAsExtentCrs(parameters, self.TARGET_AREA, context)
if self.TARGET_AREA_CRS in parameters:
c = self.parameterAsCrs(parameters, self.TARGET_AREA_CRS, context)
if c.isValid():
target_crs = c
target_geom = QgsGeometry.fromRect(extent)
fields = QgsFields()
fields.append(QgsField('auth_id', QVariant.String, '', 20))
(sink, dest_id) = self.parameterAsSink(parameters, self.OUTPUT, context,
fields, QgsWkbTypes.NoGeometry, QgsCoordinateReferenceSystem())
if sink is None:
raise QgsProcessingException(self.invalidSinkError(parameters, self.OUTPUT))
# make intersection tests nice and fast
engine = QgsGeometry.createGeometryEngine(target_geom.constGet())
engine.prepareGeometry()
layer_bounds = QgsGeometry.fromRect(source.sourceExtent())
crses_to_check = QgsCoordinateReferenceSystem.validSrsIds()
total = 100.0 / len(crses_to_check)
found_results = 0
transform_context = QgsCoordinateTransformContext()
for current, srs_id in enumerate(crses_to_check):
if feedback.isCanceled():
break
candidate_crs = QgsCoordinateReferenceSystem.fromSrsId(srs_id)
if not candidate_crs.isValid():
continue
transform_candidate = QgsCoordinateTransform(candidate_crs, target_crs, transform_context)
transformed_bounds = QgsGeometry(layer_bounds)
try:
if not transformed_bounds.transform(transform_candidate) == 0:
continue
except:
continue
try:
if engine.intersects(transformed_bounds.constGet()):
feedback.pushInfo(self.tr('Found candidate CRS: {}').format(candidate_crs.authid()))
f = QgsFeature(fields)
f.setAttributes([candidate_crs.authid()])
sink.addFeature(f, QgsFeatureSink.FastInsert)
found_results += 1
except:
continue
feedback.setProgress(int(current * total))
if found_results == 0:
feedback.reportError(self.tr('No matching projections found'))
return {self.OUTPUT: dest_id}
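# The brute-force idea above, sketched outside QGIS with pyproj (an assumption:
# pyproj is installed; the candidate list and bounds are illustrative only):
#
#   from pyproj import Transformer
#   candidates = ["EPSG:3857", "EPSG:32633", "EPSG:2180"]
#   bounds = (500000, 5500000, 510000, 5510000)   # layer extent in unknown CRS
#   target = (14.0, 49.0, 16.0, 51.0)             # expected lon/lat area
#   for crs in candidates:
#       t = Transformer.from_crs(crs, "EPSG:4326", always_xy=True)
#       x0, y0 = t.transform(bounds[0], bounds[1])
#       x1, y1 = t.transform(bounds[2], bounds[3])
#       if not (x1 < target[0] or x0 > target[2] or y1 < target[1] or y0 > target[3]):
#           print('candidate: {}'.format(crs))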
|
makerbot/ReplicatorG
|
skein_engines/skeinforge-35/skeinforge_application/skeinforge_plugins/craft_plugins/temperature.py
|
Python
|
gpl-2.0
| 9,647 | 0.017829 |
"""
This page is in the table of contents.
Temperature is a script to set the temperature for the object and raft.
==Operation==
The default 'Activate Temperature' checkbox is on. When it is on, the functions described below will work; when it is off, they will not be called.
==Settings==
===Rate===
The default cooling rate and heating rate for the extruder were both derived from bothacker's graph at:
http://bothacker.com/wp-content/uploads/2009/09/18h5m53s9.29.2009.png
====Cooling Rate====
Default is three degrees Celcius per second.
Defines the cooling rate of the extruder.
====Heating Rate====
Default is ten degrees Celcius per second.
Defines the heating rate of the extruder.
===Temperature===
====Base Temperature====
Default for ABS is two hundred degrees Celcius.
Defines the raft base temperature.
====Interface Temperature====
Default for ABS is two hundred degrees Celcius.
Defines the raft interface temperature.
====Object First Layer Infill Temperature====
Default for ABS is 195 degrees Celcius.
Defines the infill temperature of the first layer of the object.
====Object First Layer Perimeter Temperature====
Default for ABS is two hundred and twenty degrees Celcius.
Defines the perimeter temperature of the first layer of the object.
====Object Next Layers Temperature====
Default for ABS is two hundred and thirty degrees Celcius.
Defines the temperature of the next layers of the object.
====Support Layers Temperature====
Default for ABS is two hundred degrees Celcius.
Defines the support layers temperature.
====Supported Layers Temperature====
Default for ABS is two hundred and thirty degrees Celcius.
Defines the temperature of the supported layers of the object, those layers which are right above a support layer.
==Examples==
The following examples add temperature information to the file Screw Holder Bottom.stl. The examples are run in a terminal in the folder which contains Screw Holder Bottom.stl and temperature.py.
> python temperature.py
This brings up the temperature dialog.
> python temperature.py Screw Holder Bottom.stl
The temperature tool is parsing the file:
Screw Holder Bottom.stl
..
The temperature tool has created the file:
.. Screw Holder Bottom_temperature.gcode
> python
Python 2.5.1 (r251:54863, Sep 22 2007, 01:43:31)
[GCC 4.2.1 (SUSE Linux)] on linux2
Type "help", "copyright", "credits" or "license" for more information.
>>> import temperature
>>> temperature.main()
This brings up the temperature dialog.
>>> temperature.writeOutput('Screw Holder Bottom.stl')
The temperature tool is parsing the file:
Screw Holder Bottom.stl
..
The temperature tool has created the file:
.. Screw Holder Bottom_temperature.gcode
"""
from __future__ import absolute_import
#Init has to be imported first because it has code to workaround the python bug where relative imports don't work if the module is imported as a main module.
import __init__
from fabmetheus_utilities.fabmetheus_tools import fabmetheus_interpret
from fabmetheus_utilities import archive
from fabmetheus_utilities import euclidean
from fabmetheus_utilities import gcodec
from fabmetheus_utilities import intercircle
from fabmetheus_utilities import settings
from skeinforge_application.skeinforge_utilities import skeinforge_craft
from skeinforge_application.skeinforge_utilities import skeinforge_polyfile
from skeinforge_application.skeinforge_utilities import skeinforge_profile
import math
import sys
__author__ = 'Enrique Perez (perez_enrique@yahoo.com)'
__date__ = '$Date: 2008/21/04 $'
__license__ = 'GPL 3.0'
def getCraftedText( fileName, text = '', repository=None):
"Temperature the file or text."
return getCraftedTextFromText( archive.getTextIfEmpty( fileName, text ), repository )
def getCraftedTextFromText(gcodeText, repository=None):
"Temperature a gcode linear move text."
if gcodec.isProcedureDoneOrFileIsEmpty( gcodeText, 'temperature'):
return gcodeText
if repository == None:
repository = settings.getReadRepository( TemperatureRepository() )
if not repository.activateTemperature.value:
return gcodeText
return TemperatureSkein().getCraftedGcode(gcodeText, repository)
def getNewRepository():
"Get the repository constructor."
return TemperatureRepository()
def writeOutput(fileName=''):
"Temperature a gcode linear move file."
fileName = fabmetheus_interpret.getFirstTranslatorFileNameUnmodified(fileName)
if fileName != '':
skeinforge_craft.writeChainTextWithNounMessage( fileName, 'temperature')
class TemperatureRepository:
"A class to handle the temperature settings."
def __init__(self):
"Set the default settings, execute title & settings fileName."
skeinforge_profile.addListsToCraftTypeRepository('skeinforge_application.skeinforge_plugins.craft_plugins.temperature.html', self )
self.fileNameInput = settings.FileNameInput().getFromFileName( fabmetheus_interpret.getGNUTranslatorGcodeFileTypeTuples(), 'Open File for Temperature', self, '')
self.activateTemperature = settings.BooleanSetting().getFromValue('Activate Temperature:', self, True )
settings.LabelSeparator().getFromRepository(self)
settings.LabelDisplay().getFromName('- Rate -', self )
self.coolingRate = settings.FloatSpin().getFromValue( 1.0, 'Cooling Rate (Celcius/second):', self, 20.0, 3.0 )
self.heatingRate = settings.FloatSpin().getFromValue( 1.0, 'Heating Rate (Celcius/second):', self, 20.0, 10.0 )
settings.LabelSeparator().getFromRepository(self)
settings.LabelDisplay().getFromName('- Temperature -', self )
self.baseTemperature = settings.FloatSpin().getFromValue( 140.0, 'Base Temperature (Celcius):', self, 260.0, 200.0 )
self.interfaceTemperature = settings.FloatSpin().getFromValue( 140.0, 'Interface Temperature (Celcius):', self, 260.0, 200.0 )
self.objectFirstLayerInfillTemperature = settings.FloatSpin().getFromValue( 140.0, 'Object First Layer Infill Temperature (Celcius):', self, 260.0, 195.0 )
self.objectFirstLayerPerimeterTemperature = settings.FloatSpin().getFromValue( 140.0, 'Object First Layer Perimeter Temperature (Celcius):', self, 260.0, 220.0 )
self.objectNextLayersTemperature = settings.FloatSpin().getFromValue( 140.0, 'Object Next Layers Temperature (Celcius):', self, 260.0, 230.0 )
self.supportLayersTemperature = settings.FloatSpin().getFromValue( 140.0, 'Support Layers Temperature (Celcius):', self, 260.0, 200.0 )
self.supportedLayersTemperature = settings.FloatSpin().getFromValue( 140.0, 'Supported Layers Temperature (Celcius):', self, 260.0, 230.0 )
self.executeTitle = 'Temperature'
def execute(self):
"Temperature button has been clicked."
fileNames = skeinforge_polyfile.getFileOrDirectoryTypesUnmodifiedGcode(self.fileNameInput.value, fabmetheus_interpret.getImportPluginFileNames(), self.fileNameInput.wasCancelled)
for fileName in fileNames:
writeOutput(fileName)
class TemperatureSkein:
"A class to temperature a skein of extrusions."
def __init__(self):
self.distanceFeedRate = gcodec.DistanceFeedRate()
self.lineIndex = 0
self.lines = None
def getCraftedGcode(self, gcodeText, repository):
"Parse gcode text and store the temperature gcode."
self.repository = repository
self.lines = archive.getTextLines(gcodeText)
if self.repository.coolingRate.value < 0.1:
print('The cooling rate should be more than 0.1, any cooling rate less than 0.1 will be treated as 0.1.')
self.repository.coolingRate.value = 0.1
if self.repository.heatingRate.value < 0.1:
print('The heating rate should be more than 0.1, any heating rate less than 0.1 will be treated as 0.1.')
self.repository.heatingRate.value = 0.1
self.parseInitialization()
self.distanceFeedRate.addLines( self.lines[self.lineIndex :] )
return self.distanceFeedRate.output.getvalue()
def parseInitialization(self):
'Parse gcode initialization and store the parameters.'
for self.lineIndex in xrange(len(self.lines)):
line = self.lines[self.lineIndex]
splitLine = gcodec.getSplitLineBeforeBracketSemicolon(line)
firstWord = gcodec.getFirstWord(splitLine)
self.distanceFeedRate.parseSplitLine(firstWord, splitLine)
if firstWord == '(</extruderInitialization>)':
se
|
pygeek/PyInterstate
|
PyInterstate.py
|
Python
|
gpl-3.0
| 6,996 | 0.008719 |
#PyInterstate by @pygeek
import urllib2
import json
class InterstateError(Exception):
"""Base class for Interstate App Exceptions."""
pass
class AuthError(InterstateError):
"""Exception raised upon authentication errors."""
pass
class IdError(InterstateError):
"""Raised when an operation attempts to query's an Interstate \
Road or Roadmap that does not exist.
"""
pass
class InterstateApp(object):
"""Pythonic Interstate App API Wrapper
(http://interstateapp.com)
Requires:
-Python 2.6+
"""
__version__ = "0.2.0"
def __init__(self):
self.protocol = "http://"
self.base_url = "interstateapp.com"
self.api_version = "v1"
public_key = "public-key"
private_key = "private-key"
"""Installing opener authentication for inevitable, \
subsequent requests
"""
# create a password manager
password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
password_mgr.add_password(None, self.protocol + \
self.base_url, public_key, private_key)
handler = urllib2.HTTPBasicAuthHandler(password_mgr)
# create "opener" (OpenerDirector instance)
urlopener = urllib2.build_opener(handler)
urllib2.install_opener(urlopener)
def auth_test(self):
"""Authenticate Interstate App credentials; *public_key* : *private_key*."""
listAll_url = "{0}{1}/api/{2}/roadmap/listAll" \
.format(self.protocol, self.base_url, self.api_version)
try:
urllib2.urlopen(listAll_url)
except:
raise AuthError("Authentication Error: Please verify credentials.")
return True
def id_test(self,roadmap_id=None,road_id=None):
"""Verify whether Road or Roadmap Id exists."""
roadmap_get = "{0}{1}/api/{2}/roadmap/get/id/{3}" \
.format(self.protocol, self.base_url, self.api_version, \
roadmap_id)
road_get = "{0}{1}/api/{2}/road/get/id/{3}" \
.format(self.protocol, self.base_url, self.api_version, \
road_id)
if roadmap_id:
try:
urllib2.urlopen(roadmap_get)
except:
raise IdError("Id Error: Roadmap Id \"{0}\" does not exist.".format(roadmap_id))
return True
elif road_id:
try:
urllib2.urlopen(road_get)
except:
raise IdError("Id Error: Roadmap Id \"{0}\" does not exist.".format(road_id))
return True
class Roadmap(InterstateApp):
"""Contains methods for the Roadmap object."""
def get(self, roadmap_id):
"""roadmap/get:
Retrieve information regarding a specific \
Interstate roadmap.
Parameters:
- id(Roadmap ID)
The unique id of the Interstate roadmap.
Example Request:
http://interstateapp.com/api/v1/roadmap/get/id/ \
4c2d3b5f8ead0ec070010000
Outputs: JSON
See: http://interstateapp.com/developers/method/0/0
"""
if self.auth_test() and self.id_test(roadmap_id=roadmap_id):
get_url = "{0}{1}/api/{2}/roadmap/get/id/{3}" \
.format(self.protocol, self.base_url, self.api_version, \
roadmap_id)
roadmap = urllib2.urlopen(get_url)
roadmap = roadmap.read()
return json.loads(roadmap)
else:
return False
def listAll(self):
"""roadmap/listAll:
List all Interstate roadmaps associated with the \
used API Key.
Parameters:
None
Example Request:
http://interstateapp.com/api/v1/roadmap/listAll
Outputs: JSON
See: http://interstateapp.com/developers/method/0/1
"""
if self.auth_test():
listAll_url = "{0}{1}/api/{2}/roadmap/listAll" \
.format(self.protocol, self.base_url, self.api_version)
roadmap = urllib2.urlopen(listAll_url)
roadmap = roadmap.read()
return json.loads(roadmap)
else:
return False
def roads(self, roadmap_id):
"""roadmap/roads:
List all roads attached to the specific Interstate roadmap.
Parameters:
- id(Roadmap ID)
The unique id of the Interstate roadmap.
Example Request:
http://interstateapp.com/api/v1/roadmap/roads/id/ \
4c2d3b5f8ead0ec070010000
Outputs: JSON
See: http://interstateapp.com/developers/method/0/2
"""
if self.auth_test() and self.id_test(roadmap_id=roadmap_id):
roads_url = "{0}{1}/api/{2}/roadmap/roads/id/{3}" \
.format(self.protocol, self.base_url, self.api_version, \
roadmap_id)
roadmap = urllib2.urlopen(roads_url)
roadmap = roadmap.read()
return json.loads(roadmap)
else:
return False
class Road(InterstateApp):
"""Contains methods for the Road object."""
def get(self, road_id):
"""road/get:
Retrieve information regarding a specific Interstate road.
Parameters:
- id(Road ID)
The unique id of the Interstate road.
Example Request:
http://interstateapp.com/api/v1/road/get/id/ \
4c2d3b5f8ead0ec070010000
Outputs: JSON
See: http://interstateapp.com/developers/method/1/0
"""
if self.auth_test() and self.id_test(road_id=road_id):
get_url = "{0}{1}/api/{2}/road/get/id/{3}" \
.format(self.protocol, self.base_url, self.api_version, \
road_id)
road = urllib2.urlopen(get_url)
road = road.read()
return json.loads(road)
else:
return False
def updates(self, road_id):
"""road/get:
Retrieve updates attached to a specific Interstate road.
Parameters:
- id(Road ID)
The unique id of the Interstate road.
Example Request:
http://interstateapp.com/api/v1/road/updates/id/
4c2d3b5f8ead0ec070010000
Outputs: JSON
See: http://interstateapp.com/developers/method/1/1
"""
if self.auth_test() and self.id_test(road_id=road_id):
updates_url = "{0}{1}/api/{2}/road/updates/id/{3}" \
.format(self.protocol, self.base_url, self.api_version, \
road_id)
road = urllib2.urlopen(updates_url)
road = road.read()
return json.loads(road)
else:
return False
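# A hypothetical usage sketch (assumes real keys replace the placeholder
# public_key/private_key set in InterstateApp.__init__; the id below is the
# illustrative one from the docstrings):
#
#   roadmaps = Roadmap().listAll()
#   roads = Roadmap().roads('4c2d3b5f8ead0ec070010000')
#   updates = Road().updates('4c2d3b5f8ead0ec070010000')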
|
deanwilson/yum-transaction-json
|
transaction-json.py
|
Python
|
gpl-3.0
| 1,282 | 0 |
from yum.plugins import PluginYumExit, TYPE_CORE, TYPE_INTERACTIVE
try:
import json
except ImportError:
import simplejson as json
requires_api_version = '2.5'
plugin_type = (TYPE_INTERACTIVE,)
def config_hook(conduit):
parser = conduit.getOptParser()
parser.add_option('', '--json', dest='json', action='store_true',
default=False,
help="show pending package changes as JSON")
def postresolve_hook(conduit):
opts, commands = conduit.getCmdLine()
if opts.json:
packages = {}
for transaction in conduit.getTsInfo():
if transaction.name not in packages:
packages[transaction.name] = {}
version = {
"version": transaction.version,
"release": transaction.release,
"epoch": transaction.epoch,
"arch": transaction.arch,
"st
|
ate": transaction.ts_state,
"repo": getattr(transaction.po, 'repoid')
}
if transaction.ts_state:
packages[transaction.name]["pending"] = version
else:
packages[transaction.name]["current"] = version
print(json.dumps(packages))
raise PluginYumExit('')
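# Illustrative invocation and output shape (the package data below is made up;
# the real JSON mirrors the "current"/"pending" keys built above):
#
#   $ yum --json update
#   {"bash": {"pending": {"version": "4.2.46", "release": "34.el7",
#             "epoch": "0", "arch": "x86_64", "state": "u", "repo": "base"}}}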
|
jonmsawyer/site-tools
|
flgetpics/cookie.py
|
Python
|
mit
| 97 | 0 |
# Log into the site with your browser, obtain the "Cookie" header,
|
# and put it here
cookie = ''
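# Illustrative shape only (the names and values below are fake):
#   cookie = 'PHPSESSID=abc123; remember_me=1'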
|
orzubalsky/tradeschool
|
ts/apps/tradeschool/forms.py
|
Python
|
gpl-3.0
| 7,844 | 0.000382 |
from django.forms import *
from django.forms.formsets import BaseFormSet
from django.utils.translation import ugettext_lazy as _
from django.contrib.sites.models import Site
from tradeschool.models import *
class DefaultBranchForm(Form):
def __init__(self, user, redirect_to, *args, **kwargs):
super(DefaultBranchForm, self).__init__(*args, **kwargs)
if user.is_superuser:
branches = Branch.objects.all()
else:
branches = Branch.objects.filter(pk__in=user.branches_organized.all())
choices = [(o.id, unicode(o.title)) for o in branches]
self.fields['default_branch'] = forms.ChoiceField(choices=choices)
if user.default_branch:
self.initial['default_branch'] = user.default_branch.pk
self.initial['organizer_id'] = user.pk
self.initial['redirect_to'] = redirect_to
default_branch = forms.ChoiceField()
organizer_id = forms.IntegerField(widget=forms.HiddenInput)
redirect_to = forms.CharField(widget=forms.HiddenInput)
class TimeModelChoiceField(forms.ModelChoiceField):
def label_from_instance(self, obj):
from django.utils import timezone
current_tz = timezone.get_current_timezone()
date = obj.start_time.astimezone(current_tz).strftime('%A, %b %d')
time = obj.start_time.astimezone(current_tz).strftime(
'%I:%M%p').lstrip('0').lower()
if obj.venue is not None:
return "%s %s at %s" % (date, time, obj.venue)
return "%s %s" % (date, time)
class TimeSelectionForm(Form):
"""
A simple dropdown menu for teachers to select an available time
when submitting a class. Uses the Time model
"""
time = TimeModelChoiceField(
queryset=Time.objects.all(),
error_messages={'required': _('Please select a time'), }
)
class BranchForm(ModelForm):
def __init__(self, *args, **kwargs):
super(BranchForm, self).__init__(*args, **kwargs)
self.fields['city'].error_messages['required'] = _(
"Please enter a city")
self.fields['country'].error_messages['required'] = _(
"Please enter a country")
self.initial['site'] = Site.objects.get_current()
class Meta:
model = Branch
fields = (
'city',
'state',
'country',
)
class TeacherForm (ModelForm):
def __init__(self, *args, **kwargs):
"Sets custom meta data to the form's fields"
super(ModelForm, self).__init__(*args, **kwargs)
self.fields['fullname'].error_messages['required'] = _(
"Please enter your name")
self.fields['email'].error_messages['required'] = _(
"Please enter your email")
self.fields['bio'].error_messages['required'] = _(
"Please tell us about yourself")
self.fields['phone'].error_messages['required'] = _(
"Please enter phone number")
class Meta:
model = Person
fields = ('fullname', 'email', 'phone', 'bio', 'website')
# since bio is set to blank=True in the Person model
# to accommodate students, we're setting it here manually.
bio = forms.CharField(
required=True,
label=_("A few sentences about you"),
help_text=_("For prospective students to see on the website"),
widget=forms.Textarea
)
class OrganizerForm(TeacherForm):
"""
"""
def __init__(self, *args, **kwargs):
"Sets custom meta data to the form's fields"
super(TeacherForm, self).__init__(*args, **kwargs)
self.fields['fullname'].error_messages['required'] = _(
"Please enter your name")
self.fields['email'].error_messages['required'] = _(
"Please enter your email")
self.fields['names_of_co_organizers'].error_messages['required'] = _(
"Please enter the names of at least one or two more organizers")
self.fields['bio'].error_messages['required'] = _(
"Please tell us about why you would like to open a Trade School in your area")
class Meta:
model = Person
fields = (
'fullname',
'names_of_co_organizers',
'email',
'bio',
)
# since names_of_co_organizers is set to blank=True in the Person model
# to accommodate students and teachers, we're setting it here manually.
names_of_co_organizers = forms.CharField(
required=True,
label=_("Names of Co-Organizers"),
)
bio = forms.CharField(
required=True,
label=_("A few sentences about why your group wants to open a Trade School"),
widget=forms.Textarea
)
class CourseForm (ModelForm):
def __init__(self, *args, **kwargs):
"Sets custom meta data to the form's fields"
super(ModelForm, self).__init__(*args, **kwargs)
self.fields['title'].error_messages['required'] = _(
"Please enter a class title")
self.fields['description'].error_messages['required'] = _(
"Please enter a class description")
self.fields['max_students'].error_messages['required'] = _(
"Please enter the maximum number of students in your class")
class Meta:
model = Course
fields = ('title', 'description', 'max_students')
class BarterItemForm (ModelForm):
def __init__(self, *args, **kwargs):
"Sets custom meta data to the form's fields"
super(ModelForm, self).__init__(*args, **kwargs)
self.fields['title'].widget.attrs['class'] = 'barter_item'
self.fields['title'].error_messages['required'] = _(
"Barter item cannot be blank")
class Meta:
model = BarterItem
fields = ('title',)
class BaseBarterItemFormSet(BaseFormSet):
def __init__(self, branch, *args, **kwargs):
""
self.branch = branch
super(BaseBarterItemFormSet, self).__init__(*args, **kwargs)
def clean(self):
"Checks that at least 5 barter items form are filled"
count = 0
required = self.branch.min_barteritems
if any(self.errors):
return
for form in self.forms:
if form.is_bound:
if form['title'].data:
count += 1
if count < required:
raise forms.ValidationError(
_("Please add at least %i barter items" % required)
)
class RegistrationForm(ModelForm):
def __init__(self, course, *args, **kwargs):
super(RegistrationForm, self).__init__(*args, **kwargs)
self.fields['items'].queryset = BarterItem.objects.filter(
course=course)
self.fields['items'].error_messages['required'] = _(
"Please select at least one item")
self.fields['items'].empty_label = None
class Meta:
model = Registration
fields = ('items', )
widgets = {'items': CheckboxSelectMultiple(), }
class StudentForm(ModelForm):
def __init__(self, *args, **kwargs):
super(StudentForm, self).__init__(*args, **kwargs)
self.fields['fullname'].error_messages['required'] = _(
"Please enter your name")
self.fields['email'].error_messages['required'] = _(
"Please enter your email")
self.fields['phone'].error_messages['required'] = _(
"Please enter your phone number")
class Meta:
model = Person
fields = ('fullname', 'email', 'phone')
class FeedbackForm(ModelForm):
def __init__(self, *args, **kwargs):
super(FeedbackForm, self).__init__(*args, **kwargs)
self.fields['content'].error_messages['required'] = _(
"Please enter your feedback")
class Meta:
model = Feedback
fields = ('content',)
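# A hypothetical wiring sketch for the barter-item formset (branch is assumed
# to be a Branch instance; formset_factory comes from django.forms.formsets):
#
#   BarterItemFormSet = formset_factory(
#       BarterItemForm, formset=BaseBarterItemFormSet, extra=branch.min_barteritems)
#   formset = BarterItemFormSet(branch, data=request.POST)
#   if formset.is_valid():
#       ...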
|
aspiers/crmsh
|
modules/ui_ra.py
|
Python
|
gpl-2.0
| 3,190 | 0.000627 |
# Copyright (C) 2008-2011 Dejan Muhamedagic <dmuhamedagic@suse.de>
# Copyright (C) 2013 Kristoffer Gronlund <kgronlund@suse.com>
# See COPYING for license information.
from . import command
from . import completers as compl
from . import utils
from . import ra
from . import constants
from . import options
def complete_class_provider_type(args):
'''
This is just too complicated to complete properly...
'''
ret = set([])
classes = ra.ra_classes()
for c in classes:
if c != 'ocf':
types = ra.ra_types(c)
for t in types:
ret.add('%s:%s' % (c, t))
providers = ra.ra_providers_all('ocf')
for p in providers:
types = ra.ra_types('ocf', p)
for t in types:
ret.add('ocf:%s:%s' % (p, t))
return list(ret)
class RA(command.UI):
'''
CIB shadow management class
'''
name = "ra"
provider_classes = ["ocf"]
def do_classes(self, context):
"usage: classes"
for c in ra.ra_classes():
if c in self.provider_classes:
providers = ra.ra_providers_all(c)
if providers:
print "%s / %s" % (c, ' '.join(providers))
else:
print "%s" % c
@command.skill_level('administrator')
def do_providers(self, context, ra_type, ra_class="ocf"):
"usage: providers <ra> [<class>]"
print ' '.join(ra.ra_providers(ra_type, ra_class))
@command.skill_level('administrator')
@command.completers(compl.call(ra.ra_classes), lambda args: ra.ra_providers_all(args[1]))
def do_list(self, context, class_, provider_=None):
"usage: list <class> [<provider>]"
if class_ not in ra.ra_classes():
context.fatal_error("class %s does not exist" % class_)
if provider_ and provider_ not in ra.ra_providers_all(class_):
context.fatal_error("there is no provider %s for class %s" % (provider_, class_))
types = ra.ra_types(class_, provider_)
if options.regression_tests:
for t in types:
print t
else:
utils.multicolumn(types)
@command.skill_level('administrator')
@command.alias('meta')
@command.completers(complete_class_provider_type)
def do_info(self, context, *args):
"usage: info [<class>:[<provider>:]]<type>"
if len(args) == 0:
context.fatal_error("Expected [<class>:[<provider>:]]<type>")
elif len(args) > 1: # obsolete syntax
if len(args) < 3:
ra_type, ra_class, ra_provider = args[0], args[1], "heartbeat"
else:
ra_type, ra_class, ra_provider = args[0], args[1], args[2]
elif args[0] in constants.meta_progs:
ra_class, ra_provider, ra_type = args[0], None, None
else:
ra_class, ra_provider, ra_type = ra.disambiguate_ra_type(args[0])
agent = ra.RAInfo(ra_class, ra_type, ra_provider)
if agent.mk_ra_node() is None:
return False
try:
utils.page_string(agent.meta_pretty())
except Exception, msg:
context.fatal_error(msg)
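# Illustrative interactive use of this level in the crm shell (agent names
# depend on what is installed locally; output elided):
#
#   crm(live)# ra classes
#   crm(live)# ra list ocf heartbeat
#   crm(live)# ra info ocf:heartbeat:IPaddr2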
|
datalogics/scons
|
src/engine/SCons/Tool/bcc32.py
|
Python
|
mit
| 2,993 | 0.005012 |
"""SCons.Tool.bcc32
XXX
"""
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import os
import os.path
import string
import SCons.Defaults
import SCons.Tool
import SCons.Util
def findIt(program, env):
# First search in the SCons path and then the OS path:
borwin = env.WhereIs(program) or SCons.Util.WhereIs(program)
if borwin:
dir = os.path.dirname(borwin)
path = env['ENV'].get('PATH', [])
if not path:
path = []
if SCons.Util.is_String(path):
path = string.split(path, os.pathsep)
env['ENV']['PATH'] = string.join([dir]+path, os.pathsep)
return borwin
def generate(env):
findIt('bcc32', env)
"""Add Builders and construction variables for bcc to an
Environment."""
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
for suffix in ['.c', '.cpp']:
static_obj.add_action(suffix, SCons.Defaults.CAction)
shared_obj.add_action(suffix, SCons.Defaults.ShCAction)
static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter)
shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter)
env['CC'] = 'bcc32'
env['CCFLAGS'] = SCons.Util.CLVar('')
env['CFLAGS'] = SCons.Util.CLVar('')
env['CCCOM'] = '$CC -q $CFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -c -o$TARGET $SOURCES'
env['SHCC'] = '$CC'
env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS')
env['SHCFLAGS'] = SCons.Util.CLVar('$CFLAGS')
env['SHCCCOM'] = '$SHCC -WD $SHCFLAGS $SHCCFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -c -o$TARGET $SOURCES'
env['CPPDEFPREFIX'] = '-D'
env['CPPDEFSUFFIX'] = ''
env['INCPREFIX'] = '-I'
env['INCSUFFIX'] = ''
env['SHOBJSUFFIX'] = '.dll'
env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 0
env['CFILESUFFIX'] = '.cpp'
def exists(env):
return findIt('bcc32', env)
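# A minimal SConstruct sketch using this tool (assumes bcc32 is on PATH or
# findable by findIt above; file names are illustrative):
#
#   env = Environment(tools=['bcc32'])
#   env.Program('hello', ['hello.c'])
#   env.SharedLibrary('greet', ['greet.cpp'])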
|
Metaswitch/calico-nova
|
nova/tests/unit/api/openstack/compute/contrib/test_flavor_manage.py
|
Python
|
apache-2.0
| 17,379 | 0.000173 |
# Copyright 2011 Andrew Bogott for the Wikimedia Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import mock
from oslo.serialization import jsonutils
import webob
from nova.api.openstack.compute.contrib import flavor_access
from nova.api.openstack.compute.contrib import flavormanage as flavormanage_v2
from nova.api.openstack.compute.plugins.v3 import flavor_manage as \
flavormanage_v21
from nova.compute import flavors
from nova import context
from nova import db
from nova import exception
from nova import test
from nova.tests.unit.api.openstack import fakes
def fake_db_flavor(**updates):
db_flavor = {
'root_gb': 1,
'ephemeral_gb': 1,
'name': u'frob',
'deleted': False,
'created_at': datetime.datetime(2012, 1, 19, 18, 49, 30, 877329),
'updated_at': None,
'memory_mb': 256,
'vcpus': 1,
'flavorid': 1,
'swap': 0,
'rxtx_factor': 1.0,
'extra_specs': {},
'deleted_at': None,
'vcpu_weight': None,
'id': 7,
'is_public': True,
'disabled': False,
}
if updates:
db_flavor.update(updates)
return db_flavor
def fake_get_flavor_by_flavor_id(flavorid, ctxt=None, read_deleted='yes'):
if flavorid == 'failtest':
raise exception.FlavorNotFound(flavor_id=flavorid)
elif not str(flavorid) == '1234':
raise Exception("This test expects flavorid 1234, not
|
%s" % flavorid)
if read_deleted != 'no':
raise test.TestingException("Should not be reading deleted")
return fake_db_flavor(flavorid=flavorid)
def fake_destroy(flavorname):
pass
def fake_create(context, kwargs, projects=None):
newflavor = fake_db_flavor()
flavorid = kwargs.get('flavorid')
if flavorid is None:
flavorid = 1234
newflavor['flavorid'] = flavorid
newflavor["name"] = kwargs.get('name')
newflavor["memory_mb"] = int(kwargs.get('memory_mb'))
newflavor["vcpus"] = int(kwargs.get('vcpus'))
newflavor["root_gb"] = int(kwargs.get('root_gb'))
newflavor["ephemeral_gb"] = int(kwargs.get('ephemeral_gb'))
newflavor["swap"] = kwargs.get('swap')
newflavor["rxtx_factor"] = float(kwargs.get('rxtx_factor'))
newflavor["is_public"] = bool(kwargs.get('is_public'))
newflavor["disabled"] = bool(kwargs.get('disabled'))
return newflavor
class FlavorManageTestV21(test.NoDBTestCase):
controller = flavormanage_v21.FlavorManageController()
validation_error = exception.ValidationError
base_url = '/v2/fake/flavors'
def setUp(self):
super(FlavorManageTestV21, self).setUp()
self.stubs.Set(flavors,
"get_flavor_by_flavor_id",
fake_get_flavor_by_flavor_id)
self.stubs.Set(flavors, "destroy", fake_destroy)
self.stubs.Set(db, "flavor_create", fake_create)
self.ctxt = context.RequestContext('fake', 'fake',
is_admin=True, auth_token=True)
self.app = self._setup_app()
self.request_body = {
"flavor": {
"name": "test",
"ram": 512,
"vcpus": 2,
"disk": 1,
"OS-FLV-EXT-DATA:ephemeral": 1,
"id": unicode('1234'),
"swap": 512,
"rxtx_factor": 1,
"os-flavor-access:is_public": True,
}
}
self.expected_flavor = self.request_body
def _setup_app(self):
return fakes.wsgi_app_v21(init_only=('os-flavor-manage',
'os-flavor-rxtx',
'os-flavor-access', 'flavors',
'os-flavor-extra-data'))
def test_delete(self):
req = fakes.HTTPRequest.blank(self.base_url + '/1234')
res = self.controller._delete(req, 1234)
# NOTE: on v2.1, http status code is set as wsgi_code of API
# method instead of status_int in a response object.
if isinstance(self.controller,
flavormanage_v21.FlavorManageController):
status_int = self.controller._delete.wsgi_code
else:
status_int = res.status_int
self.assertEqual(202, status_int)
# subsequent delete should fail
self.assertRaises(webob.exc.HTTPNotFound,
self.controller._delete, req, "failtest")
def _test_create_missing_parameter(self, parameter):
body = {
"flavor": {
"name": "azAZ09. -_",
"ram": 512,
"vcpus": 2,
"disk": 1,
"OS-FLV-EXT-DATA:ephemeral": 1,
"id": unicode('1234'),
"swap": 512,
"rxtx_factor": 1,
"os-flavor-access:is_public": True,
}
}
del body['flavor'][parameter]
req = fakes.HTTPRequest.blank(self.base_url)
self.assertRaises(self.validation_error, self.controller._create,
req, body=body)
def test_create_missing_name(self):
self._test_create_missing_parameter('name')
def test_create_missing_ram(self):
self._test_create_missing_parameter('ram')
def test_create_missing_vcpus(self):
self._test_create_missing_parameter('vcpus')
def test_create_missing_disk(self):
self._test_create_missing_parameter('disk')
def _create_flavor_success_case(self, body):
req = webob.Request.blank(self.base_url)
req.headers['Content-Type'] = 'application/json'
req.method = 'POST'
req.body = jsonutils.dumps(body)
res = req.get_response(self.app)
self.assertEqual(200, res.status_code)
return jsonutils.loads(res.body)
def test_create(self):
body = self._create_flavor_success_case(self.request_body)
for key in self.expected_flavor["flavor"]:
self.assertEqual(body["flavor"][key],
self.expected_flavor["flavor"][key])
def test_create_public_default(self):
del self.request_body['flavor']['os-flavor-access:is_public']
body = self._create_flavor_success_case(self.request_body)
for key in self.expected_flavor["flavor"]:
self.assertEqual(body["flavor"][key],
self.expected_flavor["flavor"][key])
def test_create_without_flavorid(self):
del self.request_body['flavor']['id']
body = self._create_flavor_success_case(self.request_body)
for key in self.expected_flavor["flavor"]:
self.assertEqual(body["flavor"][key],
self.expected_flavor["flavor"][key])
def _create_flavor_bad_request_case(self, body):
self.stubs.UnsetAll()
req = webob.Request.blank(self.base_url)
req.headers['Content-Type'] = 'application/json'
req.method = 'POST'
req.body = jsonutils.dumps(body)
res = req.get_response(self.app)
self.assertEqual(res.status_code, 400)
def test_create_invalid_name(self):
self.request_body['flavor']['name'] = 'bad !@#!$%\x00 name'
self._create_flavor_bad_request_case(self.request_body)
def test_create_flavor_name_is_whitespace(self):
self.request_body['flavor']['name'] = ' '
self._create_flavor_bad_request_case(self.request_body)
def test_create_with_name_too_long(self):
self.request_body['flavor']['name'] = 'a' * 256
self._create_flavor_bad_request_case(self.request_body)
|
throwable-one/teamcity-messages
|
tests/integration-tests/unittest_integration_test.py
|
Python
|
apache-2.0
| 27,766 | 0.00544 |
# coding=utf-8
import os
import subprocess
import sys
import pytest
import virtual_environments
from diff_test_tools import expected_messages, SCRIPT
from service_messages import ServiceMessage, assert_service_messages, match
from test_util import run_command
@pytest.fixture(scope='module')
def venv(request):
"""
Prepares a virtual environment for unittest; no extra packages are required
:rtype : virtual_environments.VirtualEnvDescription
"""
return virtual_environments.prepare_virtualenv()
def test_changes_name(venv):
output = run_directly(venv, 'test_changes_name.py')
assert_service_messages(
output,
[
ServiceMessage('testCount', {'count': "1"}),
ServiceMessage('testStarted', {'name': "__main__.Foo.test_aa (1)", 'flowId': "__main__.Foo.test_aa (1)"}),
ServiceMessage('testFinished', {'name': "__main__.Foo.test_aa (11)", 'flowId': "__main__.Foo.test_aa (11)"}),
])
def test_nested_suits(venv):
output = run_directly(venv, 'nested_suits.py')
test_name = '__main__.TestXXX.runTest'
assert_service_messages(
output,
[
ServiceMessage('testCount', {'count': "1"}),
ServiceMessage('testStarted', {'name': test_name, 'captureStandardOutput': 'true', 'flowId': test_name}),
ServiceMessage('testFinished', {'name': test_name}),
])
def test_docstring(venv):
output = run_directly(venv, 'docstring.py')
test_name = '__main__.TestXXX.runTest (A test_)'
assert_service_messages(
output,
[
ServiceMessage('testCount', {'count': "1"}),
ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}),
ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}),
])
def test_assert(venv):
output = run_directly(venv, 'assert.py')
test_name = '__main__.TestXXX.runTest'
ms = assert_service_messages(
output,
[
ServiceMessage('testCount', {'count': "1"}),
ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}),
ServiceMessage('testFailed', {'name': test_name, 'message': 'Failure', 'flowId': test_name}),
ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}),
])
failed_ms = match(ms, ServiceMessage('testFailed', {'name': test_name}))
assert failed_ms.params['details'].index("assert 1 == 0") > 0
def test_fail(venv):
output = run_directly(venv, 'fail_test.py')
test_name = '__main__.TestXXX.runTest'
ms = assert_service_messages(
output,
[
ServiceMessage('testCount', {'count': "1"}),
ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}),
ServiceMessage('testFailed', {'name': test_name, 'message': 'Failure', 'flowId': test_name}),
ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}),
])
failed_ms = match(ms, ServiceMessage('testFailed', {'name': test_name}))
assert failed_ms.params['details'].index('fail("Grr")') > 0
def test_setup_error(venv):
output = run_directly(venv, 'setup_error.py')
test_name = '__main__.TestXXX.runTest'
ms = assert_service_messages(
output,
[
ServiceMessage('testCount', {'count': "1"}),
ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}),
ServiceMessage('testFailed', {'name': test_name, 'message': 'Error', 'flowId': test_name}),
ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}),
])
failed_ms = match(ms, ServiceMessage('testFailed', {'name': test_name}))
assert failed_ms.params['details'].index("RRR") > 0
assert failed_ms.params['details'].index("setUp") > 0
def test_teardown_error(venv):
output = run_directly(venv, 'teardown_error.py')
test_name = '__main__.TestXXX.runTest'
ms = assert_service_messages(
output,
[
ServiceMessage('testCount', {'count': "1"}),
ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}),
ServiceMessage('testFailed', {'name': test_name, 'message': 'Error', 'flowId': test_name}),
ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}),
])
failed_ms = match(ms, ServiceMessage('testFailed', {'name': test_name}))
assert failed_ms.params['details'].index("RRR") > 0
assert failed_ms.params['details'].index("tearDown") > 0
@pytest.mark.skipif("sys.version_info < (2, 7)", reason="buffer requires Python 2.7+")
def test_buffer_output(venv):
output = run_directly(venv, 'buffer_output.py')
test_name = '__main__.SpamTest.test_test'
assert_service_messages(
output,
[
ServiceMessage('testCount', {'count': "1"}),
ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}),
ServiceMessage('testStdOut', {'out': "stdout_test1|n", 'flowId': test_name}),
ServiceMessage('testStdOut', {'out': "stdout_test2|n", 'flowId': test_name}),
ServiceMessage('testStdErr', {'out': "stderr_test1", 'flowId': test_name}),
ServiceMessage('testFailed', {'name': test_name, 'flowId': test_name}),
ServiceMessage('testStdErr', {'out': "stderr_test2", 'flowId': test_name}),
ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}),
])
# Check no stdout_test or stderr_test in the output (not in service messages)
# it checks self._mirrorOutput = False
output = output.replace("out='stdout_test", "").replace("out='stderr_test", "")
assert output.find("stdout_test") < 0
assert output.find("stderr_test") < 0
def test_doctests(venv):
output = run_directly(venv, 'doctests.py')
test_name = '__main__.factorial'
assert_service_messages(
output,
[
ServiceMessage('testCount', {'count': "1"}),
ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}),
ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}),
])
def test_skip(venv):
if sys.version_info < (2, 7):
venv = virtual_environments.prepare_virtualenv(list(venv.packages) + ["unittest2==0.5.1"])
output = run_directly(venv, 'skip_test.py')
test_name = '__main__.TestSkip.test_skip_me'
assert_service_messages(
output,
[
ServiceMessage('testCount', {'count': "2"}),
ServiceMessage('testStarted', {'name': '__main__.TestSkip.test_ok'}),
ServiceMessage('testFinished', {'name': '__main__.TestSkip.test_ok'}),
ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}),
ServiceMessage('testIgnored', {'name': test_name, 'message': u'Skipped: testing skipping øпричина', 'flowId': test_name}),
ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}),
])
def test_expected_failure(venv):
if sys.version_info < (2, 7):
venv = virtual_environments.prepare_virtualenv(list(venv.packages) + ["unittest2"])
output = run_directly(venv, 'expected_failure.py')
test_name = '__main__.TestSkip.test_expected_failure'
ms = assert_service_messages(
output,
[
ServiceMessage('testCount', {'count': "1"}),
ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}),
ServiceMessage('testIgnored', {'name': test_name, 'flowId': test_name}),
ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}),
])
failed_ms = match(ms, ServiceMessage('testIgnored', {'name': test_name}))
assert failed_ms.params['message'].find("Expected failure") == 0
assert failed_ms.params['message'].find("this should happen unfortunately") > 0
def test_subtest_ok(venv):
if sys.version_info < (3, 4):
venv = virtual_environments.prepare_virtualenv(list(venv.packages) + ["unittest2"])
output = run_directly(venv, 'subtest_ok.py')
|
Livefyre/protobuf-rpc
|
python/protobuf_rpc/connection.py
|
Python
|
mit
| 3,673 | 0.001089 |
from Queue import Queue, Empty
import contextlib
from logging import getLogger
import random
import time
from gevent.monkey import saved
LOG = getLogger(__name__)
if bool(saved):
LOG.info('using zmq.green...')
import zmq.green as zmq
else:
import zmq
class ZMQConnection(object):
def __init__(self, hosts, maxidle=None, timeout=2 * 1000, maxage=60):
self._last_used = self._born = time.time()
self._closed = False
self.maxidle = maxidle
self.maxage = maxage
self.timeout = timeout
self._zmq_init(hosts)
def _zmq_init(self, hosts):
context = zmq.Context()
random.shuffle(hosts)
self.socket = context.socket(zmq.REQ)
self.socket.setsockopt(zmq.LINGER, 0)
for (host, port) in hosts:
self.socket.connect("tcp://%s:%s" % (host, port))
self.poller = zmq.Poller()
self.poller.register(self.socket, zmq.POLLIN)
def send(self, req):
self.socket.send(req)
self._last_used = time.time()
def recv(self, timeout=None):
if self.poller.poll(timeout or self.timeout):
resp = self.socket.recv()
else:
self.close()
raise TimeoutError("Timeout processing request.")
self._last_used = time.time()
return resp
def close(self):
try:
self.socket.close()
except:
pass
self._closed = True
@property
def closed(self):
if self._closed:
return self._closed
t = time.time()
died_of_old_age = self.maxage and t - self._born > self.maxage
died_of_boredom = self.maxidle and t - self._last_used > self.maxidle
if died_of_old_age:
self.close()
return True
if died_of_boredom:
self.close()
return True
return False
class ConnectionError(IOError):
pass
class ZMQConnectionPool(object):
def __init__(self, create_connection, maxsize=100):
self.maxsize = maxsize
self.pool = Queue()
self.size = 0
self.create_connection = create_connection
def get(self, block=True, timeout=None):
pool = self.pool
if self.size >= self.maxsize or pool.qsize():
# we're over limit or there are already created objects in the queue
try:
conn = pool.get(block=block, timeout=timeout)
except Empty:
raise ConnectionError("Too many connections")
# we got a connection, but it must be valid!
# a null connection means we need to create a new one
if conn and not conn.closed:
return conn
# we didn't get a valid connection, add one.
else:
# we have to room to grow, so reserve a spot!
self.size += 1
try:
conn = self.create_connection()
except:
self.size -= 1
raise
return conn
def put(self, item):
self.pool.put(item)
@contextlib.contextmanager
def connection(self, **kwargs):
"""
:yield: ZMQConnection
"""
conn = None
try:
conn = self.get(**kwargs)
yield conn
except:
# if we had problems let's discard
if conn:
conn.close()
raise
finally:
if conn and conn.closed:
# this "returns" to the pool, but will result
# in a new connection
conn = None
self.put(conn)
class TimeoutError(IOError):
pass
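# A hypothetical usage sketch (host/port are placeholders; a REP socket must
# be answering on the other end for recv to return):
#
#   pool = ZMQConnectionPool(lambda: ZMQConnection([("127.0.0.1", 5555)]))
#   with pool.connection() as conn:
#       conn.send(b"ping")
#       reply = conn.recv(timeout=1000)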
|
CSC591ADBI-TeamProjects/Product-Search-Relevance
|
build_tfidf.py
|
Python
|
mit
| 1,752 | 0.021119 |
import pandas as pd
import numpy as np
import re
from gensim import corpora, models, similarities
from gensim.parsing.preprocessing import STOPWORDS
def split(text):
'''
Split the input text into words/tokens; ignoring stopwords and empty strings
'''
delimiters = ".", ",", ";", ":", "-", "(", ")", " ", "\t"
regexPattern = '|'.join(map(re.escape, delimiters))
return [word for word in re.split(regexPattern, text.lower()) if word not in STOPWORDS and word != ""]
def main():
# Load data
df_train = pd.read_csv('data/train.csv', encoding="ISO-8859-1")
df_desc = pd.read_csv('data/product_descriptions.csv', encoding="ISO-8859-1")
df_attr = pd.read_csv('data/attributes_combined.csv', encoding="ISO-8859-1")
# split the texts
titles = [split(line) for line in df_train["product_title"]]
descs = [split(line) for line in df_desc["product_description"]]
attrs = [[str(line)] if isinstance(line, float) else split(line) for line in df_attr["attr_value"]]
queries = [split(line) for line in df_train["search_term"]]
texts = np.concatenate((titles, descs, attrs, queries))
# remove infrequent words
from collections import defaultdict
frequency = defaultdict(int)
for text in texts:
for token in text:
frequency[token] += 1
texts = [[token for token in text if frequency[token] > 2] for text in texts]
# build dictionary
dictionary = corpora.Dictionary(texts)
dictionary.save('homedepot.dict')
print dictionary
# actually build a bag-of-words corpus
corpus = [dictionary.doc2bow(text) for text in texts]
corpora.MmCorpus.serialize('homedepot.mm', corpus)
# build Tf-idf model
tfidf = models.TfidfModel(corpus)
tfidf.save('homedepot.tfidf')
if __name__ == "__main__":
main()
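# A minimal follow-up sketch: load the artifacts written by main() and score
# one query against the tf-idf model (the query text is illustrative):
#
#   dictionary = corpora.Dictionary.load('homedepot.dict')
#   tfidf = models.TfidfModel.load('homedepot.tfidf')
#   bow = dictionary.doc2bow(split("angle bracket"))
#   print tfidf[bow]   # [(term_id, weight), ...]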
|
holinnn/lupin
|
tests/lupin/validators/test_validator.py
|
Python
|
mit
| 601 | 0 |
# -*- coding: utf-8 -*-
import pytest
from lupin.validators import Equal
from lupin.errors import ValidationError
@pytest.fixture
def invalid():
return Equal("sernine")
@pytest.fixture
def valid():
return Equal("lupin")
class TestAnd(object):
def test_returns_an_and_combination(self, valid, invalid):
combination = valid & invalid
with pytest.raises(ValidationError):
combination("andrésy", [])
class TestOr(object):
def test_returns_an_and_combination(self, valid, invalid):
combination = valid | invalid
combination("lupin", [])
|
buxx/TextDataExtractor
|
sandbox/dalz/implode.py
|
Python
|
gpl-2.0
| 1,003 | 0.002991 |
from sandbox.dalz.data import ArticleCommentCountFileData, ArticlePublicationDateFileData, ArticleAuthorFileData, \
ArticleWordCountFileData, CommentAuthorCommentCountFilesDatas, AuthorArticleCountFilesData, \
AuthorArticlesCommentsCountAverageFilesData, AuthorArticlesWordsCountAverageFilesData, \
ArticlePublicationHourFileData, ArticlePatriceCommentCountFileData
from tde.Implode import Implode
class ArticleImplode(Implode):
_name = 'Articles'
_data_classes = [ArticleWordCountFileData,
ArticleCommentCountFileData,
ArticlePublicationDateFileData,
ArticlePublicationHourFileData,
ArticleAuthorFileData,
ArticlePatriceCommentCountFileData]
class AuthorImplode(Implode):
_name = 'Authors'
    _data_classes = [AuthorArticleCountFilesData,
                     AuthorArticlesCommentsCountAverageFilesData,
AuthorArticlesWordsCountAverageFilesData]
|
nivbend/mock-open
|
src/mock_open/test/__init__.py
|
Python
|
mit
| 161 | 0 |
# pylint: disable=missing-docstring
# pylint: disable=wildcard-import
from .test_mocks import *
from .cpython.testmock import *
from .cpython.testwith import *
|
ecleya/project_cron
|
project_cron/utils/apputil.py
|
Python
|
mit
| 424 | 0 |
from project_cron.utils import processutil
def open(app_name):
script = '''
if application "%s" is not running then
        tell application "%s" to activate
end if
''' % (app_name, app_name)
processutil.call(['/usr/bin/osascript', '-e', script])
def close(app_name):
script = 'tell application "%s" to quit' % app_name
processutil.call(['/usr/bin/osascript', '-e', script])
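# --- Hedged usage sketch (not part of the original file) ---
# macOS-only: activates and then quits an application by name through
# osascript; "Safari" is just an illustrative app name.
if __name__ == '__main__':
    open('Safari')
    close('Safari')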
|
mokuso/scan-gspread-targets
|
scan-gspread-targets.py
|
Python
|
mit
| 1,952 | 0.000512 |
#!/usr/bin/env python
# A python script to take targets from a google spreadsheet and run a
# Nessus vulnerability scan.
import json
import gspread
from oauth2client.service_account import ServiceAccountCredentials
from nessrest import ness6rest
import getpass
# Login with your Google account's API key
scopes = ['https://spreadsheets.google.com/feeds']
credentials = ServiceAccountCredentials.from_json_keyfile_name('API-xxxxxxxxxxxx.json', scopes)
gc = gspread.authorize(credentials)
# Open worksheet from spreadsheet
wks = gc.open("hosts").sheet1
# Get all values from the first column
host_list = wks.col_values(1)
temp_hosts = []
for i in host_list:
# ignore the first entry as it's just header information
    # del host_list[0]
    if i and i != 'IP':
# iterate through all rows and add to a temp array
temp_hosts.append(i)
print(temp_hosts)
# scan
# Scan Settings
# nessus_url = "https://nessus.example.com:8834"
nessus_url = "https://192.168.111.10:8834"
scan_policy = "Basic Network Scan"
scan_name = "My Scan"
# Scanner Credentials
user = getpass._raw_input('User: ')
password = getpass.getpass()
# login = "username"
# password = "password"
scan = ness6rest.Scanner(url=nessus_url, login=user,
password=password, insecure=True)
# Set scan policy that should be used
scan.policy_set(name=scan_policy)
# alt_targets on edit can take an array otherwise a new scan expects a string
hosts = ','.join(temp_hosts)
# Set target and scan name
scan.scan_add(targets=hosts, name=scan_name)
# scan.scan_exists(targets=hosts, name=scan_name)
# Run Scan
scan.scan_run()
# Download results
# scan.action(action="scans", method="get")
# for s in scan.res['scans']:
# scan.scan_name = s['name']
# scan.scan_id = s['id']
# xml_nessus = scan.download_scan(export_format='nessus')
# fp = open('%s_%s.nessus'%(scan.scan_name,scan.scan_id),"w")
# fp.write(xml_nessus)
# fp.close()
|
erh3cq/hyperspy
|
hyperspy/tests/test_non-uniform_not-implemented.py
|
Python
|
gpl-3.0
| 4,442 | 0.001576 |
# -*- coding: utf-8 -*-
# Copyright 2007-2021 The HyperSpy developers
#
# This file is part of HyperSpy.
#
# HyperSpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HyperSpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HyperSpy. If not, see <http://www.gnu.org/licenses/>.
import pytest
import hyperspy.api as hs
from hyperspy.signals import ( Signal1D, Signal2D, EELSSpectrum, EDSTEMSpectrum,
EDSSEMSpectrum, HologramImage )
from hyperspy.components1d import EELSCLEdge
def test_signal():
s = Signal1D([10, 10])
s.axes_manager[0].convert_to_non_uniform_axis()
with pytest.raises(NotImplementedError):
s.fft()
with pytest.raises(NotImplementedError):
s.ifft()
with pytest.raises(NotImplementedError):
s.diff(0)
with pytest.raises(NotImplementedError):
s.rebin(scale=[1])
with pytest.raises(NotImplementedError):
s.split(number_of_parts=2,axis=0)
def test_signal1d():
s = Signal1D(([0, 1]))
s.axes_manager[0].convert_to_non_uniform_axis()
with pytest.raises(NotImplementedError):
s.calibrate()
with pytest.raises(NotImplementedError):
s.shift1D([1])
with pytest.raises(NotImplementedError):
s.estimate_shift1D([1])
with pytest.raises(NotImplementedError):
s.smooth_savitzky_golay()
with pytest.raises(NotImplementedError):
s.smooth_tv()
with pytest.raises(NotImplementedError):
s.filter_butterworth()
with pytest.raises(NotImplementedError):
s.gaussian_filter(1)
def test_signal2d():
s = Signal2D([[10, 10], [10, 10]])
s.axes_manager[0].convert_to_non_uniform_axis()
with pytest.raises(NotImplementedError):
s.align2D()
def test_eels():
s = EELSSpectrum(([0, 1]))
s0 = s.deepcopy()
s.axes_manager[0].convert_to_non_uniform_axis()
with pytest.raises(NotImplementedError):
s.align_zero_loss_peak()
with pytest.raises(NotImplementedError):
s.create_model(ll=s)
with pytest.raises(NotImplementedError):
s.fourier_log_deconvolution(0)
with pytest.raises(NotImplementedError):
s.fourier_ratio_deconvolution(s)
with pytest.raises(NotImplementedError):
s.fourier_ratio_deconvolution(s0)
with pytest.raises(NotImplementedError):
s0.fourier_ratio_deconvolution(s)
with pytest.raises(NotImplementedError):
s.richardson_lucy_deconvolution(s)
with pytest.raises(NotImplementedError):
s.kramers_kronig_analysis()
m = s.create_model()
g = EELSCLEdge('N_K')
with pytest.raises(NotImplementedError):
m.append(g)
def test_eds():
s = EDSTEMSpectrum(([0, 1]))
s2 = EDSSEMSpectrum(([0, 1]))
s.axes_manager[0].convert_to_non_uniform_axis()
s2.axes_manager[0].convert_to_non_uniform_axis()
s.set_microscope_parameters(20)
with pytest.raises(NotImplementedError):
s.get_calibration_from(s)
with pytest.raises(NotImplementedError):
s2.get_calibration_from(s2)
m = s.create_model()
with pytest.raises(NotImplementedError):
m.add_family_lines('Al_Ka')
with pytest.raises(NotImplementedError):
m._set_energy_scale('Al_Ka', [1.0])
with pytest.raises(NotImplementedError):
m._set_energy_offset('Al_Ka', [1.0])
def test_hologram_image():
s = HologramImage([[10, 10], [10, 10]])
s.axes_manager[0].convert_to_non_uniform_axis()
s.axes_manager[1].convert_to_non_uniform_axis()
with pytest.raises(NotImplementedError):
s.estimate_sideband_position()
    with pytest.raises(NotImplementedError):
s.estimate_sideband_size(s)
with pytest.raises(NotImplementedError):
s.reconstruct_phase()
with pytest.raises(NotImplementedError):
s.statistics()
def test_lazy():
s = Signal1D([10, 10]).as_lazy()
s.axes_manager[0].convert_to_non_uniform_axis()
print(s)
with pytest.raises(NotImplementedError):
s.diff(0)
|
vinegret/youtube-dl
|
youtube_dl/extractor/tf1.py
|
Python
|
unlicense
| 3,611 | 0.002777 |
# coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..compat import compat_str
class TF1IE(InfoExtractor):
"""TF1 uses the wat.tv player."""
_VALID_URL = r'https?://(?:(?:videos|www|lci)\.tf1|(?:www\.)?(?:tfou|ushuaiatv|histoire|tvbreizh))\.fr/(?:[^/]+/)*(?P<id>[^/?#.]+)'
_TESTS = [{
'url': 'http://videos.tf1.fr/auto-moto/citroen-grand-c4-picasso-2013-presentation-officielle-8062060.html',
'info_dict': {
'id': '10635995',
'ext': 'mp4',
'title': 'Citroën Grand C4 Picasso 2013 : présentation officielle',
'description': 'Vidéo officielle du nouveau Citroën Grand C4 Picasso, lancé à l\'automne 2013.',
},
'params': {
# Sometimes wat serves the whole file with the --test option
'skip_download': True,
},
'expected_warnings': ['HTTP Error 404'],
}, {
'url': 'http://www.tfou.fr/chuggington/videos/le-grand-mysterioso-chuggington-7085291-739.html',
'info_dict': {
'id': 'le-grand-mysterioso-chuggington-7085291-739',
'ext': 'mp4',
'title': 'Le grand Mystérioso - Chuggington',
'description': 'Le grand Mystérioso - Emery rêve qu\'un article lui soit consacré dans le journal.',
'upload_date': '20150103',
},
'params': {
# Sometimes wat serves the whole file with the --test option
'skip_download': True,
},
'skip': 'HTTP Error 410: Gone',
}, {
'url': 'http://www.tf1.fr/tf1/koh-lanta/videos/replay-koh-lanta-22-mai-2015.html',
'only_matching': True,
}, {
'url': 'http://lci.tf1.fr/sept-a-huit/videos/sept-a-huit-du-24-mai-2015-8611550.html',
'only_matching': True,
}, {
'url': 'http://www.tf1.fr/hd1/documentaire/videos/mylene-farmer-d-une-icone.html',
'only_matching': True,
}, {
'url': 'https://www.tf1.fr/tmc/quotidien-avec-yann-barthes/videos/quotidien-premiere-partie-11-juin-2019.html',
'info_dict': {
'id': '13641379',
'ext': 'mp4',
'title': 'md5:f392bc52245dc5ad43771650c96fb620',
'description': 'md5:44bc54f0a21322f5b91d68e76a544eae',
'upload_date': '20190611',
},
'params': {
# Sometimes wat serves the whole file with the --test option
'skip_download': True,
},
}]
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
wat_id = None
data = self._parse_json(
self._search_regex(
r'__APOLLO_STATE__\s*=\s*({.+?})\s*(?:;|</script>)', webpage,
'data', default='{}'), video_id, fatal=False)
if data:
try:
wat_id = next(
video.get('streamId')
for key, video in data.items()
if isinstance(video, dict)
and video.get('slug') == video_id)
if not isinstance(wat_id, compat_str) or not wat_id.isdigit():
wat_id = None
except StopIteration:
pass
if not wat_id:
wat_id = self._html_search_regex(
(r'(["\'])(?:https?:)?//www\.wat\.tv/embedframe/.*?(?P<id>\d{8})\1',
r'(["\']?)streamId\1\s*:\s*(["\']?)(?P<id>\d+)\2'),
webpage, 'wat id', group='id')
return self.url_result('wat:%s' % wat_id, 'Wat')
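# --- Hedged illustration (not part of the original file) ---
# Exercises the streamId fallback regex from _real_extract on a synthetic
# snippet; the sample markup is invented for illustration only.
if __name__ == '__main__':
    import re
    sample = '"streamId":"13641379"'
    match = re.search(
        r'(["\']?)streamId\1\s*:\s*(["\']?)(?P<id>\d+)\2', sample)
    assert match and match.group('id') == '13641379'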
|
carloderamo/mushroom
|
examples/atari_dqn.py
|
Python
|
mit
| 19,062 | 0.00063 |
import argparse
import datetime
import pathlib
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
import torch.nn.functional as F
from mushroom_rl.algorithms.value import AveragedDQN, CategoricalDQN, DQN,\
DoubleDQN, MaxminDQN, DuelingDQN, NoisyDQN, Rainbow
from mushroom_rl.approximators.parametric import TorchApproximator
from mushroom_rl.core import Core, Logger
from mushroom_rl.environments import *
from mushroom_rl.policy import EpsGreedy
from mushroom_rl.utils.dataset import compute_metrics
from mushroom_rl.utils.parameters import LinearParameter, Parameter
from mushroom_rl.utils.replay_memory import PrioritizedReplayMemory
"""
This script runs Atari experiments with DQN, and some of its variants, as
presented in:
"Human-Level Control Through Deep Reinforcement Learning". Mnih V. et al.. 2015.
"""
class Network(nn.Module):
n_features = 512
def __init__(self, input_shape, output_shape, **kwargs):
super().__init__()
n_input = input_shape[0]
n_output = output_shape[0]
self._h1 = nn.Conv2d(n_input, 32, kernel_size=8, stride=4)
self._h2 = nn.Conv2d(32, 64, kernel_size=4, stride=2)
self._h3 = nn.Conv2d(64, 64, kernel_size=3, stride=1)
self._h4 = nn.Linear(3136, self.n_features)
self._h5 = nn.Linear(self.n_features, n_output)
nn.init.xavier_uniform_(self._h1.weight,
gain=nn.init.calculate_gain('relu'))
nn.init.xavier_uniform_(self._h2.weight,
gain=nn.init.calculate_gain('relu'))
nn.init.xavier_uniform_(self._h3.weight,
gain=nn.init.calculate_gain('relu'))
nn.init.xavier_uniform_(self._h4.weight,
gain=nn.init.calculate_gain('relu'))
nn.init.xavier_uniform_(self._h5.weight,
gain=nn.init.calculate_gain('linear'))
def forward(self, state, action=None):
h = F.relu(self._h1(state.float() / 255.))
h = F.relu(self._h2(h))
h = F.relu(self._h3(h))
h = F.relu(self._h4(h.view(-1, 3136)))
q = self._h5(h)
if action is None:
return q
else:
q_acted = torch.squeeze(q.gather(1, action.long()))
return q_acted
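# --- Hedged shape check (not part of the original file) ---
# Verifies the 3136 flatten size used above: four 84x84 frames shrink to a
# 64x7x7 feature map after the three conv layers.
def _example_shapes():
    net = Network(input_shape=(4, 84, 84), output_shape=(6,))
    state = torch.zeros(1, 4, 84, 84, dtype=torch.uint8)
    q = net(state)
    assert q.shape == (1, 6)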
class FeatureNetwork(nn.Module):
def __init__(self, input_shape, output_shape, **kwargs):
super().__init__()
n_input = input_shape[0]
self._h1 = nn.Conv2d(n_input, 32, kernel_size=8, stride=4)
self._h2 = nn.Conv2d(32, 64, kernel_size=4, stride=2)
self._h3 = nn.Conv2d(64, 64, kernel_size=3, stride=1)
self._h4 = nn.Linear(3136, Network.n_features)
nn.init.xavier_uniform_(self._h1.weight,
gain=nn.init.calculate_gain('relu'))
nn.init.xavier_uniform_(self._h2.weight,
gain=nn.init.calculate_gain('relu'))
nn.init.xavier_uniform_(self._h3.weight,
gain=nn.init.calculate_gain('relu'))
nn.init.xavier_uniform_(self._h4.weight,
gain=nn.init.calculate_gain('relu'))
def forward(self, state, action=None):
h = F.relu(self._h1(state.float() / 255.))
h = F.relu(self._h2(h))
h = F.relu(self._h3(h))
h = F.relu(self._h4(h.view(-1, 3136)))
return h
def print_epoch(epoch, logger):
logger.info('################################################################')
logger.info('Epoch: %d' % epoch)
logger.info('----------------------------------------------------------------')
def get_stats(dataset, logger):
score = compute_metrics(dataset)
logger.info(('min_reward: %f, max_reward: %f, mean_reward: %f,'
' games_completed: %d' % score))
return score
def experiment():
np.random.seed()
# Argument parser
parser = argparse.ArgumentParser()
arg_game = parser.add_argument_group('Game')
arg_game.add_argument("--name",
type=str,
default='BreakoutDeterministic-v4',
help='Gym ID of the Atari game.')
arg_game.add_argument("--screen-width", type=int, default=84,
help='Width of the game screen.')
arg_game.add_argument("--screen-height", type=int, default=84,
help='Height of the game screen.')
arg_mem = parser.add_argument_group('Replay Memory')
arg_mem.add_argument("--initial-replay-size", type=int, default=50000,
help='Initial size of the replay memory.')
arg_mem.add_argument("--max-replay-size", type=int, default=500000,
help='Max size of the replay memory.')
arg_mem.add_argument("--prioritized", action='store_true',
help='Whether to use prioritized memory or not.')
arg_net = parser.add_argument_group('Deep Q-Network')
arg_net.add_argument("--optimizer",
choices=['adadelta',
'adam',
'rmsprop',
'rmspropcentered'],
default='adam',
help='Name of the optimizer to use.')
arg_net.add_argument("--learning-rate", type=float, default=.0001,
help='Learning rate value of the optimizer.')
arg_net.add_argument("--decay", type=float, default=.95,
help='Discount factor for the history coming from the'
'gradient momentum in rmspropcentered and'
'rmsprop')
arg_net.add_argument("--epsilon", type=float, default=1e-8,
help='Epsilon term used in rmspropcentered and'
'rmsprop')
arg_alg = parser.add_argument_group('Algorithm')
arg_alg.add_argument("--algorithm", choices=['dqn', 'ddqn', 'adqn', 'mmdqn',
'cdqn', 'dueldqn', 'ndqn', 'rainbow'],
default='dqn',
                         help='Name of the algorithm. dqn is for standard '
                              'DQN, ddqn is for Double DQN and adqn is for '
                              'Averaged DQN.')
    arg_alg.add_argument("--n-approximators", type=int, default=1,
                         help="Number of approximators used in the ensemble for "
                              "AveragedDQN or MaxminDQN.")
arg_alg.add_argument("--batch-size", type=int, default=32,
help='Batch size for each fit of the network.')
arg_alg.add_argument("--history-length", type=int, default=
|
4,
help='Number of frames composing a state.')
arg_alg.add_argument("--target-update-frequency", type=int, default=10000,
help='Number of collected samples before each update'
'of the target network.')
    arg_alg.add_argument("--evaluation-frequency", type=int, default=250000,
                         help='Number of collected samples before each '
                              'evaluation. An epoch ends after this number of '
                              'steps')
arg_alg.add_argument("--train-frequency", type=int, default=4,
help='Number of collected samples before each fit of'
'the neural network.')
arg_alg.add_argument("--max-steps", type=int, default=50000000,
help='Total number of collected samples.')
arg_alg.add_argument("--final-exploration-frame", type=int, default=1000000,
help='Number of collected samples until the exploration'
'rate stops decreasing.')
arg_alg.add_argument("--initial-exploration-rate", type=float, default=1.,
help='Initial value of the exploration rate.')
arg_alg.add_argument("--final-exploration-rate", type=float, default=.1,
                         help='Final value of the exploration rate.')
|
josiah-wolf-oberholtzer/supriya
|
supriya/realtime/servers.py
|
Python
|
mit
| 35,554 | 0.001041 |
import asyncio
import logging
import re
import threading
from os import PathLike
from typing import Optional, Set, Union
from uqbar.objects import new
import supriya.exceptions
from supriya.commands import ( # type: ignore
FailResponse,
GroupNewRequest,
GroupQueryTreeRequest,
NotifyRequest,
QuitRequest,
SyncRequest,
)
from supriya.enums import CalculationRate, NodeAction
from supriya.exceptions import ServerOffline
from supriya.osc.protocols import (
AsyncOscProtocol,
HealthCheck,
OscProtocolOffline,
ThreadedOscProtocol,
)
from supriya.querytree import QueryTreeGroup, QueryTreeSynth
from supriya.scsynth import Options, find
from ..typing import AddActionLike, CalculationRateLike
from .allocators import BlockAllocator, NodeIdAllocator
from .buffers import Buffer, BufferGroup
from .buses import AudioInputBusGroup, AudioOutputBusGroup, Bus, BusGroup
from .meters import Meters
from .nodes import Group, Node, RootNode, Synth
from .protocols import AsyncProcessProtocol, SyncProcessProtocol
from .recorder import Recorder
try:
from .shm import ServerSHM
except (ImportError, ModuleNotFoundError):
ServerSHM = None
logger = logging.getLogger("supriya.server")
DEFAULT_IP_ADDRESS = "127.0.0.1"
DEFAULT_PORT = 57110
class BaseServer:
### INITIALIZER ###
def __init__(self):
# address
self._ip_address = DEFAULT_IP_ADDRESS
self._port = DEFAULT_PORT
# process
self._client_id = 0
self._is_owner = False
self._is_running = False
self._latency = 0.1
self._maximum_logins = 1
self._options = Options()
self._osc_protocol = None
self._process_protocol = None
self._status = None
self._shm = None
# allocators
self._audio_bus_allocator = None
self._buffer_allocator = None
self._control_bus_allocator = None
self._node_id_allocator = None
self._sync_id = 0
# proxy mappings
self._synthdefs = {}
### SPECIAL METHODS ###
def __repr__(self):
if not self.is_running:
return f"<{type(self).__name__}: offline>"
string = "<{name}: {protocol}://{ip}:{port}, "
string += "{inputs}i{outputs}o>"
return string.format(
name=type(self).__name__,
protocol=self.options.protocol,
ip=self.ip_address,
port=self.port,
inputs=self.options.input_bus_channel_count,
outputs=self.options.output_bus_channel_count,
)
### PRIVATE METHODS ###
def _handle_failed_response(self, message):
logger.warning("Fail: {}".format(message))
def _handle_status_reply_response(self, message):
from supriya.commands import Response
response = Response.from_osc_message(message)
self._status = response
def _handle_synthdef_removed_response(self, message):
from supriya.commands import Response
response = Response.from_osc_message(message)
synthdef_name = response.synthdef_name
self._synthdefs.pop(synthdef_name, None)
def _setup_allocators(self):
self._audio_bus_allocator = BlockAllocator(
heap_maximum=self._options.audio_bus_channel_count,
heap_minimum=self._options.first_private_bus_id,
)
self._buffer_allocator = BlockAllocator(heap_maximum=self._options.buffer_count)
self._control_bus_allocator = BlockAllocator(
heap_maximum=self._options.control_bus_channel_count
)
self._node_id_allocator = NodeIdAllocator(
initial_node_id=self._options.initial_node_id, client_id=self.client_id
)
self._sync_id = self.client_id << 26
def _setup_osc_callbacks(self):
self._osc_protocol.register(
pattern="/d_removed", procedure=self._handle_synthdef_removed_response
)
self._osc_protocol.register(
pattern="/status.reply", procedure=self._handle_status_reply_response
)
self._osc_protocol.register(
pattern="/fail", procedure=self._handle_failed_response
)
def _setup_shm(self):
if ServerSHM is None:
return
self._shm = ServerSHM(self.port, self.options.control_bus_channel_count)
def _teardown_allocators(self):
self._audio_bus_allocator = None
self._buffer_allocator = None
self._control_bus_allocator = None
self._node_id_allocator = None
self._sync_id = 0
def _teardown_shm(self):
self._shm = None
### PUBLIC METHODS ###
def send(self, message):
if not message:
raise ValueError
if not self.is_running:
raise ServerOffline
self._osc_protocol.send(message)
return self
### PUBLIC PROPERTIES ###
@property
def audio_bus_allocator(self):
return self._audio_bus_allocator
@property
def buffer_allocator(self):
return self._buffer_allocator
@property
def client_id(self) -> int:
return self._client_id
@property
def control_bus_allocator(self):
return self._control_bus_allocator
@property
def ip_address(self) -> str:
return self._ip_address
@property
def is_owner(self) -> bool:
return self._is_owner
@property
def is_running(self) -> bool:
return self._is_running
@property
def latency(self) -> float:
return self._latency
@latency.setter
def latency(self, latency):
self._latency = float(latency)
@property
def maximum_logins(self) -> int:
return self._maximum_logins
@property
def next_sync_id(self) -> int:
sync_id = self._sync_id
self._sync_id += 1
return sync_id
@property
def node_id_allocator(self):
return self._node_id_allocator
@property
def osc_protocol(self):
return self._osc_protocol
@property
def options(self) -> Options:
return self._options
@property
def port(self) -> int:
return self._port
@property
def process_protocol(self):
return self._process_protocol
@property
def status(self):
return self._status
class AsyncServer(BaseServer):
### CLASS VARIABLES ###
_servers: Set["AsyncServer"] = set()
    ### INITIALIZER ###
def __init__(self):
BaseServer.__init__(self)
self._boot_future = None
self._quit_future = None
### SPECIAL METHODS ###
def __contains__(self, expr):
        if isinstance(expr, supriya.synthdefs.SynthDef):
name = expr.actual_name
if name in self._synthdefs and self._synthdefs[name] == expr:
return True
return False
### PRIVATE METHODS ###
async def _connect(self):
        self._osc_protocol = AsyncOscProtocol()
        await self._osc_protocol.connect(
ip_address=self._ip_address,
port=self._port,
healthcheck=HealthCheck(
request_pattern=["/status"],
response_pattern=["/status.reply"],
callback=self._shutdown,
max_attempts=5,
timeout=1.0,
backoff_factor=1.5,
),
)
self._is_running = True
self._setup_osc_callbacks()
await self._setup_notifications()
self._setup_allocators()
if self.client_id == 0:
await self._setup_default_groups()
await self._setup_system_synthdefs()
self.boot_future.set_result(True)
self._servers.add(self)
async def _disconnect(self):
self._is_running = False
self._is_owner = False
self._client_id = None
self._maximum_logins = None
self._teardown_shm()
await self._osc_protocol.disconnect()
await self._osc_protocol.exit_future
self._teardown_allocators()
if self in self._servers:
self._servers.remove(self)
self.quit_future.set_result(True)
if not self.boot_fu
|
hujiajie/chromium-crosswalk
|
third_party/WebKit/Tools/Scripts/webkitpy/style/checkers/png_unittest.py
|
Python
|
bsd-3-clause
| 3,001 | 0.001666 |
# Copyright (C) 2012 Balazs Ankes (bank@inf.u-szeged.hu) University of Szeged
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for png.py."""
import unittest
from png import PNGChecker
from webkitpy.common.system.filesystem_mock import MockFileSystem
from webkitpy.common.system.systemhost_mock import MockSystemHost
class PNGCheckerTest(unittest.TestCase):
"""Tests PNGChecker class."""
def test_init(self):
"""Test __init__() method."""
def mock_handle_style_error(self):
pass
checker = PNGChecker("test/config", mock_handle_style_error, MockSystemHost())
self.assertEqual(checker._file_path, "test/config")
self.assertEqual(checker._handle_style_error, mock_handle_style_error)
def test_check(self):
errors = []
def mock_handle_style_error(line_number, category, confidence, message):
error = (line_number, category, confidence, message)
errors.append(error)
fs = MockFileSystem()
file_path = "foo.png"
fs.write_binary_file(file_path, "Dummy binary data")
errors = []
checker = PNGChecker(file_path, mock_handle_style_error, MockSystemHost(os_name='linux', filesystem=fs))
checker.check()
self.assertEqual(len(errors), 0)
file_path = "foo-expected.png"
fs.write_binary_file(file_path, "Dummy binary data")
errors = []
checker = PNGChecker(file_path, mock_handle_style_error, MockSystemHost(os_name='linux', filesystem=fs))
checker.check()
self.assertEqual(len(errors), 1)
self.assertEqual(errors[0], (0, 'image/png', 5, 'Image lacks a checksum. Generate pngs using run-webkit-tests to ensure they have a checksum.'))
|
gerardroche/sublime-polyfill
|
ui.py
|
Python
|
bsd-3-clause
| 8,584 | 0.000349 |
import os
from sublime import active_window
from sublime import find_resources
from sublime import load_settings
from sublime import save_settings
import sublime_plugin
def _load_preferences():
return load_settings('Preferences.sublime-settings')
def _save_preferences():
return save_settings('Preferences.sublime-settings')
class ClearWindowCommand(sublime_plugin.WindowCommand):
def run(self):
if self.window.is_sidebar_visible():
self.window.set_sidebar_visible(False)
if self.window.is_minimap_visible():
self.window.set_minimap_visible(False)
if self.window.is_menu_visible():
self.window.set_menu_visible(False)
if self.window.is_status_bar_visible():
self.window.set_status_bar_visible(False)
self.window.run_command('resize_groups_almost_equally')
preferences = _load_preferences()
preferences.set('indent_guide_options', [])
preferences.set('line_numbers', False)
preferences.set('draw_white_space', 'selection')
preferences.set('rulers', [])
_save_preferences()
self.window.run_command('sort_user_settings')
class EnableColorSchemeCommand(sublime_plugin.ApplicationCommand):
def run(self):
self.color_schemes = []
for color_scheme in find_resources('*.tmTheme'):
ignore = False
for exclude in ['(SL)', 'Color Highlighter', 'tests']:
if exclude in color_scheme:
ignore = True
if not ignore:
self.color_schemes.append(color_scheme)
if len(self.color_schemes) > 1:
color_scheme = _load_preferences().get('color_scheme')
if color_scheme not in self.color_schemes:
self.color_schemes.insert(0, color_scheme)
self.window = active_window()
self.window.show_quick_panel(
self.color_schemes,
self.on_done,
0,
self.color_schemes.index(color_scheme),
self.on_select
)
def on_select(self, index):
if index == -1:
return
color_scheme = self.color_schemes[index]
for group in range(0, self.window.num_groups()):
active_view_in_group = self.window.active_view_in_group(group)
if active_view_in_group:
active_view_in_group.settings().set('color_scheme', color_scheme)
def on_done(self, index):
if index == -1:
for view in self.window.views():
view.settings().erase('color_scheme')
return
color_scheme = self.color_schemes[index]
preferences = _load_preferences()
preferences.set('color_scheme', color_scheme)
_save_preferences()
for view in self.window.views():
view.settings().erase('color_scheme')
class EnableThemeCommand(sublime_plugin.ApplicationCommand):
def run(self):
self.themes = []
for theme in find_resources('*.sublime-theme'):
ignore = False
for exclude in ['Addon', 'tests']:
if exclude in theme:
ignore = True
if not ignore:
self.themes.append(os.path.basename(theme))
if len(self.themes) > 1:
active_window().show_quick_panel(self.themes, self.on_done)
def on_done(self, index):
if index == -1:
return
theme = self.themes[index]
preferences = _load_preferences()
preferences.set('theme', theme)
_save_preferences()
class OverlayOpenFileCommand(sublime_plugin.WindowCommand):
"""Open File; Inspired by Vim CtrlP (https://kien.github.io/ctrlp.vim)."""
def run(self, tab=None, split=None, vsplit=None):
"""
Open file from overlay.
:param tab:
Open the selected file in a new tab
:param split:
Open the selected file in a horizontal split
:param vsplit:
Open the selected file in a vertical split
Defaults to opening in a new tab.
"""
transient_view = self.window.transient_view_in_group(self.window.active_group())
if not transient_view:
return
fname = transient_view.file_name()
if not fname:
return
if vsplit:
self.open_file_in_vertical_split(fname)
elif split:
self.open_file_in_horizontal_split(fname)
elif tab:
self.open_file_in_tab(fname)
else:
self.open_file_in_tab(fname)
self.window.run_command('hide_overlay')
def is_enabled(self):
view = self.window.active_view()
if view:
return bool(view.settings().get('polyfill.experimental_features'))
return False
def open_file_in_vertical_split(self, fname):
self.window.open_file(fname)
self.window.run_command('create_pane_with_file', {'direction': 'right'})
def open_file_in_horizontal_split(self, fname):
self.window.open_file(fname)
self.window.run_command('create_pane_with_file', {'direction': 'down'})
def open_file_in_tab(self, fname):
self.window.open_file(fname)
class PolyfillSetLayoutCommand(sublime_plugin.WindowCommand):
def run(self, cols, rows, cells):
num_groups_before = self.window.num_groups()
active_group_before = self.window.active_group()
self.window.run_command('set_layout', {
'cols': cols,
'rows': rows,
'cells': cells
})
if num_groups_before == self.window.num_groups():
# Fix issue where group focus moves when it probably shouldn't.
# When the layout is not changed then the focus shouldn't change
# either. Previously, if the active view before the layout change
# is transient ST would move the cursor focus to a group with a
            # non-transient view. This can be disorienting and interrupt flow
# because where the cursor focus has moved to is not always clear.
self.window.focus_group(active_group_before)
return
if len(self.window.views_in_group(active_group_before)) < 2:
            # Only move the active view before layout change to the new group
# if it doesn't leave the previous group without any views.
return
view = self.window.active_view_in_group(active_group_before)
self.window.set_view_index(view, self.window.active_group(), 0)
class ResetWindowCommand(sublime_plugin.WindowCommand):
def run(self):
self.window.run_command('reset_font_size')
view = self.window.active_view()
font_size = view.settings().get('font_size_default') if view else None
if font_size:
preferences = _load_preferences()
preferences.set('font_size', font_size)
_save_preferences()
if not self.window.is_sidebar_visible():
self.window.set_sidebar_visible(True)
if not self.window.is_minimap_visible():
self.window.set_minimap_visible(True)
if not self.window.is_menu_visible():
self.window.set_menu_visible(True)
if not self.window.is_status_bar_visible():
self.window.set_status_bar_visible(True)
self.window.run_command('resize_groups_almost_equally')
class ResizeGroupsAlmostEquallyCommand(sublime_plugin.WindowCommand):
"""
Resize groups equally.
Make all groups (almost) equally high and wide, but use 'winheight' and
'winwidth' for the current window. Windows with 'winfixheight' set keep
their height and windows with 'winfixwidth' set keep their width.
@xxx winheight option
@xxx winwidth option
@xxx winfixheight option
@xxx winfixwidth option
"""
def run(self):
layout = self.window.layout()
col_count = len(layout['cols'])
row_count = len(layout['rows'])
def equalise(count):
size = round(1.0 / (count - 1), 2)
|
icereval/osf.io
|
api/applications/urls.py
|
Python
|
apache-2.0
| 417 | 0.007194 |
from django.conf.urls import url
from api.applications import views
app_name = 'osf'
urlpatterns = [
url(r'^$', views.ApplicationList.as_view(), name=views.ApplicationList.view_name),
url(r'^(?P<client_id>\w+)/$', views.ApplicationDetail.as_view(), name=views.ApplicationDetail.view_name),
url(r'^(?P<client_id>\w+)/reset/$', views.ApplicationReset.as_view(), name=views.Application
|
Reset.view_name),
]
|
Fillll/reddit2telegram
|
reddit2telegram/channels/~inactive/r_ikeahacks/app.py
|
Python
|
mit
| 141 | 0.007092 |
#encoding:utf-8
subreddit = 'ikeahacks'
t_channel = '@r_IKEAhacks'
def send_post(submission, r2t):
    return r2t.send_simple(submission)
|
FenrirUnbound/greg-ball
|
libraries/schedule.py
|
Python
|
gpl-2.0
| 262 | 0.003817 |
from datetime import datetime
class Schedule(object):
    WEEK_ONE = datetime(2014, 9, 2, 9)
def week(self):
time_difference = datetime.now() - self.WEEK_ONE
return (time_difference.days/7)+1
def season_year(self):
return 2014
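# --- Hedged usage sketch (not part of the original file) ---
# week() counts the 7-day periods elapsed since the 2014-09-02 kickoff, so
# for any date in that season it yields the 1-based week number.
if __name__ == '__main__':
    s = Schedule()
    print('Week %d of the %d season' % (s.week(), s.season_year()))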
|
auready/django
|
django/core/cache/backends/filebased.py
|
Python
|
bsd-3-clause
| 4,845 | 0.000413 |
"File-based cache backend"
import glob
import hashlib
import os
import pickle
import random
import tempfile
import time
import zlib
from django.core.cache.backends.base import DEFAULT_TIMEOUT, BaseCache
from django.core.files.move import file_move_safe
from django.utils.encoding import force_bytes
class FileBasedCache(BaseCache):
cache_suffix = '.djcache'
def __init__(self, dir, params):
super().__init__(params)
self._dir = os.path.abspath(dir)
self._createdir()
def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
if self.has_key(key, version):
return False
self.set(key, value, timeout, version)
return True
def get(self, key, default=None, version=None):
fname = self._key_to_file(key, version)
try:
with open(fname, 'rb') as f:
if not self._is_expired(f):
return pickle.loads(zlib.decompress(f.read()))
except FileNotFoundError:
pass
return default
def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
self._createdir() # Cache dir can be deleted at any time.
fname = self._key_to_file(key, version)
self._cull() # make some room if necessary
fd, tmp_path = tempfile.mkstemp(dir=self._dir)
renamed = False
try:
with open(fd, 'wb') as f:
expiry = self.get_backend_timeout(timeout)
f.write(pickle.dumps(expiry, pickle.HIGHEST_PROTOCOL))
f.write(zlib.compress(pickle.dumps(value, pickle.HIGHEST_PROTOCOL)))
file_move_safe(tmp_path, fname, allow_overwrite=True)
renamed = True
finally:
if not renamed:
os.remove(tmp_path)
def delete(self, key, version=None):
self._delete(self._key_to_file(key, version))
def _delete(self, fname):
if not fname.startswith(self._dir) or not os.path.exists(fname):
return
try:
os.remove(fname)
except FileNotFoundError:
# The file may have been removed by another process.
pass
def has_key(self, key, version=None):
fname = self._key_to_file(key, version)
if os.path.exists(fname):
with open(fname, 'rb') as f:
return not self._is_expired(f)
return False
def _cull(self):
"""
Removes random cache entries if max_entries is reached at a ratio
of num_entries / cull_frequency. A value of 0 for CULL_FREQUENCY means
that the entire cache will be purged.
"""
filelist = self._list_cache_files()
num_entries = len(filelist)
if num_entries < self._max_entries:
return # return early if no culling is required
if self._cull_frequency == 0:
return self.clear() # Clear the cache when CULL_FREQUENCY = 0
# Delete a random selection of entries
filelist = random.sample(filelist,
int(num_entries / self._cull_frequency))
for fname in filelist:
self._delete(fname)
def _createdir(self):
if not os.path.exists(self._dir):
try:
os.makedirs(self._dir, 0o700)
except FileExistsError:
pass
def _key_to_file(self, key, version=None):
"""
Convert a key into a cache file path. Basically this is the
root cache path joined with the md5sum of the key and a suffix.
"""
key = self.make_key(key, version=version)
self.validate_key(key)
return os.path.join(self._dir, ''.join(
[hashlib.md5(force_bytes(key)).hexdigest(), self.cache_suffix]))
def clear(self):
"""
Remove all the cache files.
"""
if not os.path.exists(self._dir):
return
for fname in self._list_cache_files():
self._delete(fname)
def _is_expired(self, f):
"""
Takes an open cache file and determines if it has expired,
deletes the file if it is has passed its expiry time.
"""
exp = pickle.load(f)
if exp is not None and exp < tim
|
e.time():
f.close() # On Windows a file has to be closed before deleting
self._delete(f.name)
return True
return False
def _list_cache_files(self):
"""
        Get a list of paths to all the cache files. These are all the files
in the root cache dir that end on the cache_suffix.
"""
if not os.path.exists(self._dir):
return []
filelist = [os.path.join(self._dir, fname) for fname
in glob.glob1(self._dir, '*%s' % self.cache_suffix)]
return filelist
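# --- Hedged usage sketch (not part of the original file) ---
# Direct use of the backend outside a Django settings module; the directory
# and the empty params dict are illustrative. In a project this backend is
# normally selected via CACHES = {'default': {'BACKEND':
# 'django.core.cache.backends.filebased.FileBasedCache', 'LOCATION': ...}}.
def _example():
    cache = FileBasedCache('/tmp/django_cache_example', params={})
    cache.set('answer', 42, timeout=60)
    assert cache.get('answer') == 42
    cache.delete('answer')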
|
andreaso/ansible
|
lib/ansible/modules/cloud/amazon/ec2_vpc_subnet.py
|
Python
|
gpl-3.0
| 8,929 | 0.001568 |
#!/usr/bin/python
#
# This is a free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This Ansible library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this library. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'curated'}
DOCUMENTATION = '''
---
module: ec2_vpc_subnet
short_description: Manage subnets in AWS virtual private clouds
description:
- Manage subnets in AWS virtual private clouds
version_added: "2.0"
author: Robert Estelle (@erydo)
options:
az:
description:
- "The availability zone for the subnet. Only required when state=present."
required: false
default: null
cidr:
description:
- "The CIDR block for the subnet. E.g. 192.0.2.0/24. Only required when state=present."
required: false
default: null
tags:
description:
- "A dict of tags to apply to the subnet. Any tags currently applied to the subnet and not present here will be removed."
required: false
default: null
aliases: [ 'resource_tags' ]
state:
description:
- "Create or remove the subnet"
required: false
default: present
choices: [ 'present', 'absent' ]
vpc_id:
description:
- "VPC ID of the VPC in which to create the subnet."
required: false
default: null
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
- name: Create subnet for database servers
ec2_vpc_subnet:
state: present
vpc_id: vpc-123456
cidr: 10.0.1.16/28
resource_tags:
Name: Database Subnet
register: database_subnet
- name: Remove subnet for database servers
ec2_vpc_subnet:
state: absent
vpc_id: vpc-123456
cidr: 10.0.1.16/28
'''
import time
try:
import boto.ec2
import boto.vpc
from boto.exception import EC2ResponseError
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
if __name__ != '__main__':
raise
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import AnsibleAWSError, connect_to_aws, ec2_argument_spec, get_aws_connection_info
class AnsibleVPCSubnetException(Exception):
pass
class AnsibleVPCSubnetCreationException(AnsibleVPCSubnetException):
pass
class AnsibleVPCSubnetDeletionException(AnsibleVPCSubnetException):
pass
class AnsibleTagCreationException(AnsibleVPCSubnetException):
pass
def get_subnet_info(subnet):
subnet_info = {'id': subnet.id,
'availability_zone': subnet.availability_zone,
'available_ip_address_count': subnet.available_ip_address_count,
'cidr_block': subnet.cidr_block,
'default_for_az': subnet.defaultForAz,
'map_public_ip_on_launch': subnet.mapPublicIpOnLaunch,
'state': subnet.state,
'tags': subnet.tags,
'vpc_id': subnet.vpc_id
}
return subnet_info
def subnet_exists(vpc_conn, subnet_id):
filters = {'subnet-id': subnet_id}
subnet = vpc_conn.get_all_subnets(filters=filters)
if subnet and subnet[0].state == "available":
return subnet[0]
else:
return False
def create_subnet(vpc_conn, vpc_id, cidr, az, check_mode):
try:
new_subnet = vpc_conn.create_subnet(vpc_id, cidr, az, dry_run=check_mode)
# Sometimes AWS takes its time to create a subnet and so using
# new subnets's id to do things like create tags results in
# exception. boto doesn't seem to refresh 'state' of the newly
# created subnet, i.e.: it's always 'pending'.
subnet = False
while subnet is False:
subnet = subnet_exists(vpc_conn, new_subnet.id)
time.sleep(0.1)
except EC2ResponseError as e:
if e.error_code == "DryRunOperation":
subnet = None
elif e.error_code == "InvalidSubnet.Conflict":
raise AnsibleVPCSubnetCreationException("%s: the CIDR %s conflicts with another subnet with the VPC ID %s." % (e.error_code, cidr, vpc_id))
else:
raise AnsibleVPCSubnetCreationException(
'Unable to create subnet {0}, error: {1}'.format(cidr, e))
return subnet
def get_resource_tags(vpc_conn, resource_id):
return dict((t.name, t.value) for t in
vpc_conn.get_all_tags(filters={'resource-id': resource_id}))
def ensure_tags(vpc_conn, resource_id, tags, add_only, check_mode):
try:
cur_tags = get_resource_tags(vpc_conn, resource_id)
if cur_tags == tags:
return {'changed': False, 'tags': cur_tags}
to_delete = dict((k, cur_tags[k]) for k in cur_tags if k not in tags)
if to_delete and not add_only:
vpc_conn.delete_tags(resource_id, to_delete, dry_run=check_mode)
to_add = dict((k, tags[k]) for k in tags if k not in cur_tags or cur_tags[k] != tags[k])
if to_add:
vpc_conn.create_tags(resource_id, to_add, dry_run=check_mode)
latest_tags = get_resource_tags(vpc_conn, resource_id)
return {'changed': True, 'tags': latest_tags}
except EC2ResponseError as e:
raise AnsibleTagCreationException(
'Unable to update tags for {0}, error: {1}'.format(resource_id, e))
def get_matching_subnet(vpc_conn, vpc_id, cidr):
subnets = vpc_conn.get_all_subnets(filters={'vpc_id': vpc_id})
return next((s for s in subnets if s.cidr_block == cidr), None)
def ensure_subnet_present(vpc_conn, vpc_id, cidr, az, tags, check_mode):
subnet = get_matching_subnet(vpc_conn, vpc_id, cidr)
changed = False
if subnet is None:
subnet = create_subnet(vpc_conn, vpc_id, cidr, az, check_mode)
changed = True
# Subnet will be None when check_mode is true
if subnet is None:
return {
'changed': changed,
'subnet': {}
}
if tags != subnet.tags:
ensure_tags(vpc_conn, subnet.id, tags, False, check_mode)
subnet.tags = tags
changed = True
subnet_info = get_subnet_info(subnet)
return {
'changed': changed,
'subnet': subnet_info
}
def ensure_subnet_absent(vpc_conn, vpc_id, cidr, check_mode):
subnet = get_matching_subnet(vpc_conn, vpc_id, cidr)
if subnet is None:
return {'changed': False}
try:
vpc_conn.delete_subnet(subnet.id, dry_run=check_mode)
return {'changed': True}
except EC2ResponseError as e:
raise AnsibleVPCSubnetDeletionException(
'Unable to delete subnet {0}, error: {1}'
.format(subnet.cidr_block, e))
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(
dict(
az=dict(default=None, required=False),
            cidr=dict(default=None, required=True),
state=dict(default='present', choices=['present', 'absent']),
tags=dict(default={}, required=False, type='dict', aliases=['resource_tags']),
            vpc_id=dict(default=None, required=True)
)
)
module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)
if not HAS_BOTO:
module.fail_json(msg='boto is required for this module')
region, ec2_url, aws_connect_params = get_aws_connection_info(module)
if region:
try:
connection = connect_to_aws(boto.vpc, region, **aws_connect_params)
except (boto.exception.NoAuthHandlerFound, AnsibleAWSError) as e:
module.fail_json(msg=str(e))
else:
modul
|
luci/luci-py
|
appengine/components/components/prpc/discovery/service_prpc_pb2.py
|
Python
|
apache-2.0
| 29,794 | 0.012016 |
# Generated by the pRPC protocol buffer compiler plugin. DO NOT EDIT!
# source: service.proto
import base64
import zlib
from google.protobuf import descriptor_pb2
# Includes description of the service.proto and all of its transitive
# dependencies. Includes source code info.
FILE_DESCRIPTOR_SET = descriptor_pb2.FileDescriptorSet()
FILE_DESCRIPTOR_SET.ParseFromString(zlib.decompress(base64.b64decode(
'eJzlvX10ZFd1J0pVqaTSbbV0VN2222o3fV3+aKktVbvbxoY2xqOW5LZMd6unpIaYGSyuqq6kcp'
'fqirpVLcuBlWQyhI98vEcAY+eBSfgyMQFCgCQDi5dhArMWvIQk7zFkzcKTYQWcQAjmw7OAISHv'
'7d8++5x7bpXaNoTO/PG8Elq177n77LPPPvvsvc8++3r/UPB2x2HrQr0aljdbUTsqDtbqcTW6EL'
'a2x/y1KFprhEf4wUpn9UgtjKut+mY7aunGpX6v7yVRvVZqe2qWn62ElTDejJpxWJz1dpn29ai5'
'L+NnxncdK5U1zrLBWb6z3ghnLd7FsF1xXyuOeQWhL96X9XPjgxX7+9hJb3DWEFs87hUMCcWRsh'
'1EGfSN7XcA3YSWnnP3N/u9gupTz1H7Vcb7WKYwxD+Kxx7L+DPR5narvrbe9o/dePQWf2k99E+d'
'm5n3pzvt9agVl/3pRsPnBrHfCkFaWCt7/rk49KNVv71ej/046rSqoV+NaqFPP9dARjOs+Z1mLW'
'xRk9Cf3gyqQEyjIoIm/ZeErZgG7x8r3+hRg6DtV4OmvxL6qxG95Neb/Nap+Zm5M4tz/ipxsOx5'
'hUJW9YNs+jNXeI4q0N+HvalCP/29i/4eUZmxg75lmF8TPhCBwlBCMoTmhQy9UFCjXsS/soRgWG'
'XVTWPLvmEejbXdaTVjP/Ab9biNsRosfkAkBr4jKz1TbAYFQmKMZcMPGg3qXpkOiYRhGs2wA8kS'
'RKm9DiRHkIPqiHeo0EckFjHGsf0+Jhx8Jo6FG5vtbX8jjONgLeTR9THqIo1uyLuZfmXUZXrax6'
'73uwXjYhyit/i9gtrntfgXOLSPOHTz2Io/mwgvDbDZDurEJUeiY/CKRmtxToIBdRKE7U3DvEaD'
'ONQKmnG9Xb8AOjZDkpVmtc4UKNNnoZ97vcaBZAhyLbEkgeQIckzd5J0QSEaN0TsHx475i3bCWq'
'Ghxm8GxC6fVueFeo1kdGWbppKluuX0nCn0MRYXkifILpKZBIKeimrMgeQIckA9d6WfV/9N3lt/'
'K+c9o5IpjnSpjNJpb7RHpIrP9/qwEEjP5EjPXPsMeuYswBV+o/TVPm/PDk+LRa8P3GDNNVjhv4'
'v7vAFaq+dJnEgbAWx+Fp/reXaetvflWFU5kOIN3uhmZ6VRry47zTxqlq8o/WA2aXzIG9kKg/Nu'
'013cdBhgp+GMNyTivQz52dfHo/d7Rt898l3y1hK9VJz2BsNmZ0NjyF+Ef3PUohtLAa8JigGRoH'
'39jOBQDwIRt24c5j0aymB4f5sUIHaLAUZy3Q6zGDZq3SiS94q3eAORXmb7CrzhXLWjICzoNhXT'
'uDjvKa2ml6Gml+vN1WjfICM42DsQbjhD7eapWWU4Tv0uXu71x9u07u/fN8QSIr9K/6HfG3k2In'
'abl1/FKHm7e9Y80O+kmdj/EzJx2tvVDON2WNMSkXuWMuXpl3pFqu8nEqmf8UYsScukDNeMbB55'
'JkrKc+a9Cl6rDIep32SZeFEzjFZpeVUbJCc7c2kBTXq4FGlotVF8QSJqAxeRlNN6kfVI2zlv2N'
'gKMrJBJqL8jCOryGt6YLtb7s/iNZ4FLLNYeayFhgzwDMHGHvCG0+wp7vXycTtotVkK8xX9o6i8'
'HCkZ1nL5Cv4s/qtkwDke8PW9M5rC3D3usVu93akBPNuuS6/yLtsRNQnJ3k6z3myHrU2ySGjcuq'
't9fztwEZk757bWWCp7Or3Aw4OFrw2on6f/sqVP9Xt7d1ozOy5fWv4kwSthi5mUr8gvWhH5RrAS'
'Nmg1ZMaHj93wrFZl+RReqeg3iy/y+kRFA8PhZ4cBa6nC7xX3e4P4V8tGP9NcAAByAUObl0ktNF'
'ub/Q3BqoWrQafRXr4QNDohCzwJlgBfAljxoLdLr6o6vXM/a898RS+0eUDQ/X0xrWURTe4CAO7+'
'1m7FfWDn4fWsJdoqtTUhUx809o0SgkJlWIMXBFr6/azXx4plxNu1dM/ZueXZhXMnTs2pTHHY8x'
'hw56mF6SWVtb/nzyzdcrPK2RfOaUCf2+CmYypPAjukEcz/zNwstehPQ6jNQHG3N8iQEwsLp1TB'
'4lxcqsyfOakGLc6TlYVzZ5VnMZyeW1ycPjmndtkWJ+5ZmltUQymyqIvdtou5M+dOq+HiqLdbd2'
'GIGOkCEaUqIURjGU0BqEWxNOPlWQxJ3IdPTZ+YO7W8cHZpfuHM9CninYVV5v71ufnK3Czxz4Gd'
'nZteIliuVPX27qRQd1xCjixkLyILjKtbFkpfyXp7dthUduzkDi+vZVlvsxM77k4s2T1bLb/nmh'
'q5i5gaQNEjsC/vUf56f7zl2eyPDPvxNoH8DpvAbd5oD6JnrYx/MePtuxhznkElZlMq8bZuDl59'
'8UnometHM97lO5uUO9LwIq9/IySX3ZhVvXvXaX7cPdnylrvb5y5mF2pqeih9fda7bEfkOxJ6wP'
'Pqzc1OW5tOWhMPMoSVF7Rsp22f5/i5p0Hc4PkJoX1M6HMvMtIewbzRU9VGPWy2l+N2Kww26s01'
'3moKx/OrQSMOKyP68aJ5ije0b+i80Z96Qz+2b5TeOOjtcgzw4tXe0H3BhWDZOFWaE7sAOyuO1Y'
'3eXm5CY6SOqo0gjplpBW5axLMFPJoxT4rP8/bwGxu0N9U3G+Ey3LyYtxxL2ShanJYGoCgms/AA'
'v7YWNsNW0A6Xw1d2qO0yueTL60G8vm8vEJzI7stUrkTDk9JujptNN2t3UaPice9yxkIcoQEvV9'
'fD6vnlTnv1+fv2u/0zhYvcZgZNzlGL4qI3hMnYqD9ANEct3kOHd1BNDgfLC/LCafI/jucXz87N'
'zVZ2GSx3Ri0I1FpkGbxLC9RaZNhLzKpW9ZjJN7WxNpViVrV6UjcwMQNaD5clzHJfHO0ZZfer1O'
'Pmdu+LxVSPm9vdr93q7d1c3+x977D7XpGadL94HXvmrbBKc1Xbd4Xb3HlQLJP4V5fDZrBCEhO0'
'6I9430Fu3NdudciLqFbn+OE0Pyse9kajlfuqWiKXCc1q/f591zJ7R/CA5fEsg4sThDteD1qbrJ'
'Jjmoxw33W6qYafMWCsiHirvto2GA/pFcEwwTbuKXAi1fE4NxsmuNsvbQZomXQ6oQ03AiY93uxd'
'jkak6IJa0A6c1pPcGmw/LQ9TdLY6K9tWsKY0nYAZ0bpkxnnpuDfkyn1x0NOSTwYJGUEzC7MwX1'
'42R7YImVGn5pfmlivnzizNn55TOcewv7uvcL06VPqLrDec9tSKL/SuMGGVOGwvb9VbvCA3Ar05'
'WvnZK60Ww/ZLqc2d3KR4yjvYjEgBkOIIWrXlJKC1HFRJIONIb4QWy1XNaFEaJzvEtDTtEt/cxc'
'SXrOuNYJPkt93aZvu8UCkQYA6//0XcJOJmQQ3S/w4qr/TXOW/Itdfh/lR5x8qwTrvmaa378gy2'
'suP92jiu6DdhRkDYQm2MFCryq3jS678vZtz9jHun2J+D++5FRj549+LymYXK6elTFXm9eKXX1w'
'ge2E5vegx6tpNAGBCgS281DLqEi+GIl2d+FT1POKaeUyx4fTMLFSwIWgEaunx2fm6G1kTpeV6/'
'ZgIWi2UDvaR/Co6MeXru9Im5isqmp7pP5UsxrULHDv+Xccb/Y8bb5djVMIiCRiPaWg4a9SAW0f'
'AYNA3Is526f6Elklf9pUcynuo2bLvIzPyvJLP0tow3nLZmu8i7+n8peV/OertTNuyzpe6V3mi9'
'Fm5sRm0Ez5cb4YWwsa/ESqM3qJjqoTyfvHcKrx3fMz87d/rswtLcmZl7ls+defGZhZeeqah6V7'
'NLuOzPeqqbqOIV3k5k0cre442cWaA9kTbGuTvvnJtZWtRxD9t6KbXAS7+R8/bsQAmpce2xaCdq'
'6tlQX4bNcJZcSXFwyBYiLjXb9dU62fPaB9duzEgC1yGlSa+4GelDsGXCK43h1vRVlHky32zb1s'
'1wLehqDWWeqyjzxLYm+6UWdWDr6XbYOzKVXRpmm4gVn0S9hsgUY5hucsgbCdbWWkBuEGm/ZNiC'
'ueHY3V7B8AFbNThBphM721kEwprmIXVaj5eTIH6Wnhcqu+qxDYCWHiWDJX0IQb5LoRGRkOuTds'
'zN+DOcW5RPSfuKfXPs8xmvYMC03fZtBu11Rpc/kVWZCv8GnCzAJouAwPEb89oIgxo7PdHGBs1k'
'bOZV4DMCxllYuxXUG6m2fdxWmQe28XHvSoO3RlYoOV
|
S15KV+Dm5cIQ1m5bl5t/QXGW/UuGk1y6'
'zTnhc0m1HbZVevKPe8V562L1UcBGMbnpc8uSjbaJ+SEyY+ptSOvadB8OcQflkJ1+pNiRvrHyb8'
'0mfDLyf+twx5bNFGN70nVFd0Ib
|
4r87IXrdXb652VMrU/shY1guZacs7Kf1SnyJ+aWoucU9fbkj'
'9/kMm8M5s7efbEY9mxk7q7s4Y9lXC1EVYx5Lv//ImsN6gOqeeoXxlQGe+xkcIQ/yoe+9SQzy9U'
'o4Z/orO6iuP+KV+jOhT78Ch8VhjVdYSgfG1je6msixufLy/4881q2b9IwsV6u70ZHz9C4yA9GG'
'1SR4ZBGPumEDG1ook44nl+JazVsZRXOnxWj4P3ThwiuUISNgBZqTeD1jbTFU/6W8RNP2rxv1GH'
'6NyIaqSu9IKZ5HN06nmj3iapSc7QOdcBCRurEawSklMkBtTqOhuAXiI8Yfs4kYT/DncRxvkCbg'
'rJRiduI/0ikDSQYCW6gEfCMc8nUaSNe1Inn5jsDLfHZq2LHCTHNIL6Bg75L0IEdebwwhBBY6x1'
'iDBLh5cQ8s+iwzNJLrWo2sFaDswkHSH+R/Sk5ZOkkJ9Pm3zCap4geuj5LvV2UGfCOr8JxFC4IM'
'iVrWaUPGO+19uxx1kcjCoi6d0ItpGIQ5JCxEc+LUiChhAKImKD9mJf86SNpI8WbTg1f5UeeCYV'
'aLW9BTERCfLjzbAKCaK36hCsFmSnqaUojpl2z1+6a37RX1y4c+ml05U5n/4+W1l4Ce3cs/6Je+'
'jhnD+zcPaeyvzJu5b8uxZOzc5VFv3pM7MEPUOW/IlzSwuVRc8vTS/SqyV+Mn3mHn/uZ85W5hYX'
'/YWKP3/67Kl5wkboK9PkJs8tTvrzZ2ZOnZslP2DSJwz+mYUlzz81f5pc6Vl/aWGSu+19z1+40z'
'89V5m5i35On5gnz/se7vDO+aUz6OzOhYrnT/tnpytL8zPnTk1X/LPnKmcXFud8jGx2fnHm1DR5'
'6bNl6p/69OdeQpaJv3jX9KlT6YF6Ptk1cxVQ7w7TPzFHVE6fODWHrnics/MVMncwoOSvGWIeEX'
'hq0vMXz87NzNNfxA+yhIiieyYF6eLcvz5HreihPzt9evokjW78mbhCEzNzrjJ3GlQTKxbPnVhc'
'ml86tzTnn1xYmGVmL85VXjI/M7d4m39qYZEZdm5xjgiZnV6a5q4JB7GLntPfJ84tzjPj5s8szV'
'Uq5/jcZYJm+aXEGaJymt6dZQ4vnMFoIStzC5V7gBZ84BmY9F961xzBK2Aqc2sabICbN7PkNqMO'
'iYk0pGSc/pm5k6fmT5I1OYfHC0Dz0vnFuQmasPlFNJjnjkkGqNNzPGpMFNHl6b8d0Z3k+fTn7/'
'SnZ18yD8qlNUnA4ryIC7Nt5i7hedk79qWsJNsd98+TIoia/ypR7P74ixnkvyRo1YIJWucnAqxM'
'gkSkhEhhNXo3INrf6mukzrap+WLQvI9W9Mn1cCPYCtqT/t3h6qo/GwZQ56SfWNPEvAhJL0i0J9'
'bKidYztnKbJqb1Vbhab4qCs9l6epPm1oSLFgBZivWaCzYJfpz11YDVQRqM3JZ2YxtqJvB3SFDy'
'rBYJmtuiE5G4gi0UynI8LK+VbZuWNpGg0vw6eQ2tdjwhWYMTtFfvo78K6gb66xyAhV3yN6CT9N'
'ckQzPyN6BT9NdRhpq/8VeZ/rqVodfJ34Aeob+uZui18jegN9JfBxl6UP4G9Gb66yrv5zP096D+'
'MdZ2swk103gDIoaZSHcN+xLUKZLWyN3DVt6yJsrUCsTC84PGGslFe32DpCBqHmr7W1HrvF/rwK'
'D3V6KoTZtGsLlJv4g1DU4rfD5RcFxlxu5lCTCGBG0KG5s0JS2eOH380jNLi2HbpAACP8RDU+9p'
'UaC58MkHiG1eIfL3nq8K6kpvmH8hr/AFKqvGOYdOJz32McRzIP0E2aWe60AyBDkoeYEmVfIF6n'
'p1yDvKOY+305heRmO6xuY8IpETQ2qQ3+bKpZPweDsRtt97oU14fBGRsb80qcUXO+YkMbzBThZE'
'tkXcdEyXdisM3dTFPn7fheQJ4iYQYhgvUkV1eSqZ8UXqSjXmTdlkxjsIy3NLB3yW9dJqFBFF+K'
'e8ErToz7Bd7cpbvKMnb/GOnrzFO6jbK1N5i3eoq9QB71aBZNUJwnL12CH/jLEUZEJ5Yem8Sasg'
'HAKyRMCJFAFZIuBEigCM6gQRcJUDyRHkoPK9JYHk1CzEYmzW59QKTQLkS2cSJnQIWWJL2bxBbY'
'6xxeZQlyPqZlPU5Yi6WaJOOZAMQUZVyYGAmutIuF4tkD51krBcP7bRTR0Coc+ONtKSd9J61sbZ'
'FLsI0OUb9bWWVm5Rs7Fd9mcjWJmwxpxh9AkBLiRPEHcYWAUnaRi+A8kR5Bp1nfcCgeTV3YRlcm'
'yCXY52tDnFAaOUinc3AoeEPJFwd4qEPC3Tu4mEMQeSIch+4lsCyRHksLqBlz8g/erFhGXKtugn'
'vC9O4e3nNmb5a0iGIAdFaWhIjiA3kN42eAfUKcJSti0GCO+pFN4BwnuK8F7tQDIEKakJB5IjyC'
'TRZ/AW1OkU3gLhPZ3CWyC8pwnvQQeSIYjv4C0Q3tMpvINqIZXoPEh4F1J4BwnvAuG93IFkCHKF'
'w5lBwrugribZ/X5GQJ46R2iOjP1NhtQ7z2PYqCV52yalKLWv0rLpaIfO2hiOf0aSe0/UYT8hDl'
'ZD2r9b4QY8NJYTxLfI/NfdmK15PWjhcNxvdejpBjkvq51mVXdcb2+bxZNsgeRDTzHIpYqQN2nz'
'Y+OHOoV1I74w3CcS36jhJo97xMFzKQ56xMFzxMErHUiGIGPqsAPJEWSKZvgVAtml7oEGHjvL26'
'NOdbXKyNlG9OPOpqx9DpXpzPMSNztWYntL/7ip5FC6iyi9J0XpLlqa96R05i6i9B7SmfscSI4
|
aslab/rct
|
higgs/trunk/code/ROS/metacontrol/master_monitor/src/master_monitor.py
|
Python
|
gpl-3.0
| 3,374 | 0.039123 |
#!/usr/bin/env python
import roslib; roslib.load_manifest('master_monitor')
import rospy
from rosgraph_msgs.msg import Log
from syscall import runCmd, runCmdOutput
from move_base_msgs.msg import MoveBaseGoal, MoveBaseAction
from geometry_msgs.msg import PoseStamped
import actionlib
import numpy as np
class ControlModel:
current_goal = None
level = ["DEBUG", "INFO", "WARN", "ERROR", "FATAL"]
kinect_recov_launched = False
move_base_client = None
control_model = ControlModel()
def main():
rospy.init_node('master_monitor')
global move_base_client
global control_model
move_base_client = actionlib.SimpleActionClient('move_base', MoveBaseAction)
current_goal_sub = rospy.Subscriber("/move_base_node/current_goal", PoseStamped, current_goal_callback)
rospy.Subscriber("/rosout", Log, callback)
rospy.Subscriber("/rosout_agg", Log, callback)
print "Master Listener Node Launched"
while not rospy.is_shutdown():
rospy.sleep(0.2)
check_system()
def callback(data):
global level
# print "Level: "+level[int(np.sqrt(data.level))]+", From node " + data.name + ", a message: "+data.msg.strip()
if str(data.msg).strip() == str("Connectivity Error: Could not find a common time /base_link and /map.") and data.name == "/move_base_node":
kinect_reconfiguration()
# if data.name == "/sicklms":
# print "Level: "+level[int(np.sqrt(data.level))]+", From node " + data.name + ", a message: "+data.msg.strip()
if data.name == "/sicklms" and str(data.msg).strip() == "woah! error!":
kinect_reconfiguration()
def current_goal_callback(data):
global control_model
control_model.current_goal = data
rospy.loginfo("Current goal received and stored")
def kinect_reconfiguration():
global kinect_recov_launched
global move_base_client
global control_model
if kinect_recov_launched:
rospy.logwarn("Kinect Reconfiguration has been l
|
aunched")
return
kinect_recov_launched = True
    while not move_base_client.wait_for_server(rospy.Duration(1.0)) and not rospy.is_shutdown():
rospy.loginfo("Waiting for the move_base action server to come up")
if rospy.is_shutdown():
return
rospy.loginfo("Canceling all active goals")
move_base_client.cancel_all_goals()
rospy.loginfo("Launching Kinect Reconfiguration!")
#Kill SICK laser node
runCmd("rosnode kill /sicklms &")
#Launch kinect node to replace laser_scan
runCmd("roslaunch master_monitor kinect_reconfiguration.launch &")
rospy.loginfo("Kinect Reconfiguration launcher has been launched.")
rospy.sleep(10.0)
rospy.loginfo("Re establishing model state...")
restablish_state()
rospy.loginfo("Model state loaded...")
def restablish_state():
global control_model
if control_model.current_goal != None:
goal = MoveBaseGoal()
goal.target_pose = control_model.current_goal
goal.target_pose.header.stamp = rospy.Time.now()
move_base_client.send_goal(goal)
rospy.loginfo("Last active goal re-established")
def check_system():
global kinect_recov_launched
if kinect_recov_launched:
return
# print "Rosnode list"
rosnode_list = runCmdOutput("rosnode list")
node_to_check="sicklms"
if rosnode_list.find(node_to_check) == -1:
rospy.logerr("Problem: "+node_to_check+" node not found. Launching reconfiguration")
kinect_reconfiguration()
if __name__ == '__main__':
main()
|
com4/eventmq
|
eventmq/client/__init__.py
|
Python
|
lgpl-2.1
| 933 | 0 |
# This file is part of eventmq.
#
# eventmq is free software: you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 2.1 of the License, or (at your option)
# any later version.
#
# eventmq is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with eventmq. If not, see <http://www.gnu.org/licenses/>.
"""
:mod:`client` -- Client Utilities
=================================
This module contains utilities that can be used when acting as a client in
eventmq. (e.g. one who requests jobs)
.. toctree::
:maxdepth: 2
client/messages
client/jobs
"""
|
Logan213/is210-week-04-warmup
|
tests/test_task_05.py
|
Python
|
mpl-2.0
| 1,085 | 0.000922 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests lesson 04 task 05."""
# Import Python libs
import unittest
import mock
import random
class Lesson04Task05TestCase(unittest.TestCase):
"""
Test cases for lesson 04 task 05.
"""
def test_blood_pressure_status(self):
"""
        Tests that the correct ``BP_STATUS`` is returned.
This test will try random numbers in each of the target ranges.
"""
        levels = {'low': [-256, 89],
                  'ideal': [90, 119],
                  'warning': [120, 139],
                  'high': [140, 159],
                  'emergency': [160, 256]}
for key, value in levels.iteritems():
systolic = random.randint(value[0], value[1])
with mock.patch('__builtin__.raw_input', side_effect=[systolic]):
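                # reload() forces task_05 to re-run its module-level
                # raw_input() call under the current mock; the first
                # iteration falls back to a plain import instead.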
try:
task_05 = reload(task_05)
except NameError:
import task_05
self.assertEqual(task_05.BP_STATUS.lower(), key)
if __name__ == '__main__':
unittest.main()
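# For reference, a task_05.py that would satisfy this test might look like the
# sketch below (an assumption -- the real solution file is not shown here):
#
#   SYSTOLIC = int(raw_input('Enter your systolic blood pressure: '))
#   if SYSTOLIC <= 89:
#       BP_STATUS = 'Low'
#   elif SYSTOLIC <= 119:
#       BP_STATUS = 'Ideal'
#   elif SYSTOLIC <= 139:
#       BP_STATUS = 'Warning'
#   elif SYSTOLIC <= 159:
#       BP_STATUS = 'High'
#   else:
#       BP_STATUS = 'Emergency'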
|
bshaffer/google-api-php-client-services
|
generator/src/googleapis/codegen/api.py
|
Python
|
apache-2.0
| 37,026 | 0.005888 |
#!/usr/bin/python2.7
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create an API definition by interpreting a discovery document.
This module interprets a discovery document to create a tree of classes which
represent the API structure in a way that is useful for generating a library.
For each discovery element (e.g. schemas, resources, methods, ...) there is
a class to represent it which is directly usable in the templates. The
instances of those classes are annotated with extra variables for use
in the template which are language specific.
The current way to make use of this class is to create a programming language
specific subclass of Api, which adds annotations and template variables
appropriate for that language.
TODO(user): Refactor this so that the API can be loaded first, then annotated.
"""
__author__ = 'aiuto@google.com (Tony Aiuto)'
import json
import logging
import operator
import urlparse
from googleapis.codegen import data_types
from googleapis.codegen import template_objects
from googleapis.codegen import utilities
from googleapis.codegen.api_exception import ApiException
from googleapis.codegen.schema import Schema
from googleapis.codegen.utilities import convert_size
_DEFAULT_SERVICE_HOST = 'www.googleapis.com'
_DEFAULT_OWNER_DOMAIN = 'google.com'
_DEFAULT_OWNER_NAME = 'Google'
_RECOGNIZED_GOOGLE_DOMAINS = (
'google.com',
'googleapis.com',
'googleplex.com'
)
# Recognized names of request and response fields used for paging.
_PAGE_TOKEN_NAMES = ('pageToken', 'nextPageToken')
_LOGGER = logging.getLogger('codegen')
class Api(template_objects.CodeObject):
"""An API definition.
This class holds a discovery centric definition of an API. It contains
members such as "resources" and "schemas" which relate directly to discovery
concepts. It defines several properties that can be used in code generation
templates:
name: The API name.
version: The API version.
versionNoDots: The API version with all '.' characters replaced with '_'.
This is typically used in class names.
versionNoDash: The API version with all '-' characters replaced with '_'.
This is typically used in file names where '-' has meaning.
authScopes: The list of the OAuth scopes used by this API.
dataWrapper: True if the API definition contains the 'dataWrapper' feature.
methods: The list of top level API methods.
models: The list of API data models, both from the schema section of
discovery and from anonymous objects defined in method definitions.
parameters: The list of global method parameters (applicable to all methods)
resources: The list of API resources
"""
def __init__(self, discovery_doc, language=None):
super(Api, self).__init__(discovery_doc, self,
wire_name=discovery_doc['name'])
name = self.values['name']
self._validator.ValidateApiName(name)
if name != 'freebase':
self._validator.ValidateApiVersion(self.values['version'])
canonical_name = self.values.get('canonicalName') or name
if not self.values.get('canonicalName'):
self.values['canonicalName'] = canonical_name
self._class_name = self.ToClassName(canonical_name, self)
# Guard against language implementor not taking care of spaces
self._class_name = self._class_name.replace(' ', '')
self._NormalizeOwnerInformation()
self._language = language
    self._template_dir = None
self._surface_features = {}
self._schemas = {}
self._methods_by_name = {}
self._all_methods = []
self.SetTemplateValue('className', self._class_name)
self.SetTemplateValue('versionNoDots',
self.values['version'].replace('.', '_'))
self.SetTemplateValue('versionNoDash',
self.values['version'].replace('-', '_'))
self.SetTemplateValue('dataWrapper',
'dataWrapper' in discovery_doc.get('features', []))
self.values.setdefault('title', name)
self.values.setdefault('exponentialBackoffDefault', False)
if not self.values.get('revision'):
self.values['revision'] = 'snapshot'
self._NormalizeUrlComponents()
# Information for variant subtypes, a dictionary of the format:
#
# { 'wireName': {'discriminant': discriminant, 'value': value,
# 'schema': schema},
# ... }
#
# ... where wireName is the name of variant subtypes, discriminant
# the field name of the discriminant, value the discriminant value
# for this variant, and schema the base schema.
#
# This information cannot be stored in the referred schema at
# reading time because at the time we read it from the base
# schema, the referenced variant schemas may not yet be loaded. So
# we first store it here, and after all schemas have been loaded,
# update the schema template properties.
self._variant_info = {}
# Build data types and methods
self._SetupModules()
self.void_type = data_types.Void(self)
self._BuildSchemaDefinitions()
self._BuildResourceDefinitions()
self.SetTemplateValue('resources', self._resources)
# Make data models part of the api dictionary
self.SetTemplateValue('models', self.ModelClasses())
# Replace methods dict with Methods
self._top_level_methods = []
method_dict = self.values.get('methods') or {}
for name in sorted(method_dict):
self._top_level_methods.append(Method(self, name, method_dict[name]))
self.SetTemplateValue('methods', self._top_level_methods)
# Global parameters
self._parameters = []
param_dict = self.values.get('parameters') or {}
for name in sorted(param_dict):
parameter = Parameter(self, name, param_dict[name], self)
self._parameters.append(parameter)
if name == 'alt':
self.SetTemplateValue('alt', parameter)
self.SetTemplateValue('parameters', self._parameters)
# Auth scopes
self._authscopes = []
if (self.values.get('auth') and
self.values['auth'].get('oauth2') and
self.values['auth']['oauth2'].get('scopes')):
for value, auth_dict in sorted(
self.values['auth']['oauth2']['scopes'].iteritems()):
self._authscopes.append(AuthScope(self, value, auth_dict))
self.SetTemplateValue('authscopes', self._authscopes)
@property
def all_schemas(self):
"""The dictionary of all the schema objects found in the API."""
return self._schemas
def _SetupModules(self):
"""Compute and set the module(s) which this API belongs under."""
# The containing module is based on the owner information.
path = self.values.get('modulePath') or self.values.get('packagePath')
self._containing_module = template_objects.Module(
package_path=path,
owner_name=self.values.get('owner'),
owner_domain=self.values.get('ownerDomain'))
self.SetTemplateValue('containingModule', self._containing_module)
# The API is a child of the containing_module
base = self.values['name']
# TODO(user): Introduce a breaking change where we always prefer
# canonicalName.
if self.values.get('packagePath'):
# Lowercase the canonical name only for non-cloud-endpoints Google APIs.
# This is to avoid breaking changes to existing Google-owned Cloud
# Endpoints APIs.
if self.values.get('rootUrl').find('.googleapis.com') > 0:
base = self.values.get('canonicalName').lower() or base
else:
base = self.values.get('canonicalName') or base
if self.values.get('version_module'):
b
|
josiah-wolf-oberholtzer/supriya
|
dev/etc/pending_ugens/XFadeRotate.py
|
Python
|
mit
| 3,541 | 0.001977 |
import collections
from supriya.enums import CalculationRate
from supriya.synthdefs import MultiOutUGen
class XFadeRotate(MultiOutUGen):
"""
::
>>> source = supriya.ugens.In.ar(bus=0)
>>> xfade_rotate = supriya.ugens.XFadeRotate.ar(
... n=0,
... source=source,
... )
>>> xfade_rotate
XFadeRotate.ar()
"""
### CLASS VARIABLES ###
__slots__ = ()
    _ordered_input_names = (
        'n',
        'source',
        )
_valid_calculation_rates = None
### INITIALIZER ###
def __init__(
self,
calculation_rate=None,
n=0,
source=None,
):
MultiOutUGen.__init__(
self,
calculation_rate=calculation_rate,
n=n,
source=source,
)
### PUBLIC METHODS ###
@classmethod
def ar(
cls,
n=0,
source=None,
):
"""
Constructs an audio-rate XFadeRotate.
::
>>> source = supriya.ugens.In.ar(bus=0)
>>> xfade_rotate = supriya.ugens.XFadeRotate.ar(
... n=0,
... source=source,
... )
>>> xfade_rotate
XFadeRotate.ar()
Returns ugen graph.
"""
import supriya.synthdefs
calculation_rate = supriya.CalculationRate.AUDIO
ugen = cls._new_expanded(
calculation_rate=calculation_rate,
n=n,
source=source,
)
return ugen
@classmethod
def kr(
cls,
n=0,
source=None,
):
"""
Constructs a control-rate XFadeRotate.
::
>>> source = supriya.ugens.In.ar(bus=0)
>>> xfade_rotate = supriya.ugens.XFadeRotate.kr(
... n=0,
... source=source,
... )
>>> xfade_rotate
XFadeRotate.kr()
Returns ugen graph.
"""
import supriya.synthdefs
calculation_rate = supriya.CalculationRate.CONTROL
ugen = cls._new_expanded(
calculation_rate=calculation_rate,
n=n,
source=source,
)
return ugen
# def newFromDesc(): ...
### PUBLIC PROPERTIES ###
@property
def n(self):
"""
Gets `n` input of XFadeRotate.
::
>>> source = supriya.ugens.In.ar(bus=0)
>>> xfade_rotate = supriya.ugens.XFadeRotate.ar(
... n=0,
... source=source,
... )
>>> xfade_rotate.n
0.0
Returns ugen input.
"""
index = self._ordered_input_names.index('n')
return self._inputs[index]
@property
def source(self):
"""
Gets `source` input of XFadeRotate.
::
>>> source = supriya.ugens.In.ar(bus=0)
        >>> xfade_rotate = supriya.ugens.XFadeRotate.ar(
... n=0,
... source=source,
... )
>>> xfade_rotate.source
OutputProxy(
source=In(
bus=0.0,
calculation_rate=CalculationRate.AUDIO,
channel_count=1
),
output_index=0
)
Returns ugen input.
"""
index = self._ordered_input_names.index('source')
return self._inputs[index]
|