commit | subject | repos | old_file | new_file | new_contents | old_contents | license | lang
---|---|---|---|---|---|---|---|---|
7fcfe4ece5d7b792b2f38b9b0115f590d3fe0e60
|
Fix glu.py for windows
|
tuttleofx/sconsProject
|
autoconf/glu.py
|
autoconf/glu.py
|
from _external import *
from gl import *
if windows:
glu = LibWithHeaderChecker('GLU32', ['windows.h','GL/glu.h'], 'c', dependencies=[gl])
elif macos:
glu = LibWithHeaderChecker('OpenGL', ['OpenGL/glu.h'], 'c', name='glu')
else :
glu = LibWithHeaderChecker('GLU', ['GL/glu.h'], 'c', dependencies=[gl])
|
from _external import *
from gl import *
if windows:
glu = LibWithHeaderChecker('GLU32', ['windows.h','GL/glu.h'], 'c', dependencies=[gl])
if macos:
glu = LibWithHeaderChecker('OpenGL', ['OpenGL/glu.h'], 'c', name='glu')
else :
glu = LibWithHeaderChecker('GLU', ['GL/glu.h'], 'c', dependencies=[gl])
|
mit
|
Python
|
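Note on the diff above: the fix replaces the second `if` with `elif`. With independent `if` statements the trailing `else` belongs only to the macOS test, so on Windows the Linux branch still runs and overwrites `glu`. A standalone sketch of the pitfall (the flags and `check` stub are hypothetical stand-ins for the sconsProject globals):

```python
# Hypothetical flags standing in for sconsProject's platform globals.
windows, macos = True, False

def check(name):
    return name  # stand-in for LibWithHeaderChecker

# Old pattern: the `else` binds only to the *second* if, so on Windows
# it still runs and clobbers the GLU32 checker.
if windows:
    glu = check('GLU32')
if macos:
    glu = check('OpenGL')
else:
    glu = check('GLU')
assert glu == 'GLU'        # wrong on Windows

# Fixed pattern: one if/elif/else chain, exactly one branch runs.
if windows:
    glu = check('GLU32')
elif macos:
    glu = check('OpenGL')
else:
    glu = check('GLU')
assert glu == 'GLU32'      # correct
```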
2e3d31dd20936574d238fc61c1d43983d8b9ff1c
|
Add out_path input.
|
ohsu-qin/qipipe
|
qipipe/interfaces/fix_dicom.py
|
qipipe/interfaces/fix_dicom.py
|
from nipype.interfaces.base import (BaseInterface,
BaseInterfaceInputSpec, traits, Directory, TraitedSpec)
import os
from qipipe.staging.fix_dicom import fix_dicom_headers
class FixDicomInputSpec(BaseInterfaceInputSpec):
source = Directory(exists=True, desc='The input patient directory', mandatory=True)
dest = traits.String(desc='The output location', mandatory=True)
class FixDicomOutputSpec(TraitedSpec):
dest = Directory(exists=True, desc="The target output patient directory")
class FixDicom(BaseInterface):
input_spec = FixDicomInputSpec
output_spec = FixDicomOutputSpec
def _run_interface(self, runtime):
fix_dicom_headers(self.inputs.source, self.inputs.dest)
return runtime
def _list_outputs(self):
outputs = self._outputs().get()
outputs['dest'] = self.inputs.dest
return outputs
|
from nipype.interfaces.base import (BaseInterface,
BaseInterfaceInputSpec, traits, Directory, TraitedSpec)
import os
from qipipe.staging.fix_dicom import fix_dicom_headers
class FixDicomInputSpec(BaseInterfaceInputSpec):
source = Directory(exists=True, desc='The input patient directory', mandatory=True)
dest = traits.String(desc='The output location', mandatory=True)
class FixDicomOutputSpec(TraitedSpec):
out_path = Directory(exists=True, desc="The output patient directory")
class FixDicom(BaseInterface):
input_spec = FixDicomInputSpec
output_spec = FixDicomOutputSpec
def _run_interface(self, runtime):
fix_dicom_headers(self.inputs.source, self.inputs.dest)
return runtime
def _list_outputs(self):
outputs = self._outputs().get()
outputs['out_path'] = self.inputs.dest
return outputs
|
bsd-2-clause
|
Python
|
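Both versions above follow nipype's custom-interface pattern: an input spec, an output spec, `_run_interface` for the work, and `_list_outputs` to report results. A hedged usage sketch (paths hypothetical; note the output field is `dest` in one snapshot and `out_path` in the other):

```python
# Hypothetical usage of the FixDicom interface above (paths made up).
fix = FixDicom(source='/data/patient1', dest='/data/fixed/patient1')
result = fix.run()           # nipype runs _run_interface, then _list_outputs
print(result.outputs)        # exposes `dest` or `out_path`, per the spec
```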
9d3d541faaf993665040d39a5cacb52d7a096cde
|
Add in a model concept for settings
|
jalama/drupdates
|
drupdates/utils.py
|
drupdates/utils.py
|
import datetime
import requests
import os
from os.path import expanduser
import yaml
def nextFriday():
# Get the date string for the following Friday
today = datetime.date.today()
if datetime.datetime.today().weekday() == 4:
friday = str(today + datetime.timedelta( (3-today.weekday())%7+1 ))
else:
friday = str(today + datetime.timedelta( (4-today.weekday()) % 7 ))
return friday
def apiCall (uri, name, method = 'get', **kwargs):
#user = '', pword = ''):
""" Perform and API call, expecting a JSON response. Largely a wrapper
around the request module
Keyword arguments:
uri -- the uri of the Restful Web Service (required)
name -- the human readable label for the service being called (required)
method -- HTTP method to use (defaul = 'get')
kwargs -- dictionary of arguments passed directly to requests module method
"""
# FIXME: need to HTML escape passwords
func = getattr(requests, method)
args = {}
for key, value in kwargs.iteritems():
args[key] = value
# if not user == '' and not pword == '':
# args.append("auth=(user, pword)")
r = func(uri, **args)
responseDictionary = r.json()
#If API call errors out print the error and quit the script
if r.status_code != 200:
if 'errors' in responseDictionary:
errors = responseDictionary.pop('errors')
firstError = errors.pop()
elif 'error' in responseDictionary:
firstError = responseDictionary.pop('error')
else:
firstError = {'message': "No error message provided by response"}
print("{0} returned an error, exiting the script.\n Status Code: {1} \n Error: {2}".format(name, r.status_code , firstError['message']))
return False
else:
return responseDictionary
class Settings:
__localFile = expanduser("~") + '/.drupdates/main.yaml'
def __init__(self):
self.__settings = {}
self.__model()
currentDir = os.path.dirname(os.path.realpath(__file__))
default = open(currentDir + "/settings/default.yaml", 'r')
self.__settings = yaml.load(default)
default.close()
if os.path.isfile(self.__localFile):
local = open(self.__localFile, 'r')
self.__local = yaml.load(local)
local.close()
self.__settings = dict(self.__settings.items() + self.__local.items())
def __model(self):
model = {}
model['default'] = ''
model['value'] = ''
model['prompt'] = ''
model['format'] = ''
self.__model = model
def get(self, setting):
if setting in self.__settings:
setting = dict(self.__model.items() + self.__settings[setting].items())
return setting['value']
# Load variables:
settings = Settings()
|
import datetime
import requests
import os
from os.path import expanduser
import yaml
def nextFriday():
# Get the date string for the following Friday
today = datetime.date.today()
if datetime.datetime.today().weekday() == 4:
friday = str(today + datetime.timedelta( (3-today.weekday())%7+1 ))
else:
friday = str(today + datetime.timedelta( (4-today.weekday()) % 7 ))
return friday
def apiCall (uri, name, method = 'get', **kwargs):
#user = '', pword = ''):
""" Perform and API call, expecting a JSON response. Largely a wrapper
around the request module
Keyword arguments:
uri -- the uri of the Restful Web Service (required)
name -- the human readable label for the service being called (required)
method -- HTTP method to use (defaul = 'get')
kwargs -- dictionary of arguments passed directly to requests module method
"""
# FIXME: need to HTML escape passwords
func = getattr(requests, method)
args = {}
for key, value in kwargs.iteritems():
args[key] = value
# if not user == '' and not pword == '':
# args.append("auth=(user, pword)")
r = func(uri, **args)
responseDictionary = r.json()
#If API call errors out print the error and quit the script
if r.status_code != 200:
if 'errors' in responseDictionary:
errors = responseDictionary.pop('errors')
firstError = errors.pop()
elif 'error' in responseDictionary:
firstError = responseDictionary.pop('error')
else:
firstError = {'message': "No error message provided by response"}
print("{0} returned an error, exiting the script.\n Status Code: {1} \n Error: {2}".format(name, r.status_code , firstError['message']))
return False
else:
return responseDictionary
class Settings:
__localFile = expanduser("~") + '/.drupdates/main.yaml'
def __init__(self):
currentDir = os.path.dirname(os.path.realpath(__file__))
default = open(currentDir + "/settings/default.yaml", 'r')
self.__settings = yaml.load(default)
default.close()
if os.path.isfile(self.__localFile):
local = open(self.__localFile, 'r')
self.__local = yaml.load(local)
local.close()
self.__settings = dict( self.__settings + self.__local.items())
def get(self, setting):
return self.__settings[setting]['value']
# Load variables:
settings = Settings()
|
mit
|
Python
|
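Note on the `Settings` change above: `__model` supplies a per-setting schema, and `get` merges it under the stored setting so a `value` key always exists. The code is Python 2 (`iteritems`, `dict(a.items() + b.items())`); a minimal Python 3 sketch of the same merge, with made-up setting data:

```python
model = {'default': '', 'value': '', 'prompt': '', 'format': ''}
stored = {'value': 'weekly'}

# The original (Python 2) merges with dict(model.items() + stored.items());
# in Python 3 the equivalent is dict unpacking, later keys winning:
setting = {**model, **stored}
assert setting['value'] == 'weekly'   # stored value overrides the model default
assert setting['prompt'] == ''        # missing keys fall back to the model
```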
3972594787f4ed33d656ff0c097fdb3633a96b14
|
add testcase for #1
|
Thor77/TeamspeakStats,Thor77/TeamspeakStats
|
tsstats/tests/test_log.py
|
tsstats/tests/test_log.py
|
from time import sleep
import pytest
from tsstats.exceptions import InvalidLog
from tsstats.log import parse_log, parse_logs
@pytest.fixture
def clients():
return parse_log('tsstats/tests/res/test.log')
def test_log_client_count(clients):
assert len(clients) == 3
def test_log_onlinetime(clients):
assert clients['1'].onlinetime == 402
assert clients['2'].onlinetime == 20
def test_log_kicks(clients):
assert clients['UIDClient1'].kicks == 1
def test_log_pkicks(clients):
assert clients['2'].pkicks == 1
def test_log_bans(clients):
assert clients['UIDClient1'].bans == 1
def test_log_pbans(clients):
assert clients['2'].pbans == 1
def test_log_invalid():
with pytest.raises(InvalidLog):
parse_log('tsstats/tests/res/test.log.broken')
def test_log_multiple():
assert len(parse_log('tsstats/tests/res/test.log')) == \
len(parse_logs('tsstats/tests/res/test.log'))
@pytest.mark.slowtest
def test_log_client_online():
clients = parse_log('tsstats/tests/res/test.log')
assert clients['1'].onlinetime == 402
sleep(2)
clients = parse_log('tsstats/tests/res/test.log')
assert clients['1'].onlinetime == 404
|
import pytest
from tsstats.exceptions import InvalidLog
from tsstats.log import parse_log, parse_logs
@pytest.fixture
def clients():
return parse_log('tsstats/tests/res/test.log')
def test_log_client_count(clients):
assert len(clients) == 3
def test_log_onlinetime(clients):
assert clients['1'].onlinetime == 402
assert clients['2'].onlinetime == 20
def test_log_kicks(clients):
assert clients['UIDClient1'].kicks == 1
def test_log_pkicks(clients):
assert clients['2'].pkicks == 1
def test_log_bans(clients):
assert clients['UIDClient1'].bans == 1
def test_log_pbans(clients):
assert clients['2'].pbans == 1
def test_log_invalid():
with pytest.raises(InvalidLog):
parse_log('tsstats/tests/res/test.log.broken')
def test_log_multiple():
assert len(parse_log('tsstats/tests/res/test.log')) == \
len(parse_logs('tsstats/tests/res/test.log'))
|
mit
|
Python
|
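Note on the test added above: it sleeps for two seconds, hence the custom `slowtest` marker. How that marker is registered is not shown in the record; a plausible conftest sketch (an assumption, not from the repo):

```python
# conftest.py -- a plausible sketch (not from the repo): register the
# custom marker so `pytest -m "not slowtest"` can deselect the slow test.
def pytest_configure(config):
    config.addinivalue_line(
        "markers", "slowtest: test sleeps to observe onlinetime growing"
    )
```

With that in place, `pytest -m "not slowtest"` skips the slow case during quick iterations.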
07f86c47c58d6266bd4b42c81521001aca072ff1
|
Add some more rubbish to example string
|
bwhmather/json-config-parser
|
jsonconfigparser/test/__init__.py
|
jsonconfigparser/test/__init__.py
|
import unittest
from jsonconfigparser import JSONConfigParser
class JSONConfigTestCase(unittest.TestCase):
def test_init(self):
JSONConfigParser()
def test_read_string(self):
string = '[section]\n' + \
'# comment comment\n' + \
'foo = "bar"\n' + \
'\n' + \
'[section2]\n' + \
'bar = "baz"\n'
cf = JSONConfigParser()
cf.read_string(string)
self.assertEqual(cf.get('section', 'foo'), 'bar')
def test_get(self):
cf = JSONConfigParser()
cf.add_section('section')
cf.set('section', 'section', 'set-in-section')
self.assertEqual(cf.get('section', 'section'), 'set-in-section')
cf.set(cf.default_section, 'defaults', 'set-in-defaults')
self.assertEqual(cf.get('section', 'defaults'), 'set-in-defaults')
self.assertEqual(cf.get('section', 'vars',
vars={'vars': 'set-in-vars'}),
'set-in-vars')
self.assertEqual(cf.get('section', 'unset', 'fallback'), 'fallback')
suite = unittest.TestLoader().loadTestsFromTestCase(JSONConfigTestCase)
|
import unittest
from jsonconfigparser import JSONConfigParser
class JSONConfigTestCase(unittest.TestCase):
def test_init(self):
JSONConfigParser()
def test_read_string(self):
string = '[section]\n' + \
'foo = "bar"\n'
cf = JSONConfigParser()
cf.read_string(string)
self.assertEqual(cf.get('section', 'foo'), 'bar')
def test_get(self):
cf = JSONConfigParser()
cf.add_section('section')
cf.set('section', 'section', 'set-in-section')
self.assertEqual(cf.get('section', 'section'), 'set-in-section')
cf.set(cf.default_section, 'defaults', 'set-in-defaults')
self.assertEqual(cf.get('section', 'defaults'), 'set-in-defaults')
self.assertEqual(cf.get('section', 'vars',
vars={'vars': 'set-in-vars'}),
'set-in-vars')
self.assertEqual(cf.get('section', 'unset', 'fallback'), 'fallback')
suite = unittest.TestLoader().loadTestsFromTestCase(JSONConfigTestCase)
|
bsd-3-clause
|
Python
|
60039cd74693982ef38808a63366aa1454b50bd1
|
Bump version to 13.3.2
|
hhursev/recipe-scraper
|
recipe_scrapers/__version__.py
|
recipe_scrapers/__version__.py
|
__version__ = "13.3.2"
|
__version__ = "13.3.1"
|
mit
|
Python
|
87a720dc526efe9732fd1b4633e773ef4a11352a
|
Use earliest consultation if legal date is unavailable
|
meine-stadt-transparent/meine-stadt-transparent,meine-stadt-transparent/meine-stadt-transparent,meine-stadt-transparent/meine-stadt-transparent,meine-stadt-transparent/meine-stadt-transparent
|
mainapp/management/commands/fix-sort-date.py
|
mainapp/management/commands/fix-sort-date.py
|
import datetime
from dateutil import tz
from django.core.management.base import BaseCommand
from django.db.models import F, Subquery, OuterRef, Q
from mainapp.models import Paper, File, Consultation
class Command(BaseCommand):
help = "After the initial import, this command guesses the sort_date-Attribute of papers and files"
def add_arguments(self, parser):
help_str = (
"The date of the first import in the format YYYY-MM-DD. "
+ "All documents/files created up to this day will have the sort_date-Attribute modified."
)
parser.add_argument("import_date", type=str, help=help_str)
help_str = "If no date can be determined, this will be used as fallback. Should be far in the past."
parser.add_argument("fallback_date", type=str, help=help_str)
def handle(self, *args, **options):
import_date = datetime.datetime.strptime(
options["import_date"] + " 23:59:59", "%Y-%m-%d %H:%M:%S"
).replace(tzinfo=tz.tzlocal())
fallback_date = datetime.datetime.strptime(
options["fallback_date"], "%Y-%m-%d"
).replace(tzinfo=tz.tzlocal())
self.stdout.write("Fixing papers...")
num = Paper.objects.filter(
created__lte=import_date, legal_date__isnull=False
).update(sort_date=F("legal_date"), modified=F("legal_date"))
self.stdout.write(f"=> Changed papers: {num}")
num = Paper.objects.filter(legal_date__isnull=True).update(
sort_date=fallback_date
)
self.stdout.write(f"=> Not fixable due to missing legal date: {num}")
# Use the date of the earliest consultation
earliest_consultation = (
Consultation.objects.filter(paper=OuterRef("pk"), meeting__isnull=False)
.order_by("meeting__start")
.values("meeting__start")[:1]
)
num = (
Paper.objects.filter(
Q(sort_date=fallback_date) | ~Q(sort_date=F("legal_date"))
)
.annotate(earliest_consultation=Subquery(earliest_consultation))
.filter(earliest_consultation__isnull=False)
.update(sort_date=F("earliest_consultation"))
)
self.stdout.write(f"=> Fix by earliest consultation: {num}")
self.stdout.write("Fixing files...")
num = File.objects.filter(
created__lte=import_date, legal_date__isnull=False
).update(sort_date=F("legal_date"), modified=F("legal_date"))
self.stdout.write(f"=> Changed files: {num}")
num = File.objects.filter(legal_date__isnull=True).update(
sort_date=fallback_date
)
self.stdout.write(f"=> Not determinable: {num}")
|
import datetime
from django.core.management.base import BaseCommand
from django.db.models import F
from mainapp.models import Paper, File
class Command(BaseCommand):
help = "After the initial import, this command guesses the sort_date-Attribute of papers and files"
def add_arguments(self, parser):
help_str = (
"The date of the first import in the format YYYY-MM-DD. "
+ "All documents/files created up to this day will have the sort_date-Attribute modified."
)
parser.add_argument("import_date", type=str, help=help_str)
help_str = "If no date can be determined, this will be used as fallback. Should be far in the past."
parser.add_argument("fallback_date", type=str, help=help_str)
def handle(self, *args, **options):
import_date = datetime.datetime.strptime(
options["import_date"] + " 23:59:59", "%Y-%m-%d %H:%M:%S"
)
fallback_date = datetime.datetime.strptime(options["fallback_date"], "%Y-%m-%d")
self.stdout.write("Fixing papers...")
num = Paper.objects.filter(
created__lte=import_date, legal_date__isnull=False
).update(sort_date=F("legal_date"), modified=F("legal_date"))
self.stdout.write("=> Changed records: ", num)
num = Paper.objects.filter(legal_date__isnull=True).update(
sort_date=fallback_date
)
self.stdout.write("=> Not determinable: ", num)
self.stdout.write("Fixing files...")
num = File.objects.filter(
created__lte=import_date, legal_date__isnull=False
).update(sort_date=F("legal_date"), modified=F("legal_date"))
self.stdout.write("=> Changed records: ", num)
num = File.objects.filter(legal_date__isnull=True).update(
sort_date=fallback_date
)
self.stdout.write("=> Not determinable: ", num)
|
mit
|
Python
|
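Two side fixes ride along with the consultation fallback above: the parsed datetimes get a timezone attached, and the `self.stdout.write("=> Changed records: ", num)` calls become f-strings, since Django's `OutputWrapper.write()` takes a single message (its second positional parameter is a style function, not more text). A runnable sketch of the timezone fix (the date string is made up):

```python
import datetime
from dateutil import tz

# Made-up argument value; mirrors the handle() parsing above.
import_date = datetime.datetime.strptime(
    "2020-01-31 23:59:59", "%Y-%m-%d %H:%M:%S"
).replace(tzinfo=tz.tzlocal())
print(import_date.isoformat())   # offset now included, e.g. ...+01:00
```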
5320f9bd74aeab70849cf288d5da4a94bd98cccd
|
store labels in a separate text field
|
osma/annif,osma/annif,osma/annif
|
load_corpus.py
|
load_corpus.py
|
#!/usr/bin/env python
from elasticsearch import Elasticsearch
from elasticsearch.client import IndicesClient
import os
es = Elasticsearch()
index = IndicesClient(es)
if index.exists('yso'):
index.delete('yso')
indexconf = {
'mappings': {
'concept': {
'properties': {
'labels': {
'type': 'string',
'analyzer': 'finnish'
},
'text': {
'type': 'string',
'analyzer': 'finnish'
},
'boost': {
'type': 'double'
}
}
}
}
}
index.create(index='yso', body=indexconf)
files = os.listdir('corpus')
for file in files:
if not file.endswith('.fi'):
continue
f = open('corpus/%s' % file, 'r')
uri, label = f.readline().strip().split(' ', 1)
print file, uri, label
cid = uri.split('p')[-1]
labels = f.readline().strip()
text = labels + " " + "".join(f.readlines())
body = {'uri': uri, 'label': label, 'labels': labels, 'text': text, 'boost': 1}
es.index(index='yso', doc_type='concept', id=cid, body=body)
f.close()
|
#!/usr/bin/env python
from elasticsearch import Elasticsearch
from elasticsearch.client import IndicesClient
import os
es = Elasticsearch()
index = IndicesClient(es)
if index.exists('yso'):
index.delete('yso')
indexconf = {
'mappings': {
'concept': {
'properties': {
'text': {
'type': 'string',
'analyzer': 'finnish'
},
'boost': {
'type': 'double'
}
}
}
}
}
index.create(index='yso', body=indexconf)
files = os.listdir('corpus')
for file in files:
if not file.endswith('.fi'):
continue
f = open('corpus/%s' % file, 'r')
uri, label = f.readline().strip().split(' ', 1)
print file, uri, label
cid = uri.split('p')[-1]
text = "".join(f.readlines())
body = {'uri': uri, 'label': label, 'text': text, 'boost': 1}
es.index(index='yso', doc_type='concept', id=cid, body=body)
f.close()
|
cc0-1.0
|
Python
|
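With `labels` indexed as its own Finnish-analyzed field (in addition to being prepended to `text`), a search can weight label matches above body matches. A hedged sketch of such a query against this index (the boost factor and search term are assumptions, not from the source; uses the pre-DSL `body=` style matching this script's era of elasticsearch-py):

```python
from elasticsearch import Elasticsearch

es = Elasticsearch()
res = es.search(index='yso', body={
    'query': {
        'multi_match': {
            'query': 'kissa',                 # hypothetical Finnish term
            'fields': ['labels^2', 'text'],   # ^2 doubles label weight
        }
    }
})
# res['hits']['hits'] now ranks documents whose labels match first.
```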
4a719e275c3639b2a2186711d9d616ce9435d614
|
Update agent for environment
|
danieloconell/Louis
|
reinforcement-learning/play.py
|
reinforcement-learning/play.py
|
"""This is the agent which currently takes the action with highest immediate reward."""
import env
env.make("text")
for episode in range(10):
env.reset()
episode_reward = 0
for t in range(100):
episode_reward += env.actual_reward
if env.done:
print(
"Episode %d finished after %d timesteps, with reward %d"
% ((episode + 1), (t + 1), episode_reward))
break
max_action = 0
index = -1
for item in env.actions:
if env.create_reward(item) > max_action:
max_action = env.create_reward(item)
action = [item, index]
else:
index += 1
print(action[0])
episode_reward += env.create_reward(action[0])
env.action(action[0])
env.render()
|
"""This is the agent which currently takes the action with highest immediate reward."""
import pandas as pd
import numpy as np
import env
actions = ["left", "right", "stay"]
left = {x: [0]*(env.screen_width - 1) for x in range(2)}
right = {x: [0]*(env.screen_width - 1) for x in range(2)}
table = pd.DataFrame(left)
def max(list):
max = 0
index = 0
for item in list:
item += 1
if item > max:
max = item
return item
"""if np.random.uniform() > epsilon or all_zero:
action = np.random.choice(actions)
else:
action = None"""
for episode in range(10):
env.reset()
episode_reward = 0
for t in range(100):
episode_reward += env.actual_reward
if env.done:
print(
"Episode %d finished after %d timesteps, with reward %d"
% ((episode + 1), (t + 1), episode_reward))
break
max_action = 0
index = -1
for item in actions:
if env.create_reward(item) > max_action:
max_action = env.create_reward(item)
action = [item, index]
else:
index += 1
print(action[0])
episode_reward += env.create_reward(action[0])
env.action(action[0])
env.render()
|
mit
|
Python
|
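The old version above carried commented-out scaffolding for epsilon-greedy exploration; the agent that survives the cleanup is purely greedy over `env.create_reward`. For reference, a minimal epsilon-greedy selection sketch (standalone, with a stubbed reward function; none of this is from the repo):

```python
import random

actions = ["left", "right", "stay"]

def create_reward(action):       # stub standing in for env.create_reward
    return {"left": 0.1, "right": 0.5, "stay": 0.2}[action]

def choose(epsilon=0.1):
    # With probability epsilon explore at random, else act greedily.
    if random.random() < epsilon:
        return random.choice(actions)
    return max(actions, key=create_reward)

print(choose())
```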
2a2ab3f758facfafe3604325ecec08cfcfa2b6e9
|
Update tests.py
|
CSE360G3/assginment5
|
image_space_app/tests.py
|
image_space_app/tests.py
|
import datetime
import unittest
from django.utils import timezone
from django.test import TestCase
class ImageSpaceTests(unittest.TestCase):
def setUp(self):
self.url="http://localhost:8000"
self.email="John@doe.com"
self.password="password"
def tearDown(self):
del self.url
def test1(self):
self.assertEqual(self.url,"http://localhost:8000")
print "After the user input the right URL, home page of the image space opens up"
def test2(self):
self.assertEqual(self.email,"John@doe.com")
self.assertEqual(self.password,"password")
print "If the user enters the correct email and password, log in to the profile of John Doe"
def test3(self):
"""
url is not correct
"""
self.assertNotEqual(self.url,"google.com")
print "If the user did not enter the right URL, URL is not correct"
def test4(self):
"""
Invalid Email/Password
"""
self.assertNotEqual(self.email,"")
self.assertNotEqual(self.password,"")
print "If the user either leaves email or password blank, Invalid Email/Password"
def test5(self):
self.assertNotEqual(self.email,"Johndoe")
self.assertNotEqual(self.password,"pass")
print "If the user enther the wrong email or password or both, Invalid Email/Password"
def test6(self):
"""
At sign up page
"""
self.assertNotEqual(self.email,"")
self.assertNotEqual(self.password,"")
print "At the sign up page if the user leaves either email or password blank then prompt: This field is required"
self.assertNotEqual(self.email,"Johndoe")
print "At the sign up page if the user enter email address to sign up without @something.com then prompt error to enter the valid email address with @"
|
import datetime
import unittest
from django.utils import timezone
from django.test import TestCase
class ImageSpaceTests(unittest.TestCase):
def setUp(self):
self.url="http://localhost:8000"
self.email="John@doe.com"
self.password="password"
def tearDown(self):
del self.url
def test1(self):
self.assertEqual(self.url,"http://localhost:8000")
print "After the user input the right URL, home page of the image space opens up"
def test2(self):
self.assertEqual(self.email,"John@doe.com")
self.assertEqual(self.password,"password")
print "If the user enters the correct email and password, log in to the profile of John Doe"
def test3(self):
"""
url is not correct
"""
self.assertNotEqual(self.url,"google.com")
print "If the user did not enter the right URL, URL is not correct"
def test4(self):
"""
Invalid Email/Password
"""
self.assertNotEqual(self.email,"")
self.assertNotEqual(self.password,"")
print "If the user either leaves email or password blank, Invalid Email/Password"
def test5(self):
self.assertNotEqual(self.email,"Johndoe")
self.assertNotEqual(self.password,"pass")
print "If the user enther the wrong email or password or both, Invalid Email/Password"
def test6(self):
"""
sign up page
"""
self.assertNotEqual(self.email,"")
self.assertNotEqual(self.password,"")
print "At the sign up page if the user leaves either email or password blank then prompt: This field is required"
|
bsd-3-clause
|
Python
|
7fb829cf17b8274ca67f98356e2d47abedc2df5b
|
Add type information to component registry
|
amolenaar/gaphor,amolenaar/gaphor
|
gaphor/services/componentregistry.py
|
gaphor/services/componentregistry.py
|
"""
A registry for components (e.g. services) and event handling.
"""
from typing import Iterator, Set, Tuple, Type, TypeVar
from gaphor.abc import Service
from gaphor.application import ComponentLookupError
T = TypeVar("T", bound=Service)
class ComponentRegistry(Service):
"""
The ComponentRegistry provides a home for application wide components.
"""
def __init__(self) -> None:
self._comp: Set[Tuple[object, str]] = set()
def shutdown(self) -> None:
pass
def get_service(self, name: str) -> Service:
"""Obtain a service used by Gaphor by name.
E.g. service("element_factory")
"""
return self.get(Service, name) # type: ignore[misc]
def register(self, component: object, name: str):
self._comp.add((component, name))
def unregister(self, component: object):
self._comp = {(c, n) for c, n in self._comp if not c is component}
def get(self, base: Type[T], name: str) -> T:
found = {(c, n) for c, n in self._comp if isinstance(c, base) and n == name}
if len(found) > 1:
raise ComponentLookupError(
f"More than one component matches {base}+{name}: {found}"
)
if len(found) == 0:
raise ComponentLookupError(
f"Component with type {base} and name {name} is not registered"
)
return next(iter(found))[0]
def all(self, base: Type[T]) -> Iterator[Tuple[T, str]]:
return ((c, n) for c, n in self._comp if isinstance(c, base))
|
"""
A registry for components (e.g. services) and event handling.
"""
from typing import Set, Tuple
from gaphor.abc import Service
from gaphor.application import ComponentLookupError
class ComponentRegistry(Service):
"""
The ComponentRegistry provides a home for application wide components.
"""
def __init__(self):
self._comp: Set[Tuple[object, str]] = set()
def shutdown(self):
pass
def get_service(self, name):
"""Obtain a service used by Gaphor by name.
E.g. service("element_factory")
"""
return self.get(Service, name)
def register(self, component, name):
self._comp.add((component, name))
def unregister(self, component):
self._comp = {(c, n) for c, n in self._comp if not c is component}
def get(self, base, name):
found = {(c, n) for c, n in self._comp if isinstance(c, base) and n == name}
if len(found) > 1:
raise ComponentLookupError(
f"More than one component matches {base}+{name}: {found}"
)
if len(found) == 0:
raise ComponentLookupError(
f"Component with type {base} and name {name} is not registered"
)
return next(iter(found))[0]
def all(self, base):
return ((c, n) for c, n in self._comp if isinstance(c, base))
|
lgpl-2.1
|
Python
|
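The change above threads a `TypeVar` bound to `Service` through `get`, so callers get back the concrete subtype they asked for instead of a bare `Service`. A self-contained sketch of the pattern (the `Registry`/`EventBus` names are illustrative, not from gaphor):

```python
from typing import Set, Tuple, Type, TypeVar

class Service:                        # stand-in for gaphor.abc.Service
    pass

T = TypeVar("T", bound=Service)

class Registry:                       # illustrative, not the gaphor class
    def __init__(self) -> None:
        self._comp: Set[Tuple[object, str]] = set()

    def register(self, component: object, name: str) -> None:
        self._comp.add((component, name))

    def get(self, base: Type[T], name: str) -> T:
        # isinstance() narrows c to T, so callers get the subtype back.
        for c, n in self._comp:
            if isinstance(c, base) and n == name:
                return c
        raise LookupError(f"no {base.__name__} named {name!r}")

class EventBus(Service):
    pass

reg = Registry()
reg.register(EventBus(), "event_bus")
bus: EventBus = reg.get(EventBus, "event_bus")    # inferred as EventBus
```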
907165cf323d2492ee2fc2f837a0aff2fec8ef77
|
Update utils.py
|
tsurubee/banpei
|
banpei/utils.py
|
banpei/utils.py
|
import numpy as np
def power_method(A, iter_num=1):
"""
Calculate the first singular vector/value of a target matrix based on the power method.
Parameters
----------
A : numpy array
Target matrix
iter_num : int
Number of iterations
Returns
-------
u : numpy array
first left singular vector of A
s : float
first singular value of A
v : numpy array
first right singular vector of A
"""
# set initial vector q
q = np.random.normal(size=A.shape[1])
q = q / np.linalg.norm(q)
for i in range(iter_num):
q = np.dot(np.dot(A.T, A), q)
v = q / np.linalg.norm(q)
Av = np.dot(A, v)
s = np.linalg.norm(Av)
u = Av / s
return u, s, v
def _rolling_window(a, window):
"""
Usage:
a = np.random.rand(30, 5)
for 2d array:
roll aling axis=0: rolling_window(a.T, 3).transpose(1, 2, 0)
roll along axis=1: rolling_window(a, 3).transpose(1, 0, 2)
for 3d array:
roll along height(axis=0): rolling_window(a.transpose(2, 1, 0), 3).transpose(2, 3, 1, 0)
roll along width(axis=1): rolling_window(a, 3).transpose(2, 0, 1, 3)
roll along depth(axis=2): rolling_window(a.transpose(0, 2, 1), 3).transpose(3, 0, 2, 1)
"""
shape = a.shape[:-1] + (a.shape[-1] - window + 1, window)
strides = a.strides + (a.strides[-1],)
return np.lib.stride_tricks.as_strided(a, shape=shape, strides=strides)
def rolling_window(arr, window, axis=0):
if arr.ndim == 1:
return _rolling_window(arr, window)
elif arr.ndim == 2:
if axis == 0:
return _rolling_window(arr.T, window).transpose(1, 2, 0)
elif axis == 1:
return _rolling_window(arr, window).transpose(1, 0, 2)
else:
raise Exception('AxisError: axis {} is out of bounds for array of dimension {}'.format(axis, arr.ndim))
elif arr.ndim == 3:
if axis == 0:
return _rolling_window(arr.transpose(0, 2, 1), window).transpose(3, 0, 2, 1)
elif axis == 1:
return _rolling_window(arr, window).transpose(2, 0, 1, 3)
elif axis == 2:
return _rolling_window(arr.transpose(2, 1, 0), window).transpose(2, 3, 1, 0)
else:
raise Exception('AxisError: axis {} is out of bounds for array of dimension {}'.format(axis, arr.ndim))
else:
return _rolling_window(arr, window)
|
import numpy as np
def power_method(A, iter_num=1):
"""
Calculate the first singular vector/value of a target matrix based on the power method.
Parameters
----------
A : numpy array
Target matrix
iter_num : int
Number of iterations
Returns
-------
u : numpy array
first left singular vector of A
s : float
first singular value of A
v : numpy array
first right singular vector of A
"""
# set initial vector q
q = np.random.normal(size=A.shape[1])
q = q / np.linalg.norm(q)
for i in range(iter_num):
q = np.dot(np.dot(A.T, A), q)
v = q / np.linalg.norm(q)
Av = np.dot(A, v)
s = np.linalg.norm(Av)
u = Av / s
return u, s, v
|
mit
|
Python
|
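The `_rolling_window` helper above builds sliding windows with `as_strided`; a concrete 1-D check makes the shape/stride arithmetic visible (NumPy only, values made up):

```python
import numpy as np

a = np.arange(6)                                   # [0 1 2 3 4 5]
window = 3
shape = a.shape[:-1] + (a.shape[-1] - window + 1, window)
strides = a.strides + (a.strides[-1],)
windows = np.lib.stride_tricks.as_strided(a, shape=shape, strides=strides)
print(windows)
# [[0 1 2]
#  [1 2 3]
#  [2 3 4]
#  [3 4 5]]
```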
c793401befa1efed0b5ad1eb77809c23f6855372
|
Fix ES thread mapping.
|
EthanBlackburn/sync-engine,EthanBlackburn/sync-engine,gale320/sync-engine,EthanBlackburn/sync-engine,wakermahmud/sync-engine,EthanBlackburn/sync-engine,wakermahmud/sync-engine,gale320/sync-engine,Eagles2F/sync-engine,closeio/nylas,ErinCall/sync-engine,gale320/sync-engine,ErinCall/sync-engine,gale320/sync-engine,wakermahmud/sync-engine,jobscore/sync-engine,PriviPK/privipk-sync-engine,PriviPK/privipk-sync-engine,PriviPK/privipk-sync-engine,jobscore/sync-engine,EthanBlackburn/sync-engine,nylas/sync-engine,ErinCall/sync-engine,Eagles2F/sync-engine,PriviPK/privipk-sync-engine,wakermahmud/sync-engine,ErinCall/sync-engine,Eagles2F/sync-engine,closeio/nylas,Eagles2F/sync-engine,ErinCall/sync-engine,Eagles2F/sync-engine,nylas/sync-engine,nylas/sync-engine,jobscore/sync-engine,gale320/sync-engine,nylas/sync-engine,closeio/nylas,jobscore/sync-engine,wakermahmud/sync-engine,closeio/nylas,PriviPK/privipk-sync-engine
|
inbox/search/mappings.py
|
inbox/search/mappings.py
|
# TODO[k]: participants as nested, tags too.
# first/last_message_timestamp as {'type': 'date', 'format': 'dateOptionalTime'}
# for range filters and such?
THREAD_MAPPING = {
'properties': {
'namespace_id': {'type': 'string'},
'tags': {'type': 'string'},
'last_message_timestamp': {'type': 'string'},
'object': {'type': 'string'},
'message_ids': {'type': 'string'},
'snippet': {'type': 'string'},
'participants': {'type': 'string'},
'first_message_timestamp': {'type': 'string'},
'id': {'type': 'string'},
'subject': {'type': 'string'}
}
}
# TODO[k]:
# from, to, cc, bcc as nested.
# date as {'type': 'date', 'format': 'dateOptionalTime'} for range filters and such?
MESSAGE_MAPPING = {
'_parent': {
'type': 'thread'
},
'properties': {
'id': {'type': 'string'},
'object': {'type': 'string'},
'namespace_id': {'type': 'string'},
'subject': {'type': 'string'},
'from': {'type': 'string'},
'to': {'type': 'string'},
'cc': {'type': 'string'},
'bcc': {'type': 'string'},
'date': {'type': 'string'},
'thread_id': {'type': 'string'},
'snippet': {'type': 'string'},
'body': {'type': 'string'},
'unread': {'type': 'boolean'},
'files': {'type': 'nested', 'properties': {'size': {'type': 'long'}, 'id': {'type': 'string'}, 'content_type': {'type': 'string'}, 'filename': {'type': 'string'}}},
}
}
# TODO[k]: message._parent = thread
NAMESPACE_INDEX_MAPPING = {
'thread': THREAD_MAPPING,
'message': MESSAGE_MAPPING
}
|
# TODO[k]: participants as nested, tags too.
THREAD_MAPPING = {
'properties': {
'namespace_id': {'type': 'string'},
'tags': {'type': 'string'},
'last_message_timestamp': {'type': 'date', 'format': 'dateOptionalTime'},
'object': {'type': 'string'},
'message_ids': {'type': 'string'},
'snippet': {'type': 'string'},
'participants': {'type': 'string'},
'first_message_timestamp': {'type': 'date', 'format': 'dateOptionalTime'},
'id': {'type': 'string'},
'subject': {'type': 'string'}
}
}
# TODO[k]:
# from, to, cc, bcc as nested.
# date as {'type': 'date', 'format': 'dateOptionalTime'} for range filters and such?
MESSAGE_MAPPING = {
'_parent': {
'type': 'thread'
},
'properties': {
'id': {'type': 'string'},
'object': {'type': 'string'},
'namespace_id': {'type': 'string'},
'subject': {'type': 'string'},
'from': {'type': 'string'},
'to': {'type': 'string'},
'cc': {'type': 'string'},
'bcc': {'type': 'string'},
'date': {'type': 'string'},
'thread_id': {'type': 'string'},
'snippet': {'type': 'string'},
'body': {'type': 'string'},
'unread': {'type': 'boolean'},
'files': {'type': 'nested', 'properties': {'size': {'type': 'long'}, 'id': {'type': 'string'}, 'content_type': {'type': 'string'}, 'filename': {'type': 'string'}}},
}
}
# TODO[k]: message._parent = thread
NAMESPACE_INDEX_MAPPING = {
'thread': THREAD_MAPPING,
'message': MESSAGE_MAPPING
}
|
agpl-3.0
|
Python
|
e0dac0a621cbeed615553e5c3544f9c49de96eb2
|
Subtract 1 from model end_year
|
csdms/wmt-metadata
|
metadata/FrostNumberModel/hooks/pre-stage.py
|
metadata/FrostNumberModel/hooks/pre-stage.py
|
"""A hook for modifying parameter values read from the WMT client."""
import os
import shutil
from wmt.utils.hook import find_simulation_input_file, yaml_dump
from topoflow_utils.hook import assign_parameters
file_list = []
def execute(env):
"""Perform pre-stage tasks for running a component.
Parameters
----------
env : dict
A dict of component parameter values from WMT.
"""
env['end_year'] = long(env['start_year']) + long(env['_run_duration']) - 1
env['fn_out_filename'] = 'frostnumber_output.dat'
assign_parameters(env, file_list)
for fname in file_list:
src = find_simulation_input_file(env[fname])
shutil.copy(src, os.curdir)
yaml_dump('_env.yaml', env)
|
"""A hook for modifying parameter values read from the WMT client."""
import os
import shutil
from wmt.utils.hook import find_simulation_input_file
from topoflow_utils.hook import assign_parameters
file_list = []
def execute(env):
"""Perform pre-stage tasks for running a component.
Parameters
----------
env : dict
A dict of component parameter values from WMT.
"""
env['end_year'] = long(env['start_year']) + long(env['_run_duration'])
env['fn_out_filename'] = 'frostnumber_output.dat'
assign_parameters(env, file_list)
for fname in file_list:
src = find_simulation_input_file(env[fname])
shutil.copy(src, os.curdir)
|
mit
|
Python
|
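The subtraction above makes the simulated year span inclusive: a run of `_run_duration` N starting in `start_year` Y covers Y through Y+N-1, N years in total. A one-line check with made-up values:

```python
start_year, run_duration = 2000, 10                 # made-up values
end_year = start_year + run_duration - 1
assert end_year - start_year + 1 == run_duration    # 2000..2009, ten years
```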
be7ee0ba4cdfab1ef03b0d58913cddb00c572c0f
|
Revise descriptive comments
|
bowen0701/algorithms_data_structures
|
lc0131_palindrome_partitioning.py
|
lc0131_palindrome_partitioning.py
|
"""Leetcode 131. Palindrome Partitioning
Medium
URL: https://leetcode.com/problems/palindrome-partitioning/
Given a string s, partition s such that
every substring of the partition is a palindrome.
Return all possible palindrome partitioning of s.
Example:
Input: "aab"
Output:
[
["aa","b"],
["a","a","b"]
]
"""
class Solution(object):
def _backtrack(self, result, tmps, s, start):
if start == len(s):
# The substring starting at len(s) is empty, so the
# palindrome partition is complete.
result.append(tmps[:])
return None
for i in range(start, len(s)):
# Check partial string s[start:i+1] is palindrome.
partial = s[start:i+1]
if partial == partial[::-1]:
# If yes, append it to tmps.
tmps.append(partial)
# Further check that the remaining string is also a palindrome.
self._backtrack(result, tmps, s, i + 1)
# Backtrack by popping out the top tmps.
tmps.pop()
def partition(self, s):
"""
:type s: str
:rtype: List[List[str]]
Time complexity: O(n*2^n), where n is the length of s.
Space complexity: O(n).
"""
# Apply backtracking.
result = []
tmps = []
start = 0
self._backtrack(result, tmps, s, start)
return result
def main():
s = "aab"
print Solution().partition(s)
if __name__ == '__main__':
main()
|
"""Leetcode 131. Palindrome Partitioning
Medium
URL: https://leetcode.com/problems/palindrome-partitioning/
Given a string s, partition s such that every substring of the partition is a palindrome.
Return all possible palindrome partitioning of s.
Example:
Input: "aab"
Output:
[
["aa","b"],
["a","a","b"]
]
"""
class Solution(object):
def _backtrack(self, result, temps, s, start):
if start == len(s):
result.append(temps[:])
return None
for i in range(start, len(s)):
# Check if palindrome.
partial = s[start:i+1]
if partial == partial[::-1]:
temps.append(s[start:i+1])
self._backtrack(result, temps, s, i + 1)
temps.pop()
def partition(self, s):
"""
:type s: str
:rtype: List[List[str]]
Time complexity: O(n * 2^n), where n is the length of s.
Space complexity: O(n).
"""
# Apply backtracking.
result = []
temps = []
start = 0
self._backtrack(result, temps, s, start)
return result
def main():
s = "aab"
print Solution().partition(s)
if __name__ == '__main__':
main()
|
bsd-2-clause
|
Python
|
9d29061f8520506d798ad75aa296be8dc838aaf7
|
Remove leftover print call in paginator
|
genialis/resolwe,jberci/resolwe,genialis/resolwe,jberci/resolwe
|
resolwe/elastic/pagination.py
|
resolwe/elastic/pagination.py
|
""".. Ignore pydocstyle D400.
==================
Elastic Paginators
==================
Paginator classes used in Elastic app.
.. autoclass:: resolwe.elastic.pagination.LimitOffsetPostPagination
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from rest_framework.pagination import LimitOffsetPagination, _positive_int
def get_query_param(request, key):
"""Get query parameter uniformly for GET and POST requests."""
value = request.query_params.get(key) or request.data.get(key)
if value is None:
raise KeyError()
return value
class LimitOffsetPostPagination(LimitOffsetPagination):
"""Limit/offset paginator.
This is standard limit/offset paginator from Django REST framework,
with difference that it supports passing ``limit`` and ``offset``
attributes also in the body of the request (not just as query
parameter).
"""
def get_limit(self, request):
"""Return limit parameter."""
if self.limit_query_param:
try:
return _positive_int(
get_query_param(request, self.limit_query_param),
strict=True,
cutoff=self.max_limit
)
except (KeyError, ValueError):
pass
return self.default_limit
def get_offset(self, request):
"""Return offset parameter."""
try:
return _positive_int(
get_query_param(request, self.offset_query_param),
)
except (KeyError, ValueError):
return 0
|
""".. Ignore pydocstyle D400.
==================
Elastic Paginators
==================
Paginator classes used in Elastic app.
.. autoclass:: resolwe.elastic.pagination.LimitOffsetPostPagination
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from rest_framework.pagination import LimitOffsetPagination, _positive_int
def get_query_param(request, key):
"""Get query parameter uniformly for GET and POST requests."""
value = request.query_params.get(key) or request.data.get(key)
if value is None:
raise KeyError()
return value
class LimitOffsetPostPagination(LimitOffsetPagination):
"""Limit/offset paginator.
This is standard limit/offset paginator from Django REST framework,
with difference that it supports passing ``limit`` and ``offset``
attributes also in the body of the request (not just as query
parameter).
"""
def get_limit(self, request):
"""Return limit parameter."""
if self.limit_query_param:
try:
print(get_query_param(request, self.limit_query_param))
return _positive_int(
get_query_param(request, self.limit_query_param),
strict=True,
cutoff=self.max_limit
)
except (KeyError, ValueError):
pass
return self.default_limit
def get_offset(self, request):
"""Return offset parameter."""
try:
return _positive_int(
get_query_param(request, self.offset_query_param),
)
except (KeyError, ValueError):
return 0
|
apache-2.0
|
Python
|
ee1effb3a91bca7fcf1c590955f45e5b631a0598
|
Revise documentation
|
hankcs/HanLP,hankcs/HanLP
|
hanlp/pretrained/ner.py
|
hanlp/pretrained/ner.py
|
# -*- coding:utf-8 -*-
# Author: hankcs
# Date: 2019-12-30 20:07
from hanlp_common.constant import HANLP_URL
MSRA_NER_BERT_BASE_ZH = HANLP_URL + 'ner/ner_bert_base_msra_20200104_185735.zip'
'BERT model (:cite:`devlin-etal-2019-bert`) trained on MSRA with 3 entity types.'
MSRA_NER_ALBERT_BASE_ZH = HANLP_URL + 'ner/ner_albert_base_zh_msra_20200111_202919.zip'
'ALBERT model (:cite:`Lan2020ALBERT:`) trained on MSRA with 3 entity types.'
MSRA_NER_ELECTRA_SMALL_ZH = HANLP_URL + 'ner/msra_ner_electra_small_20210807_154832.zip'
'Electra small model (:cite:`clark2020electra:`) trained on MSRA with 26 entity types. F1 = `95.10`'
CONLL03_NER_BERT_BASE_UNCASED_EN = HANLP_URL + 'ner/ner_conll03_bert_base_uncased_en_20200104_194352.zip'
'BERT model (:cite:`devlin-etal-2019-bert`) trained on CoNLL03.'
ALL = {}
|
# -*- coding:utf-8 -*-
# Author: hankcs
# Date: 2019-12-30 20:07
from hanlp_common.constant import HANLP_URL
MSRA_NER_BERT_BASE_ZH = HANLP_URL + 'ner/ner_bert_base_msra_20200104_185735.zip'
'BERT model (:cite:`devlin-etal-2019-bert`) trained on MSRA with 3 entity types.'
MSRA_NER_ALBERT_BASE_ZH = HANLP_URL + 'ner/ner_albert_base_zh_msra_20200111_202919.zip'
'ALBERT model (:cite:`Lan2020ALBERT:`) trained on MSRA with 3 entity types.'
MSRA_NER_ELECTRA_SMALL_ZH = HANLP_URL + 'ner/msra_ner_electra_small_20210807_154832.zip'
'Electra small model (:cite:`clark2020electra:`) trained on MSRA with 3 entity types. F1 = `95.10`'
CONLL03_NER_BERT_BASE_UNCASED_EN = HANLP_URL + 'ner/ner_conll03_bert_base_uncased_en_20200104_194352.zip'
'BERT model (:cite:`devlin-etal-2019-bert`) trained on CoNLL03.'
ALL = {}
|
apache-2.0
|
Python
|
a5d3c78295d951fd29f00fc8d8480c2a518fd615
|
set srid explicit
|
geometalab/drf-utm-zone-info,geometalab/drf-utm-zone-info
|
utm_zone_info/viewsets.py
|
utm_zone_info/viewsets.py
|
from rest_framework import status, viewsets
from rest_framework.response import Response
from utm_zone_info.coordinate_reference_system import utm_zones_for_representing
from utm_zone_info.serializers import GeometrySerializer
class UTMZoneInfoViewSet(viewsets.ViewSet):
"""
A simple ViewSet for posting Points and returning valid utm_zones.
"""
serializer_class = GeometrySerializer
def create(self, request):
serializer = self.serializer_class(data=request.data)
if serializer.is_valid():
geometry = serializer.validated_data['geom']
geometry.srid = serializer.validated_data['srid']
data = dict(
utm_zone_srids=[zone.srid for zone in utm_zones_for_representing(geometry)]
)
return Response(data=data)
else:
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
from rest_framework import status, viewsets
from rest_framework.response import Response
from utm_zone_info.coordinate_reference_system import utm_zones_for_representing
from utm_zone_info.serializers import GeometrySerializer
class UTMZoneInfoViewSet(viewsets.ViewSet):
"""
A simple ViewSet for posting Points and returning valid utm_zones.
"""
serializer_class = GeometrySerializer
def create(self, request):
serializer = self.serializer_class(data=request.data)
if serializer.is_valid():
geometry = serializer.validated_data['geom']
if geometry.srid is None:
geometry.srid = serializer.validated_data['srid']
data = dict(
utm_zone_srids=[zone.srid for zone in utm_zones_for_representing(geometry)]
)
return Response(data=data)
else:
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
isc
|
Python
|
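The behavioral change above: the payload's `srid` used to be applied only when the parsed geometry carried none; now it is assigned unconditionally, so the explicit field always wins. A small sketch with Django's GEOS API (coordinates and SRID are made up; requires the GEOS library):

```python
from django.contrib.gis.geos import Point

geom = Point(11.4, 47.3)   # parsed geometry; may or may not carry an SRID
geom.srid = 4326           # explicit srid from the payload now always wins
```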
b62423f6ccb47a6f4074ec8e95d9861a3bb06874
|
Change error message
|
ViderumGlobal/ckanext-requestdata,ViderumGlobal/ckanext-requestdata,ViderumGlobal/ckanext-requestdata,ViderumGlobal/ckanext-requestdata
|
ckanext/requestdata/logic/validators.py
|
ckanext/requestdata/logic/validators.py
|
from email_validator import validate_email
from ckan.plugins.toolkit import _
from ckan.plugins.toolkit import get_action
def email_validator(key, data, errors, context):
email = data[key]
try:
validate_email(email)
except Exception:
message = _('Please provide a valid email address.')
errors[key].append(message)
def state_validator(key, data, errors, context):
possible_state = ['new', 'open', 'archive']
if data[key] not in possible_state:
message = _('The state parameter must be new, open or archive.')
errors[key].append(message)
def boolean_validator(key, data, errors, context):
if not isinstance(data[key], bool):
message = _('The {0} parameter must be a Boolean value.'
.format(key[0]))
errors[key].append(message)
def members_in_org_validator(key, data, errors, context):
maintainers = data[key].split(',')
model = context['model']
owner_org = data[('owner_org',)]
data_dict = {
'id': owner_org
}
members_in_org = get_action('member_list')(context, data_dict)
# member_list returns more than just users, so we need to extract only
# users
members_in_org = [member for member in members_in_org
if member[1] == 'user']
for email in maintainers:
user = model.User.by_email(email)
user_found = False
if len(user) > 0:
user = user[0]
for member in members_in_org:
if member[0] == user.id:
user_found = True
if not user_found:
message = _('The user with email "{0}" is not part of this '
'organization.'.format(email))
errors[key].append(message)
else:
message = _('The user with email "{0}" does not exist.'
.format(email))
errors[key].append(message)
|
from email_validator import validate_email
from ckan.plugins.toolkit import _
from ckan.plugins.toolkit import get_action
def email_validator(key, data, errors, context):
email = data[key]
try:
validate_email(email)
except Exception:
message = _('Please provide a valid email address.')
errors[key].append(message)
def state_validator(key, data, errors, context):
possible_state = ['new', 'open', 'archive']
if data[key] not in possible_state:
message = _('The state parameter must be new, open or archive.')
errors[key].append(message)
def boolean_validator(key, data, errors, context):
if not isinstance(data[key], bool):
message = _('The {0} parameter must be a Boolean value.'
.format(key[0]))
errors[key].append(message)
def members_in_org_validator(key, data, errors, context):
maintainers = data[key].split(',')
model = context['model']
owner_org = data[('owner_org',)]
data_dict = {
'id': owner_org
}
members_in_org = get_action('member_list')(context, data_dict)
# member_list returns more than just users, so we need to extract only
# users
members_in_org = [member for member in members_in_org
if member[1] == 'user']
for email in maintainers:
user = model.User.by_email(email)
user_found = False
if len(user) > 0:
user = user[0]
for member in members_in_org:
if member[0] == user.id:
user_found = True
if not user_found:
message = _('The user with email "{0}" is not part of this '
'organization.'.format(email))
errors[key].append(message)
else:
message = _('The user with email "{0}" is not part of this '
'organization.'.format(email))
errors[key].append(message)
|
agpl-3.0
|
Python
|
cd1e6ddbf8038c7f65357ec42eaa31b9ddf3f1d6
|
add statistics module
|
lmdu/krait,lmdu/krait,lmdu/krait,lmdu/krait
|
statistics.py
|
statistics.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from PySide.QtSql import *
from db import *
import pyfaidx  # needed by _calc_bases
class Statistics:
meta_table = MetaTable()
class SequenceStatistics(Statistics):
def __init__(self, unit='Mb', letter='ATGC'):
self.table = FastaTable()
self.unit = unit
self.letter = letter
self._bases = {'A':0,'G':0,'C':0,'T':0}
self._total_sequences = 0
self._total_bases = 0
def _calc_bases(self):
for fasta_file in self.table.fetchAll():
fastas = pyfaidx.Fasta(fasta_file, sequence_always_upper=True)
for fasta in fastas:
seq = fasta[:].seq
self._total_bases += len(seq)
for b in self._bases:
self._bases[b] += seq.count(b)
for b in self._bases:
self.meta_table.insert({'name': b, 'value': self._bases[b]})
self.meta_table.insert({'name': 'total_bases', 'value': self._total_bases})
return self._bases
def _db_bases(self):
for data in self.meta_table.query("SELECT * FROM meta WHERE name IN ('A', 'T', 'G', 'C')"):
self._bases[data.name] = data.value
@property
def bases(self):
if not self._bases:
self._db_bases() or self._calc_bases()
return self._bases
def getSequenceCount(self):
if not self._total_sequences:
seq_table = SequenceTable()
self._total_sequences = seq_table.get("SELECT COUNT(1) FROM sequence LIMIT 1")
return self._total_sequences
def getSize(self):
'''
get sequence total length A+G+T+C+N
'''
if not self._total_bases:
self._total_bases = self.meta_table.getMeta('total_bases')
if not self._total_bases:
self._calc_bases()
return self._total_bases
def getNs(self):
return self._total_bases - self.getValidSize()
def getValidSize(self):
'''
get sequence valid total length A+G+T+C
'''
return sum(self.bases.values())
def getGCContent(self):
gc = self.bases['G'] + self.bases['C']
return round(gc/float(self.getValidSize()), 2)
def getScaleSize(self):
scales = {'Mb': 1000000.0, 'Kb': 1000.0}
if self.letter == 'ATGC':
total = self.getValidSize()
elif self.letter == 'ATGCN':
total = self.getSize()
return total/scales[self.unit]
def getRelativeAbundance(self, counts):
return round(counts/self.getScaleSize(), 2)
def getRelativeDensity(self, lengths):
return round(lengths/self.getScaleSize(), 2)
class MicrosatelliteStatistics(Statistics):
def __init__(self):
self.ssr_table = MicrosatelliteTable()
@property
def counts(self):
return self.ssr_table.recordCounts()
@property
def lengths(self):
return self.ssr_table.get("SELECT SUM(length) FROM ssr")
def motifLength(self):
sql = "SELECT length(motif) AS type, COUNT(1) AS count FROM ssr GROUP BY type"
return {row.type: row.count for row in self.ssr_table.query(sql)}
def motifType(self):
sql = "SELECT standard, COUNT(1) AS count FROM ssr GROUP BY standard"
return {row.standard: row.count for row in self.ssr_table.query(sql)}
def motifRepeat(self):
sql = "SELECT repeat, COUNT(1) AS count FROM ssr GROUP BY repeat"
return {row.repeat: row.count for row in self.ssr_table.query(sql)}
def report(self):
pass
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from PySide.QtSql import *
from db import *
import pyfaidx  # needed by _calc_bases
class Statistics:
meta_table = MetaTable()
class SequenceStatistics(Statistics):
def __init__(self):
self.table = FastaTable()
self._bases = {'A':0,'G':0,'C':0,'T':0}
self._total_sequences = 0
self._total_bases = 0
def _calc_bases(self):
for fasta_file in self.table.fetchAll():
fastas = pyfaidx.Fasta(fasta_file, sequence_always_upper=True)
for fasta in fastas:
seq = fasta[:].seq
self._total_bases += len(seq)
for b in self._bases:
self._bases[b] += seq.count(b)
for b in self._bases:
self.meta_table.insert({'name': b, 'value': self._bases[b]})
self.meta_table.insert({'name': 'total_bases', 'value': self._total_bases})
return self._bases
def _db_bases(self):
for data in self.meta_table.query("SELECT * FROM meta WHERE name IN ('A', 'T', 'G', 'C')"):
self._bases[data.name] = data.value
@property
def bases(self):
if not self._bases:
self._db_bases() or self._calc_bases()
return self._bases
def getSequenceCount(self):
if not self._total_sequences:
seq_table = SequenceTable()
self._total_sequences = seq_table.get("SELECT COUNT(1) FROM sequence LIMIT 1")
return self._total_sequences
def getSize(self):
'''
get sequence total length A+G+T+C+N
'''
if not self._total_bases:
self._total_bases = self.meta_table.getMeta('total_bases')
if not self._total_bases:
self._calc_bases()
return self._total_bases
def getNs(self):
return self.total_bases - self.getValidSize()
def getValidSize(self):
'''
get sequence valid total length A+G+T+C
'''
return sum(self._bases.values())
def getGCContent(self):
gc = self._bases['G'] + self._bases['C']
return round(gc/float(self.getValidSize()), 2)
def getRelativeAbundance(self, counts):
pass
def getRelativeDensity(self, lengths):
pass
|
agpl-3.0
|
Python
|
09e8dd8ed521105aedeb9d35234998d7fa82bb4d
|
Format max line length to 79.
|
freshbooks/sqlcop
|
sqlcop/cli.py
|
sqlcop/cli.py
|
from __future__ import print_function
import sys
import sqlparse
import optparse
from sqlcop.checks.cross_join import CrossJoinCheck
from sqlcop.checks.order_by_count import OrderByCountCheck
def parse_file(filename):
try:
return open(filename, 'r').readlines()
except UnicodeDecodeError:
# It's unclear whether or not something still relies on the ascii
# encoding so I've only changed it to use utf-8 on exception.
return open(filename, 'r', encoding="utf-8").readlines()
CHECKS = (
(CrossJoinCheck, 'query contains cross join'),
(OrderByCountCheck, 'query contains a count with an order by clause')
)
def check_query(options, el):
"""
Run each of the defined checks on a query.
"""
stmt = sqlparse.parse(el)
checks = (
(check_class(**options), message)
for check_class, message in CHECKS
)
for check in checks:
if check[0](stmt[0]):
return False, check[1]
return True, ''
def main():
parser = optparse.OptionParser('sqlcop')
parser.add_option(
'--db-urls',
help=(
'Comma-separated db urls. '
'Used to fetch schema for the database so sqlcop '
'can make more accurate judgement based on the schema'
)
)
opt, args = parser.parse_args()
if len(args) < 1:
parser.error('SQL file required')
lines = parse_file(args[0])
db_urls = opt.db_urls.split(',')
if not isinstance(db_urls, list):
db_urls = list(db_urls)
failed = False
options = {'db_urls': db_urls}
last_comment = ''
for line in lines:
passed, message = check_query(options, line)
if not passed:
failed = True
print_message(message, line, last_comment)
if line.startswith('-- '):
last_comment = line
exit(failed)
def exit(failed):
sys.exit(255 if failed else 0)
def print_message(message, query, last_comment):
print("FAILED - %s" % (message))
print("-" * 70)
print()
print("Query:")
print("%s" % query)
if last_comment:
print("Preceding SQL Comment:")
print(last_comment)
|
from __future__ import print_function
import sys
import sqlparse
import optparse
from sqlcop.checks.cross_join import CrossJoinCheck
from sqlcop.checks.order_by_count import OrderByCountCheck
def parse_file(filename):
try:
return open(filename, 'r').readlines()
except UnicodeDecodeError:
# It's unclear whether or not something still relies on the ascii encoding so I've only changed it to use utf-8
# on exception.
return open(filename, 'r', encoding="utf-8").readlines()
CHECKS = (
(CrossJoinCheck, 'query contains cross join'),
(OrderByCountCheck, 'query contains a count with an order by clause')
)
def check_query(options, el):
"""
Run each of the defined checks on a query.
"""
stmt = sqlparse.parse(el)
checks = (
(check_class(**options), message)
for check_class, message in CHECKS
)
for check in checks:
if check[0](stmt[0]):
return False, check[1]
return True, ''
def main():
parser = optparse.OptionParser('sqlcop')
parser.add_option(
'--db-urls',
help=(
'Comma-separated db urls. '
'Used to fetch schema for the database so sqlcop '
'can make more accurate judgement based on the schema'
)
)
opt, args = parser.parse_args()
if len(args) < 1:
parser.error('SQL file required')
lines = parse_file(args[0])
db_urls = opt.db_urls.split(',')
if not isinstance(db_urls, list):
db_urls = list(db_urls)
failed = False
options = {'db_urls': db_urls}
last_comment = ''
for line in lines:
passed, message = check_query(options, line)
if not passed:
failed = True
print_message(message, line, last_comment)
if line.startswith('-- '):
last_comment = line
exit(failed)
def exit(failed):
sys.exit(255 if failed else 0)
def print_message(message, query, last_comment):
print("FAILED - %s" % (message))
print("-" * 70)
print()
print("Query:")
print("%s" % query)
if last_comment:
print("Preceding SQL Comment:")
print(last_comment)
|
bsd-3-clause
|
Python
|
51257ca1ebb61d48b8c8dd5b1562fdc73e4ecc99
|
Load .solv file from testdata
|
openSUSE/sat-solver-bindings,openSUSE/sat-solver-bindings,openSUSE/sat-solver-bindings,openSUSE/sat-solver-bindings,openSUSE/sat-solver-bindings,openSUSE/sat-solver-bindings
|
bindings/python/tests/relation.py
|
bindings/python/tests/relation.py
|
#
# test Relation
#
# Relations are the primary means to specify dependencies.
# Relations combine names and version through an operator.
# Relations can be compared (<=> operator) or matched (=~ operator)
#
# The following operators are defined:
# REL_GT: greater than
# REL_EQ: equals
# REL_GE: greater equal
# REL_LT: less than
# REL_NE: not equal
# REL_LE: less equal
# Future extensions (not fully defined currently)
# REL_AND: and
# REL_OR: or
# REL_WITH: with
# REL_NAMESPACE: namespace
#
#
import unittest
import sys
sys.path.insert(0, '../../../build/bindings/python')
import satsolver
class TestSequenceFunctions(unittest.TestCase):
def setUp(self):
self.pool = satsolver.Pool()
assert self.pool
self.repo = satsolver.Repo( self.pool, "test" )
assert self.repo
self.pool.set_arch("i686")
self.repo = self.pool.add_solv( "../../testdata/os11-biarch.solv" )
assert self.repo.size() > 0
def test_relation_accessors(self):
rel1 = satsolver.Relation( self.pool, "A" )
assert rel1
assert rel1.name() == "A"
assert rel1.op() == 0
assert rel1.evr() == None
rel2 = satsolver.Relation( self.pool, "A", satsolver.REL_EQ, "1.0-0" )
assert rel2
assert rel2.name() == "A"
assert rel2.op() == satsolver.REL_EQ
assert rel2.evr() == "1.0-0"
def test_providers(self):
rel = self.pool.create_relation( "glibc", satsolver.REL_GT, "2.7" )
for s in self.pool.providers(rel):
print s, "provides ", rel
assert True
def test_relation(self):
rel = self.pool.create_relation( "A", satsolver.REL_EQ, "1.0-0" )
assert rel
print "Relation: ", rel
i = 0
for s in self.repo:
i = i + 1
if i > 10:
break
if not s.provides().empty():
print "%s provides %s" % (s, s.provides().get(1))
j = 0
for p in s.provides():
j = j + 1
if j > 3:
break
if p is not None:
res1 = cmp(p, rel)
print p, " cmp ", rel, " => ", res1
res2 = p.match(rel)
print p, " match ", rel, " => ", res1
if __name__ == '__main__':
unittest.main()
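# NOTE (editorial, not part of the original commit): in test_relation() above,
# the second print statement reports res1 again; res2 (the match result) is
# computed but never displayed. The typo is carried over unchanged from the
# pre-commit version of the test.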
|
#
# test Relation
#
# Relations are the primary means to specify dependencies.
# Relations combine names and version through an operator.
# Relations can be compared (<=> operator) or matched (=~ operator)
#
# The following operators are defined:
# REL_GT: greater than
# REL_EQ: equals
# REL_GE: greater equal
# REL_LT: less than
# REL_NE: not equal
# REL_LE: less equal
# Future extensions (not fully defined currently)
# REL_AND: and
# REL_OR: or
# REL_WITH: with
# REL_NAMESPACE: namespace
#
#
import unittest
import sys
sys.path.insert(0, '../../../build/bindings/python')
import satsolver
class TestSequenceFunctions(unittest.TestCase):
def setUp(self):
self.pool = satsolver.Pool()
assert self.pool
self.repo = satsolver.Repo( self.pool, "test" )
assert self.repo
self.pool.set_arch("i686")
self.repo = self.pool.add_solv( "os11-biarch.solv" )
assert self.repo.size() > 0
def test_relation_accessors(self):
rel1 = satsolver.Relation( self.pool, "A" )
assert rel1
assert rel1.name() == "A"
assert rel1.op() == 0
assert rel1.evr() == None
rel2 = satsolver.Relation( self.pool, "A", satsolver.REL_EQ, "1.0-0" )
assert rel2
assert rel2.name() == "A"
assert rel2.op() == satsolver.REL_EQ
assert rel2.evr() == "1.0-0"
def test_providers(self):
rel = self.pool.create_relation( "glibc", satsolver.REL_GT, "2.7" )
for s in self.pool.providers(rel):
print s, "provides ", rel
assert True
def test_relation(self):
rel = self.pool.create_relation( "A", satsolver.REL_EQ, "1.0-0" )
assert rel
print "Relation: ", rel
i = 0
for s in self.repo:
i = i + 1
if i > 10:
break
if not s.provides().empty():
print "%s provides %s" % (s, s.provides().get(1))
j = 0
for p in s.provides():
j = j + 1
if j > 3:
break
if p is not None:
res1 = cmp(p, rel)
print p, " cmp ", rel, " => ", res1
res2 = p.match(rel)
print p, " match ", rel, " => ", res1
if __name__ == '__main__':
unittest.main()
|
bsd-3-clause
|
Python
|
e64d922a7e7c64921c90d81c44014f7287ba83fa
|
disable logging in travis
|
dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq
|
.travis/localsettings.py
|
.travis/localsettings.py
|
import os
####### Configuration for CommCareHQ Running on Travis-CI #####
from docker.dockersettings import *
USE_PARTITIONED_DATABASE = os.environ.get('USE_PARTITIONED_DATABASE', 'no') == 'yes'
PARTITION_DATABASE_CONFIG = get_partitioned_database_config(USE_PARTITIONED_DATABASE)
BASE_ADDRESS = '{}:8000'.format(os.environ.get('WEB_TEST_PORT_8000_TCP_ADDR', 'localhost'))
####### S3 mock server config ######
S3_BLOB_DB_SETTINGS = {"url": "http://localhost:5000"}
KAFKA_URL = 'kafka:9092'
######## Email setup ########
# email settings: these ones are the custom hq ones
EMAIL_LOGIN = "notifications@dimagi.com"
EMAIL_PASSWORD = "******"
EMAIL_SMTP_HOST = "smtp.gmail.com"
EMAIL_SMTP_PORT = 587
EMAIL_BACKEND='django.core.mail.backends.console.EmailBackend'
####### Bitly ########
BITLY_LOGIN = None
####### Jar signing config ########
_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
JAR_SIGN = dict(
jad_tool = os.path.join(_ROOT_DIR, "corehq", "apps", "app_manager", "JadTool.jar"),
key_store = os.path.join(_ROOT_DIR, "InsecureTestingKeyStore"),
key_alias = "javarosakey",
store_pass = "onetwothreefourfive",
key_pass = "onetwothreefourfive",
)
AUDIT_MODEL_SAVE = ['django.contrib.auth.models.User']
AUDIT_ADMIN_VIEWS = False
SECRET_KEY = 'secrettravis'
# No logging
LOCAL_LOGGING_HANDLERS = {
'null': {
'level': 'DEBUG',
'class': 'django.utils.log.NullHandler',
},
}
LOCAL_LOGGING_LOGGERS = {
'': {
'level': 'CRITICAL',
'handler': 'null',
'propagate': True,
},
'pillowtop': {
'level': 'CRITICAL',
'handler': 'null',
'propagate': True,
},
'notify': {
'level': 'CRITICAL',
'handler': 'null',
'propagate': True,
},
}
PHONE_TIMEZONES_HAVE_BEEN_PROCESSED = True
PHONE_TIMEZONES_SHOULD_BE_PROCESSED = True
ENABLE_PRELOGIN_SITE = True
TESTS_SHOULD_TRACK_CLEANLINESS = True
UNIT_TESTING = True
LOCAL_APPS = (
'testapps.test_elasticsearch',
'testapps.test_pillowtop',
)
PILLOWTOP_MACHINE_ID = 'testhq'
ELASTICSEARCH_VERSION = 1.7
CACHE_REPORTS = True
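# NOTE (editorial, not part of the original file): LOCAL_LOGGING_HANDLERS and
# LOCAL_LOGGING_LOGGERS above only take effect if the base settings fold them
# into Django's dictConfig-style LOGGING, along the (assumed) lines of:
#
#     LOGGING['handlers'].update(LOCAL_LOGGING_HANDLERS)
#     LOGGING['loggers'].update(LOCAL_LOGGING_LOGGERS)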
|
import os
####### Configuration for CommCareHQ Running on Travis-CI #####
from docker.dockersettings import *
USE_PARTITIONED_DATABASE = os.environ.get('USE_PARTITIONED_DATABASE', 'no') == 'yes'
PARTITION_DATABASE_CONFIG = get_partitioned_database_config(USE_PARTITIONED_DATABASE)
BASE_ADDRESS = '{}:8000'.format(os.environ.get('WEB_TEST_PORT_8000_TCP_ADDR', 'localhost'))
####### S3 mock server config ######
S3_BLOB_DB_SETTINGS = {"url": "http://localhost:5000"}
KAFKA_URL = 'kafka:9092'
######## Email setup ########
# email settings: these ones are the custom hq ones
EMAIL_LOGIN = "notifications@dimagi.com"
EMAIL_PASSWORD = "******"
EMAIL_SMTP_HOST = "smtp.gmail.com"
EMAIL_SMTP_PORT = 587
EMAIL_BACKEND='django.core.mail.backends.console.EmailBackend'
####### Bitly ########
BITLY_LOGIN = None
####### Jar signing config ########
_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
JAR_SIGN = dict(
jad_tool = os.path.join(_ROOT_DIR, "corehq", "apps", "app_manager", "JadTool.jar"),
key_store = os.path.join(_ROOT_DIR, "InsecureTestingKeyStore"),
key_alias = "javarosakey",
store_pass = "onetwothreefourfive",
key_pass = "onetwothreefourfive",
)
AUDIT_MODEL_SAVE = ['django.contrib.auth.models.User']
AUDIT_ADMIN_VIEWS = False
SECRET_KEY = 'secrettravis'
# No logging
LOGGING = {
'version': 1,
'handlers': {
'null': {
'level': 'DEBUG',
'class': 'django.utils.log.NullHandler',
},
},
'loggers': {
'': {
'level': 'CRITICAL',
'handler': 'null',
'propagate': False,
},
'south': {
'level': 'CRITICAL',
'handler': 'null',
'propagate': False,
},
'pillowtop': {
'level': 'CRITICAL',
'handler': 'null',
'propagate': False,
}
}
}
PHONE_TIMEZONES_HAVE_BEEN_PROCESSED = True
PHONE_TIMEZONES_SHOULD_BE_PROCESSED = True
ENABLE_PRELOGIN_SITE = True
TESTS_SHOULD_TRACK_CLEANLINESS = True
UNIT_TESTING = True
LOCAL_APPS = (
'testapps.test_elasticsearch',
'testapps.test_pillowtop',
)
PILLOWTOP_MACHINE_ID = 'testhq'
ELASTICSEARCH_VERSION = 1.7
CACHE_REPORTS = True
|
bsd-3-clause
|
Python
|
6f69a770ef3b55a7d846abfc306e41025131d8a6
|
Fix FeedEntry custom model admin
|
jpadilla/feedleap,jpadilla/feedleap
|
apps/feeds/admin.py
|
apps/feeds/admin.py
|
from django.contrib import admin
from .models import Feed, FeedEntry
class FeedAdmin(admin.ModelAdmin):
list_display = ('feed_url', 'created_by')
class FeedEntryAdmin(admin.ModelAdmin):
list_display = ('title', 'link', 'feed',
'feed_created_by', 'added_to_kippt')
def feed_created_by(self, obj):
return obj.feed.created_by
feed_created_by.short_description = 'Created by'
admin.site.register(Feed, FeedAdmin)
admin.site.register(FeedEntry, FeedEntryAdmin)
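# NOTE (editorial, not part of the original commit): ModelAdmin.list_display
# does not support "__" relation lookups, so the old 'feed__created_by' entry
# was rejected by Django; exposing the related value through the
# feed_created_by callable above is the supported pattern.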
|
from django.contrib import admin
from .models import Feed, FeedEntry
class FeedEntryAdmin(admin.ModelAdmin):
list_display = ('title', 'link', 'feed',
'feed__created_by', 'added_to_kippt')
admin.site.register(Feed)
admin.site.register(FeedEntry, FeedEntryAdmin)
|
mit
|
Python
|
034070458b18805d7282f2bb7f0880f688bf3e6e
|
Remove all subdir functionality
|
anjos/website,anjos/website
|
stuff/urls.py
|
stuff/urls.py
|
import settings
from django.conf.urls.defaults import *
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
(r'^admin/(.*)', admin.site.root),
(r'^publication/', include('stuff.publications.urls')),
(r'^file/', include('stuff.files.urls')),
(r'^photo/', include('stuff.picasaweb.urls')),
(r'^bookmark/', include('stuff.delicious.urls')),
(r'^project/', include('stuff.projects.urls')),
(r'^multimedia/', include('stuff.multimedia.urls')),
(r'^git/', include('stuff.dit.urls')),
# (r'^db/(.*)', databrowse.site.root),
(r'^$', 'stuff.views.index'),
# Media serving
(r'^%smedia/(?P<path>.*)$',
'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT,
'show_indexes': True}
),
)
|
import settings
from django.conf.urls.defaults import *
from django.contrib import admin
admin.autodiscover()
subdir = ''
urlpatterns = patterns('',
(r'^%sadmin/(.*)' % subdir, admin.site.root),
(r'^%spublication/' % subdir, include('stuff.publications.urls')),
(r'^%sfile/' % subdir, include('stuff.files.urls')),
(r'^%sphoto/' % subdir, include('stuff.picasaweb.urls')),
(r'^%sbookmark/' % subdir, include('stuff.delicious.urls')),
(r'^%sproject/' % subdir, include('stuff.projects.urls')),
(r'^%smultimedia/' % subdir, include('stuff.multimedia.urls')),
# (r'^%sdb/(.*)' % subdir, databrowse.site.root),
(r'^%s$' % subdir, 'stuff.views.index'),
# Media serving
(r'^%smedia/(?P<path>.*)$' % subdir,
'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT,
'show_indexes': True}
),
)
|
bsd-2-clause
|
Python
|
be800d70ef3085035bc8330037f0881203e978cc
|
fix SSL vdsClient connections
|
oVirt/ovirt-hosted-engine-ha,oVirt/ovirt-hosted-engine-ha,oVirt/ovirt-hosted-engine-ha,oVirt/ovirt-hosted-engine-ha
|
ovirt_hosted_engine_ha/broker/submonitor_util.py
|
ovirt_hosted_engine_ha/broker/submonitor_util.py
|
#
# ovirt-hosted-engine-ha -- ovirt hosted engine high availability
# Copyright (C) 2013 Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
import logging
import socket
import time
from otopi import util
from vdsm import vdscli
from . import constants
def run_vds_client_cmd(address, use_ssl, command):
"""
Run the passed in command name from the vdsClient library and either
throw an exception with the error message or return the results.
"""
# FIXME pass context to allow for shared or persistent vdsm connection
log = logging.getLogger('SubmonitorUtil')
log.debug("Connecting to vdsClient at %s with ssl=%r", address, use_ssl)
vdsClient = util.loadModule(
path=constants.VDS_CLIENT_DIR,
name='vdsClient'
)
if vdsClient._glusterEnabled:
serv = vdsClient.ge.GlusterService()
else:
serv = vdsClient.service()
serv.useSSL = use_ssl
if hasattr(vdscli, 'cannonizeAddrPort'):
server, server_port = vdscli.cannonizeAddrPort(
address
).split(':', 1)
serv.do_connect(server, server_port)
else:
host_port = vdscli.cannonizeHostPort(address)
serv.do_connect(host_port)
log.debug("Connected")
method = getattr(serv.s, command)
retry = 0
while retry < constants.VDS_CLIENT_MAX_RETRY:
try:
response = method()
break
except socket.error:
log.debug("Error", exc_info=True)
retry += 1
time.sleep(1)
if retry >= constants.VDS_CLIENT_MAX_RETRY:
raise Exception("VDSM initialization timeout")
if response['status']['code'] != 0:
raise Exception("Error {0} from {1}: {2}",
response['status']['code'], command,
response['status']['message'])
return response
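# NOTE (editorial sketch, not part of the original module): a hypothetical
# call to the helper above. 'getVdsStats' is a standard vdsm verb; the
# address and the use of SSL are illustrative assumptions:
#
#     stats = run_vds_client_cmd('localhost:54321', True, 'getVdsStats')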
|
#
# ovirt-hosted-engine-ha -- ovirt hosted engine high availability
# Copyright (C) 2013 Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
import logging
import socket
import time
from otopi import util
from vdsm import vdscli
from . import constants
def run_vds_client_cmd(address, use_ssl, command):
"""
Run the passed in command name from the vdsClient library and either
throw an exception with the error message or return the results.
"""
# FIXME pass context to allow for shared or persistent vdsm connection
log = logging.getLogger('SubmonitorUtil')
log.debug("Connecting to vdsClient at %s with ssl=%r", address, use_ssl)
vdsClient = util.loadModule(
path=constants.VDS_CLIENT_DIR,
name='vdsClient'
)
if vdsClient._glusterEnabled:
serv = vdsClient.ge.GlusterService()
else:
serv = vdsClient.service()
serv.use_ssl = use_ssl
if hasattr(vdscli, 'cannonizeAddrPort'):
server, server_port = vdscli.cannonizeAddrPort(
address
).split(':', 1)
serv.do_connect(server, server_port)
else:
host_port = vdscli.cannonizeHostPort(address)
serv.do_connect(host_port)
log.debug("Connected")
method = getattr(serv.s, command)
retry = 0
while retry < constants.VDS_CLIENT_MAX_RETRY:
try:
response = method()
break
except socket.error:
log.debug("Error", exc_info=True)
retry += 1
time.sleep(1)
if retry >= constants.VDS_CLIENT_MAX_RETRY:
raise Exception("VDSM initialization timeout")
if response['status']['code'] != 0:
raise Exception("Error {0} from {1}: {2}",
response['status']['code'], command,
response['status']['message'])
return response
|
lgpl-2.1
|
Python
|
bf460618bc0b2e535de46a0dc0ddb08b8680ab6c
|
Stop to use the __future__ module.
|
openstack/octavia,openstack/octavia,openstack/octavia
|
octavia/db/migration/alembic_migrations/env.py
|
octavia/db/migration/alembic_migrations/env.py
|
# Copyright 2014 Rackspace
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
from alembic import context
from sqlalchemy import create_engine
from sqlalchemy import pool
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
try:
octavia_config = config.octavia_config
except AttributeError:
print("Error: Please use the octavia-db-manage command for octavia"
" alembic actions.")
sys.exit(1)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = None
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
context.configure(url=octavia_config.database.connection,
target_metadata=target_metadata)
with context.begin_transaction():
context.run_migrations()
def run_migrations_online():
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
engine = create_engine(
octavia_config.database.connection,
poolclass=pool.NullPool)
connection = engine.connect()
context.configure(
connection=connection,
target_metadata=target_metadata)
try:
with context.begin_transaction():
context.run_migrations()
finally:
connection.close()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()
|
# Copyright 2014 Rackspace
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import with_statement
import sys
from alembic import context
from sqlalchemy import create_engine
from sqlalchemy import pool
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
try:
octavia_config = config.octavia_config
except AttributeError:
print("Error: Please use the octavia-db-manage command for octavia"
" alembic actions.")
sys.exit(1)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = None
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
context.configure(url=octavia_config.database.connection,
target_metadata=target_metadata)
with context.begin_transaction():
context.run_migrations()
def run_migrations_online():
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
engine = create_engine(
octavia_config.database.connection,
poolclass=pool.NullPool)
connection = engine.connect()
context.configure(
connection=connection,
target_metadata=target_metadata)
try:
with context.begin_transaction():
context.run_migrations()
finally:
connection.close()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()
|
apache-2.0
|
Python
|
25866e86338ac2cf0f042dded6a343a00b5f7241
|
Bump version to 0.5.0-alpha.4.
|
fls-bioinformatics-core/RnaChipIntegrator
|
rnachipintegrator/__init__.py
|
rnachipintegrator/__init__.py
|
# Current version of the library
__version__ = '0.5.0-alpha.4'
def get_version():
"""Returns a string with the current version of the library (e.g., "0.2.0")
"""
return __version__
|
# Current version of the library
__version__ = '0.5.0-alpha.3'
def get_version():
"""Returns a string with the current version of the library (e.g., "0.2.0")
"""
return __version__
|
artistic-2.0
|
Python
|
1193980f77d715e1ca2b22bcb8a4b74eaed1122c
|
Add missing import
|
lino-framework/book,lsaffre/lino_book,lsaffre/lino_book,lsaffre/lino_book,lino-framework/book,lino-framework/book,lino-framework/book
|
lino_book/projects/polls/test.py
|
lino_book/projects/polls/test.py
|
from lino.utils.test import DocTest
from lino.utils.djangotest import WebIndexTestCase
|
from lino.utils.djangotest import WebIndexTestCase
|
unknown
|
Python
|
84159dae072424b0cc8d457b9e64e7b2490f08df
|
Remove some redundant URL configs.
|
hello-base/web,hello-base/web,hello-base/web,hello-base/web
|
base/components/people/urls.py
|
base/components/people/urls.py
|
from django.conf.urls import patterns, url
from django.http import Http404
from django.views.generic.base import RedirectView
from multiurl import ContinueResolving, multiurl
from .views import (GroupBrowseView, GroupDetailView, GroupDiscographyView, GroupMembershipView,
IdolBrowseView, IdolDetailView, IdolDiscographyView, StaffBrowseView, StaffDetailView)
urlpatterns = patterns('',
# MultiURL allows us to unite all of the music under a simpler URL.
multiurl(
url(r'^(?P<slug>[-\w]+)/$', name='group-detail', view=GroupDetailView.as_view()),
url(r'^(?P<slug>[-\w]+)/$', name='idol-detail', view=IdolDetailView.as_view()),
catch=(Http404, ContinueResolving)
),
url(r'^groups/browse/$', name='group-browse', view=GroupBrowseView.as_view()),
url(r'^groups/$', name='group-list', view=RedirectView.as_view(url='/groups/browse/')),
url(r'^idols/browse/$', name='idol-browse', view=IdolBrowseView.as_view()),
url(r'^idols/$', name='idol-list', view=RedirectView.as_view(url='/idols/browse/')),
url(r'^staff/browse/$', name='staff-browse', view=StaffBrowseView.as_view()),
url(r'^staff/(?P<slug>[-\w]+)/$', name='staff-detail', view=StaffDetailView.as_view()),
url(r'^staff/$', name='staff-list', view=RedirectView.as_view(url='/staff/browse/'))
)
|
from django.conf.urls import patterns, url
from django.http import Http404
from django.views.generic.base import RedirectView
from multiurl import ContinueResolving, multiurl
from .views import (GroupBrowseView, GroupDetailView, GroupDiscographyView, GroupMembershipView,
IdolBrowseView, IdolDetailView, IdolDiscographyView, StaffBrowseView, StaffDetailView)
urlpatterns = patterns('',
# MultiURL allows us to unite all of the music under a simpler URL.
multiurl(
url(r'^(?P<slug>[-\w]+)/$', name='group-detail', view=GroupDetailView.as_view()),
url(r'^(?P<slug>[-\w]+)/$', name='idol-detail', view=IdolDetailView.as_view()),
# url('^music/(?P<slug>[-\w]+)/$', name='album-detail', view=AlbumDetailView.as_view()),
# url('^music/(?P<slug>[-\w]+)/$', name='single-detail', view=SingleDetailView.as_view()),
catch=(Http404, ContinueResolving)
),
url(r'^groups/browse/$', name='group-browse', view=GroupBrowseView.as_view()),
url(r'^groups/(?P<slug>[-\w]+)/discography/$', name='group-discography', view=GroupDiscographyView.as_view()),
url(r'^groups/(?P<slug>[-\w]+)/members/$', name='group-membership', view=GroupMembershipView.as_view()),
url(r'^groups/(?P<slug>[-\w]+)/$', name='group-detail', view=GroupDetailView.as_view()),
url(r'^groups/$', name='group-list', view=RedirectView.as_view(url='/groups/browse/')),
url(r'^idols/browse/$', name='idol-browse', view=IdolBrowseView.as_view()),
url(r'^idols/(?P<slug>[-\w]+)/discography/$', name='idol-discography', view=IdolDiscographyView.as_view()),
url(r'^idols/(?P<slug>[-\w]+)/$', name='idol-detail', view=IdolDetailView.as_view()),
url(r'^idols/$', name='idol-list', view=RedirectView.as_view(url='/idols/browse/')),
url(r'^staff/browse/$', name='staff-browse', view=StaffBrowseView.as_view()),
url(r'^staff/(?P<slug>[-\w]+)/$', name='staff-detail', view=StaffDetailView.as_view()),
url(r'^staff/$', name='staff-list', view=RedirectView.as_view(url='/staff/browse/'))
)
|
apache-2.0
|
Python
|
fc5b13f413713cacd147bbd29daac5df7f5bd2de
|
update notification sending tests
|
wwitzel3/awx,snahelou/awx,wwitzel3/awx,snahelou/awx,wwitzel3/awx,wwitzel3/awx,snahelou/awx,snahelou/awx
|
awx/main/tests/unit/test_tasks.py
|
awx/main/tests/unit/test_tasks.py
|
import pytest
from contextlib import contextmanager
from awx.main.models import (
UnifiedJob,
Notification,
)
from awx.main.tasks import (
send_notifications,
run_administrative_checks,
)
from awx.main.task_engine import TaskEnhancer
@contextmanager
def apply_patches(_patches):
[p.start() for p in _patches]
yield
[p.stop() for p in _patches]
def test_send_notifications_not_list():
with pytest.raises(TypeError):
send_notifications(None)
def test_send_notifications_job_id(mocker):
with mocker.patch('awx.main.models.UnifiedJob.objects.get'):
send_notifications([], job_id=1)
assert UnifiedJob.objects.get.called
assert UnifiedJob.objects.get.called_with(id=1)
def test_send_notifications_list(mocker):
patches = list()
mock_job = mocker.MagicMock(spec=UnifiedJob)
patches.append(mocker.patch('awx.main.models.UnifiedJob.objects.get', return_value=mock_job))
mock_notifications = [mocker.MagicMock(spec=Notification, subject="test", body={'hello': 'world'})]
patches.append(mocker.patch('awx.main.models.Notification.objects.filter', return_value=mock_notifications))
with apply_patches(patches):
send_notifications([1,2], job_id=1)
assert Notification.objects.filter.call_count == 1
assert mock_notifications[0].status == "successful"
assert mock_notifications[0].save.called
assert mock_job.notifications.add.called
assert mock_job.notifications.add.called_with(*mock_notifications)
@pytest.mark.parametrize("current_instances,call_count", [(91, 2), (89,1)])
def test_run_admin_checks_usage(mocker, current_instances, call_count):
patches = list()
patches.append(mocker.patch('awx.main.tasks.User'))
mock_te = mocker.Mock(spec=TaskEnhancer)
mock_te.validate_enhancements.return_value = {'instance_count': 100, 'current_instances': current_instances, 'date_warning': True}
patches.append(mocker.patch('awx.main.tasks.TaskEnhancer', return_value=mock_te))
mock_sm = mocker.Mock()
patches.append(mocker.patch('awx.main.tasks.send_mail', wraps=mock_sm))
with apply_patches(patches):
run_administrative_checks()
assert mock_sm.called
if call_count == 2:
assert '90%' in mock_sm.call_args_list[0][0][0]
else:
assert 'expire' in mock_sm.call_args_list[0][0][0]
|
import pytest
from contextlib import contextmanager
from awx.main.models import (
UnifiedJob,
Notification,
)
from awx.main.tasks import (
send_notifications,
run_administrative_checks,
)
from awx.main.task_engine import TaskEnhancer
@contextmanager
def apply_patches(_patches):
[p.start() for p in _patches]
yield
[p.stop() for p in _patches]
def test_send_notifications_not_list():
with pytest.raises(TypeError):
send_notifications(None)
def test_send_notifications_job_id(mocker):
with mocker.patch('awx.main.models.UnifiedJob.objects.get'):
send_notifications([], job_id=1)
assert UnifiedJob.objects.get.called
assert UnifiedJob.objects.get.called_with(id=1)
def test_send_notifications_list(mocker):
patches = list()
mock_job = mocker.MagicMock(spec=UnifiedJob)
patches.append(mocker.patch('awx.main.models.UnifiedJob.objects.get', return_value=mock_job))
mock_notification = mocker.MagicMock(spec=Notification, subject="test", body={'hello': 'world'})
patches.append(mocker.patch('awx.main.models.Notification.objects.get', return_value=mock_notification))
with apply_patches(patches):
send_notifications([1,2], job_id=1)
assert Notification.objects.get.call_count == 2
assert mock_notification.status == "successful"
assert mock_notification.save.called
assert mock_job.notifications.add.called
assert mock_job.notifications.add.called_with(mock_notification)
@pytest.mark.parametrize("current_instances,call_count", [(91, 2), (89,1)])
def test_run_admin_checks_usage(mocker, current_instances, call_count):
patches = list()
patches.append(mocker.patch('awx.main.tasks.User'))
mock_te = mocker.Mock(spec=TaskEnhancer)
mock_te.validate_enhancements.return_value = {'instance_count': 100, 'current_instances': current_instances, 'date_warning': True}
patches.append(mocker.patch('awx.main.tasks.TaskEnhancer', return_value=mock_te))
mock_sm = mocker.Mock()
patches.append(mocker.patch('awx.main.tasks.send_mail', wraps=mock_sm))
with apply_patches(patches):
run_administrative_checks()
assert mock_sm.called
if call_count == 2:
assert '90%' in mock_sm.call_args_list[0][0][0]
else:
assert 'expire' in mock_sm.call_args_list[0][0][0]
|
apache-2.0
|
Python
|
25ff7901a495a140e4c8d0890fdc0746f54104b7
|
rename bundled assets files
|
mattoufoutu/EventViz,mattoufoutu/EventViz
|
eventviz/assets.py
|
eventviz/assets.py
|
# -*- coding: utf-8 -*-
from flask.ext.assets import Bundle, Environment
JS_ASSETS = [
'js/jquery-1.9.1.js',
'js/jquery.tablesorter.js',
'js/bootstrap.js'
]
JS_TIMELINE_ASSETS = [
'js/timeline.js',
'js/eventviz-timeline.js'
]
CSS_ASSETS = [
'css/bootstrap.css',
'css/eventviz.css'
]
CSS_TIMELINE_ASSETS = [
'css/timeline.css'
]
JS_MINIFIER = 'yui_js'
CSS_MINIFIER = 'yui_css'
def setup_assets(app):
assets = Environment(app)
js_all = Bundle(*JS_ASSETS, filters=JS_MINIFIER, output='js/eventviz-bundle.min.js')
assets.register('js_all', js_all)
js_timeline = Bundle(*JS_TIMELINE_ASSETS, filters=JS_MINIFIER, output='js/timeline-bundle.min.js')
assets.register('js_timeline', js_timeline)
css_all = Bundle(*CSS_ASSETS, filters=CSS_MINIFIER, output='css/eventviz-bundle.min.css')
assets.register('css_all', css_all)
css_timeline = Bundle(*CSS_TIMELINE_ASSETS, filters=CSS_MINIFIER, output='css/timeline-bundle.min.css')
assets.register('css_timeline', css_timeline)
|
# -*- coding: utf-8 -*-
from flask.ext.assets import Bundle, Environment
JS_ASSETS = [
'js/jquery-1.9.1.js',
'js/jquery.tablesorter.js',
'js/bootstrap.js'
]
JS_TIMELINE_ASSETS = [
'js/timeline.js',
'js/eventviz-timeline.js'
]
CSS_ASSETS = [
'css/bootstrap.css',
'css/eventviz.css'
]
CSS_TIMELINE_ASSETS = [
'css/timeline.css'
]
JS_MINIFIER = 'yui_js'
CSS_MINIFIER = 'yui_css'
def setup_assets(app):
assets = Environment(app)
js_all = Bundle(*JS_ASSETS, filters=JS_MINIFIER, output='js/eventviz.min.js')
assets.register('js_all', js_all)
js_timeline = Bundle(*JS_TIMELINE_ASSETS, filters=JS_MINIFIER, output='js/timeline.min.js')
assets.register('js_timeline', js_timeline)
css_all = Bundle(*CSS_ASSETS, filters=CSS_MINIFIER, output='css/eventviz.min.css')
assets.register('css_all', css_all)
css_timeline = Bundle(*CSS_TIMELINE_ASSETS, filters=CSS_MINIFIER, output='css/timeline.min.css')
assets.register('css_timeline', css_timeline)
|
mit
|
Python
|
61e4693988c5b89b4a82457181813e7a6e73403b
|
Fix slugify for use without validator
|
stadtgestalten/stadtgestalten,stadtgestalten/stadtgestalten,stadtgestalten/stadtgestalten
|
utils/text.py
|
utils/text.py
|
import codecs
from django.core import exceptions
from django.utils import text
import translitcodec
def no_validator(arg):
pass
def slugify(model, field, value, validator=no_validator):
orig_slug = slug = text.slugify(codecs.encode(value, 'translit/long'))[:45]
i = 0
while True:
try:
try:
validator(slug)
except exceptions.ValidationError:
pass
else:
model.objects.get(**{field: slug})
i += 1
slug = orig_slug + '-' + str(i)
except model.DoesNotExist:
return slug
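# NOTE (editorial sketch, not part of the original commit): with no_validator
# as the default, call sites that only need uniqueness can now write, e.g.
#
#     slug = slugify(Group, 'slug', group.name)
#
# (Group and group.name are hypothetical names). Call sites with extra slug
# constraints keep passing an explicit validator as before.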
|
import codecs
from django.core import exceptions
from django.utils import text
import translitcodec
def slugify(model, field, value, validator):
orig_slug = slug = text.slugify(codecs.encode(value, 'translit/long'))[:45]
i = 0
while True:
try:
try:
validator(slug)
except exceptions.ValidationError:
pass
else:
model.objects.get(**{field: slug})
i += 1
slug = orig_slug + '-' + str(i)
except model.DoesNotExist:
return slug
|
agpl-3.0
|
Python
|
f2ffb339714ba848ea48008f20fa7adc71609b7f
|
add --update command to pr management util
|
openstates/openstates.org,openstates/openstates.org,openstates/openstates.org,openstates/openstates.org
|
people_admin/management/commands/create_pulls.py
|
people_admin/management/commands/create_pulls.py
|
from django.core.management.base import BaseCommand
from people_admin.models import DeltaSet, PullStatus
from people_admin.git import delta_set_to_pr, get_pr_status
class Command(BaseCommand):
help = "create pull requests from deltas"
def add_arguments(self, parser):
parser.add_argument("--list", default=False, action="store_true")
parser.add_argument("--delta")
parser.add_argument("--update", default=False, action="store_true")
def handle(self, *args, **options):
nothing = True
if options["list"]:
nothing = False
to_create = DeltaSet.objects.filter(
pr_status=PullStatus.NOT_CREATED
).order_by("id")
for ds in to_create:
print(f"{ds.id} | {ds.name} | {ds.created_by}")
if options["delta"]:
nothing = False
ds = DeltaSet.objects.get(
pk=options["delta"], pr_status=PullStatus.NOT_CREATED
)
print(f"creating {ds.id} | {ds.name} | {ds.created_by}")
ds.pr_url = delta_set_to_pr(ds)
ds.pr_status = PullStatus.CREATED
ds.save()
if options["update"]:
nothing = False
for ds in DeltaSet.objects.filter(pr_status=PullStatus.CREATED):
pr_id = int(ds.pr_url.split("/")[-1])
new_status = get_pr_status(pr_id)
if new_status != ds.pr_status:
print(
f"Updating {ds.id} | {ds.name} | {ds.get_pr_status} => {new_status}"
)
ds.pr_status = new_status
ds.save()
if nothing:
print("must either pass --list or --delta parameters")
|
from django.core.management.base import BaseCommand
from people_admin.models import DeltaSet, PullStatus
from people_admin.git import delta_set_to_pr
class Command(BaseCommand):
help = "create pull requests from deltas"
def add_arguments(self, parser):
parser.add_argument("--list", default=False, action="store_true")
parser.add_argument("--delta")
def handle(self, *args, **options):
nothing = True
if options["list"]:
nothing = False
to_create = DeltaSet.objects.filter(
pr_status=PullStatus.NOT_CREATED
).order_by("id")
for ds in to_create:
print(f"{ds.id} | {ds.name} | {ds.created_by}")
if options["delta"]:
nothing = False
ds = DeltaSet.objects.get(
pk=options["delta"], pr_status=PullStatus.NOT_CREATED
)
print(f"creating {ds.id} | {ds.name} | {ds.created_by}")
ds.pr_url = delta_set_to_pr(ds)
ds.pr_status = PullStatus.CREATED
ds.save()
if nothing:
print("must either pass --list or --delta parameters")
|
mit
|
Python
|
efef8389f2536179ebea189fed33c3ca446e68ac
|
Refactor indicators
|
jmelett/pyfx,jmelett/pyFxTrader,jmelett/pyfx
|
pyFxTrader/utils/indicators.py
|
pyFxTrader/utils/indicators.py
|
# -*- coding: utf-8 -*-
import numpy as np
def moving_average(x, n, type='simple'):
"""
compute an n period moving average.
type is 'simple' | 'exponential'
"""
x = np.asarray(x)
if type == 'simple':
weights = np.ones(n)
else:
weights = np.exp(np.linspace(-1., 0., n))
weights /= weights.sum()
a = np.convolve(x, weights, mode='full')[:len(x)]
a[:n] = a[n]
return a
def relative_strength(prices, n=14):
"""
compute the n period relative strength indicator
http://stockcharts.com/school/doku.php?id=chart_school:glossary_r#relativestrengthindex
http://www.investopedia.com/terms/r/rsi.asp
"""
deltas = np.diff(prices)
seed = deltas[:n + 1]
up = seed[seed >= 0].sum() / n
down = -seed[seed < 0].sum() / n
rs = up / down
rsi = np.zeros_like(prices)
rsi[:n] = 100. - 100. / (1. + rs)
for i in range(n, len(prices)):
delta = deltas[i - 1] # cause the diff is 1 shorter
if delta > 0:
upval = delta
downval = 0.
else:
upval = 0.
downval = -delta
up = (up * (n - 1) + upval) / n
down = (down * (n - 1) + downval) / n
rs = up / down
rsi[i] = 100. - 100. / (1. + rs)
return rsi
def moving_average_convergence(x, nslow=26, nfast=12, nsign=9, simple=False):
"""
compute the MACD (Moving Average Convergence/Divergence) using a fast and
slow exponential moving avg'
"""
macd_dict = {}
macd_dict['fast'] = moving_average(x, nfast, type='exponential')
macd_dict['slow'] = moving_average(x, nslow, type='exponential')
macd_dict['macd'] = map(lambda f, s: round(f - s, 5), macd_dict['fast'],
macd_dict['slow'])
macd_dict['sign'] = moving_average(macd_dict['macd'], nsign)
if not simple:
return macd_dict
else:
return macd_dict['macd']
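# NOTE (editorial sketch, not part of the original module): a small usage
# example. The map() call above assumes Python 2 (where map() returns a
# list); under Python 3 the 'macd' entry would need wrapping in list()
# before the 'sign' moving average is computed.
if __name__ == '__main__':
    prices = range(1, 61)                      # toy price series
    macd = moving_average_convergence(prices)
    print sorted(macd.keys())                  # ['fast', 'macd', 'sign', 'slow']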
|
# -*- coding: utf-8 -*-
import numpy as np
def moving_average(x, n, type='simple'):
"""
compute an n period moving average.
type is 'simple' | 'exponential'
"""
x = np.asarray(x)
if type == 'simple':
weights = np.ones(n)
else:
weights = np.exp(np.linspace(-1., 0., n))
weights /= weights.sum()
a = np.convolve(x, weights, mode='full')[:len(x)]
a[:n] = a[n]
return a
def relative_strength(prices, n=14):
"""
compute the n period relative strength indicator
http://stockcharts.com/school/doku.php?id=chart_school:glossary_r#relativestrengthindex
http://www.investopedia.com/terms/r/rsi.asp
"""
deltas = np.diff(prices)
seed = deltas[:n + 1]
up = seed[seed >= 0].sum() / n
down = -seed[seed < 0].sum() / n
rs = up / down
rsi = np.zeros_like(prices)
rsi[:n] = 100. - 100. / (1. + rs)
for i in range(n, len(prices)):
delta = deltas[i - 1] # cause the diff is 1 shorter
if delta > 0:
upval = delta
downval = 0.
else:
upval = 0.
downval = -delta
up = (up * (n - 1) + upval) / n
down = (down * (n - 1) + downval) / n
rs = up / down
rsi[i] = 100. - 100. / (1. + rs)
return rsi
def moving_average_convergence(x, nslow=26, nfast=12):
"""
compute the MACD (Moving Average Convergence/Divergence) using a fast and slow exponential moving avg'
return value is emaslow, emafast, macd which are len(x) arrays
"""
emaslow = moving_average(x, nslow, type='exponential')
emafast = moving_average(x, nfast, type='exponential')
return emaslow, emafast, emafast - emaslow
|
mit
|
Python
|
2eef6612a046b2982327a36ffe03beb4a0aa54f3
|
Remove dependancies on lmi-sdp and sympy for is_passive.
|
python-control/python-control
|
control/passivity.py
|
control/passivity.py
|
'''
Author: Mark Yeatman
Date: May 15, 2022
'''
from . import statesp as ss
import numpy as np
import cvxopt as cvx
def is_passive(sys):
'''
Indicates if a linear time-invariant system is passive
Constructs a linear matrix inequality and a feasibility optimization
such that if a solution exists, the system is passive.
The source for the algorithm is:
McCourt, Michael J., and Panos J. Antsaklis. "Demonstrating passivity and dissipativity using computational methods." ISIS 8 (2013).
'''
A = sys.A
B = sys.B
C = sys.C
D = sys.D
def make_LMI_matrix(P):
V = np.vstack((
np.hstack((A.T @ P + P@A, P@B)),
np.hstack((B.T@P, np.zeros_like(D))))
)
return V
P = np.zeros_like(A)
matrix_list = []
state_space_size = A.shape[0]
for i in range(0, state_space_size):
for j in range(0, state_space_size):
if j <= i:
P = P*0.0
P[i, j] = 1.0
P[j, i] = 1.0
matrix_list.append(make_LMI_matrix(P).flatten())
coefficents = np.vstack(matrix_list).T
constants = -np.vstack((
np.hstack((np.zeros_like(A), - C.T)),
np.hstack((- C, -D - D.T)))
)
number_of_opt_vars = int(
(state_space_size**2-state_space_size)/2 + state_space_size)
c = cvx.matrix(0.0, (number_of_opt_vars, 1))
# crunch feasibility solution
sol = cvx.solvers.sdp(c,
Gs=[cvx.matrix(coefficents)],
hs=[cvx.matrix(constants)])
return (sol["x"] is not None)
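# NOTE (editorial sketch, not part of the commit): an illustrative call,
# assuming the python-control StateSpace constructor. A stable first-order
# system with unit feedthrough, G(s) = 1/(s+1) + 1, is positive real and
# should therefore report as passive:
#
#     import control
#     sys = control.ss([[-1.0]], [[1.0]], [[1.0]], [[1.0]])
#     is_passive(sys)   # expected: True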
|
'''
Author: Mark Yeatman
Date: May 15, 2022
'''
from . import statesp as ss
from sympy import symbols, Matrix, symarray
from lmi_sdp import LMI_NSD, to_cvxopt
from cvxopt import solvers
import numpy as np
def is_passive(sys):
'''
Indicates if a linear time-invariant system is passive
Constructs a linear matrix inequality and a feasibility optimization
such that if a solution exists, the system is passive.
The source for the algorithm is:
McCourt, Michael J., and Panos J. Antsaklis. "Demonstrating passivity and dissipativity using computational methods." ISIS 8 (2013).
'''
A = sys.A
B = sys.B
C = sys.C
D = sys.D
P = Matrix(symarray('p', A.shape))
# enforce symmetry in P
size = A.shape[0]
for i in range(0, size):
for j in range(0, size):
P[i, j] = P[j, i]
# construct matrix for storage function x'*V*x
V = Matrix.vstack(
Matrix.hstack(A.T * P + P*A, P*B - C.T),
Matrix.hstack(B.T*P - C, Matrix(-D - D.T))
)
# construct LMI, convert to form for feasibility solver
LMI_passivty = LMI_NSD(V, 0*V)
min_obj = 0 * symbols("x")
variables = V.free_symbols
solvers.options['show_progress'] = False
c, Gs, hs = to_cvxopt(min_obj, LMI_passivty, variables)
# crunch feasibility solution
sol = solvers.sdp(c, Gs=Gs, hs=hs)
return (sol["x"] is not None)
|
bsd-3-clause
|
Python
|
28455c541b45ef6ca8e098702e0b7ea7c49a4a71
|
Update ASDF version.
|
mchung94/latest-versions
|
versions/software/asdf.py
|
versions/software/asdf.py
|
from versions.software.utils import get_response, get_text_between
def name():
"""Return the precise name for the software."""
return 'asdf'
def installed_version():
"""Return the installed version of asdf."""
# I don't have a command-line version to run to get this from
return '3.3.2'
def latest_version():
"""Return the latest version of asdf available for download."""
url = 'https://common-lisp.net/project/asdf/archives/asdf.lisp'
source_code = get_response(url).text
return get_text_between(source_code, 'This is ASDF ', ':')
|
from versions.software.utils import get_response, get_text_between
def name():
"""Return the precise name for the software."""
return 'asdf'
def installed_version():
"""Return the installed version of asdf."""
# I don't have a command-line version to run to get this from
return '3.3.1'
def latest_version():
"""Return the latest version of asdf available for download."""
url = 'https://common-lisp.net/project/asdf/archives/asdf.lisp'
source_code = get_response(url).text
return get_text_between(source_code, 'This is ASDF ', ':')
|
mit
|
Python
|
4011c54fc1e20f9d2e9514c1344ce3ee5bf032db
|
fix docstring
|
harpolea/pyro2,zingale/pyro2,harpolea/pyro2,zingale/pyro2
|
lm_atm/__init__.py
|
lm_atm/__init__.py
|
"""The pyro solver for low Mach number atmospheric flow. This
implements a second-order approximate projection method. The general
flow is:
* create the limited slopes of rho, u and v (in both directions)
* get the advective velocities through a piecewise linear Godunov
method
* enforce the divergence constraint on the velocities through a
projection (the MAC projection)
* predict rho to edges and do the conservative update
* recompute the interface states using the new advective velocity
* update U in time to get the provisional velocity field
* project the final velocity to enforce the divergence constraint.
The projections are done using multigrid
"""
from .simulation import *
|
"""The pyro solver for low Mach number atmospheric flow. This
implements a second-order approximate projection method. The general
flow is:
* create the limited slopes of rho, u and v (in both directions)
* get the advective velocities through a piecewise linear Godunov
method
* enforce the divergence constraint on the velocities through a
projection (the MAC projection)
* predict rho to edges and do the conservative update
* recompute the interface states using the new advective velocity
* update U in time to get the provisional velocity field
* project the final velocity to enforce the divergence constraint.
The projections are done using multigrid
"""
from .simulation import *
|
bsd-3-clause
|
Python
|
9362511d420a297fc1ed27f0642c4dcd527b4aff
|
Swap incorrect argument
|
jevinw/rec_utilities,jevinw/rec_utilities
|
babel_util/scripts/wos_to_edge.py
|
babel_util/scripts/wos_to_edge.py
|
#!/usr/bin/env python3
from parsers.wos import WOSStream
from util.PajekFactory import PajekFactory
from util.misc import open_file, Benchmark
if __name__ == "__main__":
import argparse
import sys
parser = argparse.ArgumentParser(description="Creates Pajek (.net) files from WOS XML")
parser.add_argument('infile')
parser.add_argument('outfile', nargs='?', type=argparse.FileType('w'), default=sys.stdout)
arguments = parser.parse_args()
with open_file(arguments.infile) as f:
p = WOSStream(f)
for entry in p.parse():
for citation in entry["citations"]:
arguments.write("%s\t%s\n" % (entry["id"], citation))
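# NOTE (editorial, not part of the original commit): arguments.write() would
# raise AttributeError on an argparse.Namespace; the intended call is
# presumably arguments.outfile.write(...). The commit above only corrects the
# open_file() argument from parser.infile to arguments.infile.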
|
#!/usr/bin/env python3
from parsers.wos import WOSStream
from util.PajekFactory import PajekFactory
from util.misc import open_file, Benchmark
if __name__ == "__main__":
import argparse
import sys
parser = argparse.ArgumentParser(description="Creates Pajek (.net) files from WOS XML")
parser.add_argument('infile')
parser.add_argument('outfile', nargs='?', type=argparse.FileType('w'), default=sys.stdout)
arguments = parser.parse_args()
with open_file(parser.infile) as f:
p = WOSStream(f)
for entry in p.parse():
for citation in entry["citations"]:
arguments.write("%s\t%s\n" % (entry["id"], citation))
|
agpl-3.0
|
Python
|
7b6a16f2dc418e7898d5cca248228d50becf9d05
|
Add a method representation() for transition calendars.
|
jwg4/qual,jwg4/calexicon
|
calexicon/calendars/historical.py
|
calexicon/calendars/historical.py
|
from datetime import date
from base import Calendar
from calexicon.dates import InvalidDate, DateWithCalendar
from main import JulianCalendar, ProlepticGregorianCalendar
class SwitchDateWithCalendar(DateWithCalendar):
def __str__(self):
return "%s (%s - %s)" % (
self.calendar.date_display_string(self._date),
self.calendar.display_name,
self.calendar.period_string(self._date)
)
class JulianToGregorianCalendar(Calendar):
def date(self, year, month, day):
gregorian_date = date(year, month, day)
if gregorian_date < self.first_gregorian_day:
julian_date = JulianCalendar().date(year, month, day)
if not julian_date < self.first_gregorian_day:
raise InvalidDate("This is a 'missing day' when the calendars changed.")
return self.from_date(julian_date._date)
return self.from_date(gregorian_date)
@classmethod
def date_display_string(cls, d):
if d >= cls.first_gregorian_day:
return ProlepticGregorianCalendar.date_display_string(d)
return JulianCalendar.date_display_string(d)
@classmethod
def representation(cls, d):
if d >= cls.first_gregorian_day:
return ProlepticGregorianCalendar.representation(d)
return JulianCalendar.representation(d)
@classmethod
def period_string(cls, d):
if d >= cls.first_gregorian_day:
return 'Gregorian'
else:
return 'Julian'
def from_date(self, d):
return SwitchDateWithCalendar(self.__class__, d)
class EnglishHistoricalCalendar(JulianToGregorianCalendar):
display_name = "English Historical Calendar"
first_gregorian_day = date(1752, 9, 14)
class SpanishHistoricalCalendar(JulianToGregorianCalendar):
display_name = "Spanish Historical Calendar"
first_gregorian_day = date(1582, 10, 15)
class FrenchHistoricalCalendar(JulianToGregorianCalendar):
display_name = "French Historical Calendar"
first_gregorian_day = date(1582, 12, 20)
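# NOTE (editorial sketch, not part of the commit): representation() follows
# the same switch-date dispatch as date_display_string(), e.g.:
#
#     EnglishHistoricalCalendar.representation(date(1752, 9, 2))   # Julian side
#     EnglishHistoricalCalendar.representation(date(1752, 9, 14))  # Gregorian side
#
# Its return value is defined by the parent calendars in calexicon and is not
# shown in this file.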
|
from datetime import date
from base import Calendar
from calexicon.dates import InvalidDate, DateWithCalendar
from main import JulianCalendar, ProlepticGregorianCalendar
class SwitchDateWithCalendar(DateWithCalendar):
def __str__(self):
return "%s (%s - %s)" % (
self.calendar.date_display_string(self._date),
self.calendar.display_name,
self.calendar.period_string(self._date)
)
class JulianToGregorianCalendar(Calendar):
def date(self, year, month, day):
gregorian_date = date(year, month, day)
if gregorian_date < self.first_gregorian_day:
julian_date = JulianCalendar().date(year, month, day)
if not julian_date < self.first_gregorian_day:
raise InvalidDate("This is a 'missing day' when the calendars changed.")
return self.from_date(julian_date._date)
return self.from_date(gregorian_date)
@classmethod
def date_display_string(cls, d):
if d >= cls.first_gregorian_day:
return ProlepticGregorianCalendar.date_display_string(d)
return JulianCalendar.date_display_string(d)
@classmethod
def period_string(cls, d):
if d >= cls.first_gregorian_day:
return 'Gregorian'
else:
return 'Julian'
def from_date(self, d):
return SwitchDateWithCalendar(self.__class__, d)
class EnglishHistoricalCalendar(JulianToGregorianCalendar):
display_name = "English Historical Calendar"
first_gregorian_day = date(1752, 9, 14)
class SpanishHistoricalCalendar(JulianToGregorianCalendar):
display_name = "Spanish Historical Calendar"
first_gregorian_day = date(1582, 10, 15)
class FrenchHistoricalCalendar(JulianToGregorianCalendar):
display_name = "French Historical Calendar"
first_gregorian_day = date(1582, 12, 20)
|
apache-2.0
|
Python
|
daed100280b615ab7bd50bbf54d7f40f1d5d2a42
|
Add CAN_DETECT
|
Asnelchristian/coala-bears,vijeth-aradhya/coala-bears,refeed/coala-bears,mr-karan/coala-bears,kaustubhhiware/coala-bears,madhukar01/coala-bears,sounak98/coala-bears,horczech/coala-bears,kaustubhhiware/coala-bears,coala/coala-bears,SanketDG/coala-bears,yash-nisar/coala-bears,madhukar01/coala-bears,gs0510/coala-bears,horczech/coala-bears,shreyans800755/coala-bears,aptrishu/coala-bears,yash-nisar/coala-bears,SanketDG/coala-bears,srisankethu/coala-bears,sounak98/coala-bears,arjunsinghy96/coala-bears,srisankethu/coala-bears,damngamerz/coala-bears,arjunsinghy96/coala-bears,refeed/coala-bears,meetmangukiya/coala-bears,yash-nisar/coala-bears,ankit01ojha/coala-bears,aptrishu/coala-bears,horczech/coala-bears,seblat/coala-bears,SanketDG/coala-bears,sounak98/coala-bears,incorrectusername/coala-bears,LWJensen/coala-bears,Asnelchristian/coala-bears,ku3o/coala-bears,coala/coala-bears,gs0510/coala-bears,damngamerz/coala-bears,naveentata/coala-bears,arjunsinghy96/coala-bears,Asnelchristian/coala-bears,aptrishu/coala-bears,ankit01ojha/coala-bears,incorrectusername/coala-bears,madhukar01/coala-bears,damngamerz/coala-bears,Vamshi99/coala-bears,Vamshi99/coala-bears,refeed/coala-bears,Shade5/coala-bears,madhukar01/coala-bears,seblat/coala-bears,horczech/coala-bears,aptrishu/coala-bears,shreyans800755/coala-bears,yash-nisar/coala-bears,coala/coala-bears,gs0510/coala-bears,kaustubhhiware/coala-bears,gs0510/coala-bears,naveentata/coala-bears,horczech/coala-bears,seblat/coala-bears,coala-analyzer/coala-bears,vijeth-aradhya/coala-bears,incorrectusername/coala-bears,coala/coala-bears,SanketDG/coala-bears,aptrishu/coala-bears,kaustubhhiware/coala-bears,meetmangukiya/coala-bears,seblat/coala-bears,dosarudaniel/coala-bears,yashtrivedi96/coala-bears,aptrishu/coala-bears,coala/coala-bears,ku3o/coala-bears,coala/coala-bears,mr-karan/coala-bears,yashtrivedi96/coala-bears,coala/coala-bears,damngamerz/coala-bears,refeed/coala-bears,damngamerz/coala-bears,Asnelchristian/coala-bears,Vamshi99/coala-bears,coala-analyzer/coala-bears,meetmangukiya/coala-bears,yash-nisar/coala-bears,naveentata/coala-bears,shreyans800755/coala-bears,refeed/coala-bears,shreyans800755/coala-bears,Asnelchristian/coala-bears,dosarudaniel/coala-bears,incorrectusername/coala-bears,ankit01ojha/coala-bears,ankit01ojha/coala-bears,incorrectusername/coala-bears,ku3o/coala-bears,chriscoyfish/coala-bears,seblat/coala-bears,SanketDG/coala-bears,SanketDG/coala-bears,ku3o/coala-bears,dosarudaniel/coala-bears,ankit01ojha/coala-bears,ku3o/coala-bears,chriscoyfish/coala-bears,coala-analyzer/coala-bears,naveentata/coala-bears,yashtrivedi96/coala-bears,shreyans800755/coala-bears,Asnelchristian/coala-bears,meetmangukiya/coala-bears,srisankethu/coala-bears,kaustubhhiware/coala-bears,Shade5/coala-bears,kaustubhhiware/coala-bears,yashtrivedi96/coala-bears,vijeth-aradhya/coala-bears,ankit01ojha/coala-bears,arjunsinghy96/coala-bears,ankit01ojha/coala-bears,refeed/coala-bears,meetmangukiya/coala-bears,sounak98/coala-bears,Asnelchristian/coala-bears,chriscoyfish/coala-bears,Vamshi99/coala-bears,aptrishu/coala-bears,arjunsinghy96/coala-bears,LWJensen/coala-bears,Vamshi99/coala-bears,madhukar01/coala-bears,yash-nisar/coala-bears,coala/coala-bears,LWJensen/coala-bears,seblat/coala-bears,srisankethu/coala-bears,meetmangukiya/coala-bears,kaustubhhiware/coala-bears,vijeth-aradhya/coala-bears,yash-nisar/coala-bears,dosarudaniel/coala-bears,aptrishu/coala-bears,Vamshi99/coala-bears,coala/coala-bears,dosarudaniel/coala-bears,coala-analyzer/coala-bears,mr-karan/coala-bears,LWJensen/coala-bears,aptrishu/coala-bears,meetmangukiya/coala-bears,arjunsinghy96/coala-bears,vijeth-aradhya/coala-bears,refeed/coala-bears,incorrectusername/coala-bears,ku3o/coala-bears,madhukar01/coala-bears,madhukar01/coala-bears,naveentata/coala-bears,shreyans800755/coala-bears,damngamerz/coala-bears,srisankethu/coala-bears,mr-karan/coala-bears,Shade5/coala-bears,damngamerz/coala-bears,Asnelchristian/coala-bears,ankit01ojha/coala-bears,LWJensen/coala-bears,srisankethu/coala-bears,horczech/coala-bears,incorrectusername/coala-bears,shreyans800755/coala-bears,Vamshi99/coala-bears,ankit01ojha/coala-bears,vijeth-aradhya/coala-bears,Vamshi99/coala-bears,Vamshi99/coala-bears,coala-analyzer/coala-bears,arjunsinghy96/coala-bears,mr-karan/coala-bears,LWJensen/coala-bears,coala-analyzer/coala-bears,coala/coala-bears,kaustubhhiware/coala-bears,srisankethu/coala-bears,damngamerz/coala-bears,damngamerz/coala-bears,damngamerz/coala-bears,aptrishu/coala-bears,damngamerz/coala-bears,naveentata/coala-bears,srisankethu/coala-bears,yashtrivedi96/coala-bears,vijeth-aradhya/coala-bears,Vamshi99/coala-bears,mr-karan/coala-bears,damngamerz/coala-bears,chriscoyfish/coala-bears,seblat/coala-bears,coala-analyzer/coala-bears,dosarudaniel/coala-bears,shreyans800755/coala-bears,horczech/coala-bears,seblat/coala-bears,naveentata/coala-bears,yashtrivedi96/coala-bears,LWJensen/coala-bears,yash-nisar/coala-bears,horczech/coala-bears,ku3o/coala-bears,chriscoyfish/coala-bears,chriscoyfish/coala-bears,ku3o/coala-bears,coala/coala-bears,Shade5/coala-bears,vijeth-aradhya/coala-bears,Shade5/coala-bears,srisankethu/coala-bears,coala-analyzer/coala-bears,mr-karan/coala-bears,yash-nisar/coala-bears,mr-karan/coala-bears,srisankethu/coala-bears,Shade5/coala-bears,ku3o/coala-bears,gs0510/coala-bears,Shade5/coala-bears,Shade5/coala-bears,SanketDG/coala-bears,coala-analyzer/coala-bears,gs0510/coala-bears,ku3o/coala-bears,meetmangukiya/coala-bears,horczech/coala-bears,dosarudaniel/coala-bears,SanketDG/coala-bears,refeed/coala-bears,chriscoyfish/coala-bears,yash-nisar/coala-bears,sounak98/coala-bears,madhukar01/coala-bears,gs0510/coala-bears,shreyans800755/coala-bears,naveentata/coala-bears,incorrectusername/coala-bears,vijeth-aradhya/coala-bears,Vamshi99/coala-bears,yash-nisar/coala-bears,yashtrivedi96/coala-bears,Shade5/coala-bears,yashtrivedi96/coala-bears,LWJensen/coala-bears,aptrishu/coala-bears,chriscoyfish/coala-bears,coala/coala-bears,gs0510/coala-bears,refeed/coala-bears,gs0510/coala-bears,shreyans800755/coala-bears,dosarudaniel/coala-bears,incorrectusername/coala-bears,LWJensen/coala-bears,sounak98/coala-bears,sounak98/coala-bears,gs0510/coala-bears,Asnelchristian/coala-bears,refeed/coala-bears,naveentata/coala-bears,arjunsinghy96/coala-bears,refeed/coala-bears
|
bears/python/PyDocStyleBear.py
|
bears/python/PyDocStyleBear.py
|
from coalib.bearlib.abstractions.Lint import Lint
from coalib.bears.LocalBear import LocalBear
from coalib.bears.requirements.PipRequirement import PipRequirement
from coalib.settings.Setting import typed_list
class PyDocStyleBear(LocalBear, Lint):
executable = 'pydocstyle'
output_regex = r'(.*\.py):(?P<line>\d+) (.+):\n\s+(?P<message>.*)'
use_stderr = True
LANGUAGES = {"Python", "Python 2", "Python 3"}
REQUIREMENTS = {PipRequirement('pydocstyle', '1.*')}
AUTHORS = {'The coala developers'}
AUTHORS_EMAILS = {'coala-devel@googlegroups.com'}
LICENSE = 'AGPL-3.0'
CAN_DETECT = {'Formatting', 'Documentation'}
def run(self,
filename,
file,
pydocstyle_select: typed_list(str)=(),
pydocstyle_ignore: typed_list(str)=()):
'''
Checks python docstrings.
:param pydocstyle_select: List of checked errors by specifying
which errors to check for.
:param pydocstyle_ignore: List of checked errors by specifying
which errors to ignore.
Note: pydocstyle_select and pydocstyle_ignore are mutually exclusive.
They cannot be used together.
'''
self.arguments = '{filename}'
if pydocstyle_ignore and pydocstyle_select:
self.err("The arguments pydocstyle_select and pydocstyle_ignore "
"are both given but mutually exclusive.")
return
elif pydocstyle_ignore:
ignore = ','.join(part.strip() for part in pydocstyle_ignore)
self.arguments += " --ignore={}".format(ignore)
elif pydocstyle_select:
select = ','.join(part.strip() for part in pydocstyle_select)
self.arguments += " --select={} ".format(select)
return self.lint(filename, file)
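# NOTE (editorial, not part of the original commit): CAN_DETECT is coala bear
# metadata; declaring {'Formatting', 'Documentation'} lets coala users list
# and filter bears by the kinds of issues they can detect.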
|
from coalib.bearlib.abstractions.Lint import Lint
from coalib.bears.LocalBear import LocalBear
from coalib.bears.requirements.PipRequirement import PipRequirement
from coalib.settings.Setting import typed_list
class PyDocStyleBear(LocalBear, Lint):
executable = 'pydocstyle'
output_regex = r'(.*\.py):(?P<line>\d+) (.+):\n\s+(?P<message>.*)'
use_stderr = True
LANGUAGES = {"Python", "Python 2", "Python 3"}
REQUIREMENTS = {PipRequirement('pydocstyle', '1.*')}
AUTHORS = {'The coala developers'}
AUTHORS_EMAILS = {'coala-devel@googlegroups.com'}
LICENSE = 'AGPL-3.0'
def run(self,
filename,
file,
pydocstyle_select: typed_list(str)=(),
pydocstyle_ignore: typed_list(str)=()):
'''
Checks python docstrings.
:param pydocstyle_select: List of checked errors by specifying
which errors to check for.
:param pydocstyle_ignore: List of checked errors by specifying
which errors to ignore.
Note: pydocstyle_select and pydocstyle_ignore are mutually exclusive.
They cannot be used together.
'''
self.arguments = '{filename}'
if pydocstyle_ignore and pydocstyle_select:
self.err("The arguments pydocstyle_select and pydocstyle_ignore "
"are both given but mutually exclusive.")
return
elif pydocstyle_ignore:
ignore = ','.join(part.strip() for part in pydocstyle_ignore)
self.arguments += " --ignore={}".format(ignore)
elif pydocstyle_select:
select = ','.join(part.strip() for part in pydocstyle_select)
self.arguments += " --select={} ".format(select)
return self.lint(filename, file)
|
agpl-3.0
|
Python
|
51872cd1a966f10976200dcdf9998a9119072d43
|
write warnings to stderr, not stdout (which might be muted)
|
blixt/py-starbound,6-lasers/py-starbound
|
export.py
|
export.py
|
#!/usr/bin/env python
import optparse
import os
import sys
import starbound
def main():
p = optparse.OptionParser()
p.add_option('-d', '--destination', dest='path',
help='Destination directory')
options, arguments = p.parse_args()
if len(arguments) != 1:
raise ValueError('Only one argument is supported (package path)')
package_path = arguments[0]
base = options.path if options.path else '.'
with starbound.open_file(package_path) as package:
if not isinstance(package, starbound.Package):
raise ValueError('Provided path is not a package')
print 'Loading index...'
# Get the paths from the index in the database.
paths = list(package.get_index())
print 'Index loaded. Extracting %d files...' % len(paths)
num_files = 0
percentage_count = max(len(paths) // 100, 1)
for path in paths:
dest_path = base + path
dir_path = os.path.dirname(dest_path)
if not os.path.exists(dir_path):
os.makedirs(dir_path)
try:
data = package.get(path)
except:
# break the dots in case std{out,err} are the same tty:
sys.stdout.write('\n')
sys.stdout.flush()
print >>sys.stderr, 'W: Failed to read', path
continue
with open(dest_path, 'w') as file:
file.write(data)
num_files += 1
if not num_files % percentage_count:
sys.stdout.write('.')
sys.stdout.flush()
print
print 'Extracted %d files.' % num_files
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import optparse
import os
import sys
import starbound
def main():
p = optparse.OptionParser()
p.add_option('-d', '--destination', dest='path',
help='Destination directory')
options, arguments = p.parse_args()
if len(arguments) != 1:
raise ValueError('Only one argument is supported (package path)')
package_path = arguments[0]
base = options.path if options.path else '.'
with starbound.open_file(package_path) as package:
if not isinstance(package, starbound.Package):
raise ValueError('Provided path is not a package')
print 'Loading index...'
# Get the paths from the index in the database.
paths = list(package.get_index())
print 'Index loaded. Extracting %d files...' % len(paths)
num_files = 0
percentage_count = max(len(paths) // 100, 1)
for path in paths:
dest_path = base + path
dir_path = os.path.dirname(dest_path)
if not os.path.exists(dir_path):
os.makedirs(dir_path)
try:
data = package.get(path)
except:
print
print 'Failed to read', path
continue
with open(dest_path, 'w') as file:
file.write(data)
num_files += 1
if not num_files % percentage_count:
sys.stdout.write('.')
sys.stdout.flush()
print
print 'Extracted %d files.' % num_files
if __name__ == '__main__':
main()
|
mit
|
Python
|
8785cade9bfe7cc3c54db0d0a068f99c5883ef1b
|
Allow locations to be imported from codelists management page
|
markbrough/maedi-projects,markbrough/maedi-projects,markbrough/maedi-projects
|
maediprojects/views/codelists.py
|
maediprojects/views/codelists.py
|
from flask import Flask, render_template, flash, request, Markup, \
session, redirect, url_for, escape, Response, abort, send_file, jsonify
from flask.ext.login import login_required, current_user
from maediprojects import app, db, models
from maediprojects.query import activity as qactivity
from maediprojects.query import location as qlocation
from maediprojects.lib import codelists
import json
@app.route("/codelists/")
@login_required
def codelists_management():
return render_template("codelists.html",
loggedinuser=current_user,
codelist_codes = codelists.get_db_codelists(),
codelist_names = codelists.get_db_codelist_names(),
countries = codelists.get_codelists()["Country"],
countries_locations = qlocation.get_countries_locations()
)
@app.route("/codelists/import_locations/", methods=["POST"])
@login_required
def import_locations():
existing_countries = list(map(lambda l: l.country.code,
qlocation.get_countries_locations()))
country_code = request.form.get("country")
if not country_code in existing_countries:
qlocation.import_locations(country_code)
flash("Locations successfully set up for that county!", "success")
else:
flash("Locations for that country were not imported, because they have already been imported!", "danger")
return redirect(url_for("codelists_management"))
|
from flask import Flask, render_template, flash, request, Markup, \
session, redirect, url_for, escape, Response, abort, send_file, jsonify
from flask.ext.login import login_required, current_user
from maediprojects import app, db, models
from maediprojects.query import activity as qactivity
from maediprojects.query import location as qlocation
from maediprojects.lib import codelists
import json
@app.route("/codelists/")
@login_required
def codelists_management():
return render_template("codelists.html",
loggedinuser=current_user,
codelist_codes = codelists.get_db_codelists(),
codelist_names = codelists.get_db_codelist_names()
)
|
agpl-3.0
|
Python
|
e50d42032669d84c344e13863ccb8122b79b8b4a
|
prepare for release
|
kalefranz/auxlib,kalefranz/auxlib
|
auxlib/__about__.py
|
auxlib/__about__.py
|
# -*- coding: utf-8 -*-
"""auxiliary library to the python standard library"""
from __future__ import absolute_import, division, print_function
__all__ = ["__title__", "__author__", "__email__", "__license__", "__copyright__",
"__homepage__"]
__title__ = "auxlib"
__author__ = 'Kale Franz'
__email__ = 'kale@franz.io'
__homepage__ = 'https://github.com/kalefranz/auxlib'
__license__ = "ISC"
__copyright__ = "(c) 2015 Kale Franz. All rights reserved."
__summary__ = locals().__doc__
|
# -*- coding: utf-8 -*-
"""auxiliary library to the python standard library"""
from __future__ import absolute_import, division, print_function
import os
import sys
import warnings
__all__ = ["__title__", "__author__", "__email__", "__license__", "__copyright__",
"__homepage__"]
__title__ = "auxlib"
__author__ = 'Kale Franz'
__email__ = 'kale@franz.io'
__homepage__ = 'https://github.com/kalefranz/auxlib'
__license__ = "ISC"
__copyright__ = "(c) 2015 Kale Franz. All rights reserved."
__summary__ = locals().__doc__
|
isc
|
Python
|
01d7850ccf5b23c448a898a1a23533e8207e8e49
|
Bump version to 1.7.2
|
liampauling/betfair,liampauling/betfairlightweight
|
betfairlightweight/__init__.py
|
betfairlightweight/__init__.py
|
import logging
from .apiclient import APIClient
from .exceptions import BetfairError
from .streaming import StreamListener
from . import filters
__title__ = 'betfairlightweight'
__version__ = '1.7.2'
__author__ = 'Liam Pauling'
# Set default logging handler to avoid "No handler found" warnings.
try: # Python 2.7+
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
logging.getLogger(__name__).addHandler(NullHandler())
|
import logging
from .apiclient import APIClient
from .exceptions import BetfairError
from .streaming import StreamListener
from . import filters
__title__ = 'betfairlightweight'
__version__ = '1.7.1'
__author__ = 'Liam Pauling'
# Set default logging handler to avoid "No handler found" warnings.
try: # Python 2.7+
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
logging.getLogger(__name__).addHandler(NullHandler())
|
mit
|
Python
|
24f546168b428580ccee05ba28f15e96fb5f64c9
|
Create a financial year
|
Code4SA/municipal-data,Code4SA/municipal-data,Code4SA/municipal-data,Code4SA/municipal-data
|
scorecard/tests/test_views.py
|
scorecard/tests/test_views.py
|
import json
from infrastructure.models import FinancialYear
from django.test import (
TransactionTestCase,
Client,
override_settings,
)
from . import (
import_data,
)
from .resources import (
GeographyResource,
MunicipalityProfileResource,
MedianGroupResource,
RatingCountGroupResource,
)
@override_settings(
SITE_ID=2,
STATICFILES_STORAGE="django.contrib.staticfiles.storage.StaticFilesStorage",
)
class GeographyDetailViewTestCase(TransactionTestCase):
serialized_rollback = True
def test_context(self):
# Import sample data
import_data(
GeographyResource,
"views/scorecard_geography.csv",
)
import_data(
MunicipalityProfileResource,
"views/municipality_profile.csv",
)
import_data(
MedianGroupResource,
"views/median_group.csv",
)
import_data(
RatingCountGroupResource,
"views/rating_count_group.csv",
)
FinancialYear.objects.create(budget_year="2049/2050", active=1)
# Make request
client = Client()
response = client.get("/profiles/municipality-CPT-city-of-cape-town/")
context = response.context
page_data = json.loads(context["page_data_json"])
# Test for amount types
self.assertIsInstance(page_data["amount_types_v1"], dict)
# Test for cube names
self.assertIsInstance(page_data["cube_names"], dict)
# Test for municipality category descriptions
self.assertIsInstance(page_data["municipal_category_descriptions"], dict)
|
import json
from django.test import (
TransactionTestCase,
Client,
override_settings,
)
from . import (
import_data,
)
from .resources import (
GeographyResource,
MunicipalityProfileResource,
MedianGroupResource,
RatingCountGroupResource,
)
@override_settings(
SITE_ID=2,
STATICFILES_STORAGE="django.contrib.staticfiles.storage.StaticFilesStorage",
)
class GeographyDetailViewTestCase(TransactionTestCase):
serialized_rollback = True
def test_context(self):
# Import sample data
import_data(
GeographyResource,
"views/scorecard_geography.csv",
)
import_data(
MunicipalityProfileResource,
"views/municipality_profile.csv",
)
import_data(
MedianGroupResource,
"views/median_group.csv",
)
import_data(
RatingCountGroupResource,
"views/rating_count_group.csv",
)
# Make request
client = Client()
response = client.get("/profiles/municipality-CPT-city-of-cape-town/")
context = response.context
page_data = json.loads(context["page_data_json"])
# Test for amount types
self.assertIsInstance(page_data["amount_types_v1"], dict)
# Test for cube names
self.assertIsInstance(page_data["cube_names"], dict)
# Test for municipality category descriptions
self.assertIsInstance(page_data["municipal_category_descriptions"], dict)
|
mit
|
Python
|
70482d032d1acef1570b16551bf170a0a271a7ec
|
Put the 'import *' back into test-settings.py
|
mansilladev/zulip,Juanvulcano/zulip,verma-varsha/zulip,shaunstanislaus/zulip,noroot/zulip,peguin40/zulip,Galexrt/zulip,ashwinirudrappa/zulip,ufosky-server/zulip,AZtheAsian/zulip,Vallher/zulip,seapasulli/zulip,Frouk/zulip,bowlofstew/zulip,eastlhu/zulip,wavelets/zulip,sup95/zulip,eeshangarg/zulip,j831/zulip,zulip/zulip,pradiptad/zulip,mansilladev/zulip,Jianchun1/zulip,hackerkid/zulip,hustlzp/zulip,tbutter/zulip,itnihao/zulip,avastu/zulip,ryanbackman/zulip,thomasboyt/zulip,luyifan/zulip,praveenaki/zulip,sup95/zulip,kaiyuanheshang/zulip,hackerkid/zulip,Juanvulcano/zulip,gkotian/zulip,codeKonami/zulip,themass/zulip,gigawhitlocks/zulip,lfranchi/zulip,wangdeshui/zulip,swinghu/zulip,itnihao/zulip,tbutter/zulip,Frouk/zulip,shrikrishnaholla/zulip,brainwane/zulip,KingxBanana/zulip,sonali0901/zulip,mahim97/zulip,umkay/zulip,alliejones/zulip,johnnygaddarr/zulip,yocome/zulip,m1ssou/zulip,KingxBanana/zulip,krtkmj/zulip,mdavid/zulip,firstblade/zulip,kokoar/zulip,dwrpayne/zulip,mohsenSy/zulip,schatt/zulip,ikasumiwt/zulip,zofuthan/zulip,wangdeshui/zulip,moria/zulip,mahim97/zulip,jerryge/zulip,littledogboy/zulip,lfranchi/zulip,tiansiyuan/zulip,wweiradio/zulip,ufosky-server/zulip,jeffcao/zulip,johnnygaddarr/zulip,bowlofstew/zulip,so0k/zulip,hustlzp/zulip,tiansiyuan/zulip,guiquanz/zulip,jerryge/zulip,littledogboy/zulip,karamcnair/zulip,peguin40/zulip,zulip/zulip,Cheppers/zulip,aptrishu/coala-bears — [joined from wrapped source lines] — mansilladev/zulip,Juanvulcano/zulip,verma-varsha/zulip,shaunstanislaus/zulip,noroot/zulip,peguin40/zulip,Galexrt/zulip,ashwinirudrappa/zulip,ufosky-server/zulip,AZtheAsian/zulip,Vallher/zulip,seapasulli/zulip,Frouk/zulip,bowlofstew/zulip,eastlhu/zulip,wavelets/zulip,sup95/zulip,eeshangarg/zulip,j831/zulip,zulip/zulip,pradiptad/zulip,mansilladev/zulip,Jianchun1/zulip,hackerkid/zulip,hustlzp/zulip,tbutter/zulip,itnihao/zulip,avastu/zulip,ryanbackman/zulip,thomasboyt/zulip,luyifan/zulip,praveenaki/zulip,sup95/zulip,kaiyuanheshang/zulip,hackerkid/zulip,Juanvulcano/zulip,gkotian/zulip,codeKonami/zulip,themass/zulip,gigawhitlocks/zulip,lfranchi/zulip,wangdeshui/zulip,swinghu/zulip,itnihao/zulip,tbutter/zulip,Frouk/zulip,shrikrishnaholla/zulip,brainwane/zulip,KingxBanana/zulip,sonali0901/zulip,mahim97/zulip,umkay/zulip,alliejones/zulip,johnnygaddarr/zulip,yocome/zulip,m1ssou/zulip,KingxBanana/zulip,krtkmj/zulip,mdavid/zulip,firstblade/zulip,kokoar/zulip,dwrpayne/zulip,mohsenSy/zulip,schatt/zulip,ikasumiwt/zulip,zofuthan/zulip,wangdeshui/zulip,moria/zulip,mahim97/zulip,jerryge/zulip,littledogboy/zulip,lfranchi/zulip,tiansiyuan/zulip,wweiradio/zulip,ufosky-server/zulip,jeffcao/zulip,johnnygaddarr/zulip,bowlofstew/zulip,so0k/zulip,dxq-git/zulip,Vallher/zulip,ashwinirudrappa/zulip,shaunstanislaus/zulip,ericzhou2008/zulip,KJin99/zulip,luyifan/zulip,aps-sids/zulip,gkotian/zulip,babbage/zulip,kokoar/zulip,mansilladev/zulip,mahim97/zulip,jeffcao/zulip,amanharitsh123/zulip,noroot/zulip,bssrdf/zulip,punchagan/zulip,andersk/zulip,he15his/zulip,JanzTam/zulip,codeKonami/zulip,Cheppers/zulip,peguin40/zulip,moria/zulip,technicalpickles/zulip,krtkmj/zulip,yocome/zulip,codeKonami/zulip,hafeez3000/zulip,bastianh/zulip,j831/zulip,jessedhillon/zulip,zacps/zulip,natanovia/zulip,LeeRisk/zulip,Drooids/zulip,rishig/zulip,ikasumiwt/zulip,arpith/zulip,zorojean/zulip,themass/zulip,technicalpickles/zulip,grave-w-grave/zulip,ApsOps/zulip,Suninus/zulip,kaiyuanheshang/zulip,pradiptad/zulip,amallia/zulip,bastianh/zulip,rishig/zulip,Qgap/zulip,amyliu345/zulip,bssrdf/zulip,joyhchen/zulip,amyliu345/zulip,LeeRisk/zulip,arpitpanwar/zulip,xuxiao/zulip,themass/zulip,moria/zulip,jessedhillon/zulip,Batterfii/zulip,wavelets/zulip,akuseru/zulip,jrowan/zulip,joyhchen/zulip,brockwhittaker/zulip,ryanbackman/zulip,synicalsyntax/zulip,suxinde2009/zulip,TigorC/zulip,zachallaun/zulip,ahmadassaf/zulip,isht3/zulip,glovebx/zulip,noroot/zulip,alliejones/zulip,pradiptad/zulip,mdavid/zulip,umkay/zulip,jessedhillon/zulip,dnmfarrell/zulip,atomic-labs/zulip,Diptanshu8/zulip,babbage/zulip,ryanbackman/zulip,brainwane/zulip,calvinleenyc/zulip,karamcnair/zulip,susansls/zulip,krtkmj/zulip,PaulPetring/zulip,vakila/zulip,jrowan/zulip,adnanh/zulip,nicholasbs/zulip,jessedhillon/zulip,vakila/zulip,krtkmj/zulip,dnmfarrell/zulip,EasonYi/zulip,littledogboy/zulip,ashwinirudrappa/zulip,akuseru/zulip,amallia/zulip,johnny9/zulip,christi3k/zulip,suxinde2009/zulip,udxxabp/zulip,amanharitsh123/zulip,ipernet/zulip,esander91/zulip,dxq-git/zulip,andersk/zulip,Drooids/zulip,jrowan/zulip,souravbadami/zulip,amallia/zulip,kaiyuanheshang/zulip,mdavid/zulip,Galexrt/zulip,RobotCaleb/zulip,luyifan/zulip,joshisa/zulip,huangkebo/zulip,technicalpickles/zulip,Batterfii/zulip,MariaFaBella85/zulip,cosmicAsymmetry/zulip,Diptanshu8/zulip,yocome/zulip,RobotCaleb/zulip,zacps/zulip,amyliu345/zulip,bssrdf/zulip,joyhchen/zulip,LeeRisk/zulip,arpitpanwar/zulip,xuxiao/zulip,reyha/zulip,eastlhu/zulip,hayderimran7/zulip,babbage/zulip,jainayush975/zulip,christi3k/zulip,cosmicAsymmetry/zulip,alliejones/zulip,ipernet/zulip,xuxiao/zulip,PhilSk/zulip,bastianh/zulip,johnny9/zulip,AZtheAsian/zulip,hj3938/zulip,rishig/zulip,Batterfii/zulip,ryansnowboarder/zulip,kaiyuanheshang/zulip,sharmaeklavya2/zulip,zofuthan/zulip,jerryge/zulip,shubhamdhama/zulip,shaunstanislaus/zulip,hayderimran7/zulip,vakila/zulip,dotcool/zulip,brainwane/zulip,jonesgithub/zulip,ericzhou2008/zulip,johnnygaddarr/zulip,littledogboy/zulip,vaidap/zulip,jrowan/zulip,dwrpayne/zulip,shubhamdhama/zulip,showell/zulip,pradiptad/zulip,blaze225/zulip,amallia/zulip,Diptanshu8/zulip,yocome/zulip,RobotCaleb/zulip,zacps/zulip,amyliu345/zulip,LAndreas/zulip,itnihao/zulip,peguin40/zulip,jessedhillon/zulip,jonesgithub/zulip,Frouk/zulip,LAndreas/zulip,souravbadami/zulip,zhaoweigg/zulip,ryansnowboarder/zulip,hackerkid/zulip,mansilladev/zulip,yuvipanda/zulip,sup95/zulip,synicalsyntax/zulip,showell/zulip,zacps/zulip,tdr130/zulip,fw1121/zulip,sonali0901/zulip,lfranchi/zulip,dawran6/zulip,esander91/zulip,PaulPetring/zulip,amyliu345/zulip,avastu/zulip,SmartPeople/zulip,Suninus/zulip,LAndreas/zulip,nicholasbs/zulip,developerfm/zulip,PaulPetring/zulip,johnny9/zulip,suxinde2009/zulip,DazWorrall/zulip,showell/zulip,jainayush975/zulip,bitemyapp/zulip,bitemyapp/zulip,peiwei/zulip,Galexrt/zulip,aakash-cr7/zulip,kou/zulip,umkay/zulip,fw1121/zulip,RobotCaleb/zulip,ApsOps/zulip,bastianh/zulip,swinghu/zulip,gkotian/zulip,SmartPeople/zulip,johnny9/zulip,jeffcao/zulip,proliming/zulip,MayB/zulip,hustlzp/zulip,j831/zulip,luyifan/zulip,adnanh/zulip,PaulPetring/zulip,cosmicAsymmetry/zulip,hackerkid/zulip,hustlzp/zulip,voidException/zulip,LeeRisk/zulip,qq1012803704/zulip,punchagan/zulip,dwrpayne/zulip,kaiyuanheshang/zulip,umkay/zulip,deer-hope/zulip,KJin99/zulip,dhcrzf/zulip,synicalsyntax/zulip,armooo/zulip,glovebx/zulip,zulip/zulip,fw1121/zulip,Frouk/zulip,aps-sids/zulip,wdaher/zulip,glu32... [NOTE: span reconstruction uncertain]
|
humbug/test-settings.py
|
humbug/test-settings.py
|
from settings import *
DATABASES['default']["NAME"] = "zephyr/tests/zephyrdb.test"
|
from settings import DATABASES
DATABASES['default']["NAME"] = "zephyr/tests/zephyrdb.test"
|
apache-2.0
|
Python
|
7c74017bc0d76ecb34e3fab44767290f51d98a09
|
Decrease get_updates timeout for client test suite
|
ryanbackman/zulip,christi3k/zulip,dnmfarrell/zulip,littledogboy/zulip,LAndreas/zulip,timabbott/zulip,hayderimran7/zulip,xuxiao/zulip,deer-hope/zulip,zhaoweigg/zulip,m1ssou/zulip,wdaher/zulip,punchagan/zulip,lfranchi/zulip,kokoar/zulip,voidException/zulip,brainwane/zulip,blaze225/zulip,akuseru/zulip,JPJPJPOPOP/zulip,hayderimran7/zulip,MayB/zulip,ipernet/zulip,AZtheAsian/zulip,gigawhitlocks/zulip,bssrdf/zulip,bitemyapp/zulip,ashwinirudrappa/zulip,proliming/zulip,LeeRisk/zulip,littledogboy/zulip,JanzTam/zulip,developerfm/zulip,pradiptad/zulip,dotcool/zulip,alliejones/zulip,hj3938/zulip,Juanvulcano/zulip,qq1012803704/zulip,yocome/zulip,sharmaeklavya2/zulip,AZtheAsian/zulip,tdr130/zulip,vaidap/zulip,wangdeshui/zulip,sup95/zulip,blaze225/zulip,dnmfarrell/zulip,stamhe/zulip,Drooids/zulip,rht/zulip,suxinde2009/zulip,aakash-cr7/zulip,nicholasbs/zulip,qq1012803704/zulip,bowlofstew/zulip,aakash-cr7/zulip,mdavid/zulip,xuanhan863/zulip,wangdeshui/zulip,bastianh/zulip,hafeez3000/zulip,bssrdf/zulip,umkay/zulip,jphilipsen05/zulip,zofuthan/zulip,proliming/zulip,mansilladev/zulip,codeKonami/zulip,karamcnair/zulip,wavelets/zulip,paxapy/zulip,Cheppers/zulip,kaiyuanheshang/zulip,jerryge/zulip,hustlzp/zulip,nicholasbs/zulip,reyha/zulip,andersk/zulip,huangkebo/zulip,mahim97/zulip,pradiptad/zulip,jessedhillon/zulip,littledogboy/zulip,moria/zulip,DazWorrall/zulip,zofuthan/zulip,jimmy54/zulip,umkay/zulip,zorojean/zulip,xuxiao/zulip,sharmaeklavya2/zulip,stamhe/zulip,themass/zulip,Diptanshu8/zulip,andersk/zulip,technicalpickles/zulip,dhcrzf/zulip,Jianchun1/zulip,tommyip/zulip,ipernet/zulip,praveenaki/zulip,levixie/zulip,developerfm/zulip,praveenaki/zulip,punchagan/zulip,joshisa/zulip,saitodisse/zulip,hengqujushi/zulip,Batterfii/zulip,codeKonami/zulip,technicalpickles/zulip,hayderimran7/zulip,voidException/zulip,amanharitsh123/zulip,rishig/zulip,ryansnowboarder/zulip,LeeRisk/zulip,bastianh/zulip,Batterfii/zulip,ikasumiwt/zulip,ashwinirudrappa/zulip,Drooids/zulip,Cheppers/zulip,tommyip/zulip,Vallher/zulip,johnnygaddarr/zulip,wweiradio/zulip,jrowan/zulip,RobotCaleb/zulip,jphilipsen05/zulip,PhilSk/zulip,firstblade/zulip,suxinde2009/zulip,gkotian/zulip,punchagan/zulip,aakash-cr7/zulip,amyliu345/zulip,dwrpayne/zulip,moria/zulip,dxq-git/zulip,LAndreas/zulip,bssrdf/zulip,guiquanz/zulip,Gabriel0402/zulip,Batterfii/zulip,so0k/zulip,RobotCaleb/zulip,huangkebo/zulip,stamhe/zulip,AZtheAsian/zulip,noroot/zulip,xuxiao/zulip,dxq-git/zulip,vaidap/zulip,ufosky-server/zulip,rht/zulip,babbage/zulip,kaiyuanheshang/zulip,dawran6/zulip,aliceriot/zulip,dattatreya303/zulip,themass/zulip,noroot/zulip,timabbott/zulip,hj3938/zulip,RobotCaleb/zulip,brockwhittaker/zulip,Qgap/zulip,shrikrishnaholla/zulip,voidException/zulip,bowlofstew/zulip,verma-varsha/zulip,jeffcao/zulip,bitemyapp/zulip,praveenaki/zulip,LAndreas/zulip,proliming/zulip,aakash-cr7/zulip,jackrzhang/zulip,brainwane/zulip,Suninus/zulip,jainayush975/zulip,christi3k/zulip,Galexrt/zulip,gkotian/zulip,praveenaki/zulip,hackerkid/zulip,Cheppers/zulip,vakila/zulip,cosmicAsymmetry/zulip,niftynei/zulip,noroot/zulip,nicholasbs/zulip,suxinde2009/zulip,akuseru/zulip,noroot/zulip,natanovia/zulip,shubhamdhama/zulip,fw1121/zulip,eeshangarg/zulip,aakash-cr7/zulip,wangdeshui/zulip,grave-w-grave/zulip,ahmadassaf/zulip,andersk/zulip,vabs22/zulip,zofuthan/zulip,ashwinirudrappa/zulip,alliejones/zulip,Frouk/zulip,wdaher/zulip,pradiptad/zulip,bluesea/zulip,so0k/zulip,vikas-parashar/zulip,Cheppers/zulip,easyfmxu/zulip,bastianh/zulip,ufosky-server/zulip,mohsenSy/zulip,souravbadami/zulip,Batterfii/zulip,amanhar
itsh123/zulip,hackerkid/zulip,sharmaeklavya2/zulip,rishig/zulip,karamcnair/zulip,luyifan/zulip,jainayush975/zulip,hackerkid/zulip,glovebx/zulip,littledogboy/zulip,he15his/zulip,zwily/zulip,ApsOps/zulip,fw1121/zulip,showell/zulip,ufosky-server/zulip,dwrpayne/zulip,hengqujushi/zulip,bluesea/zulip,ikasumiwt/zulip,codeKonami/zulip,Juanvulcano/zulip,dotcool/zulip,tommyip/zulip,umkay/zulip,sup95/zulip,showell/zulip,mdavid/zulip,sonali0901/zulip,thomasboyt/zulip,natanovia/zulip,babbage/zulip,mahim97/zulip,JPJPJPOPOP/zulip,KJin99/zulip,bowlofstew/zulip,synicalsyntax/zulip,dawran6/zulip,timabbott/zulip,zofuthan/zulip,Batterfii/zulip,souravbadami/zulip,Drooids/zulip,wavelets/zulip,verma-varsha/zulip,Galexrt/zulip,atomic-labs/zulip,LeeRisk/zulip,so0k/zulip,swinghu/zulip,armooo/zulip,glovebx/zulip,xuanhan863/zulip,zachallaun/zulip,littledogboy/zulip,jeffcao/zulip,aliceriot/zulip,jphilipsen05/zulip,Frouk/zulip,grave-w-grave/zulip,karamcnair/zulip,bluesea/zulip,schatt/zulip,deer-hope/zulip,showell/zulip,armooo/zulip,hayderimran7/zulip,tiansiyuan/zulip,codeKonami/zulip,vakila/zulip,nicholasbs/zulip,firstblade/zulip,eeshangarg/zulip,johnny9/zulip,luyifan/zulip,zorojean/zulip,avastu/zulip,jackrzhang/zulip,ipernet/zulip,ashwinirudrappa/zulip,yuvipanda/zulip,tommyip/zulip,paxapy/zulip,synicalsyntax/zulip,technicalpickles/zulip,shubhamdhama/zulip,ryansnowboarder/zulip,seapasulli/zulip,grave-w-grave/zulip,jonesgithub/zulip,armooo/zulip,bssrdf/zulip,wavelets/zulip,sonali0901/zulip,brockwhittaker/zulip,jphilipsen05/zulip,MayB/zulip,hj3938/zulip,ryansnowboarder/zulip,wangdeshui/zulip,eastlhu/zulip,Diptanshu8/zulip,he15his/zulip,ipernet/zulip,kaiyuanheshang/zulip,JanzTam/zulip,glovebx/zulip,aliceriot/zulip,vikas-parashar/zulip,thomasboyt/zulip,susansls/zulip,swinghu/zulip,wweiradio/zulip,EasonYi/zulip,rht/zulip,seapasulli/zulip,SmartPeople/zulip,Qgap/zulip,umkay/zulip,tdr130/zulip,dxq-git/zulip,MariaFaBella85/zulip,Vallher/zulip,jackrzhang/zulip,saitodisse/zulip,levixie/zulip,udxxabp/zulip,willingc/zulip,babbage/zulip,yuvipanda/zulip,ryanbackman/zulip,Galexrt/zulip,aps-sids/zulip,suxinde2009/zulip,babbage/zulip,peiwei/zulip,dwrpayne/zulip,ahmadassaf/zulip,krtkmj/zulip,jonesgithub/zulip,wavelets/zulip,dattatreya303/zulip,dotcool/zulip,eastlhu/zulip,showell/zulip,bssrdf/zulip,jeffcao/zulip,ufosky-server/zulip,paxapy/zulip,ufosky-server/zulip,sonali0901/zulip,sharmaeklavya2/zulip,dawran6/zulip,jessedhillon/zulip,verma-varsha/zulip,dhcrzf/zulip,shrikrishnaholla/zulip,zorojean/zulip,vikas-parashar/zulip,Gabriel0402/zulip,atomic-labs/zulip,Jianchun1/zulip,guiquanz/zulip,rishig/zulip,ryansnowboarder/zulip,kou/zulip,ryanbackman/zulip,MayB/zulip,jainayush975/zulip,jackrzhang/zulip,eastlhu/zulip,hengqujushi/zulip,sonali0901/zulip,synicalsyntax/zulip,cosmicAsymmetry/zulip,saitodisse/zulip,Gabriel0402/zulip,firstblade/zulip,christi3k/zulip,suxinde2009/zulip,umkay/zulip,zhaoweigg/zulip,Suninus/zulip,shaunstanislaus/zulip,zulip/zulip,technicalpickles/zulip,Juanvulcano/zulip,kokoar/zulip,suxinde2009/zulip,fw1121/zulip,dxq-git/zulip,dattatreya303/zulip,ikasumiwt/zulip,moria/zulip,shubhamdhama/zulip,krtkmj/zulip,zacps/zulip,andersk/zulip,Jianchun1/zulip,jessedhillon/zulip,jonesgithub/zulip,cosmicAsymmetry/zulip,ahmadassaf/zulip,MariaFaBella85/zulip,proliming/zulip,calvinleenyc/zulip,deer-hope/zulip,LAndreas/zulip,gkotian/zulip,dnmfarrell/zulip,sharmaeklavya2/zulip,JPJPJPOPOP/zulip,Jianchun1/zulip,arpitpanwar/zulip,hayderimran7/zulip,Gabriel0402/zulip,EasonYi/zulip,zacps/zulip,Juanvulcano/zulip,codeKonami/zulip,esander91/zulip,DazWo
rrall/zulip,souravbadami/zulip,kou/zulip,jimmy54/zulip,aliceriot/zulip,jessedhillon/zulip,itnihao/zulip,calvinleenyc/zulip,technicalpickles/zulip,wweiradio/zulip,jimmy54/zulip,joshisa/zulip,firstblade/zulip,avastu/zulip,tbutter/zulip,LeeRisk/zulip,reyha/zulip,KJin99/zulip,vabs22/zulip,j831/zulip,gigawhitlocks/zulip,paxapy/zulip,brainwane/zulip,tbutter/zulip,vakila/zulip,akuseru/zulip,jerryge/zulip,noroot/zulip,easyfmxu/zulip,so0k/zulip,ahmadassaf/zulip,dhcrzf/zulip,PhilSk/zulip,johnny9/zulip,wweiradio/zulip,ericzhou2008/zulip,joshisa/zulip,shubhamdhama/zulip,dxq-git/zulip,itnihao/zulip,arpitpanwar/zulip,tommyip/zulip,kaiyuanheshang/zulip,babbage/zulip,yuvipanda/zulip,avastu/zulip,SmartPeople/zulip,Vallher/zulip,ApsOps/zulip,mansilladev/zulip,KingxBanana/zulip,zhaoweigg/zulip,j831/zulip,isht3/zulip,EasonYi/zulip,tdr130/zulip,Diptanshu8/zulip,niftynei/zulip,ipernet/zulip,Batterfii/zulip,adnanh/zulip,adnanh/zulip,bitemyapp/zulip,dawran6/zulip,KingxBanana/zulip,MariaFaBella85/zulip,peiwei/zulip,praveenaki/zulip,dxq-git/zulip,vakila/zulip,xuxiao/zulip,dhcrzf/zulip,zwily/zulip,hengqujushi/zulip,tbutter/zulip,showell/zulip,bluesea/zulip,SmartPeople/zulip,andersk/zulip,shubhamdhama/zulip,dhcrzf/zulip,PaulPetring/zulip,lfranchi/zulip,bastianh/zulip,mohsenSy/zulip,calvinleenyc/zulip,aps-sids/zulip,luyifan/zulip,technicalpickles/zulip,alliejones/zulip,jrowan/zulip,krtkmj/zulip,DazWorrall/zulip,ikasumiwt/zulip,tiansiyuan/zulip,Drooids/zulip,krtkmj/zulip,yuvipanda/zulip,kou/zulip,eeshangarg/zulip,shrikrishnaholla/zulip,schatt/zulip,he15his/zulip,mdavid/zulip,zachallaun/zulip,gkotian/zulip,he15his/zulip,shaunstanislaus/zulip,Frouk/zulip,rht/zulip,lfranchi/zulip,zulip/zulip,xuxiao/zulip,littledogboy/zulip,Suninus/zulip,hustlzp/zulip,showell/zulip,brainwane/zulip,dotcool/zulip,itnihao/zulip,hj3938/zulip,MayB/zulip,tdr130/zulip,DazWorrall/zulip,pradiptad/zulip,zhaoweigg/zulip,aps-sids/zulip,punchagan/zulip,AZtheAsian/zulip,dotcool/zulip,willingc/zulip,rishig/zulip,umkay/zulip,ahmadassaf/zulip,Galexrt/zulip,hj3938/zulip,tbutter/zulip,fw1121/zulip,noroot/zulip,susansls/zulip,bowlofstew/zulip,jerryge/zulip,firstblade/zulip,xuxiao/zulip,amallia/zulip,krtkmj/zulip,bastianh/zulip,m1ssou/zulip,noroot/zulip,hackerkid/zulip,ericzhou2008/zulip,LAndreas/zulip,karamcnair/zulip,Cheppers/zulip,peguin40/zulip,tbutter/zulip,samatdav/zulip,udxxabp/zulip,armooo/zulip,mahim97/zulip,vakila/zulip,verma-varsha/zulip,susansls/zulip,grave-w-grave/zulip,isht3/zulip,andersk/zulip,jackrzhang/zulip,ryanbackman/zulip,babbage/zulip,timabbott/zulip,vabs22/zulip,KingxBanana/zulip,luyifan/zulip,dhcrzf/zulip,mohsenSy/zulip,so0k/zulip,KJin99/zulip,shrikrishnaholla/zulip,jimmy54/zulip,atomic-labs/zulip,zachallaun/zulip,mansilladev/zulip,zhaoweigg/zulip,mohsenSy/zulip,karamcnair/zulip,kaiyuanheshang/zulip,codeKonami/zulip,shrikrishnaholla/zulip,calvinleenyc/zulip,wweiradio/zulip,sup95/zulip,PaulPetring/zulip,vakila/zulip,peiwei/zulip,dnmfarrell/zulip,ufosky-server/zulip,jimmy54/zulip,grave-w-grave/zulip,itnihao/zulip,ikasumiwt/zulip,gigawhitlocks/zulip,johnny9/zulip,sonali0901/zulip,johnny9/zulip,willingc/zulip,littledogboy/zulip,easyfmxu/zulip,eastlhu/zulip,PhilSk/zulip,joyhchen/zulip,m1ssou/zulip,synicalsyntax/zulip,swinghu/zulip,glovebx/zulip,yuvipanda/zulip,Suninus/zulip,joyhchen/zulip,codeKonami/zulip,tbutter/zulip,nicholasbs/zulip,susansls/zulip,jimmy54/zulip,cosmicAsymmetry/zulip,shrikrishnaholla/zulip,pradiptad/zulip,grave-w-grave/zulip,mohsenSy/zulip,he15his/zulip,aps-sids/zulip,samatdav/zulip,eastlhu/zulip,vikas-parashar/zulip,reyha/
zulip,KJin99/zulip,zulip/zulip,ufosky-server/zulip,ApsOps/zulip,zacps/zulip,levixie/zulip,LeeRisk/zulip,fw1121/zulip,Gabriel0402/zulip,kou/zulip,Galexrt/zulip,zachallaun/zulip,aps-sids/zulip,tommyip/zulip,vaidap/zulip,themass/zulip,firstblade/zulip,jeffcao/zulip,natanovia/zulip,arpith/zulip,joyhchen/zulip,shaunstanislaus/zulip,bssrdf/zulip,guiquanz/zulip,alliejones/zulip,rht/zulip,sup95/zulip,so0k/zulip,ashwinirudrappa/zulip,arpitpanwar/zulip,brockwhittaker/zulip,joshisa/zulip,j831/zulip,jainayush975/zulip,samatdav/zulip,swinghu/zulip,yocome/zulip,saitodisse/zulip,reyha/zulip,joshisa/zulip,natanovia/zulip,zofuthan/zulip,yocome/zulip,DazWorrall/zulip,udxxabp/zulip,wdaher/zulip,shaunstanislaus/zulip,dotcool/zulip,willingc/zulip,proliming/zulip,JPJPJPOPOP/zulip,gigawhitlocks/zulip,isht3/zulip,swinghu/zulip,showell/zulip,tiansiyuan/zulip,stamhe/zulip,themass/zulip,Frouk/zulip,jonesgithub/zulip,JanzTam/zulip,eastlhu/zulip,avastu/zulip,glovebx/zulip,KJin99/zulip,eeshangarg/zulip,arpitpanwar/zulip,qq1012803704/zulip,johnnygaddarr/zulip,MariaFaBella85/zulip,thomasboyt/zulip,tbutter/zulip,huangkebo/zulip,souravbadami/zulip,qq1012803704/zulip,mansilladev/zulip,shrikrishnaholla/zulip,JanzTam/zulip,ashwinirudrappa/zulip,johnny9/zulip,dawran6/zulip,moria/zulip,jackrzhang/zulip,shaunstanislaus/zulip,he15his/zulip,punchagan/zulip,ericzhou2008/zulip,zachallaun/zulip,saitodisse/zulip,udxxabp/zulip,developerfm/zulip,arpith/zulip,JanzTam/zulip,arpitpanwar/zulip,zwily/zulip,gigawhitlocks/zulip,m1ssou/zulip,kokoar/zulip,RobotCaleb/zulip,amyliu345/zulip,dattatreya303/zulip,johnny9/zulip,Jianchun1/zulip,MayB/zulip,suxinde2009/zulip,m1ssou/zulip,Juanvulcano/zulip,rht/zulip,ericzhou2008/zulip,aliceriot/zulip,jessedhillon/zulip,JPJPJPOPOP/zulip,moria/zulip,johnny9/zulip,andersk/zulip,kokoar/zulip,voidException/zulip,stamhe/zulip,gkotian/zulip,LeeRisk/zulip,lfranchi/zulip,hustlzp/zulip,rishig/zulip,rishig/zulip,timabbott/zulip,natanovia/zulip,pradiptad/zulip,willingc/zulip,amyliu345/zulip,deer-hope/zulip,johnnygaddarr/zulip,developerfm/zulip,alliejones/zulip,DazWorrall/zulip,vikas-parashar/zulip,esander91/zulip,lfranchi/zulip,luyifan/zulip,akuseru/zulip,umkay/zulip,amanharitsh123/zulip,johnnygaddarr/zulip,peguin40/zulip,adnanh/zulip,jphilipsen05/zulip,moria/zulip,levixie/zulip,wangdeshui/zulip,DazWorrall/zulip,joyhchen/zulip,mdavid/zulip,KingxBanana/zulip,yocome/zulip,Gabriel0402/zulip,MayB/zulip,natanovia/zulip,huangkebo/zulip,TigorC/zulip,saitodisse/zulip,tiansiyuan/zulip,gigawhitlocks/zulip,lfranchi/zulip,LAndreas/zulip,rishig/zulip,dwrpayne/zulip,j831/zulip,Vallher/zulip,cosmicAsymmetry/zulip,isht3/zulip,brainwane/zulip,dattatreya303/zulip,sharmaeklavya2/zulip,synicalsyntax/zulip,kokoar/zulip,aliceriot/zulip,zwily/zulip,niftynei/zulip,guiquanz/zulip,SmartPeople/zulip,schatt/zulip,zulip/zulip,deer-hope/zulip,akuseru/zulip,vikas-parashar/zulip,PaulPetring/zulip,dhcrzf/zulip,huangkebo/zulip,xuanhan863/zulip,deer-hope/zulip,Cheppers/zulip,dwrpayne/zulip,Suninus/zulip,udxxabp/zulip,Vallher/zulip,ericzhou2008/zulip,fw1121/zulip,avastu/zulip,amyliu345/zulip,vaidap/zulip,kokoar/zulip,amallia/zulip,seapasulli/zulip,bitemyapp/zulip,Juanvulcano/zulip,atomic-labs/zulip,isht3/zulip,vabs22/zulip,vakila/zulip,zofuthan/zulip,Diptanshu8/zulip,kou/zulip,hj3938/zulip,arpith/zulip,akuseru/zulip,blaze225/zulip,themass/zulip,jainayush975/zulip,zachallaun/zulip,hafeez3000/zulip,kou/zulip,amanharitsh123/zulip,Drooids/zulip,jerryge/zulip,arpith/zulip,technicalpickles/zulip,fw1121/zulip,karamcnair/zulip,wavelets/zulip,nicholasbs/zulip,jef
fcao/zulip,wdaher/zulip,huangkebo/zulip,LeeRisk/zulip,voidException/zulip,Frouk/zulip,bowlofstew/zulip,swinghu/zulip,ryanbackman/zulip,luyifan/zulip,sonali0901/zulip,zorojean/zulip,TigorC/zulip,peguin40/zulip,cosmicAsymmetry/zulip,MariaFaBella85/zulip,zulip/zulip,developerfm/zulip,christi3k/zulip,AZtheAsian/zulip,kou/zulip,armooo/zulip,zwily/zulip,jimmy54/zulip,bastianh/zulip,Galexrt/zulip,Suninus/zulip,mdavid/zulip,j831/zulip,he15his/zulip,wweiradio/zulip,JPJPJPOPOP/zulip,jonesgithub/zulip,qq1012803704/zulip,udxxabp/zulip,amallia/zulip,amallia/zulip,shubhamdhama/zulip,hafeez3000/zulip,m1ssou/zulip,kaiyuanheshang/zulip,johnnygaddarr/zulip,bowlofstew/zulip,arpith/zulip,isht3/zulip,m1ssou/zulip,voidException/zulip,timabbott/zulip,peguin40/zulip,Qgap/zulip,amallia/zulip,thomasboyt/zulip,PaulPetring/zulip,niftynei/zulip,ipernet/zulip,itnihao/zulip,reyha/zulip,lfranchi/zulip,RobotCaleb/zulip,verma-varsha/zulip,wdaher/zulip,Qgap/zulip,calvinleenyc/zulip,Frouk/zulip,eeshangarg/zulip,zacps/zulip,niftynei/zulip,arpitpanwar/zulip,hengqujushi/zulip,souravbadami/zulip,LAndreas/zulip,blaze225/zulip,Drooids/zulip,hayderimran7/zulip,zwily/zulip,itnihao/zulip,hayderimran7/zulip,adnanh/zulip,mohsenSy/zulip,jerryge/zulip,esander91/zulip,zhaoweigg/zulip,mahim97/zulip,zachallaun/zulip,EasonYi/zulip,bssrdf/zulip,joshisa/zulip,eeshangarg/zulip,arpith/zulip,ryansnowboarder/zulip,reyha/zulip,dnmfarrell/zulip,dawran6/zulip,hengqujushi/zulip,easyfmxu/zulip,ryansnowboarder/zulip,luyifan/zulip,guiquanz/zulip,christi3k/zulip,karamcnair/zulip,zwily/zulip,zulip/zulip,easyfmxu/zulip,hackerkid/zulip,tiansiyuan/zulip,atomic-labs/zulip,arpitpanwar/zulip,gigawhitlocks/zulip,bitemyapp/zulip,vabs22/zulip,MayB/zulip,ApsOps/zulip,Qgap/zulip,zorojean/zulip,hafeez3000/zulip,Cheppers/zulip,esander91/zulip,pradiptad/zulip,Vallher/zulip,jonesgithub/zulip,PhilSk/zulip,vaidap/zulip,Suninus/zulip,esander91/zulip,tiansiyuan/zulip,hafeez3000/zulip,adnanh/zulip,glovebx/zulip,peguin40/zulip,synicalsyntax/zulip,xuanhan863/zulip,seapasulli/zulip,hafeez3000/zulip,brockwhittaker/zulip,tommyip/zulip,dnmfarrell/zulip,johnnygaddarr/zulip,Galexrt/zulip,joyhchen/zulip,levixie/zulip,natanovia/zulip,TigorC/zulip,armooo/zulip,JanzTam/zulip,krtkmj/zulip,Drooids/zulip,jrowan/zulip,EasonYi/zulip,brockwhittaker/zulip,brainwane/zulip,alliejones/zulip,levixie/zulip,verma-varsha/zulip,gkotian/zulip,thomasboyt/zulip,proliming/zulip,udxxabp/zulip,paxapy/zulip,armooo/zulip,amanharitsh123/zulip,akuseru/zulip,jackrzhang/zulip,seapasulli/zulip,seapasulli/zulip,niftynei/zulip,Jianchun1/zulip,bluesea/zulip,joshisa/zulip,zofuthan/zulip,xuanhan863/zulip,jessedhillon/zulip,synicalsyntax/zulip,atomic-labs/zulip,shaunstanislaus/zulip,hustlzp/zulip,deer-hope/zulip,easyfmxu/zulip,PhilSk/zulip,RobotCaleb/zulip,praveenaki/zulip,proliming/zulip,jerryge/zulip,jerryge/zulip,schatt/zulip,KingxBanana/zulip,bluesea/zulip,kokoar/zulip,gkotian/zulip,MariaFaBella85/zulip,punchagan/zulip,amyliu345/zulip,TigorC/zulip,huangkebo/zulip,seapasulli/zulip,babbage/zulip,moria/zulip,Vallher/zulip,jainayush975/zulip,mdavid/zulip,saitodisse/zulip,ikasumiwt/zulip,hengqujushi/zulip,zacps/zulip,RobotCaleb/zulip,tdr130/zulip,AZtheAsian/zulip,shubhamdhama/zulip,SmartPeople/zulip,wavelets/zulip,swinghu/zulip,dnmfarrell/zulip,dotcool/zulip,voidException/zulip,zulip/zulip,timabbott/zulip,PaulPetring/zulip,schatt/zulip,guiquanz/zulip,bitemyapp/zulip,ericzhou2008/zulip,wangdeshui/zulip,dxq-git/zulip,souravbadami/zulip,mansilladev/zulip,kaiyuanheshang/zulip,JanzTam/zulip,EasonYi/zulip,developerfm/zulip,thoma
sboyt/zulip,zhaoweigg/zulip,Diptanshu8/zulip,joyhchen/zulip,zorojean/zulip,peiwei/zulip,ApsOps/zulip,hustlzp/zulip,glovebx/zulip,zorojean/zulip,hj3938/zulip,ikasumiwt/zulip,KingxBanana/zulip,easyfmxu/zulip,rht/zulip,tdr130/zulip,dwrpayne/zulip,eastlhu/zulip,tdr130/zulip,nicholasbs/zulip,PaulPetring/zulip,ahmadassaf/zulip,willingc/zulip,guiquanz/zulip,aakash-cr7/zulip,wangdeshui/zulip,stamhe/zulip,itnihao/zulip,atomic-labs/zulip,ashwinirudrappa/zulip,jphilipsen05/zulip,levixie/zulip,wdaher/zulip,dattatreya303/zulip,hafeez3000/zulip,hackerkid/zulip,adnanh/zulip,jrowan/zulip,EasonYi/zulip,thomasboyt/zulip,susansls/zulip,Diptanshu8/zulip,sup95/zulip,mdavid/zulip,mahim97/zulip,ApsOps/zulip,mansilladev/zulip,wavelets/zulip,themass/zulip,amallia/zulip,avastu/zulip,j831/zulip,qq1012803704/zulip,mansilladev/zulip,wweiradio/zulip,peiwei/zulip,yocome/zulip,eeshangarg/zulip,susansls/zulip,punchagan/zulip,samatdav/zulip,jeffcao/zulip,KJin99/zulip,alliejones/zulip,esander91/zulip,calvinleenyc/zulip,jonesgithub/zulip,peiwei/zulip,bluesea/zulip,paxapy/zulip,bastianh/zulip,ApsOps/zulip,vabs22/zulip,schatt/zulip,samatdav/zulip,jrowan/zulip,jeffcao/zulip,Gabriel0402/zulip,PhilSk/zulip,TigorC/zulip,praveenaki/zulip,dwrpayne/zulip,amyliu345/zulip,amallia/zulip,johnnygaddarr/zulip,shaunstanislaus/zulip,ahmadassaf/zulip,ericzhou2008/zulip,esander91/zulip,jrowan/zulip,sup95/zulip,Qgap/zulip,Frouk/zulip,themass/zulip,blaze225/zulip,avastu/zulip,bowlofstew/zulip,aps-sids/zulip,vaidap/zulip,ipernet/zulip,xuanhan863/zulip,KJin99/zulip,stamhe/zulip,yocome/zulip,TigorC/zulip,aps-sids/zulip,firstblade/zulip,hackerkid/zulip,ryanbackman/zulip,hustlzp/zulip,SmartPeople/zulip,blaze225/zulip,mahim97/zulip,willingc/zulip,developerfm/zulip,yuvipanda/zulip,samatdav/zulip,MariaFaBella85/zulip,ryansnowboarder/zulip,christi3k/zulip,brainwane/zulip,yocome/zulip,Batterfii/zulip,tiansiyuan/zulip,xuanhan863/zulip,bitemyapp/zulip,adnanh/zulip,amanharitsh123/zulip,jessedhillon/zulip,hustlzp/zulip,xuxiao/zulip,schatt/zulip,brockwhittaker/zulip,wdaher/zulip,peguin40/zulip,so0k/zulip,qq1012803704/zulip,peiwei/zulip,yuvipanda/zulip,zacps/zulip,PaulPetring/zulip,krtkmj/zulip,Qgap/zulip,aliceriot/zulip
|
humbug/test_settings.py
|
humbug/test_settings.py
|
from settings import *
DATABASES["default"] = {"NAME": "zephyr/tests/zephyrdb.test",
"ENGINE": "django.db.backends.sqlite3",
"OPTIONS": { "timeout": 20, },}
TORNADO_SERVER = 'http://localhost:9983'
# Decrease the get_updates timeout to 1 second.
# This allows CasperJS to proceed quickly to the next test step.
POLL_TIMEOUT = 1000
|
from settings import *
DATABASES["default"] = {"NAME": "zephyr/tests/zephyrdb.test",
"ENGINE": "django.db.backends.sqlite3",
"OPTIONS": { "timeout": 20, },}
TORNADO_SERVER = 'http://localhost:9983'
|
apache-2.0
|
Python
|
4e539a2c35484fedbee2284e894b2e60635de83c
|
create initial models
|
charlon/mdot,uw-it-aca/mdot,charlon/mdot,uw-it-aca/mdot,uw-it-aca/mdot,charlon/mdot,uw-it-aca/mdot
|
mdot/models.py
|
mdot/models.py
|
from django.db import models
from django.contrib.auth.models import User
from django.forms import ModelForm
# Create your models here.
class Sponsor(models.Model):
name = models.CharField(max_length = 50)
netid = models.CharField(max_length = 8)
title = models.CharField(max_length = 50)
email = models.EmailField(max_length = 30)
department = models.CharField(max_length = 30)
unit = models.CharField(max_length = 30)
class Manager(models.Model):
name = models.CharField(max_length = 50)
netid = models.CharField(max_length = 8)
class App(models.Model):
# define plarforms
name = models.CharField(max_length = 50)
primary_language = models.CharField(max_length = 20) # are we using abbreviations?
request_date = models.DateTimeField(auto_now_add = True)
requester = models.ForeignKey(User)
app_manager = models.ForeignKey(Manager)
app_sponsor = models.ForeignKey(Sponsor)
app_sponser_agreed_date = models.DateTimeField(auto_now = True)
app_sponser_agreed = models.BooleanField(default = False)
class Agreement(models.Model):
app = models.ForeignKey(App)
agree_time = models.DateTimeField(auto_now_add = True)
class SponsorForm(ModelForm):
class Meta:
model = Sponsor
fields = '__all__'
labels = {
}
help_texts = {
}
error_messages = {
}
class ManagerForm(ModelForm):
class Meta:
model = Manager
fields = '__all__'
labels = {
}
help_texts = {
}
error_messages = {
}
class AppForm(ModelForm):
class Meta:
model = App
fields = ['name', 'primary_language']
labels = {
}
help_texts = {
}
error_messages = {
}
|
from django.db import models
# Create your models here.
|
apache-2.0
|
Python
|
c3cc948ceede66a70eadc300e558a42c8b06769b
|
Update change_names_miseq.py
|
lauringlab/variant_pipeline,lauringlab/variant_pipeline,lauringlab/variant_pipeline,lauringlab/variant_pipeline
|
scripts/change_names_miseq.py
|
scripts/change_names_miseq.py
|
#import sys
import os
import argparse
import shutil
parser = argparse.ArgumentParser(description='This program takes Miseq fastq files and renames them as sample.read_direction.#.fastq and keeps a log of the change')
parser.add_argument('-s',action='store',dest='s',help='The sorce directory containing the original fastq files')
parser.add_argument('-f',action='store',dest='f',help='The final directory that will hold the renamed fastq files')
parser.add_argument('-run',action='store_true',dest='test',default=False,help='Boolean switch to run program, without this the program runs in test mode: the log is made but no files are renamed')
parser.add_argument('-gz',action='store_true',dest='zip',default=False,help='Boolean switch. Activate when working with .gz files')
args=parser.parse_args()
s=args.s
f=args.f
test=args.test
zip=args.zip
# input argument is fastq directory
if not os.path.exists(f):
os.makedirs(f)
outfile = open(f+'renaming_log.txt','w')
#os.chdir(sys.argv[1])
if test==False:
print "running in test mode add option -r to run"
for filename in os.listdir(s):
# print filename
name=filename.split("_L")
good_name = name[0].split("_")[0]
lane_junk = name[1]
read_number=lane_junk.split("_R")
fastq_number=read_number[1][4]
read_number=read_number[1][0]
if zip==True:
perfect_name= good_name.replace("-","_")+"."+read_number+"."+fastq_number+".fastq.gz"
else:
perfect_name= good_name.replace("-","_")+"."+read_number+"."+fastq_number+".fastq"
# Write file to new name
print("COPYING "+ s+filename + " to "+f+perfect_name)
outfile.write(s+filename + "\t COPIED to \t" + f+perfect_name + "\n")
if test==True:
shutil.copy(s+filename,f+perfect_name)
outfile.close()
|
#import sys
import os
import argparse
import shutil
parser = argparse.ArgumentParser(description='This program takes Miseq fastq files and renames them as sample.read_direction.#.fastq and keeps a log of the change')
parser.add_argument('-s',action='store',dest='s',help='The sorce directory containing the original fastq files')
parser.add_argument('-f',action='store',dest='f',help='The final directory that will hold the renamed fastq files')
parser.add_argument('-run',action='store_true',dest='test',default=False,help='Boolean switch to run program, without this the program runs in test mode: the log is made but no files are renamed')
parser.add_argument('-gz',action='store_true',dest='zip',default=False,help='Boolean switch. Activate when working with .gz files')
args=parser.parse_args()
s=args.s
f=args.f
test=args.test
zip=args.zip
# input argument is fastq directory
if not os.path.exists(f):
os.makedirs(f)
outfile = open(f+'renaming_log.txt','w')
#os.chdir(sys.argv[1])
if test==False:
print "running in test mode add option -r to run"
for filename in os.listdir(s):
print filename
name=filename.split("_L")
good_name = name[0].split("_")[0]
lane_junk = name[1]
read_number=lane_junk.split("_R")
fastq_number=read_number[1][4]
read_number=read_number[1][0]
if zip==True:
perfect_name= good_name.replace("-","_")+"."+read_number+"."+fastq_number+".fastq.gz"
else:
perfect_name= good_name.replace("-","_")+"."+read_number+"."+fastq_number+".fastq"
# Write file to new name
print("COPYING "+ s+filename + " to "+f+perfect_name)
outfile.write(s+filename + "\t COPIED to \t" + f+perfect_name + "\n")
if test==True:
shutil.copy(s+filename,f+perfect_name)
outfile.close()
|
apache-2.0
|
Python
|
6d0bc825a1fd9184bf7b4007bfa82b69e5c7cb35
|
fix search to use sites
|
Anaconda-Platform/anaconda-client,Anaconda-Platform/anaconda-client,Anaconda-Platform/anaconda-client
|
binstar_client/commands/search.py
|
binstar_client/commands/search.py
|
'''
Search binstar for packages
'''
from binstar_client.utils import get_binstar
from binstar_client.utils.pprint import pprint_packages
import logging
log = logging.getLogger('binstar.search')
def search(args):
binstar = get_binstar(args)
log.info("Run 'binstar show <USER/PACKAGE>' to get more details:")
packages = binstar.search(args.name, package_type=args.package_type)
pprint_packages(packages)
log.info("Found %i packages" % len(packages))
def add_parser(subparsers):
parser1 = subparsers.add_parser('search',
help='Search binstar',
description=__doc__)
parser1.add_argument('name', nargs=1, help='Search string')
parser1.add_argument('-t', '--package-type', choices=['conda', 'pypi'],
help='only search for packages of this type')
parser1.set_defaults(main=search)

old_contents:
'''
Search binstar for packages
'''
from binstar_client.utils import get_binstar
from binstar_client.utils.pprint import pprint_packages
import logging
log = logging.getLogger('binstar.search')
def search(args):
binstar = get_binstar()
log.info("Run 'binstar show <USER/PACKAGE>' to get more details:")
packages = binstar.search(args.name, package_type=args.package_type)
pprint_packages(packages)
log.info("Found %i packages" % len(packages))
def add_parser(subparsers):
parser1 = subparsers.add_parser('search',
help='Search binstar',
description=__doc__)
parser1.add_argument('name', nargs=1, help='Search string')
parser1.add_argument('-t', '--package-type', choices=['conda', 'pypi'],
help='only search for packages of this type')
parser1.set_defaults(main=search)

license: bsd-3-clause
lang: Python

commit: 83e071dd64807d1064fdd60ee0788f385b5f9334
subject: Remove noise
repos:
xaque208/dotfiles,xaque208/dotfiles,xaque208/dotfiles
old_file: bin/symlinks.py
new_file: bin/symlinks.py
new_contents:
#! /usr/bin/env python
import os
import fnmatch
def link(source, dest):
try:
if not os.path.exists(dest):
print("linking " + source + " to " + dest)
os.symlink(source,dest)
except:
print("fail")
def dotlink(source):
dest = os.environ['HOME'] + "/." + os.path.basename(source.split('.symlink')[0])
if os.path.exists(dest):
current_target = os.path.realpath(dest)
if current_target != source:
link(source,dest)
else:
link(source,dest)
def find(pattern, path):
result = []
for root, dirs, files in os.walk(path):
for name in files:
if fnmatch.fnmatch(name, pattern):
result.append(os.path.join(root, name))
for name in dirs:
if fnmatch.fnmatch(name, pattern):
result.append(os.path.join(root, name))
return result
def setup():
dotfiles_root = os.environ['HOME'] + '/dotfiles'
print("Finding linkables...")
linkables = find('*.symlink', dotfiles_root)
print("Done.")
for x in linkables:
dotlink(x)
if __name__ == "__main__":
setup()

old_contents:
#! /usr/bin/env python
import os
import fnmatch
def link(source, dest):
try:
if not os.path.exists(dest):
print("linking " + source + " to " + dest)
os.symlink(source,dest)
except:
print("fail")
def dotlink(source):
dest = os.environ['HOME'] + "/." + os.path.basename(source.split('.symlink')[0])
if os.path.exists(dest):
current_target = os.path.realpath(dest)
if current_target != source:
link(source,dest)
else:
print("skipping " + source + " already links to " + dest)
else:
link(source,dest)
def find(pattern, path):
result = []
for root, dirs, files in os.walk(path):
for name in files:
if fnmatch.fnmatch(name, pattern):
result.append(os.path.join(root, name))
for name in dirs:
if fnmatch.fnmatch(name, pattern):
result.append(os.path.join(root, name))
return result
def setup():
dotfiles_root = os.environ['HOME'] + '/dotfiles'
print("Finding linkables...")
linkables = find('*.symlink', dotfiles_root)
print("Done.")
for x in linkables:
dotlink(x)
if __name__ == "__main__":
setup()

license: mit
lang: Python

commit: 6f7d0ce060a29af86bd7cf98de6b6b23bb248fdd
subject: Add missing Bus import in can/__init__.py
repos:
cantools/cantools
old_file: cantools/database/can/__init__.py
new_file: cantools/database/can/__init__.py
new_contents:
from .database import Database
from .message import Message
from .message import EncodeError
from .message import DecodeError
from .signal import Signal
from .node import Node
from .bus import Bus

old_contents:
from .database import Database
from .message import Message
from .message import EncodeError
from .message import DecodeError
from .signal import Signal
from .node import Node

license: mit
lang: Python

commit: 903130b5802f34f619187635fb4b205184abd3d9
subject: Add an example check
repos:
Netuitive/netuitive-client-python
old_file: example/example.py
new_file: example/example.py
new_contents:
import netuitive
import time
import os
ApiClient = netuitive.Client(url=os.environ.get('API_URL'), api_key=os.environ.get('CUSTOM_API_KEY'))
MyElement = netuitive.Element()
MyElement.add_attribute('Language', 'Python')
MyElement.add_attribute('app_version', '7.0')
MyElement.add_relation('my_child_element')
MyElement.add_tag('Production', 'True')
MyElement.add_tag('app_tier', 'True')
timestamp = int(time.mktime(time.localtime()))
MyElement.add_sample('app.error', timestamp, 1, host='appserver01')
MyElement.add_sample('app.request', timestamp, 10, host='appserver01')
ApiClient.post(MyElement)
MyElement.clear_samples()
MyEvent = netuitive.Event('appserver01', 'INFO', 'test event','this is a test message', 'INFO')
ApiClient.post_event(MyEvent)
MyCheck = netuitive.Check('heartbeat', 'element', 60)
ApiClient.post_check(MyCheck)
if ApiClient.time_insync():
print('we have time sync with the server')

old_contents:
import netuitive
import time
import os
ApiClient = netuitive.Client(url=os.environ.get('API_URL'), api_key=os.environ.get('CUSTOM_API_KEY'))
MyElement = netuitive.Element()
MyElement.add_attribute('Language', 'Python')
MyElement.add_attribute('app_version', '7.0')
MyElement.add_relation('my_child_element')
MyElement.add_tag('Production', 'True')
MyElement.add_tag('app_tier', 'True')
timestamp = int(time.mktime(time.localtime()))
MyElement.add_sample('app.error', timestamp, 1, host='appserver01')
MyElement.add_sample('app.request', timestamp, 10, host='appserver01')
ApiClient.post(MyElement)
MyElement.clear_samples()
MyEvent = netuitive.Event('appserver01', 'INFO', 'test event','this is a test message', 'INFO')
ApiClient.post_event(MyEvent)
if ApiClient.time_insync():
print('we have time sync with the server')

license: apache-2.0
lang: Python

commit: 6909fc497041761eadb5a8b8947eeb21b7fdbcc8
subject: use GetManager method in example
repos:
detrout/telepathy-python,epage/telepathy-python,freedesktop-unofficial-mirror/telepathy__telepathy-python,max-posedon/telepathy-python,max-posedon/telepathy-python,freedesktop-unofficial-mirror/telepathy__telepathy-python,epage/telepathy-python,PabloCastellano/telepathy-python,PabloCastellano/telepathy-python,detrout/telepathy-python
old_file: examples/avatar.py
new_file: examples/avatar.py
new_contents:
"""
Telepathy example which requests the avatar for the user's own handle and
displays it in a Gtk window.
"""
import dbus.glib
import gtk
import sys
from telepathy.constants import CONNECTION_STATUS_CONNECTED
from telepathy.interfaces import (
CONN_MGR_INTERFACE, CONN_INTERFACE, CONN_INTERFACE_AVATARS)
import telepathy.client
def parse_account(s):
lines = s.splitlines()
pairs = []
for line in lines:
k, v = line.split(':', 1)
k = k.strip()
v = v.strip()
pairs.append((k, v))
return dict(pairs)
def window_closed_cb(window):
gtk.main_quit()
def status_changed_cb(state, reason):
if state != CONNECTION_STATUS_CONNECTED:
return
handle = conn[CONN_INTERFACE].GetSelfHandle()
tokens = conn[CONN_INTERFACE_AVATARS].GetAvatarTokens([handle])
print 'token:', tokens[0]
image, mime = conn[CONN_INTERFACE_AVATARS].RequestAvatar(handle)
image = ''.join(chr(i) for i in image)
window = gtk.Window()
loader = gtk.gdk.PixbufLoader()
loader.write(image)
loader.close()
image = gtk.Image()
image.set_from_pixbuf(loader.get_pixbuf())
window.add(image)
window.show_all()
window.connect('destroy', gtk.main_quit)
if __name__ == '__main__':
if len(sys.argv) > 1:
account_file = sys.argv[1]
else:
account_file = 'account'
reg = telepathy.client.ManagerRegistry()
reg.LoadManagers()
account = parse_account(file(account_file).read())
manager = account['manager']
protocol = account['protocol']
del account['manager']
del account['protocol']
mgr = reg.GetManager(manager)
conn_bus_name, conn_object_path = mgr[CONN_MGR_INTERFACE].Connect(
protocol, account)
conn = telepathy.client.Connection(conn_bus_name, conn_object_path)
conn[CONN_INTERFACE].connect_to_signal('StatusChanged', status_changed_cb)
gtk.main()
conn[CONN_INTERFACE].Disconnect()

old_contents:
"""
Telepathy example which requests the avatar for the user's own handle and
displays it in a Gtk window.
"""
import dbus.glib
import gtk
import sys
from telepathy.constants import CONNECTION_STATUS_CONNECTED
from telepathy.interfaces import (
CONN_MGR_INTERFACE, CONN_INTERFACE, CONN_INTERFACE_AVATARS)
import telepathy.client
def parse_account(s):
lines = s.splitlines()
pairs = []
for line in lines:
k, v = line.split(':', 1)
k = k.strip()
v = v.strip()
pairs.append((k, v))
return dict(pairs)
def window_closed_cb(window):
gtk.main_quit()
def status_changed_cb(state, reason):
if state != CONNECTION_STATUS_CONNECTED:
return
handle = conn[CONN_INTERFACE].GetSelfHandle()
tokens = conn[CONN_INTERFACE_AVATARS].GetAvatarTokens([handle])
print 'token:', tokens[0]
image, mime = conn[CONN_INTERFACE_AVATARS].RequestAvatar(handle)
image = ''.join(chr(i) for i in image)
window = gtk.Window()
loader = gtk.gdk.PixbufLoader()
loader.write(image)
loader.close()
image = gtk.Image()
image.set_from_pixbuf(loader.get_pixbuf())
window.add(image)
window.show_all()
window.connect('destroy', gtk.main_quit)
if __name__ == '__main__':
if len(sys.argv) > 1:
account_file = sys.argv[1]
else:
account_file = 'account'
reg = telepathy.client.ManagerRegistry()
reg.LoadManagers()
account = parse_account(file(account_file).read())
manager = account['manager']
protocol = account['protocol']
del account['manager']
del account['protocol']
mgr_bus_name = reg.GetBusName(manager)
mgr_object_path = reg.GetObjectPath(manager)
mgr = telepathy.client.ConnectionManager(mgr_bus_name, mgr_object_path)
conn_bus_name, conn_object_path = mgr[CONN_MGR_INTERFACE].Connect(
protocol, account)
conn = telepathy.client.Connection(conn_bus_name, conn_object_path)
conn[CONN_INTERFACE].connect_to_signal('StatusChanged', status_changed_cb)
gtk.main()
conn[CONN_INTERFACE].Disconnect()

license: lgpl-2.1
lang: Python

commit: cda7e0d2242e5cc3dafca63a3af01f150fcd37be
subject: Fix seeds for new names
repos:
HPI-SWA-Lab/BP2016H1,HPI-SWA-Lab/BP2016H1,HPI-SWA-Lab/BP2016H1,HPI-SWA-Lab/BP2016H1,HPI-SWA-Lab/BP2016H1
old_file: server/seed.py
new_file: server/seed.py
new_contents:
from tables import *
fira = Font(fontName='Fira Sans Regular', family_id=1, author_id=1)
fira.tags.append(Tag(text='#pretty', type='opinion'))
fira.tags.append(Tag(text='Latin', type='language'))
thread1 = Thread(title='I don\'t like this word')
thread1.glyphs.append(Glyph(glyphName='A', version_hash='9c7075ca420f30aedb27c48102466313fa4d12c8', font_id=1))
thread1.glyphs.append(Glyph(glyphName='a', version_hash='9c7075ca420f30aedb27c48102466313fa4d12c8', font_id=1))
thread1.glyphs.append(Glyph(glyphName='s', version_hash='9c7075ca420f30aedb27c48102466313fa4d12c8', font_id=1))
entities = [
User(userName='Eva', password='eveisevil'),
User(userName='Tom', password='safepwissafe'),
Family(familyName='Fira'),
fira,
thread1,
Codepoint(unicodeValue=0x0041, pointSize=12.5, features='liga', thread_id=1, font_id=1),
Codepoint(unicodeValue=0x0061, pointSize=12.5, features='liga', thread_id=1, font_id=1),
Codepoint(unicodeValue=0x0073, pointSize=12.5, features='liga', thread_id=1, font_id=1),
Comment(text='why would anyone comment on aas', author_id=2, thread_id=1),
Comment(text='because.', author_id=1, thread_id=1),
SampleText(title='Evil Wizards', text='Mad wizards brew evil jack with horses', author_id=2)
]

old_contents:
from tables import *
fira = Font(name='Fira Sans Regular', family_id=1, author_id=1)
fira.tags.append(Tag(text='#pretty', type='opinion'))
fira.tags.append(Tag(text='Latin', type='language'))
thread1 = Thread(title='I don\'t like this word')
thread1.glyphs.append(Glyph(name='A', version_hash='9c7075ca420f30aedb27c48102466313fa4d12c8', font_id=1))
thread1.glyphs.append(Glyph(name='a', version_hash='9c7075ca420f30aedb27c48102466313fa4d12c8', font_id=1))
thread1.glyphs.append(Glyph(name='s', version_hash='9c7075ca420f30aedb27c48102466313fa4d12c8', font_id=1))
entities = [
User(name='Eva', password='eveisevil'),
User(name='Tom', password='safepwissafe'),
Family(name='Fira'),
fira,
thread1,
Codepoint(value=0x0041, size=12.5, features='liga', thread_id=1, font_id=1),
Codepoint(value=0x0061, size=12.5, features='liga', thread_id=1, font_id=1),
Codepoint(value=0x0073, size=12.5, features='liga', thread_id=1, font_id=1),
Comment(text='why would anyone comment on aas', author_id=2, thread_id=1),
Comment(text='because.', author_id=1, thread_id=1),
SampleText(title='Evil Wizards', text='Mad wizards brew evil jack with horses', author_id=2)
]

license: mit
lang: Python

commit: 67f535f92d79de05aa10e86da3cdd635bc71537b
subject: Use proper stacklevel for deprecation warnings
repos:
scrapy/w3lib
old_file: w3lib/util.py
new_file: w3lib/util.py
new_contents:
from warnings import warn
def str_to_unicode(text, encoding=None, errors='strict'):
warn(
"The w3lib.utils.str_to_unicode function is deprecated and "
"will be removed in a future release.",
DeprecationWarning,
stacklevel=2,
)
if encoding is None:
encoding = 'utf-8'
if isinstance(text, bytes):
return text.decode(encoding, errors)
return text
def unicode_to_str(text, encoding=None, errors='strict'):
warn(
"The w3lib.utils.unicode_to_str function is deprecated and "
"will be removed in a future release.",
DeprecationWarning,
stacklevel=2,
)
if encoding is None:
encoding = 'utf-8'
if isinstance(text, str):
return text.encode(encoding, errors)
return text
def to_unicode(text, encoding=None, errors='strict'):
"""Return the unicode representation of a bytes object `text`. If `text`
is already an unicode object, return it as-is."""
if isinstance(text, str):
return text
if not isinstance(text, (bytes, str)):
raise TypeError('to_unicode must receive a bytes, str or unicode '
'object, got %s' % type(text).__name__)
if encoding is None:
encoding = 'utf-8'
return text.decode(encoding, errors)
def to_bytes(text, encoding=None, errors='strict'):
"""Return the binary representation of `text`. If `text`
is already a bytes object, return it as-is."""
if isinstance(text, bytes):
return text
if not isinstance(text, str):
raise TypeError('to_bytes must receive a unicode, str or bytes '
'object, got %s' % type(text).__name__)
if encoding is None:
encoding = 'utf-8'
return text.encode(encoding, errors)
def to_native_str(text, encoding=None, errors='strict'):
""" Return str representation of `text` """
warn(
"The w3lib.utils.to_native_str function is deprecated and "
"will be removed in a future release. Please use "
"w3lib.utils.to_unicode instead.",
DeprecationWarning,
stacklevel=2,
)
return to_unicode(text, encoding, errors)

old_contents:
from warnings import warn
def str_to_unicode(text, encoding=None, errors='strict'):
warn(
"The w3lib.utils.str_to_unicode function is deprecated and "
"will be removed in a future release.",
DeprecationWarning
)
if encoding is None:
encoding = 'utf-8'
if isinstance(text, bytes):
return text.decode(encoding, errors)
return text
def unicode_to_str(text, encoding=None, errors='strict'):
warn(
"The w3lib.utils.unicode_to_str function is deprecated and "
"will be removed in a future release.",
DeprecationWarning
)
if encoding is None:
encoding = 'utf-8'
if isinstance(text, str):
return text.encode(encoding, errors)
return text
def to_unicode(text, encoding=None, errors='strict'):
"""Return the unicode representation of a bytes object `text`. If `text`
is already an unicode object, return it as-is."""
if isinstance(text, str):
return text
if not isinstance(text, (bytes, str)):
raise TypeError('to_unicode must receive a bytes, str or unicode '
'object, got %s' % type(text).__name__)
if encoding is None:
encoding = 'utf-8'
return text.decode(encoding, errors)
def to_bytes(text, encoding=None, errors='strict'):
"""Return the binary representation of `text`. If `text`
is already a bytes object, return it as-is."""
if isinstance(text, bytes):
return text
if not isinstance(text, str):
raise TypeError('to_bytes must receive a unicode, str or bytes '
'object, got %s' % type(text).__name__)
if encoding is None:
encoding = 'utf-8'
return text.encode(encoding, errors)
def to_native_str(text, encoding=None, errors='strict'):
""" Return str representation of `text` """
warn(
"The w3lib.utils.to_native_str function is deprecated and "
"will be removed in a future release. Please use "
"w3lib.utils.to_unicode instead.",
DeprecationWarning
)
return to_unicode(text, encoding, errors)

license: bsd-3-clause
lang: Python
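
note: the only substantive change in the record above is passing stacklevel=2 to
warnings.warn. With the default stacklevel=1 the warning is attributed to the
warn() call inside the deprecated helper; stacklevel=2 attributes it to the
caller's line, which is the location a library user actually needs to see. A
minimal standalone sketch of the effect (illustrative, not taken from w3lib):

import warnings

def deprecated_helper():
    # stacklevel=2 points the reported filename/lineno at our caller,
    # not at this warn() call.
    warnings.warn("deprecated_helper is deprecated", DeprecationWarning,
                  stacklevel=2)

warnings.simplefilter("always")
deprecated_helper()  # the DeprecationWarning is reported against this line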

commit: 24f7d137c7a0f58625543858b8f4a09f1dead859
subject: Update client.py
repos:
tamasgal/controlhost
old_file: examples/client.py
new_file: examples/client.py
new_contents:
from controlhost import Client
with Client('127.0.0.1') as client:
client.subscribe('foo')
try:
while True:
prefix, message = client.get_message()
print prefix.tag
print prefix.length
print message
except KeyboardInterrupt:
client._disconnect()

old_contents:
from controlhost import Client
with Client('131.188.161.241') as client:
client.subscribe('foo')
try:
while True:
prefix, message = client.get_message()
print prefix.tag
print prefix.length
print message
except KeyboardInterrupt:
client._disconnect()

license: mit
lang: Python

commit: a475173ce00b2d6686c601ffc46a8d2bc3ed0a7f
subject: Switch back to development version
repos:
goldmann/dogen,goldmann/dogen,jboss-container-images/concreate,jboss-container-images/concreate,jboss-dockerfiles/dogen,jboss-container-images/concreate,jboss-dockerfiles/dogen,goldmann/dogen,jboss-dockerfiles/dogen
old_file: dogen/version.py
new_file: dogen/version.py
new_contents:
version = "2.1.0rc1.dev"

old_contents:
version = "2.0.0"

license: mit
lang: Python

commit: c7439eb0d8a88a3a3584a3e73ed9badc910dcd05
subject: Move newrelic initialization to the very start of wsgi initialization
repos:
DXCanas/content-curation,DXCanas/content-curation,DXCanas/content-curation,DXCanas/content-curation
old_file: contentcuration/contentcuration/wsgi.py
new_file: contentcuration/contentcuration/wsgi.py
new_contents:
"""
WSGI config for contentcuration project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import logging
import os
# Attach newrelic APM
try:
import newrelic.agent
newrelic.agent.initialize()
except ImportError:
pass
try:
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "contentcuration.settings")
application = get_wsgi_application()
except ImportError:
logging.warn("Django's WSGI wasn't successfully imported!")

old_contents:
"""
WSGI config for contentcuration project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "contentcuration.settings")
# Attach newrelic APM
try:
import newrelic.agent
newrelic.agent.initialize()
except ImportError:
pass
application = get_wsgi_application()

license: mit
lang: Python

commit: 55d54f67111583dab4209639ef8e3d6430ea7939
subject: Handle oversteer in turns.
repos:
jeradesign/QuickBot_Follow,jeradesign/QuickBot_Follow
old_file: src/Command_Interpreter.py
new_file: src/Command_Interpreter.py
new_contents:
# Motor driver for QuickBot_Follow.
# John Brewer 3/31/16
# Copyright (C) 2016 Jera Design LLC
# All Rights Reserverd
import Motor_Driver
import sys
from time import sleep
Motor_Driver.init_pins()
print "Ready"
last = ""
count = 0
while True:
line = sys.stdin.readline().rstrip()
if not line:
break;
if line == "left":
print "turn left"
if last != "left":
count = 10
last = "left"
continue
if count > 0:
count = count - 1
continue
Motor_Driver.move(-90, 88, 0.01)
if line == "right":
print "turn right"
if last != "right":
count = 10
last = "right"
continue
if count > 0:
count = count - 1
continue
Motor_Driver.move(90, -88, 0.01)
if line == "straight":
print "go straight"
last = "straight"
Motor_Driver.move(70, 68, 0.05)
else:
print "STOP"
# last = "STOP"

old_contents:
# Motor driver for QuickBot_Follow.
# John Brewer 3/31/16
# Copyright (C) 2016 Jera Design LLC
# All Rights Reserverd
import Motor_Driver
import sys
Motor_Driver.init_pins()
print "Ready"
while True:
line = sys.stdin.readline().rstrip()
if not line:
break;
if line == "left":
print "turn left"
Motor_Driver.move(-70, 70, 0.05)
elif line == "right":
print "turn right"
Motor_Driver.move(70, -70, 0.05)
elif line == "straight":
print "go straight"
Motor_Driver.move(100, 98, 0.095)
else:
print "STOP"

license: bsd-3-clause
lang: Python
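
note: the rewritten loop above debounces steering: after a change of command it
skips the next 10 repeated "left"/"right" lines before acting, which is what
counters the oversteer named in the subject. The same idea as a generic
generator, sketched under the assumption of an iterable command stream (the
function name and threshold are illustrative, not from the repository):

def debounce(commands, threshold=10):
    """Yield a command only once it has repeated `threshold` times in a row."""
    last, count = None, 0
    for cmd in commands:
        if cmd != last:
            last, count = cmd, 0   # new command: restart the count
            continue
        count += 1
        if count >= threshold:
            yield cmd              # stable long enough to act on

for cmd in debounce(["left"] * 12, threshold=10):
    print(cmd)  # prints "left" twice: on repeats 10 and 11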

commit: 6486a888cbcec7285df92020f76e3f1c5fbba0e2
subject: Load exchange rates in test setup. Make it posible to use --keepdb
repos:
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
old_file: bluebottle/test/test_runner.py
new_file: bluebottle/test/test_runner.py
new_contents:
from django.test.runner import DiscoverRunner
from django.db import connection
from django.core import management
from tenant_schemas.utils import get_tenant_model
from bluebottle.test.utils import InitProjectDataMixin
class MultiTenantRunner(DiscoverRunner, InitProjectDataMixin):
def setup_databases(self, *args, **kwargs):
result = super(MultiTenantRunner, self).setup_databases(*args, **kwargs)
# Create secondary tenant
connection.set_schema_to_public()
tenant_domain = 'testserver2'
tenant2, _created = get_tenant_model().objects.get_or_create(
domain_url=tenant_domain,
schema_name='test2',
client_name='test2')
tenant2.save(
verbosity=self.verbosity)
# Add basic data for tenant
connection.set_tenant(tenant2)
self.init_projects()
# Create main tenant
connection.set_schema_to_public()
management.call_command('loaddata', 'exchange_rates.json', verbosity=1)
tenant_domain = 'testserver'
tenant, _created = get_tenant_model().objects.get_or_create(
domain_url=tenant_domain,
schema_name='test',
client_name='test')
tenant.save(
verbosity=self.verbosity)
connection.set_tenant(tenant)
return result

old_contents:
from django.test.runner import DiscoverRunner
from django.db import connection
from tenant_schemas.utils import get_tenant_model
from bluebottle.test.utils import InitProjectDataMixin
class MultiTenantRunner(DiscoverRunner, InitProjectDataMixin):
def setup_databases(self, *args, **kwargs):
result = super(MultiTenantRunner, self).setup_databases(*args, **kwargs)
# Create secondary tenant
connection.set_schema_to_public()
tenant_domain = 'testserver2'
tenant2 = get_tenant_model()(
domain_url=tenant_domain,
schema_name='test2',
client_name='test2')
tenant2.save(
verbosity=self.verbosity)
# Add basic data for tenant
connection.set_tenant(tenant2)
self.init_projects()
# Create main tenant
connection.set_schema_to_public()
tenant_domain = 'testserver'
tenant = get_tenant_model()(
domain_url=tenant_domain,
schema_name='test',
client_name='test')
tenant.save(
verbosity=self.verbosity)
connection.set_tenant(tenant)
return result

license: bsd-3-clause
lang: Python
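
note: in the record above, the switch from constructing tenants directly to
get_or_create is what makes --keepdb viable: get_or_create returns an
(instance, created) tuple and only inserts when no matching row exists, so the
setup can run against a kept database without tripping uniqueness constraints
(the other half of the subject, loading exchange_rates.json, is the
management.call_command line). The pattern in isolation, with a hypothetical
model for illustration:

from myapp.models import Tenant  # hypothetical model, not from bluebottle

tenant, created = Tenant.objects.get_or_create(
    schema_name='test',                 # lookup fields identify the row
    defaults={'client_name': 'test'},   # applied only on first insert
)
if created:
    print('inserted a new tenant')
else:
    print('reusing the existing tenant')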

commit: d9a9cb9004ddc20d92441df50d3a0f73432803bb
subject: Remove import only used for debugging
repos:
ska-sa/katdal
old_file: scripts/mvf_read_benchmark.py
new_file: scripts/mvf_read_benchmark.py
new_contents:
#!/usr/bin/env python
from __future__ import print_function, division, absolute_import
from builtins import range
import argparse
import logging
import time
import katdal
from katdal.lazy_indexer import DaskLazyIndexer
import numpy as np
parser = argparse.ArgumentParser()
parser.add_argument('filename')
parser.add_argument('--time', type=int, default=10, help='Number of times to read per batch')
parser.add_argument('--channels', type=int, help='Number of channels to read')
parser.add_argument('--dumps', type=int, help='Number of times to read')
parser.add_argument('--joint', action='store_true', help='Load vis, weights, flags together')
parser.add_argument('--applycal', help='Calibration solutions to apply')
args = parser.parse_args()
logging.basicConfig(level='INFO', format='%(asctime)s [%(levelname)s] %(message)s')
logging.info('Starting')
kwargs = {}
if args.applycal is not None:
kwargs['applycal'] = args.applycal
f = katdal.open(args.filename, **kwargs)
logging.info('File loaded, shape %s', f.shape)
if args.channels:
f.select(channels=np.s_[:args.channels])
if args.dumps:
f.select(dumps=np.s_[:args.dumps])
start = time.time()
for st in range(0, f.shape[0], args.time):
et = st + args.time
if args.joint:
vis, weights, flags = DaskLazyIndexer.get([f.vis, f.weights, f.flags], np.s_[st:et])
else:
vis = f.vis[st:et]
weights = f.weights[st:et]
flags = f.flags[st:et]
logging.info('Loaded %d dumps', vis.shape[0])
size = np.product(f.shape) * 10
elapsed = time.time() - start
logging.info('Loaded %d bytes in %.3f s (%.3f MB/s)', size, elapsed, size / elapsed / 1e6)

old_contents:
#!/usr/bin/env python
from __future__ import print_function, division, absolute_import
from builtins import range
import argparse
import logging
import time
import dask
import katdal
from katdal.lazy_indexer import DaskLazyIndexer
import numpy as np
parser = argparse.ArgumentParser()
parser.add_argument('filename')
parser.add_argument('--time', type=int, default=10, help='Number of times to read per batch')
parser.add_argument('--channels', type=int, help='Number of channels to read')
parser.add_argument('--dumps', type=int, help='Number of times to read')
parser.add_argument('--joint', action='store_true', help='Load vis, weights, flags together')
parser.add_argument('--applycal', help='Calibration solutions to apply')
args = parser.parse_args()
logging.basicConfig(level='INFO', format='%(asctime)s [%(levelname)s] %(message)s')
logging.info('Starting')
kwargs = {}
if args.applycal is not None:
kwargs['applycal'] = args.applycal
f = katdal.open(args.filename, **kwargs)
logging.info('File loaded, shape %s', f.shape)
if args.channels:
f.select(channels=np.s_[:args.channels])
if args.dumps:
f.select(dumps=np.s_[:args.dumps])
start = time.time()
for st in range(0, f.shape[0], args.time):
et = st + args.time
if args.joint:
vis, weights, flags = DaskLazyIndexer.get([f.vis, f.weights, f.flags], np.s_[st:et])
else:
vis = f.vis[st:et]
weights = f.weights[st:et]
flags = f.flags[st:et]
logging.info('Loaded %d dumps', vis.shape[0])
size = np.product(f.shape) * 10
elapsed = time.time() - start
logging.info('Loaded %d bytes in %.3f s (%.3f MB/s)', size, elapsed, size / elapsed / 1e6)

license: bsd-3-clause
lang: Python

commit: 873c5e8bf85a8be5a08852134967d29353ed3009
subject: Swap ndcms for generic T3 string.
repos:
matz-e/lobster,matz-e/lobster,matz-e/lobster
old_file: examples/simple.py
new_file: examples/simple.py
new_contents:
from lobster import cmssw
from lobster.core import *
storage = StorageConfiguration(
output=[
"hdfs:///store/user/matze/test_shuffle_take29",
"file:///hadoop/store/user/matze/test_shuffle_take29",
"root://T3_US_NotreDame/store/user/matze/test_shuffle_take29",
"srm://T3_US_NotreDame/store/user/matze/test_shuffle_take29",
]
)
processing = Category(
name='processing',
cores=1,
runtime=900,
memory=1000
)
workflows = []
single_mu = Workflow(
label='single_mu',
dataset=cmssw.Dataset(
dataset='/SingleMu/Run2012A-recover-06Aug2012-v1/AOD',
events_per_task=5000
),
category=processing,
pset='slim.py',
publish_label='test',
merge_size='3.5G',
outputs=['output.root']
)
workflows.append(single_mu)
config = Config(
label='shuffle',
workdir='/tmpscratch/users/matze/test_shuffle_take30',
plotdir='/afs/crc.nd.edu/user/m/mwolf3/www/lobster/test_shuffle_take29',
storage=storage,
workflows=workflows,
advanced=AdvancedOptions(log_level=1)
)

old_contents:
from lobster import cmssw
from lobster.core import *
storage = StorageConfiguration(
output=[
"hdfs:///store/user/matze/test_shuffle_take29",
"file:///hadoop/store/user/matze/test_shuffle_take29",
"root://ndcms.crc.nd.edu//store/user/matze/test_shuffle_take29",
"srm://T3_US_NotreDame/store/user/matze/test_shuffle_take29",
]
)
processing = Category(
name='processing',
cores=1,
runtime=900,
memory=1000
)
workflows = []
single_mu = Workflow(
label='single_mu',
dataset=cmssw.Dataset(
dataset='/SingleMu/Run2012A-recover-06Aug2012-v1/AOD',
events_per_task=5000
),
category=processing,
pset='slim.py',
publish_label='test',
merge_size='3.5G',
outputs=['output.root']
)
workflows.append(single_mu)
config = Config(
label='shuffle',
workdir='/tmpscratch/users/matze/test_shuffle_take30',
plotdir='/afs/crc.nd.edu/user/m/mwolf3/www/lobster/test_shuffle_take29',
storage=storage,
workflows=workflows,
advanced=AdvancedOptions(log_level=1)
)

license: mit
lang: Python

commit: 6beff62ef9741cfe5ed0443250f5a93d04d74bca
subject: Create UserCandidate model
repos:
OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft
old_file: packages/grid/backend/grid/api/users/models.py
new_file: packages/grid/backend/grid/api/users/models.py
new_contents:
# stdlib
from typing import Optional
from typing import Union
# third party
from nacl.encoding import HexEncoder
from nacl.signing import SigningKey
from pydantic import BaseModel
from pydantic import EmailStr
class BaseUser(BaseModel):
email: Optional[EmailStr]
name: Optional[str]
role: Union[Optional[int], Optional[str]] # TODO: Should be int in SyftUser
daa_pdf: Optional[bytes] = b""
class Config:
orm_mode = True
class UserCreate(BaseUser):
email: EmailStr
role: str = "Data Scientist"
name: str
password: str
class UserUpdate(BaseUser):
password: Optional[str]
budget: Optional[float]
class UserCandidate(BaseUser):
email: EmailStr
status: str = "pending"
name: str
class User(BaseUser):
id: int
role: Union[int, str] # TODO: This should be int. Perhaps add role_name instead?
budget_spent: Optional[float]
class UserPrivate(User):
private_key: str
def get_signing_key(self) -> SigningKey:
return SigningKey(self.private_key.encode(), encoder=HexEncoder)
class UserSyft(User):
hashed_password: str
salt: str
verify_key: str

old_contents:
# stdlib
from typing import Optional
from typing import Union
# third party
from nacl.encoding import HexEncoder
from nacl.signing import SigningKey
from pydantic import BaseModel
from pydantic import EmailStr
class BaseUser(BaseModel):
email: Optional[EmailStr]
name: Optional[str]
role: Union[Optional[int], Optional[str]] # TODO: Should be int in SyftUser
daa_pdf: Optional[bytes] = b""
class Config:
orm_mode = True
class UserCreate(BaseUser):
email: EmailStr
role: str = "Data Scientist"
name: str
password: str
class UserUpdate(BaseUser):
password: Optional[str]
budget: Optional[float]
class User(BaseUser):
id: int
role: Union[int, str] # TODO: This should be int. Perhaps add role_name instead?
budget_spent: Optional[float]
class UserPrivate(User):
private_key: str
def get_signing_key(self) -> SigningKey:
return SigningKey(self.private_key.encode(), encoder=HexEncoder)
class UserSyft(User):
hashed_password: str
salt: str
verify_key: str

license: apache-2.0
lang: Python

commit: 7db2f2f9124fd82bbcaf8eabea9ff57306796f58
subject: Fix relative path to .gitignore and other minor changes.
repos:
sippet/webrtc,sippet/webrtc,sippet/webrtc,sippet/webrtc,sippet/webrtc,sippet/webrtc
old_file: build/extra_gitignore.py
new_file: build/extra_gitignore.py
new_contents:
#!/usr/bin/env python
# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
""" Adds extra patterns to the root .gitignore file.
Reads the contents of the filename given as the first argument and appends
them to the root .gitignore file. The new entires are intended to be additional
ignoring patterns, or negating patterns to override existing entries (man
gitignore for more details).
"""
import os
import sys
MODIFY_STRING = '# The following added by %s\n'
def main(argv):
if not argv[1]:
# Special case; do nothing.
return 0
modify_string = MODIFY_STRING % argv[0]
gitignore_file = os.path.dirname(argv[0]) + '/../../.gitignore'
lines = open(gitignore_file, 'r').readlines()
for i, line in enumerate(lines):
# Look for modify_string in the file to ensure we don't append the extra
# patterns more than once.
if line == modify_string:
lines = lines[:i]
break
lines.append(modify_string)
f = open(gitignore_file, 'w')
f.write(''.join(lines))
f.write(open(argv[1], 'r').read())
f.close()
if __name__ == '__main__':
sys.exit(main(sys.argv))

old_contents:
#!/usr/bin/env python
# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
""" Adds extra patterns to the root .gitignore file.
Reads the contents of the filename given as the first argument and appends
them to the root .gitignore file. The new entires are intended to be additional
ignoring patterns, or negating patterns to override existing entries (man
gitignore for more details).
"""
import os
import sys
MODIFY_STRING = '# The following added by %s\n'
def main(argv):
if not argv[1]:
# Special case; do nothing.
return 0
modify_string = (MODIFY_STRING % argv[0])
gitignore_file = os.path.dirname(argv[0]) + '/../.gitignore'
lines = open(gitignore_file, 'r').readlines()
for i, line in enumerate(lines):
if line == modify_string:
lines = lines[:i]
break
lines.append(modify_string)
f = open(gitignore_file, 'w')
f.write(''.join(lines))
f.write(open(argv[1], 'r').read())
f.close()
if __name__ == '__main__':
sys.exit(main(sys.argv))

license: bsd-3-clause
lang: Python

commit: 86429b75bea758627eeef930b604e819089435a7
subject: fix missing toUpper for location message
repos:
biji/yowsup,ongair/yowsup
old_file: yowsup/layers/protocol_media/layer.py
new_file: yowsup/layers/protocol_media/layer.py
new_contents:
from yowsup.layers import YowLayer, YowLayerEvent, YowProtocolLayer
from .protocolentities import ImageDownloadableMediaMessageProtocolEntity
from .protocolentities import LocationMediaMessageProtocolEntity
from .protocolentities import VCardMediaMessageProtocolEntity
class YowMediaProtocolLayer(YowProtocolLayer):
def __init__(self):
handleMap = {
"message": (self.recvMessageStanza, self.sendMessageEntity)
}
super(YowMediaProtocolLayer, self).__init__(handleMap)
def __str__(self):
return "Media Layer"
def sendMessageEntity(self, entity):
if entity.getType() == "media":
self.entityToLower(entity)
###recieved node handlers handlers
def recvMessageStanza(self, node):
if node.getAttributeValue("type") == "media":
mediaNode = node.getChild("media")
if mediaNode.getAttributeValue("type") == "image":
entity = ImageDownloadableMediaMessageProtocolEntity.fromProtocolTreeNode(node)
self.toUpper(entity)
elif mediaNode.getAttributeValue("type") == "location":
entity = LocationMediaMessageProtocolEntity.fromProtocolTreeNode(node)
self.toUpper(entity)
elif mediaNode.getAttributeValue("type") == "vcard":
entity = VCardMediaMessageProtocolEntity.fromProtocolTreeNode(node)
self.toUpper(entity)

old_contents:
from yowsup.layers import YowLayer, YowLayerEvent, YowProtocolLayer
from .protocolentities import ImageDownloadableMediaMessageProtocolEntity
from .protocolentities import LocationMediaMessageProtocolEntity
from .protocolentities import VCardMediaMessageProtocolEntity
class YowMediaProtocolLayer(YowProtocolLayer):
def __init__(self):
handleMap = {
"message": (self.recvMessageStanza, self.sendMessageEntity)
}
super(YowMediaProtocolLayer, self).__init__(handleMap)
def __str__(self):
return "Media Layer"
def sendMessageEntity(self, entity):
if entity.getType() == "media":
self.entityToLower(entity)
###recieved node handlers handlers
def recvMessageStanza(self, node):
if node.getAttributeValue("type") == "media":
mediaNode = node.getChild("media")
if mediaNode.getAttributeValue("type") == "image":
entity = ImageDownloadableMediaMessageProtocolEntity.fromProtocolTreeNode(node)
self.toUpper(entity)
elif mediaNode.getAttributeValue("type") == "location":
entity = LocationMediaMessageProtocolEntity.fromProtocolTreeNode(node)
elif mediaNode.getAttributeValue("type") == "vcard":
entity = VCardMediaMessageProtocolEntity.fromProtocolTreeNode(node)
self.toUpper(entity)

license: mit
lang: Python

commit: 2f50e7e71b124ae42cab5edb19c030fcc69a4ef5
subject: Fix failing attribute lookups
repos:
maferelo/saleor,HyperManTT/ECommerceSaleor,car3oon/saleor,itbabu/saleor,itbabu/saleor,jreigel/saleor,HyperManTT/ECommerceSaleor,car3oon/saleor,UITools/saleor,mociepka/saleor,UITools/saleor,UITools/saleor,KenMutemi/saleor,jreigel/saleor,KenMutemi/saleor,mociepka/saleor,HyperManTT/ECommerceSaleor,tfroehlich82/saleor,KenMutemi/saleor,maferelo/saleor,itbabu/saleor,tfroehlich82/saleor,tfroehlich82/saleor,mociepka/saleor,UITools/saleor,maferelo/saleor,UITools/saleor,car3oon/saleor,jreigel/saleor
old_file: saleor/product/models/utils.py
new_file: saleor/product/models/utils.py
new_contents:
from django.utils.encoding import smart_text
def get_attributes_display_map(variant, attributes):
display = {}
for attribute in attributes:
value = variant.get_attribute(attribute.pk)
if value:
choices = {smart_text(a.pk): a for a in attribute.values.all()}
attr = choices.get(value)
if attr:
display[attribute.pk] = attr
else:
display[attribute.pk] = value
return display

old_contents:
from django.utils.encoding import smart_text
def get_attributes_display_map(variant, attributes):
print "in get_attributes_display_map with " + str(variant) + " and " + str(attributes)
display = {}
for attribute in attributes:
value = variant.get_attribute(attribute.pk)
if value:
choices = {smart_text(a.pk): a for a in attribute.values.all()}
attr = choices.get(value)
if attr:
display[attribute.pk] = attr
else:
display[attribute.pk] = value
return display

license: bsd-3-clause
lang: Python

commit: 6a8fba9bc6bb1108b048947b7ffc10c0904fba14
subject: Move plugin loading to separate function
repos:
cmende/pytelefoob0t
old_file: foob0t.py
new_file: foob0t.py
new_contents:
# Copyright 2017 Christoph Mende
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import time
import telepot
from telepot.loop import MessageLoop
import plugin_loader
commands = {}
users = {}
def load_plugins():
for i in plugin_loader.get_plugins():
print('Loading plugin ' + i['name'])
plugin = plugin_loader.load_plugin(i)
commands.update(plugin.commands)
def handle(msg):
content_type, chat_type, chat_id = telepot.glance(msg)
print(content_type, chat_type, chat_id)
# reject non-text messages
if content_type != 'text':
return
# split message in command (first word) and args (rest)
argv = msg['text'].strip().split(' ', 1)
command = argv[0].lower()
args = None
if len(argv) == 2:
args = argv[1]
# reject non-commands
if not command.startswith('/'):
return
# strip / from command
command = command[1:]
# strip username from command
if command.endswith('@'+username):
command = command[:-len(username)-1]
# search for plugin handling command
for c in commands:
if c != command:
continue
# found it => look up user
uid = msg['from']['id']
user = users.setdefault(uid, msg['from'])
retval = commands[c](user, args)
bot.sendMessage(chat_id, retval)
if len(sys.argv) < 2:
sys.exit('Usage: %s <telegram api token>' % sys.argv[0])
bot = telepot.Bot(sys.argv[1])
load_plugins()
username = bot.getMe()['username']
MessageLoop(bot, handle).run_as_thread()
while 1:
time.sleep(10)

old_contents:
# Copyright 2017 Christoph Mende
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import time
import telepot
from telepot.loop import MessageLoop
import plugin_loader
commands = {}
users = {}
username = None
for i in plugin_loader.get_plugins():
print('Loading plugin ' + i['name'])
plugin = plugin_loader.load_plugin(i)
commands.update(plugin.commands)
def handle(msg):
content_type, chat_type, chat_id = telepot.glance(msg)
print(content_type, chat_type, chat_id)
# reject non-text messages
if content_type != 'text':
return
# split message in command (first word) and args (rest)
argv = msg['text'].strip().split(' ', 1)
command = argv[0].lower()
args = None
if len(argv) == 2:
args = argv[1]
# reject non-commands
if not command.startswith('/'):
return
# strip / from command
command = command[1:]
# strip username from command
if command.endswith('@'+username):
command = command[:-len(username)-1]
# search for plugin handling command
for c in commands:
if c != command:
continue
# found it => look up user
uid = msg['from']['id']
user = users.setdefault(uid, msg['from'])
retval = commands[c](user, args)
bot.sendMessage(chat_id, retval)
if len(sys.argv) < 2:
sys.exit('Usage: %s <telegram api token>' % sys.argv[0])
bot = telepot.Bot(sys.argv[1])
username = bot.getMe()['username']
MessageLoop(bot, handle).run_as_thread()
while 1:
time.sleep(10)

license: apache-2.0
lang: Python

commit: 6903779f0d34145af1f13fef7f4e07b605aec3d0
subject: Update __init__.py
repos:
CactusDev/CactusBot
old_file: cactusbot/commands/__init__.py
new_file: cactusbot/commands/__init__.py
new_contents:
"""Handle commands."""
from .command import Command
from .magic import COMMANDS
__all__ = ["Command", "COMMANDS"]

old_contents:
"""Handle commands."""
from .command import Command
from .magic import COMMANDS
__all__ = ["Command", "COMMANDS]

license: mit
lang: Python

commit: 5e8b82130a0bd0d63629e725fc06380105955274
subject: Update data migration
repos:
baylee-d/osf.io,Johnetordoff/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,aaxelb/osf.io,adlius/osf.io,Johnetordoff/osf.io,mattclark/osf.io,brianjgeiger/osf.io,saradbowman/osf.io,Johnetordoff/osf.io,felliott/osf.io,brianjgeiger/osf.io,CenterForOpenScience/osf.io,cslzchen/osf.io,adlius/osf.io,baylee-d/osf.io,felliott/osf.io,cslzchen/osf.io,adlius/osf.io,aaxelb/osf.io,brianjgeiger/osf.io,brianjgeiger/osf.io,cslzchen/osf.io,adlius/osf.io,mattclark/osf.io,felliott/osf.io,mfraezz/osf.io,pattisdr/osf.io,pattisdr/osf.io,saradbowman/osf.io,mfraezz/osf.io,aaxelb/osf.io,mattclark/osf.io,pattisdr/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,baylee-d/osf.io,mfraezz/osf.io,cslzchen/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,CenterForOpenScience/osf.io
old_file: osf/migrations/0084_preprint_node_divorce.py
new_file: osf/migrations/0084_preprint_node_divorce.py
new_contents:
# -*- coding: utf-8 -*-
# Generated by Django 1.11.9 on 2018-03-12 18:25
from __future__ import unicode_literals
from django.db import migrations
from django.db import transaction
def divorce_preprints_from_nodes(apps, schema_editor):
Preprint = apps.get_model('osf', 'PreprintService')
PreprintContributor = apps.get_model('osf', 'PreprintContributor')
# tried to use F() function here but F() doesn't support table joins
# instead, using the following to make this transaction atomic
with transaction.atomic():
for preprint in Preprint.objects.filter(node__isnull=False):
preprint.title = preprint.node.title
preprint.description = preprint.node.description
preprint.creator = preprint.node.creator
preprint.save()
for preprint in Preprint.objects.all():
if preprint.node:
# use bulk create
for contrib in preprint.node.contributor_set.all():
# make a PreprintContributor that points to the pp instead of the node
# because there's a throughtable, relations are designated
# solely on the through model, and adds on the related models
# are not required.
new_contrib = PreprintContributor.objects.create(
preprint=preprint,
user=contrib.user,
read=contrib.read,
write=contrib.write,
admin=contrib.admin,
visible=contrib.visible
)
new_contrib.save()
class Migration(migrations.Migration):
dependencies = [
('osf', '0083_update_preprint_model_for_divorce'),
]
operations = [
migrations.RunPython(divorce_preprints_from_nodes)
]

old_contents:
# -*- coding: utf-8 -*-
# Generated by Django 1.11.9 on 2018-03-12 18:25
from __future__ import unicode_literals
from django.db import migrations
from django.db import transaction
def divorce_preprints_from_nodes(apps, schema_editor):
Preprint = apps.get_model('osf', 'PreprintService')
PreprintContributor = apps.get_model('osf', 'PreprintContributor')
# tried to use F() function here but F() doesn't support table joins
# instead, using the following to make this transaction atomic
with transaction.atomic():
for preprint in Preprint.objects.filter(node__isnull=False):
preprint.title = preprint.node.title
preprint.description = preprint.node.description
preprint.creator = preprint.node.creator
preprint.save()
for preprint in Preprint.objects.all():
if preprint.node:
# preprint.title = preprint.node.title
# preprint.description = preprint.node.description
# preprint.creator = preprint.node.creator
# use bulk create
for contrib in preprint.node._contributors:
# make a PreprintContributor that points to the pp instead of the node
new_contrib = PreprintContributor.objects.create()
new_contrib.primary_identifier_name = contrib.primary_identifier_name
new_contrib.read = contrib.read
new_contrib.write = contrib.write
new_contrib.admin = contrib.admin
new_contrib.visible = contrib.visible
new_contrib.user = contrib.user
new_contrib.preprint = preprint
new_contrib.save()
preprint._contributors.add(new_contrib)
# will existing nodes attached to preprints still by accessible? A: yes!
preprint.save()
class Migration(migrations.Migration):
dependencies = [
('osf', '0083_update_preprint_model_for_divorce'),
]
operations = [
migrations.RunPython(divorce_preprints_from_nodes)
]

license: apache-2.0
lang: Python
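
note: the record above keeps the two habits that make Django data migrations
safe to replay: models come from apps.get_model(...) (the historical model
state at this point in the migration graph, never a direct import), and the
row-copying loop runs inside transaction.atomic() so a failure rolls the whole
copy back. The skeleton of that pattern, sketched with a placeholder loop body:

from django.db import migrations, transaction

def forwards(apps, schema_editor):
    # historical model, frozen at this migration's point in the graph
    Preprint = apps.get_model('osf', 'PreprintService')
    with transaction.atomic():
        for preprint in Preprint.objects.all():
            preprint.save()  # placeholder for the real per-row mutation

class Migration(migrations.Migration):
    dependencies = [('osf', '0083_update_preprint_model_for_divorce')]
    operations = [migrations.RunPython(forwards)]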

commit: dd2f7da18fb295d58ac763ee7e91b9b1a5bdf1d0
subject: Update __about__.py
repos:
reaperhulk/bcrypt,reaperhulk/bcrypt,alex/bcrypt,growingdever/bcrypt,pyca/bcrypt,pyca/bcrypt,pyca/bcrypt,growingdever/bcrypt,reaperhulk/bcrypt,alex/bcrypt,reaperhulk/bcrypt,alex/bcrypt,growingdever/bcrypt,pyca/bcrypt
old_file: bcrypt/__about__.py
new_file: bcrypt/__about__.py
new_contents:
# Author:: Donald Stufft (<donald@stufft.io>)
# Copyright:: Copyright (c) 2013 Donald Stufft
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
]
__title__ = "bcrypt"
__summary__ = "Modern password hashing for your software and your servers"
__uri__ = "https://github.com/pyca/bcrypt/"
__version__ = "1.0.2"
__author__ = "Donald Stufft"
__email__ = "donald@stufft.io"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"

old_contents:
# Author:: Donald Stufft (<donald@stufft.io>)
# Copyright:: Copyright (c) 2013 Donald Stufft
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
]
__title__ = "bcrypt"
__summary__ = "Modern password hashing for your software and your servers"
__uri__ = "https://github.com/dstufft/bcrypt/"
__version__ = "1.0.2"
__author__ = "Donald Stufft"
__email__ = "donald@stufft.io"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"

license: apache-2.0
lang: Python

commit: d7c35749c682cb86356cdf825f3886e22b07942a
subject: Add --refresh command line argument to Django admin command build_genome_blastdb
repos:
ginkgobioworks/edge,ginkgobioworks/edge,ginkgobioworks/edge,ginkgobioworks/edge
old_file: src/edge/management/commands/build_genome_blastdb.py
new_file: src/edge/management/commands/build_genome_blastdb.py
new_contents:
from edge.blastdb import build_all_genome_dbs
from django.core.management.base import BaseCommand
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
'--refresh',
action='store_true',
help='Rebuild BLAST database files',
)
def handle(self, *args, **options):
if options['refresh']:
build_all_genome_dbs(refresh=True)
else:
build_all_genome_dbs(refresh=False)

old_contents:
from edge.blastdb import build_all_genome_dbs
from django.core.management.base import BaseCommand
class Command(BaseCommand):
def handle(self, *args, **options):
build_all_genome_dbs()

license: mit
lang: Python
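
note: because the new argument above uses action='store_true',
options['refresh'] is always a plain bool (False unless --refresh is passed),
so the if/else in handle() is equivalent to a single forwarding call. A sketch
of the shorter form (behaviourally the same, untested against the edge
codebase):

from edge.blastdb import build_all_genome_dbs
from django.core.management.base import BaseCommand

class Command(BaseCommand):
    def add_arguments(self, parser):
        parser.add_argument('--refresh', action='store_true',
                            help='Rebuild BLAST database files')

    def handle(self, *args, **options):
        # options['refresh'] defaults to False when the flag is absent
        build_all_genome_dbs(refresh=options['refresh'])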

commit: f7f576adfccdfbc386c991bb35f2a52e9db19b5e
subject: remove hack
repos:
albertz/music-player,kingsj/music-player,albertz/music-player,albertz/music-player,albertz/music-player,kingsj/music-player,albertz/music-player,kingsj/music-player,albertz/music-player,kingsj/music-player,kingsj/music-player
old_file: tracker.py
new_file: tracker.py
new_contents:
from utils import *
from pprint import pprint
import sys
from State import state
from player import PlayerEventCallbacks
import lastfm
def track(event, args, kwargs):
print "track:", repr(event), repr(args), repr(kwargs)
if event is PlayerEventCallbacks.onSongChange:
oldSong = kwargs["oldSong"]
newSong = kwargs["newSong"]
if oldSong: oldSong.close() # in case anyone is holding any ref to it, close at least the file
if "artist" not in newSong.metadata:
print "new song metadata is incomplete:", newSong.metadata
else:
print "new song:", newSong.fileext, ",", newSong.artist, "-", newSong.track, ",", formatTime(newSong.duration)
pprint(newSong.metadata)
lastfm.onSongChange(newSong)
if event is PlayerEventCallbacks.onSongFinished:
song = kwargs["song"]
lastfm.onSongFinished(song)
def trackerMain():
lastfm.login()
for ev,args,kwargs in state.updates.read():
try:
track(ev, args, kwargs)
except:
sys.excepthook(*sys.exc_info())
lastfm.quit()

old_contents:
from utils import *
from pprint import pprint
import sys
from State import state
from player import PlayerEventCallbacks
import lastfm
def track(event, args, kwargs):
print "track:", repr(event), repr(args), repr(kwargs)
if event is PlayerEventCallbacks.onSongChange:
oldSong = kwargs["oldSong"]
newSong = kwargs["newSong"]
if oldSong is newSong: print "** something strange. oldSong is newSong" # TODO: fix
elif oldSong: oldSong.close() # in case anyone is holding any ref to it, close at least the file
if "artist" not in newSong.metadata:
print "new song metadata is incomplete:", newSong.metadata
else:
print "new song:", newSong.fileext, ",", newSong.artist, "-", newSong.track, ",", formatTime(newSong.duration)
pprint(newSong.metadata)
lastfm.onSongChange(newSong)
if event is PlayerEventCallbacks.onSongFinished:
song = kwargs["song"]
lastfm.onSongFinished(song)
def trackerMain():
lastfm.login()
for ev,args,kwargs in state.updates.read():
try:
track(ev, args, kwargs)
except:
sys.excepthook(*sys.exc_info())
lastfm.quit()

license: bsd-2-clause
lang: Python

commit: 587abec7ff5b90c03885e164d9b6b62a1fb41f76
subject: Fix the headers sent by the GitHub renderer.
repos:
ssundarraj/grip,mgoddard-pivotal/grip,mgoddard-pivotal/grip,joeyespo/grip,joeyespo/grip,jbarreras/grip,jbarreras/grip,ssundarraj/grip
old_file: grip/github_renderer.py
new_file: grip/github_renderer.py
new_contents:
from flask import abort, json
import requests
def render_content(text, gfm=False, context=None,
username=None, password=None):
"""Renders the specified markup using the GitHub API."""
if gfm:
url = 'https://api.github.com/markdown'
data = {'text': text, 'mode': 'gfm'}
if context:
data['context'] = context
data = json.dumps(data)
headers = {'content-type': 'application/json'}
else:
url = 'https://api.github.com/markdown/raw'
data = text
headers = {'content-type': 'text/x-markdown'}
auth = (username, password) if username else None
r = requests.post(url, headers=headers, data=data, auth=auth)
# Relay HTTP errors
if r.status_code != 200:
try:
message = r.json()['message']
except:
message = r.text
abort(r.status_code, message)
return r.text

old_contents:
from flask import abort, json
import requests
def render_content(text, gfm=False, context=None,
username=None, password=None):
"""Renders the specified markup using the GitHub API."""
if gfm:
url = 'https://api.github.com/markdown'
data = {'text': text, 'mode': 'gfm'}
if context:
data['context'] = context
data = json.dumps(data)
else:
url = 'https://api.github.com/markdown/raw'
data = text
headers = {'content-type': 'text/plain'}
auth = (username, password) if username else None
r = requests.post(url, headers=headers, data=data, auth=auth)
# Relay HTTP errors
if r.status_code != 200:
try:
message = r.json()['message']
except:
message = r.text
abort(r.status_code, message)
return r.text

license: mit
lang: Python
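
note: the fix above pairs each GitHub endpoint with the body it actually sends:
application/json for the JSON payload posted to /markdown, and text/x-markdown
for the raw body posted to /markdown/raw (the old code declared text/plain,
and only set headers alongside the raw branch). The two request shapes,
sketched with requests and a placeholder document:

import requests

# JSON endpoint: the markdown travels inside a JSON object
requests.post('https://api.github.com/markdown',
              headers={'content-type': 'application/json'},
              data='{"text": "Hello **world**", "mode": "gfm"}')

# raw endpoint: the markdown is the body itself
requests.post('https://api.github.com/markdown/raw',
              headers={'content-type': 'text/x-markdown'},
              data='Hello **world**')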

commit: 67f64792dc7321cd9521e927b4eb1a58b67cdcdc
subject: Allow passing of direct function reference to url triple
repos:
brinkframework/brink
old_file: brink/server.py
new_file: brink/server.py
new_contents:
from aiohttp import web
from brink.config import config
from brink.db import conn
from brink.handlers import __handler_wrapper, __ws_handler_wrapper
from brink.utils import resolve_func
from brink.cli import print_globe, print_info
import importlib
import aiohttp_autoreload
import logging
def run_server(conf):
for cfg in vars(conf):
if cfg[:2] != "__":
config.set(cfg, getattr(conf, cfg))
# Setup database config for later use
conn.setup(config.get("DATABASE", {}))
# Resolve middleware
middleware = [resolve_func(func) for
func in config.get("MIDDLEWARE", [])]
server = web.Application(middlewares=middleware)
logger = logging.getLogger("brink")
# Iterate over all installed apps and add their routes
for app in config.get("INSTALLED_APPS", []):
__load_app(server, app)
# Enable source code auto reload on change only if DEBUG is enabled
if config.get("DEBUG"):
aiohttp_autoreload.add_reload_hook(
lambda: print_info("Detected code change. Reloading...",
spaced=True))
aiohttp_autoreload.start()
logger.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
logger.addHandler(ch)
server.make_handler(access_log=logger)
port = config.get("PORT", 8888)
print_globe("Server listening on port %s\n" % port)
web.run_app(server, port=port, print=lambda *args: None)
def __load_app(server, package):
urls = importlib.import_module("%s.urls" % package)
for url in urls.urls:
__add_route(server, url, package)
def __add_route(server, url, package):
(method, route, handler) = url
handler_wrapper = __ws_handler_wrapper if method == "WS" \
else __handler_wrapper
if type(handler) is str:
try:
handler_func = resolve_func(handler)
except ModuleNotFoundError:
handler_func = resolve_func("%s.%s" % (package, handler))
else:
handler_func = handler
handler_func = handler_wrapper(handler_func)
if method == "GET" or method == "WS":
server.router.add_get(route, handler_func)
elif method == "POST":
server.router.add_post(route, handler_func)
elif method == "PUT":
server.router.add_put(route, handler_func)
elif method == "PATCH":
server.router.add_patch(route, handler_func)
elif method == "DELETE":
server.router.add_delete(route, handler_func)

old_contents:
from aiohttp import web
from brink.config import config
from brink.db import conn
from brink.handlers import __handler_wrapper, __ws_handler_wrapper
from brink.utils import resolve_func
from brink.cli import print_globe, print_info
import importlib
import aiohttp_autoreload
import logging
def run_server(conf):
for cfg in vars(conf):
if cfg[:2] != "__":
config.set(cfg, getattr(conf, cfg))
# Setup database config for later use
conn.setup(config.get("DATABASE", {}))
# Resolve middleware
middleware = [resolve_func(func) for
func in config.get("MIDDLEWARE", [])]
server = web.Application(middlewares=middleware)
logger = logging.getLogger("brink")
# Iterate over all installed apps and add their routes
for app in config.get("INSTALLED_APPS", []):
__load_app(server, app)
# Enable source code auto reload on change only if DEBUG is enabled
if config.get("DEBUG"):
aiohttp_autoreload.add_reload_hook(
lambda: print_info("Detected code change. Reloading...",
spaced=True))
aiohttp_autoreload.start()
logger.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
logger.addHandler(ch)
server.make_handler(access_log=logger)
port = config.get("PORT", 8888)
print_globe("Server listening on port %s\n" % port)
web.run_app(server, port=port, print=lambda *args: None)
def __load_app(server, package):
urls = importlib.import_module("%s.urls" % package)
for url in urls.urls:
__add_route(server, url, package)
def __add_route(server, url, package):
(method, route, handler) = url
handler_wrapper = __ws_handler_wrapper if method == "WS" \
else __handler_wrapper
try:
handler_func = resolve_func(handler)
except ModuleNotFoundError:
handler_func = resolve_func("%s.%s" % (package, handler))
handler_func = handler_wrapper(handler_func)
if method == "GET" or method == "WS":
server.router.add_get(route, handler_func)
elif method == "POST":
server.router.add_post(route, handler_func)
elif method == "PUT":
server.router.add_put(route, handler_func)
elif method == "PATCH":
server.router.add_patch(route, handler_func)
elif method == "DELETE":
server.router.add_delete(route, handler_func)
|
bsd-3-clause
|
Python
|
6156960333163e15fd2ddd96e831bbdf2e92163d
|
Correct reference to organization
|
gg7/sentry,1tush/sentry,gg7/sentry,jokey2k/sentry,TedaLIEz/sentry,beeftornado/sentry,TedaLIEz/sentry,mvaled/sentry,gg7/sentry,zenefits/sentry,nicholasserra/sentry,kevinlondon/sentry,kevinastone/sentry,looker/sentry,mvaled/sentry,ewdurbin/sentry,daevaorn/sentry,JackDanger/sentry,mvaled/sentry,JTCunning/sentry,JTCunning/sentry,ifduyue/sentry,beeftornado/sentry,wong2/sentry,zenefits/sentry,kevinastone/sentry,mitsuhiko/sentry,gencer/sentry,hongliang5623/sentry,beeftornado/sentry,daevaorn/sentry,Natim/sentry,vperron/sentry,nicholasserra/sentry,JamesMura/sentry,boneyao/sentry,BuildingLink/sentry,gencer/sentry,korealerts1/sentry,BayanGroup/sentry,Natim/sentry,gencer/sentry,imankulov/sentry,imankulov/sentry,vperron/sentry,looker/sentry,llonchj/sentry,ifduyue/sentry,felixbuenemann/sentry,BuildingLink/sentry,drcapulet/sentry,ewdurbin/sentry,BuildingLink/sentry,mvaled/sentry,jokey2k/sentry,jean/sentry,drcapulet/sentry,mvaled/sentry,JackDanger/sentry,pauloschilling/sentry,ngonzalvez/sentry,fotinakis/sentry,ifduyue/sentry,JamesMura/sentry,kevinlondon/sentry,fuziontech/sentry,JamesMura/sentry,vperron/sentry,ifduyue/sentry,1tush/sentry,fotinakis/sentry,jokey2k/sentry,JTCunning/sentry,ewdurbin/sentry,wujuguang/sentry,fuziontech/sentry,JamesMura/sentry,felixbuenemann/sentry,ifduyue/sentry,1tush/sentry,argonemyth/sentry,drcapulet/sentry,zenefits/sentry,hongliang5623/sentry,looker/sentry,looker/sentry,looker/sentry,wujuguang/sentry,Kryz/sentry,alexm92/sentry,alexm92/sentry,imankulov/sentry,boneyao/sentry,jean/sentry,jean/sentry,kevinlondon/sentry,argonemyth/sentry,daevaorn/sentry,ngonzalvez/sentry,boneyao/sentry,songyi199111/sentry,BayanGroup/sentry,gencer/sentry,korealerts1/sentry,wong2/sentry,BuildingLink/sentry,hongliang5623/sentry,Kryz/sentry,mitsuhiko/sentry,songyi199111/sentry,Kryz/sentry,wong2/sentry,daevaorn/sentry,nicholasserra/sentry,fuziontech/sentry,BayanGroup/sentry,zenefits/sentry,gencer/sentry,BuildingLink/sentry,pauloschilling/sentry,songyi199111/sentry,JamesMura/sentry,mvaled/sentry,kevinastone/sentry,JackDanger/sentry,jean/sentry,zenefits/sentry,TedaLIEz/sentry,alexm92/sentry,llonchj/sentry,argonemyth/sentry,llonchj/sentry,pauloschilling/sentry,wujuguang/sentry,ngonzalvez/sentry,korealerts1/sentry,fotinakis/sentry,fotinakis/sentry,felixbuenemann/sentry,Natim/sentry,jean/sentry
|
src/sentry/api/bases/organization.py
|
src/sentry/api/bases/organization.py
|
from __future__ import absolute_import
from sentry.api.base import Endpoint
from sentry.api.exceptions import ResourceDoesNotExist
from sentry.api.permissions import ScopedPermission
from sentry.models import AuthIdentity, Organization, OrganizationMember
class OrganizationPermission(ScopedPermission):
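    # Maps each HTTP method to the auth scopes permitted to perform it.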
scope_map = {
'GET': ['org:read', 'org:write', 'org:delete'],
'POST': ['org:write', 'org:delete'],
'PUT': ['org:write', 'org:delete'],
'DELETE': ['org:delete'],
}
def has_object_permission(self, request, view, organization):
if request.auth:
if self.is_project_key(request):
return False
return request.auth.organization_id == organization.id
if request.user.is_superuser:
return True
try:
om = OrganizationMember.objects.get(
organization=organization,
user=request.user,
)
except OrganizationMember.DoesNotExist:
return False
try:
auth_identity = AuthIdentity.objects.get(
auth_provider__organization=organization.id,
)
except AuthIdentity.DoesNotExist:
pass
else:
# TODO(dcramer): we might simply want to change their scopes to
# something like 'org:read' since we'd still want them to know
# they're part of the org. Alternatively we introduce yet another
# scope that suggests extremely limited read.
if not auth_identity.is_valid(om):
return False
allowed_scopes = set(self.scope_map[request.method])
current_scopes = om.scopes
return any(s in allowed_scopes for s in current_scopes)
class OrganizationEndpoint(Endpoint):
permission_classes = (OrganizationPermission,)
def convert_args(self, request, organization_slug, *args, **kwargs):
try:
organization = Organization.objects.get_from_cache(
slug=organization_slug,
)
except Organization.DoesNotExist:
raise ResourceDoesNotExist
self.check_object_permissions(request, organization)
kwargs['organization'] = organization
return (args, kwargs)
|
from __future__ import absolute_import
from sentry.api.base import Endpoint
from sentry.api.exceptions import ResourceDoesNotExist
from sentry.api.permissions import ScopedPermission
from sentry.models import AuthIdentity, Organization, OrganizationMember
class OrganizationPermission(ScopedPermission):
scope_map = {
'GET': ['org:read', 'org:write', 'org:delete'],
'POST': ['org:write', 'org:delete'],
'PUT': ['org:write', 'org:delete'],
'DELETE': ['org:delete'],
}
def has_object_permission(self, request, view, organization):
if request.auth:
if self.is_project_key(request):
return False
return request.auth.organization_id == organization.id
if request.user.is_superuser:
return True
try:
om = OrganizationMember.objects.get(
organization=organization,
user=request.user,
)
except OrganizationMember.DoesNotExist:
return False
try:
auth_identity = AuthIdentity.objects.get(
auth_provider__organization=self.organization_id,
)
except AuthIdentity.DoesNotExist:
pass
else:
# TODO(dcramer): we might simply want to change their scopes to
# something like 'org:read' since we'd still want them to know
# they're part of the org. Alternatively we introduce yet another
# scope that suggests extremely limited read.
if not auth_identity.is_valid(om):
return False
allowed_scopes = set(self.scope_map[request.method])
current_scopes = om.scopes
return any(s in allowed_scopes for s in current_scopes)
class OrganizationEndpoint(Endpoint):
permission_classes = (OrganizationPermission,)
def convert_args(self, request, organization_slug, *args, **kwargs):
try:
organization = Organization.objects.get_from_cache(
slug=organization_slug,
)
except Organization.DoesNotExist:
raise ResourceDoesNotExist
self.check_object_permissions(request, organization)
kwargs['organization'] = organization
return (args, kwargs)
|
bsd-3-clause
|
Python
|
798e51e880374b43c405ce7e4314b3d1a3311c5c
|
Make exceptions for bad behavior (#220)
|
wiki-ai/wikilabels,wiki-ai/wikilabels,wiki-ai/wikilabels
|
wikilabels/database/db.py
|
wikilabels/database/db.py
|
import logging
from contextlib import contextmanager
from psycopg2.extras import RealDictCursor
from psycopg2.pool import ThreadedConnectionPool
from .campaigns import Campaigns
from .labels import Labels
from .tasks import Tasks
from .worksets import Worksets
logger = logging.getLogger(__name__)
class DB:
def __init__(self, *args, **kwargs):
self.pool_params = (args, kwargs)
self.pool = None
self.campaigns = Campaigns(self)
self.worksets = Worksets(self)
self.tasks = Tasks(self)
self.labels = Labels(self)
self.logger = logging.getLogger(__name__)
def _initialize_pool(self):
if self.pool is None:
logger.info("Initializing connection pool.")
args, kwargs = self.pool_params
self.pool = ThreadedConnectionPool(
*args, cursor_factory=RealDictCursor, **kwargs)
def execute(self, sql):
with self.transaction() as transactor:
cursor = transactor.cursor()
cursor.execute(sql)
return cursor
@contextmanager
def transaction(self):
"""Provides a transactional scope around a series of operations."""
self._initialize_pool()
conn = self.pool.getconn()
try:
yield conn
conn.commit()
except: # noqa: E722
# We're fine with the bare except cos we raise in any case.
conn.rollback()
raise
finally:
self.pool.putconn(conn)
@classmethod
def from_config(cls, config):
# Copy config as kwargs
params = {k: v for k, v in config['database'].items()}
params['minconn'] = params.get('minconn', 1)
params['maxconn'] = params.get('maxconn', 5)
return cls(**params)
|
import logging
from contextlib import contextmanager
from psycopg2.extras import RealDictCursor
from psycopg2.pool import ThreadedConnectionPool
from .campaigns import Campaigns
from .labels import Labels
from .tasks import Tasks
from .worksets import Worksets
logger = logging.getLogger(__name__)
class DB:
def __init__(self, *args, **kwargs):
self.pool_params = (args, kwargs)
self.pool = None
self.campaigns = Campaigns(self)
self.worksets = Worksets(self)
self.tasks = Tasks(self)
self.labels = Labels(self)
self.logger = logging.getLogger(__name__)
def _initialize_pool(self):
if self.pool is None:
logger.info("Initializing connection pool.")
args, kwargs = self.pool_params
self.pool = ThreadedConnectionPool(
*args, cursor_factory=RealDictCursor, **kwargs)
def execute(self, sql):
with self.transaction() as transactor:
cursor = transactor.cursor()
cursor.execute(sql)
return cursor
@contextmanager
def transaction(self):
"""Provides a transactional scope around a series of operations."""
self._initialize_pool()
conn = self.pool.getconn()
try:
yield conn
conn.commit()
except:
conn.rollback()
raise
finally:
self.pool.putconn(conn)
@classmethod
def from_config(cls, config):
# Copy config as kwargs
params = {k: v for k, v in config['database'].items()}
params['minconn'] = params.get('minconn', 1)
params['maxconn'] = params.get('maxconn', 5)
return cls(**params)
|
mit
|
Python
|
3f0932f8fc1277fc5354476470c2931d48f62977
|
bump version
|
SexualHealthInnovations/callisto-core,SexualHealthInnovations/callisto-core,project-callisto/callisto-core,project-callisto/callisto-core
|
callisto_core/utils/version.py
|
callisto_core/utils/version.py
|
__version__ = '0.10.11'
|
__version__ = '0.10.10'
|
agpl-3.0
|
Python
|
7220621fcdba6de2e0fabb69e2d51dd382e739ba
|
Fix Windows freeze error
|
desbma/sacad,desbma/sacad
|
freeze.py
|
freeze.py
|
#!/usr/bin/env python3
import os
import re
from cx_Freeze import setup, Executable
with open(os.path.join("sacad", "__init__.py"), "rt") as f:
version = re.search("__version__ = \"([^\"]+)\"", f.read()).group(1)
build_exe_options = {"includes": ["lxml._elementpath"],
"packages": ["asyncio", "idna"],
"optimize": 0}
setup(name="sacad",
version=version,
author="desbma",
packages=["sacad"],
options={"build_exe": build_exe_options},
executables=[Executable(os.path.join("sacad", "__main__.py"),
targetName="sacad.exe"),
Executable(os.path.join("sacad", "recurse.py"),
targetName="sacad_r.exe")])
|
#!/usr/bin/env python3
import os
import re
from cx_Freeze import setup, Executable
with open(os.path.join("sacad", "__init__.py"), "rt") as f:
version = re.search("__version__ = \"([^\"]+)\"", f.read()).group(1)
build_exe_options = {"includes": ["lxml._elementpath"],
"packages": ["asyncio"],
"optimize": 0}
setup(name="sacad",
version=version,
author="desbma",
packages=["sacad"],
options={"build_exe": build_exe_options},
executables=[Executable(os.path.join("sacad", "__main__.py"),
targetName="sacad.exe"),
Executable(os.path.join("sacad", "recurse.py"),
targetName="sacad_r.exe")])
|
mpl-2.0
|
Python
|
ec51bcd1803a2f576f6a325b9b950d86c5d0b2a9
|
Cut 0.9.1
|
singingwolfboy/invocations,pyinvoke/invocations,mrjmad/invocations
|
invocations/_version.py
|
invocations/_version.py
|
__version_info__ = (0, 9, 1)
__version__ = '.'.join(map(str, __version_info__))
|
__version_info__ = (0, 9, 0)
__version__ = '.'.join(map(str, __version_info__))
|
bsd-2-clause
|
Python
|
4be292c5c38b4eec08c56a872f6cd4f390bc607a
|
make compiler's py3k warning a full deprecation warning #6837
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
Lib/compiler/__init__.py
|
Lib/compiler/__init__.py
|
"""Package for parsing and compiling Python source code
There are several functions defined at the top level that are imported
from modules contained in the package.
parse(buf, mode="exec") -> AST
Converts a string containing Python source code to an abstract
syntax tree (AST). The AST is defined in compiler.ast.
parseFile(path) -> AST
The same as parse(open(path))
walk(ast, visitor, verbose=None)
Does a pre-order walk over the ast using the visitor instance.
See compiler.visitor for details.
compile(source, filename, mode, flags=None, dont_inherit=None)
Returns a code object. A replacement for the builtin compile() function.
compileFile(filename)
Generates a .pyc file by compiling filename.
"""
import warnings
warnings.warn("The compiler package is deprecated and removed in Python 3.x.",
DeprecationWarning, stacklevel=2)
from compiler.transformer import parse, parseFile
from compiler.visitor import walk
from compiler.pycodegen import compile, compileFile
|
"""Package for parsing and compiling Python source code
There are several functions defined at the top level that are imported
from modules contained in the package.
parse(buf, mode="exec") -> AST
Converts a string containing Python source code to an abstract
syntax tree (AST). The AST is defined in compiler.ast.
parseFile(path) -> AST
The same as parse(open(path))
walk(ast, visitor, verbose=None)
Does a pre-order walk over the ast using the visitor instance.
See compiler.visitor for details.
compile(source, filename, mode, flags=None, dont_inherit=None)
Returns a code object. A replacement for the builtin compile() function.
compileFile(filename)
Generates a .pyc file by compiling filename.
"""
from warnings import warnpy3k
warnpy3k("the compiler package has been removed in Python 3.0", stacklevel=2)
del warnpy3k
from compiler.transformer import parse, parseFile
from compiler.visitor import walk
from compiler.pycodegen import compile, compileFile
|
mit
|
Python
|
7f9c9c25f5786bf96ff3d89cc8fd840e3e6a4a6d
|
Allow passing of a tuple of three integers to get a datetime.
|
MinchinWeb/minchin.pelican.jinja_filters
|
pelican/plugins/jinja_filters/jinja_filters.py
|
pelican/plugins/jinja_filters/jinja_filters.py
|
"""Various filters for Jinja."""
from datetime import datetime as _datetime
from titlecase import titlecase as _titlecase
__all__ = [
"article_date",
"breaking_spaces",
"datetime",
"titlecase",
]
def datetime(value, format_str="%Y/%m/%d %H:%M"):
"""
Convert a datetime to a different format.
The default format looks like --> 2016/11/25 12:34
Args
----
value (datetime.datetime): input date and time
format_str (str): The datetime format string to apply to value
Returns
-------
str: value, after the format_str has been applied
"""
return value.strftime(format_str)
def article_date(value):
"""
Convert a date to the format we want it displayed on the article template.
Format looks like --> Friday, November 4, 2020
Args
----
value (datetime.datetime): input date
Returns
-------
str: value, formatted nicely for displaying the date.
"""
return value.strftime("%A, %B %-d, %Y")
def datetime_from_period(value):
"""
Converts "period" into a datetime object.
On yearly/monthly/daily archive pages, a "period" object is supplied so you
know what timeperiod the particular archive page is for. This converts it
to a datetime.datetime object, so it can be further processed.
If a month is not provided (i.e. the period is for a yearly archive),
January is assumed. If a day is not provided (i.e. the period is for a
yearly or monthly archive), the 1st is assumed.
You can also generate a tuple of (up to three) integers to get a datetime
out, using the integer representation for the month (1=January, etc).
Args
----
value (tuple): input period
Returns
-------
datetime.datetime: value converted
"""
    if len(value) >= 2 and isinstance(value[1], int):
        placeholder_month = _datetime(2021, value[1], 1).strftime("%B")
    elif len(value) == 1:
        placeholder_month = _datetime(2021, 1, 1).strftime("%B")
    else:
        placeholder_month = value[1]
    new_value = " ".join((
        str(value[0]),
        placeholder_month,
        str(value[2]) if len(value) >= 3 else "1",
    ))
    new_datetime = _datetime.strptime(new_value, "%Y %B %d")
    return new_datetime
def breaking_spaces(value):
"""
Convert non-breaking spaces to regular spaces.
Args
----
value (str): input value
Returns
-------
str: the input string, now with regular spaces
"""
return value.replace("\u00A0", " ")
def titlecase(value):
"""
Returns the titlecased version of the supplied text.
Args
----
value (str): input value
Returns
-------
str: value, titlecase formatted
"""
return _titlecase(value)
|
"""Various filters for Jinja."""
from datetime import datetime as _datetime
from titlecase import titlecase as _titlecase
__all__ = [
"article_date",
"breaking_spaces",
"datetime",
"titlecase",
]
def datetime(value, format_str="%Y/%m/%d %H:%M"):
"""
Convert a datetime to a different format.
The default format looks like --> 2016/11/25 12:34
Args
----
value (datetime.datetime): input date and time
format_str (str): The datetime format string to apply to value
Returns
-------
str: value, after the format_str has been applied
"""
return value.strftime(format_str)
def article_date(value):
"""
Convert a date to the format we want it displayed on the article template.
Format looks like --> Friday, November 4, 2020
Args
----
value (datetime.datetime): input date
Returns
-------
str: value, formatted nicely for displaying the date.
"""
return value.strftime("%A, %B %-d, %Y")
def datetime_from_period(value):
"""
Converts "period" into a datetime object.
On yearly/monthly/daily archive pages, a "period" object is supplied so you
know what timeperiod the particular archive page is for. This converts it
to a datetime.datetime object, so it can be further processed.
If a month is not provided (i.e. the period is for a yearly archive),
January is assumed. If a day is not provided (i.e. the period is for a
yearly or monthly archive), the 1st is assumed.
Args
----
value (tuple): input period
Returns
-------
datetime.datetime: value converted
"""
JANUARY = _datetime(2021, 1, 1).strftime("%B")
new_value = " ".join(
value[0],
value[1] if len(value) > 1 else JANUARY,
value[2] if len(value) > 2 else 1,
)
new_datetime = _datetime.strptime(*new_value, "%Y %B %-d")
return new_datetime
def breaking_spaces(value):
"""
Convert non-breaking spaces to regular spaces.
Args
----
value (str): input value
Returns
-------
str: the input string, now with regular spaces
"""
return value.replace("\u00A0", " ")
def titlecase(value):
"""
Returns the titlecased version of the supplied text.
Args
----
value (str): input value
Returns
-------
str: value, titlecase formatted
"""
return _titlecase(value)
|
mit
|
Python
|
b548092d480871e402e2d50ab96d864c5851cab2
|
fix __init__ changes
|
kkroening/ffmpeg-python
|
ffmpeg/__init__.py
|
ffmpeg/__init__.py
|
from __future__ import unicode_literals
from . import _filters, _ffmpeg, _run
from ._filters import *
from ._ffmpeg import *
from ._run import *
__all__ = _filters.__all__ + _ffmpeg.__all__ + _run.__all__
|
from __future__ import unicode_literals
from . import _filters, _ffmpeg, _run
from ._filters import *
from ._ffmpeg import *
from ._run import *
from ._view import *
__all__ = _filters.__all__ + _ffmpeg.__all__ + _run.__all__ + _view.__all__
|
apache-2.0
|
Python
|
71a84ecb772aa5560e35409219c11001ac168c6a
|
Add logging for contact form email.
|
cdriehuys/chmvh-website,cdriehuys/chmvh-website,cdriehuys/chmvh-website
|
chmvh_website/contact/forms.py
|
chmvh_website/contact/forms.py
|
import logging
from smtplib import SMTPException
from django import forms
from django.conf import settings
from django.core import mail
from django.template import loader
logger = logging.getLogger('chmvh_website.{0}'.format(__name__))
class ContactForm(forms.Form):
name = forms.CharField()
email = forms.EmailField()
message = forms.CharField(widget=forms.Textarea(
attrs={'rows': 5}))
template = loader.get_template('contact/email/message.txt')
def send_email(self):
subject = '[CHMVH Website] Message from {}'.format(
self.cleaned_data['name'])
context = {
'name': self.cleaned_data['name'],
'email': self.cleaned_data['email'],
'message': self.cleaned_data['message'],
}
logger.debug("Preparing to send email")
try:
emails_sent = mail.send_mail(
subject,
self.template.render(context),
settings.DEFAULT_FROM_EMAIL,
['info@chapelhillvet.com'])
logger.info("Succesfully sent email from {0}".format(
self.cleaned_data['email']))
except SMTPException as e:
emails_sent = 0
logger.exception("Failed to send email.", exc_info=e)
return emails_sent == 1
|
from django import forms
from django.conf import settings
from django.core import mail
from django.template import loader
class ContactForm(forms.Form):
name = forms.CharField()
email = forms.EmailField()
message = forms.CharField(widget=forms.Textarea(
attrs={'rows': 5}))
template = loader.get_template('contact/email/message.txt')
def send_email(self):
subject = '[CHMVH Website] Message from {}'.format(
self.cleaned_data['name'])
context = {
'name': self.cleaned_data['name'],
'email': self.cleaned_data['email'],
'message': self.cleaned_data['message'],
}
emails_sent = mail.send_mail(
subject,
self.template.render(context),
settings.DEFAULT_FROM_EMAIL,
['info@chapelhillvet.com'],
fail_silently=True)
return emails_sent == 1
|
mit
|
Python
|
a11c058c520581239a76d1b87920fec7f087eff3
|
Use round brackets
|
rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org
|
readthedocs/builds/managers.py
|
readthedocs/builds/managers.py
|
"""Build and Version class model Managers"""
from __future__ import absolute_import
import logging
from django.db import models
from django.core.exceptions import ObjectDoesNotExist
from .constants import (BRANCH, TAG, LATEST, LATEST_VERBOSE_NAME, STABLE,
STABLE_VERBOSE_NAME)
from .querysets import VersionQuerySet
from readthedocs.core.utils.extend import (SettingsOverrideObject,
get_override_class)
log = logging.getLogger(__name__)
__all__ = ['VersionManager']
class VersionManagerBase(models.Manager):
"""
Version manager for manager only queries.
For queries not suitable for the :py:class:`VersionQuerySet`, such as create
queries.
"""
@classmethod
def from_queryset(cls, queryset_class, class_name=None):
# This is overridden because :py:meth:`models.Manager.from_queryset`
# uses `inspect` to retrieve the class methods, and the proxy class has
# no direct members.
queryset_class = get_override_class(
VersionQuerySet,
VersionQuerySet._default_class # pylint: disable=protected-access
)
return super(VersionManagerBase, cls).from_queryset(queryset_class, class_name)
def create_stable(self, **kwargs):
defaults = {
'slug': STABLE,
'verbose_name': STABLE_VERBOSE_NAME,
'machine': True,
'active': True,
'identifier': STABLE,
'type': TAG,
}
defaults.update(kwargs)
return self.create(**defaults)
def create_latest(self, **kwargs):
defaults = {
'slug': LATEST,
'verbose_name': LATEST_VERBOSE_NAME,
'machine': True,
'active': True,
'identifier': LATEST,
'type': BRANCH,
}
defaults.update(kwargs)
return self.create(**defaults)
def get_object_or_log(self, **kwargs):
try:
return super(VersionManagerBase, self).get(**kwargs)
except ObjectDoesNotExist:
log.warning('Version not found for the pk = {pk}'.format(pk=kwargs.get('pk')))
class VersionManager(SettingsOverrideObject):
_default_class = VersionManagerBase
_override_setting = 'VERSION_MANAGER'
|
"""Build and Version class model Managers"""
from __future__ import absolute_import
import logging
from django.db import models
from django.core.exceptions import ObjectDoesNotExist
from .constants import (BRANCH, TAG, LATEST, LATEST_VERBOSE_NAME, STABLE,
STABLE_VERBOSE_NAME)
from .querysets import VersionQuerySet
from readthedocs.core.utils.extend import (SettingsOverrideObject,
get_override_class)
log = logging.getLogger(__name__)
__all__ = ['VersionManager']
class VersionManagerBase(models.Manager):
"""
Version manager for manager only queries.
For queries not suitable for the :py:class:`VersionQuerySet`, such as create
queries.
"""
@classmethod
def from_queryset(cls, queryset_class, class_name=None):
# This is overridden because :py:meth:`models.Manager.from_queryset`
# uses `inspect` to retrieve the class methods, and the proxy class has
# no direct members.
queryset_class = get_override_class(
VersionQuerySet,
VersionQuerySet._default_class # pylint: disable=protected-access
)
return super(VersionManagerBase, cls).from_queryset(queryset_class, class_name)
def create_stable(self, **kwargs):
defaults = {
'slug': STABLE,
'verbose_name': STABLE_VERBOSE_NAME,
'machine': True,
'active': True,
'identifier': STABLE,
'type': TAG,
}
defaults.update(kwargs)
return self.create(**defaults)
def create_latest(self, **kwargs):
defaults = {
'slug': LATEST,
'verbose_name': LATEST_VERBOSE_NAME,
'machine': True,
'active': True,
'identifier': LATEST,
'type': BRANCH,
}
defaults.update(kwargs)
return self.create(**defaults)
def get_object_or_log(self, **kwargs):
try:
return super(VersionManagerBase, self).get(**kwargs)
except ObjectDoesNotExist:
log.warning('Version not found for the pk = {pk}'.format(pk=kwargs.get['pk']))
class VersionManager(SettingsOverrideObject):
_default_class = VersionManagerBase
_override_setting = 'VERSION_MANAGER'
|
mit
|
Python
|
797ab31382a6c92eb4e9496969e36c35a23db20d
|
Bump version to 10.0.1
|
hhursev/recipe-scraper
|
recipe_scrapers/__version__.py
|
recipe_scrapers/__version__.py
|
__version__ = "10.0.1"
|
__version__ = "10.0.0"
|
mit
|
Python
|
82b1e2db9c9175370d40354c2e6851bb26d58183
|
bump plugin version
|
loomchild/bountyfunding,centaurustech/bountyfunding,loomchild/bountyfunding,centaurustech/bountyfunding,centaurustech/bountyfunding,bountyfunding/bountyfunding,loomchild/bountyfunding,bountyfunding/bountyfunding,bountyfunding/bountyfunding
|
plugins/bountyfunding_plugin_trac/src/setup.py
|
plugins/bountyfunding_plugin_trac/src/setup.py
|
#!/usr/bin/env python
from setuptools import find_packages, setup
setup(
name='BountyFunding', version='0.6',
packages=find_packages(),
entry_points = {
'trac.plugins': [
'bountyfunding = bountyfunding.bountyfunding',
],
},
package_data={'bountyfunding': ['templates/*', 'htdocs/styles/*', 'htdocs/scripts/*']},
)
|
#!/usr/bin/env python
from setuptools import find_packages, setup
setup(
name='BountyFunding', version='0.5',
packages=find_packages(),
entry_points = {
'trac.plugins': [
'bountyfunding = bountyfunding.bountyfunding',
],
},
package_data={'bountyfunding': ['templates/*', 'htdocs/styles/*', 'htdocs/scripts/*']},
)
|
agpl-3.0
|
Python
|
e09798d5adbdea422d31eeed6fded746c0b8e5eb
|
update reduce options
|
boada/planckClusters,boada/planckClusters,boada/planckClusters,boada/planckClusters,boada/planckClusters
|
MOSAICpipe/reduce_ALL.py
|
MOSAICpipe/reduce_ALL.py
|
import os
from glob import glob
''' This file links the MOSAIC pipeline into each folder and then does the
complete reduction on things. It still needs to have the individual association
files created before hand, but it does everything else.
I've updated it to also to the newfirm linking and reduction. You specify which
instrument you want to use as a command line argument. 'mosaic' or 'newfirm'
'''
script_dir = '/home/boada/Projects/planckClusters/MOSAICpipe'
def main():
dirs = [dirs for _, dirs, _ in os.walk('./')][0] # only want top level
cwd = os.getcwd()
for d in dirs:
print(d)
os.chdir(cwd)
if 'PSZ' not in d:
continue
target_dir = './{}'.format(d)
if not os.path.isdir(target_dir):
continue
relpath = os.path.relpath('{}'.format(script_dir), target_dir)
print(relpath)
print(target_dir)
try:
os.symlink('{}/combcat_PROJECTED.py'.format(script_dir),
'{}/combcat_PROJECTED.py'.format(target_dir))
except FileExistsError:
pass
# now do the pipeline
os.chdir(target_dir)
assocFile = glob('*.assoc')[0]
print(os.getcwd())
# build the command
cmd = 'python3 combcat_PROJECTED.py {} ./ ./'.format(assocFile)
cmd += ' --noSWarp --noPhoto --noAstro --noRGB'
print(cmd)
os.system(cmd)
# clean up all of the intermediate data products
cmds = ["find . -path '*/.diagnostics/*' -delete",
"find . -type d -name '.diagnostics' -empty -delete",
"find . -type f -name 'registration_*' -delete",
"find . -type f -name '*ldac*' -delete",
"find . -type f -name 'diagnostics.html' -delete",
"find . -type f -name '*.lst' -delete",
"find . -type f -name '*.xml' -delete",
"find . -type f -name 'GAIA.cat' -delete",
"find . -type f -name 'best_astrometry.dat' -delete"]
for cmd in cmds:
os.system(cmd)
if __name__ == "__main__":
main()
|
import os
from glob import glob
import sys
''' This file links the MOSAIC pipeline into each folder and then does the
complete reduction on things. It still needs to have the individual association
files created before hand, but it does everything else.
I've updated it to also to the newfirm linking and reduction. You specify which
instrument you want to use as a command line argument. 'mosaic' or 'newfirm'
'''
script_dir = '/home/boada/Projects/planckClusters/MOSAICpipe'
def main():
dirs = [dirs for _, dirs, _ in os.walk('./')][0] # only want top level
cwd = os.getcwd()
for d in dirs:
print(d)
os.chdir(cwd)
if 'PSZ' not in d:
continue
target_dir = './{}'.format(d)
if not os.path.isdir(target_dir):
continue
relpath = os.path.relpath('{}'.format(script_dir), target_dir)
print(relpath)
print(target_dir)
try:
os.symlink('{}/combcat_PROJECTED.py'.format(script_dir),
'{}/combcat_PROJECTED.py'.format(target_dir))
except FileExistsError:
pass
# now do the pipeline
os.chdir(target_dir)
assocFile = glob('*.assoc')[0]
print(os.getcwd())
# build the command
cmd = 'python3 combcat_PROJECTED.py {} ./ ./'.format(assocFile)
cmd += ' --noPhoto --noAstro --noSEx --noBPZ'
print(cmd)
os.system(cmd)
# clean up all of the intermediate data products
cmds = ["find . -path '*/.diagnostics/*' -delete",
"find . -type d -name '.diagnostics' -empty -delete",
"find . -type f -name 'registration_*' -delete",
"find . -type f -name '*ldac*' -delete",
"find . -type f -name 'diagnostics.html' -delete",
"find . -type f -name '*.lst' -delete",
"find . -type f -name '*.xml' -delete",
"find . -type f -name 'GAIA.cat' -delete",
"find . -type f -name 'best_astrometry.dat' -delete"]
for cmd in cmds:
os.system(cmd)
if __name__ == "__main__":
main()
|
mit
|
Python
|
d7cfdbd2bde0cc876db8c1bce020d8a1cf0ea77b
|
Add search filtering for name and booleans in resource API.
|
uw-it-aca/mdot-rest,uw-it-aca/mdot-rest
|
mdot_rest/views.py
|
mdot_rest/views.py
|
from django.shortcuts import render
from .models import Resource
from .serializers import ResourceSerializer
from rest_framework import generics, permissions
import django_filters
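# django-filter FilterSet: exposes exact-match filtering on the listed fields
# via query parameters, e.g. ?featured=True.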
class ResourceFilter(django_filters.FilterSet):
class Meta:
model = Resource
fields = ('name', 'featured', 'accessible', 'responsive_web',)
class ResourceList(generics.ListCreateAPIView):
queryset = Resource.objects.all()
serializer_class = ResourceSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
filter_class = ResourceFilter
class ResourceDetail(generics.RetrieveUpdateDestroyAPIView):
queryset = Resource.objects.all()
serializer_class = ResourceSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
|
from django.shortcuts import render
from .models import Resource
from .serializers import ResourceSerializer
from rest_framework import generics, permissions
class ResourceList(generics.ListCreateAPIView):
queryset = Resource.objects.all()
serializer_class = ResourceSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
class ResourceDetail(generics.RetrieveUpdateDestroyAPIView):
queryset = Resource.objects.all()
serializer_class = ResourceSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
|
apache-2.0
|
Python
|
08d2ade71e6fb69512cb6d39cb7ef8712a44172a
|
update mediumRegex
|
sanxofon/basicnlp,sanxofon/basicnlp
|
mediumRegexUTF8.py
|
mediumRegexUTF8.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# FIX PARA WINDOWS CONSOLE ----------------------
# Usar: chcp 1252
import codecs,locale,sys
sys.stdout = codecs.getwriter(locale.getpreferredencoding())(sys.stdout)
# -----------------------------------------------
import re
cadena = u"""—¡Joven «emponzoñado» con el whisky, qué fin… te aguarda exhibir!
El pingüino Wenceslao hizo kilómetros bajo exhaustiva
lluvia y frío, añoraba a su querido cachorro."""
patrones = [
(ur"(\w)", u"Busca todos los caracteres de palabra"),
(ur"(\W)", u"Busca todos los caracteres que no son de palabra"),
(ur"(\s)", u"Busca todos los caracteres de espaciado"),
(ur"(\S)", u"Busca todos los caracteres que no son de espaciado"),
(ur"(\w+)", u"Busca todas las palabras"),
(ur"(\w+)\s+(\w+)", u"Busca pares de palabras separadas por un espacio"),
(ur"([^\s]+)\s+([^\s]+)", u"Busca dos grupos de caracteres que no sean espacios seguidos, separados por un espacio"),
(ur"(\w+)[^\w\s]?\s+[^\w\s]?(\w+)", u"Busca dos palabras separadas por un espacio que pueden o no tener un caractes no de palabra a los lados"),
(ur"(\w+)\s+(?=(\w+))","Busca todos los pares de palabras (separadas por espacio) con lookahead"),
(ur"(\w+)(?=(?:\s+(\W*)(\w+))|([^\w\r\n]+))","Busca pares de palabra/palabra o palabra/otro, puede incluir caracteres que anteceden la segunda palabra"),
]
print u"\nCadena:",cadena
for i,patron in enumerate(patrones):
if (len(sys.argv)>1 and sys.argv[1]!=str(i)):
continue
paco = re.compile(patron[0], re.UNICODE)
match = paco.findall(cadena)
print "\n",patron[1]
print "\t",patron[0],"\n"
if len(match)>0:
for ii,m in enumerate(match):
if isinstance(match[0], tuple):
m = filter(None, m) # Elimina los valores vacíos
print ii,"\t", "\t".join(m)
else:
print ii,"\t", m
else:
print "\tNo hubo coincidencias"
print
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# FIX PARA WINDOWS CONSOLE ----------------------
import codecs,sys
sys.stdout = codecs.getwriter("utf8")(sys.stdout)
# -----------------------------------------------
import re
cadena = u"""—¡Joven «emponzoñado» con el whisky, qué fin… te aguarda exhibir!
El pingüino Wenceslao hizo kilómetros bajo exhaustiva
lluvia y frío, añoraba a su querido cachorro."""
patrones = [
(ur"(\w)", u"Busca todos los caracteres de palabra"),
(ur"(\W)", u"Busca todos los caracteres que no son de palabra"),
(ur"(\s)", u"Busca todos los caracteres de espaciado"),
(ur"(\S)", u"Busca todos los caracteres que no son de espaciado"),
(ur"(\w+)", u"Busca todas las palabras"),
(ur"(\w+)\s+(\w+)", u"Busca pares de palabras separadas por un espacio"),
(ur"([^\s]+)\s+([^\s]+)", u"Busca dos grupos de caracteres que no sean espacios seguidos, separados por un espacio"),
(ur"(\w+)[^\w\s]?\s+[^\w\s]?(\w+)", u"Busca dos palabras separadas por un espacio que pueden o no tener un caractes no de palabra a los lados"),
(ur"(\w+)\s+(?=(\w+))","Busca todos los pares de palabras (separadas por espacio) con lookahead"),
(ur"(\w+)(?=(?:\s+(\W*)(\w+))|([^\w\r\n]+))","Busca pares de palabra/palabra o palabra/otro, puede incluir caracteres que anteceden la segunda palabra"),
]
print u"\nCadena:",cadena
for i,patron in enumerate(patrones):
if (len(sys.argv)>1 and sys.argv[1]!=str(i)):
continue
paco = re.compile(patron[0], re.UNICODE)
match = paco.findall(cadena)
print "\n",patron[1]
print "\t",patron[0],"\n"
if len(match)>0:
for ii,m in enumerate(match):
if isinstance(match[0], tuple):
m = filter(None, m) # Elimina los valores vacíos
print ii,"\t", "\t".join(m)
else:
print ii,"\t", m
else:
print "\tNo hubo coincidencias"
print
|
mit
|
Python
|
76dcc6cd050172af50c0721b312ea499f0bb7b71
|
modify build option
|
cubicdaiya/neoagent,cubicdaiya/neoagent
|
build/config.py
|
build/config.py
|
# -*- coding: utf-8 -*-
cflags = [
'-std=c99',
'-Wall',
'-g0',
'-O3',
# '-fno-strict-aliasing',
'-D_GNU_SOURCE',
]
libs = [
'pthread',
'ev',
'json',
]
includes = [
'ext',
]
headers = [
'stdint.h',
'stdbool.h',
'unistd.h',
'sys/stat.h',
'sys/types.h',
'sys/socket.h',
'sys/un.h',
'sys/ioctl.h',
'arpa/inet.h',
'netinet/in.h',
'netdb.h',
'signal.h',
'errno.h',
'pthread.h',
'ev.h',
]
funcs = [
'sigaction',
'sigignore',
]
|
# -*- coding: utf-8 -*-
cflags = [
'-std=c99',
'-Wall',
'-g',
'-O2',
# '-fno-strict-aliasing',
'-D_GNU_SOURCE',
]
libs = [
'pthread',
'ev',
'json',
]
includes = [
'ext',
]
headers = [
'stdint.h',
'stdbool.h',
'unistd.h',
'sys/stat.h',
'sys/types.h',
'sys/socket.h',
'sys/un.h',
'sys/ioctl.h',
'arpa/inet.h',
'netinet/in.h',
'netdb.h',
'signal.h',
'errno.h',
'pthread.h',
'ev.h',
]
funcs = [
'sigaction',
'sigignore',
]
|
bsd-3-clause
|
Python
|
f9d911091f01d91485f21c01850798892ed28dd0
|
add right arrow
|
thomasballinger/scottwasright,thomasballinger/scottwasright
|
scottsright/manual_readline.py
|
scottsright/manual_readline.py
|
char_sequences = {}
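# Maps key/escape sequences to editing functions that take (cursor_offset, line)
# and return the updated (cursor_offset, line).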
def on(seq):
def add_to_char_sequences(func):
char_sequences[seq] = func
return func
return add_to_char_sequences
@on('[D')
@on('')
def left_arrow(cursor_offset, line):
return max(0, cursor_offset - 1), line
@on('[C')
@on('')
def right_arrow(cursor_offset, line):
return min(len(line), cursor_offset + 1), line
if __name__ == '__main__':
print repr(char_sequences)
|
char_sequences = {}
def on(seq):
def add_to_char_sequences(func):
char_sequences[seq] = func
return func
return add_to_char_sequences
@on('[D')
@on('')
@on('\x02')
def left_arrow(cursor_offset, line):
return max(0, cursor_offset - 1), line
if __name__ == '__main__':
print repr(char_sequences)
|
mit
|
Python
|
3555b002aae386220bc02d662a9b188426afc08f
|
Create a specific group for the Facebook plugins - makes it a bit neater in the list of plugins.
|
chrisglass/cmsplugin_facebook
|
cmsplugin_facebook/cms_plugins.py
|
cmsplugin_facebook/cms_plugins.py
|
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from cmsplugin_facebook import models
class BasePlugin(CMSPluginBase):
name = None
def render(self, context, instance, placeholder):
context.update({'instance': instance,
'name': self.name,
'url': instance.pageurl or \
context['request'].build_absolute_uri()})
return context
class FacebookLikeBoxPlugin(BasePlugin):
model = models.FacebookLikeBox
name = 'Facebook Like Box'
module = 'Facebook'
render_template = 'cmsplugin_facebook/likebox.html'
change_form_template = 'cmsplugin_facebook/likebox_change_form.html'
class FacebookLikeButtonPlugin(BasePlugin):
model = models.FacebookLikeButton
name = 'Facebook Like Button'
module = 'Facebook'
render_template = 'cmsplugin_facebook/likebutton.html'
change_form_template = 'cmsplugin_facebook/likebutton_change_form.html'
plugin_pool.register_plugin(FacebookLikeBoxPlugin)
plugin_pool.register_plugin(FacebookLikeButtonPlugin)
|
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from cmsplugin_facebook import models
class BasePlugin(CMSPluginBase):
name = None
def render(self, context, instance, placeholder):
context.update({'instance': instance,
'name': self.name,
'url': instance.pageurl or \
context['request'].build_absolute_uri()})
return context
class FacebookLikeBoxPlugin(BasePlugin):
model = models.FacebookLikeBox
name = 'Facebook Like Box'
render_template = 'cmsplugin_facebook/likebox.html'
change_form_template = 'cmsplugin_facebook/likebox_change_form.html'
class FacebookLikeButtonPlugin(BasePlugin):
model = models.FacebookLikeButton
name = 'Facebook Like Button'
render_template = 'cmsplugin_facebook/likebutton.html'
change_form_template = 'cmsplugin_facebook/likebutton_change_form.html'
plugin_pool.register_plugin(FacebookLikeBoxPlugin)
plugin_pool.register_plugin(FacebookLikeButtonPlugin)
|
bsd-3-clause
|
Python
|
7f2b3d91550fd6af46ee10e6c68c8633408b12ed
|
Revert revert to properly fix #125 without cruft. Sigh.
|
stencila/hub,stencila/hub,stencila/hub,stencila/hub,stencila/hub
|
director/scripts/create_dev_projects.py
|
director/scripts/create_dev_projects.py
|
"""
Create projects for the development database
"""
from django.conf import settings
from accounts.models import Account, AccountUserRole
from projects.models import Project
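# Picks a random user holding a role on the given account.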
def random_account_member(account):
return AccountUserRole.objects.filter(account=account).order_by('?').first().user
def run(*args):
# Ensure that this is only used in development
assert settings.DEBUG
# Assumes that there are at least 3 accounts
accounts = Account.objects.all()
Project.objects.create(
account=accounts[0],
creator=random_account_member(accounts[0]),
public=True,
name='The project name',
description='''
The project description. Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure
dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non
roident, sunt in culpa qui officia deserunt mollit anim id est laborum.
'''.strip()
)
Project.objects.create(
account=accounts[1],
creator=random_account_member(accounts[1]),
public=True
)
Project.objects.create(
account=accounts[2],
creator=random_account_member(accounts[2]),
public=False
)
|
"""
Create projects for the development database
"""
from django.conf import settings
from accounts.models import Account
from projects.models import Project
def run(*args):
# Ensure that this is only used in development
assert settings.DEBUG
# Assumes that there are at least 3 accounts
accounts = Account.objects.all()
Project.objects.create(
account=accounts[0],
public=True,
name='The project name',
description='''
The project description. Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure
dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non
roident, sunt in culpa qui officia deserunt mollit anim id est laborum.
'''.strip()
)
Project.objects.create(
account=accounts[1],
public=True
)
Project.objects.create(
account=accounts[2],
public=False
)
|
apache-2.0
|
Python
|
0282c7eaecb32b736592c84cda1f7520c130c676
|
Update basic tests
|
iconpin/anser
|
test/basic.py
|
test/basic.py
|
import unittest
from anser import Anser, Client
class BasicAnserTest(unittest.TestCase):
def test_creation(self):
server = Anser(__file__)
self.assertEquals(server.name, __file__)
def test_creation_explicit_no_debug(self):
server = Anser(__file__, debug=False)
self.assertFalse(server.debug)
def test_creation_implicit_no_debug(self):
server = Anser(__file__)
self.assertFalse(server.debug)
def test_creation_explicit_debug(self):
server = Anser(__file__, debug=True)
self.assertTrue(server.debug)
def test_add_action(self):
server = Anser(__file__)
@server.action('default')
def dummy_action(message, address):
pass
self.assertTrue(dummy_action in server.actions)
class BasicClientTest(unittest.TestCase):
def test_creation(self):
client = Client('10.0.0.1', 4000)
self.assertEquals(client.address, '10.0.0.1')
self.assertEquals(client.port, 4000)
def test_creation_implicit_no_debug(self):
client = Client('10.0.0.1', 4000)
self.assertFalse(client.debug)
def test_creation_explicit_debug(self):
client = Client('10.0.0.1', 4000, debug=True)
self.assertTrue(client.debug)
if __name__ == '__main__':
unittest.main()
|
import unittest
from anser import Anser
class BasicTest(unittest.TestCase):
def setUp(self):
pass
def test_creation(self):
server = Anser(__file__)
self.assertEquals(server.name, __file__)
def test_creation_explicit_no_debug(self):
server = Anser(__file__, debug=False)
self.assertFalse(server.debug)
def test_creation_implicit_no_debug(self):
server = Anser(__file__)
self.assertFalse(server.debug)
def test_creation_explicit_debug(self):
server = Anser(__file__, debug=True)
self.assertTrue(server.debug)
def test_add_action(self):
server = Anser(__file__)
@server.action('default')
def dummy_action(message, address):
pass
self.assertTrue(dummy_action in server.actions)
def test_receive(self):
pass
def test_send(self):
pass
if __name__ == '__main__':
unittest.main()
|
mit
|
Python
|
db4d5263c38e95ad8c2e253512c563ea97b8772f
|
Fix adduser script first line
|
goneall/PiLightsWebServer,goneall/PiLightsWebServer
|
src/adduser.py
|
src/adduser.py
|
#!/usr/bin/env python
# Licensed under the Apache 2.0 License
'''
Add a user to the database
Usage: adduser username password
The environment variable LIGHTS_WEB_DATABASE must be set to the path of the database
Created on Nov 13, 2014
@author: Gary O'Neall
'''
import sys
import sqlite3
from hashlib import sha256
from os import path
DB_VAR = '$LIGHTS_WEB_DATABASE'
DB_PATH = path.expandvars(DB_VAR)
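# LIGHTS_WEB_DATABASE must point at the SQLite database file used below.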
def usage():
    ''' Prints the usage to the console
'''
print "Usage:"
print "adduser username password"
if __name__ == '__main__':
if len(sys.argv) != 3:
usage()
sys.exit(1)
username = sys.argv[1].strip()
password = sys.argv[2].strip()
password_hash = sha256(password)
password_dig = password_hash.hexdigest()
if not path.isfile(DB_PATH):
print "Database is not initialized"
sys.exit(1)
con = sqlite3.connect(DB_PATH)
try:
cursor = con.execute('select id from users where username=?', [username])
row = cursor.fetchone()
if row:
print "User already exists"
sys.exit(1)
con.execute('insert into users (username, password) values (?, ?)', [username, password_dig])
print 'user added'
except Exception as ex:
print "Error updating database: "+str(ex)
finally:
con.commit()
con.close()
|
#!/usr/bin/env python
# Licensed under the Apache 2.0 License
'''
Add a user to the database
Usage: adduser username password
The environment variable LIGHTS_WEB_DATABASE must be set to the path of the database
Created on Nov 13, 2014
@author: Gary O'Neall
'''
import sys
import sqlite3
from hashlib import sha256
from os import path
DB_VAR = '$LIGHTS_WEB_DATABASE'
print DB_VAR
DB_PATH = path.expandvars(DB_VAR)
print DB_VAR + ':' + DB_PATH
def usage():
    ''' Prints the usage to the console
'''
print "Usage:"
print "adduser username password"
if __name__ == '__main__':
print 'hello'
if len(sys.argv) != 3:
usage()
sys.exit(1)
username = sys.argv[1].strip()
password = sys.argv[2].strip()
password_hash = sha256(password)
password_dig = password_hash.hexdigest()
if not path.isfile(DB_PATH):
print "Database is not initialized"
sys.exit(1)
con = sqlite3.connect(DB_PATH)
try:
cursor = con.execute('select id from users where username=?', [username])
row = cursor.fetchone()
if row:
print "User already exists"
sys.exit(1)
con.execute('insert into users (username, password) values (?, ?)', [username, password_dig])
print 'user added'
except Exception as ex:
print "Error updating database: "+str(ex)
finally:
con.commit()
con.close()
|
apache-2.0
|
Python
|
a04d97bd9bb62d15201d8cadd1fd3b24980d3507
|
Fix installation path generation for configuration file
|
ivannieto/archer-t2u-ubuntu-installer,ivannieto/archer-t2u-ubuntu-installer,ivannieto/archer-t2u-ubuntu-installer,ivannieto/archer-t2u-ubuntu-installer
|
t2u-driver-installer.py
|
t2u-driver-installer.py
|
import os
PATH = os.getcwd()
HOME = os.getenv('HOME')
INSTALL_FILES = PATH+'/driver-files'
DEV_DIR = HOME+'/test-install'
PROD_DIR = '/etc'
BIN_DIR = '/usr/bin/'
print(('*'*25)+'\n')
print()
def take_input():
i = input("Please, disconnect all devices you're trying to install and press [I]: ")
return i
while(take_input()!="i"):
take_input()
else:
# Install files
os.system("cd {} && make clean && make && sudo make install".format(INSTALL_FILES))
# Check for existing installation dirs
print(os.path)
    if os.path.isdir(PROD_DIR + '/Wireless/RT2870STA/'):
        pass
    else:
        os.makedirs(PROD_DIR + '/Wireless/RT2870STA/')
# Copy configuration file
os.system("sudo cp {}/RT2870STA.dat {}/Wireless/RT2870STA/RT2870STA.dat".format(INSTALL_FILES, PROD_DIR))
# Install driver
os.system("cd {}/os/linux/ && sudo insmod mt7650u_sta.ko".format(INSTALL_FILES))
# Script generator for running t2u-driver as a system program
os.system("sudo cp {0}/t2u-driver {1}/t2u-driver && sudo chmod +x {1}/t2u-driver".format(PATH, BIN_DIR))
# Restart warning
print("*"*25+"\n\nYour computer should be restarted now.\nPlease close all running programs and restart manually when you're done.\nAn executable will be installed to enable or disable the adapter.\nJust run in your terminal '$ sudo t2u-driver'.")
|
import os
PATH = os.getcwd()
HOME = os.getenv('HOME')
INSTALL_FILES = PATH+'/driver-files'
DEV_DIR = HOME+'/test-install'
PROD_DIR = '/etc'
BIN_DIR = '/usr/bin/'
print(('*'*25)+'\n')
print()
def take_input():
i = input("Please, disconnect all devices you're trying to install and press [I]: ")
return i
while(take_input()!="i"):
take_input()
else:
# Install files
os.system("cd {} && make clean && make && sudo make install".format(INSTALL_FILES))
# Comprobar si existe el directorio de destino de la configuracion y si no existe crearlo
print(os.path)
if os.path.isdir(DEV_DIR + '/Wireless/RT2870STA/'):
pass
else:
os.mkdir(DEV_DIR + '/Wireless/RT2870STA/')
# Copiar el archivo de configuracion
os.system("sudo cp {}/RT2870STA.dat {}/Wireless/RT2870STA/RT2870STA.dat".format(INSTALL_FILES, DEV_DIR))
# Instalar el modulo del driver
os.system("cd {}/os/linux/ && sudo insmod mt7650u_sta.ko".format(INSTALL_FILES))
# Generar el script que debería correrse al inicio del sistema para levantar la conexion
os.system("sudo cp {0}/t2u-driver {1}/t2u-driver && sudo chmod +x {1}/t2u-driver".format(PATH, BIN_DIR))
# Avisar de las opciones para deshabilitar o levantar el adaptador desde el programa LOCAL_BIN
print("*"*25+"\n\nYour computer should be restarted now.\nPlease close all running programs and restart manually when you're done.\nAn executable will be installed to enable or disable the adapter.\nJust run in your terminal '$ sudo t2u-driver'.")
|
mit
|
Python
|
90b1aebe4b67ff9f221aee3b0c668f658d915537
|
Update bottlespin.py
|
kallerdaller/Cogs-Yorkfield
|
bottlespin/bottlespin.py
|
bottlespin/bottlespin.py
|
import discord
from discord.ext import commands
from random import choice
class Bottlespin:
"""Spins a bottle and lands on a random user."""
def __init__(self, bot):
self.bot = bot
    @commands.command(pass_context=True, no_pm=True, aliases=["bottlespin"])
async def spin(self, ctx, role):
"""Spin the bottle"""
roles = ctx.message.server.roles
        role = discord.utils.get(ctx.message.server.roles, name=role)
await self.bot.say(str(role))
if role in roles:
await self.bot.say(str(role))
await self.bot.say(str(roles))
author = ctx.message.author
server = ctx.message.server
if len(server.members) < 2:
await self.bot.say("`Not enough people are around to spin the bottle`")
return
if role in roles:
roleexist = True
else:
await self.bot.say("`{} is not a exising role`".format(role))
return
if roleexist:
target = [m for m in server.members if m != author and role in [
s.name for s in m.roles] and str(m.status) == "online" or str(m.status) == "idle"]
else:
target = [m for m in server.members if m != author and str(
m.status) == "online" or str(m.status) == "idle"]
if not target:
if role:
await self.bot.say("`Sorry I couldnt find anyone to point the bottle at with the role {}`".format(role))
else:
await self.bot.say("`Sorry I couldnt find anyone to point the bottle at`")
return
else:
target = choice(list(target))
await self.bot.say("`{0.display_name}#{0.discriminator} spinned the bottle and it landed on {1.display_name}#{1.discriminator}`".format(author, target))
def setup(bot):
n = Bottlespin(bot)
bot.add_cog(n)
|
import discord
from discord.ext import commands
from random import choice
class Bottlespin:
"""Spins a bottle and lands on a random user."""
def __init__(self, bot):
self.bot = bot
    @commands.command(pass_context=True, no_pm=True, aliases=["bottlespin"])
async def spin(self, ctx, role):
"""Spin the bottle"""
await self.bot.say(str(role))
roles = ctx.message.server.roles
if role in roles:
await self.bot.say(str(role))
await self.bot.say(str(roles))
author = ctx.message.author
server = ctx.message.server
if len(server.members) < 2:
await self.bot.say("`Not enough people are around to spin the bottle`")
return
if role in roles:
roleexist = True
else:
await self.bot.say("`{} is not a exising role`".format(role))
return
if roleexist:
target = [m for m in server.members if m != author and role in [
s.name for s in m.roles] and str(m.status) == "online" or str(m.status) == "idle"]
else:
target = [m for m in server.members if m != author and str(
m.status) == "online" or str(m.status) == "idle"]
if not target:
if role:
await self.bot.say("`Sorry I couldnt find anyone to point the bottle at with the role {}`".format(role))
else:
await self.bot.say("`Sorry I couldnt find anyone to point the bottle at`")
return
else:
target = choice(list(target))
await self.bot.say("`{0.display_name}#{0.discriminator} spinned the bottle and it landed on {1.display_name}#{1.discriminator}`".format(author, target))
def setup(bot):
n = Bottlespin(bot)
bot.add_cog(n)
|
mit
|
Python
|
fb6aa002e13a1d1205da28b20d419419067117f6
|
Implement basic genome crossover (#44, #36)
|
a5kin/hecate,a5kin/hecate
|
xentica/tools/genetics.py
|
xentica/tools/genetics.py
|
"""A collection of functions allowing genetics manipulations."""
from xentica import core
from xentica.tools import xmath
def genome_crossover(state, num_genes, *genomes, rng_name="rng"):
"""
Crossover given genomes in stochastic way.
:param state:
A container holding model's properties.
:param num_genes:
Genome length, assuming all genomes has same number of genes.
:param genomes:
A list of genomes (integers) to crossover
:param rng_name:
Name of ``RandomProperty``.
:returns: Single integer, a resulting genome.
"""
gene_choose = core.IntegerVariable()
new_genome = core.IntegerVariable()
for gene in range(num_genes):
gene_choose *= 0
for i, genome in enumerate(genomes):
gene_choose += ((genome >> gene) & 1) << i
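        # gene_choose now packs the current gene across parents:
        # bit i is genome i's value at this gene position.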
rand_val = getattr(state, rng_name).uniform
        winner_gene = xmath.int(rand_val * len(genomes))
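        # inherit the randomly selected parent's bit at this gene position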
new_genome += ((gene_choose >> winner_gene) & 1) << gene
return new_genome
|
"""A collection of functions allowing genetics manipulations."""
def genome_crossover(*genomes):
"""
Crossover given genomes in stochastic way.
:param genomes: A list of genomes (integers) to crossover
:returns: Single integer, a resulting genome.
"""
raise NotImplementedError
|
mit
|
Python
|