Dataset schema (one record per commit; the length and class statistics come from the source dump):

commit        : string, length 40..40
subject       : string, length 4..1.73k
repos         : string, length 5..127k
old_file      : string, length 2..751
new_file      : string, length 2..751
new_contents  : string, length 1..8.98k
old_contents  : string, length 0..6.59k
license       : string, 13 classes
lang          : string, 23 classes
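Each row below is one commit record with the schema above: the commit hash, its subject line, the repositories it appears in, the file path before and after the change, and the full file contents after (new_contents) and before (old_contents) the commit. As a minimal sketch of how rows with this schema could be consumed (the filename `commit_diffs.jsonl` is a placeholder, not the actual location of this dump):

```python
import json

# Hypothetical reader: assumes the rows are stored one JSON object
# per line (JSONL) with exactly the fields listed above.
with open("commit_diffs.jsonl") as fh:  # placeholder path
    for line in fh:
        row = json.loads(line)
        # Keep Python rows whose contents actually changed.
        if row["lang"] == "Python" and row["old_contents"] != row["new_contents"]:
            print(row["commit"][:10], "-", row["subject"])
```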
commit: c0d79ba0420f6e0176e98266a02c60b1f53f4a93
subject: apply same simplification to setfield
repos: crccheck/dj-obj-update
old_file: obj_update.py
new_file: obj_update.py
new_contents:

from __future__ import unicode_literals

import logging
import sys

# for python 2/3 compatibility
text_type = unicode if sys.version_info[0] < 3 else str

logger = logging.getLogger('obj_update')

def setfield(obj, fieldname, value):
    """Fancy setattr with debugging."""
    old = getattr(obj, fieldname)
    old_repr = old if old is None else text_type(old)
    new_repr = value if value is None else text_type(value)
    if old_repr != new_repr:
        setattr(obj, fieldname, value)
        if not hasattr(obj, '_is_dirty'):
            obj._is_dirty = []
            obj._dirty_fields = []
        obj._is_dirty.append(u'[%s %s->%s]' % (fieldname, old_repr, new_repr))
        obj._dirty_fields.append(fieldname)

def set_foreign_field(obj, fieldname, value):
    """Fancy setattr with debugging for foreign fields."""
    old = getattr(obj, fieldname)
    old_repr = old if old is None else old.pk
    new_repr = value if value is None else value.pk
    if old_repr != new_repr:
        setattr(obj, fieldname, value)
        if not hasattr(obj, '_is_dirty'):
            obj._is_dirty = []
            obj._dirty_fields = []
        obj._is_dirty.append('[%s %s->%s]' % (fieldname, old_repr, new_repr))
        obj._dirty_fields.append(fieldname)

def update(obj, data):
    """Fancy way to update `obj` with `data` dict.

    Returns True if data changed and was saved.
    """
    for field_name, value in data.items():
        # is_relation is Django 1.8 only
        if obj._meta.get_field(field_name).is_relation:
            set_foreign_field(obj, field_name, value)
        else:
            setfield(obj, field_name, value)
    if getattr(obj, '_is_dirty', None):
        logger.debug(u''.join(obj._is_dirty))
        obj.save(update_fields=obj._dirty_fields)
        del obj._is_dirty
        del obj._dirty_fields
        return True

old_contents:

from __future__ import unicode_literals

import logging
import sys

# for python 2/3 compatibility
text_type = unicode if sys.version_info[0] < 3 else str

logger = logging.getLogger('obj_update')

def setfield(obj, fieldname, value):
    """Fancy setattr with debugging."""
    old = getattr(obj, fieldname)
    if old is None and value is None:
        changed = False
    elif old is None and value is not None:
        changed = True
    else:
        changed = text_type(old) != text_type(value)
    if changed:
        setattr(obj, fieldname, value)
        if not hasattr(obj, '_is_dirty'):
            obj._is_dirty = []
            obj._dirty_fields = []
        # obj._is_dirty.append(u'[%s %s->%s]' % (fieldname, old, value))
        obj._is_dirty.append(fieldname)
        obj._dirty_fields.append(fieldname)

def set_foreign_field(obj, fieldname, value):
    """Fancy setattr with debugging for foreign fields."""
    old = getattr(obj, fieldname)
    old_repr = old if old is None else old.pk
    new_repr = value if value is None else value.pk
    if old_repr != new_repr:
        setattr(obj, fieldname, value)
        if not hasattr(obj, '_is_dirty'):
            obj._is_dirty = []
            obj._dirty_fields = []
        obj._is_dirty.append('[%s %s->%s]' % (fieldname, old_repr, new_repr))
        obj._dirty_fields.append(fieldname)

def update(obj, data):
    """Fancy way to update `obj` with `data` dict.

    Returns True if data changed and was saved.
    """
    for field_name, value in data.items():
        # is_relation is Django 1.8 only
        if obj._meta.get_field(field_name).is_relation:
            set_foreign_field(obj, field_name, value)
        else:
            setfield(obj, field_name, value)
    if getattr(obj, '_is_dirty', None):
        logger.debug(u''.join(obj._is_dirty))
        obj.save(update_fields=obj._dirty_fields)
        del obj._is_dirty
        del obj._dirty_fields
        return True

license: apache-2.0
lang: Python
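The before/after pair in each record can be diffed directly to recover the change the subject line describes. A small, hedged example using Python's standard difflib on the two content fields (`row` is assumed to be one parsed record, as in the reader sketch above):

```python
import difflib

# Both fields hold the full file text for one commit record.
diff = difflib.unified_diff(
    row["old_contents"].splitlines(keepends=True),
    row["new_contents"].splitlines(keepends=True),
    fromfile=row["old_file"],
    tofile=row["new_file"],
)
print("".join(diff))
```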
commit: c3587c23f6a5f34cf7bdc0a88b4057381f7752ac
subject: add cascade delete table
repos: maigfrga/flaskutils,maigfrga/flaskutils,Riffstation/flaskutils,maigfrga/flaskutils,Riffstation/flaskutils,Riffstation/flaskutils,maigfrga/flaskutils,Riffstation/flaskutils
old_file: flaskutils/test.py
new_file: flaskutils/test.py
new_contents:

from flaskutils import app
from .models import FlaskModel
from pgsqlutils.base import syncdb, Session

class ModelTestCase(object):
    def setup(self):
        """
        Use this test case when no interaction in a view is required
        """
        syncdb()

    def teardown(self):
        Session.rollback()
        Session.close()

class TransactionalTestCase(object):
    """
    This tests should be used when testing views
    """
    def setup(self):
        self.client = app.test_client()
        self.json_request_headers = {
            'Accept': 'application/json',
            'Content-Type': 'application/json'
        }
        syncdb()

    def teardown(self):
        for t in FlaskModel.metadata.sorted_tables:
            sql = 'delete from {} cascade;'.format(t.name)
            Session.execute(sql)
        Session.commit()
        Session.close()

class ApiTestCase(object):
    """
    Instanciates an http client ready to make json requests and
    get json responses, it doesn't instanciate a database connection
    """
    def setup(self):
        self.client = app.test_client()
        self.json_request_headers = {
            'Accept': 'application/json',
            'Content-Type': 'application/json'
        }

old_contents:

from flaskutils import app
from .models import FlaskModel
from pgsqlutils.base import syncdb, Session

class ModelTestCase(object):
    def setup(self):
        """
        Use this test case when no interaction in a view is required
        """
        syncdb()

    def teardown(self):
        Session.rollback()
        Session.close()

class TransactionalTestCase(object):
    """
    This tests should be used when testing views
    """
    def setup(self):
        self.client = app.test_client()
        self.json_request_headers = {
            'Accept': 'application/json',
            'Content-Type': 'application/json'
        }
        syncdb()

    def teardown(self):
        for t in FlaskModel.metadata.sorted_tables:
            sql = 'delete from {};'.format(t.name)
            Session.execute(sql)
        Session.commit()
        Session.close()

class ApiTestCase(object):
    """
    Instanciates an http client ready to make json requests and
    get json responses, it doesn't instanciate a database connection
    """
    def setup(self):
        self.client = app.test_client()
        self.json_request_headers = {
            'Accept': 'application/json',
            'Content-Type': 'application/json'
        }

license: apache-2.0
lang: Python
commit: e8f941aca9a111eb81c41e0be3a0c6591386083c
subject: change way to get if celery should be used
repos: nelsonmonteiro/django-flowjs
old_file: flowjs/settings.py
new_file: flowjs/settings.py
new_contents:

from django.conf import settings

# Media path where the files are saved
FLOWJS_PATH = getattr(settings, "FLOWJS_PATH", 'flowjs/')

# Remove the upload files when the model is deleted
FLOWJS_REMOVE_FILES_ON_DELETE = getattr(settings, "FLOWJS_REMOVE_FILES_ON_DELETE", True)

# Remove temporary chunks after file have been upload and created
FLOWJS_AUTO_DELETE_CHUNKS = getattr(settings, "FLOWJS_AUTO_DELETE_CHUNKS", True)

# Time in days to remove non completed uploads
FLOWJS_EXPIRATION_DAYS = getattr(settings, "FLOWJS_EXPIRATION_DAYS", 1)

# When flowjs should join files in background. Options: 'none', 'media' (audio and video), 'all' (all files).
FLOWJS_JOIN_CHUNKS_IN_BACKGROUND = getattr(settings, "FLOWJS_JOIN_CHUNKS_IN_BACKGROUND", 'none')

# Check if FLOWJS should use Celery
FLOWJS_WITH_CELERY = getattr(settings, "FLOWJS_USE_CELERY", False)

old_contents:

from django.conf import settings

# Media path where the files are saved
FLOWJS_PATH = getattr(settings, "FLOWJS_PATH", 'flowjs/')

# Remove the upload files when the model is deleted
FLOWJS_REMOVE_FILES_ON_DELETE = getattr(settings, "FLOWJS_REMOVE_FILES_ON_DELETE", True)

# Remove temporary chunks after file have been upload and created
FLOWJS_AUTO_DELETE_CHUNKS = getattr(settings, "FLOWJS_AUTO_DELETE_CHUNKS", True)

# Time in days to remove non completed uploads
FLOWJS_EXPIRATION_DAYS = getattr(settings, "FLOWJS_EXPIRATION_DAYS", 1)

# When flowjs should join files in background. Options: 'none', 'media' (audio and video), 'all' (all files).
FLOWJS_JOIN_CHUNKS_IN_BACKGROUND = getattr(settings, "FLOWJS_JOIN_CHUNKS_IN_BACKGROUND", 'none')

# Check if FLOWJS should use Celery
FLOWJS_WITH_CELERY = 'celery' in settings.INSTALLED_APPS

license: mit
lang: Python
commit: c2798702a1f2b1dc40c10b481b9989f9a86c71b2
subject: Fix indentation error in some helpers
repos: AliOsm/arabic-text-diacritization
old_file: helpers/fix_fathatan.py
new_file: helpers/fix_fathatan.py
new_contents:

# -*- coding: utf-8 -*-

import os
import re
import argparse

def fix_fathatan(file_path):
    with open(file_path, 'r') as file:
        lines = file.readlines()

    new_lines = []
    for line in lines:
        new_lines.append(re.sub(r'اً', 'ًا', line))

    file_path = file_path.split(os.sep)
    file_path[-1] = 'fixed_' + file_path[-1]
    file_path = os.sep.join(file_path)

    with open(file_path, 'w') as file:
        file.write(''.join(new_lines))

    print(file_path)

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Changes after-Alif fathatan to before-Alit fathatan')
    parser.add_argument('-in', '--file-path', help='File path to fix it', required=True)
    args = parser.parse_args()

    fix_fathatan(args.file_path)

old_contents:

# -*- coding: utf-8 -*-

import os
import re
import argparse

def fix_fathatan(file_path):
    with open(file_path, 'r') as file:
        lines = file.readlines()

    new_lines = []
    for line in lines:
        new_lines.append(re.sub(r'اً', 'ًا', line))

    file_path = file_path.split(os.sep)
    file_path[-1] = 'fixed_' + file_path[-1]
    file_path = os.sep.join(file_path)

    with open(file_path, 'w') as file:
        file.write(''.join(new_lines))

    print(file_path)

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Changes after-Alif fathatan to before-Alit fathatan')
    parser.add_argument('-in', '--file-path', help='File path to fix it', required=True)
    args = parser.parse_args()

    fix_fathatan(args.file_path)

license: mit
lang: Python
commit: 5398f356cab1e98673c253849a1de2bb76fc537a
subject: move lapse archival to staging
repos: akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem
old_file: scripts/util/autolapses2box.py
new_file: scripts/util/autolapses2box.py
new_contents:

"""Send autolapse tar files to staging for archival.

Run from RUN_MIDNIGHT.sh for the previous date"""
import datetime
import subprocess
import os
import stat
import glob

from pyiem.util import logger

LOG = logger()

def main():
    """Run for the previous date, please"""
    valid = datetime.date.today() - datetime.timedelta(days=1)
    now = datetime.datetime.now()
    os.chdir("/mesonet/share/lapses/auto")
    localfns = []
    for tarfilename in glob.glob("*frames.tar"):
        # Make sure this file was generated yesterday and not old.
        mtime = os.stat(tarfilename)[stat.ST_MTIME]
        age = float(now.strftime("%s")) - mtime
        if age > 86400.0:
            continue
        localfns.append(tarfilename)
    if not localfns:
        LOG.info("Found no files within the past day?")
        return
    remotepath = valid.strftime("/stage/iemwebcams/auto/%Y/%m/%d")
    cmd = (
        'rsync -a --rsync-path "mkdir -p %s && rsync" %s '
        "mesonet@metl60.agron.iastate.edu:%s"
    ) % (remotepath, " ".join(localfns), remotepath)
    LOG.debug(cmd)
    subprocess.call(cmd, shell=True)

if __name__ == "__main__":
    main()

old_contents:

"""Send autolapse tar files to box for archival.

Run from RUN_MIDNIGHT.sh for the previous date"""
import datetime
import os
import stat
import glob

from pyiem.box_utils import sendfiles2box

def main():
    """Run for the previous date, please"""
    valid = datetime.date.today() - datetime.timedelta(days=1)
    now = datetime.datetime.now()
    os.chdir("/mesonet/share/lapses/auto")
    localfns = []
    for tarfilename in glob.glob("*frames.tar"):
        # Make sure this file was generated yesterday and not old.
        mtime = os.stat(tarfilename)[stat.ST_MTIME]
        age = float(now.strftime("%s")) - mtime
        if age > 86400.0:
            continue
        localfns.append(tarfilename)
    if not localfns:
        print("autolapses2box found no files within the past day?")
        return
    remotepath = valid.strftime("/iemwebcams/auto/%Y/%m/%d")
    res = sendfiles2box(remotepath, localfns)
    for sid, fn in zip(res, localfns):
        if sid is None:
            print("failed to upload %s" % (fn,))

if __name__ == "__main__":
    main()

license: mit
lang: Python
commit: 97a490db75f0a4976199365c3f654ba8cdb9a781
subject: Test zip, and print format
repos: zzz0072/Python_Exercises,zzz0072/Python_Exercises
old_file: 01_Built-in_Types/tuple.py
new_file: 01_Built-in_Types/tuple.py
new_contents:

#!/usr/bin/env python
import sys
import pickle

# Test zip, and format in print
names = ["xxx", "yyy", "zzz"]
ages = [18, 19, 20]

persons = zip(names, ages)
for name, age in persons:
    print "{0}'s age is {1}".format(name, age)

# Check argument
if len(sys.argv) != 2:
    print("%s filename" % sys.argv[0])
    raise SystemExit(1)

# Write tuples
file = open(sys.argv[1], "wb");
line = []
while True:
    print("Enter name, age, score (ex: zzz, 16, 90) or quit");
    line = sys.stdin.readline()
    if line == "quit\n":
        break
    raws = line.split(",")
    name = raws[0]
    age = int(raws[1])
    score = int(raws[2])
    record = (name, age, score)
    pickle.dump(record, file)
file.close()

# Read back
file = open(sys.argv[1], "rb");
while True:
    try:
        record = pickle.load(file)
        print record
        name, age, score= record
        print("name = %s" % name)
        print("name = %d" % age)
        print("name = %d" % score)
    except (EOFError):
        break
file.close()

old_contents:

#!/usr/bin/env python
import sys
import pickle

# Check argument
if len(sys.argv) != 2:
    print("%s filename" % sys.argv[0])
    raise SystemExit(1)

# Write tuples
file = open(sys.argv[1], "wb");
line = []
while True:
    print("Enter name, age, score (ex: zzz, 16, 90) or quit");
    line = sys.stdin.readline()
    if line == "quit\n":
        break
    raws = line.split(",")
    name = raws[0]
    age = int(raws[1])
    score = int(raws[2])
    record = (name, age, score)
    pickle.dump(record, file)
file.close()

# Read back
file = open(sys.argv[1], "rb");
while True:
    try:
        record = pickle.load(file)
        print record
        name, age, score= record
        print("name = %s" % name)
        print("name = %d" % age)
        print("name = %d" % score)
    except (EOFError):
        break
file.close()

license: bsd-2-clause
lang: Python
commit: ba6ef8c9f0881e7236063d5372f64656df1b4bf0
subject: rename package from 'motion_control' to 'kinesis'
repos: MSLNZ/msl-equipment
old_file: msl/equipment/resources/thorlabs/__init__.py
new_file: msl/equipment/resources/thorlabs/__init__.py
new_contents:

"""
Wrappers around APIs from Thorlabs.
"""
from .kinesis.motion_control import MotionControl
from .kinesis.callbacks import MotionControlCallback

old_contents:

"""
Wrappers around APIs from Thorlabs.
"""
from .motion_control.motion_control import MotionControl
from .motion_control.callbacks import MotionControlCallback

license: mit
lang: Python
commit: a68f9e0e7f9d99e0052c6c01395dbb131c052797
subject: remove k4
repos: childe/esproxy,childe/esproxy
old_file: esproxy/views.py
new_file: esproxy/views.py
new_contents:

import os

from django.http import HttpResponse, HttpResponseRedirect
from django.views.decorators.csrf import csrf_exempt
from django.contrib.auth.decorators import login_required
from django.template import RequestContext
from django.shortcuts import render_to_response

from settings import ELASTICSEARCH_PROXY, ELASTICSEARCH_REAL, KIBANA_DIR

def login_or_404(func):
    def inner(*args, **karags):
        request = args[0]
        if request.user.is_authenticated():
            return func(*args, **karags)
        else:
            return HttpResponseRedirect("/es/")
    return inner

#@login_or_404
@csrf_exempt
def elasticsearch(request):
    fullpath = request.get_full_path()
    fullpath = fullpath[len(ELASTICSEARCH_PROXY):]
    response = HttpResponse()
    response['X-Accel-Redirect'] = ELASTICSEARCH_REAL + '/' + fullpath
    return response

@login_required
def home(request):
    html = open(os.path.join(KIBANA_DIR, "index.html")).read()
    return HttpResponse(html)

old_contents:

from django.http import HttpResponse, HttpResponseRedirect
from django.views.decorators.csrf import csrf_exempt
from django.contrib.auth.decorators import login_required
from django.template import RequestContext
from django.shortcuts import render_to_response

from settings import ELASTICSEARCH_PROXY, ELASTICSEARCH_REAL

def login_or_404(func):
    def inner(*args, **karags):
        request = args[0]
        if request.user.is_authenticated():
            return func(*args, **karags)
        else:
            return HttpResponseRedirect("/es/")
    return inner

#@login_or_404
@csrf_exempt
def elasticsearch(request):
    fullpath = request.get_full_path()
    fullpath = fullpath[len(ELASTICSEARCH_PROXY):]
    response = HttpResponse()
    response['X-Accel-Redirect'] = ELASTICSEARCH_REAL + '/' + fullpath
    return response

@login_required
def home(request):
    html = open('templates/index.html').read()
    return HttpResponse(html)

license: mit
lang: Python
commit: 8cd2332871bd246352f23f286ae459c2cf399a35
subject: allow classifier parameter to be configurable
repos: GoogleCloudPlatform/cloudml-samples,GoogleCloudPlatform/cloudml-samples
old_file: sklearn/sklearn-template/template/trainer/model.py
new_file: sklearn/sklearn-template/template/trainer/model.py
new_contents:

# Copyright 2019 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

from sklearn import compose
from sklearn import ensemble
from sklearn import impute
from sklearn import pipeline
from sklearn import preprocessing
import numpy as np

from trainer import metadata

def get_estimator(flags):
    # TODO: Allow pre-processing to be configurable through flags
    classifier = ensemble.RandomForestClassifier(**flags)

    numeric_transformer = pipeline.Pipeline([
        ('imputer', impute.SimpleImputer(strategy='median')),
        ('scaler', preprocessing.StandardScaler()),
    ])

    numeric_log_transformer = pipeline.Pipeline([
        ('imputer', impute.SimpleImputer(strategy='median')),
        ('log', preprocessing.FunctionTransformer(
            func=np.log1p, inverse_func=np.expm1, validate=True)),
        ('scaler', preprocessing.StandardScaler()),
    ])

    numeric_bin_transformer = pipeline.Pipeline([
        ('imputer', impute.SimpleImputer(strategy='median')),
        ('bin', preprocessing.KBinsDiscretizer(n_bins=5, encode='onehot-dense')),
    ])

    categorical_transformer = pipeline.Pipeline([
        ('imputer', impute.SimpleImputer(
            strategy='constant', fill_value='missing')),
        ('onehot', preprocessing.OneHotEncoder(handle_unknown='ignore',
                                               sparse=False)),
    ])

    preprocessor = compose.ColumnTransformer([
        ('numeric', numeric_transformer, metadata.NUMERIC_FEATURES),
        ('numeric', numeric_log_transformer, metadata.NUMERIC_FEATURES),
        ('numeric', numeric_bin_transformer, metadata.NUMERIC_FEATURES),
        ('categorical', categorical_transformer, metadata.CATEGORICAL_FEATURES),
    ])

    estimator = pipeline.Pipeline([
        ('preprocessor', preprocessor),
        ('classifier', classifier),
    ])

    return estimator

old_contents:

# Copyright 2019 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

from sklearn import compose
from sklearn import ensemble
from sklearn import impute
from sklearn import pipeline
from sklearn import preprocessing
import numpy as np

from trainer import metadata

def get_estimator(flags):
    classifier = ensemble.RandomForestClassifier()

    # TODO(cezequiel): Make use of flags for hparams
    _ = flags

    numeric_transformer = pipeline.Pipeline([
        ('imputer', impute.SimpleImputer(strategy='median')),
        ('scaler', preprocessing.StandardScaler()),
    ])

    numeric_log_transformer = pipeline.Pipeline([
        ('imputer', impute.SimpleImputer(strategy='median')),
        ('log', preprocessing.FunctionTransformer(
            func=np.log1p, inverse_func=np.expm1, validate=True)),
        ('scaler', preprocessing.StandardScaler()),
    ])

    numeric_bin_transformer = pipeline.Pipeline([
        ('imputer', impute.SimpleImputer(strategy='median')),
        ('bin', preprocessing.KBinsDiscretizer(n_bins=5, encode='onehot-dense')),
    ])

    categorical_transformer = pipeline.Pipeline([
        ('imputer', impute.SimpleImputer(
            strategy='constant', fill_value='missing')),
        ('onehot', preprocessing.OneHotEncoder(handle_unknown='ignore')),
    ])

    preprocessor = compose.ColumnTransformer([
        ('numeric', numeric_transformer, metadata.NUMERIC_FEATURES),
        ('numeric', numeric_log_transformer, metadata.NUMERIC_FEATURES),
        ('numeric', numeric_bin_transformer, metadata.NUMERIC_FEATURES),
        ('categorical', categorical_transformer, metadata.CATEGORICAL_FEATURES),
    ])

    estimator = pipeline.Pipeline([
        ('preprocessor', preprocessor),
        ('classifier', classifier),
    ])

    return estimator

license: apache-2.0
lang: Python
commit: ad8ff0e8d280a8a0b3876382b63a1be4ad0784e5
subject: increment version
repos: shacknetisp/fourthevaz,shacknetisp/fourthevaz,shacknetisp/fourthevaz
old_file: version.py
new_file: version.py
new_contents:

# -*- coding: utf-8 -*-
import platform

name = "Fourth Evaz"
version = (0, 1, 10)
source = "https://github.com/shacknetisp/fourthevaz"

def gitstr():
    try:
        return "%s" % (open('.git/refs/heads/master').read().strip()[0:10])
    except FileNotFoundError:
        return ""
    except IndexError:
        return ""

def versionstr():
    return "%d.%d.%d%s" % (version[0], version[1], version[2],
                           '-' + gitstr() if gitstr() else '')

def pythonversionstr():
    return '{t[0]}.{t[1]}.{t[2]}'.format(t=platform.python_version_tuple())

def systemversionstr():
    return platform.platform()

old_contents:

# -*- coding: utf-8 -*-
import platform

name = "Fourth Evaz"
version = (0, 1, 9)
source = "https://github.com/shacknetisp/fourthevaz"

def gitstr():
    try:
        return "%s" % (open('.git/refs/heads/master').read().strip()[0:10])
    except FileNotFoundError:
        return ""
    except IndexError:
        return ""

def versionstr():
    return "%d.%d.%d%s" % (version[0], version[1], version[2],
                           '-' + gitstr() if gitstr() else '')

def pythonversionstr():
    return '{t[0]}.{t[1]}.{t[2]}'.format(t=platform.python_version_tuple())

def systemversionstr():
    return platform.platform()

license: mit
lang: Python
commit: 185f174b6c1d50ad51987765f42e078a6081e5d3
subject: Remove semi-colon
repos: kranthikumar/exercises-in-programming-style,crista/exercises-in-programming-style,crista/exercises-in-programming-style,kranthikumar/exercises-in-programming-style,crista/exercises-in-programming-style,crista/exercises-in-programming-style,kranthikumar/exercises-in-programming-style,kranthikumar/exercises-in-programming-style,crista/exercises-in-programming-style,kranthikumar/exercises-in-programming-style
old_file: 06-pipeline/tf-06.py
new_file: 06-pipeline/tf-06.py
new_contents:

#!/usr/bin/env python
import sys, re, operator, string

#
# The functions
#
def read_file(path_to_file):
    """
    Takes a path to a file and returns the entire
    contents of the file as a string
    """
    with open(path_to_file) as f:
        data = f.read()
    return data

def filter_chars_and_normalize(str_data):
    """
    Takes a string and returns a copy with all nonalphanumeric
    chars replaced by white space
    """
    pattern = re.compile('[\W_]+')
    return pattern.sub(' ', str_data).lower()

def scan(str_data):
    """
    Takes a string and scans for words, returning
    a list of words.
    """
    return str_data.split()

def remove_stop_words(word_list):
    """
    Takes a list of words and returns a copy with all stop
    words removed
    """
    with open('../stop_words.txt') as f:
        stop_words = f.read().split(',')
    # add single-letter words
    stop_words.extend(list(string.ascii_lowercase))
    return [w for w in word_list if not w in stop_words]

def frequencies(word_list):
    """
    Takes a list of words and returns a dictionary associating
    words with frequencies of occurrence
    """
    word_freqs = {}
    for w in word_list:
        if w in word_freqs:
            word_freqs[w] += 1
        else:
            word_freqs[w] = 1
    return word_freqs

def sort(word_freq):
    """
    Takes a dictionary of words and their frequencies
    and returns a list of pairs where the entries are
    sorted by frequency
    """
    return sorted(word_freq.items(), key=operator.itemgetter(1), reverse=True)

def print_all(word_freqs):
    """
    Takes a list of pairs where the entries are sorted by frequency
    and print them recursively.
    """
    if(len(word_freqs) > 0):
        print(word_freqs[0][0], '-', word_freqs[0][1])
        print_all(word_freqs[1:])

#
# The main function
#
print_all(sort(frequencies(remove_stop_words(scan(filter_chars_and_normalize(read_file(sys.argv[1]))))))[0:25])

old_contents:

#!/usr/bin/env python
import sys, re, operator, string

#
# The functions
#
def read_file(path_to_file):
    """
    Takes a path to a file and returns the entire
    contents of the file as a string
    """
    with open(path_to_file) as f:
        data = f.read()
    return data

def filter_chars_and_normalize(str_data):
    """
    Takes a string and returns a copy with all nonalphanumeric
    chars replaced by white space
    """
    pattern = re.compile('[\W_]+')
    return pattern.sub(' ', str_data).lower()

def scan(str_data):
    """
    Takes a string and scans for words, returning
    a list of words.
    """
    return str_data.split()

def remove_stop_words(word_list):
    """
    Takes a list of words and returns a copy with all stop
    words removed
    """
    with open('../stop_words.txt') as f:
        stop_words = f.read().split(',')
    # add single-letter words
    stop_words.extend(list(string.ascii_lowercase))
    return [w for w in word_list if not w in stop_words]

def frequencies(word_list):
    """
    Takes a list of words and returns a dictionary associating
    words with frequencies of occurrence
    """
    word_freqs = {}
    for w in word_list:
        if w in word_freqs:
            word_freqs[w] += 1
        else:
            word_freqs[w] = 1
    return word_freqs

def sort(word_freq):
    """
    Takes a dictionary of words and their frequencies
    and returns a list of pairs where the entries are
    sorted by frequency
    """
    return sorted(word_freq.items(), key=operator.itemgetter(1), reverse=True)

def print_all(word_freqs):
    """
    Takes a list of pairs where the entries are sorted by frequency
    and print them recursively.
    """
    if(len(word_freqs) > 0):
        print(word_freqs[0][0], '-', word_freqs[0][1])
        print_all(word_freqs[1:]);

#
# The main function
#
print_all(sort(frequencies(remove_stop_words(scan(filter_chars_and_normalize(read_file(sys.argv[1]))))))[0:25])

license: mit
lang: Python
commit: e5963987e678926ad8cdde93e2551d0516a7686b
subject: Increase timeout for bench_pictures on Android
repos: Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot,google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot
old_file: slave/skia_slave_scripts/android_bench_pictures.py
new_file: slave/skia_slave_scripts/android_bench_pictures.py
new_contents:

#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

""" Run the Skia bench_pictures executable. """

from android_render_pictures import AndroidRenderPictures
from android_run_bench import DoBench
from bench_pictures import BenchPictures
from build_step import BuildStep
import sys

class AndroidBenchPictures(BenchPictures, AndroidRenderPictures):
    def __init__(self, args, attempts=1, timeout=4800):
        super(AndroidBenchPictures, self).__init__(args, attempts=attempts,
                                                   timeout=timeout)

    def _DoBenchPictures(self, config, threads):
        data_file = self._BuildDataFile(self._device_dirs.SKPPerfDir(),
                                        config, threads)
        args = self._PictureArgs(self._device_dirs.SKPDir(), config, threads)
        DoBench(serial=self._serial,
                executable='bench_pictures',
                perf_data_dir=self._perf_data_dir,
                device_perf_dir=self._device_dirs.SKPPerfDir(),
                data_file=data_file,
                extra_args=args)

    def _Run(self):
        self._PushSKPSources(self._serial)
        super(AndroidBenchPictures, self)._Run()

if '__main__' == __name__:
    sys.exit(BuildStep.RunBuildStep(AndroidBenchPictures))

old_contents:

#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

""" Run the Skia bench_pictures executable. """

from android_render_pictures import AndroidRenderPictures
from android_run_bench import DoBench
from bench_pictures import BenchPictures
from build_step import BuildStep
import sys

class AndroidBenchPictures(BenchPictures, AndroidRenderPictures):
    def _DoBenchPictures(self, config, threads):
        data_file = self._BuildDataFile(self._device_dirs.SKPPerfDir(),
                                        config, threads)
        args = self._PictureArgs(self._device_dirs.SKPDir(), config, threads)
        DoBench(serial=self._serial,
                executable='bench_pictures',
                perf_data_dir=self._perf_data_dir,
                device_perf_dir=self._device_dirs.SKPPerfDir(),
                data_file=data_file,
                extra_args=args)

    def _Run(self):
        self._PushSKPSources(self._serial)
        super(AndroidBenchPictures, self)._Run()

if '__main__' == __name__:
    sys.exit(BuildStep.RunBuildStep(AndroidBenchPictures))

license: bsd-3-clause
lang: Python
commit: d4e890a16fcb155c6df78d378b3ba9429590c74b
subject: fix test
repos: marco-hoyer/cfn-sphere,cfn-sphere/cfn-sphere,ImmobilienScout24/cfn-sphere,cfn-sphere/cfn-sphere,cfn-sphere/cfn-sphere
old_file: src/unittest/python/aws/kms_tests.py
new_file: src/unittest/python/aws/kms_tests.py
new_contents:

import base64

import unittest2
from boto.kms.exceptions import InvalidCiphertextException
from cfn_sphere.aws.kms import KMS
from mock import patch

from cfn_sphere.exceptions import InvalidEncryptedValueException

class KMSTests(unittest2.TestCase):
    @patch('cfn_sphere.aws.kms.kms.connect_to_region')
    def test_decrypt_value(self, kms_mock):
        kms_mock.return_value.decrypt.return_value = {'Plaintext': 'decryptedValue'}
        self.assertEqual('decryptedValue', KMS().decrypt("ZW5jcnlwdGVkVmFsdWU="))
        kms_mock.return_value.decrypt.assert_called_once_with(base64.b64decode("ZW5jcnlwdGVkVmFsdWU="))

    @patch('cfn_sphere.aws.kms.kms.connect_to_region')
    def test_invalid_base64(self, kms_mock):
        with self.assertRaises(InvalidEncryptedValueException):
            KMS().decrypt("asdqwda")

    @patch('cfn_sphere.aws.kms.kms.connect_to_region')
    def test_invalid_kms_key(self, kms_mock):
        kms_mock.return_value.decrypt.side_effect = InvalidCiphertextException("400", "Bad Request")
        with self.assertRaises(InvalidEncryptedValueException):
            KMS().decrypt("ZW5jcnlwdGVkVmFsdWU=")

old_contents:

import unittest2
from boto.kms.exceptions import InvalidCiphertextException
from cfn_sphere.aws.kms import KMS
from mock import patch

from cfn_sphere.exceptions import InvalidEncryptedValueException

class KMSTests(unittest2.TestCase):
    @patch('cfn_sphere.aws.kms.kms.connect_to_region')
    def test_decrypt_value(self, kms_mock):
        kms_mock.return_value.decrypt.return_value = {'Plaintext': 'decryptedValue'}
        self.assertEqual('decryptedValue', KMS().decrypt("ZW5jcnlwdGVkVmFsdWU="))
        kms_mock.return_value.decrypt.assert_called_once_with("encryptedValue")

    @patch('cfn_sphere.aws.kms.kms.connect_to_region')
    def test_invalid_base64(self, kms_mock):
        with self.assertRaises(InvalidEncryptedValueException):
            KMS().decrypt("asdqwda")

    @patch('cfn_sphere.aws.kms.kms.connect_to_region')
    def test_invalid_kms_key(self, kms_mock):
        kms_mock.return_value.decrypt.side_effect = InvalidCiphertextException("400", "Bad Request")
        with self.assertRaises(InvalidEncryptedValueException):
            KMS().decrypt("ZW5jcnlwdGVkVmFsdWU=")

license: apache-2.0
lang: Python
commit: 8425a06fb270e18b7aa7b137cb99b43ce39a4b53
subject: Fix bitrotted function call
repos: henn/hil,meng-sun/hil,henn/hil,henn/haas,SahilTikale/haas,kylehogan/haas,CCI-MOC/haas,kylehogan/hil,meng-sun/hil,henn/hil_sahil,kylehogan/hil,henn/hil_sahil
old_file: haas.wsgi
new_file: haas.wsgi
new_contents:

#!/usr/bin/env python
import haas.api
from haas import config, model, server

config.load('/etc/haas.cfg')
config.configure_logging()
config.load_extensions()
server.init()

from haas.rest import wsgi_handler as application

old_contents:

#!/usr/bin/env python
import haas.api
from haas import config, model, server

config.load('/etc/haas.cfg')
config.configure_logging()
config.load_extensions()
server.api_server_init()

from haas.rest import wsgi_handler as application

license: apache-2.0
lang: Python
commit: b219823af7188f968d7c52c5273148c510bd7454
subject: Simplify the ckernel pass a bit more
repos: ChinaQuants/blaze,jdmcbr/blaze,xlhtc007/blaze,aterrel/blaze,FrancescAlted/blaze,mrocklin/blaze,maxalbert/blaze,alexmojaki/blaze,dwillmer/blaze,cowlicks/blaze,LiaoPan/blaze,jcrist/blaze,maxalbert/blaze,mrocklin/blaze,cowlicks/blaze,ChinaQuants/blaze,aterrel/blaze,scls19fr/blaze,mwiebe/blaze,FrancescAlted/blaze,nkhuyu/blaze,scls19fr/blaze,ContinuumIO/blaze,xlhtc007/blaze,alexmojaki/blaze,nkhuyu/blaze,caseyclements/blaze,LiaoPan/blaze,FrancescAlted/blaze,FrancescAlted/blaze,jcrist/blaze,jdmcbr/blaze,dwillmer/blaze,mwiebe/blaze,mwiebe/blaze,mwiebe/blaze,ContinuumIO/blaze,cpcloud/blaze,aterrel/blaze,caseyclements/blaze,cpcloud/blaze
old_file: blaze/compute/air/frontend/ckernel_impls.py
new_file: blaze/compute/air/frontend/ckernel_impls.py
new_contents:

"""
Convert 'kernel' Op to 'ckernel'.
"""

from __future__ import absolute_import, division, print_function

from pykit.ir import transform, Op

def run(func, env):
    strategies = env['strategies']
    transform(CKernelImplementations(strategies), func)

class CKernelImplementations(object):
    """
    For kernels that are implemented via ckernels, this
    grabs the ckernel_deferred and turns it into a ckernel
    op.
    """

    def __init__(self, strategies):
        self.strategies = strategies

    def op_kernel(self, op):
        if self.strategies[op] != 'ckernel':
            return

        # Default overload is CKERNEL, so no need to look it up again
        overload = op.metadata['overload']

        impl = overload.func

        new_op = Op('ckernel', op.type, [impl, op.args[1:]], op.result)
        new_op.add_metadata({'rank': 0, 'parallel': True})
        return new_op

old_contents:

"""
Lift ckernels to their appropriate rank so they always consume the
full array arguments.
"""

from __future__ import absolute_import, division, print_function

import datashape
from pykit.ir import transform, Op

#------------------------------------------------------------------------
# Run
#------------------------------------------------------------------------

def run(func, env):
    strategies = env['strategies']
    transform(CKernelImplementations(strategies), func)

#------------------------------------------------------------------------
# Extract CKernel Implementations
#------------------------------------------------------------------------

class CKernelImplementations(object):
    """
    For kernels that are implemented via ckernels, this
    grabs the ckernel_deferred and turns it into a ckernel
    op.
    """

    def __init__(self, strategies):
        self.strategies = strategies

    def op_kernel(self, op):
        if self.strategies[op] != 'ckernel':
            return

        function = op.metadata['kernel']
        overload = op.metadata['overload']

        # Default overload is CKERNEL, so no need to look it up again
        func = overload.func
        polysig = overload.sig
        monosig = overload.resolved_sig
        argtypes = datashape.coretypes.Tuple(monosig.argtypes)

        impl = overload.func
        assert monosig == overload.resolved_sig, (monosig,
                                                  overload.resolved_sig)

        new_op = Op('ckernel', op.type, [impl, op.args[1:]], op.result)
        new_op.add_metadata({'rank': 0, 'parallel': True})
        return new_op

license: bsd-3-clause
lang: Python
commit: 60dd476337ead3262daaa17ee4a973937cac380d
subject: Add help for sites argument to manage.py scan
repos: DNSUsher/securethenews,freedomofpress/securethenews,freedomofpress/securethenews,DNSUsher/securethenews,freedomofpress/securethenews,freedomofpress/securethenews,DNSUsher/securethenews
old_file: securethenews/sites/management/commands/scan.py
new_file: securethenews/sites/management/commands/scan.py
new_contents:

import json
import subprocess

from django.core.management.base import BaseCommand, CommandError
from django.db import transaction

from sites.models import Site, Scan

def pshtt(domain):
    pshtt_cmd = ['pshtt', '--json', domain]
    p = subprocess.Popen(
        pshtt_cmd,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        universal_newlines=True)
    stdout, stderr = p.communicate()

    # pshtt returns a list with a single item, which is a dictionary of
    # the scan results.
    pshtt_results = json.loads(stdout)[0]

    return pshtt_results, stdout, stderr

def scan(site):
    # Scan the domain with pshtt
    results, stdout, stderr = pshtt(site.domain)

    scan = Scan(
        site=site,
        live=results['Live'],
        valid_https=results['Valid HTTPS'],
        downgrades_https=results['Downgrades HTTPS'],
        defaults_to_https=results['Defaults to HTTPS'],
        hsts=results['HSTS'],
        hsts_max_age=results['HSTS Max Age'],
        hsts_entire_domain=results['HSTS Entire Domain'],
        hsts_preload_ready=results['HSTS Preload Ready'],
        hsts_preloaded=results['HSTS Preloaded'],
        pshtt_stdout=stdout,
        pshtt_stderr=stderr,
    ).save()

class Command(BaseCommand):
    help = 'Rescan all sites and store the results in the database'

    def add_arguments(self, parser):
        parser.add_argument('sites', nargs='*', type=str, default='',
            help=("Specify one or more domain names of sites to scan. "
                  "If unspecified, scan all sites."))

    def handle(self, *args, **options):
        # Support targeting a specific site to scan.
        if options['sites']:
            sites = []
            for domain_name in options['sites']:
                try:
                    site = Site.objects.get(domain=domain_name)
                    sites.append(site)
                except Site.DoesNotExist:
                    msg = "Site with domain '{}' does not exist".format(domain_name)
                    raise CommandError(msg)
        else:
            sites = Site.objects.all()

        with transaction.atomic():
            for site in sites:
                self.stdout.write('Scanning: {}'.format(site.domain))
                scan(site)

old_contents:

import json
import subprocess

from django.core.management.base import BaseCommand, CommandError
from django.db import transaction

from sites.models import Site, Scan

def pshtt(domain):
    pshtt_cmd = ['pshtt', '--json', domain]
    p = subprocess.Popen(
        pshtt_cmd,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        universal_newlines=True)
    stdout, stderr = p.communicate()

    # pshtt returns a list with a single item, which is a dictionary of
    # the scan results.
    pshtt_results = json.loads(stdout)[0]

    return pshtt_results, stdout, stderr

def scan(site):
    # Scan the domain with pshtt
    results, stdout, stderr = pshtt(site.domain)

    scan = Scan(
        site=site,
        live=results['Live'],
        valid_https=results['Valid HTTPS'],
        downgrades_https=results['Downgrades HTTPS'],
        defaults_to_https=results['Defaults to HTTPS'],
        hsts=results['HSTS'],
        hsts_max_age=results['HSTS Max Age'],
        hsts_entire_domain=results['HSTS Entire Domain'],
        hsts_preload_ready=results['HSTS Preload Ready'],
        hsts_preloaded=results['HSTS Preloaded'],
        pshtt_stdout=stdout,
        pshtt_stderr=stderr,
    ).save()

class Command(BaseCommand):
    help = 'Rescan all sites and store the results in the database'

    def add_arguments(self, parser):
        parser.add_argument('sites', nargs='*', type=str, default='')

    def handle(self, *args, **options):
        # Support targeting a specific site to scan.
        if options['sites']:
            sites = []
            for domain_name in options['sites']:
                try:
                    site = Site.objects.get(domain=domain_name)
                    sites.append(site)
                except Site.DoesNotExist:
                    msg = "Site with domain '{}' does not exist".format(domain_name)
                    raise CommandError(msg)
        else:
            sites = Site.objects.all()

        with transaction.atomic():
            for site in sites:
                self.stdout.write('Scanning: {}'.format(site.domain))
                scan(site)

license: agpl-3.0
lang: Python
53c6e1f1d0939b4c585301427920e0cf1dd9e341
Remove tabs.
ibus/ibus,ueno/ibus,luoxsbupt/ibus,fujiwarat/ibus,luoxsbupt/ibus,phuang/ibus,fujiwarat/ibus,ueno/ibus,phuang/ibus,ibus/ibus,luoxsbupt/ibus,ibus/ibus-cros,Keruspe/ibus,Keruspe/ibus,j717273419/ibus,fujiwarat/ibus,ibus/ibus-cros,ibus/ibus-cros,phuang/ibus,fujiwarat/ibus,luoxsbupt/ibus,Keruspe/ibus,luoxsbupt/ibus,j717273419/ibus,ueno/ibus,ibus/ibus,ueno/ibus,Keruspe/ibus,ueno/ibus,j717273419/ibus,ibus/ibus,j717273419/ibus,ibus/ibus-cros,phuang/ibus
panel/main.py
panel/main.py
import ibus import gtk import dbus import dbus.mainloop.glib import panel class PanelApplication: def __init__ (self): self._dbusconn = dbus.connection.Connection (ibus.IBUS_ADDR) self._dbusconn.add_signal_receiver (self._disconnected_cb, "Disconnected", dbus_interface = dbus.LOCAL_IFACE) self._panel = panel.PanelProxy (self._dbusconn, "/org/freedesktop/IBus/Panel") self._ibus = self._dbusconn.get_object (ibus.IBUS_NAME, ibus.IBUS_PATH) self._ibus.RegisterPanel (self._panel, True) def run (self): gtk.main () def _disconnected_cb (self): print "disconnected" gtk.main_quit () def main (): # gtk.settings_get_default ().props.gtk_theme_name = "/home/phuang/.themes/aud-Default/gtk-2.0/gtkrc" gtk.rc_parse ("./themes/default/gtkrc") PanelApplication ().run () if __name__ == "__main__": dbus.mainloop.glib.DBusGMainLoop (set_as_default=True) main ()
import ibus import gtk import dbus import dbus.mainloop.glib import panel class PanelApplication: def __init__ (self): self._dbusconn = dbus.connection.Connection (ibus.IBUS_ADDR) self._dbusconn.add_signal_receiver (self._disconnected_cb, "Disconnected", dbus_interface = dbus.LOCAL_IFACE) self._panel = panel.PanelProxy (self._dbusconn, "/org/freedesktop/IBus/Panel") self._ibus = self._dbusconn.get_object (ibus.IBUS_NAME, ibus.IBUS_PATH) self._ibus.RegisterPanel (self._panel, True) def run (self): gtk.main () def _disconnected_cb (self): print "disconnected" gtk.main_quit () def main (): # gtk.settings_get_default ().props.gtk_theme_name = "/home/phuang/.themes/aud-Default/gtk-2.0/gtkrc" gtk.rc_parse ("./themes/default/gtkrc") PanelApplication ().run () if __name__ == "__main__": dbus.mainloop.glib.DBusGMainLoop (set_as_default=True) main ()
lgpl-2.1
Python
commit: 6bdc16e24e51d16b0fa214d30394317079bc90a9
subject: Throw more user-friendly execption inside get_backend_instance method.
repos: nzlosh/st2,peak6/st2,Plexxi/st2,peak6/st2,Plexxi/st2,peak6/st2,Plexxi/st2,tonybaloney/st2,nzlosh/st2,Plexxi/st2,StackStorm/st2,nzlosh/st2,StackStorm/st2,StackStorm/st2,nzlosh/st2,tonybaloney/st2,tonybaloney/st2,StackStorm/st2
old_file: st2auth/st2auth/backends/__init__.py
new_file: st2auth/st2auth/backends/__init__.py
new_contents:

# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import traceback
import json

from oslo_config import cfg
from stevedore.driver import DriverManager
from stevedore.extension import ExtensionManager

from st2common import log as logging

__all__ = [
    'get_available_backends',
    'get_backend_instance'
]

LOG = logging.getLogger(__name__)

BACKENDS_NAMESPACE = 'st2auth.backends.backend'

def get_available_backends():
    """
    Return names of the available / installed authentication backends.

    :rtype: ``list`` of ``str``
    """
    manager = ExtensionManager(namespace=BACKENDS_NAMESPACE, invoke_on_load=False)
    return manager.names()

def get_backend_instance(name):
    """
    Retrieve a class instance for the provided auth backend.

    :param name: Backend name.
    :type name: ``str``
    """
    LOG.debug('Retrieving backend instance for backend "%s"' % (name))

    try:
        manager = DriverManager(namespace=BACKENDS_NAMESPACE, name=name,
                                invoke_on_load=False)
    except RuntimeError:
        message = 'Invalid authentication backend specified: %s' % (name)
        LOG.exception(message)
        raise ValueError(message)

    backend_kwargs = cfg.CONF.auth.backend_kwargs

    if backend_kwargs:
        try:
            kwargs = json.loads(backend_kwargs)
        except ValueError as e:
            raise ValueError('Failed to JSON parse backend settings for backend "%s": %s' %
                             (name, str(e)))
    else:
        kwargs = {}

    cls = manager.driver

    try:
        cls_instance = cls(**kwargs)
    except Exception as e:
        tb_msg = traceback.format_exc()
        msg = ('Failed to instantiate auth backend "%s" with backend settings "%s": %s' %
               (name, str(kwargs), str(e)))
        msg += '\n\n' + tb_msg
        exc_cls = type(e)
        raise exc_cls(msg)

    return cls_instance

old_contents:

# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import json

from oslo_config import cfg
from stevedore.driver import DriverManager
from stevedore.extension import ExtensionManager

from st2common import log as logging

__all__ = [
    'get_available_backends',
    'get_backend_instance'
]

LOG = logging.getLogger(__name__)

BACKENDS_NAMESPACE = 'st2auth.backends.backend'

def get_available_backends():
    """
    Return names of the available / installed authentication backends.

    :rtype: ``list`` of ``str``
    """
    manager = ExtensionManager(namespace=BACKENDS_NAMESPACE, invoke_on_load=False)
    return manager.names()

def get_backend_instance(name):
    """
    :param name: Backend name.
    :type name: ``str``
    """
    try:
        manager = DriverManager(namespace=BACKENDS_NAMESPACE, name=name,
                                invoke_on_load=False)
    except RuntimeError:
        message = 'Invalid authentication backend specified: %s' % (name)
        LOG.exception(message)
        raise ValueError(message)

    backend_kwargs = cfg.CONF.auth.backend_kwargs

    if backend_kwargs:
        try:
            kwargs = json.loads(backend_kwargs)
        except ValueError as e:
            raise ValueError('Failed to JSON parse backend settings: %s' % (str(e)))
    else:
        kwargs = {}

    cls = manager.driver
    cls_instance = cls(**kwargs)

    return cls_instance

license: apache-2.0
lang: Python
commit: 212a9a901625605000210ec8c436c8f6e9be7c39
subject: correct the log filename and handler
repos: xgfone/pycom,xgfone/xutils
old_file: xutils/log.py
new_file: xutils/log.py
new_contents:

# -*- coding: utf-8 -*-
import os
import os.path
import logging
from logging.handlers import RotatingFileHandler

def init(logger=None, level="INFO", file=None, handler_cls=None, process=False,
         max_count=30, propagate=True, file_config=None, dict_config=None):
    root = logging.getLogger()
    if not logger:
        logger = root

    # Initialize the argument logger with the arguments, level and log_file.
    if logger:
        fmt = ("%(asctime)s - %(process)d - %(pathname)s - %(funcName)s - "
               "%(lineno)d - %(levelname)s - %(message)s")
        datefmt = "%Y-%m-%d %H:%M:%S"
        formatter = logging.Formatter(fmt=fmt, datefmt=datefmt)

        level = getattr(logging, level.upper())

        if file:
            if process:
                filename, ext = os.path.splitext(file)
                file = "{0}.{1}{2}".format(filename, os.getpid(), ext)
            if handler_cls:
                handler = handler_cls(file, max_count)
            else:
                handler = RotatingFileHandler(file, maxBytes=1024**3,
                                              backupCount=max_count)
        else:
            handler = logging.StreamHandler()
        handler.setLevel(level)
        handler.setFormatter(formatter)

        root.setLevel(level)
        root.addHandler(handler)

        loggers = logger if isinstance(logger, (list, tuple)) else [logger]
        for logger in loggers:
            if logger is root:
                continue
            logger.propagate = propagate
            logger.setLevel(level)
            logger.addHandler(handler)

    # Initialize logging by the configuration file, file_config.
    if file_config:
        logging.config.fileConfig(file_config, disable_existing_loggers=False)

    # Initialize logging by the dict configuration, dict_config.
    if dict_config and hasattr(logging.config, "dictConfig"):
        logging.config.dictConfig(dict_config)

old_contents:

# -*- coding: utf-8 -*-
import os
import os.path
import logging
from logging.handlers import RotatingFileHandler

def init(logger=None, level="INFO", file=None, handler_cls=None, process=False,
         max_count=30, propagate=True, file_config=None, dict_config=None):
    root = logging.getLogger()
    if not logger:
        logger = root

    # Initialize the argument logger with the arguments, level and log_file.
    if logger:
        fmt = ("%(asctime)s - %(process)d - %(pathname)s - %(funcName)s - "
               "%(lineno)d - %(levelname)s - %(message)s")
        datefmt = "%Y-%m-%d %H:%M:%S"
        formatter = logging.Formatter(fmt=fmt, datefmt=datefmt)

        level = getattr(logging, level.upper())

        if file:
            if process:
                filename, ext = os.path.splitext(file)
                if ext:
                    file = "{0}.{1}{2}".format(filename, os.getpid(), ext)
                else:
                    file = "{0}.{1}".format(filename, os.getpid())
            if handler_cls:
                handler = handler_cls(file, max_count)
            else:
                handler = RotatingFileHandler(file, maxBytes=1024**3,
                                              backupCount=max_count)
        else:
            handler = logging.StreamHandler()
        handler.setLevel(level)
        handler.setFormatter(formatter)

        root.setLevel(level)
        root.addFilter(handler)

        loggers = logger if isinstance(logger, (list, tuple)) else [logger]
        for logger in loggers:
            if logger is root:
                continue
            logger.propagate = propagate
            logger.setLevel(level)
            logger.addHandler(handler)

    # Initialize logging by the configuration file, file_config.
    if file_config:
        logging.config.fileConfig(file_config, disable_existing_loggers=False)

    # Initialize logging by the dict configuration, dict_config.
    if dict_config and hasattr(logging.config, "dictConfig"):
        logging.config.dictConfig(dict_config)

license: mit
lang: Python
commit: 0e60c23ce6e40304437218151e895dcaf856f832
subject: Update project version
repos: skioo/django-customer-billing,skioo/django-customer-billing
old_file: billing/__init__.py
new_file: billing/__init__.py
new_contents:

__version__ = '1.7'
__copyright__ = 'Copyright (c) 2020, Skioo SA'
__licence__ = 'MIT'
__URL__ = 'https://github.com/skioo/django-customer-billing'

old_contents:

__version__ = '1.6'
__copyright__ = 'Copyright (c) 2020, Skioo SA'
__licence__ = 'MIT'
__URL__ = 'https://github.com/skioo/django-customer-billing'

license: mit
lang: Python
commit: 22fef4c07a28a96267e1d3f0390bc366790252a0
subject: Use alias for nodejs_tool.
repos: wt/bazel_rules_nodejs
old_file: nodejs/def.bzl
new_file: nodejs/def.bzl
new_contents:

_js_filetype = FileType([".js"])

SCRIPT_TEMPLATE = """\
#!/bin/bash
"{node_bin}" "{script_path}"
"""

def nodejs_binary_impl(ctx):
    ctx.file_action(
        ctx.outputs.executable,
        SCRIPT_TEMPLATE.format(node_bin=ctx.file._nodejs_tool.short_path,
                               script_path=ctx.file.main_script.short_path),
        executable=True)

    all_runfiles = [ctx.file._nodejs_tool]
    all_runfiles.append(ctx.file.main_script)

    return struct(
        runfiles=ctx.runfiles(files=all_runfiles),
    )

nodejs_binary = rule(
    nodejs_binary_impl,
    executable=True,
    attrs={
        "main_script": attr.label(
            single_file=True,
            allow_files=_js_filetype,
        ),
        "_nodejs_tool": attr.label(
            default=Label("//nodejs/toolchain:nodejs_tool"),
            single_file=True,
            allow_files=True,
            executable=True,
            cfg=HOST_CFG,
        )
    },
)

NODEJS_BUILD_FILE_CONTENTS = """\
package(
    default_visibility = ["//visibility:public"])

alias(
    name = "nodejs_tool",
    actual = "//:bin/node",
)
"""

def nodejs_repositories():
    native.new_http_archive(
        name = 'nodejs_linux_amd64',
        url = 'https://nodejs.org/dist/v4.4.4/node-v4.4.4-linux-x64.tar.xz',
        build_file_content = NODEJS_BUILD_FILE_CONTENTS,
        sha256 = 'c8b4e3c6e07e51593dddbf1d2ec3cf0e' +
                 'c09d5c6b8c5258b37b3816cc6b7e9fe3',
        strip_prefix = "node-v4.4.4-linux-x64",
    )

old_contents:

_js_filetype = FileType([".js"])

SCRIPT_TEMPLATE = """\
#!/bin/bash
"{node_bin}" "{script_path}"
"""

def nodejs_binary_impl(ctx):
    ctx.file_action(
        ctx.outputs.executable,
        SCRIPT_TEMPLATE.format(node_bin=ctx.file._nodejs_tool.short_path,
                               script_path=ctx.file.main_script.short_path),
        executable=True)

    all_runfiles = [ctx.file._nodejs_tool]
    all_runfiles.append(ctx.file.main_script)

    return struct(
        runfiles=ctx.runfiles(files=all_runfiles),
    )

nodejs_binary = rule(
    nodejs_binary_impl,
    executable=True,
    attrs={
        "main_script": attr.label(
            single_file=True,
            allow_files=_js_filetype,
        ),
        "_nodejs_tool": attr.label(
            default=Label("//nodejs/toolchain:nodejs_tool"),
            single_file=True,
            allow_files=True,
            executable=True,
            cfg=HOST_CFG,
        )
    },
)

NODEJS_BUILD_FILE_CONTENTS = """\
package(
    default_visibility = ["//visibility:public"])

filegroup(
    name = "nodejs_tool",
    srcs = ["bin/node"],
)
"""

def nodejs_repositories():
    native.new_http_archive(
        name = 'nodejs_linux_amd64',
        url = 'https://nodejs.org/dist/v4.4.4/node-v4.4.4-linux-x64.tar.xz',
        build_file_content = NODEJS_BUILD_FILE_CONTENTS,
        sha256 = 'c8b4e3c6e07e51593dddbf1d2ec3cf0e' +
                 'c09d5c6b8c5258b37b3816cc6b7e9fe3',
        strip_prefix = "node-v4.4.4-linux-x64",
    )

license: apache-2.0
lang: Python
commit: e8540104547878e9f8360ba07ca0cbf1ee63e6ca
subject: update Nottingham import script
repos: DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations
old_file: polling_stations/apps/data_collection/management/commands/import_nottingham.py
new_file: polling_stations/apps/data_collection/management/commands/import_nottingham.py
new_contents:

from data_collection.github_importer import BaseGitHubImporter

class Command(BaseGitHubImporter):
    srid = 4326
    districts_srid = 4326
    council_id = "E06000018"
    elections = []
    scraper_name = "wdiv-scrapers/DC-PollingStations-Nottingham"
    geom_type = "geojson"

    def district_record_to_dict(self, record):
        poly = self.extract_geometry(record, self.geom_type, self.get_srid("districts"))
        return {
            "internal_council_id": record["POLLINGDIS"],
            "name": record["POLLINGDIS"],
            "area": poly,
            "polling_station_id": record["POLLINGDIS"],
        }

    def station_record_to_dict(self, record):
        location = self.extract_geometry(
            record, self.geom_type, self.get_srid("stations")
        )
        return {
            "internal_council_id": record["CONST"],
            "postcode": "",
            "address": record["NAME"] + "\n" + record["ADDRESS"],
            "location": location,
        }

old_contents:

from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter

class Command(BaseXpressDemocracyClubCsvImporter):
    council_id = "E06000018"
    addresses_name = "parl.2017-06-08/Version 1/Democracy_Club__08June2017 8.tsv"
    stations_name = "parl.2017-06-08/Version 1/Democracy_Club__08June2017 8.tsv"
    elections = ["parl.2017-06-08"]
    csv_delimiter = "\t"

license: bsd-3-clause
lang: Python
commit: 6647025f3cb44818d0ff403160df35aa827516c7
subject: refactor env var load
repos: zanaca/docker-dns,zanaca/docker-dns
old_file: src/config.py
new_file: src/config.py
new_contents:

import os
import sys
import socket
import platform
import util
import json
import re

APP = os.path.basename(sys.argv[0])
USER = os.environ.get('SUDO_USER', 'USER')
HOME = os.path.expanduser(f"~{USER}")
HOME_ROOT = os.path.expanduser("~root")
BASE_PATH = os.path.dirname(os.path.dirname(__file__))
HOSTNAME = socket.gethostname()
HOSTUNAME = platform.uname().system

if util.on_macos or util.on_windows:
    NAME = platform.uname()[0].lower()
else:
    name_pattern = re.compile(r'-(\w*)')
    NAME = re.search(pattern=name_pattern, string=platform.uname().version).group(1).lower()

if util.on_macos:
    # OS_VERSION example: '12.0.1'
    OS_VERSION = platform.mac_ver()[0]
elif util.on_windows:
    # OS_VERSION example: '10.0.19042'
    OS_VERSION = platform.win32_ver()[1]
elif util.on_wsl:
    # OS_VERSION example: '10.0.19044.0'
    powershell_path = '/mnt/c/Windows/System32/WindowsPowerShell/v1.0//powershell.exe'
    version_path = '[Environment]::OSVersion.VersionString'
    OS_VERSION = os.popen(f'{powershell_path} {version_path}').read().split(' ')[-1].replace('\n', '')
else:
    # OS_VERSION example: '20.04.1'
    version_pattern = re.compile('~(.*)-')
    OS_VERSION = re.search(pattern=version_pattern, string=platform.uname().version).group(1)

OS = f'{HOSTUNAME}_{NAME}'
TOP_LEVEL_DOMAIN = (util.read_cache('tld') or 'docker').strip()
DOCKER_CONTAINER_TAG = (util.read_cache('tag') or 'ns0').strip()
DOCKER_CONTAINER_NAME = (util.read_cache('name') or DOCKER_CONTAINER_TAG).strip()
SUPPORTED_OS_VERSIONS = json.load(open(f'{BASE_PATH}/supported_os.json', 'r'))

old_contents:

import os
import sys
import socket
import platform
import util
import json
import re

APP = os.path.basename(sys.argv[0])
USER = os.environ.get('SUDO_USER')
if not USER:
    USER = os.environ.get('USER')
HOME = os.path.expanduser(f"~{USER}")
HOME_ROOT = os.path.expanduser("~root")
BASE_PATH = os.path.dirname(os.path.dirname(__file__))
HOSTNAME = socket.gethostname()
HOSTUNAME = platform.uname().system

if util.on_macos or util.on_windows:
    NAME = platform.uname()[0].lower()
else:
    name_pattern = re.compile(r'-(\w*)')
    NAME = re.search(pattern=name_pattern, string=platform.uname().version).group(1).lower()

if util.on_macos:
    # OS_VERSION example: '12.0.1'
    OS_VERSION = platform.mac_ver()[0]
elif util.on_windows:
    # OS_VERSION example: '10.0.19042'
    OS_VERSION = platform.win32_ver()[1]
elif util.on_wsl:
    # OS_VERSION example: '10.0.19044.0'
    powershell_path = '/mnt/c/Windows/System32/WindowsPowerShell/v1.0//powershell.exe'
    version_path = '[Environment]::OSVersion.VersionString'
    OS_VERSION = os.popen(f'{powershell_path} {version_path}').read().split(' ')[-1].replace('\n', '')
else:
    # OS_VERSION example: '20.04.1'
    version_pattern = re.compile('~(.*)-')
    OS_VERSION = re.search(pattern=version_pattern, string=platform.uname().version).group(1)

OS = f'{HOSTUNAME}_{NAME}'
TOP_LEVEL_DOMAIN = (util.read_cache('tld') or 'docker').strip()
DOCKER_CONTAINER_TAG = (util.read_cache('tag') or 'ns0').strip()
DOCKER_CONTAINER_NAME = (util.read_cache('name') or DOCKER_CONTAINER_TAG).strip()
SUPPORTED_OS_VERSIONS = json.load(open(f'{BASE_PATH}/supported_os.json', 'r'))

license: mit
lang: Python
24110636fd6eaae8962478c9f3e56c9da469be81
bump version to 0.9.0
njsmith/zs,njsmith/zs
zs/version.py
zs/version.py
# This file is part of ZS # Copyright (C) 2013-2014 Nathaniel Smith <njs@pobox.com> # See file LICENSE.txt for license information. # This file must be kept very simple, because it is consumed from several # places -- it is imported by zs/__init__.py, execfile'd by setup.py, etc. # We use a simple scheme: # 1.0.0 -> 1.0.0-dev -> 1.1.0 -> 1.1.0-dev # where the -dev versions are never released into the wild, they're just what # we stick into the VCS in between releases. # # This is compatible with PEP 440: # http://legacy.python.org/dev/peps/pep-0440/ # in a slightly abusive way -- PEP 440 provides no guidance on what version # number to use for *unreleased* versions, so we use an "integrator suffix", # which is intended to be used for things like Debian's locally patched # version, and is not allowed on public index servers. Which sounds about # right, actually... Crucially, PEP 440 says that "foo-bar" sorts *after* # "foo", which is what we want for a dev version. (Compare to "foo.dev0", # which sorts *before* "foo".) __version__ = "0.9.0"
# This file is part of ZS # Copyright (C) 2013-2014 Nathaniel Smith <njs@pobox.com> # See file LICENSE.txt for license information. # This file must be kept very simple, because it is consumed from several # places -- it is imported by zs/__init__.py, execfile'd by setup.py, etc. # We use a simple scheme: # 1.0.0 -> 1.0.0-dev -> 1.1.0 -> 1.1.0-dev # where the -dev versions are never released into the wild, they're just what # we stick into the VCS in between releases. # # This is compatible with PEP 440: # http://legacy.python.org/dev/peps/pep-0440/ # in a slightly abusive way -- PEP 440 provides no guidance on what version # number to use for *unreleased* versions, so we use an "integrator suffix", # which is intended to be used for things like Debian's locally patched # version, and is not allowed on public index servers. Which sounds about # right, actually... Crucially, PEP 440 says that "foo-bar" sorts *after* # "foo", which is what we want for a dev version. (Compare to "foo.dev0", # which sorts *before* "foo".) __version__ = "0.0.0-dev"
bsd-2-clause
Python
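The comment block in version.py reasons about how a dev suffix sorts under PEP 440. With the modern third-party packaging library (an assumption here; the 2014-era rules the comment cites differ slightly), a .devN release sorts before the final release, while a local-version suffix introduced with '+' sorts after it, which is the "integrator suffix" behaviour the comment leans on:

from packaging.version import Version  # assumes the 'packaging' distribution is installed

assert Version("0.9.0.dev0") < Version("0.9.0")  # dev releases sort *before* the release
assert Version("0.9.0+dev") > Version("0.9.0")   # local-version suffixes sort *after* it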
970978b5355259fe943d5efed1b8b4ce945fdfa7
Debug control flow and exit on errors
robbystk/weather
weather.py
weather.py
#! /usr/bin/python2 from os.path import expanduser,isfile import sys from urllib import urlopen location_path="~/.location" def location_from_homedir(): if isfile(expanduser(location_path)): with open(expanduser(location_path)) as f: return "&".join(f.read().split("\n")) else: print("no location file at ", location_path) sys.exit(2) def location_from_file(location_file): try: f = open(expanduser(location_file),'r') except IOError: print("file ", location_file, " not found\nLooking in home directory") return location_from_homedir() else: return "&".join(f.read().split("\n")) if len(sys.argv) == 1: # not given location file data = location_from_homedir() elif len(sys.argv) == 2: # given location file data = location_from_file(sys.argv[1]) else: # wrong number of arguments print("Usage: ", sys.argv[0], " [location file]") sys.exit(1) url="http://forecast.weather.gov/MapClick.php?"+data+"FcstType=digitalDWML" forecast = urlopen(url).read()
#! /usr/bin/python2 from os.path import expanduser,isfile from sys import argv from urllib import urlopen location_path="~/.location" def location_from_homedir(): if isfile(expanduser(location_path)): with open(expanduser(location_path)) as f: return "&".join(f.read().split("\n")) else: print("no location file at ", location_path) def location_from_file(file): try: f = open(expanduser(file),'r') except: print("file ", location_file, " not found") location_from_homedir if len(argv) == 1: # not given location file data = location_from_homedir() elif len(argv) == 2: # given location file data = location_from_file(argv[1]) else: # wrong number of arguments print("Usage: ", argv[0], " [location file]") url="http://forecast.weather.gov/MapClick.php?"+data+"FcstType=digitalDWML" forecast = urlopen(url).read()
mit
Python
6e024501b76beaccc2daa46f045f4e3444aa146b
update python client library example
alerta/python-alerta-client,alerta/python-alerta,alerta/python-alerta-client
examples/send.py
examples/send.py
#!/usr/bin/env python from alerta.api import ApiClient from alerta.alert import Alert api = ApiClient(endpoint='http://localhost:8080', key='tUA6oBX6E5hUUQZ+dyze6vZbOMmiZWA7ke88Nvio') alert = Alert( resource='web-server-01', event='HttpError', group='Web', environment='Production', service='theguardian.com', severity='major', value='Bad Gateway (502)', text='Web server error.', tags=['web', 'dc1', 'london'], attributes={'customer': 'The Guardian'} ) print alert try: print api.send(alert) except Exception as e: print e
#!/usr/bin/env python from alerta.client import Alert, ApiClient client = ApiClient() alert = Alert(resource='res1', event='event1') print alert print client.send(alert)
apache-2.0
Python
00d006d280960228f249480e7af990cf7df39b59
remove useless function
miLibris/flask-rest-jsonapi
jsonapi_utils/alchemy.py
jsonapi_utils/alchemy.py
# -*- coding: utf-8 -*- from sqlalchemy.sql.expression import desc, asc, text from jsonapi_utils.constants import DEFAULT_PAGE_SIZE def paginate_query(query, pagination_kwargs): """Paginate query result according to jsonapi rfc :param sqlalchemy.orm.query.Query query: sqlalchemy queryset :param dict pagination_kwargs: pagination informations """ page_size = int(pagination_kwargs.get('size', 0)) or DEFAULT_PAGE_SIZE query = query.limit(page_size) if pagination_kwargs.get('number'): query = query.offset((int(pagination_kwargs['number']) - 1) * page_size) return query def sort_query(query, querystring): """ :param query: sqlalchemy query to sort :param JSONAPIQueryString querystring: current querystring """ expressions = {'asc': asc, 'desc': desc} order_items = [] for sort_opt in querystring.sorting: field = text(sort_opt['field']) order = expressions.get(sort_opt['order']) order_items.append(order(field)) return query.order_by(*order_items)
# -*- coding: utf-8 -*- from sqlalchemy.sql.expression import desc, asc, text from jsonapi_utils.constants import DEFAULT_PAGE_SIZE def paginate_query(query, pagination_kwargs): """Paginate query result according to jsonapi rfc :param sqlalchemy.orm.query.Query query: sqlalchemy queryset :param dict pagination_kwargs: pagination informations """ page_size = int(pagination_kwargs.get('size', 0)) or DEFAULT_PAGE_SIZE query = query.limit(page_size) if pagination_kwargs.get('number'): query = query.offset((int(pagination_kwargs['number']) - 1) * page_size) return query def sort_query(query, querystring): """ :param query: sqlalchemy query to sort :param JSONAPIQueryString querystring: current querystring """ expressions = {'asc': asc, 'desc': desc} order_items = [] for sort_opt in querystring.sorting: field = text(sort_opt['field']) order = expressions.get(sort_opt['order']) order_items.append(order(field)) return query.order_by(*order_items) def include_query(query, include_kwargs): pass
mit
Python
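paginate_query above maps a 1-based page[number] onto LIMIT/OFFSET with offset = (number - 1) * size. The same arithmetic in isolation, with a hypothetical default page size of 20 standing in for jsonapi_utils.constants.DEFAULT_PAGE_SIZE:

DEFAULT_PAGE_SIZE = 20  # hypothetical stand-in for the real constant

def limit_offset(pagination_kwargs):
    """Return the (LIMIT, OFFSET) pair implied by page[size]/page[number]."""
    size = int(pagination_kwargs.get('size', 0)) or DEFAULT_PAGE_SIZE
    number = int(pagination_kwargs.get('number', 1))
    return size, (number - 1) * size

assert limit_offset({'size': '10', 'number': '3'}) == (10, 20)  # page 3 covers rows 21..30
assert limit_offset({}) == (20, 0)                              # defaults to the first page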
1f31ed22627cb1cf5b4323a18435bd9fb7fc7462
add trailing slash
cwilkes/crispy-barnacle,cwilkes/crispy-barnacle,cwilkes/crispy-barnacle
web/url.py
web/url.py
from flask import Flask, render_template, jsonify, redirect, url_for from web.clasher import Clasher import os xml_file = 'https://s3.amazonaws.com/navishack/PC-00-COMP-BBC.xml.gz' clash_data = None app = Flask(__name__) app.debug = 'DEBUG' in os.environ @app.route('/') def index(): return redirect('/clash/') @app.route('/projects/') def projects(): return 'The project page' @app.route('/about') def about(): return 'The about page' def get_clash_test(): global clash_data if clash_data is None: print 'loading' clash_data = Clasher(xml_file) return clash_data.data['exchange']['batchtest']['clashtests']['clashtest'] @app.route('/clash/') def clash_index(): number_clashes = len(get_clash_test()) return render_template('clash_index.html', number_clashes=number_clashes) @app.route('/clash/<int:number>') def clash_by_number(number): clash_info = get_clash_test()[number] return jsonify(clash_info) @app.route('/hello/') @app.route('/hello/<name>') def hello(name=None): return render_template('hello.html', name=name) @app.route('/time') @app.route('/time/') def time_series(): data_url = '/static/test-data.json' return render_template('clash-summary-over-time.html', data_url=data_url)
from flask import Flask, render_template, jsonify, redirect, url_for from web.clasher import Clasher import os xml_file = 'https://s3.amazonaws.com/navishack/PC-00-COMP-BBC.xml.gz' clash_data = None app = Flask(__name__) app.debug = 'DEBUG' in os.environ @app.route('/') def index(): return redirect('/clash/') @app.route('/projects/') def projects(): return 'The project page' @app.route('/about') def about(): return 'The about page' def get_clash_test(): global clash_data if clash_data is None: print 'loading' clash_data = Clasher(xml_file) return clash_data.data['exchange']['batchtest']['clashtests']['clashtest'] @app.route('/clash/') def clash_index(): number_clashes = len(get_clash_test()) return render_template('clash_index.html', number_clashes=number_clashes) @app.route('/clash/<int:number>') def clash_by_number(number): clash_info = get_clash_test()[number] return jsonify(clash_info) @app.route('/hello/') @app.route('/hello/<name>') def hello(name=None): return render_template('hello.html', name=name) @app.route('/time') def time_series(): data_url = '/static/test-data.json' return render_template('clash-summary-over-time.html', data_url=data_url)
mit
Python
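The commit above registers /time both with and without a trailing slash. In Flask, a rule ending in '/' makes the slash-less URL redirect to the canonical form, while a rule without one leaves the slashed URL a 404 unless it is registered separately, which is what url.py now does. A minimal sketch (route names are illustrative; the redirect code is 301 or 308 depending on the Flask/Werkzeug version):

from flask import Flask

app = Flask(__name__)

@app.route('/reports/')   # canonical form with slash
def reports():
    return 'ok'

@app.route('/plain')      # no trailing-slash rule
def plain():
    return 'ok'

client = app.test_client()
assert client.get('/reports').status_code in (301, 308)  # redirected to /reports/
assert client.get('/plain/').status_code == 404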
3dd84fc4dc6cff921329286485e287c13ebebdec
Update version.py
mir-dataset-loaders/mirdata
mirdata/version.py
mirdata/version.py
#!/usr/bin/env python """Version info""" short_version = "0.3" version = "0.3.4b1"
#!/usr/bin/env python """Version info""" short_version = "0.3" version = "0.3.4b0"
bsd-3-clause
Python
32b287b9d22b22262d291fb7e352a3502fe2e68f
Fix Docker test to wait fixed time (#5858)
lolski/grakn,graknlabs/grakn,graknlabs/grakn,graknlabs/grakn,lolski/grakn,lolski/grakn,lolski/grakn,graknlabs/grakn
test/assembly/docker.py
test/assembly/docker.py
#!/usr/bin/env python import os import subprocess as sp import time print('Building the image...') sp.check_call(['bazel', 'run', '//:assemble-docker']) print('Starting the image...') sp.check_call(['docker', 'run', '-v', '{}:/grakn-core-all-linux/logs/'.format(os.getcwd()), '--name', 'grakn','-d', '--rm', '-ti', '-p', '127.0.0.1:48555:48555/tcp', 'bazel:assemble-docker']) print('Docker status:') sp.check_call(['docker', 'ps']) print('Waiting 30s for the instance to be ready') time.sleep(30) print('Running the test...') sp.check_call(['bazel', 'test', '//test/common:grakn-application-test', '--test_output=streamed', '--spawn_strategy=standalone', '--cache_test_results=no']) print('Stopping the container...') sp.check_call(['docker', 'kill', 'grakn']) print('Done!')
#!/usr/bin/env python import os import socket import subprocess as sp import sys import time def wait_for_port(port, host='localhost', timeout=30.0): start_time = time.time() while True: try: socket.create_connection((host, port), timeout=timeout) return except OSError as ex: time.sleep(0.01) if time.time() - start_time >= timeout: raise TimeoutError('Waited too long for the port {} on host {} to start accepting ' 'connections.'.format(port, host)) print('Building the image...') sp.check_call(['bazel', 'run', '//:assemble-docker']) print('Starting the image...') sp.check_call(['docker', 'run', '-v', '{}:/grakn-core-all-linux/logs/'.format(os.getcwd()), '--name', 'grakn','-d', '--rm', '-ti', '-p', '127.0.0.1:48555:48555/tcp', 'bazel:assemble-docker']) print('Docker status:') sp.check_call(['docker', 'ps']) sys.stdout.write('Waiting for the instance to be ready') sys.stdout.flush() timeout = 0 # TODO: add timeout # TODO: fail if the docker image is dead wait_for_port(48555) print('Running the test...') sp.check_call(['bazel', 'test', '//test/common:grakn-application-test', '--test_output=streamed', '--spawn_strategy=standalone', '--cache_test_results=no']) print('Stopping the container...') sp.check_call(['docker', 'kill', 'grakn']) print('Done!')
agpl-3.0
Python
d8a27a94d90e5611b24c26c331a0016bbbb87af0
update debug set default to false
gobuild-old/gobuild3,gobuild-old/gobuild3,gobuild/gobuild3,gobuild-old/gobuild3,gobuild/gobuild3,gobuild-old/gobuild3,gobuild/gobuild3,gobuild-old/gobuild3,gobuild/gobuild3,gobuild/gobuild3
web/web.py
web/web.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # web # import os import flask import models app = flask.Flask(__name__) app.secret_key = 'some_secret' app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024 # max 16M #app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER def register_routers(): from routers import home app.register_blueprint(home.bp, url_prefix='') for blueprint in 'home', 'repo', 'donate': exec 'from routers import %s' %(blueprint) bp = eval(blueprint+'.bp') app.register_blueprint(bp, url_prefix='/'+blueprint) #@app.route('/login/', methods=['GET', 'POST']) #def login(): # app.logger.debug("login") # error = None # if request.method == 'POST': # if request.form['username'] != 'admin' or \ # request.form['password'] != 'secret': # error = 'Invalid credentials' # else: # flash('You are successfully logged in') # return redirect(url_for('index')) # return render_template('login.html', error=error) port = os.getenv('PORT') or '5000' debug = (os.getenv('DEBUG') or '').lower() in ('true', '1') if __name__ == '__main__': register_routers() app.run(debug=debug, host='0.0.0.0', port=int(port))
#!/usr/bin/env python # -*- coding: utf-8 -*- # # web # import os import flask import models app = flask.Flask(__name__) app.secret_key = 'some_secret' app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024 # max 16M #app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER def register_routers(): from routers import home app.register_blueprint(home.bp, url_prefix='') for blueprint in 'home', 'repo', 'donate': exec 'from routers import %s' %(blueprint) bp = eval(blueprint+'.bp') app.register_blueprint(bp, url_prefix='/'+blueprint) #@app.route('/login/', methods=['GET', 'POST']) #def login(): # app.logger.debug("login") # error = None # if request.method == 'POST': # if request.form['username'] != 'admin' or \ # request.form['password'] != 'secret': # error = 'Invalid credentials' # else: # flash('You are successfully logged in') # return redirect(url_for('index')) # return render_template('login.html', error=error) port = os.getenv('PORT') or '5000' if __name__ == '__main__': register_routers() app.run(debug=True, host='0.0.0.0', port=int(port))
mit
Python
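The new debug line only recognises the exact strings 'true' and '1'. A slightly more forgiving helper is a common pattern; this sketch is illustrative and not part of the project:

import os

def env_flag(name, default=False):
    """Read a boolean switch from the environment, accepting common truthy spellings."""
    value = os.getenv(name)
    if value is None:
        return default
    return value.strip().lower() in ('1', 'true', 'yes', 'on')

os.environ['DEBUG'] = 'True'
assert env_flag('DEBUG') is True
assert env_flag('NO_SUCH_VAR') is False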
aea0d98fd7fb5eaa0fd547b5442af6984f8d78e0
fix bugs
Charleo85/SIS-Rebuild,Charleo85/SIS-Rebuild,Charleo85/SIS-Rebuild,Charleo85/SIS-Rebuild
exp/exp/tests.py
exp/exp/tests.py
from django.test import TestCase, RequestFactory import json from . import views_model, views_auth class SearchTestCase(TestCase): def setUp(self): self.factory = RequestFactory() def test_general_search(self): post_data = { 'search_query': 'tp3ks', 'query_specifier': 'general', } request = self.factory.post('/search/', data=post_data) response = views_model.search(request) resp_data = json.loads(response.content.decode('utf-8')) self.assertEqual(resp_data['status_code'], 200)
from django.test import TestCase, RequestFactory import json from . import views_model, views_auth class SearchTestCase(TestCase): def setUp(self): self.factory = RequestFactory() def test_general_search(self): post_data = { 'search_query': 'tp3ks', 'query_specifier': 'general', } request = self.factory.post('/search/', data=post_data) response = views_model.search(request) resp_data = json.loads(response.context, safe=False) self.assertEqual(resp_data['status_code'], 200)
bsd-3-clause
Python
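The fix above matters because in Django tests response.content is the raw byte body, whereas response.context is a template-rendering artifact that json.loads cannot parse. The decode-then-parse step in isolation:

import json

body = b'{"status_code": 200}'            # what response.content holds: bytes
data = json.loads(body.decode('utf-8'))   # decode explicitly; portable across Python versions
assert data['status_code'] == 200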
ebfac180c04d24ea8ff93583eac52e6c0bc8d553
Add contract test for 'v2' email notification
alphagov/notifications-api,alphagov/notifications-api
tests/app/public_contracts/test_GET_notification.py
tests/app/public_contracts/test_GET_notification.py
from . import return_json_from_response, validate_v0, validate from app.models import ApiKey, KEY_TYPE_NORMAL from app.dao.api_key_dao import save_model_api_key from app.v2.notifications.notification_schemas import get_notification_response from tests import create_authorization_header def _get_notification(client, notification, url): save_model_api_key(ApiKey( service=notification.service, name='api_key', created_by=notification.service.created_by, key_type=KEY_TYPE_NORMAL )) auth_header = create_authorization_header(service_id=notification.service_id) return client.get(url, headers=[auth_header]) def test_get_v2_sms_contract(client, sample_notification): response_json = return_json_from_response(_get_notification( client, sample_notification, '/v2/notifications/{}'.format(sample_notification.id) )) validate(response_json, get_notification_response) def test_get_v2_email_contract(client, sample_email_notification): response_json = return_json_from_response(_get_notification( client, sample_email_notification, '/v2/notifications/{}'.format(sample_email_notification.id) )) validate(response_json, get_notification_response) def test_get_api_sms_contract(client, sample_notification): response_json = return_json_from_response(_get_notification( client, sample_notification, '/notifications/{}'.format(sample_notification.id) )) validate_v0(response_json, 'GET_notification_return_sms.json') def test_get_api_email_contract(client, sample_email_notification): response_json = return_json_from_response(_get_notification( client, sample_email_notification, '/notifications/{}'.format(sample_email_notification.id) )) validate_v0(response_json, 'GET_notification_return_email.json') def test_get_job_sms_contract(client, sample_notification): response_json = return_json_from_response(_get_notification( client, sample_notification, '/notifications/{}'.format(sample_notification.id) )) validate_v0(response_json, 'GET_notification_return_sms.json') def test_get_job_email_contract(client, sample_email_notification): response_json = return_json_from_response(_get_notification( client, sample_email_notification, '/notifications/{}'.format(sample_email_notification.id) )) validate_v0(response_json, 'GET_notification_return_email.json') def test_get_notifications_contract(client, sample_notification, sample_email_notification): response_json = return_json_from_response(_get_notification( client, sample_notification, '/notifications' )) validate_v0(response_json, 'GET_notifications_return.json')
from . import return_json_from_response, validate_v0, validate from app.models import ApiKey, KEY_TYPE_NORMAL from app.dao.api_key_dao import save_model_api_key from app.v2.notifications.notification_schemas import get_notification_response from tests import create_authorization_header def _get_notification(client, notification, url): save_model_api_key(ApiKey( service=notification.service, name='api_key', created_by=notification.service.created_by, key_type=KEY_TYPE_NORMAL )) auth_header = create_authorization_header(service_id=notification.service_id) return client.get(url, headers=[auth_header]) def test_get_v2_notification(client, sample_notification): response_json = return_json_from_response(_get_notification( client, sample_notification, '/v2/notifications/{}'.format(sample_notification.id) )) validate(response_json, get_notification_response) def test_get_api_sms_contract(client, sample_notification): response_json = return_json_from_response(_get_notification( client, sample_notification, '/notifications/{}'.format(sample_notification.id) )) validate_v0(response_json, 'GET_notification_return_sms.json') def test_get_api_email_contract(client, sample_email_notification): response_json = return_json_from_response(_get_notification( client, sample_email_notification, '/notifications/{}'.format(sample_email_notification.id) )) validate_v0(response_json, 'GET_notification_return_email.json') def test_get_job_sms_contract(client, sample_notification): response_json = return_json_from_response(_get_notification( client, sample_notification, '/notifications/{}'.format(sample_notification.id) )) validate_v0(response_json, 'GET_notification_return_sms.json') def test_get_job_email_contract(client, sample_email_notification): response_json = return_json_from_response(_get_notification( client, sample_email_notification, '/notifications/{}'.format(sample_email_notification.id) )) validate_v0(response_json, 'GET_notification_return_email.json') def test_get_notifications_contract(client, sample_notification, sample_email_notification): response_json = return_json_from_response(_get_notification( client, sample_notification, '/notifications' )) validate_v0(response_json, 'GET_notifications_return.json')
mit
Python
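The validate(response_json, get_notification_response) calls above check each payload against a JSON Schema. Helpers like that are typically thin wrappers over the jsonschema package (an assumption here; the schema below is invented):

from jsonschema import validate  # assumes the 'jsonschema' package is installed

schema = {
    'type': 'object',
    'required': ['id'],
    'properties': {'id': {'type': 'string'}},
}
validate({'id': 'abc123'}, schema)  # passes silently; raises ValidationError on mismatch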
fc6ad89460dca9e1e5b1a4effe14c868cafc0e54
add looping around math
mlyons-tcc/test
test.py
test.py
__author__ = 'mike.lyons' print "Hello World!" while True: x = raw_input("Enter any number: ") y = raw_input("Enter another number: ") try: x = float(x) y = float(y) except ValueError: x = 0.0 y = 0.0 print x+y print x/2 print y**2 user_exit = raw_input("Exit? (y/n): ") if user_exit == 'y' or user_exit == 'Y': break print "Goodbye World!"
__author__ = 'mike.lyons' print "Hello World!" x = raw_input("Enter any number: ") y = raw_input("Enter another number: ") try: x = float(x) y = float(y) except ValueError: x = 0.0 y = 0.0 print x+y print x/2 print y**2
apache-2.0
Python
91a3a94466736ef6996befa73549e309fb9251f8
Remove unused import
enstrategic/django-sql-explorer,tzangms/django-sql-explorer,enstrategic/django-sql-explorer,groveco/django-sql-explorer,grantmcconnaughey/django-sql-explorer,epantry/django-sql-explorer,enstrategic/django-sql-explorer,groveco/django-sql-explorer,dsanders11/django-sql-explorer,enstrategic/django-sql-explorer,epantry/django-sql-explorer,groveco/django-sql-explorer,dsanders11/django-sql-explorer,grantmcconnaughey/django-sql-explorer,epantry/django-sql-explorer,tzangms/django-sql-explorer,dsanders11/django-sql-explorer,grantmcconnaughey/django-sql-explorer,dsanders11/django-sql-explorer,tzangms/django-sql-explorer,groveco/django-sql-explorer
explorer/urls.py
explorer/urls.py
from django.conf.urls import url from explorer.views import ( QueryView, CreateQueryView, PlayQueryView, DeleteQueryView, ListQueryView, ListQueryLogView, download_query, view_csv_query, email_csv_query, download_csv_from_sql, schema, format_sql, ) urlpatterns = [ url(r'(?P<query_id>\d+)/$', QueryView.as_view(), name='query_detail'), url(r'(?P<query_id>\d+)/download$', download_query, name='query_download'), url(r'(?P<query_id>\d+)/csv$', view_csv_query, name='query_csv'), url(r'(?P<query_id>\d+)/email_csv$', email_csv_query, name='email_csv_query'), url(r'(?P<pk>\d+)/delete$', DeleteQueryView.as_view(), name='query_delete'), url(r'new/$', CreateQueryView.as_view(), name='query_create'), url(r'play/$', PlayQueryView.as_view(), name='explorer_playground'), url(r'csv$', download_csv_from_sql, name='generate_csv'), url(r'schema/$', schema, name='explorer_schema'), url(r'logs/$', ListQueryLogView.as_view(), name='explorer_logs'), url(r'format/$', format_sql, name='format_sql'), url(r'^$', ListQueryView.as_view(), name='explorer_index'), ]
from django.conf.urls import patterns, url from explorer.views import ( QueryView, CreateQueryView, PlayQueryView, DeleteQueryView, ListQueryView, ListQueryLogView, download_query, view_csv_query, email_csv_query, download_csv_from_sql, schema, format_sql, ) urlpatterns = [ url(r'(?P<query_id>\d+)/$', QueryView.as_view(), name='query_detail'), url(r'(?P<query_id>\d+)/download$', download_query, name='query_download'), url(r'(?P<query_id>\d+)/csv$', view_csv_query, name='query_csv'), url(r'(?P<query_id>\d+)/email_csv$', email_csv_query, name='email_csv_query'), url(r'(?P<pk>\d+)/delete$', DeleteQueryView.as_view(), name='query_delete'), url(r'new/$', CreateQueryView.as_view(), name='query_create'), url(r'play/$', PlayQueryView.as_view(), name='explorer_playground'), url(r'csv$', download_csv_from_sql, name='generate_csv'), url(r'schema/$', schema, name='explorer_schema'), url(r'logs/$', ListQueryLogView.as_view(), name='explorer_logs'), url(r'format/$', format_sql, name='format_sql'), url(r'^$', ListQueryView.as_view(), name='explorer_index'), ]
mit
Python
3b4dd9a59de9a37a4167f64fec2f3896479f56c9
Simplify option formatting.
ohsu-qin/qipipe
qipipe/registration/ants/similarity_metrics.py
qipipe/registration/ants/similarity_metrics.py
class SimilarityMetric(object): _FMT = "{name}[{fixed}, {moving}, {opts}]" def __init__(self, name, *opts): self.name = name self.opts = opts def format(self, fixed, moving, weight=1): """ Formats the ANTS similarity metric argument. :param fixed: the fixed reference file :param moving: the moving file to register :param weight: the weight to assign this metric (default 1) :rtype: str """ opts = [weight] opts.extend(self.opts) opt_arg = ', '.join(str(opt) for opt in opts) return SimilarityMetric._FMT.format(name=self.name, fixed=fixed, moving=moving, opts=opt_arg) def __str__(self): return self.name class PR(SimilarityMetric): """ The probability mapping metric. """ def __init__(self): super(PR, self).__init__('PR') class CC(SimilarityMetric): """ The cross-correlation metric. """ def __init__(self, radius=4): super(CC, self).__init__('CC', radius) class MI(SimilarityMetric): """ The mutual information metric. """ def __init__(self, bins=32): super(MI, self).__init__('MI', bins) class MSQ(SimilarityMetric): """ The mean-squared metric. """ def __init__(self): super(MSQ, self).__init__('MSQ', 0) class JTB(SimilarityMetric): """ The B-spline metric. """ def __init__(self, radius=32): super(JTB, self).__init__('JTB', radius)
class SimilarityMetric(object): _FMT = "{name}[{fixed}, {moving}, {opts}]" def __init__(self, name, *opts): self.name = name self.opts = opts def format(self, fixed, moving, weight=1): """ Formats the ANTS similiarity metric argument. :param reference: the fixed reference file :param moving: the moving file to register :param weight: the weight to assign this metric (default 1) :rtype: str """ opt_arg = ', '.join([weight] + self.opts) return SimilarityMetric._FMT.format(name=self.name, fixed=fixed, moving=moving, opts=opt_arg) def __str__(self): return self.name class PR(SimilarityMetric): """ The probability mapping metric. """ def __init__(self): super(PR, self).__init__('PR') class CC(SimilarityMetric): """ The cross-correlation metric. """ def __init__(self, radius=4): super(CC, self).__init__('CC', radius) class MI(SimilarityMetric): """ The mutual information metric. """ def __init__(self, bins=32): super(MI, self).__init__('MI', bins) class MSQ(SimilarityMetric): """ The mean-squared metric. """ def __init__(self): super(MSQ, self).__init__('MSQ', 0) class JTB(SimilarityMetric): """ The B-spline metric. """ def __init__(self, radius=32): super(JTB, self).__init__('JTB', radius)
bsd-2-clause
Python
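Given the classes above (and the str() coercion applied when joining options — the raw join would raise TypeError on integer options), a quick usage sketch shows the argument string ANTS would receive; the file names are hypothetical:

cc = CC(radius=4)                           # cross-correlation with a radius of 4
arg = cc.format('fixed.nii', 'moving.nii')  # weight defaults to 1
assert arg == 'CC[fixed.nii, moving.nii, 1, 4]'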
bc6bacf6bd5fccf2e09dd3c07f6104e1f845351b
Revert "Added solution to assignment 4"
infoscout/python-bootcamp-pv
bootcamp/lesson4.py
bootcamp/lesson4.py
import datetime import math import requests from core import test_helper # Question 1 # ---------- # Using the datetime module return a datetime object with the year of 2015, the month of June, and the day of 1 def playing_with_dt(): # Write code here pass # Question 2 # ---------- # Using the math module return pi def playing_with_math(): # Write code here pass # Question 3 # ---------- # The following URL is a public data set of demographic statistics by zip code in the city of New York # url: https://data.cityofnewyork.us/api/views/kku6-nxdu/rows.json?accessType=DOWNLOAD # # Make a request to that address and inspect the contents. Return the number of unique demographic attributes in the # data set as well as the percentage of ETHNICITY UNKNOWN formatted as a string with 2 significant figures. # The return object should be a tuple data type def explore_data(): # Write code here pass def main(): print "\nRunning playing_with_dt function..." test_helper(playing_with_dt(), datetime.datetime(2015, 06, 01)) print "\nRunning playing_with_math function..." test_helper(playing_with_math(), math.pi) print "\nRunning explore_data function..." test_helper(explore_data(), ('0.0039%', 46)) if __name__ == '__main__': main()
import datetime import math import requests from core import test_helper # Question 1 # ---------- # Using the datetime module return a datetime object with the year of 2015, the month of June, and the day of 1 def playing_with_dt(): return datetime.datetime(year=2015, month=06, day=01) # Question 2 # ---------- # Using the math module return pi def playing_with_math(): return math.pi # Question 3 # ---------- # The following URL is public data set of demographic statistics by zip code in the city of New York # url: https://data.cityofnewyork.us/api/views/kku6-nxdu/rows.json?accessType=DOWNLOAD # # Make a request to that address and inspect the contents. Return the number of unique demographic attributes in the # data set as well as the percentage of ETHNICITY UNKNOWN formatted as a string with 2 significant figures. # The return object should be a tuple data type def explore_data(): demo_attributes = [] url = 'http://data.cityofnewyork.us/api/views/kku6-nxdu/rows.json?accessType=DOWNLOAD' r = requests.get(url=url) json = r.json() meta = json['meta'] view = meta['view'] columns = view['columns'] for column in columns: if column['name'] == 'PERCENT ETHNICITY UNKNOWN': avg = column['cachedContents']['average'] if column['dataTypeName'] == 'number': demo_attributes.append(column['name']) num_attributes = len(set(demo_attributes)) avg_formatted = '{:.4f}%'.format(float(avg)) t = (avg_formatted, num_attributes) return t def main(): print "\nRunning playing_with_dt_one function..." test_helper(playing_with_dt(), datetime.datetime(2015, 06, 01)) print "\nRunning playing_with_dt_one function..." test_helper(playing_with_math(), math.pi) print "\nRunning explore_data function..." test_helper(explore_data(), ('0.0039%', 46)) if __name__ == '__main__': main()
mit
Python
864d551ca7aaf661ecfe54cca8c69e0f9daf1c46
fix license
laike9m/ezcf,hzruandd/ezcf,laike9m/ezcf,hzruandd/ezcf
ezcf/__init__.py
ezcf/__init__.py
__author__ = "laike9m (laike9m@gmail.com)" __title__ = 'ezcf' __version__ = '0.0.1' __license__ = 'MIT' __copyright__ = 'Copyright 2015 laike9m' import sys from .api import ConfigFinder sys.meta_path.append(ConfigFinder())
__author__ = "laike9m (laike9m@gmail.com)" __title__ = 'ezcf' __version__ = '0.0.1' # __build__ = None __license__ = 'Apache 2.0' __copyright__ = 'Copyright 2015 laike9m' import sys from .api import ConfigFinder sys.meta_path.append(ConfigFinder())
mit
Python
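ezcf works by appending a finder to sys.meta_path so that importing a config file looks like importing a module. A self-contained sketch of that hook pattern using the modern importlib API (ezcf itself predates find_spec; the module name here is invented):

import sys
import importlib.abc
import importlib.machinery

class SketchFinder(importlib.abc.MetaPathFinder, importlib.abc.Loader):
    def find_spec(self, name, path, target=None):
        if name == 'fake_config':                         # invented module name
            return importlib.machinery.ModuleSpec(name, self)
        return None                                       # let other finders handle the rest

    def create_module(self, spec):
        return None                                       # use default module creation

    def exec_module(self, module):
        module.GREETING = 'hi'                            # a real finder would parse a file here

sys.meta_path.append(SketchFinder())
import fake_config
assert fake_config.GREETING == 'hi'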
ae8e98a4e609bee0e73175bdc50859dd0bed62cb
Fix lint errors.
mozilla/normandy,mozilla/normandy,mozilla/normandy,mozilla/normandy
recipe-server/normandy/base/api/serializers.py
recipe-server/normandy/base/api/serializers.py
from django.contrib.auth.models import User from rest_framework import serializers class UserSerializer(serializers.ModelSerializer): id = serializers.IntegerField() first_name = serializers.CharField() last_name = serializers.CharField() email = serializers.CharField() class Meta: model = User fields = [ 'id', 'first_name', 'last_name', 'email', ] class ServiceInfoSerializer(serializers.ModelSerializer): user = UserSerializer() peer_approval_enforced = serializers.BooleanField() class Meta: model = User fields = [ 'user', 'peer_approval_enforced', ]
from django.contrib.auth.models import User from rest_framework import serializers class UserSerializer(serializers.ModelSerializer): id = serializers.IntegerField() first_name = serializers.CharField() last_name = serializers.CharField() email = serializers.CharField() class Meta: model = User fields = [ 'id', 'first_name', 'last_name', 'email', ] class ServiceInfoSerializer(serializers.ModelSerializer): user = UserSerializer(); peer_approval_enforced = serializers.BooleanField() class Meta: model = User fields = [ 'user', 'peer_approval_enforced', ]
mpl-2.0
Python
53db7465c8bb5f7ea0a9647b69555df36ade4191
Use a trigger and lambda instead of schedule for the delay stage.
matham/moa
moa/stage/delay.py
moa/stage/delay.py
__all__ = ('Delay', ) import random import time from kivy.clock import Clock from kivy.properties import (OptionProperty, BoundedNumericProperty, ReferenceListProperty) from moa.stage import MoaStage class Delay(MoaStage): _delay_step_trigger = None def __init__(self, **kwargs): super(Delay, self).__init__(**kwargs) self._delay_step_trigger = Clock.create_trigger(lambda dt: self.step_stage()) def pause(self, *largs, **kwargs): if super(Delay, self).pause(*largs, **kwargs): self.delay = max(0, self.delay - (time.clock() - self.start_time)) self._delay_step_trigger.cancel() return True return False def unpause(self, *largs, **kwargs): if super(Delay, self).unpause(*largs, **kwargs): self._delay_step_trigger.timeout = self.delay self._delay_step_trigger() return True return False def stop(self, *largs, **kwargs): if super(Delay, self).stop(*largs, **kwargs): self._delay_step_trigger.cancel() return True return False def step_stage(self, *largs, **kwargs): if not super(Delay, self).step_stage(*largs, **kwargs): return False if self.delay_type == 'random': self.delay = random.uniform(self.min, self.max) self._delay_step_trigger.timeout = self.delay self._delay_step_trigger() return True min = BoundedNumericProperty(0., min=0.) max = BoundedNumericProperty(1., min=0.) range = ReferenceListProperty(min, max) delay = BoundedNumericProperty(0.5, min=0.) delay_type = OptionProperty('constant', options=['constant', 'random'])
__all__ = ('Delay', ) import random import time from kivy.clock import Clock from kivy.properties import (OptionProperty, BoundedNumericProperty, ReferenceListProperty) from moa.stage import MoaStage class Delay(MoaStage): def pause(self, *largs, **kwargs): if super(Delay, self).pause(*largs, **kwargs): self.delay = max(0, self.delay - (time.clock() - self.start_time)) Clock.unschedule(self.step_stage) return True return False def unpause(self, *largs, **kwargs): if super(Delay, self).unpause(*largs, **kwargs): Clock.schedule_once(self.step_stage, self.delay) return True return False def stop(self, *largs, **kwargs): if super(Delay, self).stop(*largs, **kwargs): Clock.unschedule(self.step_stage) return True return False def step_stage(self, *largs, **kwargs): if not super(Delay, self).step_stage(*largs, **kwargs): return False if self.delay_type == 'random': self.delay = random.uniform(self.min, self.max) Clock.schedule_once(self.step_stage, self.delay) return True min = BoundedNumericProperty(0., min=0.) max = BoundedNumericProperty(1., min=0.) range = ReferenceListProperty(min, max) delay = BoundedNumericProperty(0.5, min=0.) delay_type = OptionProperty('constant', options=['constant', 'random'])
mit
Python
bdacc7646c087d8fd87feb20c6af1d23d5cb1feb
clean up track/models
nikolas/edx-platform,Stanford-Online/edx-platform,polimediaupv/edx-platform,appliedx/edx-platform,zubair-arbi/edx-platform,iivic/BoiseStateX,Endika/edx-platform,Livit/Livit.Learn.EdX,chand3040/cloud_that,antoviaque/edx-platform,nanolearning/edx-platform,UXE/local-edx,alexthered/kienhoc-platform,carsongee/edx-platform,zadgroup/edx-platform,nttks/edx-platform,inares/edx-platform,chauhanhardik/populo,rationalAgent/edx-platform-custom,shurihell/testasia,procangroup/edx-platform,doismellburning/edx-platform,pelikanchik/edx-platform,tiagochiavericosta/edx-platform,jazkarta/edx-platform,atsolakid/edx-platform,playm2mboy/edx-platform,Lektorium-LLC/edx-platform,jamiefolsom/edx-platform,mcgachey/edx-platform,naresh21/synergetics-edx-platform,openfun/edx-platform,jbassen/edx-platform,TsinghuaX/edx-platform,jonathan-beard/edx-platform,itsjeyd/edx-platform,pepeportela/edx-platform,IITBinterns13/edx-platform-dev,cyanna/edx-platform,wwj718/edx-platform,vikas1885/test1,simbs/edx-platform,Kalyzee/edx-platform,bigdatauniversity/edx-platform,hkawasaki/kawasaki-aio8-1,iivic/BoiseStateX,Endika/edx-platform,Livit/Livit.Learn.EdX,chand3040/cloud_that,antoviaque/edx-platform,nanolearning/edx-platform,UXE/local-edx,alexthered/kienhoc-platform,carsongee/edx-platform,zadgroup/edx-platform,nttks/edx-platform,inares/edx-platform,chauhanhardik/populo,rationalAgent/edx-platform-custom,shurihell/testasia,procangroup/edx-platform,doismellburning/edx-platform,pelikanchik/edx-platform,tiagochiavericosta/edx-platform,jazkarta/edx-platform,atsolakid/edx-platform,playm2mboy/edx-platform,Lektorium-LLC/edx-platform,nanolearning/edx-platform,jjmiranda/edx-platform,xinjiguaike/edx-platform,DNFcode/edx-platform,rhndg/openedx,Edraak/edraak-platform,dkarakats/edx-platform,jazkarta/edx-platform,shabab12/edx-platform,jelugbo/tundex,simbs/edx-platform,jzoldak/edx-platform,rismalrv/edx-platform,zadgroup/edx-platform,etzhou/edx-platform,ovnicraft/edx-platform,jazkarta/edx-platform,zubair-arbi/edx-platform,xinjiguaike/edx-platform,etzhou/edx-platform,MakeHer/edx-platform,LICEF/edx-platform,defance/edx-platform,franosincic/edx-platform,nttks/jenkins-test,ahmadiga/min_edx,mahendra-r/edx-platform,deepsrijit1105/edx-platform,PepperPD/edx-pepper-platform,a-parhom/edx-platform,benpatterson/edx-platform,Edraak/circleci-edx-platform,apigee/edx-platform,Edraak/edraak-platform,dkarakats/edx-platform,jazkarta/edx-platform,shabab12/edx-platform,jelugbo/tundex,simbs/edx-platform,jzoldak/edx-platform,rismalrv/edx-platform,zadgroup/edx-platform,etzhou/edx-platform,ovnicraft/edx-platform,jazkarta/edx-platform,zubair-arbi/edx-platform,xinjiguaike/edx-platform,etzhou/edx-platform,MakeHer/edx-platform,LICEF/edx-platform,defance/edx-platform,franosincic/edx-platform,nttks/jenkins-test,ahmadiga/min_edx,mahendra-r/edx-platform,deepsrijit1105/edx-platform,PepperPD/edx-pepper-platform,a-parhom/edx-platform,benpatterson/edx-platform,Edraak/circleci-edx-platform,apigee/edx-platform,Softmotions/edx-platform,teltek/edx-platform,devs1991/test_edx_docmode,ahmadio/edx-platform,miptliot/edx-platform,philanthropy-u/edx-platform,eemirtekin/edx-platform,pelikanchik/edx-platform,MSOpenTech/edx-platform,mushtaqak/edx-platform,marcore/edx-platform,franosincic/edx-platform,chrisndodge/edx-platform,LICEF/edx-platform,Edraak/edx-platform,ampax/edx-platform,hmcmooc/muddx-platform,zhenzhai/edx-platform,motion2015/a3,jamiefolsom/edx-platform,valtech-mooc/edx-platform,Stanford-Online/edx-platform,chauhanhardik/populo_2,jazkarta/edx-platform-for-isc,ESOedX/edx-platform,cecep-edu/edx-platform,olexiim/edx-platform,utecuy/edx-platform,JCBarahona/edX,Edraak/edx-platform,cecep-edu/edx-platform,bdero/edx-platform,beacloudgenius/edx-platform,bitifirefly/edx-platform,adoosii/edx-platform,doismellburning/edx-platform,benpatterson/edx-platform,jazztpt/edx-platform,EduPepperPDTesting/pepper2013-testing,tanmaykm/edx-platform,miptliot/edx-platform,benpatterson/edx-platform,10clouds/edx-platform,dsajkl/123,RPI-OPENEDX/edx-platform,jolyonb/edx-platform,bitifirefly/edx-platform,gymnasium/edx-platform,zerobatu/edx-platform,Semi-global/edx-platform,shabab12/edx-platform,vismartltd/edx-platform,pabloborrego93/edx-platform,edry/edx-platform,ahmadio/edx-platform,mjg2203/edx-platform-seas,naresh21/synergetics-edx-platform,kamalx/edx-platform,LICEF/edx-platform,mjirayu/sit_academy,andyzsf/edx,playm2mboy/edx-platform,beni55/edx-platform,MakeHer/edx-platform,10clouds/edx-platform,gymnasium/edx-platform,eduNEXT/edunext-platform,chauhanhardik/populo_2,cselis86/edx-platform,abdoosh00/edraak,chauhanhardik/populo,nttks/edx-platform,franosincic/edx-platform,dsajkl/reqiop,LearnEra/LearnEraPlaftform,jolyonb/edx-platform,zhenzhai/edx-platform,pdehaye/theming-edx-platform,pepeportela/edx-platform,EduPepperPD/pepper2013,appsembler/edx-platform,utecuy/edx-platform,iivic/BoiseStateX,openfun/edx-platform,analyseuc3m/ANALYSE-v1,edry/edx-platform,eduNEXT/edx-platform,shubhdev/edx-platform,arifsetiawan/edx-platform,sudheerchintala/LearnEraPlatForm,halvertoluke/edx-platform,hkawasaki/kawasaki-aio8-2,hkawasaki/kawasaki-aio8-0,hkawasaki/kawasaki-aio8-0,vismartltd/edx-platform,halvertoluke/edx-platform,WatanabeYasumasa/edx-platform,UXE/local-edx,solashirai/edx-platform,jamesblunt/edx-platform,kxliugang/edx-platform,inares/edx-platform,SravanthiSinha/edx-platform,TsinghuaX/edx-platform,motion2015/a3,kmoocdev/edx-platform,Stanford-Online/edx-platform,4eek/edx-platform,ferabra/edx-platform,xinjiguaike/edx-platform,vasyarv/edx-platform,abdoosh00/edraak,nanolearning/edx-platform,bitifirefly/edx-platform,knehez/edx-platform,romain-li/edx-platform,motion2015/edx-platform,zadgroup/edx-platform,openfun/edx-platform,alu042/edx-platform,hamzehd/edx-platform,jswope00/griffinx,gsehub/edx-platform,dsajkl/123,IndonesiaX/edx-platform,kalebhartje/schoolboost,peterm-itr/edx-platform,rue89-tech/edx-platform,mitocw/edx-platform,UOMx/edx-platform,hkawasaki/kawasaki-aio8-2,mushtaqak/edx-platform,cecep-edu/edx-platform,bigdatauniversity/edx-platform,zerobatu/edx-platform,CredoReference/edx-platform,LICEF/edx-platform,hastexo/edx-platform,mtlchun/edx,wwj718/edx-platform,UXE/local-edx,PepperPD/edx-pepper-platform,JCBarahona/edX,ahmedaljazzar/edx-platform,SivilTaram/edx-platform,zubair-arbi/edx-platform,ESOedX/edx-platform,Softmotions/edx-platform,EDUlib/edx-platform,zadgroup/edx-platform,rismalrv/edx-platform,knehez/edx-platform,jazkarta/edx-platform-for-isc,kamalx/edx-platform,SravanthiSinha/edx-platform,y12uc231/edx-platform,kmoocdev/edx-platform,tanmaykm/edx-platform,eemirtekin/edx-platform,shubhdev/openedx,shurihell/testasia,mushtaqak/edx-platform,mcgachey/edx-platform,DefyVentures/edx-platform,xingyepei/edx-platform,fintech-circle/edx-platform,xuxiao19910803/edx-platform,dcosentino/edx-platform,devs1991/test_edx_docmode,ubc/edx-platform,LearnEra/LearnEraPlaftform,Livit/Livit.Learn.EdX,pku9104038/edx-platform,ak2703/edx-platform,CourseTalk/edx-platform,shurihell/testasia,appliedx/edx-platform,xuxiao19910803/edx,cselis86/edx-platform,ampax/edx-platform-backup,rationalAgent/edx-platform-custom,nikolas/edx-platform,nagyistoce/edx-platform,chudaol/edx-platform,jelugbo/tundex,praveen-pal/edx-platform,OmarIthawi/edx-platform,shubhdev/edx-platform,nikolas/edx-platform,ahmadiga/min_edx,edx-solutions/edx-platform,peterm-itr/edx-platform,jazkarta/edx-platform,atsolakid/edx-platform,Semi-global/edx-platform,JCBarahona/edX,nagyistoce/edx-platform,a-parhom/edx-platform,longmen21/edx-platform,benpatterson/edx-platform,IndonesiaX/edx-platform,gsehub/edx-platform,motion2015/a3,jbassen/edx-platform,shubhdev/edx-platform,mcgachey/edx-platform,MSOpenTech/edx-platform,pepeportela/edx-platform,mbareta/edx-platform-ft,martynovp/edx-platform,syjeon/new_edx,Shrhawk/edx-platform,ampax/edx-platform,amir-qayyum-khan/edx-platform,ahmadio/edx-platform,nikolas/edx-platform,AkA84/edx-platform,hamzehd/edx-platform,vismartltd/edx-platform,shubhdev/openedx,cpennington/edx-platform,romain-li/edx-platform,jonathan-beard/edx-platform,ovnicraft/edx-platform,B-MOOC/edx-platform,pku9104038/edx-platform,pomegranited/edx-platform,kxliugang/edx-platform,Edraak/edx-platform,jjmiranda/edx-platform,alu042/edx-platform,solashirai/edx-platform,stvstnfrd/edx-platform,nagyistoce/edx-platform,jazkarta/edx-platform-for-isc,Semi-global/edx-platform,benpatterson/edx-platform,syjeon/new_edx,romain-li/edx-platform,sameetb-cuelogic/edx-platform-test,Shrhawk/edx-platform,inares/edx-platform,10clouds/edx-platform,kamalx/edx-platform,halvertoluke/edx-platform,Edraak/circleci-edx-platform,gymnasium/edx-platform,appsembler/edx-platform,itsjeyd/edx-platform,zofuthan/edx-platform,devs1991/test_edx_docmode,MSOpenTech/edx-platform,apigee/edx-platform,jazkarta/edx-platform-for-isc,chauhanhardik/populo_2,procangroup/edx-platform,tiagochiavericosta/edx-platform,TsinghuaX/edx-platform,shubhdev/edxOnBaadal,mtlchun/edx,vasyarv/edx-platform,amir-qayyum-khan/edx-platform,praveen-pal/edx-platform,lduarte1991/edx-platform,analyseuc3m/ANALYSE-v1,TeachAtTUM/edx-platform,jonathan-beard/edx-platform,eemirtekin/edx-platform,mtlchun/edx,LearnEra/LearnEraPlaftform,tiagochiavericosta/edx-platform,xingyepei/edx-platform,rue89-tech/edx-platform,nanolearningllc/edx-platform-cypress-2,ferabra/edx-platform,alexthered/kienhoc-platform,sameetb-cuelogic/edx-platform-test,jazztpt/edx-platform,kmoocdev/edx-platform,jbzdak/edx-platform,PepperPD/edx-pepper-platform,Softmotions/edx-platform,shashank971/edx-platform,devs1991/test_edx_docmode,jelugbo/tundex,dkarakats/edx-platform,appliedx/edx-platform,nanolearningllc/edx-platform-cypress-2,alexthered/kienhoc-platform,atsolakid/edx-platform,vasyarv/edx-platform,motion2015/edx-platform,edx-solutions/edx-platform,doismellburning/edx-platform,pku9104038/edx-platform,nagyistoce/edx-platform,zofuthan/edx-platform,analyseuc3m/ANALYSE-v1,fly19890211/edx-platform,eduNEXT/edunext-platform,pku9104038/edx-platform,yokose-ks/edx-platform,appliedx/edx-platform,kalebhartje/schoolboost,eduNEXT/edx-platform,etzhou/edx-platform,louyihua/edx-platform,sameetb-cuelogic/edx-platform-test,jswope00/griffinx,edx-solutions/edx-platform,Ayub-Khan/edx-platform,SivilTaram/edx-platform,alu042/edx-platform,solashirai/edx-platform,shashank971/edx-platform,kmoocdev/edx-platform,kursitet/edx-platform,hmcmooc/muddx-platform,gsehub/edx-platform,DNFcode/edx-platform,jelugbo/tundex,jamesblunt/edx-platform,peterm-itr/edx-platform,Kalyzee/edx-platform,jruiperezv/ANALYSE,BehavioralInsightsTeam/edx-platform,Ayub-Khan/edx-platform,hmcmooc/muddx-platform,olexiim/edx-platform,shabab12/edx-platform,dsajkl/123,kursitet/edx-platform,nttks/edx-platform,shurihell/testasia,AkA84/edx-platform,shubhdev/openedx,Ayub-Khan/edx-platform,ubc/edx-platform,EduPepperPD/pepper2013,cecep-edu/edx-platform,shubhdev/edxOnBaadal,ZLLab-Mooc/edx-platform,amir-qayyum-khan/edx-platform,mjg2203/edx-platform-seas,morenopc/edx-platform,martynovp/edx-platform,rhndg/openedx,edx/edx-platform,stvstnfrd/edx-platform,kmoocdev2/edx-platform,ampax/edx-platform,EduPepperPD/pepper2013,chand3040/cloud_that,SivilTaram/edx-platform,kamalx/edx-platform,jswope00/GAI,dcosentino/edx-platform,4eek/edx-platform,4eek/edx-platform,mcgachey/edx-platform,4eek/edx-platform,kursitet/edx-platform,appliedx/edx-platform,zhenzhai/edx-platform,martynovp/edx-platform,kmoocdev/edx-platform,arifsetiawan/edx-platform,jazztpt/edx-platform,mitocw/edx-platform,shabab12/edx-platform,leansoft/edx-platform,xuxiao19910803/edx-platform,J861449197/edx-platform,IndonesiaX/edx-platform,synergeticsedx/deployment-wipro,cognitiveclass/edx-platform,Edraak/edx-platform,morenopc/edx-platform,antonve/s4-project-mooc,msegado/edx-platform,SravanthiSinha/edx-platform,sudheerchintala/LearnEraPlatForm,torchingloom/edx-platform,hkawasaki/kawasaki-aio8-2,nanolearningllc/edx-platform-cypress-2,longmen21/edx-platform,antoviaque/edx-platform,praveen-pal/edx-platform,morpheby/levelup-by,WatanabeYasumasa/edx-platform,kmoocdev2/edx-platform,chauhanhardik/populo_2,vasyarv/edx-platform,sameetb-cuelogic/edx-platform-test,ahmadiga/min_edx,EDUlib/edx-platform,PepperPD/edx-pepper-platform,Edraak/edx-platform,JioEducation/edx-platform,WatanabeYasumasa/edx-platform,doismellburning/edx-platform,adoosii/edx-platform,rhndg/openedx,angelapper/edx-platform,tiagochiavericosta/edx-platform,hmcmooc/muddx-platform,etzhou/edx-platform,don-github/edx-platform,CredoReference/edx-platform,zerobatu/edx-platform,hamzehd/edx-platform,nanolearningllc/edx-platform-cypress-2,proversity-org/edx-platform,JioEducation/edx-platform,lduarte1991/edx-platform,fintech-circle/edx-platform,leansoft/edx-platform,ampax/edx-platform-backup,eduNEXT/edx-platform,waheedahmed/edx-platform,yokose-ks/edx-platform,zofuthan/edx-platform,beacloudgenius/edx-platform,kmoocdev2/edx-platform,adoosii/edx-platform,chrisndodge/edx-platform,auferack08/edx-platform,ampax/edx-platform,teltek/edx-platform,AkA84/edx-platform,kalebhartje/schoolboost,JioEducation/edx-platform,dcosentino/edx-platform,carsongee/edx-platform,motion2015/a3,tanmaykm/edx-platform,hkawasaki/kawasaki-aio8-0,lduarte1991/edx-platform,edry/edx-platform,torchingloom/edx-platform,valtech-mooc/edx-platform,prarthitm/edxplatform,mitocw/edx-platform,prarthitm/edxplatform,nanolearning/edx-platform,pomegranited/edx-platform,y12uc231/edx-platform,TeachAtTUM/edx-platform,adoosii/edx-platform,synergeticsedx/deployment-wipro,knehez/edx-platform,rismalrv/edx-platform,zhenzhai/edx-platform,arifsetiawan/edx-platform,jonathan-beard/edx-platform,jelugbo/tundex,abdoosh00/edraak,AkA84/edx-platform,arbrandes/edx-platform,CourseTalk/edx-platform,EduPepperPD/pepper2013,rhndg/openedx,openfun/edx-platform,beni55/edx-platform,antonve/s4-project-mooc,jazztpt/edx-platform,hkawasaki/kawasaki-aio8-1,ahmadio/edx-platform,unicri/edx-platform,morpheby/levelup-by,atsolakid/edx-platform,wwj718/ANALYSE,ovnicraft/edx-platform,hamzehd/edx-platform,eestay/edx-platform,shurihell/testasia,xuxiao19910803/edx,edx/edx-platform,wwj718/edx-platform,Lektorium-LLC/edx-platform,utecuy/edx-platform,vikas1885/test1,pabloborrego93/edx-platform,Softmotions/edx-platform,etzhou/edx-platform,shashank971/edx-platform,olexiim/edx-platform,abdoosh00/edx-rtl-final,shubhdev/edxOnBaadal,nanolearningllc/edx-platform-cypress,morpheby/levelup-by,solashirai/edx-platform,dkarakats/edx-platform,mahendra-r/edx-platform,jzoldak/edx-platform,don-github/edx-platform,valtech-mooc/edx-platform,wwj718/edx-platform,deepsrijit1105/edx-platform,Shrhawk/edx-platform,rismalrv/edx-platform,Unow/edx-platform,utecuy/edx-platform,mushtaqak/edx-platform,zubair-arbi/edx-platform,AkA84/edx-platform,MSOpenTech/edx-platform,arifsetiawan/edx-platform,SravanthiSinha/edx-platform,ahmedaljazzar/edx-platform,philanthropy-u/edx-platform,ZLLab-Mooc/edx-platform,DefyVentures/edx-platform,leansoft/edx-platform,bdero/edx-platform,zadgroup/edx-platform,polimediaupv/edx-platform,rismalrv/edx-platform,jamesblunt/edx-platform,pomegranited/edx-platform,shubhdev/openedx,wwj718/ANALYSE,torchingloom/edx-platform,10clouds/edx-platform,nanolearningllc/edx-platform-cypress,shubhdev/edx-platform,jbassen/edx-platform,cognitiveclass/edx-platform,cyanna/edx-platform,valtech-mooc/edx-platform,andyzsf/edx,rue89-tech/edx-platform,kursitet/edx-platform,mtlchun/edx,raccoongang/edx-platform,jbassen/edx-platform,a-parhom/edx-platform,leansoft/edx-platform,cpennington/edx-platform,ak2703/edx-platform,y12uc231/edx-platform,arbrandes/edx-platform,Endika/edx-platform,bigdatauniversity/edx-platform,arbrandes/edx-platform,dsajkl/reqiop,eemirtekin/edx-platform,abdoosh00/edraak,openfun/edx-platform,eestay/edx-platform,shubhdev/openedx,ferabra/edx-platform,msegado/edx-platform,IONISx/edx-platform,jruiperezv/ANALYSE,jbzdak/edx-platform,playm2mboy/edx-platform,bigdatauniversity/edx-platform,yokose-ks/edx-platform,jswope00/GAI,knehez/edx-platform,Ayub-Khan/edx-platform,motion2015/a3,beacloudgenius/edx-platform,dsajkl/reqiop,zerobatu/edx-platform,jolyonb/edx-platform,andyzsf/edx,hkawasaki/kawasaki-aio8-0,fintech-circle/edx-platform,eduNEXT/edunext-platform,IONISx/edx-platform,jbzdak/edx-platform,zerobatu/edx-platform,ZLLab-Mooc/edx-platform,olexiim/edx-platform,appsembler/edx-platform,carsongee/edx-platform,deepsrijit1105/edx-platform,ovnicraft/edx-platform,UOMx/edx-platform,jazkarta/edx-platform-for-isc,chand3040/cloud_that,louyihua/edx-platform,jbzdak/edx-platform,JioEducation/edx-platform,alu042/edx-platform,doismellburning/edx-platform,don-github/edx-platform,playm2mboy/edx-platform,hamzehd/edx-platform,DefyVentures/edx-platform,antonve/s4-project-mooc,pdehaye/theming-edx-platform,auferack08/edx-platform,Edraak/circleci-edx-platform,IITBinterns13/edx-platform-dev,eduNEXT/edx-platform,EduPepperPDTesting/pepper2013-testing,arbrandes/edx-platform,miptliot/edx-platform,vikas1885/test1,ZLLab-Mooc/edx-platform,LICEF/edx-platform,ovnicraft/edx-platform,louyihua/edx-platform,marcore/edx-platform,a-parhom/edx-platform,jruiperezv/ANALYSE,nttks/jenkins-test,romain-li/edx-platform,caesar2164/edx-platform,jazkarta/edx-platform,longmen21/edx-platform,CourseTalk/edx-platform,defance/edx-platform,teltek/edx-platform,doganov/edx-platform,carsongee/edx-platform,syjeon/new_edx,nttks/edx-platform,unicri/edx-platform,doganov/edx-platform,ubc/edx-platform,mjg2203/edx-platform-seas,rationalAgent/edx-platform-custom,gsehub/edx-platform,apigee/edx-platform,ahmedaljazzar/edx-platform,zubair-arbi/edx-platform,yokose-ks/edx-platform,tanmaykm/edx-platform,chudaol/edx-platform,jonathan-beard/edx-platform,edx-solutions/edx-platform,EDUlib/edx-platform,Edraak/edraak-platform,inares/edx-platform,ahmadiga/min_edx,raccoongang/edx-platform,CredoReference/edx-platform,shubhdev/edxOnBaadal,mushtaqak/edx-platform,zofuthan/edx-platform,DefyVentures/edx-platform,angelapper/edx-platform,cognitiveclass/edx-platform,RPI-OPENEDX/edx-platform,Endika/edx-platform,antonve/s4-project-mooc,chand3040/cloud_that,waheedahmed/edx-platform,cognitiveclass/edx-platform,chand3040/cloud_that,Livit/Livit.Learn.EdX,vikas1885/test1,pdehaye/theming-edx-platform,mtlchun/edx,jamesblunt/edx-platform,kmoocdev2/edx-platform,nttks/jenkins-test,nanolearningllc/edx-platform-cypress,unicri/edx-platform,SivilTaram/edx-platform,beni55/edx-platform,J861449197/edx-platform,fly19890211/edx-platform,IndonesiaX/edx-platform,abdoosh00/edx-rtl-final,ampax/edx-platform-backup,utecuy/edx-platform,jswope00/griffinx,stvstnfrd/edx-platform,nikolas/edx-platform,xinjiguaike/edx-platform,DNFcode/edx-platform,valtech-mooc/edx-platform,amir-qayyum-khan/edx-platform,Kalyzee/edx-platform,kxliugang/edx-platform,MakeHer/edx-platform,mbareta/edx-platform-ft,waheedahmed/edx-platform,tiagochiavericosta/edx-platform,shashank971/edx-platform,nanolearningllc/edx-platform-cypress,msegado/edx-platform,kamalx/edx-platform,doganov/edx-platform,jswope00/GAI,motion2015/edx-platform,defance/edx-platform,IITBinterns13/edx-platform-dev,martynovp/edx-platform,EduPepperPD/pepper2013,Endika/edx-platform,polimediaupv/edx-platform,ZLLab-Mooc/edx-platform,simbs/edx-platform,alexthered/kienhoc-platform,UOMx/edx-platform,eemirtekin/edx-platform,EDUlib/edx-platform,martynovp/edx-platform,EduPepperPDTesting/pepper2013-testing,dcosentino/edx-platform,marcore/edx-platform,Shrhawk/edx-platform,Semi-global/edx-platform,beni55/edx-platform,Edraak/edraak-platform,4eek/edx-platform,antoviaque/edx-platform,JCBarahona/edX,mjirayu/sit_academy,cpennington/edx-platform,zhenzhai/edx-platform,fly19890211/edx-platform,caesar2164/edx-platform,cyanna/edx-platform,chauhanhardik/populo_2,olexiim/edx-platform,mbareta/edx-platform-ft,kursitet/edx-platform,pomegranited/edx-platform,hkawasaki/kawasaki-aio8-2,procangroup/edx-platform,cyanna/edx-platform,mjg2203/edx-platform-seas,nttks/edx-platform,rue89-tech/edx-platform,xingyepei/edx-platform,MakeHer/edx-platform,hastexo/edx-platform,ferabra/edx-platform,wwj718/ANALYSE,stvstnfrd/edx-platform,SivilTaram/edx-platform,edry/edx-platform,mahendra-r/edx-platform,WatanabeYasumasa/edx-platform,zofuthan/edx-platform,Lektorium-LLC/edx-platform,cyanna/edx-platform,hastexo/edx-platform,ESOedX/edx-platform,nanolearningllc/edx-platform-cypress-2,MSOpenTech/edx-platform,Kalyzee/edx-platform,jswope00/GAI,itsjeyd/edx-platform,dsajkl/reqiop,EduPepperPDTesting/pepper2013-testing,kxliugang/edx-platform,pabloborrego93/edx-platform,abdoosh00/edx-rtl-final,cognitiveclass/edx-platform,CredoReference/edx-platform,cselis86/edx-platform,cpennington/edx-platform,TeachAtTUM/edx-platform,defance/edx-platform,TeachAtTUM/edx-platform,DefyVentures/edx-platform,caesar2164/edx-platform,wwj718/edx-platform,y12uc231/edx-platform,peterm-itr/edx-platform,mcgachey/edx-platform,devs1991/test_edx_docmode,unicri/edx-platform,ESOedX/edx-platform,jswope00/griffinx,raccoongang/edx-platform,BehavioralInsightsTeam/edx-platform,jjmiranda/edx-platform,jamiefolsom/edx-platform,arifsetiawan/edx-platform,Unow/edx-platform,OmarIthawi/edx-platform,ubc/edx-platform,solashirai/edx-platform,chrisndodge/edx-platform,bitifirefly/edx-platform,waheedahmed/edx-platform
common/djangoapps/track/models.py
common/djangoapps/track/models.py
from django.db import models


class TrackingLog(models.Model):
    """Defines the fields that are stored in the tracking log database"""
    dtcreated = models.DateTimeField('creation date', auto_now_add=True)
    username = models.CharField(max_length=32, blank=True)
    ip = models.CharField(max_length=32, blank=True)
    event_source = models.CharField(max_length=32)
    event_type = models.CharField(max_length=512, blank=True)
    event = models.TextField(blank=True)
    agent = models.CharField(max_length=256, blank=True)
    page = models.CharField(max_length=512, blank=True, null=True)
    time = models.DateTimeField('event time')
    host = models.CharField(max_length=64, blank=True)

    def __unicode__(self):
        fmt = (
            u"[{self.time}] {self.username}@{self.ip}: "
            u"{self.event_source}| {self.event_type} | "
            u"{self.page} | {self.event}"
        )
        return fmt.format(self=self)

from django.db import models
from django.db import models


class TrackingLog(models.Model):
    dtcreated = models.DateTimeField('creation date', auto_now_add=True)
    username = models.CharField(max_length=32, blank=True)
    ip = models.CharField(max_length=32, blank=True)
    event_source = models.CharField(max_length=32)
    event_type = models.CharField(max_length=512, blank=True)
    event = models.TextField(blank=True)
    agent = models.CharField(max_length=256, blank=True)
    page = models.CharField(max_length=512, blank=True, null=True)
    time = models.DateTimeField('event time')
    host = models.CharField(max_length=64, blank=True)

    def __unicode__(self):
        s = "[%s] %s@%s: %s | %s | %s | %s" % (self.time, self.username, self.ip, self.event_source, self.event_type, self.page, self.event)
        return s
agpl-3.0
Python
ed6b086f785c4856ef73484ffc2082a0fba200b8
Update accessible classes
oscar6echo/ezhc,oscar6echo/ezhc,oscar6echo/ezhc
ezhc/__init__.py
ezhc/__init__.py
from ._config import load_js_libs
from ._highcharts import Highcharts
from ._highstock import Highstock
from ._global_options import GlobalOptions
from ._theme import Theme
from . import sample
from . import build
from ._clock import Clock

__all__ = ['Highcharts',
           'Highstock',
           'GlobalOptions',
           'Theme',
           'sample',
           'build',
           'Clock',
           ]

load_js_libs()

from ._config import load_js_libs
from ._highcharts import Highcharts
from ._highstock import Highstock
from . import sample
from . import build
from ._clock import Clock

__all__ = ['Highcharts',
           'Highstock',
           'sample',
           'build',
           'Clock',
           ]

load_js_libs()
mit
Python
dec3d29f8482cb71f5ea3337622460a38b4f9124
Set the default to production
chouseknecht/galaxy,chouseknecht/galaxy,chouseknecht/galaxy,chouseknecht/galaxy
galaxy/__init__.py
galaxy/__init__.py
# (c) 2012-2016, Ansible by Red Hat
#
# This file is part of Ansible Galaxy
#
# Ansible Galaxy is free software: you can redistribute it and/or modify
# it under the terms of the Apache License as published by
# the Apache Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# Ansible Galaxy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Apache License for more details.
#
# You should have received a copy of the Apache License
# along with Galaxy. If not, see <http://www.apache.org/licenses/>.

import os.path
import sys
import warnings

__version__ = '2.2.0'
__all__ = ['__version__']


def find_commands(management_dir):
    # Modified version of function from django/core/management/__init__.py.
    command_dir = os.path.join(management_dir, 'commands')
    commands = []
    try:
        for f in os.listdir(command_dir):
            if f.startswith('_'):
                continue
            elif f.endswith('.py') and f[:-3] not in commands:
                commands.append(f[:-3])
            elif f.endswith('.pyc') and f[:-4] not in commands:
                commands.append(f[:-4])
    except OSError:
        pass
    return commands


def prepare_env():
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'galaxy.settings.production')
    local_site_packages = os.path.join(
        os.path.dirname(__file__), 'lib', 'site-packages')
    sys.path.insert(0, local_site_packages)
    from django.conf import settings
    if not settings.DEBUG:
        warnings.simplefilter('ignore', DeprecationWarning)
    # import django.utils
    settings.version = __version__


def manage():
    # Prepare the galaxy environment.
    prepare_env()
    # Now run the command (or display the version).
    from django.core.management import execute_from_command_line
    if len(sys.argv) >= 2 and sys.argv[1] in ('version', '--version'):
        sys.stdout.write('galaxy-%s\n' % __version__)
    else:
        execute_from_command_line(sys.argv)

# (c) 2012-2016, Ansible by Red Hat
#
# This file is part of Ansible Galaxy
#
# Ansible Galaxy is free software: you can redistribute it and/or modify
# it under the terms of the Apache License as published by
# the Apache Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# Ansible Galaxy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Apache License for more details.
#
# You should have received a copy of the Apache License
# along with Galaxy. If not, see <http://www.apache.org/licenses/>.

import os.path
import sys
import warnings

__version__ = '2.2.0'
__all__ = ['__version__']


def find_commands(management_dir):
    # Modified version of function from django/core/management/__init__.py.
    command_dir = os.path.join(management_dir, 'commands')
    commands = []
    try:
        for f in os.listdir(command_dir):
            if f.startswith('_'):
                continue
            elif f.endswith('.py') and f[:-3] not in commands:
                commands.append(f[:-3])
            elif f.endswith('.pyc') and f[:-4] not in commands:
                commands.append(f[:-4])
    except OSError:
        pass
    return commands


def prepare_env():
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'galaxy.settings.default')
    local_site_packages = os.path.join(
        os.path.dirname(__file__), 'lib', 'site-packages')
    sys.path.insert(0, local_site_packages)
    from django.conf import settings
    if not settings.DEBUG:
        warnings.simplefilter('ignore', DeprecationWarning)
    # import django.utils
    settings.version = __version__


def manage():
    # Prepare the galaxy environment.
    prepare_env()
    # Now run the command (or display the version).
    from django.core.management import execute_from_command_line
    if len(sys.argv) >= 2 and sys.argv[1] in ('version', '--version'):
        sys.stdout.write('galaxy-%s\n' % __version__)
    else:
        execute_from_command_line(sys.argv)
apache-2.0
Python
a7f2a211edad68ed2042266eae05bc3153904580
adjust simulation_exp3.py
ntucllab/striatum
simulation/simulation_exp3.py
simulation/simulation_exp3.py
from striatum.storage import history
from striatum.storage import model
from striatum.bandit import exp3
import simulation as sm
import numpy as np
import matplotlib.pyplot as plt


def main():
    times = 1000
    d = 5
    actions = [1, 2, 3, 4, 5]

    # Parameter tunning
    tunning_region = np.arange(0.001, 1, 0.03)
    ctr_tunning = np.zeros(shape=(len(tunning_region), 1))
    context1, desired_action1 = sm.data_simulation(times, d, actions)
    i = 0
    for gamma in tunning_region:
        historystorage = history.MemoryHistoryStorage()
        modelstorage = model.MemoryModelStorage()
        policy = exp3.Exp3(actions, historystorage, modelstorage, gamma)
        seq_error = sm.policy_evaluation(policy, context1, desired_action1)
        ctr_tunning[i] = times - seq_error[-1]
        i += 1
    ctr_tunning /= times
    gamma_opt = tunning_region[np.argmax(ctr_tunning)]
    sm.tuning_plot(tunning_region, ctr_tunning, label="gamma changes")

    # Regret Analysis
    times = 10000
    context2, desired_action2 = sm.data_simulation(times, d, actions)
    historystorage = history.MemoryHistoryStorage()
    modelstorage = model.MemoryModelStorage()
    policy = exp3.Exp3(actions, historystorage, modelstorage, gamma=gamma_opt)
    regret = sm.regret_calculation(sm.policy_evaluation(policy, context2, desired_action2))
    sm.regret_plot(times, regret, label='gamma = ' + str(gamma_opt))


if __name__ == '__main__':
    main()

from striatum.storage import history
from striatum.storage import model
from striatum.bandit import exp3
import simulation as sm
import numpy as np
import matplotlib.pyplot as plt


def main():
    times = 1000
    d = 5
    actions = [1, 2, 3, 4, 5]

    # Parameter tunning
    tunning_region = np.arange(0.001, 1, 0.03)
    ctr_tunning = np.zeros(shape=(len(tunning_region), 1))
    context1, desired_action1 = sm.data_simulation(times, d, actions)
    i = 0
    for gamma in tunning_region:
        historystorage = history.MemoryHistoryStorage()
        modelstorage = model.MemoryModelStorage()
        policy = exp3.Exp3(actions, historystorage, modelstorage, gamma)
        seq_error = sm.policy_evaluation(policy, context1, desired_action1)
        ctr_tunning[i] = times - seq_error[-1]
        i += 1
    ctr_tunning /= times
    gamma_opt = tunning_region[np.argmax(ctr_tunning)]

    # Plot the parameter tunning result
    plt.plot(tunning_region, ctr_tunning, 'ro-', label="gamma changes")
    plt.xlabel('parameter value')
    plt.ylabel('CTR')
    plt.legend()
    axes = plt.gca()
    axes.set_ylim([0, 1])
    plt.title("Parameter Tunning Curve - EXP3")
    plt.show()

    # Regret Analysis
    times = 10000
    context2, desired_action2 = sm.data_simulation(times, d, actions)
    historystorage = history.MemoryHistoryStorage()
    modelstorage = model.MemoryModelStorage()
    policy = exp3.Exp3(actions, historystorage, modelstorage, gamma=gamma_opt)
    seq_error = sm.policy_evaluation(policy, context2, desired_action2)
    seq_error = [x / y for x, y in zip(seq_error, range(1, times + 1))]

    # Plot the regret analysis
    plt.plot(range(times), seq_error, 'r-', label='gamma = ' + str(gamma_opt))
    plt.xlabel('time')
    plt.ylabel('regret')
    plt.legend()
    axes = plt.gca()
    axes.set_ylim([0, 1])
    plt.title("Regret Bound with respect to T - EXP3")
    plt.show()


if __name__ == '__main__':
    main()
bsd-2-clause
Python
20d41f31e40a8d20902fcfea4543fa9c2c4d8cae
add dummy import function, so modulefinder can find our tables.
googlefonts/fonttools,fonttools/fonttools
Lib/fontTools/ttLib/tables/__init__.py
Lib/fontTools/ttLib/tables/__init__.py
def _moduleFinderHint():
    import B_A_S_E_
    import C_F_F_
    import D_S_I_G_
    import DefaultTable
    import G_D_E_F_
    import G_P_O_S_
    import G_S_U_B_
    import J_S_T_F_
    import L_T_S_H_
    import O_S_2f_2
    import T_S_I_B_
    import T_S_I_D_
    import T_S_I_J_
    import T_S_I_P_
    import T_S_I_S_
    import T_S_I_V_
    import T_S_I__0
    import T_S_I__1
    import T_S_I__2
    import T_S_I__3
    import T_S_I__5
    import __init__
    import _c_m_a_p
    import _c_v_t
    import _f_p_g_m
    import _g_a_s_p
    import _g_l_y_f
    import _h_d_m_x
    import _h_e_a_d
    import _h_h_e_a
    import _h_m_t_x
    import _k_e_r_n
    import _l_o_c_a
    import _m_a_x_p
    import _n_a_m_e
    import _p_o_s_t
    import _p_r_e_p
    import _v_h_e_a
    import _v_m_t_x
    import asciiTable
    import otBase
    import otConverters
    import otData
    import otTables
    import ttProgram

"""Empty __init__.py file to signal Python this directory is a package.
(It can't be completely empty since WinZip seems to skip empty files.)
"""
mit
Python
c24979627a8a2282a297704b735b1445b56dbce6
Bump version. [skip ci]
mindflayer/python-mocket,mocketize/python-mocket
mocket/__init__.py
mocket/__init__.py
try:  # Py2
    from mocket import mocketize, Mocket, MocketEntry, Mocketizer
except ImportError:  # Py3
    from mocket.mocket import mocketize, Mocket, MocketEntry, Mocketizer

__all__ = (mocketize, Mocket, MocketEntry, Mocketizer)
__version__ = '2.7.2'

try:  # Py2
    from mocket import mocketize, Mocket, MocketEntry, Mocketizer
except ImportError:  # Py3
    from mocket.mocket import mocketize, Mocket, MocketEntry, Mocketizer

__all__ = (mocketize, Mocket, MocketEntry, Mocketizer)
__version__ = '2.7.1'
bsd-3-clause
Python
3e96eaeb9bb722d24fe4e589c49e52d32e8af1aa
Bump version.
mocketize/python-mocket,mindflayer/python-mocket
mocket/__init__.py
mocket/__init__.py
try:  # Py2
    from mocket import mocketize, Mocket, MocketEntry, Mocketizer
except ImportError:  # Py3
    from mocket.mocket import mocketize, Mocket, MocketEntry, Mocketizer

__all__ = (mocketize, Mocket, MocketEntry, Mocketizer)
__version__ = '3.7.1'

try:  # Py2
    from mocket import mocketize, Mocket, MocketEntry, Mocketizer
except ImportError:  # Py3
    from mocket.mocket import mocketize, Mocket, MocketEntry, Mocketizer

__all__ = (mocketize, Mocket, MocketEntry, Mocketizer)
__version__ = '3.7.0'
bsd-3-clause
Python
812c40bfaf2ef4f59643c53e8b8ac76f20777423
Modify a debian example to archlinux
ronnix/fabtools,ahnjungho/fabtools,bitmonk/fabtools,davidcaste/fabtools,AMOSoft/fabtools,hagai26/fabtools,pombredanne/fabtools,fabtools/fabtools,prologic/fabtools,sociateru/fabtools,n0n0x/fabtools-python,wagigi/fabtools-python,badele/fabtools
fabtools/arch.py
fabtools/arch.py
"""
Archlinux packages
==================

This module provides tools to manage Archlinux packages
and repositories.

"""
from __future__ import with_statement

from fabric.api import hide, run, settings

from fabtools.utils import run_as_root


MANAGER = 'LC_ALL=C pacman'


def update_index(quiet=True):
    """
    Update pacman package definitions.
    """
    manager = MANAGER
    if quiet:
        with settings(hide('running', 'stdout', 'stderr', 'warnings'), warn_only=True):
            run_as_root("%(manager)s -Sy" % locals())
    else:
        run_as_root("%(manager)s -Sy" % locals())


def upgrade():
    """
    Upgrade all packages.
    """
    manager = MANAGER
    run_as_root("%(manager)s -Su" % locals(), pty=False)


def is_installed(pkg_name):
    """
    Check if a package is installed.
    """
    manager = MANAGER
    with settings(hide('running', 'stdout', 'stderr', 'warnings'), warn_only=True):
        res = run("%(manager)s -Q %(pkg_name)s" % locals())
        return res.succeeded


def install(packages, update=False, options=None):
    """
    Install one or more packages.

    If *update* is ``True``, the package definitions will be updated
    first, using :py:func:`~fabtools.arch.update_index`.

    Extra *options* may be passed to ``pacman`` if necessary.

    Example::

        import fabtools

        # Update index, then install a single package
        fabtools.arch.install('mongodb', update=True)

        # Install multiple packages
        fabtools.arch.install([
            'mongodb',
            'python-pymongo',
        ])

    """
    manager = MANAGER
    if update:
        update_index()
    if options is None:
        options = []
    if not isinstance(packages, basestring):
        packages = " ".join(packages)
    options.append("-q")
    options = " ".join(options)
    cmd = '%(manager)s -S %(options)s %(packages)s' % locals()
    run_as_root(cmd, pty=False)


def uninstall(packages, options=None):
    """
    Remove one or more packages.

    Extra *options* may be passed to ``pacman`` if necessary.
    """
    manager = MANAGER
    if options is None:
        options = []
    if not isinstance(packages, basestring):
        packages = " ".join(packages)
    options = " ".join(options)
    cmd = '%(manager)s -R %(options)s %(packages)s' % locals()
    run_as_root(cmd, pty=False)

"""
Archlinux packages
==================

This module provides tools to manage Archlinux packages
and repositories.

"""
from __future__ import with_statement

from fabric.api import hide, run, settings

from fabtools.utils import run_as_root


MANAGER = 'LC_ALL=C pacman'


def update_index(quiet=True):
    """
    Update pacman package definitions.
    """
    manager = MANAGER
    if quiet:
        with settings(hide('running', 'stdout', 'stderr', 'warnings'), warn_only=True):
            run_as_root("%(manager)s -Sy" % locals())
    else:
        run_as_root("%(manager)s -Sy" % locals())


def upgrade():
    """
    Upgrade all packages.
    """
    manager = MANAGER
    run_as_root("%(manager)s -Su" % locals(), pty=False)


def is_installed(pkg_name):
    """
    Check if a package is installed.
    """
    manager = MANAGER
    with settings(hide('running', 'stdout', 'stderr', 'warnings'), warn_only=True):
        res = run("%(manager)s -Q %(pkg_name)s" % locals())
        return res.succeeded


def install(packages, update=False, options=None):
    """
    Install one or more packages.

    If *update* is ``True``, the package definitions will be updated
    first, using :py:func:`~fabtools.arch.update_index`.

    Extra *options* may be passed to ``pacman`` if necessary.

    Example::

        import fabtools

        # Update index, then install a single package
        fabtools.arch.install('build-essential', update=True)

        # Install multiple packages
        fabtools.arch.install([
            'python-dev',
            'libxml2-dev',
        ])

    """
    manager = MANAGER
    if update:
        update_index()
    if options is None:
        options = []
    if not isinstance(packages, basestring):
        packages = " ".join(packages)
    options.append("-q")
    options = " ".join(options)
    cmd = '%(manager)s -S %(options)s %(packages)s' % locals()
    run_as_root(cmd, pty=False)


def uninstall(packages, options=None):
    """
    Remove one or more packages.

    Extra *options* may be passed to ``pacman`` if necessary.
    """
    manager = MANAGER
    if options is None:
        options = []
    if not isinstance(packages, basestring):
        packages = " ".join(packages)
    options = " ".join(options)
    cmd = '%(manager)s -R %(options)s %(packages)s' % locals()
    run_as_root(cmd, pty=False)
bsd-2-clause
Python
c9aff74371f176daa011514a05875f59c86a33c6
Refactor CLI argument parsing.
bsuweb/checker
checker/main.py
checker/main.py
#!/usr/bin/env python

import os
import sys
import subprocess
import argparse


class Checker:
    def __init__(self, path):
        if not os.path.isdir(path):
            sys.exit(1);
        self.path = os.path.realpath(path)
        self.jobs = self.getExecutableFiles(self.path)

    def getExecutableFiles(self,path):
        files = []
        for dirname, dirnames, filenames in os.walk(path):
            for filename in filenames:
                filename_path = os.path.join(dirname, filename)
                if os.access(filename_path,os.X_OK):
                    files.append(filename_path)
        return files;

    def run(self):
        for job in self.jobs:
            subprocess.call(job)


if __name__ == '__main__':
    # Add CLI parsing.
    parser = argparse.ArgumentParser(
        description = "A script that runs all the jobs in the given directory and keeps track of responses in an sqlite database.")
    parser.add_argument('path', metavar='jobs-directory', type=str, nargs=1,
                        help='Path to the directory where executable jobs are.')
    args = parser.parse_args()

    # Initialize and run the checker.
    check = Checker(args.path[0])
    check.run()

#!/usr/bin/env python

import os
import sys
import subprocess
import getopt


class Checker:
    def __init__(self, path):
        if not os.path.isdir(path):
            sys.exit(1);
        self.path = os.path.realpath(path)
        self.jobs = self.getExecutableFiles(self.path)

    def getExecutableFiles(self,path):
        files = []
        for dirname, dirnames, filenames in os.walk(path):
            for filename in filenames:
                filename_path = os.path.join(dirname, filename)
                if os.access(filename_path,os.X_OK):
                    files.append(filename_path)
        return files;

    def run(self):
        for job in self.jobs:
            subprocess.call(job)


if __name__ == '__main__':
    opts, path = getopt.getopt(sys.argv[1], "h")
    for opt, arg in opts:
        if opt == '-h':
            print './main.py /full/path/to/jobs'
            sys.exit()

    check = Checker(path)
    check.run()
mit
Python
3c231fb34f8adb1d290f2cfc0164dbea6049bc34
Reorder methods in test.py
ConsenSys/ethjsonrpc
test.py
test.py
from ethjsonrpc import EthJsonRpc

methods = [
    'web3_clientVersion',
    'net_version',
    'net_peerCount',
    'net_listening',
    'eth_protocolVersion',
    'eth_coinbase',
    'eth_mining',
    'eth_hashrate',
    'eth_gasPrice',
    'eth_accounts',
    'eth_blockNumber',
    'eth_getCompilers',
    'eth_newPendingTransactionFilter',
    'eth_getWork',
#    'shh_version',
#    'shh_newIdentity',
#    'shh_newGroup',
]

c = EthJsonRpc()
print len(methods)
for m in methods:
    meth = getattr(c, m)
    result = meth()
    print '%s: %s (%s)' % (m, result, type(result))

from ethjsonrpc import EthJsonRpc

methods = [
    'web3_clientVersion',
    'net_version',
    'net_listening',
    'net_peerCount',
    'eth_protocolVersion',
    'eth_coinbase',
    'eth_mining',
    'eth_hashrate',
    'eth_gasPrice',
    'eth_accounts',
    'eth_blockNumber',
    'eth_getCompilers',
    'eth_newPendingTransactionFilter',
    'eth_getWork',
#    'shh_version',
#    'shh_newIdentity',
#    'shh_newGroup',
]

c = EthJsonRpc()
print len(methods)
for m in methods:
    meth = getattr(c, m)
    result = meth()
    print '%s: %s (%s)' % (m, result, type(result))
unlicense
Python
7a1ad4ae0e3ec15c1fd5aec763476e482ea76ba8
Make a better version of shuffle
stephantul/somber
somber/components/utilities.py
somber/components/utilities.py
"""Utility functions."""
import numpy as np


class Scaler(object):
    """
    Scales data based on the mean and standard deviation.

    Attributes
    ----------
    mean : numpy array
        The columnwise mean of the data after scaling.
    std : numpy array
        The columnwise standard deviation of the data after scaling.
    is_fit : bool
        Indicates whether this scaler has been fit yet.

    """

    def __init__(self):
        """Initialize the scaler."""
        self.mean = None
        self.std = None
        self.is_fit = False

    def fit_transform(self, X):
        """First call fit, then call transform."""
        self.fit(X)
        return self.transform(X)

    def fit(self, X):
        """
        Fit the scaler based on some data.

        Takes the columnwise mean and standard deviation of the entire input
        array.
        If the array has more than 2 dimensions, it is flattened.

        Parameters
        ----------
        X : numpy array

        Returns
        -------
        scaled : numpy array
            A scaled version of said array.

        """
        if X.ndim > 2:
            X = X.reshape((np.prod(X.shape[:-1]), X.shape[-1]))
        self.mean = X.mean(0)
        self.std = X.std(0)
        self.is_fit = True
        return self

    def transform(self, X):
        """Transform your data to zero mean unit variance."""
        if not self.is_fit:
            raise ValueError("The scaler has not been fit yet.")
        return (X-self.mean) / (self.std + 10e-7)

    def inverse_transform(self, X):
        """Invert the transformation."""
        return ((X * self.std) + self.mean)


def shuffle(array):
    """Gpu/cpu-agnostic shuffle function."""
    return np.random.permutation(array)

"""Utility functions."""
import numpy as np


class Scaler(object):
    """
    Scales data based on the mean and standard deviation.

    Attributes
    ----------
    mean : numpy array
        The columnwise mean of the data after scaling.
    std : numpy array
        The columnwise standard deviation of the data after scaling.
    is_fit : bool
        Indicates whether this scaler has been fit yet.

    """

    def __init__(self):
        """Initialize the scaler."""
        self.mean = None
        self.std = None
        self.is_fit = False

    def fit_transform(self, X):
        """First call fit, then call transform."""
        self.fit(X)
        return self.transform(X)

    def fit(self, X):
        """
        Fit the scaler based on some data.

        Takes the columnwise mean and standard deviation of the entire input
        array.
        If the array has more than 2 dimensions, it is flattened.

        Parameters
        ----------
        X : numpy array

        Returns
        -------
        scaled : numpy array
            A scaled version of said array.

        """
        if X.ndim > 2:
            X = X.reshape((np.prod(X.shape[:-1]), X.shape[-1]))
        self.mean = X.mean(0)
        self.std = X.std(0)
        self.is_fit = True
        return self

    def transform(self, X):
        """Transform your data to zero mean unit variance."""
        if not self.is_fit:
            raise ValueError("The scaler has not been fit yet.")
        return (X-self.mean) / (self.std + 10e-7)

    def inverse_transform(self, X):
        """Invert the transformation."""
        return ((X * self.std) + self.mean)


def shuffle(array):
    """Gpu/cpu-agnostic shuffle function."""
    z = array.copy()
    np.random.shuffle(z)
    return z
mit
Python
00b57b668a5c68a209dac335915bbf2312df0580
Make sure tests run on local package
scott-maddox/openbandparams
test.py
test.py
#
#   Copyright (c) 2013-2014, Scott J Maddox
#
#   This file is part of openbandparams.
#
#   openbandparams is free software: you can redistribute it and/or modify
#   it under the terms of the GNU Affero General Public License as published
#   by the Free Software Foundation, either version 3 of the License, or
#   (at your option) any later version.
#
#   openbandparams is distributed in the hope that it will be useful,
#   but WITHOUT ANY WARRANTY; without even the implied warranty of
#   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#   GNU Affero General Public License for more details.
#
#   You should have received a copy of the GNU Affero General Public License
#   along with openbandparams. If not, see <http://www.gnu.org/licenses/>.
#
#############################################################################
'''
Find and run all unit tests in the project.
'''

# Make sure we import the local package
import os
import sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), 'src')))

import nose
nose.main()

#
#   Copyright (c) 2013-2014, Scott J Maddox
#
#   This file is part of openbandparams.
#
#   openbandparams is free software: you can redistribute it and/or modify
#   it under the terms of the GNU Affero General Public License as published
#   by the Free Software Foundation, either version 3 of the License, or
#   (at your option) any later version.
#
#   openbandparams is distributed in the hope that it will be useful,
#   but WITHOUT ANY WARRANTY; without even the implied warranty of
#   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#   GNU Affero General Public License for more details.
#
#   You should have received a copy of the GNU Affero General Public License
#   along with openbandparams. If not, see <http://www.gnu.org/licenses/>.
#
#############################################################################
'''
Find and run all unit tests in the project.
'''

import nose
nose.main()
agpl-3.0
Python
a122193144185320f045367613650b40f7df00b8
Rework the test script a bit.
djc/jasinja,djc/jasinja
test.py
test.py
import codegen, jinja2, spidermonkey, sys
import simplejson as json

TESTS = [
    ('{{ test }}', {'test': 'crap'}),
    ('{% if a %}x{% endif %}', {'a': True}),
    ('{% if a %}c{% endif %}b', {'a': False}),
    ('{{ 1 if a else 2 }}', {'a': True}),
    ('{{ 1 if a else 2 }}', {'a': False}),
    ('{% if a %}d{% else %}e{% endif %}', {'a': False}),
    ('{% if a %}f{% elif b %}g{% endif %}', {'b': True}),
    ("{{ '%4.2f'|format(x) }}", {'x': 17.0}),
    ('{{ d[:7] }}', {'d': '2011-05-27'}),
    ('{{ a.x }}', {'a': {'x': 'z'}}),
    ('{{ "%.6f"|format(a / b) }}', {'a': 5.0, 'b': 3}),
    ('{{ "%.1f"|format(a.x / b.y * 100) }}', {'a': {'x': 20}, 'b': {'y': 5}}),
    ('{% macro x(y) %}{{ y / 2 }}{% endmacro %}{{ x(z) }}', {'z': 512}),
]

def jstest(env, src, data):
    run = spidermonkey.Runtime()
    ctx = run.new_context()
    js = codegen.generate(env, codegen.compile(env, src))
    jsobj = json.dumps(data)
    code = js + '\ntemplate.render(%s);' % jsobj
    return ctx.execute(code)

def pytest(env, src, data):
    tmpl = env.from_string(src)
    return tmpl.render(data)

def run(i, quiet=True):
    src, data = TESTS[i]
    env = jinja2.Environment()
    ast = codegen.compile(env, src)
    if not quiet:
        print ast
        print codegen.generate(env, ast)
    js = jstest(env, src, data)
    py = pytest(env, src, data)
    if not quiet:
        print 'js:', repr(js)
        print 'py:', repr(py)
    if js.isdigit():
        return float(js) == float(py)
    return js == py

def test():
    for i, t in enumerate(TESTS):
        res = run(i)
        sys.stdout.write('.' if res else 'F')
    sys.stdout.write('\n')

if __name__ == '__main__':
    args = sys.argv[1:]
    if args:
        run(int(args[0]), False)
    else:
        test()

import codegen, jinja2, spidermonkey, sys
import simplejson as json

def jstest(env, src, data):
    run = spidermonkey.Runtime()
    ctx = run.new_context()
    js = codegen.generate(env, codegen.compile(env, src))
    jsobj = json.dumps(data)
    code = js + '\ntemplate.render(%s);' % jsobj
    return ctx.execute(code)

def pytest(env, src, data):
    tmpl = env.from_string(src)
    return tmpl.render(data)

WORKS = [
    ('{{ test }}', {'test': 'crap'}),
    ('{% if a %}x{% endif %}', {'a': True}),
    ('{% if a %}c{% endif %}b', {'a': False}),
    ('{{ 1 if a else 2 }}', {'a': True}),
    ('{{ 1 if a else 2 }}', {'a': False}),
    ('{% if a %}d{% else %}e{% endif %}', {'a': False}),
    ('{% if a %}f{% elif b %}g{% endif %}', {'b': True}),
    ("{{ '%4.2f'|format(x) }}", {'x': 17.0}),
    ('{{ d[:7] }}', {'d': '2011-05-27'}),
    ('{{ a.x }}', {'a': {'x': 'z'}}),
    ('{{ "%.6f"|format(a / b) }}', {'a': 5.0, 'b': 3}),
    ('{{ "%.1f"|format(a.x / b.y * 100) }}', {'a': {'x': 20}, 'b': {'y': 5}}),
    ('{% macro x(y) %}{{ y / 2 }}{% endmacro %}{{ x(z) }}', {'z': 512}),
]

# next:
# - assignment + cond-expr
# - for-loop

src, data = WORKS[int(sys.argv[1])]
env = jinja2.Environment()
ast = codegen.compile(env, src)
print ast
print codegen.generate(env, ast)
print 'js:', repr(jstest(env, src, data))
print 'py:', repr(pytest(env, src, data))
bsd-3-clause
Python
6d425b617a28b2eb35d53f35f5136148aa1f2ef6
Add relative import for the parser
buddly27/champollion
source/champollion/__init__.py
source/champollion/__init__.py
# :coding: utf-8

import os

from ._version import __version__
from .directive.data import AutoDataDirective
from .directive.function import AutoFunctionDirective
from .directive.class_ import AutoClassDirective
from .directive.method import AutoMethodDirective
from .directive.attribute import AutoAttributeDirective
from .viewcode import (
    add_source_code_links,
    create_code_pages,
    create_missing_code_link
)
from .parser import get_environment


def parse_js_source(app):
    """Parse the javascript source path."""
    path = os.path.abspath(app.config.js_source)
    app.env.js_environment = get_environment(path)


def setup(app):
    """Register the javascript autodoc directives."""
    app.add_config_value("js_source", None, "env")

    app.connect("builder-inited", parse_js_source)
    app.connect("doctree-read", add_source_code_links)
    app.connect("html-collect-pages", create_code_pages)
    app.connect("missing-reference", create_missing_code_link)

    app.add_directive_to_domain("js", "autodata", AutoDataDirective)
    app.add_directive_to_domain("js", "autofunction", AutoFunctionDirective)
    app.add_directive_to_domain("js", "autoclass", AutoClassDirective)
    app.add_directive_to_domain("js", "automethod", AutoMethodDirective)
    app.add_directive_to_domain("js", "autoattribute", AutoAttributeDirective)
    # app.add_directive_to_domain("js", "automodule", AutoModuleDirective)

    return {
        "version": __version__
    }

# :coding: utf-8

import os

from ._version import __version__
from .directive.data import AutoDataDirective
from .directive.function import AutoFunctionDirective
from .directive.class_ import AutoClassDirective
from .directive.method import AutoMethodDirective
from .directive.attribute import AutoAttributeDirective
from .viewcode import (
    add_source_code_links,
    create_code_pages,
    create_missing_code_link
)
import parser


def parse_js_source(app):
    """Parse the javascript source path."""
    path = os.path.abspath(app.config.js_source)
    app.env.js_environment = parser.get_environment(path)


def setup(app):
    """Register the javascript autodoc directives."""
    app.add_config_value("js_source", None, "env")

    app.connect("builder-inited", parse_js_source)
    app.connect("doctree-read", add_source_code_links)
    app.connect("html-collect-pages", create_code_pages)
    app.connect("missing-reference", create_missing_code_link)

    app.add_directive_to_domain("js", "autodata", AutoDataDirective)
    app.add_directive_to_domain("js", "autofunction", AutoFunctionDirective)
    app.add_directive_to_domain("js", "autoclass", AutoClassDirective)
    app.add_directive_to_domain("js", "automethod", AutoMethodDirective)
    app.add_directive_to_domain("js", "autoattribute", AutoAttributeDirective)
    # app.add_directive_to_domain("js", "automodule", AutoModuleDirective)

    return {
        "version": __version__
    }
apache-2.0
Python
1caace2631f8e9c38cf0adfb1179a5260dcd3c33
Change output_all_unitprot to allow multi ids for some proteins.
cmunk/protwis,fosfataza/protwis,fosfataza/protwis,fosfataza/protwis,cmunk/protwis,protwis/protwis,cmunk/protwis,cmunk/protwis,fosfataza/protwis,protwis/protwis,protwis/protwis
tools/management/commands/output_all_uniprot.py
tools/management/commands/output_all_uniprot.py
from django.core.management.base import BaseCommand, CommandError
from django.core.management import call_command
from django.conf import settings
from django.db import connection
from django.db.models import Q
from django.template.loader import render_to_string

from protein.models import Protein
from residue.models import ResidueGenericNumber, ResidueGenericNumberEquivalent
from common import definitions
from common.selection import SelectionItem
from common.alignment_gpcr import Alignment

import xlsxwriter, xlrd
import logging, json, os


class Command(BaseCommand):
    help = "Output all uniprot mappings"

    logger = logging.getLogger(__name__)

    def handle(self, *args, **options):
        #Get the proteins
        f = open('uniprot.json', 'w')
        ps = Protein.objects.filter(Q(source__name='SWISSPROT') | Q(source__name='TREMBL'),web_links__web_resource__slug='uniprot').all().prefetch_related('web_links__web_resource')
        print('total:',len(ps))
        mapping = {}
        for p in ps:
            uniprot = p.web_links.filter(web_resource__slug='uniprot').values_list('index', flat = True)
            mapping[p.entry_name] = list(uniprot)
        json.dump(mapping,f, indent=4, separators=(',', ': '))
        # print("Seqs: {}\tNot matching: {}".format(num_of_sequences, num_of_non_matching_sequences))
        # open("uniprot.txt", "w").write()

from django.core.management.base import BaseCommand, CommandError
from django.core.management import call_command
from django.conf import settings
from django.db import connection
from django.db.models import Q
from django.template.loader import render_to_string

from protein.models import Protein
from residue.models import ResidueGenericNumber, ResidueGenericNumberEquivalent
from common import definitions
from common.selection import SelectionItem
from common.alignment_gpcr import Alignment

import xlsxwriter, xlrd
import logging, json, os


class Command(BaseCommand):
    help = "Output all uniprot mappings"

    logger = logging.getLogger(__name__)

    def handle(self, *args, **options):
        #Get the proteins
        f = open('uniprot.json', 'w')
        ps = Protein.objects.filter(Q(source__name='SWISSPROT') | Q(source__name='TREMBL'),web_links__web_resource__slug='uniprot').all().prefetch_related('web_links__web_resource')
        print('total:',len(ps))
        mapping = {}
        for p in ps:
            uniprot = p.web_links.get(web_resource__slug='uniprot')
            mapping[p.entry_name] = uniprot.index
        json.dump(mapping,f, indent=4, separators=(',', ': '))
        # print("Seqs: {}\tNot matching: {}".format(num_of_sequences, num_of_non_matching_sequences))
        # open("uniprot.txt", "w").write()
apache-2.0
Python
1b06091101c119f30eb5eabb2d2638fab0e8f658
Test modified to work with renamed debug function
petrgabrlik/BullsAndCows
test_debug.py
test_debug.py
from bullsandcows import isdebug

def test_isdebug():
    assert isdebug() == 0, "program is in debug mode, this should not be commited"

from bullsandcows import isdebugmode

def test_isdebugmode():
    assert isdebugmode() == 0, "program is in debug mode, this should not be commited"
mit
Python
f1c47f99255bc6ff2dc7819d72ceafbecaa328a4
Fix comment formatting
bechtoldt/imapclient,bechtoldt/imapclient
imapclient/test/util.py
imapclient/test/util.py
# Copyright (c) 2014, Menno Smits
# Released subject to the New BSD License
# Please see http://en.wikipedia.org/wiki/BSD_licenses

from __future__ import unicode_literals


def find_unittest2():
    import unittest
    if hasattr(unittest, 'skip') and hasattr(unittest, 'loader'):
        return unittest   # unittest from stdlib is unittest2, use that
    try:
        import unittest2   # try for a separately installed unittest2 package
    except ImportError:
        raise ImportError('unittest2 not installed and unittest in standard library is not unittest2')
    else:
        return unittest2

unittest = find_unittest2()


def patch_TestCase():
    TestCase = unittest.TestCase

    # Older versions of unittest2 don't have
    # TestCase.assertRaisesRegex and newer version raises warnings
    # when you use assertRaisesRegexp. This helps deal with the
    # mismatch.
    if not hasattr(TestCase, 'assertRaisesRegex'):
        TestCase.assertRaisesRegex = TestCase.assertRaisesRegexp

patch_TestCase()

# Copyright (c) 2014, Menno Smits
# Released subject to the New BSD License
# Please see http://en.wikipedia.org/wiki/BSD_licenses

from __future__ import unicode_literals


def find_unittest2():
    import unittest
    if hasattr(unittest, 'skip') and hasattr(unittest, 'loader'):
        return unittest   # unittest from stdlib is unittest2, use that
    try:
        import unittest2   # try for a separately installed unittest2 package
    except ImportError:
        raise ImportError('unittest2 not installed and unittest in standard library is not unittest2')
    else:
        return unittest2

unittest = find_unittest2()


def patch_TestCase():
    TestCase = unittest.TestCase

    # Older versions of unittest2 don't have
    # TestCase.assertRaisesRegex
    # and newer version raises warnings
    # when you use
    # assertRaisesRegexp. This helps deal with the
    # mismatch.
    if not hasattr(TestCase, 'assertRaisesRegex'):
        TestCase.assertRaisesRegex = TestCase.assertRaisesRegexp

patch_TestCase()
bsd-3-clause
Python
96261b3c277cb2f694fb5cc2f7cbe29847ff1a53
change the receiver
elixirhub/events-portal-scraping-scripts
SyncEmailNotification.py
SyncEmailNotification.py
__author__ = 'chuqiao'

import smtplib
import base64
from email.MIMEMultipart import MIMEMultipart
from email.MIMEText import MIMEText


def viewlog(file):
    file = open("syncsolr.log")
    file.seek(0,2)# Go to the end of the file
    while True:
        line = file.readline()
        if "***Finished synchronizing***" in line:
            mailUpdate()
        elif "***Synchronize failed***" in line:
            mailAlert()


def mailUpdate():
    fromaddr = 'bioeventsportal@gmail.com'
    toaddr = 'info@bioevents.pro'
    msg = MIMEMultipart()
    msg['From'] = fromaddr
    msg['To'] = toaddr
    msg['Subject'] = "[Sync-reports] Synchronise two Solrs"

    body = '''The IAnn Solr is now synchronised with the Bioevents Solr. '''
    msg.attach(MIMEText(body, 'plain'))

    username = 'bioeventsportal'
    password = base64.b64decode('YmlvZXZlbnRzMzIx')

    server = smtplib.SMTP('smtp.gmail.com', 587)
    server.ehlo()
    server.starttls()
    server.login(username, password)
    text = msg.as_string()
    server.sendmail(fromaddr, toaddr, text)
    server.quit()


def mailAlert():
    fromaddr = 'bioeventsportal@gmail.com'
    toaddr = 'info@bioevents.pro'
    msg = MIMEMultipart()
    msg['From'] = fromaddr
    msg['To'] = toaddr
    msg['Subject'] = "[Sync-reports]Synchronise two Solrs failed"

    body = '''The synchronisation of two Solrs failed. '''
    msg.attach(MIMEText(body, 'plain'))

    username = 'bioeventsportal'
    password = base64.b64decode('YmlvZXZlbnRzMzIx')

    server = smtplib.SMTP('smtp.gmail.com', 587)
    server.ehlo()
    server.starttls()
    server.login(username, password)
    text = msg.as_string()
    server.sendmail(fromaddr, toaddr, text)
    server.quit()


if __name__ == '__main__':
    viewlog(file)

__author__ = 'chuqiao'

import smtplib
import base64
from email.MIMEMultipart import MIMEMultipart
from email.MIMEText import MIMEText


def viewlog(file):
    file = open("syncsolr.log")
    file.seek(0,2)# Go to the end of the file
    while True:
        line = file.readline()
        if "***Finished synchronizing***" in line:
            mailUpdate()
        elif "***Synchronize failed***" in line:
            mailAlert()


def mailUpdate():
    fromaddr = 'bioeventsportal@gmail.com'
    toaddr = 'info@bioevents-portal.org'
    msg = MIMEMultipart()
    msg['From'] = fromaddr
    msg['To'] = toaddr
    msg['Subject'] = "[Sync-reports] Synchronise two Solrs"

    body = '''The IAnn Solr is now synchronised with the Bioevents Solr. '''
    msg.attach(MIMEText(body, 'plain'))

    username = 'bioeventsportal'
    password = base64.b64decode('YmlvZXZlbnRzMzIx')

    server = smtplib.SMTP('smtp.gmail.com', 587)
    server.ehlo()
    server.starttls()
    server.login(username, password)
    text = msg.as_string()
    server.sendmail(fromaddr, toaddr, text)
    server.quit()


def mailAlert():
    fromaddr = 'bioeventsportal@gmail.com'
    toaddr = 'info@bioevents-portal.org'
    msg = MIMEMultipart()
    msg['From'] = fromaddr
    msg['To'] = toaddr
    msg['Subject'] = "[Sync-reports]Synchronise two Solrs failed"

    body = '''The synchronisation of two Solrs failed. '''
    msg.attach(MIMEText(body, 'plain'))

    username = 'bioeventsportal'
    password = base64.b64decode('YmlvZXZlbnRzMzIx')

    server = smtplib.SMTP('smtp.gmail.com', 587)
    server.ehlo()
    server.starttls()
    server.login(username, password)
    text = msg.as_string()
    server.sendmail(fromaddr, toaddr, text)
    server.quit()


if __name__ == '__main__':
    viewlog(file)
mit
Python
474dfd3aa9d03ed6bbda47078523badcc7909664
Reorganize and refactor
jnfrye/local_plants_book
scripts/observations/scrape/CalFloraScraper.py
scripts/observations/scrape/CalFloraScraper.py
from selenium import webdriver
import pandas as pd
import argparse

import PyFloraBook.web.communication as scraping
import PyFloraBook.input_output.data_coordinator as dc


# ---------------- GLOBALS ----------------
SITE_NAME = "CalFlora"

# ---------------- INPUT ----------------
# Parse arguments
PARSER = argparse.ArgumentParser(
    description='Scrape CalFlora for species counts for given family'
)
PARSER.add_argument(
    "-f", "--families", nargs='+', help="Names of the families to be analyzed."
)
args = PARSER.parse_args()
families = args.families

# ---------------- SCRAPING ----------------
print("Opening browser...")
browser = webdriver.Firefox()
browser.set_window_size(500, 300)
browser.set_window_position(200, 200)

output_path = dc.locate_raw_data_folder() / SITE_NAME
for family in families:
    # Load the webpage
    try:
        browser.get(
            "http://www.calflora.org/entry/wgh.html#srch=t&family=" + family +
            "&group=none&fmt=simple&y=39.493&x=-119.6979&z=5&rid=rs940")
    except:
        pass  # lol
    scraping.wait_for_load(browser, "CLASS_NAME", "familyColumn")

    # Download the rows in the species data table
    # Next we skip the first three rows because they contain nonsense
    data_table = browser.find_element_by_id("resultSlot")
    data_rows = data_table.find_elements_by_tag_name("tr")[3:]

    # Extract the species counts
    species_list = [
        (row.find_element_by_class_name("column1Simple").text,
         int(row.find_element_by_class_name("observColumn").text.split()[0]))
        for row in data_rows
    ]

    # ---------------- ANALYSIS ----------------
    # Convert to friendly format for writing CSV
    family_results_path = str(output_path / (family + "_raw_data.csv"))
    all_species = pd.DataFrame(species_list, columns=["full_name", "count"])
    all_species.to_csv(
        family_results_path, columns=['full_name', 'count'], index=False
    )

    # For whatever reason, it won't load the next page unless I do this
    browser.get("about:blank")

browser.quit()

from selenium import webdriver
import pandas as pd
import argparse

import PyFloraBook.web.communication as scraping
import PyFloraBook.input_output.data_coordinator as dc


# ---------------- INPUT ----------------
# Parse arguments
parser = argparse.ArgumentParser(
    description='Scrape CalFlora for species counts for given family')
parser.add_argument("-f", "--families", nargs='+',
                    help="Names of the families to be analyzed.")
args = parser.parse_args()
families = args.families

# ---------------- SCRAPING ----------------
print("Opening browser...")
browser = webdriver.Firefox()
browser.set_window_size(500, 300)
browser.set_window_position(200, 200)

SITE_NAME = "CalFlora"
OUTPUT_PATH = dc.locate_raw_data_folder() / SITE_NAME
for family in families:
    # Load the webpage
    try:
        browser.get(
            "http://www.calflora.org/entry/wgh.html#srch=t&family=" + family +
            "&group=none&fmt=simple&y=39.493&x=-119.6979&z=5&rid=rs940")
    except:
        pass  # lol
    scraping.wait_for_load(browser, "CLASS_NAME", "familyColumn")

    # Download the rows in the species data table
    # Next we skip the first three rows because they contain nonsense
    data_table = browser.find_element_by_id("resultSlot")
    data_rows = data_table.find_elements_by_tag_name("tr")[3:]

    # Extract the species counts
    species_list = [
        (row.find_element_by_class_name("column1Simple").text,
         int(row.find_element_by_class_name("observColumn").text.split()[0]))
        for row in data_rows
    ]

    # ---------------- ANALYSIS ----------------
    # Convert to friendly format for writing CSV
    family_results_path = str(OUTPUT_PATH / (family + "_raw_data.csv"))
    all_species = pd.DataFrame(species_list, columns=["full_name", "count"])
    all_species.to_csv(
        family_results_path, columns=['full_name', 'count'], index=False
    )

    # For whatever reason, it won't load the next page unless I do this
    browser.get("about:blank")

browser.quit()
mit
Python
1305af162dd05591cc0e5328eb192843b63dabb1
Use DefaultRouter instead of SimpleRouter
City-of-Helsinki/kerrokantasi,vikoivun/kerrokantasi,stephawe/kerrokantasi,stephawe/kerrokantasi,stephawe/kerrokantasi,City-of-Helsinki/kerrokantasi,City-of-Helsinki/kerrokantasi,vikoivun/kerrokantasi,City-of-Helsinki/kerrokantasi,vikoivun/kerrokantasi
kk/urls_v1.py
kk/urls_v1.py
from django.conf.urls import include, url
from kk.views import (
    HearingCommentViewSet, HearingImageViewSet, HearingViewSet, SectionCommentViewSet, SectionViewSet,
    UserDataViewSet
)
from rest_framework_nested import routers

router = routers.DefaultRouter()
router.register(r'hearing', HearingViewSet)
router.register(r'users', UserDataViewSet, base_name='users')

hearing_comments_router = routers.NestedSimpleRouter(router, r'hearing', lookup='comment_parent')
hearing_comments_router.register(r'comments', HearingCommentViewSet, base_name='comments')

hearing_child_router = routers.NestedSimpleRouter(router, r'hearing', lookup='hearing')
hearing_child_router.register(r'sections', SectionViewSet, base_name='sections')
hearing_child_router.register(r'images', HearingImageViewSet, base_name='images')

section_comments_router = routers.NestedSimpleRouter(hearing_child_router, r'sections', lookup='comment_parent')
section_comments_router.register(r'comments', SectionCommentViewSet, base_name='comments')

urlpatterns = [
    url(r'^', include(router.urls, namespace='v1')),
    url(r'^', include(hearing_comments_router.urls, namespace='v1')),
    url(r'^', include(hearing_child_router.urls, namespace='v1')),
    url(r'^', include(section_comments_router.urls, namespace='v1')),
]

from django.conf.urls import include, url
from kk.views import (
    HearingCommentViewSet, HearingImageViewSet, HearingViewSet, SectionCommentViewSet, SectionViewSet,
    UserDataViewSet
)
from rest_framework_nested import routers

router = routers.SimpleRouter()
router.register(r'hearing', HearingViewSet)
router.register(r'users', UserDataViewSet, base_name='users')

hearing_comments_router = routers.NestedSimpleRouter(router, r'hearing', lookup='comment_parent')
hearing_comments_router.register(r'comments', HearingCommentViewSet, base_name='comments')

hearing_child_router = routers.NestedSimpleRouter(router, r'hearing', lookup='hearing')
hearing_child_router.register(r'sections', SectionViewSet, base_name='sections')
hearing_child_router.register(r'images', HearingImageViewSet, base_name='images')

section_comments_router = routers.NestedSimpleRouter(hearing_child_router, r'sections', lookup='comment_parent')
section_comments_router.register(r'comments', SectionCommentViewSet, base_name='comments')

urlpatterns = [
    url(r'^', include(router.urls, namespace='v1')),
    url(r'^', include(hearing_comments_router.urls, namespace='v1')),
    url(r'^', include(hearing_child_router.urls, namespace='v1')),
    url(r'^', include(section_comments_router.urls, namespace='v1')),
]
mit
Python
a0dcb73836222e3515c4af4cf4cfe2d41f470b9e
handle missing stash[benchstorage] (#3564)
cloudify-cosmo/cloudify-manager,cloudify-cosmo/cloudify-manager,cloudify-cosmo/cloudify-manager
tests/integration_tests/tests/benchmarks/conftest.py
tests/integration_tests/tests/benchmarks/conftest.py
import pytest

from datetime import datetime


def log_result(name, timing, start, stop):
    if timing:
        name = f'{name}.{timing}'
    print(f'BENCH {name}: {stop - start}')


class _Timings(object):
    def __init__(self, func_name):
        self.records = {}
        self._func_name = func_name

    def start(self, name=None):
        self.records[name] = [datetime.utcnow(), None]

    def stop(self, name=None):
        if name not in self.records:
            raise RuntimeError(f'bench called stop without a start: {name}')
        self.records[name][1] = datetime.utcnow()
        log_result(self._func_name, name, *self.records[name])


@pytest.fixture()
def bench(request):
    """Give the tests a "bench" fixture for measuring time.

    Tests can call `self.bench.start()` and `self.bench.stop()` (with
    an optional name). Results will be printed out immediately, and also
    at the end of the session (using the pytest_sessionx hooks).
    """
    func_name = request.function.__name__
    storage = request.session.stash['benchstorage']
    timings = _Timings(func_name)
    request.cls.bench = timings
    storage[func_name] = timings


def pytest_sessionstart(session):
    session.stash['benchstorage'] = {}


def pytest_sessionfinish(session, exitstatus):
    print('\nBENCHMARK RESULTS')
    for name, timings in session.stash.get('benchstorage', {}).items():
        for timing, (start, stop) in timings.records.items():
            log_result(name, timing, start, stop)

import pytest

from datetime import datetime


def log_result(name, timing, start, stop):
    if timing:
        name = f'{name}.{timing}'
    print(f'BENCH {name}: {stop - start}')


class _Timings(object):
    def __init__(self, func_name):
        self.records = {}
        self._func_name = func_name

    def start(self, name=None):
        self.records[name] = [datetime.utcnow(), None]

    def stop(self, name=None):
        if name not in self.records:
            raise RuntimeError(f'bench called stop without a start: {name}')
        self.records[name][1] = datetime.utcnow()
        log_result(self._func_name, name, *self.records[name])


@pytest.fixture()
def bench(request):
    """Give the tests a "bench" fixture for measuring time.

    Tests can call `self.bench.start()` and `self.bench.stop()` (with
    an optional name). Results will be printed out immediately, and also
    at the end of the session (using the pytest_sessionx hooks).
    """
    func_name = request.function.__name__
    storage = request.session.stash['benchstorage']
    timings = _Timings(func_name)
    request.cls.bench = timings
    storage[func_name] = timings


def pytest_sessionstart(session):
    session.stash['benchstorage'] = {}


def pytest_sessionfinish(session, exitstatus):
    print('\nBENCHMARK RESULTS')
    for name, timings in session.stash['benchstorage'].items():
        for timing, (start, stop) in timings.records.items():
            log_result(name, timing, start, stop)
apache-2.0
Python
57adb8240cf0015e1e10f2e9fd4f090a8d896a27
Revert "[examples...bindings_generator] began to update"
pymor/dune-pymor,pymor/dune-pymor
examples/stationarylinear_bindings_generator.py
examples/stationarylinear_bindings_generator.py
#! /usr/bin/env python
# This file is part of the dune-pymor project:
#   https://github.com/pymor/dune-pymor
# Copyright Holders: Felix Albrecht, Stephan Rave
# License: BSD 2-Clause License (http://opensource.org/licenses/BSD-2-Clause)

import sys

from pybindgen import param, retval

from dune.pymor.core import prepare_python_bindings, inject_lib_dune_pymor, finalize_python_bindings
from dune.pymor.discretizations import inject_StationaryDiscretizationImplementation


def inject_Example(module):
    '''injects the user code into the module'''
    namespace = module.add_cpp_namespace('Example')
    AnalyticalProblem = namespace.add_class('AnalyticalProblem')
    AnalyticalProblem.add_constructor([])
    AnalyticalProblem.add_constructor([param('const int', 'dd')])


if __name__ == '__main__':
    # prepare the module
    module, pybindgen_filename = prepare_python_bindings(sys.argv[1:])
    # add all of libdunepymor
    module, exceptions, interfaces, CONFIG_H = inject_lib_dune_pymor(module)
    # add example user code
    inject_Example(module)
    # add the users discretization
    discretization = inject_StationaryDiscretizationImplementation(
        module, exceptions, interfaces, CONFIG_H,
        'Example::SimpleDiscretization',
        Traits={'VectorType': 'Dune::Pymor::LA::DuneDynamicVector< double >',
                'OperatorType': 'Dune::Pymor::Operators::LinearAffinelyDecomposedContainerBased< Dune::Pymor::Operators::DuneDynamic< double > >',
                'FunctionalType': 'Dune::Pymor::Functionals::LinearAffinelyDecomposedVectorBased< Dune::Pymor::LA::DuneDynamicVector< double > >',
                'ProductType': 'Dune::Pymor::Operators::LinearAffinelyDecomposedContainerBased< Dune::Pymor::Operators::DuneDynamic< double > >'})
    # and add the custom constructor to the discretization
    discretization.add_constructor([param('const Example::AnalyticalProblem *', 'prob', transfer_ownership=True)])
    # and finally write the pybindgen .cc file
    finalize_python_bindings(module, pybindgen_filename)

#! /usr/bin/env python
# This file is part of the dune-pymor project:
#   https://github.com/pymor/dune-pymor
# Copyright Holders: Felix Albrecht, Stephan Rave
# License: BSD 2-Clause License (http://opensource.org/licenses/BSD-2-Clause)

import sys

from pybindgen import param, retval

from dune.pymor.core import prepare_python_bindings, inject_lib_dune_pymor, finalize_python_bindings
#from dune.pymor.discretizations import inject_StationaryDiscretizationImplementation


def inject_Example(module):
    '''injects the user code into the module'''
    namespace = module.add_cpp_namespace('Example')
    AnalyticalProblem = namespace.add_class('AnalyticalProblem')
    AnalyticalProblem.add_constructor([])
    AnalyticalProblem.add_constructor([param('const int', 'dd')])


if __name__ == '__main__':
    # prepare the module
    module, pybindgen_filename = prepare_python_bindings(sys.argv[1:])
    # add all of libdunepymor
    module, exceptions, interfaces, CONFIG_H = inject_lib_dune_pymor(module)
#    # add example user code
#    inject_Example(module)
#    # add the users discretization
#    discretization = inject_StationaryDiscretizationImplementation(
#        module, exceptions, interfaces, CONFIG_H,
#        'Example::SimpleDiscretization',
#        Traits={'VectorType': 'Dune::Pymor::LA::DuneDynamicVector< double >',
#                'OperatorType': 'Dune::Pymor::Operators::LinearAffinelyDecomposedContainerBased< Dune::Pymor::Operators::DuneDynamic< double > >',
#                'FunctionalType': 'Dune::Pymor::Functionals::LinearAffinelyDecomposedVectorBased< Dune::Pymor::LA::DuneDynamicVector< double > >',
#                'ProductType': 'Dune::Pymor::Operators::LinearAffinelyDecomposedContainerBased< Dune::Pymor::Operators::DuneDynamic< double > >'})
#    # and add the custom constructor to the discretization
#    discretization.add_constructor([param('const Example::AnalyticalProblem *', 'prob', transfer_ownership=True)])
    # and finally write the pybindgen .cc file
    finalize_python_bindings(module, pybindgen_filename)
bsd-2-clause
Python
e6cef6d96a3c2bd6dd07f580f4a704734133d316
Bump version to 0.3c2
OSSystems/lava-server,OSSystems/lava-server,Linaro/lava-server,Linaro/lava-server,OSSystems/lava-server,Linaro/lava-server,Linaro/lava-server
lava_server/__init__.py
lava_server/__init__.py
# Copyright (C) 2010, 2011 Linaro Limited
#
# Author: Zygmunt Krynicki <zygmunt.krynicki@linaro.org>
#
# This file is part of LAVA Server.
#
# LAVA Server is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License version 3
# as published by the Free Software Foundation
#
# LAVA Server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with LAVA Server. If not, see <http://www.gnu.org/licenses/>.

__version__ = (0, 3, 0, "candidate", 2)

# Copyright (C) 2010, 2011 Linaro Limited
#
# Author: Zygmunt Krynicki <zygmunt.krynicki@linaro.org>
#
# This file is part of LAVA Server.
#
# LAVA Server is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License version 3
# as published by the Free Software Foundation
#
# LAVA Server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with LAVA Server. If not, see <http://www.gnu.org/licenses/>.

__version__ = (0, 3, 0, "candidate", 1)
agpl-3.0
Python
cf194c1c3a64c4547049c16fb901a2b33dc84ddf
Add verbose output
xii/xii,xii/xii
src/xii/output.py
src/xii/output.py
import os

from threading import Lock
from abc import ABCMeta, abstractmethod

# synchronize output from multiple threads
output_lock = Lock()


class colors:
    TAG = '\033[0m'
    NORMAL = '\033[37m'
    CLEAR = '\033[0m'
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'
    WARN = '\033[91m'
    SUCCESS = '\033[34m'


def width():
    # FIXME: port me to subprocess
    _, columns = os.popen('stty size', 'r').read().split()
    return int(columns)


def warn(msg, tag="[xii]"):
    output = "{} {}".format(tag, msg)
    print(colors.WARN + colors.BOLD + output + colors.CLEAR)


class HasOutput:
    __meta__ = ABCMeta

    @abstractmethod
    def entity_path(self):
        pass

    @abstractmethod
    def is_verbose(self):
        pass

    def verbose(self, msg):
        if self.is_verbose():
            self._tprint(self._generate_tag(), msg, colors.NORMAL)

    def say(self, msg):
        self._tprint(self._generate_tag(), msg, colors.NORMAL)

    def counted(self, i, msg):
        tag = "{}[#{}]".format(self._generate_tag(), i)
        self._tprint(tag, msg, colors.NORMAL)

    def warn(self, msg):
        self._tprint(self._generate_tag(), msg, colors.WARN + colors.BOLD)

    def success(self, msg):
        self._tprint(self._generate_tag(), msg, colors.SUCCESS + colors.BOLD)

    def _tprint(self, tag, msg, wrap=None):
        stop = 40
        fill = stop - len(tag)
        line = "{} {}: {}".format(tag, "." * fill, msg)
        if wrap:
            line = wrap + line + colors.CLEAR
        output_lock.acquire()
        print(line)
        output_lock.release()

    def _generate_tag(self):
        tag = ""
        for ident in self.entity_path():
            tag += "[" + ident + "]"
        return tag

import os

from threading import Lock
from abc import ABCMeta, abstractmethod

# synchronize output from multiple threads
output_lock = Lock()


class colors:
    TAG = '\033[0m'
    NORMAL = '\033[37m'
    CLEAR = '\033[0m'
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'
    WARN = '\033[91m'
    SUCCESS = '\033[34m'


def width():
    # FIXME: port me to subprocess
    _, columns = os.popen('stty size', 'r').read().split()
    return int(columns)


def warn(msg, tag="[xii]"):
    output = "{} {}".format(tag, msg)
    print(colors.WARN + colors.BOLD + output + colors.CLEAR)


class HasOutput:
    __meta__ = ABCMeta

    @abstractmethod
    def entity_path(self):
        pass

    def say(self, msg):
        self._tprint(self._generate_tag(), msg, colors.NORMAL)

    def counted(self, i, msg):
        tag = "{}[#{}]".format(self._generate_tag(), i)
        self._tprint(tag, msg, colors.NORMAL)

    def warn(self, msg):
        self._tprint(self._generate_tag(), msg, colors.WARN + colors.BOLD)

    def success(self, msg):
        self._tprint(self._generate_tag(), msg, colors.SUCCESS + colors.BOLD)

    def _tprint(self, tag, msg, wrap=None):
        stop = 40
        fill = stop - len(tag)
        line = "{} {}: {}".format(tag, "." * fill, msg)
        if wrap:
            line = wrap + line + colors.CLEAR
        output_lock.acquire()
        print(line)
        output_lock.release()

    def _generate_tag(self):
        tag = ""
        for ident in self.entity_path():
            tag += "[" + ident + "]"
        return tag
apache-2.0
Python
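The width() helper in the record above still shells out to `stty size` through os.popen, which its own FIXME wants replaced. A minimal sketch of the portable route, assuming Python 3.3+ where shutil.get_terminal_size() is in the stdlib; this is an illustration, not the xii project's actual fix:

import shutil

def width(default=80):
    # Falls back to the given default when stdout is not attached to a TTY,
    # instead of crashing the way the `stty size` pipe does.
    return shutil.get_terminal_size(fallback=(default, 24)).columns

print(width())  # e.g. 120 in an interactive terminal, 80 otherwise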
b3b99ed11d6c86721e9e57441111e0c88461eb70
Fix example state relation
jrleeman/rsfmodel
examples/defining_new_state_relation.py
examples/defining_new_state_relation.py
import numpy as np
import matplotlib.pyplot as plt
from math import log
from rsfmodel import rsf

# This is really just the Ruina realtion, but let's pretend we invented it!
# We'll inherit attributes from rsf.StateRelation, but you wouldn't have to.
# It does provide velocity contribution calculation for us though!
class MyStateRelation(rsf.StateRelation):
    # Need to provide a steady state calcualtion method
    def set_steady_state(self, system):
        self.state = self.Dc/system.vref

    def evolve_state(self, system):
        return -1 * (system.v * self.state / self.Dc) * log(system.v * self.state / self.Dc)

model = rsf.Model()

# Set model initial conditions
model.mu0 = 0.6  # Friction initial (at the reference velocity)
model.a = 0.01  # Empirical coefficient for the direct effect
model.k = 1e-3  # Normalized System stiffness (friction/micron)
model.v = 1.  # Initial slider velocity, generally is vlp(t=0)
model.vref = 1.  # Reference velocity, generally vlp(t=0)

state1 = MyStateRelation()
state1.b = 0.005  # Empirical coefficient for the evolution effect
state1.Dc = 10.  # Critical slip distance

model.state_relations = [state1]  # Which state relation we want to use

# We want to solve for 40 seconds at 100Hz
model.time = np.arange(0, 40.01, 0.01)

# We want to slide at 1 um/s for 10 s, then at 10 um/s for 31
lp_velocity = np.ones_like(model.time)
lp_velocity[10*100:] = 10.  # Velocity after 10 seconds is 10 um/s

# Set the model load point velocity, must be same shape as model.model_time
model.loadpoint_velocity = lp_velocity

# Run the model!
model.solve()

# Make the phase plot
rsf.phasePlot(model)

# Make a plot in displacement
rsf.dispPlot(model)

# Make a plot in time
rsf.timePlot(model)
import numpy as np
import matplotlib.pyplot as plt
from math import log
from rsfmodel import rsf

# This is really just the Ruina realtion, but let's pretend we invented it!
# We'll inherit attributes from rsf.StateRelation, but you wouldn't have to.
# It does provide velocity contribution calculation for us though!
class MyStateRelation(rsf.StateRelation):
    # Need to provide a steady state calcualtion method
    def _set_steady_state(self, system):
        self.state = self.Dc/system.vref

    def evolve_state(self, system):
        if self.state is None:
            self.state = _set_steady_state(self, system)
        return -1 * (system.v * self.state / self.Dc) * log(system.v * self.state / self.Dc)

model = rsf.Model()

# Set model initial conditions
model.mu0 = 0.6  # Friction initial (at the reference velocity)
model.a = 0.01  # Empirical coefficient for the direct effect
model.k = 1e-3  # Normalized System stiffness (friction/micron)
model.v = 1.  # Initial slider velocity, generally is vlp(t=0)
model.vref = 1.  # Reference velocity, generally vlp(t=0)

state1 = MyStateRelation()
state1.b = 0.005  # Empirical coefficient for the evolution effect
state1.Dc = 10.  # Critical slip distance

model.state_relations = [state1]  # Which state relation we want to use

# We want to solve for 40 seconds at 100Hz
model.time = np.arange(0, 40.01, 0.01)

# We want to slide at 1 um/s for 10 s, then at 10 um/s for 31
lp_velocity = np.ones_like(model.time)
lp_velocity[10*100:] = 10.  # Velocity after 10 seconds is 10 um/s

# Set the model load point velocity, must be same shape as model.model_time
model.loadpoint_velocity = lp_velocity

# Run the model!
model.solve()

# Make the phase plot
rsf.phasePlot(model)

# Make a plot in displacement
rsf.dispPlot(model)

# Make a plot in time
rsf.timePlot(model)
mit
Python
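The old_contents above also carried a Python scoping bug the commit removes: evolve_state called _set_steady_state(self, system) as a bare name, which raises NameError because a method is only reachable through self (or the class). A distilled, self-contained reproduction of that pattern, with illustrative names:

class Relation(object):
    def _set_steady_state(self):
        self.state = 1.0

    def evolve_state(self):
        # Bug pattern from the old example: a bare name is looked up in
        # module scope, not on the instance, so this raises NameError.
        _set_steady_state(self)
        return self.state

try:
    Relation().evolve_state()
except NameError as err:
    print(err)  # name '_set_steady_state' is not defined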
abd3daed5cd0c70d76bf8fa1cfdda93efcda3e70
Make the `now` helper timezone aware
funkybob/knights-templater,funkybob/knights-templater
knights/compat/django.py
knights/compat/django.py
from django.core.urlresolvers import reverse
from django.utils import timezone
from django.utils.encoding import iri_to_uri

import datetime

from knights.library import Library

register = Library()


@register.helper
def now(fmt):
    return timezone.now().strftime(fmt)


@register.helper
def url(name, *args, **kwargs):
    try:
        return reverse(name, args=args, kwargs=kwargs)
    except:
        return None


@register.helper
def static(filename):
    try:
        from django.conf import settings
    except ImportError:
        prefix = ''
    else:
        prefix = iri_to_uri(getattr(settings, filename, ''))
    return prefix
from django.core.urlresolvers import reverse
from django.utils.encoding import iri_to_uri

import datetime

from knights.library import Library

register = Library()


@register.helper
def now(fmt):
    return datetime.datetime.now().strftime(fmt)


@register.helper
def url(name, *args, **kwargs):
    try:
        return reverse(name, args=args, kwargs=kwargs)
    except:
        return None


@register.helper
def static(filename):
    try:
        from django.conf import settings
    except ImportError:
        prefix = ''
    else:
        prefix = iri_to_uri(getattr(settings, filename, ''))
    return prefix
mit
Python
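The swap above matters because django.utils.timezone.now() returns an aware datetime when USE_TZ is enabled, while datetime.datetime.now() is naive, and mixing the two raises TypeError. A stdlib-only sketch of the distinction (no Django needed; timezone.utc stands in for what Django returns):

from datetime import datetime, timezone

naive = datetime.now()              # tzinfo is None
aware = datetime.now(timezone.utc)  # aware, like django.utils.timezone.now()

print(naive.tzinfo, aware.tzinfo)           # -> None UTC
print(aware.strftime('%Y-%m-%d %H:%M %Z'))  # %Z only renders for aware datetimes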
cbe4f5470fec966538f63ca9beb04838bfbf3aa3
change to contentnode_1 and contentnode_2 for content relationship
jayoshih/kolibri,learningequality/kolibri,jamalex/kolibri,66eli77/kolibri,rtibbles/kolibri,aronasorman/kolibri,jayoshih/kolibri,mrpau/kolibri,benjaoming/kolibri,jamalex/kolibri,rtibbles/kolibri,christianmemije/kolibri,DXCanas/kolibri,whitzhu/kolibri,DXCanas/kolibri,learningequality/kolibri,jayoshih/kolibri,lyw07/kolibri,christianmemije/kolibri,66eli77/kolibri,jtamiace/kolibri,whitzhu/kolibri,benjaoming/kolibri,aronasorman/kolibri,jamalex/kolibri,christianmemije/kolibri,ralphiee22/kolibri,indirectlylit/kolibri,66eli77/kolibri,DXCanas/kolibri,indirectlylit/kolibri,benjaoming/kolibri,MingDai/kolibri,jtamiace/kolibri,lyw07/kolibri,aronasorman/kolibri,jayoshih/kolibri,rtibbles/kolibri,aronasorman/kolibri,ralphiee22/kolibri,jonboiser/kolibri,learningequality/kolibri,mrpau/kolibri,jonboiser/kolibri,lyw07/kolibri,jamalex/kolibri,lyw07/kolibri,benjaoming/kolibri,learningequality/kolibri,jonboiser/kolibri,ralphiee22/kolibri,MingDai/kolibri,jtamiace/kolibri,indirectlylit/kolibri,ralphiee22/kolibri,indirectlylit/kolibri,whitzhu/kolibri,DXCanas/kolibri,rtibbles/kolibri,mrpau/kolibri,MingDai/kolibri,mrpau/kolibri,MingDai/kolibri,christianmemije/kolibri,jtamiace/kolibri,66eli77/kolibri,whitzhu/kolibri,jonboiser/kolibri
kolibri/content/admin.py
kolibri/content/admin.py
from django.contrib import admin
from .models import PrerequisiteContentRelationship, RelatedContentRelationship


class PrerequisiteRelationshipInline1(admin.TabularInline):
    model = PrerequisiteContentRelationship
    fk_name = 'contentnode_1'
    max = 20
    extra = 0


class PrerequisiteRelationshipInline2(admin.TabularInline):
    model = PrerequisiteContentRelationship
    fk_name = 'contentnode_2'
    max = 20
    extra = 0


class RelatedRelationshipInline1(admin.TabularInline):
    model = RelatedContentRelationship
    fk_name = 'contentnode_1'
    max = 20
    extra = 0


class RelatedRelationshipInline2(admin.TabularInline):
    model = RelatedContentRelationship
    fk_name = 'contentnode_2'
    max = 20
    extra = 0


class ContentNodeAdmin(admin.ModelAdmin):
    inlines = (PrerequisiteRelationshipInline1, PrerequisiteRelationshipInline2, RelatedRelationshipInline1, RelatedRelationshipInline2)
from django.contrib import admin
from .models import PrerequisiteContentRelationship, RelatedContentRelationship


class PrerequisiteRelationshipInline1(admin.TabularInline):
    model = PrerequisiteContentRelationship
    fk_name = 'contentmetadata_1'
    max = 20
    extra = 0


class PrerequisiteRelationshipInline2(admin.TabularInline):
    model = PrerequisiteContentRelationship
    fk_name = 'contentmetadata_2'
    max = 20
    extra = 0


class RelatedRelationshipInline1(admin.TabularInline):
    model = RelatedContentRelationship
    fk_name = 'contentmetadata_1'
    max = 20
    extra = 0


class RelatedRelationshipInline2(admin.TabularInline):
    model = RelatedContentRelationship
    fk_name = 'contentmetadata_2'
    max = 20
    extra = 0


class ContentMetadataAdmin(admin.ModelAdmin):
    inlines = (PrerequisiteRelationshipInline1, PrerequisiteRelationshipInline2, RelatedRelationshipInline1, RelatedRelationshipInline2)
mit
Python
d43657286f49271a6236499bdba288925fb23087
update tests to v1.2.0 (#1307)
jmluy/xpython,exercism/python,exercism/xpython,smalley/python,smalley/python,exercism/python,N-Parsons/exercism-python,behrtam/xpython,jmluy/xpython,exercism/xpython,behrtam/xpython,N-Parsons/exercism-python
exercises/roman-numerals/roman_numerals_test.py
exercises/roman-numerals/roman_numerals_test.py
import unittest

import roman_numerals

# Tests adapted from `problem-specifications//canonical-data.json` @ v1.2.0


class RomanTest(unittest.TestCase):
    numerals = {
        1: 'I',
        2: 'II',
        3: 'III',
        4: 'IV',
        5: 'V',
        6: 'VI',
        9: 'IX',
        27: 'XXVII',
        48: 'XLVIII',
        49: 'XLIX',
        59: 'LIX',
        93: 'XCIII',
        141: 'CXLI',
        163: 'CLXIII',
        402: 'CDII',
        575: 'DLXXV',
        911: 'CMXI',
        1024: 'MXXIV',
        3000: 'MMM',
    }

    def test_numerals(self):
        for arabic, numeral in self.numerals.items():
            self.assertEqual(roman_numerals.numeral(arabic), numeral)


if __name__ == '__main__':
    unittest.main()
import unittest

import roman_numerals

# Tests adapted from `problem-specifications//canonical-data.json` @ v1.0.0


class RomanTest(unittest.TestCase):
    numerals = {
        1: 'I',
        2: 'II',
        3: 'III',
        4: 'IV',
        5: 'V',
        6: 'VI',
        9: 'IX',
        27: 'XXVII',
        48: 'XLVIII',
        59: 'LIX',
        93: 'XCIII',
        141: 'CXLI',
        163: 'CLXIII',
        402: 'CDII',
        575: 'DLXXV',
        911: 'CMXI',
        1024: 'MXXIV',
        3000: 'MMM',
    }

    def test_numerals(self):
        for arabic, numeral in self.numerals.items():
            self.assertEqual(roman_numerals.numeral(arabic), numeral)


if __name__ == '__main__':
    unittest.main()
mit
Python
34b2385d6a3bb7acdbcd3f894d30dfbc734bd52e
allow to set matplotlib backend from env MATPLOTLIB_BACKEND
has2k1/plotnine,has2k1/plotnine
ggplot/__init__.py
ggplot/__init__.py
# For testing purposes we might need to set mpl backend before any
# other import of matplotlib.
def _set_mpl_backend():
    import os
    import matplotlib as mpl

    env_backend = os.environ.get('MATPLOTLIB_BACKEND')
    if env_backend:
        # we were instructed
        mpl.use(env_backend)


_set_mpl_backend()


from .ggplot import *
from .exampledata import *
from .ggplot import *
from .exampledata import *
mit
Python
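The _set_mpl_backend() trick above only works because matplotlib.use() runs before pyplot is imported anywhere. A standalone sketch of the same pattern; 'Agg' is just an example value one might export as MATPLOTLIB_BACKEND for headless test runs:

import os
import matplotlib

backend = os.environ.get('MATPLOTLIB_BACKEND')  # e.g. export MATPLOTLIB_BACKEND=Agg
if backend:
    matplotlib.use(backend)  # must precede the first `import matplotlib.pyplot`

import matplotlib.pyplot as plt

print(matplotlib.get_backend())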
4e9a530403dce47f322df471255a0fc40fd1071f
Change number of episodes to 60000
davidrobles/mlnd-capstone-code
examples/tic_ql_tabular_selfplay_all.py
examples/tic_ql_tabular_selfplay_all.py
'''
The Q-learning algorithm is used to learn the state-action values for all
Tic-Tac-Toe positions by playing games against itself (self-play).
'''
from capstone.game.games import TicTacToe
from capstone.game.players import RandPlayer
from capstone.rl import Environment, GameMDP
from capstone.rl.learners import QLearningSelfPlay
from capstone.rl.policies import EGreedy, RandomPolicy
from capstone.rl.utils import EpisodicWLDPlotter
from capstone.rl.value_functions import TabularQ

game = TicTacToe()
env = Environment(GameMDP(game))
tabularq = TabularQ(random_state=23)
egreedy = EGreedy(env.actions, tabularq, epsilon=0.5, random_state=23)
rand_policy = RandomPolicy(env.actions, random_state=23)
qlearning = QLearningSelfPlay(
    env=env,
    qf=tabularq,
    policy=rand_policy,
    learning_rate=0.1,
    discount_factor=0.99,
    n_episodes=60000,
    verbose=0,
    callbacks=[
        EpisodicWLDPlotter(
            game=game,
            opp_player=RandPlayer(random_state=23),
            n_matches=2000,
            period=1000,
            filename='tic_ql_tabular_selfplay_all.pdf'
        )
    ]
)
qlearning.learn()
'''
The Q-learning algorithm is used to learn the state-action values for all
Tic-Tac-Toe positions by playing games against itself (self-play).
'''
from capstone.game.games import TicTacToe
from capstone.game.players import RandPlayer
from capstone.rl import Environment, GameMDP
from capstone.rl.learners import QLearningSelfPlay
from capstone.rl.policies import EGreedy, RandomPolicy
from capstone.rl.utils import EpisodicWLDPlotter
from capstone.rl.value_functions import TabularQ

game = TicTacToe()
env = Environment(GameMDP(game))
tabularq = TabularQ(random_state=23)
egreedy = EGreedy(env.actions, tabularq, epsilon=0.5, random_state=23)
rand_policy = RandomPolicy(env.actions, random_state=23)
qlearning = QLearningSelfPlay(
    env=env,
    qf=tabularq,
    policy=rand_policy,
    learning_rate=0.1,
    discount_factor=0.99,
    n_episodes=3000,
    verbose=0,
    callbacks=[
        EpisodicWLDPlotter(
            game=game,
            opp_player=RandPlayer(random_state=23),
            n_matches=2000,
            period=1000,
            filename='tic_ql_tabular_selfplay_all.pdf'
        )
    ]
)
qlearning.learn()
mit
Python
f694b2a234216ae7ecc7b925799f43caca5e9a32
add more debug output for config file
fretboardfreak/escadrille,fretboardfreak/escadrille,fretboardfreak/escadrille
preprocess.py
preprocess.py
#!/usr/bin/env python3
# Copyright 2016 Curtis Sand <curtissand@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Preprocessor script for Squadron."""

import sys

from lib.cmdline import PreprocessUI
from lib.cmdline import dprint
from lib.cmdline import vprint
from lib.config import ConfigFile

VERSION = "0.0"


def main():
    """Main method for Squadron preprocessor."""
    user_interface = PreprocessUI(version=VERSION)
    options = user_interface.parse_cmd_line()
    dprint('cmdline args: %s' % options)
    vprint('Parsing Config File...')
    config_file = ConfigFile(options.config)
    config_file.load()
    dprint('Config Sections: %s' % config_file.parser.sections())
    for section in config_file.parser.sections():
        dprint('Options in section %s: %s' % (section, config_file.parser.options(section)))
    dprint('Enabled Tasks: %s' % config_file.enabled_tasks)
    dprint('Copy Files Jobs:')
    for cfj in list(config_file.copy_files_jobs):
        dprint("%s:" % cfj.name)
        dprint(" sources: %s" % ' '.join(cfj.value.sources.value))
        dprint(" destination: %s" % cfj.value.destination.value)
    return 0


if __name__ == '__main__':
    try:
        sys.exit(main())
    except SystemExit:
        sys.exit(0)
    except KeyboardInterrupt:
        print('...interrupted by user, exiting.')
        sys.exit(1)
    except Exception as exc:
        import lib.cmdline
        if lib.cmdline.DEBUG:
            raise
        else:
            print(exc)
            sys.exit(1)
#!/usr/bin/env python3
# Copyright 2016 Curtis Sand <curtissand@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Preprocessor script for Squadron."""

import sys

from lib.cmdline import PreprocessUI
from lib.cmdline import dprint
from lib.cmdline import vprint
from lib.config import ConfigFile

VERSION = "0.0"


def main():
    """Main method for Squadron preprocessor."""
    user_interface = PreprocessUI(version=VERSION)
    options = user_interface.parse_cmd_line()
    dprint('cmdline args: %s' % options)
    vprint('Parsing Config File...')
    config_file = ConfigFile(options.config)
    config_file.load()
    dprint('Config Sections: %s' % config_file.parser.sections())
    for section in config_file.parser.sections():
        dprint('Options in section %s: %s' % (section, config_file.parser.options(section)))
    return 0


if __name__ == '__main__':
    try:
        sys.exit(main())
    except SystemExit:
        sys.exit(0)
    except KeyboardInterrupt:
        print('...interrupted by user, exiting.')
        sys.exit(1)
    except Exception as exc:
        import lib.cmdline
        if lib.cmdline.DEBUG:
            raise
        else:
            print(exc)
            sys.exit(1)
apache-2.0
Python
292fd33a3d251b4c5773e96989e45d8e3d7c6c3b
Change tasks in settings
vtemian/kruncher
krunchr/settings/base.py
krunchr/settings/base.py
from socket import gethostname

HOSTNAME = gethostname()
HOSTNAME_SHORT = HOSTNAME.split('.')[0]

APPLICATION_ROOT = '/v1/krunchr'

DEBUG = True

RETHINKDB_HOST = 'batman.krunchr.net'
RETHINKDB_PORT = 28019
RETHINKDB_AUTH = ''
RETHINKDB_DB = 'krunchr'

BROKER_URL = 'redis://localhost:6379'
CELERY_RESULT_BACKEND = 'redis://localhost:6379'

# we must use a safe serializer in order to run celery as root
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_ACCEPT_CONTENT = ['json']
CELERYD_HIJACK_ROOT_LOGGER = False

CELERY_IMPORTS = ('analyser.tasks', 'utils.tasks.execute')

DISCO_FILES = '/tmp'
DISCO_NODES = '3'
from socket import gethostname

HOSTNAME = gethostname()
HOSTNAME_SHORT = HOSTNAME.split('.')[0]

APPLICATION_ROOT = '/v1/krunchr'

DEBUG = True

RETHINKDB_HOST = 'batman.krunchr.net'
RETHINKDB_PORT = 28019
RETHINKDB_AUTH = ''
RETHINKDB_DB = 'krunchr'

BROKER_URL = 'redis://localhost:6379'
CELERY_RESULT_BACKEND = 'redis://localhost:6379'

# we must use a safe serializer in order to run celery as root
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_ACCEPT_CONTENT = ['json']
CELERYD_HIJACK_ROOT_LOGGER = False

CELERY_IMPORTS = ('analyser.tasks', 'map.jobs.sum', 'utils.tasks.execute')

DISCO_FILES = '/tmp'
DISCO_NODES = '3'
apache-2.0
Python
2d391f9e6183f06c0785cbb2c57b7a4fcf703a80
Bump version to 0.1a14
letuananh/chirptext,letuananh/chirptext
chirptext/__version__.py
chirptext/__version__.py
# -*- coding: utf-8 -*-

# chirptext's package version information
__author__ = "Le Tuan Anh"
__email__ = "tuananh.ke@gmail.com"
__copyright__ = "Copyright (c) 2012, Le Tuan Anh"
__credits__ = []
__license__ = "MIT License"
__description__ = "ChirpText is a collection of text processing tools for Python."
__url__ = "https://github.com/letuananh/chirptext"
__maintainer__ = "Le Tuan Anh"
__version_major__ = "0.1"
__version__ = "{}a14".format(__version_major__)
__version_long__ = "{} - Alpha".format(__version_major__)
__status__ = "Prototype"
# -*- coding: utf-8 -*-

# chirptext's package version information
__author__ = "Le Tuan Anh"
__email__ = "tuananh.ke@gmail.com"
__copyright__ = "Copyright (c) 2012, Le Tuan Anh"
__credits__ = []
__license__ = "MIT License"
__description__ = "ChirpText is a collection of text processing tools for Python."
__url__ = "https://github.com/letuananh/chirptext"
__maintainer__ = "Le Tuan Anh"
__version_major__ = "0.1"
__version__ = "{}a13".format(__version_major__)
__version_long__ = "{} - Alpha".format(__version_major__)
__status__ = "Prototype"
mit
Python
6436a8adf4088f59c704a7d49e5f61c30d665058
return true in ping cli
longaccess/longaccess-client,longaccess/longaccess-client,longaccess/longaccess-client
lacli/server/__init__.py
lacli/server/__init__.py
from lacli.decorators import command
from lacli.command import LaBaseCommand
from twisted.python.log import startLogging, msg
from twisted.internet import reactor
from thrift.transport import TTwisted
from thrift.protocol import TBinaryProtocol
from lacli.server.interface.ClientInterface import CLI

import sys
import zope


class LaServerCommand(LaBaseCommand):
    """Run a RPC server

    Usage: lacli server [--no-detach] [--port <port>]

    Options:
        --no-detach     don't detach from terminal
        --port <port>   port to listen on [default: 9090]
    """
    zope.interface.implements(CLI.Iface)

    prompt = 'lacli:server> '

    def makecmd(self, options):
        cmd = ["run"]
        if options['--port']:
            cmd.append(options['--port'])
        return " ".join(cmd)

    @command(port=int)
    def do_run(self, port=9090):
        """
        Usage: run [<port>]
        """
        reactor.listenTCP(port, TTwisted.ThriftServerFactory(
            processor=CLI.Processor(self),
            iprot_factory=TBinaryProtocol.TBinaryProtocolFactory()))
        startLogging(sys.stderr)
        msg('Running reactor')
        reactor.run()

    def PingCLI(self):
        msg('pingCLI()')
        return True
from lacli.decorators import command
from lacli.command import LaBaseCommand
from twisted.python.log import startLogging, msg
from twisted.internet import reactor
from thrift.transport import TTwisted
from thrift.protocol import TBinaryProtocol
from lacli.server.interface.ClientInterface import CLI

import sys
import zope


class LaServerCommand(LaBaseCommand):
    """Run a RPC server

    Usage: lacli server [--no-detach] [--port <port>]

    Options:
        --no-detach     don't detach from terminal
        --port <port>   port to listen on [default: 9090]
    """
    zope.interface.implements(CLI.Iface)

    prompt = 'lacli:server> '

    def makecmd(self, options):
        cmd = ["run"]
        if options['--port']:
            cmd.append(options['--port'])
        return " ".join(cmd)

    @command(port=int)
    def do_run(self, port=9090):
        """
        Usage: run [<port>]
        """
        reactor.listenTCP(port, TTwisted.ThriftServerFactory(
            processor=CLI.Processor(self),
            iprot_factory=TBinaryProtocol.TBinaryProtocolFactory()))
        startLogging(sys.stderr)
        msg('Running reactor')
        reactor.run()

    def PingCLI(self):
        msg('pingCLI()')
apache-2.0
Python
af40e69bca873a7d0060aaf3391fb8feb91bf673
Use correct format for custom payload keys
ebmdatalab/openprescribing,ebmdatalab/openprescribing,ebmdatalab/openprescribing,annapowellsmith/openpresc,annapowellsmith/openpresc,annapowellsmith/openpresc,annapowellsmith/openpresc,ebmdatalab/openprescribing
openprescribing/frontend/signals/handlers.py
openprescribing/frontend/signals/handlers.py
import logging

from allauth.account.signals import user_logged_in
from anymail.signals import tracking
from requests_futures.sessions import FuturesSession

from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.conf import settings

from common.utils import google_user_id
from frontend.models import Profile

logger = logging.getLogger(__name__)


@receiver(post_save, sender=User)
def handle_user_save(sender, instance, created, **kwargs):
    if created:
        Profile.objects.create(user=instance)


@receiver(user_logged_in, sender=User)
def handle_user_logged_in(sender, request, user, **kwargs):
    user.searchbookmark_set.update(approved=True)
    user.orgbookmark_set.update(approved=True)


def send_ga_event(event):
    user = User.objects.filter(email=event.recipient)
    if user:
        user = user[0]
        session = FuturesSession()
        payload = {
            'v': 1,
            'tid': settings.GOOGLE_TRACKING_ID,
            'cid': google_user_id(user),
            't': 'event',
            'ec': 'email',
            'ea': event.event_type,
            'ua': event.user_agent,
            'cm': 'email',
            'dt': event.subject,
            'cn': event.campaign_name,
            'cs': event.campaign_source,
            'dp': "/email/%s/%s/%s/%s" % (
                event.campaign_name,
                event.campaign_source,
                event.user_id,
                event.event_type
            )
        }
        logger.debug("Recording event in Analytics: %s" % payload)
        session.post(
            'https://www.google-analytics.com/collect', data=payload)
    else:
        logger.error("Could not find receipient %s" % event.recipient)


@receiver(tracking)
def handle_anymail_webhook(sender, event, esp_name, **kwargs):
    logger.info("Received webhook from %s: %s" % (esp_name, event.event_type))
    logger.debug("Full event data: %s" % event.__dict__)
    send_ga_event(event)
import logging

from allauth.account.signals import user_logged_in
from anymail.signals import tracking
from requests_futures.sessions import FuturesSession

from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.conf import settings

from common.utils import google_user_id
from frontend.models import Profile

logger = logging.getLogger(__name__)


@receiver(post_save, sender=User)
def handle_user_save(sender, instance, created, **kwargs):
    if created:
        Profile.objects.create(user=instance)


@receiver(user_logged_in, sender=User)
def handle_user_logged_in(sender, request, user, **kwargs):
    user.searchbookmark_set.update(approved=True)
    user.orgbookmark_set.update(approved=True)


def send_ga_event(event):
    user = User.objects.filter(email=event.recipient)
    if user:
        user = user[0]
        session = FuturesSession()
        payload = {
            'v': 1,
            'tid': settings.GOOGLE_TRACKING_ID,
            'cid': google_user_id(user),
            't': 'event',
            'ec': 'email',
            'ea': event.event_type,
            'ua': event.user_agent,
            'cm': 'email',
        }
        if event.metadata:
            payload['dt'] = event.metadata['subject']
            payload['cn'] = event.metadata['campaign_name']
            payload['cs'] = event.metadata['campaign_source']
            payload['dp'] = "/email/%s/%s/%s/%s" % (
                event.metadata['campaign_name'],
                event.metadata['campaign_source'],
                event.metadata['user_id'],
                event.event_type
            )
        else:
            logger.info("No metadata found for event type %s" % event.event_type)
            logger.debug("Full event data: %s" % event.__dict__)
        logger.debug("Recording event in Analytics: %s" % payload)
        session.post(
            'https://www.google-analytics.com/collect', data=payload)
    else:
        logger.error("Could not find receipient %s" % event.recipient)


@receiver(tracking)
def handle_anymail_webhook(sender, event, esp_name, **kwargs):
    logger.info("Received webhook from %s: %s" % (esp_name, event.event_type))
    send_ga_event(event)
mit
Python
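The fix above promotes fields that previously lived under event.metadata to top-level Universal Analytics Measurement Protocol parameters: dt (title), cn/cs (campaign name/source), dp (page path). A hedged sketch of such a hit with placeholder values; the endpoint is the same collect URL the handler posts to:

import requests

payload = {
    'v': 1,                    # protocol version
    'tid': 'UA-XXXXX-Y',       # placeholder tracking id
    'cid': '555',              # client id
    't': 'event',
    'ec': 'email',             # event category
    'ea': 'opened',            # event action
    'dt': 'Monthly alert',     # document title
    'cn': 'monthly_update',    # campaign name
    'cs': 'email',             # campaign source
    'dp': '/email/monthly_update/email/555/opened',  # synthetic page path
}
requests.post('https://www.google-analytics.com/collect', data=payload)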
52a6ea1e7dd4333b9db6a0bbd53b8ae0b39a1f6d
Add __doc__ to module functions
AndersonMasese/Myshop,AndersonMasese/Myshop,AndersonMasese/Myshop
Designs/redundant.py
Designs/redundant.py
'''Due to the needs arising from completing the project on time, I have
defined redundant.py which will hold replacement modules as I migrate from
file based application to lists only web application. This modules so far
will offer the capabilities of registration, creating a shopping list and
adding items into a shopping list'''

global account
account=[]

def register(username,email,password):
    '''registration list'''
    account.append(username)
    account.append(email)
    account.append(password)
    return account

global shopping_list_container
shopping_list_container=[]#contain shopping lists only

def create(list_name):
    '''container of names of shopping lists'''
    #list_name=[]
    shopping_list_container.append(list_name)
    return shopping_list_container#list of dictionaries

def list_update(nameoflist,item):
    '''adding item to a given name of list'''
    nameoflist.append(item)
    shopping_list_container.append(nameoflist)

global itemsdictionary
itemsdictionary={}

def create1(slist):
    '''update shopping lists with key (names) and items(as dictionaris)'''
    itemsdictionary.update(slist)

global shared_shopping_list_container
shared_shopping_list_container=[]

def create3(list_name):
    '''container for the shared lists. In future may be integrated with facebook'''
    #list_name=[]
    shared_shopping_list_container.append(list_name)
    return shared_shopping_list_container#list of dictionaries

global shareditemsdictionary
shareditemsdictionary={}

def create2(slist):
    '''updating shared dictionary'''
    shareditemsdictionary.update(slist)
'''Due to the needs arising from completing the project on time, I have
defined redundant.py which will hold replacement modules as I migrate from
file based application to lists only web application. This modules so far
will offer the capabilities of registration, creating a shopping list and
adding items into a shopping list'''

global account
account=[]

def register(username,email,password):
    account.append(username)
    account.append(email)
    account.append(password)
    return account

global shopping_list_container
shopping_list_container=[]#contain shopping lists only

def create(list_name):
    #list_name=[]
    shopping_list_container.append(list_name)
    return shopping_list_container#list of dictionaries

def list_update(nameoflist,item):
    nameoflist.append(item)
    shopping_list_container.append(nameoflist)

global itemsdictionary
itemsdictionary={}

def create1(slist):
    itemsdictionary.update(slist)

global shared_shopping_list_container
shared_shopping_list_container=[]

def create3(list_name):
    #list_name=[]
    shared_shopping_list_container.append(list_name)
    return shared_shopping_list_container#list of dictionaries

global shareditemsdictionary
shareditemsdictionary={}

def create2(slist):
    shareditemsdictionary.update(slist)
mit
Python
111cdf1496074e25b764e042fa0ab1b7b0e2a2b7
Add agriculture import to calendar registry
rsheftel/pandas_market_calendars,rsheftel/pandas_market_calendars
pandas_market_calendars/calendar_registry.py
pandas_market_calendars/calendar_registry.py
from .market_calendar import MarketCalendar
from .exchange_calendar_asx import ASXExchangeCalendar
from .exchange_calendar_bmf import BMFExchangeCalendar
from .exchange_calendar_cfe import CFEExchangeCalendar
from .exchange_calendar_cme import CMEExchangeCalendar
from .exchange_calendar_cme_agriculture import CMEAgricultureExchangeCalendar
from .exchange_calendar_eurex import EUREXExchangeCalendar
from .exchange_calendar_hkex import HKEXExchangeCalendar
from .exchange_calendar_ice import ICEExchangeCalendar
from .exchange_calendar_jpx import JPXExchangeCalendar
from .exchange_calendar_lse import LSEExchangeCalendar
from .exchange_calendar_nyse import NYSEExchangeCalendar
from .exchange_calendar_ose import OSEExchangeCalendar
from .exchange_calendar_six import SIXExchangeCalendar
from .exchange_calendar_sse import SSEExchangeCalendar
from .exchange_calendar_tsx import TSXExchangeCalendar


def get_calendar(name, open_time=None, close_time=None):
    """
    Retrieves an instance of an MarketCalendar whose name is given.

    :param name: The name of the MarketCalendar to be retrieved.
    :param open_time: Market open time override as datetime.time object. If None then default is used.
    :param close_time: Market close time override as datetime.time object. If None then default is used.
    :return: MarketCalendar of the desired calendar.
    """
    return MarketCalendar.factory(name, open_time=open_time, close_time=close_time)


def get_calendar_names():
    """All Market Calendar names and aliases that can be used in "factory"
    :return: list(str)
    """
    return MarketCalendar.calendar_names()
from .market_calendar import MarketCalendar
from .exchange_calendar_asx import ASXExchangeCalendar
from .exchange_calendar_bmf import BMFExchangeCalendar
from .exchange_calendar_cfe import CFEExchangeCalendar
from .exchange_calendar_cme import CMEExchangeCalendar
from .exchange_calendar_eurex import EUREXExchangeCalendar
from .exchange_calendar_hkex import HKEXExchangeCalendar
from .exchange_calendar_ice import ICEExchangeCalendar
from .exchange_calendar_jpx import JPXExchangeCalendar
from .exchange_calendar_lse import LSEExchangeCalendar
from .exchange_calendar_nyse import NYSEExchangeCalendar
from .exchange_calendar_ose import OSEExchangeCalendar
from .exchange_calendar_six import SIXExchangeCalendar
from .exchange_calendar_sse import SSEExchangeCalendar
from .exchange_calendar_tsx import TSXExchangeCalendar


def get_calendar(name, open_time=None, close_time=None):
    """
    Retrieves an instance of an MarketCalendar whose name is given.

    :param name: The name of the MarketCalendar to be retrieved.
    :param open_time: Market open time override as datetime.time object. If None then default is used.
    :param close_time: Market close time override as datetime.time object. If None then default is used.
    :return: MarketCalendar of the desired calendar.
    """
    return MarketCalendar.factory(name, open_time=open_time, close_time=close_time)


def get_calendar_names():
    """All Market Calendar names and aliases that can be used in "factory"
    :return: list(str)
    """
    return MarketCalendar.calendar_names()
mit
Python
1534c3c47fea71db2cf4f9f224c2e5ff5a8632e2
Remove audio file after playing, not while
milkey-mouse/swood
tests/test.py
tests/test.py
from time import sleep
import sys
import os

sys.path.insert(0, os.path.realpath("../../swood"))
import swood


def find_program(prog):
    for path in os.environ["PATH"].split(os.pathsep):
        vlc_location = os.path.join(path.strip('"'), prog)
        if os.path.isfile(fpath):
            return vlc_location, args
    return None


def play_audio(clip):
    import subprocess
    if os.name == "nt":
        if os.path.isfile("C:/Program Files (x86)/VideoLAN/VLC/vlc.exe"):
            return subprocess.Popen(["C:/Program Files (x86)/VideoLAN/VLC/vlc.exe", clip, "vlc://quit"])
        elif find_program("vlc.exe"):
            return subprocess.Popen([find_program("vlc.exe"), clip, "vlc://quit"])
        elif os.path.isfile("C:/Program Files (x86)/Windows Media Player/wmplayer.exe"):
            return subprocess.Popen(["C:/Program Files (x86)/Windows Media Player/wmplayer.exe", clip, "/Play", "/Close"])
        elif find_program("wmplayer.exe"):
            return subprocess.Popen([find_program("wmplayer.exe"), clip, "/Play", "/Close"])
        else:
            raise FileNotFoundError("Can't find an audio player.")

running_player = None


def run(midi, *args, play=False):
    global running_player
    print("~~~~~~~~~~ Testing '{}' ~~~~~~~~~~".format(midi))
    out = "outputs/" + midi + ".wav"
    swood.run_cmd(["samples/doot.wav", "midis/" + midi + ".mid", out, "--no-pbar", *args])
    if play:
        if not os.path.isfile(out):
            return
        if running_player:
            running_player.wait()
            os.remove(running_player.args[1])
        running_player = play_audio(out)

if sys.argv[1] == "playall":
    try:
        run("beethoven", play=True)
        run("dummy", play=True)
        run("pitchbend", play=True)
    finally:
        import glob
        for wav in glob.iglob("outputs/*.wav"):
            os.remove(wav)
elif sys.argv[1] == "all":
    run("beethoven")
    run("dummy")
    run("pitchbend")
elif sys.argv[1] == "bend":
    run("pitchbend")
from time import sleep
import sys
import os

sys.path.insert(0, os.path.realpath("../../swood"))
import swood


def find_program(prog):
    for path in os.environ["PATH"].split(os.pathsep):
        vlc_location = os.path.join(path.strip('"'), prog)
        if os.path.isfile(fpath):
            return vlc_location, args
    return None


def play_audio(clip):
    import subprocess
    if os.name == "nt":
        if os.path.isfile("C:/Program Files (x86)/VideoLAN/VLC/vlc.exe"):
            return subprocess.Popen(["C:/Program Files (x86)/VideoLAN/VLC/vlc.exe", clip, "vlc://quit"])
        elif find_program("vlc.exe"):
            return subprocess.Popen([find_program("vlc.exe"), clip, "vlc://quit"])
        elif os.path.isfile("C:/Program Files (x86)/Windows Media Player/wmplayer.exe"):
            return subprocess.Popen(["C:/Program Files (x86)/Windows Media Player/wmplayer.exe", clip, "/Play", "/Close"])
        elif find_program("wmplayer.exe"):
            return subprocess.Popen([find_program("wmplayer.exe"), clip, "/Play", "/Close"])
        else:
            raise FileNotFoundError("Can't find an audio player.")

running_player = None


def run(midi, *args, play=False):
    global running_player
    print("~~~~~~~~~~ Testing '{}' ~~~~~~~~~~".format(midi))
    out = "outputs/" + midi + ".wav"
    swood.run_cmd(["samples/doot.wav", "midis/" + midi + ".mid", out, "--no-pbar", *args])
    if play:
        if not os.path.isfile(out):
            return
        if running_player:
            os.remove(out)
            running_player.wait()
        running_player = play_audio(out)

if sys.argv[1] == "playall":
    try:
        run("beethoven", play=True)
        run("dummy", play=True)
        run("pitchbend", play=True)
    finally:
        import glob
        for wav in glob.iglob("outputs/*.wav"):
            os.remove(wav)
elif sys.argv[1] == "all":
    run("beethoven")
    run("dummy")
    run("pitchbend")
elif sys.argv[1] == "bend":
    run("pitchbend")
mit
Python
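Reduced to its essence, the diff above is a two-line ordering fix: let the player process finish with the file before unlinking it. A minimal sketch with a placeholder player command (Popen.args, available since Python 3.3, holds the launch argv, so args[1] is the clip path):

import os
import subprocess

player = subprocess.Popen(['aplay', 'outputs/clip.wav'])  # placeholder player
player.wait()               # 1) wait until the process is done with the file
os.remove(player.args[1])   # 2) only then delete 'outputs/clip.wav'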
f2350b4be2e88f282e7a49cafebb7e8e7c37efd9
Bump version
TyVik/YaDiskClient
YaDiskClient/__init__.py
YaDiskClient/__init__.py
""" Client for Yandex.Disk. """ __version__ = '1.0.1'
""" Client for Yandex.Disk. """ __version__ = '0.5.1'
mit
Python
9a1a346999b77ef7485913c79d7d854c779ecd0d
add version 2.10.0 (#15205)
iulian787/spack,iulian787/spack,iulian787/spack,LLNL/spack,LLNL/spack,LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack,LLNL/spack
var/spack/repos/builtin/packages/py-h5py/package.py
var/spack/repos/builtin/packages/py-h5py/package.py
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyH5py(PythonPackage):
    """The h5py package provides both a high- and low-level interface to the
    HDF5 library from Python."""

    homepage = "http://www.h5py.org/"
    url = "https://pypi.io/packages/source/h/h5py/h5py-2.10.0.tar.gz"

    import_modules = ['h5py', 'h5py._hl']

    version('2.10.0', sha256='84412798925dc870ffd7107f045d7659e60f5d46d1c70c700375248bf6bf512d')
    version('2.9.0', sha256='9d41ca62daf36d6b6515ab8765e4c8c4388ee18e2a665701fef2b41563821002')
    version('2.8.0', sha256='e626c65a8587921ebc7fb8d31a49addfdd0b9a9aa96315ea484c09803337b955')
    version('2.7.1', sha256='180a688311e826ff6ae6d3bda9b5c292b90b28787525ddfcb10a29d5ddcae2cc')
    version('2.7.0', sha256='79254312df2e6154c4928f5e3b22f7a2847b6e5ffb05ddc33e37b16e76d36310')
    version('2.6.0', sha256='b2afc35430d5e4c3435c996e4f4ea2aba1ea5610e2d2f46c9cae9f785e33c435')
    version('2.5.0', sha256='9833df8a679e108b561670b245bcf9f3a827b10ccb3a5fa1341523852cfac2f6')
    version('2.4.0', sha256='faaeadf4b8ca14c054b7568842e0d12690de7d5d68af4ecce5d7b8fc104d8e60')

    variant('mpi', default=True, description='Build with MPI support')

    # Build dependencies
    depends_on('py-cython@0.23:', type='build')
    depends_on('py-pkgconfig', type='build')
    depends_on('py-setuptools', type='build')

    # Build and runtime dependencies
    depends_on('py-cached-property@1.5:', type=('build', 'run'))
    depends_on('py-numpy@1.7:', type=('build', 'run'))
    depends_on('py-six', type=('build', 'run'))

    # Link dependencies
    depends_on('hdf5@1.8.4:+hl')

    # MPI dependencies
    depends_on('hdf5+mpi', when='+mpi')
    depends_on('mpi', when='+mpi')
    depends_on('py-mpi4py', when='+mpi', type=('build', 'run'))

    phases = ['configure', 'install']

    def configure(self, spec, prefix):
        self.setup_py('configure', '--hdf5={0}'.format(spec['hdf5'].prefix))

        if '+mpi' in spec:
            env['CC'] = spec['mpi'].mpicc
            self.setup_py('configure', '--mpi')
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyH5py(PythonPackage):
    """The h5py package provides both a high- and low-level interface to the
    HDF5 library from Python."""

    homepage = "http://www.h5py.org/"
    url = "https://pypi.io/packages/source/h/h5py/h5py-2.9.0.tar.gz"

    import_modules = ['h5py', 'h5py._hl']

    version('2.9.0', sha256='9d41ca62daf36d6b6515ab8765e4c8c4388ee18e2a665701fef2b41563821002')
    version('2.8.0', sha256='e626c65a8587921ebc7fb8d31a49addfdd0b9a9aa96315ea484c09803337b955')
    version('2.7.1', sha256='180a688311e826ff6ae6d3bda9b5c292b90b28787525ddfcb10a29d5ddcae2cc')
    version('2.7.0', sha256='79254312df2e6154c4928f5e3b22f7a2847b6e5ffb05ddc33e37b16e76d36310')
    version('2.6.0', sha256='b2afc35430d5e4c3435c996e4f4ea2aba1ea5610e2d2f46c9cae9f785e33c435')
    version('2.5.0', sha256='9833df8a679e108b561670b245bcf9f3a827b10ccb3a5fa1341523852cfac2f6')
    version('2.4.0', sha256='faaeadf4b8ca14c054b7568842e0d12690de7d5d68af4ecce5d7b8fc104d8e60')

    variant('mpi', default=True, description='Build with MPI support')

    # Build dependencies
    depends_on('py-cython@0.23:', type='build')
    depends_on('py-pkgconfig', type='build')
    depends_on('py-setuptools', type='build')

    # Build and runtime dependencies
    depends_on('py-numpy@1.7:', type=('build', 'run'))
    depends_on('py-six', type=('build', 'run'))

    # Link dependencies
    depends_on('hdf5@1.8.4:+hl')

    # MPI dependencies
    depends_on('hdf5+mpi', when='+mpi')
    depends_on('mpi', when='+mpi')
    depends_on('py-mpi4py', when='+mpi', type=('build', 'run'))

    phases = ['configure', 'install']

    def configure(self, spec, prefix):
        self.setup_py('configure', '--hdf5={0}'.format(spec['hdf5'].prefix))

        if '+mpi' in spec:
            env['CC'] = spec['mpi'].mpicc
            self.setup_py('configure', '--mpi')
lgpl-2.1
Python
6a7611c4c2a41d5bf316697df92636b2b43e9125
add version 2.1.2 to r-gsodr (#21035)
LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack
var/spack/repos/builtin/packages/r-gsodr/package.py
var/spack/repos/builtin/packages/r-gsodr/package.py
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class RGsodr(RPackage):
    """A Global Surface Summary of the Day (GSOD) Weather Data Client for R

    Provides automated downloading, parsing, cleaning, unit conversion and
    formatting of Global Surface Summary of the Day ('GSOD') weather data
    from the from the USA National Centers for Environmental Information
    ('NCEI'). Units are converted from from United States Customary System
    ('USCS') units to International System of Units ('SI'). Stations may be
    individually checked for number of missing days defined by the user,
    where stations with too many missing observations are omitted. Only
    stations with valid reported latitude and longitude values are permitted
    in the final data. Additional useful elements, saturation vapour
    pressure ('es'), actual vapour pressure ('ea') and relative humidity
    ('RH') are calculated from the original data using the improved
    August-Roche-Magnus approximation (Alduchov & Eskridge 1996) and
    included in the final data set. The resulting metadata include station
    identification information, country, state, latitude, longitude,
    elevation, weather observations and associated flags. For information
    on the 'GSOD' data from 'NCEI', please see the 'GSOD' 'readme.txt' file
    available from,
    <https://www1.ncdc.noaa.gov/pub/data/gsod/readme.txt>."""

    homepage = "https://docs.ropensci.org/GSODR/"
    url = "https://cloud.r-project.org/src/contrib/GSODR_2.1.1.tar.gz"
    list_url = "https://cloud.r-project.org/src/contrib/Archive/GSODR"

    version('2.1.2', sha256='4fc1d084b6c21055d8cc17a6a6dc412261aa0d4ef4079bcd73b580a8c16bf74e')
    version('2.1.1', sha256='dba732e5bd1e367b9d710e6b8924f0c02fa4546202f049124dba02bc2e3329f5')

    depends_on('r@3.5.0:', type=('build', 'run'))
    depends_on('r-countrycode', type=('build', 'run'))
    depends_on('r-curl', type=('build', 'run'))
    depends_on('r-data-table@1.11.6:', type=('build', 'run'))
    depends_on('r-future-apply', type=('build', 'run'))
    depends_on('r-httr', type=('build', 'run'))
    depends_on('r-r-utils', type=('build', 'run'))
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class RGsodr(RPackage):
    """A Global Surface Summary of the Day (GSOD) Weather Data Client for R"""

    homepage = "https://docs.ropensci.org/GSODR/"
    url = "https://cloud.r-project.org/src/contrib/GSODR_2.1.1.tar.gz"
    list_url = "https://cloud.r-project.org/src/contrib/Archive/GSODR"

    version('2.1.1', sha256='dba732e5bd1e367b9d710e6b8924f0c02fa4546202f049124dba02bc2e3329f5')

    depends_on('r@3.5.0:', type=('build', 'run'))
    depends_on('r-countrycode', type=('build', 'run'))
    depends_on('r-curl', type=('build', 'run'))
    depends_on('r-data-table@1.11.6:', type=('build', 'run'))
    depends_on('r-future-apply', type=('build', 'run'))
    depends_on('r-httr', type=('build', 'run'))
    depends_on('r-r-utils', type=('build', 'run'))
lgpl-2.1
Python
22428bcdbb095b407a0845c35e06c8ace0653a44
Use MEDIA_URL instead of a hardcoded path
fiam/blangoblog,fiam/blangoblog,fiam/blangoblog
urls.py
urls.py
from django.conf.urls.defaults import *
from django.contrib import admin
from django.conf import settings

admin.autodiscover()

urlpatterns = patterns('',
    (r'^admin/(.*)', admin.site.root),
    (r'^', include('blangoblog.blango.urls')),
)

handler500 = 'blango.views.server_error'
handler404 = 'blango.views.page_not_found'

if settings.DEBUG:
    from os.path import abspath, dirname, join
    PROJECT_DIR = dirname(abspath(__file__))
    urlpatterns += patterns('',
        (r'^%s(?P<path>.*)$' % settings.MEDIA_URL[1:], 'django.views.static.serve',
         {'document_root': join(PROJECT_DIR, 'media')}),
    )
from django.conf.urls.defaults import *
from django.contrib import admin
from django.conf import settings

admin.autodiscover()

urlpatterns = patterns('',
    (r'^admin/(.*)', admin.site.root),
    (r'^', include('blangoblog.blango.urls')),
)

handler500 = 'blango.views.server_error'
handler404 = 'blango.views.page_not_found'

if settings.DEBUG:
    from os.path import abspath, dirname, join
    PROJECT_DIR = dirname(abspath(__file__))
    urlpatterns += patterns('',
        (r'^site-media/(?P<path>.*)$', 'django.views.static.serve',
         {'document_root': join(PROJECT_DIR, 'media')}),
    )
bsd-3-clause
Python
b30391c105e8aaa6533cbd043772983aad14a04a
remove bigbrother from urls
SpreadBand/SpreadBand,SpreadBand/SpreadBand
urls.py
urls.py
from django.conf.urls.defaults import patterns, url, include
from django.contrib.gis import admin

import settings

admin.autodiscover()

# Sitemaps
import venue.sitemaps
import band.sitemaps
import event.sitemaps

sitemaps = {}
sitemaps.update(venue.sitemaps.sitemaps)
sitemaps.update(band.sitemaps.sitemaps)
sitemaps.update(event.sitemaps.sitemaps)

# URLS
urlpatterns = patterns('',
    # temporary index page
    url(r'^$', 'django.views.generic.simple.direct_to_template', {'template': 'index.html'}, name='home'),

    # Private beta
    url(r'^alpha/', include('privatebeta.urls')),

    # auth + profile
    (r'^user/', include('socialregistration.urls')),
    (r'^user/', include('account.urls', namespace='account')),

    # comments
    (r'^comments/', include('django.contrib.comments.urls')),

    # reviews
    (r'^reviews/', include('reviews.urls')),

    # Feedback
    (r'^feedback/', include('backcap.urls', namespace='backcap')),

    # Bands
    (r'^b/', include('band.urls', namespace='band')),
    (r'^b/', include('album.urls', namespace='album')),
    (r'^b/', include('presskit.urls', namespace='presskit')),

    # Venues
    (r'^v/', include('venue.urls', namespace='venue')),

    # Events
    (r'^e/', include('event.urls', namespace='event')),

    # bargain
    (r'^e/gigbargain/', include('gigbargain.urls', namespace='gigbargain')),

    # BigBrother
    # (r'^bb/', include('bigbrother.urls')),

    # Ajax select channels
    (r'^ajax_select/', include('ajax_select.urls')),

    # Activity stream
    (r'^activity/', include('actstream.urls')),

    # Notifications
    (r'^notification/', include('notification.urls')),

    # Search
    (r'^search/', include('haystack.urls')),

    # FAQ
    (r'^faq/', include('faq.urls', namespace='faq')),

    # REST API
    (r'^api/', include('api.urls')),

    # Robots.txt and sitemap
    (r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap', {'sitemaps': sitemaps}),
    (r'^robots\.txt$', include('robots.urls')),

    # Django admin
    (r'^admin/', include(admin.site.urls)),
)

if settings.DEBUG:
    urlpatterns += patterns('',
        (r'^site_media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT}),
        url(r'^rosetta/', include('rosetta.urls')),
    )
from django.conf.urls.defaults import patterns, url, include
from django.contrib.gis import admin

import settings

admin.autodiscover()

# Sitemaps
import venue.sitemaps
import band.sitemaps
import event.sitemaps

sitemaps = {}
sitemaps.update(venue.sitemaps.sitemaps)
sitemaps.update(band.sitemaps.sitemaps)
sitemaps.update(event.sitemaps.sitemaps)

# URLS
urlpatterns = patterns('',
    # temporary index page
    url(r'^$', 'django.views.generic.simple.direct_to_template', {'template': 'index.html'}, name='home'),

    # Private beta
    url(r'^alpha/', include('privatebeta.urls')),

    # auth + profile
    (r'^user/', include('socialregistration.urls')),
    (r'^user/', include('account.urls', namespace='account')),

    # comments
    (r'^comments/', include('django.contrib.comments.urls')),

    # reviews
    (r'^reviews/', include('reviews.urls')),

    # Feedback
    (r'^feedback/', include('backcap.urls', namespace='backcap')),

    # Bands
    (r'^b/', include('band.urls', namespace='band')),
    (r'^b/', include('album.urls', namespace='album')),
    (r'^b/', include('presskit.urls', namespace='presskit')),

    # Venues
    (r'^v/', include('venue.urls', namespace='venue')),

    # Events
    (r'^e/', include('event.urls', namespace='event')),

    # bargain
    (r'^e/gigbargain/', include('gigbargain.urls', namespace='gigbargain')),

    # BigBrother
    (r'^bb/', include('bigbrother.urls')),

    # Ajax select channels
    (r'^ajax_select/', include('ajax_select.urls')),

    # Activity stream
    (r'^activity/', include('actstream.urls')),

    # Notifications
    (r'^notification/', include('notification.urls')),

    # Search
    (r'^search/', include('haystack.urls')),

    # FAQ
    (r'^faq/', include('faq.urls', namespace='faq')),

    # REST API
    (r'^api/', include('api.urls')),

    # Robots.txt and sitemap
    (r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap', {'sitemaps': sitemaps}),
    (r'^robots\.txt$', include('robots.urls')),

    # Django admin
    (r'^admin/', include(admin.site.urls)),
)

if settings.DEBUG:
    urlpatterns += patterns('',
        (r'^site_media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT}),
        url(r'^rosetta/', include('rosetta.urls')),
    )
agpl-3.0
Python
46666faa163fde00c29eca375398f20ee2b86154
fix line splitting for diff generation
balabit/git-magic,balabit/git-magic
gitmagic/change.py
gitmagic/change.py
import difflib
from io import StringIO

import git


class Change(object):
    def __init__(self, a_file_name: str, b_file_name: str,
                 a_file_content: [str], b_file_content: [str],
                 a_hunk: (int, int), b_hunk: (int, int), diff: str):
        self.a_file_name = a_file_name
        self.b_file_name = b_file_name
        self.a_file_content = a_file_content
        self.b_file_content = b_file_content
        self.a_hunk = a_hunk
        self.b_hunk = b_hunk
        self.diff = diff

    def __set__(self, key, value):
        raise TypeError('Immutable class')


def find_changes(repo: git.repo.Repo) -> [Change]:
    while True:
        try:
            diff = repo.head.commit.diff(None)[0]
        except IndexError:
            break
        a_file_content = diff.a_blob.data_stream.read().decode().splitlines(keepends=True)
        b_file_content = open(diff.b_path).read().splitlines(keepends=True)
        for tag, a_start, a_end, b_start, b_end in difflib.SequenceMatcher(
                None, a_file_content, b_file_content).get_opcodes():
            if tag == 'equal':
                continue
            unified_diff = "".join(difflib.unified_diff(
                a_file_content[:a_end + 3],
                b_file_content[:b_end + 3],
                fromfile=diff.a_path,
                tofile=diff.b_path)
            )
            yield Change(diff.a_path, diff.b_path,
                         a_file_content, b_file_content,
                         (a_start, a_end), (b_start, b_end),
                         unified_diff)
            break
import difflib
from io import StringIO

import git


class Change(object):
    def __init__(self, a_file_name: str, b_file_name: str,
                 a_file_content: [str], b_file_content: [str],
                 a_hunk: (int, int), b_hunk: (int, int), diff: str):
        self.a_file_name = a_file_name
        self.b_file_name = b_file_name
        self.a_file_content = a_file_content
        self.b_file_content = b_file_content
        self.a_hunk = a_hunk
        self.b_hunk = b_hunk
        self.diff = diff

    def __set__(self, key, value):
        raise TypeError('Immutable class')


def find_changes(repo: git.repo.Repo) -> [Change]:
    while True:
        try:
            diff = repo.head.commit.diff(None)[0]
        except IndexError:
            break
        a_file_content = [x + "\n" for x in diff.a_blob.data_stream.read().decode().split('\n')]
        b_file_content = [x + "\n" for x in open(diff.b_path).read().split('\n')]
        for tag, a_start, a_end, b_start, b_end in difflib.SequenceMatcher(None, a_file_content, b_file_content).get_opcodes():
            if tag == 'equal':
                continue
            unified_diff = "".join(difflib.unified_diff(
                a_file_content[:a_end + 3],
                b_file_content[:b_end + 3],
                fromfile=diff.a_path,
                tofile=diff.b_path)
            )
            yield Change(diff.a_path, diff.b_path,
                         a_file_content, b_file_content,
                         (a_start, a_end), (b_start, b_end),
                         unified_diff)
            break
mit
Python
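A pure-Python demonstration of the off-by-one the fix above removes: re-appending '\n' after split('\n') fabricates a phantom trailing line that SequenceMatcher would then diff as real content, while splitlines(keepends=True) round-trips the text exactly:

text = "a\nb\n"

manual = [x + "\n" for x in text.split("\n")]
print(manual)                  # ['a\n', 'b\n', '\n']  <- phantom final line

clean = text.splitlines(keepends=True)
print(clean)                   # ['a\n', 'b\n']
print("".join(clean) == text)  # True: keepends preserves the input exactly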
62f4e31ef3a8f96adbfb4016c809777b6ed3b331
Reduce useless imports in urls.py
SmartJog/webengine,SmartJog/webengine
urls.py
urls.py
from django.conf.urls.defaults import patterns, url, include
from webengine.utils import get_valid_plugins
from django.contrib import admin
from django.conf import settings

# List of patterns to apply, default view is webengine.index
urlpatterns = patterns('',
    url(r'^$', 'webengine.utils.default_view'),
)

if hasattr(settings, 'ENABLE_ADMIN') and settings.ENABLE_ADMIN:
    admin.autodiscover()
    # List of patterns to apply, default view is webengine.index
    urlpatterns += patterns('',
        (r'^admin/(.*)$', admin.site.root)
    )

plugs = get_valid_plugins()
for name, mod in plugs:
    # Append patterns of each plugins
    # Let each plugin define their urlpatterns, just concat them here.
    urlpatterns += patterns('', (r'^' + name + '/', include(name + '.urls')))
    # JS translations. We have to prepend 'webengine.' to the package
    # name since it is the way it is spelled in
    # settings.INSTALLED_APPS; see also #2306.
    urlpatterns += patterns('', url(r'^jsi18n/' + name + '/$', 'django.views.i18n.javascript_catalog', {'packages': ['webengine.' + name]}))

# JUST FOR DEBUG PURPOSE, STATIC PAGES WILL BE SERVED BY APACHE.
if settings.DEBUG:
    urlpatterns += patterns('',
        (r'^medias/(?P<path>.*)$', 'django.views.static.serve', {'document_root': '/usr/share/webengine/medias/'}),
    )
    if hasattr(settings, 'ENABLE_ADMIN') and settings.ENABLE_ADMIN:
        urlpatterns += patterns('',
            (r'^media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': '/usr/share/python-support/python-django/django/contrib/admin/media'}),
        )
from django.conf.urls.defaults import *
from webengine.utils import get_valid_plugins
from django.contrib import admin
from django.conf import settings

# List of patterns to apply, default view is webengine.index
urlpatterns = patterns('',
    url(r'^$', 'webengine.utils.default_view'),
)

if hasattr(settings, 'ENABLE_ADMIN') and settings.ENABLE_ADMIN:
    admin.autodiscover()
    # List of patterns to apply, default view is webengine.index
    urlpatterns += patterns('',
        (r'^admin/(.*)$', admin.site.root)
    )

plugs = get_valid_plugins()
for name, mod in plugs:
    # Append patterns of each plugins
    # Let each plugin define their urlpatterns, just concat them here.
    urlpatterns += patterns('', (r'^' + name + '/', include(name + '.urls')))
    # JS translations. We have to prepend 'webengine.' to the package
    # name since it is the way it is spelled in
    # settings.INSTALLED_APPS; see also #2306.
    urlpatterns += patterns('', url(r'^jsi18n/' + name + '/$', 'django.views.i18n.javascript_catalog', {'packages': ['webengine.' + name]}))

# JUST FOR DEBUG PURPOSE, STATIC PAGES WILL BE SERVED BY APACHE.
if settings.DEBUG:
    urlpatterns += patterns('',
        (r'^medias/(?P<path>.*)$', 'django.views.static.serve', {'document_root': '/usr/share/webengine/medias/'}),
    )
    if hasattr(settings, 'ENABLE_ADMIN') and settings.ENABLE_ADMIN:
        urlpatterns += patterns('',
            (r'^media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': '/usr/share/python-support/python-django/django/contrib/admin/media'}),
        )
lgpl-2.1
Python
224700aada7e7d80b4389b123ee00b5f14e88c99
Fix HTML escaping of TextPlugin, by feature/text-filters branch
django-fluent/django-fluent-contents,django-fluent/django-fluent-contents,edoburu/django-fluent-contents,edoburu/django-fluent-contents,django-fluent/django-fluent-contents,edoburu/django-fluent-contents
fluent_contents/plugins/text/content_plugins.py
fluent_contents/plugins/text/content_plugins.py
""" Definition of the plugin. """ from django.utils.safestring import mark_safe from fluent_contents.extensions import ContentPlugin, plugin_pool, ContentItemForm from fluent_contents.plugins.text.models import TextItem class TextItemForm(ContentItemForm): """ Perform extra processing for the text item """ def clean_text(self, html): """ Perform the cleanup in the form, allowing to raise a ValidationError """ return self.instance.apply_pre_filters(html) @plugin_pool.register class TextPlugin(ContentPlugin): """ CMS plugin for WYSIWYG text items. """ model = TextItem form = TextItemForm admin_init_template = "admin/fluent_contents/plugins/text/admin_init.html" # TODO: remove the need for this. admin_form_template = ContentPlugin.ADMIN_TEMPLATE_WITHOUT_LABELS search_output = True def render(self, request, instance, **kwargs): # Included in a DIV, so the next item will be displayed below. # The text_final is allowed to be None, to migrate old plugins. text = instance.text if instance.text_final is None else instance.text_final return mark_safe(u'<div class="text">{0}</div>\n'.format(text))
""" Definition of the plugin. """ from django.utils.html import format_html from fluent_contents.extensions import ContentPlugin, plugin_pool, ContentItemForm from fluent_contents.plugins.text.models import TextItem class TextItemForm(ContentItemForm): """ Perform extra processing for the text item """ def clean_text(self, html): """ Perform the cleanup in the form, allowing to raise a ValidationError """ return self.instance.apply_pre_filters(html) @plugin_pool.register class TextPlugin(ContentPlugin): """ CMS plugin for WYSIWYG text items. """ model = TextItem form = TextItemForm admin_init_template = "admin/fluent_contents/plugins/text/admin_init.html" # TODO: remove the need for this. admin_form_template = ContentPlugin.ADMIN_TEMPLATE_WITHOUT_LABELS search_output = True def render(self, request, instance, **kwargs): # Included in a DIV, so the next item will be displayed below. # The text_final is allowed to be None, to migrate old plugins. text = instance.text if instance.text_final is None else instance.text_final return format_html(u'<div class="text">{0}</div>\n', text)
apache-2.0
Python
f48b3df88bb01edd8489f0d7f59794b2cd46bde8
Make PY3 compatible
pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus
test/core/028-dynamic-hierarchy/local_hierarchy.py
test/core/028-dynamic-hierarchy/local_hierarchy.py
#!/usr/bin/env python
import os
import subprocess
import sys

from Pegasus.DAX3 import *

if len(sys.argv) != 2:
    print("Usage: %s CLUSTER_PEGASUS_HOME" % sys.argv[0])
    sys.exit(1)

cluster_pegasus_home = sys.argv[1]

# to setup python lib dir for importing Pegasus PYTHON DAX API
# pegasus_config = os.path.join("pegasus-config") + " --noeoln --python"
# lib_dir = subprocess.Popen(pegasus_config, stdout=subprocess.PIPE, shell=True).communicate()[0]
# Insert this directory in our search path
# os.sys.path.insert(0, lib_dir)

# Create a abstract dag
adag = ADAG("local-hierarchy")

# pre_dax = Job(namespace="level1", name="sleep")
# pre_dax.addArguments('5')
# adag.addJob(pre_dax)

# Add a job to generate the DAX
daxfile = File("inner.dax")
gen_dax = Job(namespace="blackdiamond", name="generate")
gen_dax.addArguments(cluster_pegasus_home)
gen_dax.addArguments("inner.dax")
gen_dax.uses(daxfile, link=Link.OUTPUT, transfer=True)
adag.addJob(gen_dax)

dax1 = DAX(daxfile)
# DAX jobs are called with same arguments passed, while planning the root level dax
dax1.addArguments("--conf dax1.properties")
dax1.addArguments("--output-site local")
dax1.addArguments("-vvv")
# the dax job needs a basename option as DAX doesnt exist when outer level workflow is planned
dax1.addArguments("--basename inner")
dax1.uses(File("dax1.properties"), link=Link.INPUT)
dax1.uses(File("dax1.rc"), link=Link.INPUT)
dax1.uses(File("dax1.sites.xml"), link=Link.INPUT)
dax1.uses(File("dax1.tc.text"), link=Link.INPUT)
adag.addJob(dax1)

# this dax job uses a pre-existing dax file
# that has to be present in the replica catalog
daxfile2 = File("inner2.dax")
dax2 = DAX(daxfile2)
# pegasus-plan arguments for the DAX jobs can be overwritten
dax2.addArguments("--output-site local")
dax2.addArguments("-vvv")
adag.addJob(dax2)

# Add a job
post_dax = Job(namespace="level2", name="sleep")
post_dax.addArguments("5")
adag.addJob(post_dax)

# Add control-flow dependencies
adag.addDependency(Dependency(parent=gen_dax, child=dax1))
adag.addDependency(Dependency(parent=dax1, child=dax2))
adag.addDependency(Dependency(parent=dax2, child=post_dax))

# Write the DAX to stdout
adag.writeXML(sys.stdout)
#!/usr/bin/env python
import os
import sys
import subprocess

if len(sys.argv) != 2:
    print "Usage: %s CLUSTER_PEGASUS_HOME" % (sys.argv[0])
    sys.exit(1)

cluster_pegasus_home=sys.argv[1]

# to setup python lib dir for importing Pegasus PYTHON DAX API
#pegasus_config = os.path.join("pegasus-config") + " --noeoln --python"
#lib_dir = subprocess.Popen(pegasus_config, stdout=subprocess.PIPE, shell=True).communicate()[0]
#Insert this directory in our search path
#os.sys.path.insert(0, lib_dir)

from Pegasus.DAX3 import *

# Create a abstract dag
adag = ADAG('local-hierarchy')

#pre_dax = Job(namespace="level1", name="sleep")
#pre_dax.addArguments('5')
#adag.addJob(pre_dax)

# Add a job to generate the DAX
daxfile = File('inner.dax')
gen_dax = Job(namespace="blackdiamond", name="generate")
gen_dax.addArguments( cluster_pegasus_home)
gen_dax.addArguments( "inner.dax")
gen_dax.uses( daxfile, link=Link.OUTPUT, transfer=True)
adag.addJob(gen_dax)

dax1 = DAX (daxfile)
#DAX jobs are called with same arguments passed, while planning the root level dax
dax1.addArguments('--conf dax1.properties')
dax1.addArguments('--output-site local')
dax1.addArguments( '-vvv')
# the dax job needs a basename option as DAX doesnt exist when outer level workflow is planned
dax1.addArguments( '--basename inner' )
dax1.uses( File("dax1.properties"), link=Link.INPUT)
dax1.uses( File("dax1.rc"), link=Link.INPUT)
dax1.uses( File("dax1.sites.xml"), link=Link.INPUT)
dax1.uses( File("dax1.tc.text"), link=Link.INPUT)
adag.addJob(dax1)

# this dax job uses a pre-existing dax file
# that has to be present in the replica catalog
daxfile2 = File('inner2.dax')
dax2 = DAX (daxfile2)
#pegasus-plan arguments for the DAX jobs can be overwritten
dax2.addArguments('--output-site local')
dax2.addArguments( '-vvv')
adag.addJob(dax2)

# Add a job
post_dax = Job(namespace="level2", name="sleep")
post_dax.addArguments('5')
adag.addJob(post_dax)

# Add control-flow dependencies
adag.addDependency(Dependency(parent=gen_dax, child=dax1))
adag.addDependency(Dependency(parent=dax1, child=dax2))
adag.addDependency(Dependency(parent=dax2, child=post_dax))

# Write the DAX to stdout
adag.writeXML(sys.stdout)
apache-2.0
Python
919c2e5ea6a22ac7b24aa005e5ad3c7886b3feb7
fix broadcaster display
eellak/ccradio,eellak/ccradio,eellak/ccradio
panel/admin.py
panel/admin.py
from ccradio.panel.models import Broadcaster, Category, Stream
from django.contrib import admin
from django.contrib.auth.models import User


class BroadcasterAdmin(admin.ModelAdmin):
    list_display = ('title', 'category', 'stream', 'url')
    list_filter = ('category', 'stream')
    ordering = ('title', 'category')
    search_fields = ('title',)

admin.site.register(Broadcaster, BroadcasterAdmin)


class CategoryAdmin(admin.ModelAdmin):
    list_display = ('name',)
    ordering = ('name',)

admin.site.register(Category, CategoryAdmin)


class StreamAdmin(admin.ModelAdmin):
    list_display = ('name', 'uri',)
    ordering = ('uri',)

admin.site.register(Stream, StreamAdmin)
from ccradio.panel.models import Broadcaster, Category, Stream
from django.contrib import admin
from django.contrib.auth.models import User


class BroadcasterAdmin(admin.ModelAdmin):
    list_display = ('title', 'category', 'active', 'stream')
    list_filter = ('category', 'stream', 'active')
    ordering = ('title', 'category')
    search_fields = ('title',)

admin.site.register(Broadcaster, BroadcasterAdmin)


class CategoryAdmin(admin.ModelAdmin):
    list_display = ('name',)
    ordering = ('name',)

admin.site.register(Category, CategoryAdmin)


class StreamAdmin(admin.ModelAdmin):
    list_display = ('name', 'uri',)
    ordering = ('uri',)

admin.site.register(Stream, StreamAdmin)
agpl-3.0
Python
afe5e9a0a097d6b6615e01d574b42e6c996282d6
Disable colore for @mrtazz
ericmjl/legit,wkentaro/legit,blueyed/legit,jetgeng/legit,kennethreitz/legit,deshion/legit,kennethreitz/legit,hickford/legit,nxnfufunezn/legit,mattn/legit,nxnfufunezn/legit,hickford/legit,ericmjl/legit,wkentaro/legit,jetgeng/legit,deshion/legit,mattn/legit,blueyed/legit
legit/core.py
legit/core.py
# -*- coding: utf-8 -*-

"""
legit.core
~~~~~~~~~~

This module provides the basic functionality of legit.
"""

# from clint import resources
import os

import clint.textui.colored

__version__ = '0.0.9'
__author__ = 'Kenneth Reitz'
__license__ = 'BSD'

if 'LEGIT_NO_COLORS' in os.environ:
    clint.textui.colored.DISABLE_COLOR = True

# resources.init('kennethreitz', 'legit')
# resources.user.write('config.ini', "we'll get there.")
# -*- coding: utf-8 -*-

"""
legit.core
~~~~~~~~~~

This module provides the basic functionality of legit.
"""

# from clint import resources

__version__ = '0.0.9'
__author__ = 'Kenneth Reitz'
__license__ = 'BSD'

# resources.init('kennethreitz', 'legit')
# resources.user.write('config.ini', "we'll get there.")
bsd-3-clause
Python
05cd983e108764d288798784508249726b0ffd70
add version 0.6.2 to yaml-cpp (#7931)
LLNL/spack,tmerrick1/spack,matthiasdiener/spack,LLNL/spack,mfherbst/spack,iulian787/spack,krafczyk/spack,LLNL/spack,krafczyk/spack,matthiasdiener/spack,tmerrick1/spack,matthiasdiener/spack,tmerrick1/spack,mfherbst/spack,matthiasdiener/spack,LLNL/spack,iulian787/spack,mfherbst/spack,krafczyk/spack,tmerrick1/spack,krafczyk/spack,iulian787/spack,LLNL/spack,iulian787/spack,mfherbst/spack,krafczyk/spack,tmerrick1/spack,matthiasdiener/spack,mfherbst/spack,iulian787/spack
var/spack/repos/builtin/packages/yaml-cpp/package.py
var/spack/repos/builtin/packages/yaml-cpp/package.py
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *


class YamlCpp(CMakePackage):
    """A YAML parser and emitter in C++"""

    homepage = "https://github.com/jbeder/yaml-cpp"
    url = "https://github.com/jbeder/yaml-cpp/archive/yaml-cpp-0.5.3.tar.gz"

    version('0.6.2', '5b943e9af0060d0811148b037449ef82')
    version('0.5.3', '2bba14e6a7f12c7272f87d044e4a7211')
    version('develop', git='https://github.com/jbeder/yaml-cpp', branch='master')

    variant('shared', default=True,
            description='Enable build of shared libraries')
    variant('pic', default=True,
            description='Build with position independent code')

    depends_on('boost@:1.66.99', when='@:0.5.3')

    conflicts('%gcc@:4.8', when='@0.6.0:',
              msg="versions 0.6.0: require c++11 support")
    conflicts('%clang@:3.3.0', when='@0.6.0:',
              msg="versions 0.6.0: require c++11 support")
    # currently we can't check for apple-clang's version
    # conflicts('%clang@:4.0.0-apple', when='@0.6.0:',
    #           msg="versions 0.6.0: require c++11 support")
    conflicts('%intel@:11.1', when='@0.6.0:',
              msg="versions 0.6.0: require c++11 support")
    conflicts('%xl@:13.1', when='@0.6.0:',
              msg="versions 0.6.0: require c++11 support")
    conflicts('%xl_r@:13.1', when='@0.6.0:',
              msg="versions 0.6.0: require c++11 support")

    def cmake_args(self):
        spec = self.spec
        options = []

        options.extend([
            '-DBUILD_SHARED_LIBS:BOOL=%s' % (
                'ON' if '+shared' in spec else 'OFF'),
            '-DCMAKE_POSITION_INDEPENDENT_CODE:BOOL=%s' % (
                'ON' if '+pic' in spec else 'OFF'),
        ])

        return options
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *


class YamlCpp(CMakePackage):
    """A YAML parser and emitter in C++"""

    homepage = "https://github.com/jbeder/yaml-cpp"
    url = "https://github.com/jbeder/yaml-cpp/archive/yaml-cpp-0.5.3.tar.gz"

    version('0.5.3', '2bba14e6a7f12c7272f87d044e4a7211')
    version('develop', git='https://github.com/jbeder/yaml-cpp', branch='master')

    variant('shared', default=True,
            description='Enable build of shared libraries')
    variant('pic', default=True,
            description='Build with position independent code')

    depends_on('boost', when='@:0.5.3')

    def cmake_args(self):
        spec = self.spec
        options = []

        options.extend([
            '-DBUILD_SHARED_LIBS:BOOL=%s' % (
                'ON' if '+shared' in spec else 'OFF'),
            '-DCMAKE_POSITION_INDEPENDENT_CODE:BOOL=%s' % (
                'ON' if '+pic' in spec else 'OFF'),
        ])

        return options
lgpl-2.1
Python
58d3d5fb4ae8fc081e5e503e2b6316dc81deb678
Rewrite views.py
kagemiku/gpbot-line
gpbot/bot/views.py
gpbot/bot/views.py
from django.shortcuts import render
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseForbidden
from django.conf import settings
from linebot import LineBotApi, WebhookParser
from linebot.exceptions import InvalidSignatureError, LineBotApiError
from linebot.models import (
    MessageEvent,
    SourceUser,
    TextMessage,
    StickerMessage,
    ImageMessage,
    VideoMessage,
    AudioMessage,
    LocationMessage,
)

import os
import json
import requests

from .router import Router
from .reply_generator import ReplyGenerator
from .event_processor import (
    text_message_event_processor as tmep,
)


CHANNEL_ACCESS_TOKEN = os.environ['LINE_CHANNEL_ACCESS_TOKEN']
CHANNEL_SECRET = os.environ['LINE_CHANNEL_SECRET']

line_bot_api = LineBotApi(CHANNEL_ACCESS_TOKEN)
parser = WebhookParser(CHANNEL_SECRET)
reply_generator = ReplyGenerator(line_bot_api)
router = Router(
    reply_generator,
    [
        tmep.DefaultTextMessageEventProcessor(),
    ]
)


def callback(request):
    if settings.DEBUG:
        reply = ''
        request_json = json.loads(request.body.decode('utf-8'))

        for e in request_json['events']:
            reply_token = e['replyToken']
            message_type = e['message']['type']
            source_user = SourceUser(user_id=e['source']['userId'])

            if message_type == 'text':
                text = e['message']['text']
                timestamp = e['timestamp']
                text_message = TextMessage(text=text)
                message_event = MessageEvent(timestamp=timestamp, source=source_user, message=text_message)
                router.relay(message_event)

        return HttpResponse()
    else:
        if request.method == 'POST':
            signature = request.META['HTTP_X_LINE_SIGNATURE']
            print('signature: ', signature)
            body = request.body.decode('utf-8')

            try:
                events = parser.parse(body, signature)
            except InvalidSignatureError:
                return HttpResponseForbidden()
            except LineBotApiError:
                return HttpResponseBadRequest()

            for event in events:
                router.relay(event)

            return HttpResponse()
        else:
            return HttpResponseBadRequest()
from django.shortcuts import render
from django.http import HttpResponse

import os
import json
import requests


REPLY_ENDPOINT = 'https://api.line.me/v2/bot/message/reply'
CHANNEL_ACCESS_TOKEN = os.environ['LINE_CHANNEL_ACCESS_TOKEN']
HEADER = {
    'Content-Type': 'application/json',
    'Authorization': 'Bearer ' + CHANNEL_ACCESS_TOKEN
}


def reply_text(reply_token, text):
    reply = text
    payload = {
        'replyToken': reply_token,
        'messages': [
            {
                'type': 'text',
                'text': reply,
            },
        ]
    }
    result = requests.post(REPLY_ENDPOINT, headers=HEADER, data=json.dumps(payload))
    print('result: ', result)

    return reply


def callback(request):
    reply = ''
    request_json = json.loads(request.body.decode('utf-8'))

    for e in request_json['events']:
        reply_token = e['replyToken']
        message_type = e['message']['type']

        if message_type == 'text':
            text = e['message']['text']
            reply += reply_text(reply_token, text)

    return HttpResponse(reply)
mit
Python
63f01e99acf8b365539c7d58d1acf86d70d03261
Fix name-tests-test
chriskuehl/pre-commit-hooks,bgschiller/pre-commit-hooks,Harwood/pre-commit-hooks,jordant/pre-commit-hooks,pre-commit/pre-commit-hooks,Coverfox/pre-commit-hooks,jordant/pre-commit-hooks,dupuy/pre-commit-hooks,arahayrabedian/pre-commit-hooks
pre_commit_hooks/tests_should_end_in_test.py
pre_commit_hooks/tests_should_end_in_test.py
from __future__ import print_function

import argparse
import sys


def validate_files(argv=None):
    parser = argparse.ArgumentParser()
    parser.add_argument('filenames', nargs='*')
    args = parser.parse_args(argv)

    retcode = 0
    for filename in args.filenames:
        if (
                not filename.endswith('_test.py') and
                not filename.endswith('__init__.py') and
                not filename.endswith('/conftest.py')
        ):
            retcode = 1
            print('{0} does not end in _test.py'.format(filename))
    return retcode


if __name__ == '__main__':
    sys.exit(validate_files())
from __future__ import print_function

import sys


def validate_files(argv=None):
    retcode = 0
    for filename in argv:
        if (
                not filename.endswith('_test.py') and
                not filename.endswith('__init__.py') and
                not filename.endswith('/conftest.py')
        ):
            retcode = 1
            print('{0} does not end in _test.py'.format(filename))
    return retcode


if __name__ == '__main__':
    sys.exit(validate_files())
mit
Python
edf96afa7045efa4d60d80edaef9a13226535892
Fix typo in feature_blocksdir.py log message
chaincoin/chaincoin,chaincoin/chaincoin,chaincoin/chaincoin,chaincoin/chaincoin,chaincoin/chaincoin,chaincoin/chaincoin
test/functional/feature_blocksdir.py
test/functional/feature_blocksdir.py
#!/usr/bin/env python3
# Copyright (c) 2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the blocksdir option.
"""

import os
import shutil

from test_framework.test_framework import BitcoinTestFramework, initialize_datadir


class BlocksdirTest(BitcoinTestFramework):
    def set_test_params(self):
        self.setup_clean_chain = True
        self.num_nodes = 1

    def run_test(self):
        self.stop_node(0)
        shutil.rmtree(self.nodes[0].datadir)
        initialize_datadir(self.options.tmpdir, 0)
        self.log.info("Starting with nonexistent blocksdir ...")
        blocksdir_path = os.path.join(self.options.tmpdir, 'blocksdir')
        self.nodes[0].assert_start_raises_init_error(["-blocksdir=" + blocksdir_path], 'Error: Specified blocks directory "{}" does not exist.'.format(blocksdir_path))
        os.mkdir(blocksdir_path)
        self.log.info("Starting with existing blocksdir ...")
        self.start_node(0, ["-blocksdir=" + blocksdir_path])
        self.log.info("mining blocks..")
        self.nodes[0].generate(10)
        assert os.path.isfile(os.path.join(blocksdir_path, "regtest", "blocks", "blk00000.dat"))
        assert os.path.isdir(os.path.join(self.nodes[0].datadir, "regtest", "blocks", "index"))


if __name__ == '__main__':
    BlocksdirTest().main()
#!/usr/bin/env python3
# Copyright (c) 2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the blocksdir option.
"""

import os
import shutil

from test_framework.test_framework import BitcoinTestFramework, initialize_datadir


class BlocksdirTest(BitcoinTestFramework):
    def set_test_params(self):
        self.setup_clean_chain = True
        self.num_nodes = 1

    def run_test(self):
        self.stop_node(0)
        shutil.rmtree(self.nodes[0].datadir)
        initialize_datadir(self.options.tmpdir, 0)
        self.log.info("Starting with non exiting blocksdir ...")
        blocksdir_path = os.path.join(self.options.tmpdir, 'blocksdir')
        self.nodes[0].assert_start_raises_init_error(["-blocksdir=" + blocksdir_path], 'Error: Specified blocks directory "{}" does not exist.'.format(blocksdir_path))
        os.mkdir(blocksdir_path)
        self.log.info("Starting with exiting blocksdir ...")
        self.start_node(0, ["-blocksdir=" + blocksdir_path])
        self.log.info("mining blocks..")
        self.nodes[0].generate(10)
        assert os.path.isfile(os.path.join(blocksdir_path, "regtest", "blocks", "blk00000.dat"))
        assert os.path.isdir(os.path.join(self.nodes[0].datadir, "regtest", "blocks", "index"))


if __name__ == '__main__':
    BlocksdirTest().main()
mit
Python
cb0b197ccad0b49baa24f3dff374de2ff2572cde
Make ua_comment test pass on 0.16.0
chaincoin/chaincoin,chaincoin/chaincoin,chaincoin/chaincoin,chaincoin/chaincoin,chaincoin/chaincoin,chaincoin/chaincoin
test/functional/feature_uacomment.py
test/functional/feature_uacomment.py
#!/usr/bin/env python3
# Copyright (c) 2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the -uacomment option."""

from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal


class UacommentTest(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 1
        self.setup_clean_chain = True

    def run_test(self):
        self.log.info("test multiple -uacomment")
        test_uacomment = self.nodes[0].getnetworkinfo()["subversion"][-12:-1]
        assert_equal(test_uacomment, "(testnode0)")

        self.restart_node(0, ["-uacomment=foo"])
        foo_uacomment = self.nodes[0].getnetworkinfo()["subversion"][-17:-1]
        assert_equal(foo_uacomment, "(testnode0; foo)")

        self.log.info("test -uacomment max length")
        self.stop_node(0)
        expected = "exceeds maximum length (256). Reduce the number or size of uacomments."
        self.assert_start_raises_init_error(0, ["-uacomment=" + 'a' * 256], expected)

        self.log.info("test -uacomment unsafe characters")
        for unsafe_char in ['/', ':', '(', ')']:
            expected = "User Agent comment (" + unsafe_char + ") contains unsafe characters"
            self.assert_start_raises_init_error(0, ["-uacomment=" + unsafe_char], expected)


if __name__ == '__main__':
    UacommentTest().main()
#!/usr/bin/env python3
# Copyright (c) 2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the -uacomment option."""

from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal


class UacommentTest(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 1
        self.setup_clean_chain = True

    def run_test(self):
        self.log.info("test multiple -uacomment")
        test_uacomment = self.nodes[0].getnetworkinfo()["subversion"][-12:-1]
        assert_equal(test_uacomment, "(testnode0)")

        self.restart_node(0, ["-uacomment=foo"])
        foo_uacomment = self.nodes[0].getnetworkinfo()["subversion"][-17:-1]
        assert_equal(foo_uacomment, "(testnode0; foo)")

        self.log.info("test -uacomment max length")
        self.stop_node(0)
        expected = "Total length of network version string (286) exceeds maximum length (256). Reduce the number or size of uacomments."
        self.assert_start_raises_init_error(0, ["-uacomment=" + 'a' * 256], expected)

        self.log.info("test -uacomment unsafe characters")
        for unsafe_char in ['/', ':', '(', ')']:
            expected = "User Agent comment (" + unsafe_char + ") contains unsafe characters"
            self.assert_start_raises_init_error(0, ["-uacomment=" + unsafe_char], expected)


if __name__ == '__main__':
    UacommentTest().main()
mit
Python
e41dde0874ff40dde175830618b40ae3828865d6
Update file paths to new location for trac, inside of irrigator_pro
warnes/irrigatorpro,warnes/irrigatorpro,warnes/irrigatorpro,warnes/irrigatorpro
irrigator_pro/trac/cgi-bin/trac.wsgi
irrigator_pro/trac/cgi-bin/trac.wsgi
import os, os.path, site, sys, socket

# Add django root dir to python path
PROJECT_ROOT = '/prod/irrigator_pro'
print "PROJECT_ROOT=", PROJECT_ROOT
sys.path.append(PROJECT_ROOT)

# Add virtualenv dirs to python path
if socket.gethostname()=='gregs-mbp':
    VIRTUAL_ENV_ROOT = os.path.join( PROJECT_ROOT, 'VirtualEnvs', 'irrigator_pro')
else:
    VIRTUAL_ENV_ROOT = '/prod/VirtualEnvs/irrigator_pro/'

print "VIRTUAL_ENV_ROOT='%s'" % VIRTUAL_ENV_ROOT

activate_this = os.path.join(VIRTUAL_ENV_ROOT, 'bin', 'activate_this.py')
execfile(activate_this, dict(__file__=activate_this))

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "irrigator_pro.settings")
import irrigator_pro.settings

os.environ['TRAC_ENV'] = '/prod/irrigator_pro/trac'
os.environ['PYTHON_EGG_CACHE'] = '/prod/irrigator_pro/trac/eggs'

def application(environ, start_request):
    if not 'trac.env_parent_dir' in environ:
        environ.setdefault('trac.env_path', '/prod/irrigator_pro/trac')
    if 'PYTHON_EGG_CACHE' in environ:
        os.environ['PYTHON_EGG_CACHE'] = environ['PYTHON_EGG_CACHE']
    elif 'trac.env_path' in environ:
        os.environ['PYTHON_EGG_CACHE'] = \
            os.path.join(environ['trac.env_path'], '.egg-cache')
    elif 'trac.env_parent_dir' in environ:
        os.environ['PYTHON_EGG_CACHE'] = \
            os.path.join(environ['trac.env_parent_dir'], '.egg-cache')
    from trac.web.main import dispatch_request
    return dispatch_request(environ, start_request)
import os, os.path, site, sys, socket

# Add django root dir to python path
PROJECT_ROOT = '/prod/irrigator_pro'
print "PROJECT_ROOT=", PROJECT_ROOT
sys.path.append(PROJECT_ROOT)

# Add virtualenv dirs to python path
if socket.gethostname()=='gregs-mbp':
    VIRTUAL_ENV_ROOT = os.path.join( PROJECT_ROOT, 'VirtualEnvs', 'irrigator_pro')
else:
    VIRTUAL_ENV_ROOT = '/prod/VirtualEnvs/irrigator_pro/'

print "VIRTUAL_ENV_ROOT='%s'" % VIRTUAL_ENV_ROOT

activate_this = os.path.join(VIRTUAL_ENV_ROOT, 'bin', 'activate_this.py')
execfile(activate_this, dict(__file__=activate_this))

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "irrigator_pro.settings")
import irrigator_pro.settings

os.environ['TRAC_ENV'] = '/prod/trac'
os.environ['PYTHON_EGG_CACHE'] = '/prod/trac/eggs'

def application(environ, start_request):
    if not 'trac.env_parent_dir' in environ:
        environ.setdefault('trac.env_path', '/prod/trac')
    if 'PYTHON_EGG_CACHE' in environ:
        os.environ['PYTHON_EGG_CACHE'] = environ['PYTHON_EGG_CACHE']
    elif 'trac.env_path' in environ:
        os.environ['PYTHON_EGG_CACHE'] = \
            os.path.join(environ['trac.env_path'], '.egg-cache')
    elif 'trac.env_parent_dir' in environ:
        os.environ['PYTHON_EGG_CACHE'] = \
            os.path.join(environ['trac.env_parent_dir'], '.egg-cache')
    from trac.web.main import dispatch_request
    return dispatch_request(environ, start_request)
mit
Python
44e3643f9ddacce293feac2a2e9ca09799b7ff68
Use options.rootdir instead of __file__
ella/citools,ella/citools
citools/pavement.py
citools/pavement.py
import os
import sys

from os.path import join, abspath, dirname

from paver.easy import *


@task
@consume_args
@needs('unit', 'integrate')
def test():
    """ Run whole testsuite """

def djangonize_test_environment(test_project_module):
    sys.path.insert(0, options.rootdir)
    sys.path.insert(0, join(options.rootdir, "tests"))
    sys.path.insert(0, join(options.rootdir, "tests", test_project_module))

    os.environ['DJANGO_SETTINGS_MODULE'] = "%s.settings" % test_project_module

def run_tests(test_project_module, nose_args):
    djangonize_test_environment(test_project_module)

    import nose

    os.chdir(join(options.rootdir, "tests", test_project_module))

    argv = ["--with-django"] + nose_args

    nose.run_exit(
        argv = ["nosetests"] + argv,
        defaultTest = test_project_module
    )

@task
@consume_args
def unit(args):
    """ Run unittests """
    run_tests(test_project_module="unit_project", nose_args=[]+args)

@task
@consume_args
def integrate(args):
    """ Run integration tests """
    run_tests(test_project_module="example_project", nose_args=["--with-selenium", "--with-djangoliveserver"]+args)

@task
def install_dependencies():
    sh('pip install -r requirements.txt')

@task
def bootstrap():
    options.virtualenv = {'packages_to_install' : ['pip']}
    call_task('paver.virtual.bootstrap')
    sh("python bootstrap.py")
    path('bootstrap.py').remove()

    print '*'*80
    if sys.platform in ('win32', 'winnt'):
        print "* Before running other commands, You now *must* run %s" % os.path.join("bin", "activate.bat")
    else:
        print "* Before running other commands, You now *must* run source %s" % os.path.join("bin", "activate")
    print '*'*80

@task
@needs('citools.paver.install_dependencies')
def prepare():
    """ Prepare complete environment """
import os
import sys

from os.path import join, abspath, dirname

from paver.easy import *


@task
@consume_args
@needs('unit', 'integrate')
def test():
    """ Run whole testsuite """

def djangonize_test_environment(test_project_module):
    sys.path.insert(0, abspath(join(dirname(__file__))))
    sys.path.insert(0, abspath(join(dirname(__file__), "tests")))
    sys.path.insert(0, abspath(join(dirname(__file__), "tests", test_project_module)))

    os.environ['DJANGO_SETTINGS_MODULE'] = "%s.settings" % test_project_module

def run_tests(test_project_module, nose_args):
    djangonize_test_environment(test_project_module)

    import nose

    os.chdir(abspath(join(dirname(__file__), "tests", test_project_module)))

    argv = ["--with-django"] + nose_args

    nose.run_exit(
        argv = ["nosetests"] + argv,
        defaultTest = test_project_module
    )

@task
@consume_args
def unit(args):
    """ Run unittests """
    run_tests(test_project_module="unit_project", nose_args=[]+args)

@task
@consume_args
def integrate(args):
    """ Run integration tests """
    run_tests(test_project_module="example_project", nose_args=["--with-selenium", "--with-djangoliveserver"]+args)

@task
def install_dependencies():
    sh('pip install -r requirements.txt')

@task
def bootstrap():
    options.virtualenv = {'packages_to_install' : ['pip']}
    call_task('paver.virtual.bootstrap')
    sh("python bootstrap.py")
    path('bootstrap.py').remove()

    print '*'*80
    if sys.platform in ('win32', 'winnt'):
        print "* Before running other commands, You now *must* run %s" % os.path.join("bin", "activate.bat")
    else:
        print "* Before running other commands, You now *must* run source %s" % os.path.join("bin", "activate")
    print '*'*80

@task
@needs('citools.paver.install_dependencies')
def prepare():
    """ Prepare complete environment """
bsd-3-clause
Python
8ff877fe82ced94582ca48cb066a9363995b09cd
Fix flake8
bow/bioconda-recipes,colinbrislawn/bioconda-recipes,Luobiny/bioconda-recipes,JenCabral/bioconda-recipes,acaprez/recipes,rob-p/bioconda-recipes,omicsnut/bioconda-recipes,martin-mann/bioconda-recipes,ivirshup/bioconda-recipes,ostrokach/bioconda-recipes,HassanAmr/bioconda-recipes,CGATOxford/bioconda-recipes,abims-sbr/bioconda-recipes,chapmanb/bioconda-recipes,daler/bioconda-recipes,bioconda/recipes,oena/bioconda-recipes,rvalieris/bioconda-recipes,rvalieris/bioconda-recipes,bebatut/bioconda-recipes,gregvonkuster/bioconda-recipes,xguse/bioconda-recipes,JenCabral/bioconda-recipes,mcornwell1957/bioconda-recipes,martin-mann/bioconda-recipes,cokelaer/bioconda-recipes,zachcp/bioconda-recipes,dkoppstein/recipes,ivirshup/bioconda-recipes,rvalieris/bioconda-recipes,zachcp/bioconda-recipes,phac-nml/bioconda-recipes,dmaticzka/bioconda-recipes,BIMSBbioinfo/bioconda-recipes,instituteofpathologyheidelberg/bioconda-recipes,instituteofpathologyheidelberg/bioconda-recipes,shenwei356/bioconda-recipes,chapmanb/bioconda-recipes,abims-sbr/bioconda-recipes,cokelaer/bioconda-recipes,HassanAmr/bioconda-recipes,blankenberg/bioconda-recipes,JenCabral/bioconda-recipes,dmaticzka/bioconda-recipes,zachcp/bioconda-recipes,JenCabral/bioconda-recipes,acaprez/recipes,instituteofpathologyheidelberg/bioconda-recipes,chapmanb/bioconda-recipes,dmaticzka/bioconda-recipes,mcornwell1957/bioconda-recipes,hardingnj/bioconda-recipes,CGATOxford/bioconda-recipes,colinbrislawn/bioconda-recipes,npavlovikj/bioconda-recipes,abims-sbr/bioconda-recipes,cokelaer/bioconda-recipes,HassanAmr/bioconda-recipes,guowei-he/bioconda-recipes,dkoppstein/recipes,ivirshup/bioconda-recipes,lpantano/recipes,bioconda/recipes,npavlovikj/bioconda-recipes,chapmanb/bioconda-recipes,xguse/bioconda-recipes,joachimwolff/bioconda-recipes,ostrokach/bioconda-recipes,joachimwolff/bioconda-recipes,shenwei356/bioconda-recipes,Luobiny/bioconda-recipes,blankenberg/bioconda-recipes,JenCabral/bioconda-recipes,lpantano/recipes,joachimwolff/bioconda-recipes,hardingnj/bioconda-recipes,colinbrislawn/bioconda-recipes,peterjc/bioconda-recipes,jfallmann/bioconda-recipes,gvlproject/bioconda-recipes,Luobiny/bioconda-recipes,HassanAmr/bioconda-recipes,blankenberg/bioconda-recipes,saketkc/bioconda-recipes,daler/bioconda-recipes,peterjc/bioconda-recipes,keuv-grvl/bioconda-recipes,matthdsm/bioconda-recipes,jasper1918/bioconda-recipes,CGATOxford/bioconda-recipes,blankenberg/bioconda-recipes,bow/bioconda-recipes,colinbrislawn/bioconda-recipes,gvlproject/bioconda-recipes,bebatut/bioconda-recipes,oena/bioconda-recipes,bioconda/bioconda-recipes,matthdsm/bioconda-recipes,bow/bioconda-recipes,omicsnut/bioconda-recipes,guowei-he/bioconda-recipes,CGATOxford/bioconda-recipes,colinbrislawn/bioconda-recipes,gvlproject/bioconda-recipes,bebatut/bioconda-recipes,gregvonkuster/bioconda-recipes,daler/bioconda-recipes,rob-p/bioconda-recipes,hardingnj/bioconda-recipes,mcornwell1957/bioconda-recipes,guowei-he/bioconda-recipes,gvlproject/bioconda-recipes,gvlproject/bioconda-recipes,guowei-he/bioconda-recipes,ostrokach/bioconda-recipes,ivirshup/bioconda-recipes,hardingnj/bioconda-recipes,zachcp/bioconda-recipes,instituteofpathologyheidelberg/bioconda-recipes,keuv-grvl/bioconda-recipes,lpantano/recipes,matthdsm/bioconda-recipes,HassanAmr/bioconda-recipes,HassanAmr/bioconda-recipes,roryk/recipes,ostrokach/bioconda-recipes,mdehollander/bioconda-recipes,peterjc/bioconda-recipes,acaprez/recipes,colinbrislawn/bioconda-recipes,gregvonkuster/bioconda-recipes,dmaticzka/bioconda-recipes,joachimwolff/bioconda-recipes,peterjc/bioconda-recipes,chapmanb/bioconda-recipes,BIMSBbioinfo/bioconda-recipes,martin-mann/bioconda-recipes,omicsnut/bioconda-recipes,peterjc/bioconda-recipes,rvalieris/bioconda-recipes,matthdsm/bioconda-recipes,instituteofpathologyheidelberg/bioconda-recipes,BIMSBbioinfo/bioconda-recipes,ostrokach/bioconda-recipes,mdehollander/bioconda-recipes,omicsnut/bioconda-recipes,daler/bioconda-recipes,peterjc/bioconda-recipes,hardingnj/bioconda-recipes,jasper1918/bioconda-recipes,guowei-he/bioconda-recipes,xguse/bioconda-recipes,bioconda/bioconda-recipes,mdehollander/bioconda-recipes,rvalieris/bioconda-recipes,gvlproject/bioconda-recipes,rob-p/bioconda-recipes,oena/bioconda-recipes,joachimwolff/bioconda-recipes,cokelaer/bioconda-recipes,phac-nml/bioconda-recipes,omicsnut/bioconda-recipes,dmaticzka/bioconda-recipes,saketkc/bioconda-recipes,jfallmann/bioconda-recipes,keuv-grvl/bioconda-recipes,lpantano/recipes,bow/bioconda-recipes,mcornwell1957/bioconda-recipes,xguse/bioconda-recipes,matthdsm/bioconda-recipes,phac-nml/bioconda-recipes,jfallmann/bioconda-recipes,mdehollander/bioconda-recipes,HassanAmr/bioconda-recipes,bioconda/recipes,ostrokach/bioconda-recipes,chapmanb/bioconda-recipes,CGATOxford/bioconda-recipes,jfallmann/bioconda-recipes,joachimwolff/bioconda-recipes,bow/bioconda-recipes,jasper1918/bioconda-recipes,phac-nml/bioconda-recipes,bioconda/bioconda-recipes,ivirshup/bioconda-recipes,saketkc/bioconda-recipes,martin-mann/bioconda-recipes,abims-sbr/bioconda-recipes,npavlovikj/bioconda-recipes,bow/bioconda-recipes,jasper1918/bioconda-recipes,saketkc/bioconda-recipes,abims-sbr/bioconda-recipes,matthdsm/bioconda-recipes,daler/bioconda-recipes,jasper1918/bioconda-recipes,ivirshup/bioconda-recipes,gregvonkuster/bioconda-recipes,abims-sbr/bioconda-recipes,BIMSBbioinfo/bioconda-recipes,rob-p/bioconda-recipes,saketkc/bioconda-recipes,keuv-grvl/bioconda-recipes,keuv-grvl/bioconda-recipes,dmaticzka/bioconda-recipes,rvalieris/bioconda-recipes,dkoppstein/recipes,BIMSBbioinfo/bioconda-recipes,abims-sbr/bioconda-recipes,BIMSBbioinfo/bioconda-recipes,Luobiny/bioconda-recipes,CGATOxford/bioconda-recipes,phac-nml/bioconda-recipes,JenCabral/bioconda-recipes
recipes/metaphlan2/download_metaphlan2_db.py
recipes/metaphlan2/download_metaphlan2_db.py
#!/usr/bin/env python3

import argparse
import tarfile
import os
import urllib2
import shutil


METAPHLAN2_URL = 'https://bitbucket.org/biobakery/metaphlan2/get/2.6.0.tar.gz'


def download_file(url):
    """Download a file from a URL

    Fetches a file from the specified URL.

    Returns the name that the file is saved with.

    """
    print("Downloading %s" % url)
    target = os.path.basename(url)
    print("Saving to %s" % target)
    open(target, 'wb').write(urllib2.urlopen(url).read())
    return target


def unpack_tar_archive(filen, wd=None):
    """Extract files from a TAR archive

    Given a TAR archive (which optionally can be compressed with either
    gzip or bz2), extract the files it contains and return a list of
    the resulting file names and paths.

    'wd' specifies the working directory to extract the files to,
    otherwise they are extracted to the current working directory.

    Once all the files are extracted the TAR archive file is deleted
    from the file system.

    """
    file_list = []
    if not tarfile.is_tarfile(filen):
        print("%s: not TAR file")
        return [filen]
    t = tarfile.open(filen)
    t.extractall(".")
    print("Removing %s" % filen)
    os.remove(filen)
    return file_list


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description='Download MetaPhlAn2 database')
    parser.add_argument('--output', help="Installation directory")
    args = parser.parse_args()

    if args.output:
        output = args.output
    else:
        output = os.path.dirname(os.path.realpath(__file__))
    print(output)

    if not os.path.exists(output):
        os.makedirs(output)

    metaphlan2_tarfile = download_file(METAPHLAN2_URL)
    file_list = unpack_tar_archive(metaphlan2_tarfile)
    print(file_list)
    shutil.move("biobakery-metaphlan2-c43e40a443ed/db_v20", output)
#!/usr/bin/env python3

import argparse
import tarfile
import os
import urllib2
import shutil


METAPHLAN2_URL = 'https://bitbucket.org/biobakery/metaphlan2/get/2.6.0.tar.gz'


def download_file(url, target=None, wd=None):
    """Download a file from a URL

    Fetches a file from the specified URL.

    If 'target' is specified then the file is saved to this name;
    otherwise it's saved as the basename of the URL.

    If 'wd' is specified then it is used as the 'working directory'
    where the file will be save on the local system.

    Returns the name that the file is saved with.

    """
    print "Downloading %s" % url
    if not target:
        target = os.path.basename(url)
    if wd:
        target = os.path.join(wd, target)
    print "Saving to %s" % target
    open(target, 'wb').write(urllib2.urlopen(url).read())
    return target


def unpack_tar_archive(filen, wd=None):
    """Extract files from a TAR archive

    Given a TAR archive (which optionally can be compressed with either
    gzip or bz2), extract the files it contains and return a list of
    the resulting file names and paths.

    'wd' specifies the working directory to extract the files to,
    otherwise they are extracted to the current working directory.

    Once all the files are extracted the TAR archive file is deleted
    from the file system.

    """
    file_list = []
    if not tarfile.is_tarfile(filen):
        print "%s: not TAR file"
        return [filen]
    t = tarfile.open(filen)
    t.extractall(".")
    print "Removing %s" % filen
    os.remove(filen)
    return file_list


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Download MetaPhlAn2 database')
    parser.add_argument('--output', help="Installation directory")
    args = parser.parse_args()

    if args.output:
        output = args.output
    else:
        output = os.path.dirname(os.path.realpath(__file__))
    print(output)

    if not os.path.exists(output):
        os.makedirs(output)

    metaphlan2_tarfile = download_file(METAPHLAN2_URL)
    file_list = unpack_tar_archive(metaphlan2_tarfile)
    print(file_list)
    shutil.move("biobakery-metaphlan2-c43e40a443ed/db_v20", output)
mit
Python
d6432e9718a160bda79afe473adc9630d36a9ce5
add v2.4.40 (#25065)
LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack
var/spack/repos/builtin/packages/bedops/package.py
var/spack/repos/builtin/packages/bedops/package.py
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Bedops(MakefilePackage):
    """BEDOPS is an open-source command-line toolkit that performs highly
    efficient and scalable Boolean and other set operations, statistical
    calculations, archiving, conversion and other management of genomic data
    of arbitrary scale."""

    homepage = "https://bedops.readthedocs.io"
    url = "https://github.com/bedops/bedops/archive/v2.4.39.tar.gz"

    version('2.4.40', sha256='0670f9ce2da4b68ab13f82c023c84509c7fce5aeb5df980c385fac76eabed4fb')
    version('2.4.39', sha256='f8bae10c6e1ccfb873be13446c67fc3a54658515fb5071663883f788fc0e4912')
    version('2.4.35', sha256='da0265cf55ef5094834318f1ea4763d7a3ce52a6900e74f532dd7d3088c191fa')
    version('2.4.34', sha256='533a62a403130c048d3378e6a975b73ea88d156d4869556a6b6f58d90c52ed95')
    version('2.4.30', sha256='218e0e367aa79747b2f90341d640776eea17befc0fdc35b0cec3c6184098d462')

    @property
    def build_targets(self):
        # avoid static linking with glibc for all invocations
        return ['SFLAGS=']

    def install(self, spec, prefix):
        mkdirp(prefix.bin)
        make('install', "BINDIR=%s" % prefix.bin)
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Bedops(MakefilePackage):
    """BEDOPS is an open-source command-line toolkit that performs highly
    efficient and scalable Boolean and other set operations, statistical
    calculations, archiving, conversion and other management of genomic data
    of arbitrary scale."""

    homepage = "https://bedops.readthedocs.io"
    url = "https://github.com/bedops/bedops/archive/v2.4.39.tar.gz"

    version('2.4.39', sha256='f8bae10c6e1ccfb873be13446c67fc3a54658515fb5071663883f788fc0e4912')
    version('2.4.35', sha256='da0265cf55ef5094834318f1ea4763d7a3ce52a6900e74f532dd7d3088c191fa')
    version('2.4.34', sha256='533a62a403130c048d3378e6a975b73ea88d156d4869556a6b6f58d90c52ed95')
    version('2.4.30', sha256='218e0e367aa79747b2f90341d640776eea17befc0fdc35b0cec3c6184098d462')

    @property
    def build_targets(self):
        # avoid static linking with glibc for all invocations
        return ['SFLAGS=']

    def install(self, spec, prefix):
        mkdirp(prefix.bin)
        make('install', "BINDIR=%s" % prefix.bin)
lgpl-2.1
Python
7b486fd84dfca220b1415fa2956a8c4bc32dc470
add version 0.7
LLNL/spack,iulian787/spack,LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack
var/spack/repos/builtin/packages/py-ics/package.py
var/spack/repos/builtin/packages/py-ics/package.py
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyIcs(PythonPackage):
    """Ics.py : iCalendar for Humans

    Ics.py is a pythonic and easy iCalendar library. Its goals are to
    read and write ics data in a developer friendly way. iCalendar is
    a widely-used and useful format but not user friendly. Ics.py is
    there to give you the ability of creating and reading this format
    without any knowledge of it. It should be able to parse every
    calendar that respects the rfc5545 and maybe some more. It also
    outputs rfc compliant calendars. iCalendar (file extension .ics)
    is used by Google Calendar, Apple Calendar, Android and many more.
    Ics.py is available for Python>=3.6 and is Apache2 Licensed.
    """

    homepage = "https://github.com/C4ptainCrunch/ics.py"
    url = "https://github.com/C4ptainCrunch/ics.py/archive/v0.6.tar.gz"

    version('0.7', sha256='48c637e5eb8dfc817b1f3f6b3f662ba19cfcc25f8f71eb42f5d07e6f2c573994')
    version('0.6', sha256='4947263136202d0489d4f5e5c7175dfd2db5d3508b8b003ddeaef96347f68830')

    depends_on('python@3.6:', type=('build', 'run'))
    depends_on('py-setuptools', type='build')
    depends_on('py-python-dateutil', type=('build', 'run'))
    depends_on('py-arrow@0.11:0.14.99', type=('build', 'run'))
    depends_on('py-six@1.5:', type=('build', 'run'))
    depends_on('py-tatsu@4.2:', type=('build', 'run'), when='@0.6:')
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyIcs(PythonPackage):
    """Ics.py : iCalendar for Humans

    Ics.py is a pythonic and easy iCalendar library. Its goals are to
    read and write ics data in a developer friendly way. iCalendar is
    a widely-used and useful format but not user friendly. Ics.py is
    there to give you the ability of creating and reading this format
    without any knowledge of it. It should be able to parse every
    calendar that respects the rfc5545 and maybe some more. It also
    outputs rfc compliant calendars. iCalendar (file extension .ics)
    is used by Google Calendar, Apple Calendar, Android and many more.
    Ics.py is available for Python>=3.6 and is Apache2 Licensed.
    """

    homepage = "https://github.com/C4ptainCrunch/ics.py"
    url = "https://github.com/C4ptainCrunch/ics.py/archive/v0.6.tar.gz"

    version('0.6', sha256='4947263136202d0489d4f5e5c7175dfd2db5d3508b8b003ddeaef96347f68830')

    depends_on('python@3.6:', type=('build', 'run'))
    depends_on('py-setuptools', type='build')
    depends_on('py-python-dateutil', type=('build', 'run'))
    depends_on('py-arrow@0.11:0.14.99', type=('build', 'run'))
    depends_on('py-six@1.5:', type=('build', 'run'))
    depends_on('py-tatsu@4.2:', type=('build', 'run'), when='@0.6:')
lgpl-2.1
Python
4c5c7aa74b2dec2cbfb6b6bd7e24d5922e92c112
Document 'repo status' output
flingone/git-repo,artprogramming/git-repo,duralog/repo,sapiippo/git-repo,HenningSchroeder/git-repo,ronan22/repo,Mioze7Ae/android_tools_repo,vmx/git-repo,flingone/git-repo,petemoore/git-repo,linuxdeepin/git-repo,caicry/android.repo,alanbian/git-repo,lifuzu/repo,martinjina/git-repo,Jokebin/git-repo,mer-tools/git-repo,ossxp-com/repo,ossxp-com/repo,testbetta/repo-pub,xin3liang/git-repo,artprogramming/git-repo,Jokebin/git-repo,MonkeyZZZZ/tools_repo,DavidPu/git-repo,duralog/repo,yanjiegit/jiangxin-repo,yrchen/repo,yath/repo,enochcheng/git-repo,eligoenergy/git-repo,lightsofapollo/git-repo,cupcicm/git-repo,yanjiegit/jiangxin-repo,Pivosgroup/google-git-repo,cupcicm/git-repo,yanjiegit/jiangxin-repo,dsiganos/git-repo,nucked/git-repo,bhargavkumar040/android-source-browsing.git-repo,testbetta/git-repo-pub,codetutil/git-repo,jcfrank/myrepo,ThangBK2009/android-source-browsing.git-repo,11NJ/git-repo,windyan/git-repo,martinjina/git-repo,petemoore/git-repo,chusiang/git-repo,bestes/repo,Copypeng/git-repo,GatorQue/git-repo-flow,sapiippo/git-repo,lipro-yocto/git-repo,eric011/git-repo,folpindo/git-repo,testbetta/git-repo-pub,wavecomp/git-repo,lightsofapollo/git-repo,ibollen/repo,loungin/git-repo,aosp-mirror/tools_repo,jpzhu/git-repo,zodsoft/git-repo,posix4e/git-repo,muzili/repo,josn-jys/git-repo,lygstate/repo,lewixliu/git-repo,ilansmith/repo,todototry/git-repo,fantasyfly/git-repo,AurelienBallier/git-repo,kangear/git-repo,eligoenergy/git-repo,lipro-yocto/git-repo,enochcheng/git-repo,jmollan/git-repo,lshain/repo,weixiaodong/git-repo,xyyangkun/git-repo,hanchentech/git-repo,mozilla/git-repo,finik/git-repo,darrengarvey/git-repo,jingyu/git-repo,jcarlson23/repo,masscre/git-repo,rochy2014/repo,wzhy90/git-repo,gbraad/git-repo,idwanglu2010/git-repo,vmx/git-repo,bhargavkumar040/android-source-browsing.git-repo,opensourcechipspark/repo,jmollan/git-repo,hejq0310/git-repo,CedricCabessa/repo,jcarlson23/repo,masscre/git-repo,rochy2014/repo,wzhy90/git-repo,gabbayo/git-repo,Pankaj-Sakariya/android-source-browsing.git-repo,rochy2014/repo,wavecomp/git-repo,xxxrac/git-repo,ceejatec/git-repo,nickpith/git-repo,cupcicm/git-repo,yath/repo,rewu/git-repo,xyyangkun/git-repo,windyan/git-repo,GerritCodeReview/git-repo,FuangCao/repo,zbunix/git-repo,azzurris/git-repo,chenyun90323/git-repo,lanniaoershi/git-repo,baidurom/repo,lovesecho/xrepo,finik/git-repo,posix4e/git-repo,wzhy90/git-repo,yanjiegit/andriod-repo,kangear/git-repo,simbasailor/git-repo,RuanJG/git-repo,dinpot/git-repo,jangle789/git-repo,biaolv/git-repo,qioixiy/git-repo,yrchen/repo,liaog/git-repo,zemug/repo,bestes/repo,4455jkjh/repo,TheQtCompany/git-repo,chzhong/git-repo,frogbywyplay/git-repo,nuclearmistake/repo,xianyo/git-repo,jcarlson23/repo,IbpTeam/repo,amersons/git-repo,daimajia/git-repo,sramaswamy/repo,zbunix/git-repo,nucked/git-repo,qingpingguo/git-repo,wavecomp/git-repo,TheQtCompany/git-repo,chenzilin/git-repo,zodsoft/git-repo,jingyu/git-repo,GerritCodeReview/git-repo,djibi2/git-repo,cubieboard/git-repo,Mioze7Ae/android_tools_repo,testbetta/repo-pub,luohongzhen/git-repo,cubieboard/git-repo,Pankaj-Sakariya/android-source-browsing.git-repo,lifuzu/repo,COrtegaflowcorp/git-repo,LA-Toth/git-repo,luoqii/android_repo_copy,lygstate/repo,yanjiegit/andriod-repo,WanZheng/repo,couchbasedeps/git-repo,ox-it/wl-repo,iAlios/git-repo,xecle/git-repo,FuangCao/git-repo,nuclearmistake/repo,hackbutty/git-repo,Fr4ncky/git-repo,AurelienBallier/git-repo,Fr4ncky/git-repo,aep/repo,alessandro-aglietti/git-repo,lewixliu/git-repo,lshain/repo,weixiaodong/git-repo,llg84/google_repo,loungin/git-repo,alanbian/git-repo,qingpingguo/git-repo,chusiang/git-repo,mohankr/echoes_tools_repo,duralog/repo,xecle/git-repo,venus-solar/git-repo,kdavis-mozilla/repo-repo,armpc/repo,ediTLJ/git-repo,hisilicon/git-repo,sb2008/git-repo,dsiganos/git-repo,ChronoMonochrome/repo,la4430/repo,xin3liang/tools_repo,lovesecho/xrepo,yanjiegit/andriod-repo,aosp-mirror/tools_repo,xin3liang/git-repo,jangle789/git-repo,FuangCao/repo,qioixiy/git-repo,finik/git-repo,hejq0310/git-repo,Pivosgroup/google-git-repo,xxxrac/git-repo,caicry/android.repo,cubieboard/git-repo,sramaswamy/repo,ibollen/repo,aep/repo,FlymeOS/repo,eric011/git-repo,hisilicon/git-repo,ericmckean/git-repo,sb2008/git-repo,qupai/git-repo,hanchentech/git-repo,opencontrail-ci-admin/git-repo,LA-Toth/git-repo,derron/git-repo,ericmckean/git-repo,hacker-camp/google_repo,11NJ/git-repo,amersons/git-repo,mer-tools/git-repo,SunRain/repo,aep/repo,slfyusufu/repo,azzurris/git-repo,luohongzhen/git-repo,demonyangyue/git-repo,kdavis-mozilla/repo-repo,daimajia/git-repo,dodocat/git-repo,luoqii/repo,abstrakraft/repo,urras/git-repo,SunRain/repo,hyper123/git-repo,mixedpuppy/git-repo,RuanJG/git-repo,frogbywyplay/git-repo,urras/git-repo,CedricCabessa/repo,ediTLJ/git-repo,chenyun90323/git-repo,dinpot/git-repo,esingdo/tools-repo,la4430/repo,MIPS/repo,simbasailor/git-repo,opensourcechipspark/repo,DavidPu/git-repo,opencontrail-ci-admin/git-repo,ronan22/repo,lifuzu/repo,baidurom/repo,codetutil/git-repo,muzili/repo,ChronoMonochrome/repo,nbp/git-repo,chenzilin/git-repo,chzhong/git-repo,jpzhu/git-repo,hanw/git-repo,darrengarvey/git-repo,fantasyfly/git-repo,slfyusufu/repo,idwanglu2010/git-repo,biaolv/git-repo,ilansmith/repo,venus-solar/git-repo,lchiocca/repo,COrtegaflowcorp/git-repo,crashkita/git-repo,crashkita/git-repo,hacker-camp/google_repo,llg84/google_repo,folpindo/git-repo,CedricCabessa/repo,IbpTeam/repo,masscre/git-repo,esingdo/tools-repo,alayi/repo,alessandro-aglietti/git-repo,alayi/repo,SaleJumper/android-source-browsing.git-repo,linux-knight/repo,ox-it/wl-repo,liaog/git-repo,lanniaoershi/git-repo,mozilla/git-repo,sndnvaps/git_repo,linux-knight/repo,FuangCao/git-repo,SaleJumper/android-source-browsing.git-repo,armpc/repo,Copypeng/git-repo,dodocat/git-repo,zbunix/git-repo,mohankr/echoes_tools_repo,xhteam/git-repo,gbraad/git-repo,zemug/repo,lovesecho/xrepo,rewu/git-repo
subcmds/status.py
subcmds/status.py
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from command import PagedCommand

class Status(PagedCommand):
  common = True
  helpSummary = "Show the working tree status"
  helpUsage = """
%prog [<project>...]
"""
  helpDescription = """
'%prog' compares the working tree to the staging area (aka index),
and the most recent commit on this branch (HEAD), in each project
specified.  A summary is displayed, one line per file where there
is a difference between these three states.

Status Display
--------------

The status display is organized into three columns of information,
for example if the file 'subcmds/status.py' is modified in the
project 'repo' on branch 'devwork':

  project repo/                                   branch devwork
   -m     subcmds/status.py

The first column explains how the staging area (index) differs from
the last commit (HEAD).  Its values are always displayed in upper
case and have the following meanings:

 -:  no difference
 A:  added         (not in HEAD,     in index                     )
 M:  modified      (    in HEAD,     in index, different content  )
 D:  deleted       (    in HEAD, not in index                     )
 R:  renamed       (not in HEAD,     in index, path changed       )
 C:  copied        (not in HEAD,     in index, copied from another)
 T:  mode changed  (    in HEAD,     in index, same content       )
 U:  unmerged; conflict resolution required

The second column explains how the working directory differs from
the index.  Its values are always displayed in lower case and have
the following meanings:

 -:  new / unknown (not in index,     in work tree                )
 m:  modified      (    in index,     in work tree, modified      )
 d:  deleted       (    in index, not in work tree                )
"""

  def Execute(self, opt, args):
    all = self.GetProjects(args)
    clean = 0

    for project in all:
      state = project.PrintWorkTreeStatus()
      if state == 'CLEAN':
        clean += 1
    if len(all) == clean:
      print 'nothing to commit (working directory clean)'
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from command import PagedCommand

class Status(PagedCommand):
  common = True
  helpSummary = "Show the working tree status"
  helpUsage = """
%prog [<project>...]
"""

  def Execute(self, opt, args):
    all = self.GetProjects(args)
    clean = 0

    for project in all:
      state = project.PrintWorkTreeStatus()
      if state == 'CLEAN':
        clean += 1
    if len(all) == clean:
      print 'nothing to commit (working directory clean)'
apache-2.0
Python