Dataset schema (per-record fields):
  commit        string, length 40-40
  subject       string, length 4-1.73k
  repos         string, length 5-127k
  old_file      string, length 2-751
  new_file      string, length 2-751
  new_contents  string, length 1-8.98k
  old_contents  string, length 0-6.59k
  license       string, 13 classes
  lang          string, 23 classes
ecfbaded5e03529d1b189b6b5fc8b2f8516c4b31
Add hoster plugin for ARD mediathek
vuolter/pyload,vuolter/pyload,vuolter/pyload,pyblub/pyload,pyblub/pyload
module/plugins/hoster/ARD.py
module/plugins/hoster/ARD.py
import subprocess
import re
import os.path
import os

from module.utils import save_join, save_path
from module.plugins.Hoster import Hoster

# Requires rtmpdump
# by Roland Beermann


class RTMP:
    # TODO: Port to some RTMP-library like rtmpy or similar
    # TODO?: Integrate properly into the API of pyLoad

    command = "rtmpdump"

    @classmethod
    def download_rtmp_stream(cls, url, output_file, playpath=None):
        opts = [
            "-r", url,
            "-o", output_file,
        ]
        if playpath:
            opts.append("--playpath")
            opts.append(playpath)

        cls._invoke_rtmpdump(opts)

    @classmethod
    def _invoke_rtmpdump(cls, opts):
        args = [cls.command]
        args.extend(opts)

        return subprocess.check_call(args)


class ARD(Hoster):
    __name__ = "ARD Mediathek"
    __version__ = "0.1"
    __pattern__ = r"http://www\.ardmediathek\.de/.*"
    __config__ = []

    def process(self, pyfile):
        site = self.load(pyfile.url)

        avail_videos = re.findall(r"""mediaCollection.addMediaStream\(0, ([0-9]*), "([^\"]*)", "([^\"]*)", "[^\"]*"\);""", site)
        # The higher the number, the better the quality
        avail_videos.sort(key=lambda videodesc: int(videodesc[0]), reverse=True)

        quality, url, playpath = avail_videos[0]

        pyfile.name = re.search(r"<h1>([^<]*)</h1>", site).group(1)

        if url.startswith("http"):
            # Best quality is available over HTTP. Very rare.
            self.download(url)
        else:
            pyfile.setStatus("downloading")

            download_folder = self.config['general']['download_folder']
            location = save_join(download_folder, pyfile.package().folder)

            if not os.path.exists(location):
                os.makedirs(location, int(self.core.config["permission"]["folder"], 8))

                if self.core.config["permission"]["change_dl"] and os.name != "nt":
                    try:
                        # pwd/grp are POSIX-only, so import them lazily
                        # inside this guarded block.
                        from pwd import getpwnam
                        from grp import getgrnam

                        uid = getpwnam(self.config["permission"]["user"])[2]
                        gid = getgrnam(self.config["permission"]["group"])[2]
                        os.chown(location, uid, gid)
                    except Exception as e:
                        self.log.warning(_("Setting User and Group failed: %s") % str(e))

            output_file = save_join(location, save_path(pyfile.name))

            RTMP.download_rtmp_stream(url, playpath=playpath, output_file=output_file)
agpl-3.0
Python
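A minimal standalone sketch of driving the `RTMP` helper from the commit above; the stream URL, playpath, and output filename are hypothetical placeholders, and `rtmpdump` must be on the PATH:

from module.plugins.hoster.ARD import RTMP

# Placeholder stream details -- not a real ARD stream.
RTMP.download_rtmp_stream(
    "rtmp://example.invalid/vod",
    output_file="clip.flv",
    playpath="mp4:some/clip.mp4",
)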
206ef4f7aad6c4ce51e4737a7d506a79061f1047
Add an `import_or_skip` function to testing.
RaoUmer/distarray,enthought/distarray,enthought/distarray,RaoUmer/distarray
distarray/testing.py
distarray/testing.py
import unittest
import importlib
from functools import wraps

from distarray.error import InvalidCommSizeError
from distarray.mpiutils import MPI, create_comm_of_size


def import_or_skip(name):
    """Try importing `name`, raise SkipTest on failure.

    Parameters
    ----------
    name : str
        Module name to try to import.

    Returns
    -------
    module : module object
        Module object imported by importlib.

    Raises
    ------
    unittest.SkipTest
        If the attempted import raises an ImportError.

    Examples
    --------
    >>> h5py = import_or_skip('h5py')
    >>> h5py.get_config()
    <h5py.h5.H5PYConfig at 0x103dd5a78>

    """
    try:
        return importlib.import_module(name)
    except ImportError:
        errmsg = '%s not found... skipping.' % name
        raise unittest.SkipTest(errmsg)


def comm_null_passes(fn):
    """Decorator. If `self.comm` is COMM_NULL, pass."""

    @wraps(fn)
    def wrapper(self, *args, **kwargs):
        if self.comm == MPI.COMM_NULL:
            pass
        else:
            return fn(self, *args, **kwargs)

    return wrapper


class MpiTestCase(unittest.TestCase):

    """Base test class for MPI test cases.

    Overload `get_comm_size` to change the default comm size (default is 4).
    Overload `more_setUp` to add more to the default `setUp`.
    """

    def get_comm_size(self):
        return 4

    def more_setUp(self):
        pass

    def setUp(self):
        try:
            self.comm = create_comm_of_size(self.get_comm_size())
        except InvalidCommSizeError:
            msg = "Must run with comm size >= {}."
            raise unittest.SkipTest(msg.format(self.get_comm_size()))
        else:
            self.more_setUp()

    def tearDown(self):
        if self.comm != MPI.COMM_NULL:
            self.comm.Free()
import unittest
from functools import wraps

from distarray.error import InvalidCommSizeError
from distarray.mpiutils import MPI, create_comm_of_size


def comm_null_passes(fn):
    """Decorator. If `self.comm` is COMM_NULL, pass."""

    @wraps(fn)
    def wrapper(self, *args, **kwargs):
        if self.comm == MPI.COMM_NULL:
            pass
        else:
            return fn(self, *args, **kwargs)

    return wrapper


class MpiTestCase(unittest.TestCase):

    """Base test class for MPI test cases.

    Overload `get_comm_size` to change the default comm size (default is 4).
    Overload `more_setUp` to add more to the default `setUp`.
    """

    def get_comm_size(self):
        return 4

    def more_setUp(self):
        pass

    def setUp(self):
        try:
            self.comm = create_comm_of_size(self.get_comm_size())
        except InvalidCommSizeError:
            msg = "Must run with comm size >= {}."
            raise unittest.SkipTest(msg.format(self.get_comm_size()))
        else:
            self.more_setUp()

    def tearDown(self):
        if self.comm != MPI.COMM_NULL:
            self.comm.Free()
bsd-3-clause
Python
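A short sketch of how `import_or_skip` would be used inside a test module; `h5py` here is just an example optional dependency:

import unittest

from distarray.testing import import_or_skip


class TestOptionalHdf5(unittest.TestCase):

    def test_h5py_available(self):
        # Skips, rather than errors, on machines without h5py installed.
        h5py = import_or_skip('h5py')
        self.assertTrue(hasattr(h5py, 'File'))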
80c1dba49bbdaf4d0d37e8a06549774d2afd019a
Add cosmo_viewer app
gcasey/cosmotrack,gcasey/cosmotrack
pvapp/cosmo_viewer.py
pvapp/cosmo_viewer.py
################################################################################
#
#  Copyright 2013 Kitware, Inc.
#
#  Licensed under the Apache License, Version 2.0 (the "License");
#  you may not use this file except in compliance with the License.
#  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
#  Unless required by applicable law or agreed to in writing, software
#  distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.
#
################################################################################

# import to process args
import sys
import os
import math
import json
import argparse

# import annotations
from autobahn.wamp import exportRpc

# import paraview modules.
from paraview import simple, web, servermanager, web_helper, paraviewweb_wamp, paraviewweb_protocols

# Setup global variables
timesteps = []
currentTimeIndex = 0
view = None
dataPath = None
authKey = None


def initView(width, height):
    global view
    view = simple.GetRenderView()
    simple.Render()
    view.ViewSize = [width, height]
    view.Background = [0.0, 0.0, 0.0]
    view.OrientationAxesLabelColor = [0, 0, 0]


# This class defines the exposed RPC methods for the midas application
class CosmoApp(paraviewweb_wamp.ServerProtocol):

    def initialize(self):
        global authKey

        # Bring used components
        self.registerParaViewWebProtocol(paraviewweb_protocols.ParaViewWebMouseHandler())
        self.registerParaViewWebProtocol(paraviewweb_protocols.ParaViewWebViewPort())
        self.registerParaViewWebProtocol(paraviewweb_protocols.ParaViewWebViewPortImageDelivery())
        self.registerParaViewWebProtocol(paraviewweb_protocols.ParaViewWebViewPortGeometryDelivery())

        # Update authentication key to use
        #self.updateSecret(authKey)

    @exportRpc("openFile")
    def openFile(self, filename):
        fileid = ""
        if self.reader:
            try:
                simple.Delete(self.reader)
            except:
                self.reader = None
        try:
            self.reader = simple.OpenDataFile(filename)
            simple.Show()
            simple.Render()
            simple.ResetCamera()
            fileid = self.reader.GetGlobalIDAsString()
        except:
            self.reader = None

        return fileid


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Midas+ParaViewWeb application")

    web.add_arguments(parser)
    parser.add_argument("--data-dir", default=os.getcwd(),
                        help="path to data directory", dest="path")
    parser.add_argument("--width", default=575,
                        help="width of the render window", dest="width")
    parser.add_argument("--height", default=575,
                        help="height of the render window", dest="height")

    args = parser.parse_args()

    dataPath = args.path
    authKey = args.authKey
    width = args.width
    height = args.height

    initView(width, height)
    web.start_webserver(options=args, protocol=CosmoApp)
apache-2.0
Python
8287876963af72756c3ff9102526c56f3e28a8a2
Test for file resources
lexman/tuttle,lexman/tuttle,lexman/tuttle
tests/functional_tests/test_resources/test_file_resource.py
tests/functional_tests/test_resources/test_file_resource.py
# -*- coding: utf8 -*-
from tuttle.resources import FileResource
import tuttle.resources
from os import path


class TestHttpResource():

    def test_real_resource_exists(self):
        """A real resource should exist"""
        file_url = "file://{}".format(path.abspath(tuttle.resources.__file__))
        res = FileResource(file_url)
        assert res.exists()

    def test_fictive_resource_exists(self):
        """A fictive resource should not exist"""
        res = FileResource("fictive_file")
        assert not res.exists()
mit
Python
14755cda032b5cb44626b2da66d943517427f947
test for malformed db imports
moronbros/f5go,moronbros/f5go
tests/test_core.py
tests/test_core.py
"""unit tests for core.py""" import pytest import core def test_malformed_linkdatabase(): # pytest.set_trace() with pytest.raises(EOFError): core.LinkDatabase().load(db='tests/garbage.pickle')
mit
Python
3a59057f7465d9982e26b92cddafa0ea9ba48806
Add new package: universal-ctags (#18962)
iulian787/spack,iulian787/spack,LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack,LLNL/spack,LLNL/spack
var/spack/repos/builtin/packages/universal-ctags/package.py
var/spack/repos/builtin/packages/universal-ctags/package.py
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class UniversalCtags(AutotoolsPackage):
    """Universal Ctags generates an index (or tag) file of language
    objects found in source files for many popular programming languages.
    This index makes it easy for text editors and other tools to locate
    the indexed items."""

    homepage = "https://ctags.io/"
    git      = "https://github.com/universal-ctags/ctags.git"

    version('master', branch='master')

    depends_on('autoconf', type='build')
    depends_on('automake', type='build')
    depends_on('libtool', type='build')
    depends_on('m4', type='build')
lgpl-2.1
Python
f29e278a1b661224c9580d8275654a8c6fe7d3cf
add test for http.encode_request
meiraka/python-bbs2ch
tests/test_http.py
tests/test_http.py
"""Test bbs2ch.http module.""" from bbs2ch import http def test_host_path(): """Return hostname and path from url.""" assert (u'hoge.com', '/') == http.host_path(u'http://hoge.com/') def test_encode_request_get(): """Return http request string.""" header = [(u'Key', u'Value'), (u'Key2', u'Value2')] assert ('GET / HTTP/1.1\r\n' 'Key: Value\r\n' 'Key2: Value2\r\n' '\r\n' '\r\n' == http.encode_request('GET', u'/', header)) def test_encode_request_post(): """Return http request string. if body is not empty, add header to Content-length and Content-Type. """ header = [(u'Key', u'Value'), (u'Key2', u'Value2')] body = [(u'key', u'value'), (u'key2', u'value2')] assert ('POST / HTTP/1.1\r\n' 'Key: Value\r\n' 'Key2: Value2\r\n' 'Content-Type: application/x-www-form-urlencoded\r\n' 'Content-Length: 21\r\n' '\r\n' 'key=value&key2=value2\r\n' == http.encode_request(u'POST', u'/', header, body))
bsd-3-clause
Python
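The tests above fully pin down the wire format. One hypothetical `encode_request` that satisfies both assertions is sketched below; this is not the actual `bbs2ch.http` implementation, just one way to meet the asserted strings:

try:
    from urllib.parse import urlencode  # Python 3
except ImportError:
    from urllib import urlencode        # Python 2, matching the u'' literals


def encode_request(method, path, header, body=None):
    lines = ['%s %s HTTP/1.1' % (method, path)]
    lines.extend('%s: %s' % (key, value) for key, value in header)
    if body:
        payload = urlencode(body)  # 'key=value&key2=value2'
        lines.append('Content-Type: application/x-www-form-urlencoded')
        lines.append('Content-Length: %d' % len(payload))
        return '\r\n'.join(lines) + '\r\n\r\n' + payload + '\r\n'
    return '\r\n'.join(lines) + '\r\n\r\n\r\n'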
f032556bf07b37f9544c71ecad7aed472021bc97
Add script to update giving and teams receiving
gratipay/gratipay.com,gratipay/gratipay.com,gratipay/gratipay.com,gratipay/gratipay.com
sql/branch.py
sql/branch.py
import sys

from gratipay import wireup

db = wireup.db(wireup.env())

participants = db.all("""
    SELECT p.*::participants
      FROM participants p
     WHERE (SELECT error
              FROM current_exchange_routes er
             WHERE er.participant = p.id
               AND network = 'braintree-cc') <> ''
""")

total = len(participants)
print("%s participants with failing cards" % total)

counter = 1
for p in participants:
    sys.stdout.write("\rUpdating (%i/%i)" % (counter, total))
    sys.stdout.flush()
    counter += 1
    p.update_giving_and_teams()

print("Done!")
mit
Python
da66b2a2a2e2a73ffd986aea6ba5d086d43892fc
Add main smoketest
rshipp/chaser,rshipp/chaser
tests/test_main.py
tests/test_main.py
import unittest
import sys

from chaser import main


class TestMain(unittest.TestCase):
    def test_smoke_main(self):
        sys.argv = ["chaser"]
        main()
bsd-3-clause
Python
ad1d349d49072b5bda6641db4f070704fde81e5f
Add FCC.
divergentdave/inspectors-general,lukerosiak/inspectors-general
inspectors/fcc.py
inspectors/fcc.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import datetime
import logging
import os
from urllib.parse import urljoin

from bs4 import BeautifulSoup
from utils import utils, inspector

# http://transition.fcc.gov/oig/oigreportsaudit.html
# Oldest report: 1994

# options:
#   standard since/year options for a year range to fetch from.
#
# Notes for IG's web team:

AUDIT_REPORTS_URL = "http://transition.fcc.gov/oig/oigreportsaudit.html"
SEMIANNUAL_REPORTS_URL = "http://transition.fcc.gov/oig/oigreportssemiannual.html"
OTHER_REPORTS_URL = "http://transition.fcc.gov/oig/oigreportsletters.html"


def run(options):
    year_range = inspector.year_range(options)

    for url in [AUDIT_REPORTS_URL, SEMIANNUAL_REPORTS_URL, OTHER_REPORTS_URL]:
        doc = beautifulsoup_from_url(url)
        results = doc.find_all("table", {"border": 2})[0].select("tr")
        for index, result in enumerate(results):
            if index < 2:
                # The first two rows are headers
                continue
            report = report_from(result, url, year_range)
            if report:
                inspector.save_report(report)


def report_from(result, page_url, year_range):
    if not result.text.strip():
        # Nothing in the entire row, just an empty row
        return

    report_url = urljoin(page_url, result.select("td a")[0].get('href'))
    report_filename = report_url.split("/")[-1]
    report_id, extension = os.path.splitext(report_filename)

    published_on_text = result.select("td")[0].text.split("\r\n")[0].strip()

    if len(result.select("td")) == 2:
        # Semiannual report
        published_on_text = published_on_text.split("to")[-1].split("through")[-1].strip()
        published_on = datetime.datetime.strptime(published_on_text, '%B %d, %Y')
        title = "Semi-Annual Report - {}".format(published_on_text)
    else:
        try:
            published_on = datetime.datetime.strptime(published_on_text, '%m/%d/%y')
        except ValueError:
            published_on = datetime.datetime.strptime(published_on_text, '%m/%d/%Y')
        title = result.select("td")[1].text.strip()

    if published_on.year not in year_range:
        logging.debug("[%s] Skipping, not in requested range." % report_url)
        return

    report = {
        'inspector': 'fcc',
        'inspector_url': 'http://fcc.gov/oig/',
        'agency': 'fcc',
        'agency_name': "Federal Communications Commission",
        'report_id': report_id,
        'url': report_url,
        'title': title,
        'published_on': datetime.datetime.strftime(published_on, "%Y-%m-%d"),
    }
    return report


def beautifulsoup_from_url(url):
    body = utils.download(url)
    return BeautifulSoup(body)


utils.run(run) if (__name__ == "__main__") else None
cc0-1.0
Python
c510b27dea59eeae229cf30dabc39ae083f286b0
Add better indexes
Ilhasoft/ureport,rapidpro/ureport,Ilhasoft/ureport,rapidpro/ureport,Ilhasoft/ureport,Ilhasoft/ureport,rapidpro/ureport,rapidpro/ureport
ureport/stats/migrations/0017_better_indexes.py
ureport/stats/migrations/0017_better_indexes.py
# Generated by Django 3.2.6 on 2021-09-27 17:49

from django.db import migrations

INDEX_POLLSTATS_ORG_RESULT_SQL = """
CREATE INDEX IF NOT EXISTS stats_pollstats_org_result on stats_pollstats (org_id, flow_result_id) WHERE flow_result_id IS NOT NULL;
"""

INDEX_POLLSTATS_ORG_QST_RST_CAT_SQL = """
CREATE INDEX IF NOT EXISTS stats_pollstats_org_qstn_rslt_cat_age_gndr_schm_date_not_null on stats_pollstats (org_id, question_id, flow_result_id, category_id, flow_result_category_id, age_segment_id, gender_segment_id, scheme_segment_id, location_id, date) WHERE date IS NOT NULL;
"""


class Migration(migrations.Migration):

    dependencies = [
        ("stats", "0016_pollstats_scheme_segment"),
    ]

    operations = [
        migrations.RunSQL(INDEX_POLLSTATS_ORG_RESULT_SQL),
        migrations.RunSQL(INDEX_POLLSTATS_ORG_QST_RST_CAT_SQL),
    ]
agpl-3.0
Python
b304b1087d69d4142a9df5ad2db339e5aafe3331
Update category
munisisazade/developer_portal,munisisazade/developer_portal,munisisazade/developer_portal
news/views.py
news/views.py
from django.shortcuts import render, redirect, render_to_response
from django.template import RequestContext
from django.http import HttpResponse
from django.urls import reverse
from django.views.generic import TemplateView, DetailView
# Create your views here
from news.models import Slider, How_it_works, ArticleCategory, Contact_us, Article, RelationCategoryArticle, ArticleImages

"""
    Just in case test views
"""


def index(request):
    return redirect(reverse('main-index'))


class TemplateAllData(TemplateView):

    def get_context_data(self, **kwargs):
        context = super(TemplateAllData, self).get_context_data(**kwargs)
        context['categorys'] = ArticleCategory.objects.all()
        context['contact'] = Contact_us.objects.all()
        return context


class TestView(TemplateAllData):
    template_name = 'index.html'

    def get_context_data(self, **kwargs):
        context = super(TestView, self).get_context_data(**kwargs)
        context['slider'] = Slider.objects.filter(status=True)
        context['how_it'] = How_it_works.objects.all().order_by('id')
        context['feed'] = Article.objects.filter(status=True, home_page_status=True)
        return context


class AboutView(TemplateAllData):
    template_name = 'index-1.html'


class GalleryView(TemplateAllData):
    template_name = 'index-2.html'

    def get_context_data(self, **kwargs):
        context = super(GalleryView, self).get_context_data(**kwargs)
        context['albom'] = ArticleImages.objects.all()
        return context


class ContactsView(TemplateAllData):
    template_name = 'index-4.html'


class PrivacyView(TemplateAllData):
    template_name = 'index-5.html'


class CategoryDetailView(DetailView):
    model = ArticleCategory
    template_name = 'index-3.html'

    def get_context_data(self, **kwargs):
        context = super(CategoryDetailView, self).get_context_data(**kwargs)
        context['categorys'] = ArticleCategory.objects.all()
        context['contact'] = Contact_us.objects.all()
        context['cat_feed'] = RelationCategoryArticle.objects.filter(
            category_obj__slug=self.kwargs.get('slug'))
        return context
from django.shortcuts import render, redirect, render_to_response
from django.template import RequestContext
from django.http import HttpResponse
from django.urls import reverse
from django.views.generic import TemplateView, DetailView
# Create your views here
from news.models import Slider, How_it_works, ArticleCategory, Contact_us, Article, RelationCategoryArticle, ArticleImages

"""
    Just in case test views
"""


def index(request):
    return redirect(reverse('main-index'))


class TemplateAllData(TemplateView):

    def get_context_data(self, **kwargs):
        context = super(TemplateAllData, self).get_context_data(**kwargs)
        context['categorys'] = ArticleCategory.objects.all()
        context['contact'] = Contact_us.objects.all()
        return context


class TestView(TemplateAllData):
    template_name = 'index.html'

    def get_context_data(self, **kwargs):
        context = super(TestView, self).get_context_data(**kwargs)
        context['slider'] = Slider.objects.filter(status=True)
        context['how_it'] = How_it_works.objects.all().order_by('id')
        context['feed'] = Article.objects.filter(status=True, home_page_status=True)
        return context


class AboutView(TemplateAllData):
    template_name = 'index-1.html'


class GalleryView(TemplateAllData):
    template_name = 'index-2.html'

    def get_context_data(self, **kwargs):
        context = super(GalleryView, self).get_context_data(**kwargs)
        context['albom'] = ArticleImages.objects.all()
        return context


class ContactsView(TemplateAllData):
    template_name = 'index-4.html'


class PrivacyView(TemplateAllData):
    template_name = 'index-5.html'


class CategoryDetailView(DetailView):
    model = ArticleCategory
    template_name = 'index-3.html'

    def get_context_data(self, **kwargs):
        context = super(CategoryDetailView, self).get_context_data(**kwargs)
        context['categorys'] = ArticleCategory.objects.all()
        context['contact'] = Contact_us.objects.all()
        context['cat_feed'] = RelationCategoryArticle.objects.filter(
            category_obj__slug=kwargs.get('slug'))
        return context
mit
Python
fb86dcdd6046c7d35e932396ba541671727b4d01
rearrange imports to standards
conslo/ngSe
ngSe/utils.py
ngSe/utils.py
from functools import wraps
from time import time, sleep

from .exceptions import element_exceptions


def retry(f=None, timeout=30, interval=0.1):
    """
    When working with a responsive UI, sometimes elements are not ready at the very second you request it
    This wrapper will keep on retrying finding or interacting with the element until its ready
    """
    # This allows us to use '@retry' or '@retry(timeout=thing, interval=other_thing)' for custom times
    if f is None:
        def rwrapper(f):
            return retry(f, timeout, interval)
        return rwrapper

    @wraps(f)
    def wrapper(*args, **kwargs):
        # The wrapped function gets the optional arguments retry_timeout and retry_interval added
        retry_timeout = kwargs.pop('retry_timeout', timeout)
        retry_interval = kwargs.pop('retry_interval', interval)
        prep = kwargs.pop('prep', None)
        end_time = time() + retry_timeout
        while True:
            try:
                if prep is not None:
                    prep()
                return f(*args, **kwargs)
            except element_exceptions:
                if time() > end_time:
                    # timeout, re-raise the original exception
                    raise
                sleep(retry_interval)
    return wrapper
from time import time, sleep
from functools import wraps

from .exceptions import element_exceptions


def retry(f=None, timeout=30, interval=0.1):
    """
    When working with a responsive UI, sometimes elements are not ready at the very second you request it
    This wrapper will keep on retrying finding or interacting with the element until its ready
    """
    # This allows us to use '@retry' or '@retry(timeout=thing, interval=other_thing)' for custom times
    if f is None:
        def rwrapper(f):
            return retry(f, timeout, interval)
        return rwrapper

    @wraps(f)
    def wrapper(*args, **kwargs):
        # The wrapped function gets the optional arguments retry_timeout and retry_interval added
        retry_timeout = kwargs.pop('retry_timeout', timeout)
        retry_interval = kwargs.pop('retry_interval', interval)
        prep = kwargs.pop('prep', None)
        end_time = time() + retry_timeout
        while True:
            try:
                if prep is not None:
                    prep()
                return f(*args, **kwargs)
            except element_exceptions:
                if time() > end_time:
                    # timeout, re-raise the original exception
                    raise
                sleep(retry_interval)
    return wrapper
lgpl-2.1
Python
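A usage sketch for the `retry` decorator above; the Selenium-style `driver` calls are hypothetical stand-ins for whatever raises `element_exceptions` in practice:

from ngSe.utils import retry


@retry(timeout=10, interval=0.5)
def click_submit(driver):
    # Exceptions in element_exceptions are retried until the timeout expires.
    driver.find_element_by_id('submit').click()

# Callers can still override the timing per call:
# click_submit(driver, retry_timeout=60, retry_interval=1)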
92aab88f88a4a9e3df82dd3f7a94b491a7cb3bd1
add interactive script
darshanime/sandpiles
interactive.py
interactive.py
from collections import defaultdict

import matplotlib.pyplot as plt
import numpy as np

pile = defaultdict(dict)


def draw_sandpile():
    dim = raw_input("Enter dimensions of grid (eg, 4x4):\n")
    try:
        r, c = map(int, dim.strip().split('x'))
        pile["r"] = r
        pile["c"] = c
    except Exception:
        print("Enter in the form <int>x<int> (eg, 4x4)")
        raise

    pad_pile(pile)
    for row in range(1, r+1):
        for col in range(1, c+1):
            pile[(row, col)]["max"] = int(raw_input("Max for row %s, col %s:\n" % (row, col)))
            pile[(row, col)]["#"] = int(raw_input("Initial for row %s, col %s:\n" % (row, col)))

    count = 0
    while pile_unstable(pile):
        count += 1
        collapse_pile(pile)

    unpad_pile(pile)
    print("\nRan for %i iterations" % count)
    plot(pile)


def plot(pile):
    numpy_array = convert_to_numpy_array(pile)
    plt.matshow(numpy_array, cmap=plt.get_cmap('gist_rainbow'))
    plt.colorbar(orientation='horizontal')
    plt.axis('off')

    # printing the sand count in the plot
    it = np.nditer(numpy_array, flags=['multi_index'])
    print(numpy_array)
    while not it.finished:
        plt.text(it.multi_index[1], it.multi_index[0], int(it[0]),
                 va='center', ha='center')
        it.iternext()
    plt.show()


def convert_to_numpy_array(pile):
    r = pile["r"]
    c = pile["c"]
    np_array = np.empty(shape=(r, c))
    for row in range(r):
        for col in range(c):
            np_array[row][col] = pile[(row+1, col+1)]["#"]
    return np_array


def pad_pile(pile):
    r = pile["r"]
    c = pile["c"]
    for row in range(r+2):
        for col in range(c+2):
            pile[(row, col)]["max"] = 0
            pile[(row, col)]["#"] = 0


def unpad_pile(pile):
    r = pile["r"]
    c = pile["c"]
    for col in range(c+2):
        del pile[(0, col)]
        del pile[(r+1, col)]
    for row in range(1, r+1):
        del pile[(row, 0)]
        del pile[(row, c+1)]
    return pile


def pile_unstable(pile):
    r = pile["r"]
    c = pile["c"]
    for row in range(1, r+1):
        for col in range(1, c+1):
            if pile[(row, col)]["#"] > pile[(row, col)]["max"]:
                return True
    return False


def get_toppable_squares(pile):
    toppable_squares = []
    r = pile["r"]
    c = pile["c"]
    for row in range(1, r+1):
        for col in range(1, c+1):
            if pile[(row, col)]["#"] > pile[(row, col)]["max"]:
                toppable_squares.append((row, col))
    return toppable_squares


def collapse_pile(pile):
    toppable_squares = get_toppable_squares(pile)
    for square in toppable_squares:
        topple(square, pile)


def topple(square, pile):
    # toppling order is clockwise - LEFT, TOP, RIGHT, BOTTOM
    r, c = square[0], square[1]
    if pile[square]["#"] >= 1:
        pile[square]["#"] -= 1
        pile[(r-1, c)]["#"] += 1
    if pile[square]["#"] >= 1:
        pile[square]["#"] -= 1
        pile[(r, c+1)]["#"] += 1
    if pile[square]["#"] >= 1:
        pile[square]["#"] -= 1
        pile[(r+1, c)]["#"] += 1
    if pile[square]["#"] >= 1:
        pile[square]["#"] -= 1
        pile[(r, c-1)]["#"] += 1
    return pile


if __name__ == '__main__':
    draw_sandpile()
mit
Python
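The raw_input() prompts make the module awkward to test; the same functions can be driven programmatically, as in this sketch for a hypothetical 2x2 grid (square capacities and initial counts are made up):

import interactive  # the module above

pile = interactive.pile
pile["r"] = pile["c"] = 2
interactive.pad_pile(pile)
for row in (1, 2):
    for col in (1, 2):
        pile[(row, col)]["max"] = 3  # capacity of each square
        pile[(row, col)]["#"] = 4    # start every square over capacity

while interactive.pile_unstable(pile):
    interactive.collapse_pile(pile)

interactive.unpad_pile(pile)
print(interactive.convert_to_numpy_array(pile))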
d648aeb90158cb104ac6548887a39dc13dfa236f
add management cmd make_emails_lowercase
qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
corehq/apps/users/management/commands/make_emails_lowercase.py
corehq/apps/users/management/commands/make_emails_lowercase.py
from django.core.management import BaseCommand

from corehq.apps.users.models import CouchUser


class Command(BaseCommand):
    help = "Makes emails into lowercase"

    def handle(self, *args, **options):
        for couch_user in CouchUser.all():
            if couch_user.email and any(char.isupper() for char in couch_user.email):
                print couch_user.email
                couch_user.email = couch_user.email.lower()
                couch_user.save()
bsd-3-clause
Python
9d2976200965c4ea6b324d0822f6be786a25f2ea
Add file containing filesystem utilities
jrsmith3/refmanage
refmanage/fs_utils.py
refmanage/fs_utils.py
# -*- coding: utf-8 -*-
mit
Python
5a77df8ebd1fd20ac6de34fe19853adbfeea6e31
Add arabic tests
he7d3r/revscoring,wiki-ai/revscoring
revscoring/languages/tests/test_arabic.py
revscoring/languages/tests/test_arabic.py
import pickle

from nose.tools import eq_

from .. import arabic
from ...datasources import revision
from ...dependencies import solve
from .util import compare_extraction

BAD = [
    "احا", "عاهرا", "زندقتهما", "حمار", "لعن", "يلعن", "لعنه", "امك",
    "لعنتهما", "فلعنهما", "اعزبوا", "عزبوا", "لدحي", "زبي", "كلب", "كافر",
    "والله", "الحمار", "الزنا", "النيك", "كلابي", "الكلب", "منو", "نجس",
    "والعياذ", "يتبرز", "الكافر", "تتزر", "منكاحا", "وينكح", "منافق",
    "الشيطان",
]

INFORMAL = [
    "كالامازوه", "فغانيون", "ومراف", "زوه", "رلا", "بلوجاتي", "كتمتمان",
    "سراريه", "اجك", "الجيدي", "مناخرهم", "الجيرل", "وخلاخيل", "اكشفي",
    "ومحاسنه", "يبزقن", "اجهن", "اطهن", "ستنفض", "خطبهن", "اخدون",
    "غمزني", "فطلقني", "فحكه", "خرق", "وهل", "اللي", "تحرموا", "الزن",
    "بالنعلين", "وغلامك", "عليلك", "فتحدثها", "اتمن", "الشنبا", "وروراو",
    "والفاج", "صوردون", "ورجلاي", "وضاحا", "مختار", "نسب", "شيخ",
]

OTHER = [
    """يقوم تاريخ علم الأحياء بدراسة الأحياء من الزمن القديم إلى المعاصر.
    مع أن مفهوم علم الأحياء كمجال واحد متماسك ظهر في القرن التاسع عشر، فإن
    علوم الأحياء ظهرت من تقاليد الطب والتاريخ الطبيعي المأخوذة من أيورفيدا،
    الطب المصري القديم وكتابات أرسطو وجالينوس في العصور اليونانية والرومانية
    القديمة. تم تطوير هذا العمل القديم خلال القرون الوسطى من قبل الأطباء
    والعلماء المسلمين مثل ابن سينا. خلال عصر النهضة الأوروبية وبداية العصر
    الحديث، تم تحديث الفكر في علم الأحياء في أوروبا بسبب الاهتمام المتجدد
    بالفلسفة التجريبية واكتشاف العديد من الكائنات الحية التي لم تكن معروفة
    """
]


def test_badwords():
    compare_extraction(arabic.revision.badwords_list, BAD, OTHER)


def test_informals():
    compare_extraction(arabic.revision.informals_list, INFORMAL, OTHER)


def test_revision():
    cache = {revision.text: "يقوم تاريخ علم الأحياء بدراسة الأحياء."}
    eq_(solve(arabic.revision.words_list, cache=cache),
        ["يقوم", "تاريخ", "علم", "الأحياء", "بدراسة", "الأحياء"])


def test_pickling():
    eq_(arabic, pickle.loads(pickle.dumps(arabic)))
mit
Python
6979bbf6547d689b1980762349a0e78c9c7c026d
Create fibonacci.py
DavidMellul/Projets,DavidMellul/Projets,DavidMellul/Projets,DavidMellul/Projets,DavidMellul/Projets,DavidMellul/Projets,DavidMellul/Projets
python/fibonacci/fibonacci.py
python/fibonacci/fibonacci.py
a = 0
b = 1
c = 0

n = int(input("Number of terms: "))

for i in range(1, n+1):
    c = a + b
    b = a
    a = c

print(c)
mit
Python
fcf691454b8607fec9d7f5cba43579dc02c26c8b
Check coverage of pgi, vs gi
lazka/pgi,lazka/pgi
tests/pgi_covergage.py
tests/pgi_covergage.py
""" find pgi coverage of all gi.repositorys. you need to have access to both 'gi' and 'pgi' in the current python environment. In a virtualenv this works: $ pip install pgi $ pip install vext.gi $ python pgi_coverage.py """ TYPELIB_DIR="/usr/lib/girepository-1.0" from os.path import basename from glob import glob from textwrap import dedent def test_pgi_coverage(gi_module, pgi_module): name_width = len(max(dir(gi_module), key=len)) print('%s %s' % (gi_module.__name__.rjust(name_width), pgi_module.__name__)) for name in dir(gi_module): if name.startswith('_'): continue status = 'OK' try: getattr(pgi_module, name) except NotImplementedError as e: #status = "FAIL: '%s'" % str(e.__class__.__name__) status = "FAIL" for line in str(e).splitlines(): if line.startswith('NotImplementedError:'): status = status + " " + line print("%s\t%s" % (name.rjust(name_width), status)) print("") def test_coverage(typelib): code = dedent(""" from pgi.repository import {0} as PGI_{0} from gi.repository import {0} as GI_{0} test_pgi_coverage(GI_{0}, PGI_{0}) """.format(typelib)) try: print("PGI coverage of %s" % typelib) exec(code) except Exception as e: print("Skipped because of %s during test" % str(e)) def get_typelibs(): typelibs = [] for typelib in glob(TYPELIB_DIR + "/*.typelib"): fn = basename(typelib).partition("-")[0] typelibs.append(fn) return typelibs if __name__=='__main__': typelibs = get_typelibs() for typelib in typelibs: test_coverage(typelib)
lgpl-2.1
Python
7c1cbc49e6cdc6ef514382eee9679f4e9719257b
add basic-calculator-ii
zeyuanxy/leet-code,EdisonAlgorithms/LeetCode,EdisonAlgorithms/LeetCode,EdisonAlgorithms/LeetCode,zeyuanxy/leet-code,zeyuanxy/leet-code
vol5/basic-calculator-ii/basic-calculator-ii.py
vol5/basic-calculator-ii/basic-calculator-ii.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: Zeyuan Shang
# @Date:   2015-11-18 17:22:37
# @Last Modified by:   Zeyuan Shang
# @Last Modified time: 2015-11-18 17:22:44


class Solution:
    operators = ['+', '-', '*', '/']

    def getPriority(self, operator):
        return {
            '+': 1,
            '-': 1,
            '*': 2,
            '/': 2,
        }.get(operator, 0)

    def toRPN(self, s):
        tokens, stack = [], []
        number = ''
        for c in s:
            if c.isdigit():
                number += c
            else:
                if number:
                    tokens.append(number)
                    number = ''
                if c in self.operators:
                    while len(stack) and self.getPriority(stack[-1]) >= self.getPriority(c):
                        tokens.append(stack.pop())
                    stack.append(c)
                elif c == '(':
                    stack.append(c)
                elif c == ')':
                    while len(stack) and stack[-1] != '(':
                        tokens.append(stack.pop())
                    stack.pop()
        if number:
            tokens.append(number)
        while len(stack):
            tokens.append(stack.pop())
        return tokens

    def calcValue(self, x, y, operator):
        return {
            '+': lambda x, y: x + y,
            '-': lambda x, y: x - y,
            '*': lambda x, y: x * y,
            '/': lambda x, y: int(float(x) / y),
        }[operator](x, y)

    def evalRPN(self, tokens):
        operands = []
        for token in tokens:
            if token in self.operators:
                y, x = operands.pop(), operands.pop()
                operands.append(self.calcValue(x, y, token))
            else:
                operands.append(int(token))
        return operands[0]

    def calculate(self, s):
        tokens = self.toRPN(s)
        return self.evalRPN(tokens)
mit
Python
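A quick usage sketch for the shunting-yard solution above, driven the same way LeetCode calls it:

s = Solution()
print(s.toRPN('3+2*2'))      # ['3', '2', '2', '*', '+']
print(s.calculate('3+2*2'))  # 7
print(s.calculate(' 3/2 '))  # 1 (division truncates toward zero)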
943383a60f76a13290e540ac35c1ea3a8fc21a3e
Add a utility for downsampling a pair FASTQ files.
cfe-lab/MiCall,cfe-lab/MiCall,cfe-lab/MiCall
micall/utils/sample_fastq.py
micall/utils/sample_fastq.py
#!/usr/bin/env python

import argparse
import random


def parse_args():
    parser = argparse.ArgumentParser(
        description="Randomly sample reads from FASTQ files for quick processing.")
    parser.add_argument('fastq1',
                        type=argparse.FileType('rU'),
                        help='original FASTQ file of forward reads')
    parser.add_argument('fastq2',
                        type=argparse.FileType('rU'),
                        help='original FASTQ file of reverse reads')
    parser.add_argument('short_fastq1',
                        type=argparse.FileType('w'),
                        help='FASTQ file to write forward reads in')
    parser.add_argument('short_fastq2',
                        type=argparse.FileType('w'),
                        help='FASTQ file to write reverse reads in')
    parser.add_argument('--count', '-n', type=float, default=10000.0,
                        help='approximate number of read pairs to write')
    return parser.parse_args()


def get_reads(fastq_file):
    """ Yield reads as tuples of four lines: header, sequence, '+', quality. """
    for read in zip(fastq_file, fastq_file, fastq_file, fastq_file):
        yield read


def get_named_reads(fastq_file):
    """ Yield (name, read) pairs. """
    for read in get_reads(fastq_file):
        header = read[0]
        name = header.split(' ')[0]
        yield (name, read)


def process_read(name, read, out_file, odds, skipped_names, chosen_names):
    """ Write a read to the out_file if it is chosen.

    @param name: the name of the read that is used to match forward and
        reverse reads
    @param read: a tuple of four lines that makes up a read
    @param out_file: an open file to write the chosen reads to
    @param odds: a float between zero and one that sets the odds of
        choosing a read
    @param skipped_names: a set of names that have already been skipped,
        but their partners have not been seen yet
    @param chosen_names: a set of names that have already been chosen and
        written, but their partners have not been seen yet
    """
    try:
        skipped_names.remove(name)
        # This name was skipped, and we've seen both reads. We're done.
        return
    except KeyError:
        pass
    try:
        chosen_names.remove(name)
        is_chosen = True
    except KeyError:
        # Haven't seen this name yet, decide whether to choose it.
        is_chosen = random.uniform(0, 1) < odds
        if is_chosen:
            chosen_names.add(name)
        else:
            skipped_names.add(name)
    if is_chosen:
        for line in read:
            out_file.write(line)


def main():
    args = parse_args()
    for line_count, _ in enumerate(args.fastq1, 1):
        pass
    args.fastq1.seek(0)
    read_count = line_count/4
    odds = args.count/read_count
    rev_reads = get_named_reads(args.fastq2)
    skipped_names = set()
    chosen_names = set()
    for fwd_name, fwd_read in get_named_reads(args.fastq1):
        rev_name, rev_read = rev_reads.next()
        process_read(fwd_name, fwd_read, args.short_fastq1, odds, skipped_names, chosen_names)
        process_read(rev_name, rev_read, args.short_fastq2, odds, skipped_names, chosen_names)


if __name__ == '__main__':
    main()
agpl-3.0
Python
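With the trailing main() call guarded as above, the pairing logic in process_read can be exercised in isolation under Python 3; the import path is assumed from the commit's file location, and the read name and contents are made up:

import io
import random

from micall.utils.sample_fastq import process_read  # path assumed from the commit

random.seed(42)  # make the random choice deterministic for the demo

out = io.StringIO()
skipped, chosen = set(), set()
read = ('@read1 extra\n', 'ACGT\n', '+\n', 'FFFF\n')

# The first encounter decides; the partner then copies that fate.
process_read('@read1', read, out, 1.0, skipped, chosen)
process_read('@read1', read, out, 1.0, skipped, chosen)
print(out.getvalue().count('@read1'))  # both mates written -> 2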
408be8a0d49b7542c74e016a572499a8c4d85351
Add tests to verify team index and add pages render without errors
rtfoley/scorepy,rtfoley/scorepy,rtfoley/scorepy
app/teams/tests.py
app/teams/tests.py
from app.test_base import BaseTestCase


class TestTeamBehavior(BaseTestCase):
    def test_index_page_200(self):
        self.login()
        response = self.client.get('/teams/')
        self.assert200(response)

    def test_add_page_200(self):
        self.login()
        response = self.client.get('/teams/new')
        self.assert200(response)
mit
Python
5ce6283cff4a3a97911a663d777869a7c7377341
add http_codes
sassoftware/catalog-service,sassoftware/catalog-service,sassoftware/catalog-service
catalogService/http_codes.py
catalogService/http_codes.py
#
# Copyright (c) 2008 rPath, Inc.
#

HTTP_CONTINUE = 100
HTTP_SWITCHING_PROTOCOLS = 101
HTTP_PROCESSING = 102

HTTP_OK = 200
HTTP_CREATED = 201
HTTP_ACCEPTED = 202
HTTP_NON_AUTHORITATIVE = 203
HTTP_NO_CONTENT = 204
HTTP_RESET_CONTENT = 205
HTTP_PARTIAL_CONTENT = 206
HTTP_MULTI_STATUS = 207

HTTP_MULTIPLE_CHOICES = 300
HTTP_MOVED_PERMANENTLY = 301
HTTP_MOVED_TEMPORARILY = 302
HTTP_SEE_OTHER = 303
HTTP_NOT_MODIFIED = 304
HTTP_USE_PROXY = 305
HTTP_TEMPORARY_REDIRECT = 307

HTTP_BAD_REQUEST = 400
HTTP_UNAUTHORIZED = 401
HTTP_PAYMENT_REQUIRED = 402
HTTP_FORBIDDEN = 403
HTTP_NOT_FOUND = 404
HTTP_METHOD_NOT_ALLOWED = 405
HTTP_NOT_ACCEPTABLE = 406
HTTP_PROXY_AUTHENTICATION_REQUIRED = 407
HTTP_REQUEST_TIME_OUT = 408
HTTP_CONFLICT = 409
HTTP_GONE = 410
HTTP_LENGTH_REQUIRED = 411
HTTP_PRECONDITION_FAILED = 412
HTTP_REQUEST_ENTITY_TOO_LARGE = 413
HTTP_REQUEST_URI_TOO_LARGE = 414
HTTP_UNSUPPORTED_MEDIA_TYPE = 415
HTTP_RANGE_NOT_SATISFIABLE = 416
HTTP_EXPECTATION_FAILED = 417
HTTP_UNPROCESSABLE_ENTITY = 422
HTTP_LOCKED = 423
HTTP_FAILED_DEPENDENCY = 424
HTTP_UPGRADE_REQUIRED = 426

HTTP_INTERNAL_SERVER_ERROR = 500
HTTP_NOT_IMPLEMENTED = 501
HTTP_BAD_GATEWAY = 502
HTTP_SERVICE_UNAVAILABLE = 503
HTTP_GATEWAY_TIME_OUT = 504
HTTP_VERSION_NOT_SUPPORTED = 505
HTTP_VARIANT_ALSO_VARIES = 506
HTTP_INSUFFICIENT_STORAGE = 507
HTTP_NOT_EXTENDED = 510
apache-2.0
Python
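Because the module is flat constants, a reverse lookup table falls out of vars(); a small sketch, assuming the import path matches the file added above:

from catalogService import http_codes

# Map numeric status -> constant name, e.g. 404 -> 'HTTP_NOT_FOUND'.
STATUS_NAMES = {
    value: name
    for name, value in vars(http_codes).items()
    if name.startswith('HTTP_')
}

print(STATUS_NAMES[404])  # HTTP_NOT_FOUND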
5c602a98098bdedeffc2b7359a4b3d8407cb1449
Add migration to ensure consistency on file keys.
rdhyee/osf.io,amyshi188/osf.io,sloria/osf.io,zachjanicki/osf.io,mluo613/osf.io,asanfilippo7/osf.io,brandonPurvis/osf.io,doublebits/osf.io,mfraezz/osf.io,laurenrevere/osf.io,pattisdr/osf.io,erinspace/osf.io,bdyetton/prettychart,revanthkolli/osf.io,MerlinZhang/osf.io,caseyrygt/osf.io,TomHeatwole/osf.io,cldershem/osf.io,HalcyonChimera/osf.io,amyshi188/osf.io,HarryRybacki/osf.io,icereval/osf.io,ticklemepierce/osf.io,RomanZWang/osf.io,emetsger/osf.io,revanthkolli/osf.io,fabianvf/osf.io,himanshuo/osf.io,kch8qx/osf.io,jolene-esposito/osf.io,cldershem/osf.io,HalcyonChimera/osf.io,kch8qx/osf.io,saradbowman/osf.io,njantrania/osf.io,lamdnhan/osf.io,MerlinZhang/osf.io,baylee-d/osf.io,cosenal/osf.io,DanielSBrown/osf.io,alexschiller/osf.io,HarryRybacki/osf.io,cslzchen/osf.io,brandonPurvis/osf.io,jnayak1/osf.io,jinluyuan/osf.io,sloria/osf.io,danielneis/osf.io,jnayak1/osf.io,himanshuo/osf.io,binoculars/osf.io,fabianvf/osf.io,samanehsan/osf.io,felliott/osf.io,billyhunt/osf.io,reinaH/osf.io,TomHeatwole/osf.io,haoyuchen1992/osf.io,aaxelb/osf.io,samanehsan/osf.io,Ghalko/osf.io,haoyuchen1992/osf.io,danielneis/osf.io,asanfilippo7/osf.io,mattclark/osf.io,jmcarp/osf.io,monikagrabowska/osf.io,adlius/osf.io,caseyrygt/osf.io,DanielSBrown/osf.io,lamdnhan/osf.io,Ghalko/osf.io,aaxelb/osf.io,jnayak1/osf.io,cosenal/osf.io,cwisecarver/osf.io,RomanZWang/osf.io,jeffreyliu3230/osf.io,GageGaskins/osf.io,sloria/osf.io,doublebits/osf.io,jinluyuan/osf.io,jmcarp/osf.io,brianjgeiger/osf.io,kwierman/osf.io,mfraezz/osf.io,GaryKriebel/osf.io,HalcyonChimera/osf.io,wearpants/osf.io,arpitar/osf.io,icereval/osf.io,hmoco/osf.io,acshi/osf.io,KAsante95/osf.io,jnayak1/osf.io,HarryRybacki/osf.io,leb2dg/osf.io,billyhunt/osf.io,GaryKriebel/osf.io,bdyetton/prettychart,alexschiller/osf.io,rdhyee/osf.io,hmoco/osf.io,RomanZWang/osf.io,TomHeatwole/osf.io,TomHeatwole/osf.io,mluo613/osf.io,arpitar/osf.io,acshi/osf.io,DanielSBrown/osf.io,GageGaskins/osf.io,HalcyonChimera/osf.io,pattisdr/osf.io,ticklemepierce/osf.io,hmoco/osf.io,KAsante95/osf.io,kch8qx/osf.io,jolene-esposito/osf.io,amyshi188/osf.io,reinaH/osf.io,bdyetton/prettychart,pattisdr/osf.io,abought/osf.io,chennan47/osf.io,ticklemepierce/osf.io,cldershem/osf.io,caneruguz/osf.io,MerlinZhang/osf.io,jeffreyliu3230/osf.io,chennan47/osf.io,jinluyuan/osf.io,ZobairAlijan/osf.io,petermalcolm/osf.io,hmoco/osf.io,sbt9uc/osf.io,dplorimer/osf,ZobairAlijan/osf.io,kwierman/osf.io,njantrania/osf.io,leb2dg/osf.io,zamattiac/osf.io,acshi/osf.io,fabianvf/osf.io,SSJohns/osf.io,ckc6cz/osf.io,crcresearch/osf.io,petermalcolm/osf.io,billyhunt/osf.io,amyshi188/osf.io,acshi/osf.io,HarryRybacki/osf.io,SSJohns/osf.io,kushG/osf.io,chennan47/osf.io,felliott/osf.io,AndrewSallans/osf.io,Johnetordoff/osf.io,GaryKriebel/osf.io,abought/osf.io,leb2dg/osf.io,doublebits/osf.io,chrisseto/osf.io,dplorimer/osf,dplorimer/osf,jolene-esposito/osf.io,cslzchen/osf.io,baylee-d/osf.io,mluke93/osf.io,njantrania/osf.io,alexschiller/osf.io,samchrisinger/osf.io,CenterForOpenScience/osf.io,leb2dg/osf.io,petermalcolm/osf.io,cldershem/osf.io,AndrewSallans/osf.io,CenterForOpenScience/osf.io,cwisecarver/osf.io,alexschiller/osf.io,reinaH/osf.io,zamattiac/osf.io,GageGaskins/osf.io,jolene-esposito/osf.io,adlius/osf.io,cslzchen/osf.io,mattclark/osf.io,zkraime/osf.io,jeffreyliu3230/osf.io,cwisecarver/osf.io,reinaH/osf.io,Nesiehr/osf.io,icereval/osf.io,acshi/osf.io,GageGaskins/osf.io,kushG/osf.io,saradbowman/osf.io,lamdnhan/osf.io,bdyetton/prettychart,kwierman/osf.io,cwisecarver/osf.io,felliott/osf.io,chrisseto/osf.io,caneruguz/osf.io,samanehsan/osf.io,mfraez
z/osf.io,kwierman/osf.io,Nesiehr/osf.io,chrisseto/osf.io,ckc6cz/osf.io,haoyuchen1992/osf.io,caneruguz/osf.io,TomBaxter/osf.io,SSJohns/osf.io,jmcarp/osf.io,alexschiller/osf.io,arpitar/osf.io,sbt9uc/osf.io,caseyrollins/osf.io,Nesiehr/osf.io,zachjanicki/osf.io,brandonPurvis/osf.io,cosenal/osf.io,rdhyee/osf.io,TomBaxter/osf.io,asanfilippo7/osf.io,ZobairAlijan/osf.io,lyndsysimon/osf.io,njantrania/osf.io,KAsante95/osf.io,asanfilippo7/osf.io,jeffreyliu3230/osf.io,CenterForOpenScience/osf.io,barbour-em/osf.io,kch8qx/osf.io,mluo613/osf.io,dplorimer/osf,haoyuchen1992/osf.io,barbour-em/osf.io,wearpants/osf.io,zachjanicki/osf.io,zkraime/osf.io,monikagrabowska/osf.io,zkraime/osf.io,brandonPurvis/osf.io,billyhunt/osf.io,revanthkolli/osf.io,lamdnhan/osf.io,binoculars/osf.io,Ghalko/osf.io,ckc6cz/osf.io,caneruguz/osf.io,crcresearch/osf.io,felliott/osf.io,lyndsysimon/osf.io,sbt9uc/osf.io,kch8qx/osf.io,abought/osf.io,SSJohns/osf.io,danielneis/osf.io,mluo613/osf.io,zamattiac/osf.io,emetsger/osf.io,mfraezz/osf.io,lyndsysimon/osf.io,himanshuo/osf.io,emetsger/osf.io,lyndsysimon/osf.io,caseyrygt/osf.io,KAsante95/osf.io,GageGaskins/osf.io,mluke93/osf.io,mattclark/osf.io,KAsante95/osf.io,caseyrygt/osf.io,doublebits/osf.io,mluke93/osf.io,caseyrollins/osf.io,petermalcolm/osf.io,Johnetordoff/osf.io,emetsger/osf.io,sbt9uc/osf.io,billyhunt/osf.io,RomanZWang/osf.io,monikagrabowska/osf.io,wearpants/osf.io,caseyrollins/osf.io,adlius/osf.io,chrisseto/osf.io,brianjgeiger/osf.io,Johnetordoff/osf.io,arpitar/osf.io,monikagrabowska/osf.io,barbour-em/osf.io,samanehsan/osf.io,GaryKriebel/osf.io,cosenal/osf.io,aaxelb/osf.io,baylee-d/osf.io,cslzchen/osf.io,rdhyee/osf.io,crcresearch/osf.io,samchrisinger/osf.io,ZobairAlijan/osf.io,Ghalko/osf.io,adlius/osf.io,brianjgeiger/osf.io,samchrisinger/osf.io,jmcarp/osf.io,erinspace/osf.io,binoculars/osf.io,zachjanicki/osf.io,doublebits/osf.io,zkraime/osf.io,MerlinZhang/osf.io,laurenrevere/osf.io,brandonPurvis/osf.io,ticklemepierce/osf.io,kushG/osf.io,barbour-em/osf.io,mluo613/osf.io,CenterForOpenScience/osf.io,revanthkolli/osf.io,TomBaxter/osf.io,kushG/osf.io,laurenrevere/osf.io,DanielSBrown/osf.io,brianjgeiger/osf.io,himanshuo/osf.io,erinspace/osf.io,zamattiac/osf.io,jinluyuan/osf.io,samchrisinger/osf.io,wearpants/osf.io,Johnetordoff/osf.io,aaxelb/osf.io,fabianvf/osf.io,RomanZWang/osf.io,ckc6cz/osf.io,abought/osf.io,danielneis/osf.io,mluke93/osf.io,Nesiehr/osf.io,monikagrabowska/osf.io
scripts/migrate_inconsistent_file_keys.py
scripts/migrate_inconsistent_file_keys.py
#!/usr/bin/env python
# encoding: utf-8
"""Find all nodes with different sets of keys for `files_current` and
`files_versions`, and ensure that all keys present in the former are also
present in the latter.
"""

from website.models import Node
from website.app import init_app


def find_file_mismatch_nodes():
    """Find nodes with inconsistent `files_current` and `files_versions`
    field keys.
    """
    return [
        node for node in Node.find()
        if set(node.files_versions.keys()) != set(node.files_current.keys())
    ]


def migrate_node(node):
    """Ensure that all keys present in `files_current` are also present in
    `files_versions`.
    """
    for key, file_id in node.files_current.iteritems():
        if key not in node.files_versions:
            node.files_versions[key] = [file_id]
        else:
            if file_id not in node.files_versions[key]:
                node.files_versions[key].append(file_id)
    node.save()


def main(dry_run=True):
    init_app()
    nodes = find_file_mismatch_nodes()
    print('Migrating {0} nodes'.format(len(nodes)))
    if dry_run:
        return
    for node in nodes:
        migrate_node(node)


if __name__ == '__main__':
    import sys
    dry_run = 'dry' in sys.argv
    main(dry_run=dry_run)


from nose.tools import *  # noqa

from tests.base import OsfTestCase
from tests.factories import ProjectFactory

from framework.auth import Auth


class TestMigrateFiles(OsfTestCase):

    def clear(self):
        Node.remove()

    def setUp(self):
        super(TestMigrateFiles, self).setUp()
        self.clear()
        self.nodes = []
        for idx in range(3):
            node = ProjectFactory()
            node.add_file(
                Auth(user=node.creator),
                'name',
                'contents',
                len('contents'),
                'text/plain',
            )
            self.nodes.append(node)
        self.nodes[-1].files_versions = {}
        self.nodes[-1].save()
        # Sanity check
        assert_in('name', self.nodes[-1].files_current)
        assert_not_in('name', self.nodes[-1].files_versions)

    def tearDown(self):
        super(TestMigrateFiles, self).tearDown()
        self.clear()

    def test_get_targets(self):
        targets = find_file_mismatch_nodes()
        assert_equal(len(targets), 1)
        assert_equal(targets[0], self.nodes[-1])

    def test_migrate(self):
        main(dry_run=False)
        assert_equal(len(find_file_mismatch_nodes()), 0)
        assert_in('name', self.nodes[-1].files_versions)
        assert_equal(
            self.nodes[-1].files_current['name'],
            self.nodes[-1].files_versions['name'][0],
        )
apache-2.0
Python
e5e6506ab6b5191e309aa75e56e25253c0ba7763
Create drivers.py
ariegg/webiopi-drivers,ariegg/webiopi-drivers
chips/memory/file/drivers.py
chips/memory/file/drivers.py
# This code has to be added to the corresponding __init__.py

DRIVERS["filememory"] = ["PICKLEFILE", "JSONFILE"]
apache-2.0
Python
386baa36355b0e9378fff59fe768d1baa7e73fec
Add Himax motion detection example.
openmv/openmv,iabdalkader/openmv,openmv/openmv,iabdalkader/openmv,iabdalkader/openmv,iabdalkader/openmv,kwagyeman/openmv,kwagyeman/openmv,kwagyeman/openmv,openmv/openmv,kwagyeman/openmv,openmv/openmv
scripts/examples/Arduino/Portenta-H7/21-Sensor-Control/himax_motion_detection.py
scripts/examples/Arduino/Portenta-H7/21-Sensor-Control/himax_motion_detection.py
# Himax motion detection example.

import sensor, image, time, pyb
from pyb import Pin, ExtInt

sensor.reset()
sensor.set_pixformat(sensor.GRAYSCALE)
sensor.set_framesize(sensor.QVGA)
sensor.set_framerate(15)

sensor.ioctl(sensor.IOCTL_HIMAX_MD_THRESHOLD, 0x01)
sensor.ioctl(sensor.IOCTL_HIMAX_MD_WINDOW, (0, 0, 320, 240))
sensor.ioctl(sensor.IOCTL_HIMAX_MD_CLEAR)
sensor.ioctl(sensor.IOCTL_HIMAX_MD_ENABLE, True)

motion_detected = False


def on_motion(line):
    global motion_detected
    motion_detected = True


led = pyb.LED(3)
ext = ExtInt(Pin("PC15"), ExtInt.IRQ_RISING, Pin.PULL_DOWN, on_motion)

clock = time.clock()

while(True):
    clock.tick()
    img = sensor.snapshot()
    if (motion_detected):
        led.on()
        time.sleep_ms(500)
        # Clear motion detection flag
        sensor.ioctl(sensor.IOCTL_HIMAX_MD_CLEAR)
        motion_detected = False
        led.off()
    print(clock.fps())
mit
Python
b1ab4ef6fbac0ce02d05464e03599d44721fb239
Add an example using partial.
masasin/latexipy
examples/partial.py
examples/partial.py
#!/usr/bin/env python

from functools import partial

import matplotlib.pyplot as plt
import numpy as np

from latexipy import latexipy as lp


if __name__ == '__main__':
    lp.latexify()

    figure = partial(lp.figure, folder='some_images', exts=['png'])

    x = np.linspace(-np.pi, np.pi)
    y1 = np.sin(x)
    y2 = np.cos(x)

    with figure('sin'):
        plt.plot(x, y1, label='sine')
        plt.title('Sine')
        plt.xlabel(r'$\theta$')
        plt.ylabel('Value')
        plt.legend()

    with figure('cos'):
        plt.plot(x, y2, label='cosine', c='C1')
        plt.title('Cosine')
        plt.xlabel(r'$\theta$')
        plt.ylabel('Value')
        plt.legend()

    with figure('both'):
        plt.plot(x, y1, label='sine')
        plt.plot(x, y2, label='cosine')
        plt.title('Sine and cosine')
        plt.xlabel(r'$\theta$')
        plt.ylabel('Value')
        plt.legend()
mit
Python
e781a1e89b945dad1585f82dfdb77cbffbe8fdeb
add unit tests
olof/svtplay-dl,dalgr/svtplay-dl,iwconfig/svtplay-dl,qnorsten/svtplay-dl,olof/svtplay-dl,selepo/svtplay-dl,spaam/svtplay-dl,dalgr/svtplay-dl,spaam/svtplay-dl,qnorsten/svtplay-dl,iwconfig/svtplay-dl,selepo/svtplay-dl
lib/svtplay_dl/tests/prio_streams.py
lib/svtplay_dl/tests/prio_streams.py
#!/usr/bin/python
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil; coding: utf-8 -*-
# ex:ts=4:sw=4:sts=4:et:fenc=utf-8
from __future__ import absolute_import

import unittest

from svtplay_dl.utils import prio_streams


class Stream(object):
    def __init__(self, proto, bitrate):
        self.proto = proto
        self.bitrate = bitrate

    def name(self):
        return self.proto

    def __repr__(self):
        return '%s(%d)' % (self.proto.upper(), self.bitrate)


class PrioStreamsTest(unittest.TestCase):
    def _gen_proto_case(self, ordered, unordered, default=True, expected=None):
        streams = [Stream(x, 100) for x in unordered]

        kwargs = {}
        if not default:
            kwargs['protocol_prio'] = ordered

        if expected is None:
            expected = [str(Stream(x, 100)) for x in ordered]

        return self.assertEqual(
            [str(x) for x in prio_streams(streams, **kwargs)],
            expected
        )

    def test_default_order(self):
        return self._gen_proto_case(
            ['hls', 'hds', 'http', 'rtmp'],
            ['rtmp', 'hds', 'hls', 'http']
        )

    def test_custom_order(self):
        return self._gen_proto_case(
            ['http', 'rtmp', 'hds', 'hls'],
            ['rtmp', 'hds', 'hls', 'http'],
            default=False,
        )

    def test_custom_order_1(self):
        return self._gen_proto_case(
            ['http'],
            ['rtmp', 'hds', 'hls', 'http'],
            default=False,
        )

    def test_proto_unavail(self):
        return self._gen_proto_case(
            ['http', 'rtmp'],
            ['hds', 'hls', 'https'],
            default=False,
            expected=[],
        )
mit
Python
4e4a8bbb459e6158a7c2d22c04849de9b4de2693
Add directory.py to the directory package
SizzlingVortex/classyfd
classyfd/directory/directory.py
classyfd/directory/directory.py
"""Contains a Directory class to represent real directories"""
mit
Python
0c079b7160cf635c14a016d418d2bc8d3d521f26
add docker start tool
hxsf/OnlineJudge,Timeship/OnlineJudge-1,Timeship/OnlineJudge-1,wangmingjob/OnlineJudge,Timeship/OnlineJudge-1,hxsf/OnlineJudge,uestcxl/OnlineJudge,Timeship/OnlineJudge-QDU,uestcxl/OnlineJudge,hxsf/OnlineJudge,Timeship/OnlineJudge-QDU,Timeship/OnlineJudge-1,hxsf/OnlineJudge,wangmingjob/OnlineJudge,wangmingjob/OnlineJudge,uestcxl/OnlineJudge,Timeship/OnlineJudge-QDU,wangmingjob/OnlineJudge,Timeship/OnlineJudge-QDU
tools/run.py
tools/run.py
# coding=utf-8
import os
import json

os.system("docker rm -f redis")
os.system("docker rm -f mysql")
os.system("docker rm -f oj_web_server")

if os.system("docker run --name mysql -v /root/data:/var/lib/mysql -v /root/data/my.cnf:/etc/my.cnf -e MYSQL_ROOT_PASSWORD=root -d mysql/mysql-server:latest"):
    print "Error start mysql"
    exit()

if os.system("docker run --name redis -d redis"):
    print "Error start redis"
    exit()

if os.system("docker run --name oj_web_server -e oj_env=server -v /root/qduoj:/code -v /root/test_case:/code/test_case -v /root/log:/code/log -v /root/upload:/code/upload -v /root/qduoj/dockerfiles/oj_web_server/supervisord.conf:/etc/supervisord.conf -v /root/qduoj/dockerfiles/oj_web_server/gunicorn.conf:/etc/gunicorn.conf -v /root/qduoj/dockerfiles/oj_web_server/mq.conf:/etc/mq.conf -d -p 127.0.0.1:8080:8080 --link mysql --link=redis oj_web_server"):
    print "Error start oj_web_server"
    exit()

inspect_redis = json.loads(os.popen("docker inspect redis").read())
if not inspect_redis:
    print "Error when inspect redis ip"
    exit()
redis_ip = inspect_redis[0]["NetworkSettings"]["IPAddress"]
print "redis ip ", redis_ip

inspect_mysql = json.loads(os.popen("docker inspect mysql").read())
if not inspect_mysql:
    print "Error when inspect mysql ip"
    exit()
mysql_ip = inspect_mysql[0]["NetworkSettings"]["IPAddress"]
print "mysql ip ", mysql_ip

f = open("/etc/profile", "r")
content = ""
for line in f.readlines():
    if line.startswith("export REDIS_PORT_6379_TCP_ADDR"):
        content += ("\nexport REDIS_PORT_6379_TCP_ADDR=" + redis_ip + "\n")
    elif line.startswith("export submission_db_host"):
        content += ("\nexport submission_db_host=" + mysql_ip + "\n")
    else:
        content += line
f.close()

f = open("/etc/profile", "w")
f.write(content)
f.close()

print "Please run source /etc/profile"
mit
Python
4cf7f6c23bc9d01c6780afa4d27bf9e5e71fb72b
add hacky download
akrherz/pyWWA,akrherz/pyWWA
util/unidata_dl.py
util/unidata_dl.py
import glob
import os
import datetime

sts = datetime.datetime(2016, 11, 21, 0, 0)
ets = datetime.datetime(2016, 11, 21, 3, 0)
interval = datetime.timedelta(minutes=1)

os.chdir('data/nexrad/NIDS')
for nexrad in glob.glob('???'):
    os.chdir(nexrad)
    for nids in ['N0Q', 'NET', 'N0R', 'EET']:
        if not os.path.isdir(nids):
            continue
        os.chdir(nids)
        now = sts
        while now < ets:
            fp = "%s_%s" % (nids, now.strftime("%Y%m%d_%H%M"))
            if not os.path.isfile(fp):
                url = now.strftime(("http://motherlode.ucar.edu/native/radar/"
                                    "level3/" + nids + "/" + nexrad +
                                    "/%Y%m%d/Level3_" + nexrad + "_" + nids +
                                    "_%Y%m%d_%H%M.nids"))
                cmd = "wget -q -O %s %s" % (fp, url)
                os.system(cmd)
            now += interval
        os.chdir('..')
    os.chdir('..')
mit
Python
3dbf91d4d447f6dbddece040b3a9dcbeb8ebcd22
Add missing migrations
sakset/getyourdata,sakset/getyourdata,sakset/getyourdata,sakset/getyourdata
getyourdata/data_request/migrations/0023_auto_20160716_0946.py
getyourdata/data_request/migrations/0023_auto_20160716_0946.py
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-07-16 09:46
from __future__ import unicode_literals

from django.db import migrations, models
import django_extensions.db.fields


class Migration(migrations.Migration):

    dependencies = [
        ('data_request', '0022_faqcontent_priority'),
    ]

    operations = [
        migrations.CreateModel(
            name='RequestContent',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_on', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name=b'Created on')),
                ('updated_on', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name=b'Updated on')),
                ('title', models.CharField(default='Default', max_length=255, unique=True)),
                ('header', models.TextField(blank=True, default='Dear recipient,')),
                ('header_en', models.TextField(blank=True, default='Dear recipient,', null=True)),
                ('header_fi', models.TextField(blank=True, default='Dear recipient,', null=True)),
                ('content1', models.TextField(blank=True, default='content first')),
                ('content1_en', models.TextField(blank=True, default='content first', null=True)),
                ('content1_fi', models.TextField(blank=True, default='content first', null=True)),
                ('content2', models.TextField(blank=True, default='content second')),
                ('content2_en', models.TextField(blank=True, default='content second', null=True)),
                ('content2_fi', models.TextField(blank=True, default='content second', null=True)),
                ('footer', models.TextField(blank=True, default='Regards,')),
                ('footer_en', models.TextField(blank=True, default='Regards,', null=True)),
                ('footer_fi', models.TextField(blank=True, default='Regards,', null=True)),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.DeleteModel(
            name='EmailContent',
        ),
        migrations.RemoveField(
            model_name='pdfcontents',
            name='content1_en',
        ),
        migrations.RemoveField(
            model_name='pdfcontents',
            name='content1_fi',
        ),
        migrations.RemoveField(
            model_name='pdfcontents',
            name='content2_en',
        ),
        migrations.RemoveField(
            model_name='pdfcontents',
            name='content2_fi',
        ),
        migrations.RemoveField(
            model_name='pdfcontents',
            name='footer_en',
        ),
        migrations.RemoveField(
            model_name='pdfcontents',
            name='footer_fi',
        ),
        migrations.RemoveField(
            model_name='pdfcontents',
            name='header_en',
        ),
        migrations.RemoveField(
            model_name='pdfcontents',
            name='header_fi',
        ),
    ]
mit
Python
ddb9e1c0160f40fe60330c247906b9b41f18be1b
Create hearthstone_way_to_legend.py
kawaiigamer/py_parsers
hearthstone_way_to_legend.py
hearthstone_way_to_legend.py
import random, statistics

winrate = 0.51
iterations = 100
games = [0]*iterations
passwinstreak = 5*5  # below Rank 5

for x in range(iterations):
    # 1-10 11-15 => 15 rank
    ladderPosition = 5*10 + 4*5
    winstreak = 0
    while True:
        games[x] = games[x] + 1
        if random.random() <= winrate:
            winstreak = winstreak + 1
            ladderPosition = ladderPosition - 1
            if winstreak >= 2 and ladderPosition > passwinstreak:
                ladderPosition = ladderPosition - 1
        else:
            winstreak = 0
            ladderPosition = ladderPosition + 1
        if ladderPosition == 0:
            break

print("Total games (mean of " + str(iterations) + " iterations): " + str(statistics.mean(games)))
input()
unlicense
Python
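A back-of-envelope check on the simulation above: each win moves one rung down the 70-rung ladder and each loss one rung up, so without the streak bonus the expected drift is only 2*winrate - 1 rungs per game:

winrate = 0.51
drift = 2 * winrate - 1   # 0.02 rungs gained per game without the bonus
rungs = 5 * 10 + 4 * 5    # 70 rungs from rank 15 to Legend
print(rungs / drift)      # ~3500 games, a loose upper bound; the
                          # win-streak bonus below rank 5 cuts this sharply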
a801deeaa00e443b3c68c1fbcea1e6ff62d90082
Add Python script to generate users
veekaybee/intro-to-sql,veekaybee/intro-to-sql,veekaybee/intro-to-sql
python/addusers.py
python/addusers.py
#!/usr/bin/python
# -*- coding: utf-8 -*-

"""
Adds a sequential number of users into a test database
with username: newusern and password newusern

Not for production usage
"""

import MySQLdb

hostname = ""  # FILL IN
username = ""  # FILL IN
password = ""  # FILL IN


# Simple routine to run a query on a database and print the results:
def doQuery(conn, n_users):
    cur = conn.cursor()
    try:
        for i in range(0, n_users):
            cur.execute("""CREATE USER \'newuser%i\'@\'localhost\' IDENTIFIED BY \'password%i\'""" % (i, i))
            cur.execute("""GRANT ALL PRIVILEGES ON * . * TO \'newuser%i\'@\'localhost\'""" % i)
            cur.execute("""FLUSH PRIVILEGES""")
    except MySQLdb.Error as e:
        try:
            print("MySQL Error [%d]: %s" % (e.args[0], e.args[1]))
        except IndexError:
            print("MySQL Error: %s" % str(e))


if __name__ == '__main__':
    print("Using mysql.connector…")
    myConnection = MySQLdb.connect(host=hostname, user=username, passwd=password)
    # The stray positional 20 in the original connect() call reads as the
    # intended user count, so it is passed to doQuery here instead.
    doQuery(myConnection, 20)
    myConnection.close()
mit
Python
821f1b83c441122b28ad2dc869576ca22a4ee642
Create ngram_service.py
dragoon/kilogram,dragoon/kilogram,dragoon/kilogram
ngram_utils/ngram_service.py
ngram_utils/ngram_service.py
from thrift.transport import TSocket
from thrift.protocol import TBinaryProtocol
from thrift.transport import TTransport
from libs.hbase import Hbase

import time

# the original referenced an undefined ``settings`` name; a project-level
# settings module providing MONGO_CLIENT and HBASE_HOST is assumed here
import settings


class NgramService(object):

    def __init__(self, mongo_host, hbase_host):
        mclient = settings.MONGO_CLIENT
        unigram_db = mclient['unigrams']
        bigram_db = mclient['bigrams']
        trigram_db = mclient['trigrams']
        unigram_col_all = unigram_db['all']
        bigram_col_preps = bigram_db['preps']
        trigram_col_preps = trigram_db['preps']

        # No Determinatives
        trigram_db_nodt = mclient['tetragrams']
        bigram_db_nodt = mclient['bigrams_nodt']
        trigram_preps_nodt1 = trigram_db_nodt['preps1']
        trigram_preps_nodt2 = trigram_db_nodt['preps2']
        bigram_col_preps_nodt = bigram_db_nodt['preps']

        # HBASE
        h_unigrams = 'ngrams1'
        h_bigrams = 'ngrams2'
        h_trigrams_skips = 'ngrams3'

        transport = TTransport.TBufferedTransport(TSocket.TSocket(*settings.HBASE_HOST))
        protocol = TBinaryProtocol.TBinaryProtocolAccelerated(transport)
        client = Hbase.Client(protocol)
        transport.open()

        rate = 0
        start = time.time()
apache-2.0
Python
897b56183c3b30a0bc4f439e20d42ce8da2b444c
add empty unit test for viewhandler module
julien6387/supervisors,julien6387/supervisors,julien6387/supvisors,julien6387/supvisors,julien6387/supvisors,julien6387/supervisors,julien6387/supvisors,julien6387/supervisors
supvisors/tests/test_viewhandler.py
supvisors/tests/test_viewhandler.py
#!/usr/bin/python
#-*- coding: utf-8 -*-

# ======================================================================
# Copyright 2016 Julien LE CLEACH
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ======================================================================

import sys
import unittest

from supvisors.tests.base import DummySupvisors


class ViewHandlerTest(unittest.TestCase):
    """ Test case for the viewhandler module. """

    def test_TODO(self):
        """ Test the values set at construction. """
        from supvisors.viewhandler import ViewHandler
        handler = ViewHandler()
        self.assertIsNotNone(handler)


def test_suite():
    return unittest.findTestCases(sys.modules[__name__])

if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
apache-2.0
Python
f06d8ccbd066c432a91ffc127d347277253f95c3
Add pynbs module
fizzy81/pynbs
pynbs.py
pynbs.py
from struct import Struct
from collections import namedtuple


__all__ = ['read', 'File', 'Header', 'Note', 'Layer', 'Instrument']


BYTE = Struct('<b')
SHORT = Struct('<h')
INT = Struct('<i')


Note = namedtuple('Note', ['tick', 'layer', 'instrument', 'key'])
Layer = namedtuple('Layer', ['id', 'name', 'volume'])
Instrument = namedtuple('Instrument', ['id', 'name', 'file', 'pitch', 'key'])


def read(filename):
    return File(open(filename, 'rb'))


class Header(object):
    def __init__(self, headers):
        for key, value in headers.items():
            setattr(self, key, value)


class File(object):
    def __init__(self, buff):
        self.filename = buff.name
        self._buffer = buff
        self.header = Header(self.parse_header())
        self.notes = list(self.parse_notes())
        self.layers = list(self.parse_layers())
        self.instruments = list(self.parse_instruments())
        self._buffer.close()

    def read_numeric(self, fmt):
        return fmt.unpack(self._buffer.read(fmt.size))[0]

    def read_string(self):
        length = self.read_numeric(INT)
        return self._buffer.read(length).decode()

    def _jump(self):
        value = -1
        while True:
            jump = self.read_numeric(SHORT)
            if not jump:
                break
            value += jump
            yield value

    def parse_header(self):
        return {
            'song_length': self.read_numeric(SHORT),
            'song_layers': self.read_numeric(SHORT),
            'song_name': self.read_string(),
            'song_author': self.read_string(),
            'original_author': self.read_string(),
            'description': self.read_string(),
            'tempo': self.read_numeric(SHORT) / 100.0,
            'auto_save': self.read_numeric(BYTE) == 1,
            'auto_save_duration': self.read_numeric(BYTE),
            'time_signature': '{}/4'.format(self.read_numeric(BYTE)),
            'minutes_spent': self.read_numeric(INT),
            'left_clicks': self.read_numeric(INT),
            'right_clicks': self.read_numeric(INT),
            'blocks_added': self.read_numeric(INT),
            'blocks_removed': self.read_numeric(INT),
            'song_origin': self.read_string(),
        }

    def parse_notes(self):
        for current_tick in self._jump():
            for current_layer in self._jump():
                yield Note(current_tick, current_layer,
                           self.read_numeric(BYTE), self.read_numeric(BYTE))

    def parse_layers(self):
        return (Layer(i, self.read_string(), self.read_numeric(BYTE))
                for i in range(self.header.song_layers))

    def parse_instruments(self):
        for i in range(self.read_numeric(BYTE)):
            yield Instrument(i, self.read_string(), self.read_string(),
                             self.read_numeric(BYTE), self.read_numeric(BYTE))
mit
Python
6050610a5cf34bc55a05fa3a8d8a38f6e8e743af
Add test_ko.py for "ko" locale (#9)
sdispater/pendulum,sdispater/pendulum,sdispater/pendulum
tests/localization_tests/test_ko.py
tests/localization_tests/test_ko.py
# -*- coding: utf-8 -*-

from pendulum import Pendulum

from .. import AbstractTestCase
from . import AbstractLocalizationTestCase


class KoTest(AbstractLocalizationTestCase, AbstractTestCase):

    locale = 'ko'

    def diff_for_humans(self):
        with self.wrap_with_test_now():
            d = Pendulum.now().sub_second()
            self.assertEqual('1 초 전', d.diff_for_humans())

            d = Pendulum.now().sub_seconds(2)
            self.assertEqual('2 초 전', d.diff_for_humans())

            d = Pendulum.now().sub_minute()
            self.assertEqual('1 분 전', d.diff_for_humans())

            d = Pendulum.now().sub_minutes(2)
            self.assertEqual('2 분 전', d.diff_for_humans())

            d = Pendulum.now().sub_hour()
            self.assertEqual('1 시간 전', d.diff_for_humans())

            d = Pendulum.now().sub_hours(2)
            self.assertEqual('2 시간 전', d.diff_for_humans())

            d = Pendulum.now().sub_day()
            self.assertEqual('1 일 전', d.diff_for_humans())

            d = Pendulum.now().sub_days(2)
            self.assertEqual('2 일 전', d.diff_for_humans())

            d = Pendulum.now().sub_week()
            self.assertEqual('1 주일 전', d.diff_for_humans())

            d = Pendulum.now().sub_weeks(2)
            self.assertEqual('2 주일 전', d.diff_for_humans())

            d = Pendulum.now().sub_month()
            self.assertEqual('1 개월 전', d.diff_for_humans())

            d = Pendulum.now().sub_months(2)
            self.assertEqual('2 개월 전', d.diff_for_humans())

            d = Pendulum.now().sub_year()
            self.assertEqual('1 년 전', d.diff_for_humans())

            d = Pendulum.now().sub_years(2)
            self.assertEqual('2 년 전', d.diff_for_humans())

            d = Pendulum.now().add_second()
            self.assertEqual('1 초 후', d.diff_for_humans())

            d = Pendulum.now().add_second()
            d2 = Pendulum.now()
            self.assertEqual('1 초 뒤', d.diff_for_humans(d2))
            self.assertEqual('1 초 앞', d2.diff_for_humans(d))

            self.assertEqual('1 초', d.diff_for_humans(d2, True))
            self.assertEqual('2 초', d2.diff_for_humans(d.add_second(), True))
mit
Python
013c6c57959fd8317ba8b27a2a467a37f0a1d8be
Create __init__.py
eshiofune/shopapp,eshiofune/shopapp,eshiofune/shopapp
stock/__init__.py
stock/__init__.py
mit
Python
1eb1851e4dec9c6425c3cf127e6c4ec5b0d3c987
Add LineNumberTable tests
TkTech/Jawa,TkTech/Jawa
tests/test_line_number_attribute.py
tests/test_line_number_attribute.py
# -*- coding: utf-8 -*-
import os.path

import pytest

from jawa import ClassFile


@pytest.fixture
def cf():
    sample_path = os.path.join(
        os.path.dirname(__file__),
        'data',
        'HelloWorldDebug.class'
    )

    with open(sample_path, 'rb') as fin:
        cf = ClassFile(fin)
        yield cf


def test_exceptions_read(cf):
    m = cf.methods.find_one(name='main')
    a = m.code.attributes.find_one(name='LineNumberTable')

    assert len(a.line_no) == 2
    assert a.line_no[0] == (0, 3)
    assert a.line_no[1] == (8, 4)


def test_exceptions_write(cf):
    m = cf.methods.find_one(name='main')
    a = m.code.attributes.find_one(name='LineNumberTable')

    assert a.info == b'\x00\x02\x00\x00\x00\x03\x00\x08\x00\x04'
mit
Python
beb98425423e0278d9d4d5e39e6b5196146425a0
add manual tests
antirais/estnin
manual_tests.py
manual_tests.py
import os
import sys
import copy

from estnin import estnin
from estnin import _estnin
from datetime import date
from timeit import default_timer as timer


def target(count):
    # return [p for p in estnin.create(estnin.FEMALE, date(2199, 12, 1), 0)]
    for _ in range(count):
        #estnin(89912319991, set_checksum=False)
        estnin(estnin.MIN, set_checksum=False)
    return count


def print_person(person):
    print('='*30)
    print('to str:    %s' % person)
    print('is male:   %s' % person.is_male)
    print('is female: %s' % person.is_female)
    print('date:      %s' % person.date)
    print('year:      %s' % person.year)
    print('month:     %s' % person.month)
    print('day:       %s' % person.day)
    print('sequence:  %s' % person.sequence)
    print('checksum:  %s' % person.checksum)


def performance():
    """
    [*] creating list of 91999 elements took: 3.30743s, 27815.870 elems/s  baseline
    [*] creating list of 91999 elements took: 3.01910s, 30472.310 elems/s  __int__ optimization
    [*] creating list of 91999 elements took: 2.83526s, 32448.128 elems/s  __str__ optimization
    [*] creating list of 91999 elements took: 2.77732s, 33125.086 elems/s  create does not cast to str
    """
    times = []
    rounds = 20
    for c in range(rounds):
        print("\r[*] round: {}/{}".format(c+1, rounds), end='')
        start = timer()
        persons = target(10000)
        end = timer()
        times.append(end - start)
    print()

    total = sum(times)/len(times)
    print("[*] times (ms):", ' '.join(map(lambda time: '{:.2f}'.format(time*1000), times)))
    print("[*] creating list of {} elements took: average {:.3f}ms, {:.3f} elems/s".format(persons, total*1000, persons/total))


def test():
    e = estnin(estnin.MIN)
    print_person(e)

    o = copy.copy(e)
    o.month += 1
    print_person(o)

    print((-e))
    print_person(e)


if __name__ == '__main__':
    try:
        person = estnin.create(estnin.MALE, date(1800, 1, 1), 0)
        print_person(person)
        performance()
        test()
        person = estnin.create(estnin.MALE, date(1800, 1, 1), 0)
        print(_estnin(3, date(1989, 8, 28), 27, 1))
    except KeyboardInterrupt:
        sys.exit()
mit
Python
cf0021c664612082c669fc562d98759fcd7a4915
Add setup.py
Chris-Graffagnino/slackelot
setup.py
setup.py
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path

here = path.abspath(path.dirname(__file__))

# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()

setup(
    name='slackelot',

    # Versions should comply with PEP440. For a discussion on single-sourcing
    # the version across setup.py and the project code, see
    # https://packaging.python.org/en/latest/single_source_version.html
    version='0.0.1',

    description='A simple wrapper around the Slack web api to post messages',
    long_description=long_description,

    # The project's main homepage.
    url='https://github.com/Chris-Graffagnino/slackelot',

    # Author details
    author='Chris Graffagnino',
    author_email='graffwebdev@gmail.com',

    # Choose your license
    license='MIT',

    # See https://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=[
        # How mature is this project? Common values are
        #   3 - Alpha
        #   4 - Beta
        #   5 - Production/Stable
        'Development Status :: 3 - Alpha',

        # Indicate who your project is intended for
        'Intended Audience :: Developers',
        'Topic :: Software Development :: Build Tools',

        # Pick your license as you wish (should match "license" above)
        'License :: OSI Approved :: MIT License',

        # Specify the Python versions you support here. In particular, ensure
        # that you indicate whether you support Python 2, Python 3 or both.
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
    ],

    # What does your project relate to?
    keywords='slack',

    # You can just specify the packages manually here if your project is
    # simple. Or you can use find_packages().
    # packages=find_packages(exclude=['contrib', 'docs', 'tests']),

    # Alternatively, if you want to distribute just a my_module.py, uncomment
    # this:
    py_modules=["slackelot"],

    # List run-time dependencies here. These will be installed by pip when
    # your project is installed. For an analysis of "install_requires" vs pip's
    # requirements files see:
    # https://packaging.python.org/en/latest/requirements.html
    install_requires=['requests'],

    # List additional groups of dependencies here (e.g. development
    # dependencies). You can install these using the following syntax,
    # for example:
    # $ pip install -e .[dev,test]
    # extras_require={
    #     'dev': ['check-manifest'],
    #     'test': ['coverage'],
    # },

    # If there are data files included in your packages that need to be
    # installed, specify them here. If using Python 2.6 or less, then these
    # have to be included in MANIFEST.in as well.
    # package_data={
    #     'sample': ['package_data.dat'],
    # },

    # Although 'package_data' is the preferred approach, in some case you may
    # need to place data files outside of your packages. See:
    # http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files  # noqa
    # In this case, 'data_file' will be installed into '<sys.prefix>/my_data'
    # (left commented out: the sample 'data/data_file' is not part of this project)
    # data_files=[('my_data', ['data/data_file'])],

    # To provide executable scripts, use entry points in preference to the
    # "scripts" keyword. Entry points provide cross-platform support and allow
    # pip to create the appropriate form of executable for the target platform.
    # entry_points={
    #     'console_scripts': [
    #         'sample=sample:main',
    #     ],
    # },
)
mit
Python
e785008aa948e929f7e3ecab3445c1347cb128f3
Add setup.py file
jbittel/django-signage,jbittel/django-signage,jbittel/django-signage
setup.py
setup.py
#!/usr/bin/env python
from setuptools import find_packages
from setuptools import setup


setup(
    name='django-signage',
    version='0.0.1',
    description='A lightweight web-based digital signage application',
    license='BSD',
    author='Jason Bittel',
    author_email='jason.bittel@gmail.com',
    url='https://github.com/jbittel/django-signage',
    download_url='https://github.com/jbittel/django-signage',
    packages=find_packages(),
    include_package_data=True,
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Environment :: Web Environment',
        'Programming Language :: Python',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
)
bsd-3-clause
Python
7a21009efda275372be7b801e07635bd2a9e47af
add setup.py
byteweaver/django-coupons,byteweaver/django-coupons
setup.py
setup.py
import os
from setuptools import setup, find_packages

import coupons


def read(fname):
    return open(os.path.join(os.path.dirname(__file__), fname)).read()


setup(
    name='django-coupons',
    version=coupons.__version__,
    description='A reusable Django application for coupon generation and handling.',
    long_description=read('README.md'),
    license=read('LICENSE'),
    author='byteweaver',
    author_email='contact@byteweaver.net',
    url='https://github.com/byteweaver/django-coupons',
    packages=find_packages(),
    install_requires=[
        'django',
    ],
    tests_require=[
        'django-nose',
        'coverage',
        'django-coverage',
    ],
    test_suite='coupons.tests',
)
bsd-3-clause
Python
d0430066830350b3ef1621bb7c9d7ae7ae7045f4
Add setup.py.
gaomy3832/easypyplot,gaomy3832/easypyplot
setup.py
setup.py
""" * Copyright (c) 2016. Mingyu Gao * All rights reserved. * """ import os import re # To use a consistent encoding from codecs import open # Always prefer setuptools over distutils import setuptools here = os.path.abspath(os.path.dirname(__file__)) package = 'easypyplot' version = '0.0.0' desc = 'Python matplotlib utilities and wrappers' # Get version number with open(os.path.join(here, package, '__init__.py'), encoding='utf-8') as fh: matches = re.findall(r'^\s*__version__\s*=\s*[\'"]([^\'"]+)[\'"]', fh.read(), re.M) if matches: version = matches[-1] setuptools.setup( name=package, version=version, description=desc, author='Mingyu Gao', author_email='mgao12@stanford.edu', #long_description='', #url='', #license='', packages=[package], #install_requires=[], )
bsd-3-clause
Python
3f1b78f5156a6ee18020340290dde24d02d01105
Add basic setup.py
chrishylen-wf/ac-flask-hipchat
setup.py
setup.py
""" Flask-AtlassianConnect ------------- This is the description for that library """ from setuptools import setup setup( name='AC-Flask-HipChat', version='0.1-dev', url='https://bitbucket.org/mrdon/ac-flask-hipchat', license='APLv2', author='Don Brown', author_email='mrdon@twdata.org', description='Atlassian Connect library based on Flask for HipChat', long_description=__doc__, packages=['ac_flask', 'ac_flask.hipchat'], zip_safe=False, include_package_data=True, platforms='any', install_requires=[ 'Flask', 'pymongo', 'redis', 'requests', 'PyJWT' ], classifiers=[ 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Topic :: Software Development :: Libraries :: Python Modules' ] )
apache-2.0
Python
ec25f1901d60814a62790cae2becfb6cac0f5e3e
add argparse dep
lavagetto/configdb,lavagetto/configdb
setup.py
setup.py
#!/usr/bin/python

from setuptools import setup, find_packages

setup(
    name='configdb',
    version='0.1',
    description='database framework for configuration info',
    author='ale',
    author_email='ale@incal.net',
    url='http://git.autistici.org/p/configdb',
    install_requires=['argparse', 'Flask', 'formencode', 'inflect',
                      'SQLAlchemy>0.7'],
    setup_requires=[],
    zip_safe=True,
    packages=find_packages(),
    entry_points={
        'console_scripts': [
            'configdb-api-server = configdb.server.wsgiapp:main',
            'configdb-client = configdb.client.cli:main',
        ],
    },
)
#!/usr/bin/python

from setuptools import setup, find_packages

setup(
    name='configdb',
    version='0.1',
    description='database framework for configuration info',
    author='ale',
    author_email='ale@incal.net',
    url='http://git.autistici.org/p/configdb',
    install_requires=['Flask', 'formencode', 'inflect',
                      'SQLAlchemy>0.7'],
    setup_requires=[],
    zip_safe=True,
    packages=find_packages(),
    entry_points={
        'console_scripts': [
            'configdb-api-server = configdb.server.wsgiapp:main',
            'configdb-client = configdb.client.cli:main',
        ],
    },
)
mit
Python
089b020b07fda88ba4679d161badb4423a75444e
add Python setup script
ivilata/pymultihash
setup.py
setup.py
# Based on PyPA sample project's setup script.

"""Pymultihash installation script."""

import os.path

from setuptools import setup

# Load readme file into long description.
thisdir = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(thisdir, 'README.rst')) as readme:
    long_description = readme.read()

setup(
    name='pymultihash',
    version='0.5.0a1',
    description="Python implementation of the multihash specification",
    long_description=long_description,
    url='https://github.com/ivilata/pymultihash',
    author="Ivan Vilata-i-Balaguer",
    author_email='ivan@selidor.net',
    license='MIT',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3 :: Only',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Topic :: Security :: Cryptography',
    ],
    keywords="multihash hash digest format ASCII encoding",
    packages=['multihash'],
    install_requires=[],
    extras_require={
        'sha3': ['sha3'],
        'blake2': ['pyblake2'],
    },
)
mit
Python
67b5eb144dbe14c134657ccc807343f361c5e249
add setup.py
mydeco-dev-team/sworkflow
setup.py
setup.py
from distutils.core import setup

version = '0.1.0.dev0'

setup(name='sworkflow',
      version=version,
      description='Simple Workflow',
      url='https://github.com/mydeco-dev-team/sworkflow',
      packages=['sworkflow'],
      )
bsd-2-clause
Python
4161de9755b531825e83f684c964441bff9ffa7d
bump version to 1.0.0
jhamman/pynco,nco/pynco
setup.py
setup.py
""" setup.py """ from setuptools import setup version = "1.0.0" setup( name="nco", version=version, author="Joe Hamman", author_email="jhamman@ucar.edu", license="MIT", description="""python bindings to NCO""", packages=["nco"], py_modules=["nco.nco", "nco.custom"], url="https://github.com/nco/pynco", download_url="https://raw2.github.com/nco/pynco/tarball/{0}".format(version), keywords=["netcdf", "climate"], classifiers=[ "Development Status :: 4 - Beta", "Topic :: Utilities", "Operating System :: POSIX", "Programming Language :: Python", ], python_requires='>=3.6', tests_require=["dateutil", "h5py", "netcdf4", "numpy", "pytest", "scipy"], )
""" setup.py """ from setuptools import setup version = "0.0.4" setup( name="nco", version=version, author="Joe Hamman", author_email="jhamman@ucar.edu", license="MIT", description="""python bindings to NCO""", packages=["nco"], py_modules=["nco.nco", "nco.custom"], url="https://github.com/nco/pynco", download_url="https://raw2.github.com/nco/pynco/tarball/{0}".format(version), keywords=["netcdf", "climate"], classifiers=[ "Development Status :: 4 - Beta", "Topic :: Utilities", "Operating System :: POSIX", "Programming Language :: Python", ], python_requires='>=3.6', tests_require=["dateutil", "h5py", "netcdf4", "numpy", "pytest", "scipy"], )
mit
Python
d0b1762a098e78ee9d012628ad96d6a18e8d2565
Create setup.py
geekgao/adsl-proxy-server
setup.py
setup.py
from distutils.core import setup
import py2exe

setup(console=["./server.py"],
      data_files=[('.', ['./config.ini'])])
mit
Python
7b6610e03d4485575b18881c375f83e999d20459
Add setup.py #1
uberVU/mongo-pool,uberVU/mongo-pool
setup.py
setup.py
from setuptools import setup
import io
import os

here = os.path.abspath(os.path.dirname(__file__))


def read(*filenames, **kwargs):
    encoding = kwargs.get('encoding', 'utf-8')
    sep = kwargs.get('sep', '\n')
    buf = []
    for filename in filenames:
        with io.open(filename, encoding=encoding) as f:
            buf.append(f.read())
    return sep.join(buf)

long_description = read('README.md')

setup(
    name='mongopool',
    version='0.1',
    url='http://github.com/ubervu/mongopool/',
    description='Tool that manages your mongo clients to different clusters and maps databases to clients',
    long_description=long_description,
    license='Apache Software License',
    author='UberVU',
    install_requires=['pymongo>=2.4'],
    # author_email='jeff@jeffknupp.com',
    packages=['mongopool'],
    include_package_data=True,
    platforms='any',
    test_suite='nose.collector',
    tests_require=['nose', 'mock'],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: Apache Software License',
        'Natural Language :: English',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Database',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
    extras_require={
        'testing': ['nose'],
    }
)
apache-2.0
Python
3a6dd52e3cdfc5eca51d6dac4eb0701a1a04d550
make version 0.3.5
Duke-GCB/DukeDSClient,Duke-GCB/DukeDSClient
setup.py
setup.py
from setuptools import setup

setup(name='DukeDSClient',
      version='0.3.5',
      description='Command line tool(ddsclient) to upload/manage projects on the duke-data-service.',
      url='https://github.com/Duke-GCB/DukeDSClient',
      keywords='duke dds dukedataservice',
      author='John Bradley',
      license='MIT',
      packages=['ddsc', 'ddsc.core'],
      install_requires=[
          'requests',
          'PyYAML',
      ],
      test_suite='nose.collector',
      tests_require=['nose', 'mock'],
      entry_points={
          'console_scripts': [
              'ddsclient = ddsc.__main__:main'
          ]
      },
      classifiers=[
          'Development Status :: 3 - Alpha',
          'Intended Audience :: Developers',
          'Topic :: Utilities',
          'License :: OSI Approved :: MIT License',
          'Programming Language :: Python :: 2.7',
          'Programming Language :: Python :: 3.3',
          'Programming Language :: Python :: 3.4',
          'Programming Language :: Python :: 3.5',
      ],
      )
from setuptools import setup

setup(name='DukeDSClient',
      version='0.3.4',
      description='Command line tool(ddsclient) to upload/manage projects on the duke-data-service.',
      url='https://github.com/Duke-GCB/DukeDSClient',
      keywords='duke dds dukedataservice',
      author='John Bradley',
      license='MIT',
      packages=['ddsc', 'ddsc.core'],
      install_requires=[
          'requests',
          'PyYAML',
      ],
      test_suite='nose.collector',
      tests_require=['nose', 'mock'],
      entry_points={
          'console_scripts': [
              'ddsclient = ddsc.__main__:main'
          ]
      },
      classifiers=[
          'Development Status :: 3 - Alpha',
          'Intended Audience :: Developers',
          'Topic :: Utilities',
          'License :: OSI Approved :: MIT License',
          'Programming Language :: Python :: 2.7',
          'Programming Language :: Python :: 3.3',
          'Programming Language :: Python :: 3.4',
          'Programming Language :: Python :: 3.5',
      ],
      )
mit
Python
b25e21745ecdc5c03b3229ba77ee51b5fdd1561d
Move scapy to scapyproto to avoid breaking import of scapy from inside of protocols
0xPoly/ooni-probe,Karthikeyan-kkk/ooni-probe,hackerberry/ooni-probe,kdmurray91/ooni-probe,Karthikeyan-kkk/ooni-probe,juga0/ooni-probe,Karthikeyan-kkk/ooni-probe,0xPoly/ooni-probe,juga0/ooni-probe,kdmurray91/ooni-probe,lordappsec/ooni-probe,0xPoly/ooni-probe,lordappsec/ooni-probe,lordappsec/ooni-probe,Karthikeyan-kkk/ooni-probe,hackerberry/ooni-probe,lordappsec/ooni-probe,juga0/ooni-probe,juga0/ooni-probe,kdmurray91/ooni-probe,kdmurray91/ooni-probe,0xPoly/ooni-probe
ooni/protocols/scapyproto.py
ooni/protocols/scapyproto.py
import random

from zope.interface import implements
from twisted.python import usage
from twisted.plugin import IPlugin
from twisted.internet import protocol, defer

from ooni.plugoo.tests import ITest, OONITest
from ooni.plugoo.assets import Asset
from ooni.utils import log
from ooni.lib.txscapy import txsr, txsend


class ScapyTest(OONITest):
    """
    A utility class for writing scapy driven OONI tests.
    """
    receive = True
    timeout = None
    pcapfile = 'scapytest.pcap'

    def initialize(self, reactor=None):
        if not self.reactor:
            from twisted.internet import reactor
            self.reactor = reactor
        self.request = {}
        self.response = {}

    def experiment(self, args):
        log.msg("Running experiment")
        if self.receive:
            log.msg("Sending and receiving packets.")
            d = txsr(self.build_packets(), pcapfile=self.pcapfile,
                     timeout=self.timeout)
        else:
            log.msg("Sending packets.")
            d = txsend(self.build_packets())

        def finished(data):
            log.msg("Finished sending")
            return data

        d.addCallback(finished)
        return d

    def build_packets(self):
        """
        Override this method to build scapy packets.
        """
        from scapy.all import IP, TCP
        return IP()/TCP()

    def load_assets(self):
        return {}
bsd-2-clause
Python
3cab374806f9fd8a5fd90265025a4b021e0056f6
add preliminary mergetools tests
PFCM/facebook_data_collection
test_mergetools.py
test_mergetools.py
""" Tests for the mergetools.py script. """ import tempfile import random from contextlib import contextmanager import string import datetime import csv import pandas as pd import mergetools from scraper import CSV_HEADERS def random_str(length, chars=string.ascii_lowercase): return ''.join(random.choice(chars) for _ in range(length)) def random_fb_dataframe(size=100, until=datetime.datetime.now()): """Returns a random dataframe that looks a bit like the data we tend to get from facebook (and has the same columns) but with utterly random content. May not quite make sense as regards comments etc. Will have to ensure that it does before using for testing context.""" # choose a random page name pagename = random_str(10) data = { "page":[pagename for _ in range(size)], "posted by":[random_str(10) for _ in range(size)], "message":[random_str(100, chars=string.ascii_lowercase + ' ') for _ in range(size)], "link":[random_str(25) for _ in range(size)], "shares":[random.randint(0,15) for _ in range(size)], "likes":[random.randint(0,1000) for _ in range(size)], "number of comments":[random.randint(0,50) for _ in range(size)], "pic":['' for _ in range(size)], "url":[random_str(50) for _ in range(size)], "type":[random.choice(['post','comment','comment']) for _ in range(size)] } start_time = until - (datetime.timedelta(1) * size) frame = pd.DataFrame( data=data, # idces should be a date range index=pd.DatetimeIndex(start=start_time, periods=size, freq='D') #columns=CSV_HEADERS, ) return frame def setup_disjoint(): """Make some fake, totally disjoint data. Returns 2 pandas DataFrames with the same columns and different data""" start_a = datetime.datetime.now() start_b = datetime.datetime.now() - datetime.timedelta(50) return (random_fb_dataframe(size=30, until=start_a), random_fb_dataframe(size=30, until=start_b)) @contextmanager def write_dataframes(frames, encoding='utf-16'): """Writes a sequence of dataframes to temporary files and returns the filenames. Should be used as a context manager, will clean up after itself""" files = [] for frame in frames: files.append(tempfile.NamedTemporaryFile(mode='w', delete=False)) frame.to_csv(files[-1], encoding=encoding, index_label='dates', quoting=csv.QUOTE_ALL, sep='\t') # actually write it files[-1].close() # yield the names yield [f.name for f in files] # close the files for f in files: f.delete() # no doubt someday it will make sense to have this very cleverly organised # but right now there is only one functionality to test class Symdiff_Test(object): """Tests for the symmetric difference with context op""" def disjoint_test(self): """Tests that the symmetric difference of two disjoint frames is just their union.""" print('symdiff - testing disjoint') a,b = setup_disjoint() op = mergetools.SymmetricDifference(a,b, write_out=False, do_context=False) result_1 = pd.concat([a,b]) result_2 = op() assert result_1.equals(result_2) def loadfile_test(self): """Make sure it can load data from file and perform an op without errors """ print('symdiff - testing files') with write_dataframes(setup_disjoint()) as data: op = mergetools.SymmetricDifference.from_args(data) op()
bsd-2-clause
Python
992191d290df8d7764a272c3b45e2f7b937456ec
add fib
Akagi201/learning-python,Akagi201/learning-python,Akagi201/learning-python,Akagi201/learning-python,Akagi201/learning-python
misc/py3/fib.py
misc/py3/fib.py
#!/usr/bin/env python

# Python 3: Fibonacci series up to n
def fib(n):
    a, b = 0, 1
    while a < n:
        print(a, end=' ')
        a, b = b, a + b
    print()

fib(1000)
mit
Python
abf7b0ffd86656f8311da7bfde65663d35ffd543
fix for using stencilview
hansent/kivy,janssen/kivy,tony/kivy,Farkal/kivy,janssen/kivy,xiaoyanit/kivy,wangjun/kivy,youprofit/kivy,Shyam10/kivy,arcticshores/kivy,bob-the-hamster/kivy,adamkh/kivy,bob-the-hamster/kivy,jehutting/kivy,CuriousLearner/kivy,eHealthAfrica/kivy,matham/kivy,bionoid/kivy,MiyamotoAkira/kivy,wangjun/kivy,darkopevec/kivy,darkopevec/kivy,ehealthafrica-ci/kivy,autosportlabs/kivy,gonzafirewall/kivy,Cheaterman/kivy,jffernandez/kivy,edubrunaldi/kivy,inclement/kivy,andnovar/kivy,bob-the-hamster/kivy,akshayaurora/kivy,jkankiewicz/kivy,JohnHowland/kivy,xiaoyanit/kivy,yoelk/kivy,rnixx/kivy,niavlys/kivy,kived/kivy,arcticshores/kivy,ehealthafrica-ci/kivy,jffernandez/kivy,wangjun/kivy,JohnHowland/kivy,wangjun/kivy,youprofit/kivy,KeyWeeUsr/kivy,matham/kivy,zennobjects/kivy,el-ethan/kivy,JohnHowland/kivy,xpndlabs/kivy,jegger/kivy,xiaoyanit/kivy,Ramalus/kivy,xpndlabs/kivy,vitorio/kivy,Shyam10/kivy,iamutkarshtiwari/kivy,jehutting/kivy,bhargav2408/kivy,iamutkarshtiwari/kivy,Cheaterman/kivy,yoelk/kivy,kived/kivy,manashmndl/kivy,LogicalDash/kivy,bhargav2408/kivy,Cheaterman/kivy,aron-bordin/kivy,edubrunaldi/kivy,manthansharma/kivy,thezawad/kivy,jffernandez/kivy,bionoid/kivy,Ramalus/kivy,habibmasuro/kivy,ernstp/kivy,arcticshores/kivy,MiyamotoAkira/kivy,gonzafirewall/kivy,Shyam10/kivy,jffernandez/kivy,cbenhagen/kivy,ehealthafrica-ci/kivy,JohnHowland/kivy,hansent/kivy,angryrancor/kivy,autosportlabs/kivy,adamkh/kivy,aron-bordin/kivy,janssen/kivy,Farkal/kivy,vipulroxx/kivy,kivatu/kivy-bak,Davideddu/kivy-forkedtouch,MiyamotoAkira/kivy,Ramalus/kivy,rnixx/kivy,jegger/kivy,cbenhagen/kivy,kivy/kivy,arlowhite/kivy,LogicalDash/kivy,adamkh/kivy,denys-duchier/kivy,manashmndl/kivy,VinGarcia/kivy,KeyWeeUsr/kivy,denys-duchier/kivy,andnovar/kivy,vipulroxx/kivy,jkankiewicz/kivy,zennobjects/kivy,matham/kivy,tony/kivy,habibmasuro/kivy,bhargav2408/kivy,kivatu/kivy-bak,jkankiewicz/kivy,kivy/kivy,bliz937/kivy,autosportlabs/kivy,dirkjot/kivy,xpndlabs/kivy,eHealthAfrica/kivy,dirkjot/kivy,angryrancor/kivy,kivatu/kivy-bak,niavlys/kivy,mSenyor/kivy,manashmndl/kivy,kivy/kivy,cbenhagen/kivy,arcticshores/kivy,el-ethan/kivy,gonzafirewall/kivy,janssen/kivy,manthansharma/kivy,zennobjects/kivy,gonzafirewall/kivy,Shyam10/kivy,mSenyor/kivy,bionoid/kivy,inclement/kivy,thezawad/kivy,LogicalDash/kivy,yoelk/kivy,Farkal/kivy,vipulroxx/kivy,Davideddu/kivy-forkedtouch,hansent/kivy,VinGarcia/kivy,rafalo1333/kivy,habibmasuro/kivy,bliz937/kivy,ernstp/kivy,kived/kivy,vitorio/kivy,niavlys/kivy,Davideddu/kivy-forkedtouch,vipulroxx/kivy,kivatu/kivy-bak,CuriousLearner/kivy,MiyamotoAkira/kivy,dirkjot/kivy,bionoid/kivy,zennobjects/kivy,Farkal/kivy,viralpandey/kivy,KeyWeeUsr/kivy,rafalo1333/kivy,edubrunaldi/kivy,mSenyor/kivy,youprofit/kivy,angryrancor/kivy,arlowhite/kivy,akshayaurora/kivy,eHealthAfrica/kivy,el-ethan/kivy,hansent/kivy,aron-bordin/kivy,akshayaurora/kivy,jegger/kivy,darkopevec/kivy,bliz937/kivy,niavlys/kivy,jehutting/kivy,ehealthafrica-ci/kivy,vitorio/kivy,andnovar/kivy,denys-duchier/kivy,rnixx/kivy,aron-bordin/kivy,yoelk/kivy,CuriousLearner/kivy,VinGarcia/kivy,adamkh/kivy,angryrancor/kivy,arlowhite/kivy,viralpandey/kivy,darkopevec/kivy,manthansharma/kivy,KeyWeeUsr/kivy,ernstp/kivy,thezawad/kivy,iamutkarshtiwari/kivy,Cheaterman/kivy,eHealthAfrica/kivy,dirkjot/kivy,ernstp/kivy,bob-the-hamster/kivy,matham/kivy,viralpandey/kivy,tony/kivy,jkankiewicz/kivy,LogicalDash/kivy,manthansharma/kivy,denys-duchier/kivy,Davideddu/kivy-forkedtouch,rafalo1333/kivy,jegger/kivy,inclement/kivy
kivy/uix/scrollview.py
kivy/uix/scrollview.py
'''
Scroll View
===========

A ScrollView provides a scrollable/pannable viewport which is clipped to the
ScrollView's bounding box.
'''

__all__ = ('ScrollView', )

from kivy.uix.stencilview import StencilView
from kivy.uix.scatter import ScatterPlane


class ScrollView(StencilView):
    '''ScrollView class. See module documentation for more informations.
    '''

    def __init__(self, **kwargs):
        self.viewport = ScatterPlane()
        super(ScrollView, self).__init__(**kwargs)
        super(ScrollView, self).add_widget(self.viewport)
        self.viewport.bind(size=self.size)

    def add_widget(self, widget):
        self.viewport.add_widget(widget)

    def remove_widget(self, widget):
        self.viewport.remove_widget(widget)

    def clear_widgets(self):
        self.viewport.clear()

    def on_touch_down(self, touch):
        if self.collide_point(*touch.pos):
            return super(ScrollView, self).on_touch_down(touch)

    def on_touch_move(self, touch):
        if self.collide_point(*touch.pos):
            return super(ScrollView, self).on_touch_move(touch)

    def on_touch_up(self, touch):
        if self.collide_point(*touch.pos):
            return super(ScrollView, self).on_touch_up(touch)
'''
ScrollView widget
'''

__all__ = ('ScrollView', )

from kivy.uix.stencil import StencilView
from kivy.uix.scatter import ScatterPlane


class ScrollView(StencilView):
    '''ScrollView:
    A ScrollView provides a scrollable/pannable viewport which is clipped
    to the ScrollView's bounding box.
    '''

    def __init__(self, **kwargs):
        self.viewport = ScatterPlane()
        super(ScrollView, self).__init__(**kwargs)
        super(ScrollView, self).add_widget(self.viewport)
        self.viewport.bind(size=self.size)

    def add_widget(self, widget):
        self.viewport.add_widget(widget)

    def remove_widget(self, widget):
        self.viewport.remove_widget(widget)

    def clear_widgets(self):
        self.viewport.clear()

    def on_touch_down(self, touch):
        if self.collide_point(*touch.pos):
            return super(ScrollView, self).on_touch_down(touch)

    def on_touch_move(self, touch):
        if self.collide_point(*touch.pos):
            return super(ScrollView, self).on_touch_move(touch)

    def on_touch_up(self, touch):
        if self.collide_point(*touch.pos):
            return super(ScrollView, self).on_touch_up(touch)
mit
Python
befa79ec76752f0811b49ec323813e6e1931638d
Create solution.py
lilsweetcaligula/Online-Judges,lilsweetcaligula/Online-Judges,lilsweetcaligula/Online-Judges
hackerrank/algorithms/implementation/medium/bigger_is_greater/py/solution.py
hackerrank/algorithms/implementation/medium/bigger_is_greater/py/solution.py
def solution(s):
    #
    # The next permutation algorithm. For more information, please look up:
    # [href.] https://www.nayuki.io/page/next-lexicographical-permutation-algorithm
    # [href.] https://en.wikipedia.org/wiki/Permutation#Generation_in_lexicographic_order
    #
    chars = list(s)

    i = len(chars) - 1
    while i > 0 and chars[i - 1] >= chars[i]:
        i -= 1

    if i == 0:
        return None

    j = len(chars) - 1
    while chars[j] <= chars[i - 1]:
        j -= 1

    chars[i - 1], chars[j] = chars[j], chars[i - 1]
    return ''.join(chars[:i] + list(reversed(chars[i:])))


testCount = int(input())

for testId in range(testCount):
    word = input().strip()
    greater = solution(word)
    if greater:
        print(greater)
    else:
        print('no answer')
mit
Python
df8ddd56ad51f0a644696cb0ff12c2e7a17c5913
Create lonely-pixel-i.py
jaredkoontz/leetcode,yiwen-luo/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,jaredkoontz/leetcode,jaredkoontz/leetcode,tudennis/LeetCode---kamyu104-11-24-2015,yiwen-luo/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,yiwen-luo/LeetCode,yiwen-luo/LeetCode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,jaredkoontz/leetcode,yiwen-luo/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,kamyu104/LeetCode,jaredkoontz/leetcode
Python/lonely-pixel-i.py
Python/lonely-pixel-i.py
# Time:  O(m * n)
# Space: O(m + n)

class Solution(object):
    def findLonelyPixel(self, picture):
        """
        :type picture: List[List[str]]
        :rtype: int
        """
        rows, cols = [0] * len(picture), [0] * len(picture[0])
        for i in xrange(len(picture)):
            for j in xrange(len(picture[0])):
                if picture[i][j] == 'B':
                    rows[i] += 1
                    cols[j] += 1

        result = 0
        for i in xrange(len(picture)):
            if rows[i] == 1:
                for j in xrange(len(picture[0])):
                    result += picture[i][j] == 'B' and cols[j] == 1
        return result


class Solution2(object):
    def findLonelyPixel(self, picture):
        """
        :type picture: List[List[str]]
        :type N: int
        :rtype: int
        """
        return sum(col.count('B') == 1 == picture[col.index('B')].count('B')
                   for col in zip(*picture))
mit
Python
c1dcb46e95d5b96ecf45db2e1f466b6f99330e1c
Add VimwikiTask cache-ing
Spirotot/taskwiki,phha/taskwiki
taskwiki/cache.py
taskwiki/cache.py
import copy

import vim


class TaskCache(object):
    """
    A cache that holds all the tasks in the given file
    and prevents multiple redundant taskwarrior calls.
    """

    def __init__(self, tw):
        self.task_cache = dict()
        self.vimwikitask_cache = dict()
        self.tw = tw

    def __getitem__(self, key):
        # String keys refer to the Task objects
        if type(key) in (str, unicode):
            task = self.task_cache.get(key)

            if task is None:
                task = self.tw.tasks.get(uuid=key)
                self.task_cache[key] = task

            return task

        # Integer keys (line numbers) refer to the VimwikiTask objects
        elif type(key) is int:
            vimwikitask = self.vimwikitask_cache.get(key)

            if vimwikitask is None:
                vimwikitask = VimwikiTask.from_line(self, key)

            return vimwikitask  # May return None if the line has no task

        # Anything else is wrong
        else:
            raise ValueError("Wrong key type: %s (%s)" % (key, type(key)))

    def iterate_vimwiki_tasks(self):
        iterated_cache = copy.copy(self.task_cache)
        while iterated_cache.keys():
            for key in list(iterated_cache.keys()):
                task = iterated_cache[key]
                if all([t['line_number'] not in iterated_cache.keys()
                        for t in task.add_dependencies]):
                    del iterated_cache[key]
                    yield task

    def reset(self):
        self.task_cache = dict()
        self.vimwikitask_cache = dict()

    def update_tasks(self):
        # Select all tasks in the files that have UUIDs
        uuids = [t['uuid'] for t in self.task_cache.values() if t.saved]

        # Get them out of TaskWarrior at once
        tasks = self.tw.filter(uuid=','.join(uuids))

        # Update each task in the cache
        for task in tasks:
            self.task_cache[task['uuid']] = task
import copy

import vim


class TaskCache(object):
    """
    A cache that holds all the tasks in the given file
    and prevents multiple redundant taskwarrior calls.
    """

    def __init__(self, tw):
        self.cache = dict()
        self.tw = tw

    def __getitem__(self, key):
        task = self.cache.get(key)

        if task is None:
            task = self.tw.tasks.get(uuid=key)
            self.cache[key] = task

        return task

    def __iter__(self):
        iterated_cache = copy.copy(self.cache)
        while iterated_cache.keys():
            for key in list(iterated_cache.keys()):
                task = iterated_cache[key]
                if all([t.line_number not in iterated_cache.keys()
                        for t in task.add_dependencies]):
                    del iterated_cache[key]
                    yield task

    def reset(self):
        self.cache = dict()

    def update_tasks(self):
        # Select all tasks in the files that have UUIDs
        uuids = [t['uuid'] for t in self.cache.values() if t.saved]

        # Get them out of TaskWarrior at once
        tasks = self.tw.filter(uuid=','.join(uuids))

        # Update each task in the cache
        for task in tasks:
            self.cache[task['uuid']] = task
mit
Python
67c3c0e3c165dc73f548cff57d6cb390614d5aad
Bring back old watcher module
igboyes/virtool,igboyes/virtool,virtool/virtool,virtool/virtool
virtool/watcher.py
virtool/watcher.py
import os
import time
import logging

from virtool.utils import file_stats

from setproctitle import setproctitle
from multiprocessing import Process
from inotify.adapters import Inotify

logger = logging.getLogger(__name__)

TYPE_NAME_DICT = {
    "IN_CREATE": "create",
    "IN_MODIFY": "modify",
    "IN_DELETE": "delete",
    "IN_MOVED_FROM": "delete",
    "IN_CLOSE_WRITE": "close"
}

projector = [
    "_id",
    "_version",
    "name",
    "size_end",
    "size_now",
    "timestamp",
    "file_type",
    "created",
    "reserved",
    "ready"
]


class Watcher(Process):

    def __init__(self, path, queue, interval=0.300):
        super().__init__()
        self.path = path
        self.queue = queue
        self.interval = interval
        self.notifier = Inotify()

    def run(self):
        setproctitle("virtool-inotify")

        self.notifier.add_watch(bytes(self.path, encoding="utf-8"))

        last_modification = time.time()

        try:
            for event in self.notifier.event_gen():
                if event is not None:
                    _, type_names, _, filename = event

                    if filename and type_names[0] in TYPE_NAME_DICT:
                        assert len(type_names) == 1

                        action = TYPE_NAME_DICT[type_names[0]]

                        filename = filename.decode()

                        now = time.time()

                        if action in ["create", "modify", "close"]:
                            file_entry = file_stats(os.path.join(self.path, filename))
                            file_entry["filename"] = filename

                            if action == "modify" and (now - last_modification) > self.interval:
                                self.queue.put({
                                    "action": action,
                                    "file": file_entry
                                })

                                last_modification = now

                            if action in ["create", "close"]:
                                self.queue.put({
                                    "action": action,
                                    "file": file_entry
                                })

                        if action == "delete":
                            self.queue.put({
                                "action": "delete",
                                "file": filename
                            })

        except KeyboardInterrupt:
            logging.debug("Stopped file watcher")
mit
Python
4d08ff430eba96ebef3f0824fe83f5bc2a236675
add share_mem
Akagi201/learning-python,Akagi201/learning-python,Akagi201/learning-python,Akagi201/learning-python,Akagi201/learning-python
multiprocessing/share_mem.py
multiprocessing/share_mem.py
#!/usr/bin/env python

from multiprocessing import Process, Value, Array


def f(n, a):
    n.value = 3.1415927
    for i in range(len(a)):
        a[i] = -a[i]


if __name__ == '__main__':
    num = Value('d', 0.0)
    arr = Array('i', range(10))

    p = Process(target=f, args=(num, arr))
    p.start()
    p.join()

    print(num.value)
    print(arr[:])
mit
Python
119aabe89912c324d1588601c9cbc4b4a48e16ae
Add restarting_flup.py
SimonSapin/snippets,SimonSapin/snippets
restarting_flup.py
restarting_flup.py
#!/usr/bin/env python
"""
This is the same as the usual .fcgi file[1] for using FastCGI with flup,
except that this one terminates itself when the .fcgi file’s modification
date changes.

Assuming you have something[2] that restarts FastCGI processes as needed
(which you should anyway), this effectively allows you to reload the
application by just `touch`ing one file.

[1] http://flask.pocoo.org/docs/deploying/fastcgi/
[2] Something like Circus, Supervisord, or Lighttpd with `bin-path`
    configured.
"""

from os.path import getmtime
from flup.server.fcgi import WSGIServer

START_TIME = getmtime(__file__)


class RestartingServer(WSGIServer):
    def _mainloopPeriodic(self):
        WSGIServer._mainloopPeriodic(self)
        if getmtime(__file__) != START_TIME:
            self._keepGoing = False


from YOUR_APPLICATION import app
RestartingServer(app).run()
bsd-3-clause
Python
7496159322a173bb6265aed2dac4e50ad64de858
Add base fullfill service
Ellyuca/BlockChainAnalysis
service.py
service.py
from flask import Flask
from flask import jsonify
from flask import request

app = Flask(__name__)


@app.route("/chainBot", methods=['POST'])
def chainBot():
    print(request.data)
    return jsonify({
        "speech": "My Test Speech",
        "displayText": "My Test Text",
        "data": {},
        "contextOut": [],
        "source": ""
    }), 200, {'Content-Type': 'text/css; charset=utf-8'}


if __name__ == "__main__":
    app.run("0.0.0.0", 80)
mit
Python
5b01f26d92a32964bcc97cbf9429177bce7c89be
add tests for progress indicator
amirkdv/biseqt,amirkdv/biseqt,amirkdv/biseqt
tests/test_util.py
tests/test_util.py
# -*- coding: utf-8 -*-
from StringIO import StringIO

from biseqt.util import ProgressIndicator


def test_progress_indicator():
    logs = StringIO()
    ProgressIndicator.write = lambda self, message: logs.write(message)

    indic = ProgressIndicator(num_total=1)
    indic.start()
    indic.progress()
    assert logs.getvalue().strip() == '0/1 \r1/1', \
        'Counting progress indicator works'

    logs = StringIO()
    indic = ProgressIndicator(num_total=1, percentage=True)
    indic.start()
    indic.progress()
    assert logs.getvalue().strip() == '0% \r100%', \
        'Percentage progress indicator works'
bsd-3-clause
Python
de7aee058348c00d2cdf244df102010b422e941b
Add a place holder for the PSNR metric
qobilidop/srcnn,qobilidop/srcnn
toolbox/metrics.py
toolbox/metrics.py
def psnr(y_true, y_pred):
    raise NotImplementedError
mit
Python
0da51215709f338e77acfa6e7933595d0c1df95d
Create SIP OPTIONS sender/receiver.
delimitry/networks
networks/sip.py
networks/sip.py
# -*- coding: utf-8 -*-

import argparse
import socket

CRLF = '\r\n'


def send_sip_options(server_host, server_port, client_host, client_port, verbose=True):
    """Sends SIP OPTIONS.

    :param str server_host: SIP server host (IP address).
    :param int server_port: SIP server port.
    :param str client_host: Local client host (IP address).
    :param int client_port: Local client port.
    :param bool verbose: If True prints out the request payload.
    :return: SIP server response.
    :rtype: str
    """
    with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as sock:
        sock.connect((server_host, server_port))
        payload_fields = (
            'OPTIONS sip:127.0.0.1:5060 SIP/2.0',
            f'Via: SIP/2.0/UDP {client_host}:{client_port};rport;branch=BRANCH',
            'Max-Forwards: 70',
            f'From: <sip:{client_host}>;tag=TAG',
            'To: <sip:127.0.0.1>',
            'Call-ID: 1',
            'CSeq: 1 OPTIONS',
            'Content-Length: 0',
        )
        payload = CRLF.join(payload_fields).encode('utf-8')
        if verbose:
            print('===================')
            print('SIP server request:')
            print('===================')
            print(payload.decode().strip())
            print('--------------------')
            print()
        sock.send(payload)
        return sock.recv(4096).decode('utf-8')


def main():
    # prepare argument parser
    parser = argparse.ArgumentParser()
    parser.add_argument('server_host', help='SIP server hostname or IP address')
    parser.add_argument('server_port', nargs='?', default=5060, help='SIP server port (default=5060)')
    args = parser.parse_args()

    hostname = socket.gethostname()
    local_ip = socket.gethostbyname(hostname)
    client = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    client.bind((local_ip, 0))  # get random port
    client_host, client_port = client.getsockname()

    response = send_sip_options(args.server_host, int(args.server_port), client_host, client_port)
    print('====================')
    print('SIP server response:')
    print('====================')
    print(response.strip())
    print('--------------------')


if __name__ == '__main__':
    main()
mit
Python
3b9a0c0b83dda484586ea9c19091b7da1cae55d1
prepare a test file for python
larroy/riak_python3,larroy/riak_python3,larroy/riak_python3
test_riak3k.py
test_riak3k.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import nose
from nose.tools import *

import riak3k
apache-2.0
Python
17c9e1a16c5c16c1b49836cc376ddd6408b73de0
make the dispatcher a global variable and the deprecation warning more developer friendly
ismaelgaudioso/Coherence,sreichholf/python-coherence,unintended/Cohen,ismaelgaudioso/Coherence,unintended/Cohen,sreichholf/python-coherence,coherence-project/Coherence,furbrain/Coherence,coherence-project/Coherence,opendreambox/python-coherence,furbrain/Coherence,opendreambox/python-coherence
coherence/extern/louie.py
coherence/extern/louie.py
""" Wrapper module for the louie implementation """ import warnings from coherence.dispatcher import Dispatcher class Any(object): pass class All(object): pass class Anonymous(object): pass # fake the API class Dummy(object): pass signal = Dummy() sender = Dummy() #senders sender.Anonymous = Anonymous sender.Any = Any #signals signal.All = All # a slightly less raise-y-ish implementation as louie was not so picky, too class GlobalDispatcher(Dispatcher): def connect(self, signal, callback, *args, **kw): if not signal in self.receivers: # ugly hack self.receivers[signal] = [] return Dispatcher.connect(self, signal, callback, *args, **kw) def _get_receivers(self, signal): try: return self.receivers[signal] except KeyError: return [] global _global_dispatcher _global_dispatcher = GlobalDispatcher() _global_receivers_pool = {} def _display_deprecation_warning(): warnings.warn("extern.louie will soon be deprecated in favor of coherence.dispatcher.") def connect(receiver, signal=All, sender=Any, weak=True): callback = receiver if signal in (Any, All): raise NotImplemented("This is not allowed. Signal HAS to be something") if sender not in (Any, All): _display_deprecation_warning() receiver = _global_dispatcher.connect(signal, callback) _global_receivers_pool[(callback, signal)] = receiver return receiver def disconnect(receiver, signal=All, sender=Any, weak=True): callback = receiver if signal in (Any, All): raise NotImplemented("This is not allowed. Signal HAS to be something") if sender not in (Any, All): _display_deprecation_warning() receiver = _global_receivers_pool.pop((callback, signal)) return _global_dispatcher.disconnect(receiver) def send(signal=All, sender=Anonymous, *arguments, **named): if signal in (Any, All): raise NotImplemented("This is not allowed. Signal HAS to be something") if sender not in (Anonymous, None): _display_deprecation_warning() # the first value of the callback shall always be the signal: return _global_dispatcher.save_emit(signal, *arguments, **named) def send_minimal(signal=All, sender=Anonymous, *arguments, **named): return send(signal, sender, *arguments, **named) def send_exact(signal=All, sender=Anonymous, *arguments, **named): return send(signal, sender, *arguments, **named) def send_robust(signal=All, sender=Anonymous, *arguments, **named): return send(signal, sender, *arguments, **named)
""" Wrapper module for the louie implementation """ import warnings from coherence.dispatcher import Dispatcher class Any(object): pass class All(object): pass class Anonymous(object): pass # fake the API class Dummy(object): pass signal = Dummy() sender = Dummy() #senders sender.Anonymous = Anonymous sender.Any = Any #signals signal.All = All # a slightly less raise-y-ish implementation as louie was not so picky, too class GlobalDispatcher(Dispatcher): def connect(self, signal, callback, *args, **kw): if not signal in self.receivers: # ugly hack self.receivers[signal] = [] return Dispatcher.connect(self, signal, callback, *args, **kw) def _get_receivers(self, signal): try: return self.receivers[signal] except KeyError: return [] _global_dispatcher = GlobalDispatcher() _global_receivers_pool = {} def connect(receiver, signal=All, sender=Any, weak=True): callback = receiver if signal in (Any, All): raise NotImplemented("This is not allowed. Signal HAS to be something") if sender not in (Any, All): warnings.warn("Seriously! Use the coherence.dispatcher. It IS object based") receiver = _global_dispatcher.connect(signal, callback) _global_receivers_pool[(callback, signal)] = receiver return receiver def disconnect(receiver, signal=All, sender=Any, weak=True): callback = receiver if signal in (Any, All): raise NotImplemented("This is not allowed. Signal HAS to be something") if sender not in (Any, All): warnings.warn("Seriously! Use the coherence.dispatcher. It IS object based") receiver = _global_receivers_pool.pop((callback, signal)) return _global_dispatcher.disconnect(receiver) def send(signal=All, sender=Anonymous, *arguments, **named): if signal in (Any, All): raise NotImplemented("This is not allowed. Signal HAS to be something") if sender not in (Anonymous, None): warnings.warn("Seriously! Use the coherence.dispatcher. It IS object based") # the first value of the callback shall always be the signal: return _global_dispatcher.save_emit(signal, *arguments, **named) def send_minimal(signal=All, sender=Anonymous, *arguments, **named): return send(signal, sender, *arguments, **named) def send_exact(signal=All, sender=Anonymous, *arguments, **named): return send(signal, sender, *arguments, **named) def send_robust(signal=All, sender=Anonymous, *arguments, **named): return send(signal, sender, *arguments, **named)
mit
Python
0475e35bb6e0bab1d61c038ddd902e32478211d7
Create whois.py
wannaphongcom/code-python3-blog
whois.py
whois.py
# Read the article (in Thai) at https://python3.wannaphong.com/2016/12/ดึงข้อมูล-whois-โดเมนด้วย-python.html
# Written by Wannaphong Phatthiyaphaibun
import whois

w = whois.whois('abc.xyz')  # enter the domain you want WHOIS data for
print(w.expiration_date)  # expiration date
print(w.text)  # domain details
mit
Python
416872a1e7191f62dd2353f3e221a9e9c93c161f
Add tests for utils.
Axelrod-Python/tournament
test_utils.py
test_utils.py
""" Tests for the utils.py file """ import axelrod as axl import unittest import utils import tempfile import csv class TestUtils(unittest.TestCase): """ Simple tests for the utils """ axl.seed(0) players = [s() for s in axl.demo_strategies] tournament = axl.Tournament(players) results = tournament.play() def test_label(self): label = utils.label("Test", self.results) expected_label = "{} - turns: {}, repetitions: {}, strategies: {}. ".format("Test", self.tournament.turns, self.tournament.repetitions, len(self.tournament.players)) def test_summary_data(self): tmpfile = tempfile.NamedTemporaryFile() sd = utils.summary_data(self.results, tmpfile.name) self.assertEqual(len(sd), len(self.tournament.players)) self.assertEqual([player.Name for player in sd], self.results.ranked_names) with open(tmpfile.name, "r") as csvfile: csvreader = csv.reader(csvfile) ranked_names = [row[1] for row in csvreader][1:] self.assertEqual(ranked_names, self.results.ranked_names)
mit
Python
b28ace414c7087936ec14665026b78413b1f3791
Create __init__.py
ruansteve/neutron-dynamic-routing
neutron_dynamic_routing/neutron/cmd/eventlet/agents/__init__.py
neutron_dynamic_routing/neutron/cmd/eventlet/agents/__init__.py
apache-2.0
Python
bf7ad11cc32af83aab6496ac7d7b911bea3d7876
Use new API.
abstract-open-solutions/l10n-italy,linkitspa/l10n-italy,alessandrocamilli/l10n-italy,yvaucher/l10n-italy,andrea4ever/l10n-italy,ApuliaSoftware/l10n-italy,OpenCode/l10n-italy,maxhome1/l10n-italy,hurrinico/l10n-italy,linkitspa/l10n-italy,odoo-isa/l10n-italy,scigghia/l10n-italy,linkitspa/l10n-italy,luca-vercelli/l10n-italy
l10n_it_pec/model/partner.py
l10n_it_pec/model/partner.py
# -*- coding: utf-8 -*-
##############################################################################
#
#    Copyright (C) 2014 Associazione Odoo Italia
#    (<http://www.openerp-italia.org>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp import models, fields


class ResPartner(models.Model):
    _inherit = "res.partner"

    pec_mail = fields.Char(string='PEC Mail')
# -*- coding: utf-8 -*-
##############################################################################
#
#    Copyright (C) 2014 Associazione Odoo Italia
#    (<http://www.openerp-italia.org>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp.osv import fields, orm


class ResPartner(orm.Model):
    _inherit = "res.partner"

    _columns = {
        'pec_mail': fields.char(
            'PEC Mail'
        ),
    }
agpl-3.0
Python
2c590b82b716ecfca9b683afa1181a8368b6cb41
Add some helper methods to deal with sqlite migrations
stackforge/gertty,aspiers/gertty,openstack/gertty
gertty/dbsupport.py
gertty/dbsupport.py
# Copyright 2014 Mirantis Inc.
# Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import six
import uuid

from alembic import op
import sqlalchemy


def sqlite_alter_columns(table_name, column_defs):
    """Implement alter columns for SQLite.

    The ALTER COLUMN command isn't supported by SQLite specification.
    Instead of calling ALTER COLUMN it uses the following workaround:

    * create temp table '{table_name}_{rand_uuid}', with some column
      defs replaced;
    * copy all data to the temp table;
    * drop old table;
    * rename temp table to the old table name.
    """
    connection = op.get_bind()
    meta = sqlalchemy.MetaData(bind=connection)
    meta.reflect()

    changed_columns = {}
    indexes = []
    for col in column_defs:
        # If we are to have an index on the column, don't create it
        # immediately, instead, add it to a list of indexes to create
        # after the table rename.
        if col.index:
            indexes.append(('ix_%s_%s' % (table_name, col.name),
                            table_name,
                            [col.name],
                            col.unique))
            col.unique = False
            col.index = False
        changed_columns[col.name] = col

    # construct lists of all columns and their names
    old_columns = []
    new_columns = []
    column_names = []
    for column in meta.tables[table_name].columns:
        column_names.append(column.name)
        old_columns.append(column)
        if column.name in changed_columns.keys():
            new_columns.append(changed_columns[column.name])
        else:
            col_copy = column.copy()
            new_columns.append(col_copy)

    for key in meta.tables[table_name].foreign_keys:
        constraint = key.constraint
        con_copy = constraint.copy()
        new_columns.append(con_copy)

    for index in meta.tables[table_name].indexes:
        # If this is a single column index for a changed column, don't
        # copy it because we may already be creating a new version of
        # it (or removing it).
        idx_columns = [col.name for col in index.columns]
        if len(idx_columns) == 1 and idx_columns[0] in changed_columns.keys():
            continue
        # Otherwise, recreate the index.
        indexes.append((index.name,
                        table_name,
                        [col.name for col in index.columns],
                        index.unique))

    # create temp table
    tmp_table_name = "%s_%s" % (table_name, six.text_type(uuid.uuid4()))
    op.create_table(tmp_table_name, *new_columns)
    meta.reflect()

    try:
        # copy data from the old table to the temp one
        sql_select = sqlalchemy.sql.select(old_columns)
        connection.execute(sqlalchemy.sql.insert(meta.tables[tmp_table_name])
                           .from_select(column_names, sql_select))
    except Exception:
        op.drop_table(tmp_table_name)
        raise

    # drop the old table and rename temp table to the old table name
    op.drop_table(table_name)
    op.rename_table(tmp_table_name, table_name)

    # (re-)create indexes
    for index in indexes:
        op.create_index(op.f(index[0]), index[1], index[2],
                        unique=index[3])
apache-2.0
Python
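A minimal sketch of how an Alembic migration might call this helper; the 'change' table and 'status' column below are hypothetical, not part of gertty's actual schema:

import sqlalchemy as sa
from gertty.dbsupport import sqlite_alter_columns

def upgrade():
    # Rebuild the table so that 'status' becomes a wider, indexed column.
    sqlite_alter_columns('change', [
        sa.Column('status', sa.String(64), index=True, nullable=False),
    ])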
489d883af246e7de727ea14e01ae4a0cd17f88eb
fix emoji on python3.4
TetraEtc/limbo,TetraEtc/limbo,serverdensity/sdbot,llimllib/limbo,palachu/sdbot,llimllib/limbo
limbo/plugins/emoji.py
limbo/plugins/emoji.py
"""!emoji <n> will return n random emoji""" import re import random from emojicodedict import emojiCodeDict def randomelt(dic): keys = list(dic.keys()) i = random.randint(0, len(keys) - 1) return dic[keys[i]] def emoji(n=1): emoji = [] for i in range(n): emoji.append(randomelt(emojiCodeDict)) return "".join(emoji) def on_message(msg, server): text = msg.get("text", "") match = re.findall(r"(!emoji)\s*(\d+)*", text) if not match: return n = 1 if not match[0][1] else int(match[0][1]) return emoji(n)
"""!emoji <n> will return n random emoji""" import re import random from emojicodedict import emojiCodeDict def randomelt(dic): keys = dic.keys() i = random.randint(0, len(keys) - 1) return dic[keys[i]] def emoji(n=1): emoji = [] for i in range(n): emoji.append(randomelt(emojiCodeDict)) return "".join(emoji) def on_message(msg, server): text = msg.get("text", "") match = re.findall(r"(!emoji)\s*(\d+)*", text) if not match: return n = 1 if not match[0][1] else int(match[0][1]) return emoji(n)
mit
Python
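The list() wrapper is what fixes the plugin on Python 3, where dict.keys() returns a non-indexable view. A quick illustration of the difference:

d = {"a": 1, "b": 2}

keys = d.keys()
# Python 2: a plain list, so keys[0] works
# Python 3: a dict_keys view, so keys[0] raises TypeError

keys = list(d.keys())
keys[0]  # works on both Python 2 and Python 3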
b35908d8ed8257bfde75953c360112f87c0eccd3
add api/urls.py
danirus/django-comments-xtd,danirus/django-comments-xtd,danirus/django-comments-xtd,danirus/django-comments-xtd
django_comments_xtd/api/urls.py
django_comments_xtd/api/urls.py
from django.urls import path, re_path

from .views import (
    CommentCount, CommentCreate, CommentList, CreateReportFlag,
    ToggleFeedbackFlag, preview_user_avatar,
)


urlpatterns = [
    path('comment/', CommentCreate.as_view(),
         name='comments-xtd-api-create'),
    path('preview/', preview_user_avatar,
         name='comments-xtd-api-preview'),
    re_path(r'^(?P<content_type>\w+[-]{1}\w+)/(?P<object_pk>[-\w]+)/$',
            CommentList.as_view(), name='comments-xtd-api-list'),
    re_path(
        r'^(?P<content_type>\w+[-]{1}\w+)/(?P<object_pk>[-\w]+)/count/$',
        CommentCount.as_view(), name='comments-xtd-api-count'),
    path('feedback/', ToggleFeedbackFlag.as_view(),
         name='comments-xtd-api-feedback'),
    path('flag/', CreateReportFlag.as_view(),
         name='comments-xtd-api-flag'),
]
bsd-2-clause
Python
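A sketch of how a project urlconf might mount these routes; the 'comments/api/' prefix is an assumption for illustration, not something this module prescribes:

from django.urls import include, path

urlpatterns = [
    # ... other project routes ...
    path('comments/api/', include('django_comments_xtd.api.urls')),
]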
8e422c867f25424fbc2d95e4a11cb76ea4de66ac
Create land.py
apottr/wopr
objects/land.py
objects/land.py
{"rows":[ {"C": [ {"tile_start": "6C","tile_end": "21C", "side": NULL}, {"tile_start": "26C","tile_end": "57C", "side": NULL}, ]}, {"D": [ {"tile_start": "20D","tile_end": "20D", "side": "USA"}, {"tile_start": "38D","tile_end": "42D", "side": "USA"}, {"tile_start": "44D","tile_end": "56D", "side": "USA"} ]}, {"E": [ {"tile_start": "3W","tile_end": "16W", "side": "USA"} ]}, {"F": [ {"tile_start": "3W","tile_end": "16W", "side": "USA"} ]}, {"G": [ {"tile_start": "3W","tile_end": "16W", "side": "USA"} ]}, {"H": [ {"tile_start": "3W","tile_end": "16W", "side": "USA"} ]}, {"I": [ {"tile_start": "3W","tile_end": "16W", "side": "USA"} ]}, {"J": [ {"tile_start": "3W","tile_end": "16W", "side": "USA"} ]}, {"K": [ {"tile_start": "3W","tile_end": "16W", "side": "USA"} ]}, {"L": [ {"tile_start": "3W","tile_end": "16W", "side": "USA"} ]}, {"M": [ {"tile_start": "3W","tile_end": "16W", "side": "USA"} ]}, {"N": [ {"tile_start": "3W","tile_end": "16W", "side": "USA"} ]}, {"O": [ {"tile_start": "3W","tile_end": "16W", "side": "USA"} ]}, {"P": [ {"tile_start": "3W","tile_end": "16W", "side": "USA"} ]}, {"Q": [ {"tile_start": "3W","tile_end": "16W", "side": "USA"} ]}, {"R": [ {"tile_start": "3W","tile_end": "16W", "side": "USA"} ]}, {"S": [ {"tile_start": "3W","tile_end": "16W", "side": "USA"} ]}, {"T": [ {"tile_start": "3W","tile_end": "16W", "side": "USA"} ]}, {"U": [ {"tile_start": "3W","tile_end": "16W", "side": "USA"} ]}, {"V": [ {"tile_start": "3W","tile_end": "16W", "side": "USA"} ]}, {"W": [ {"tile_start": "3W","tile_end": "16W", "side": "USA"} ]}, {"X": [ {"tile_start": "3W","tile_end": "16W", "side": "USA"} ]} ]}
mit
Python
4a9a844353a565a596148e31c17dad6b57cda081
Add text encoding functionality.
tilusnet/thqpylib
txtencoding.py
txtencoding.py
#!/usr/bin/env python3

from chardet.universaldetector import UniversalDetector


class TxtEncoding:
    def __init__(self):
        # inspired by https://chardet.readthedocs.org/en/latest/usage.html#example-detecting-encodings-of-multiple-files
        self.detector = UniversalDetector()

    def detectEncoding(self, fname):
        '''Detect the encoding of file fname.
        Returns a dictionary with {'encoding', 'confidence'} fields.'''
        self.detector.reset()
        with open(fname, 'rb') as f:
            for line in f:
                self.detector.feed(line)
                if self.detector.done:
                    break
        self.detector.close()
        return self.detector.result
mit
Python
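A minimal usage sketch, assuming chardet is installed and 'notes.txt' exists on disk:

te = TxtEncoding()
result = te.detectEncoding('notes.txt')
# chardet reports something like {'encoding': 'utf-8', 'confidence': 0.99}
print(result['encoding'], result['confidence'])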
ff7d96204d528e65faec8312e98fd727bd163d08
Save and load files.
arkharin/OpenCool
scr/model/model.py
scr/model/model.py
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

"""
Model implementation
"""

import json
from pathlib import Path

_EXTENSION = '.json'


def save(data, file_name, folder='', home_path=Path.home()):
    fp = Path(home_path, folder)
    while True:
        if fp.exists():
            break
        else:
            user_input = input("This directory doesn't exist. Do you want create it? [yes]/no: ")
            if _user_decision(user_input):
                fp.mkdir()
                break
            else:
                folder = input('Write new name: ')
                fp = Path(home_path, folder)
    fp = Path(home_path, folder, file_name + _EXTENSION)
    while True:
        if fp.exists():
            user_input = input('This file already exists. Do you want rename it? [yes]/no: ')
            if _user_decision(user_input):
                name = input('Write new file name: ')
                fp = Path(home_path, folder, name)
            else:
                break
        else:
            break
    print('File saved in: ', fp)
    # Save
    fp = fp.open('w')
    json.dump(data, fp, indent=4, ensure_ascii=False, sort_keys=True)
    fp.close()
    print('Save successfully!')


def load(file_name, folder='', home_path=Path.home()):
    # Check home_path
    fp = Path(home_path, folder, file_name + _EXTENSION)
    if fp.exists() and fp.is_file():
        # load
        fp = fp.open('r')
        data_loaded = json.load(fp)
        fp.close()
        print(file_name, 'loaded successfully')
    else:
        fp_dir = Path(home_path, folder, file_name)
        if fp.exists():
            print('Invalid path')
        elif fp_dir.is_dir():
            print("It's a folder, not a file")
        else:
            print("This file doesn't exist")
        data_loaded = {}
        print('Empty data is loaded')
    return data_loaded


def _user_decision(answer, default_answer='yes'):
    # compare by equality, not identity ('is' on strings is unreliable)
    if answer == '':
        answer = default_answer

    if answer == 'yes':
        return True
    elif answer == 'no':
        return False
    else:
        print('Invalid answer')
        answer = input('Please repeat the answer:')
        return _user_decision(answer, default_answer)
mpl-2.0
Python
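A sketch of the intended round trip, assuming a 'recipes' folder under the user's home directory (save prompts to create it when missing):

data = {'refrigerant': 'R134a', 'capacity_kw': 3.5}
save(data, 'cycle01', folder='recipes')
restored = load('cycle01', folder='recipes')
assert restored == data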
24210f31a5b54adf1b3b038fdad73b679656217c
fix mr_unit.py so that it records test failures properly
mathemage/h2o-3,jangorecki/h2o-3,h2oai/h2o-dev,jangorecki/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,jangorecki/h2o-3,h2oai/h2o-3,YzPaul3/h2o-3,YzPaul3/h2o-3,jangorecki/h2o-3,spennihana/h2o-3,mathemage/h2o-3,h2oai/h2o-dev,spennihana/h2o-3,YzPaul3/h2o-3,mathemage/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3,mathemage/h2o-3,h2oai/h2o-3,spennihana/h2o-3,jangorecki/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,jangorecki/h2o-3,jangorecki/h2o-3,spennihana/h2o-3,spennihana/h2o-3,h2oai/h2o-dev,mathemage/h2o-3,YzPaul3/h2o-3,h2oai/h2o-3,h2oai/h2o-3,spennihana/h2o-3,mathemage/h2o-3,YzPaul3/h2o-3,h2oai/h2o-dev,michalkurka/h2o-3,mathemage/h2o-3,h2oai/h2o-dev,YzPaul3/h2o-3,YzPaul3/h2o-3,h2oai/h2o-dev,spennihana/h2o-3,h2oai/h2o-dev
scripts/mr_unit.py
scripts/mr_unit.py
import sys, os
import csv
import MySQLdb
import traceback


def add_perf_results_to_mr_unit(args):
    mr_unit = MySQLdb.connect(host='mr-0x8', user='root', passwd=args[1], db='mr_unit')
    mr_unit.autocommit(False)
    cursor = mr_unit.cursor()
    try:
        for row in csv.reader(file(os.path.join(args[2], "perf.csv"))):
            row = [r.strip() for r in row]
            row[3] = row[3].split("/")[-1]
            row[8] = "TRUE" if row[8] == "1" else "FALSE"
            cursor.execute('INSERT INTO perf(date, build_id, git_hash, git_branch, machine_ip, test_name, start_time, '
                           'end_time, pass, ncpu, os, job_name) VALUES("{0}", "{1}", "{2}", "{3}", "{4}", "{5}", "{6}"'
                           ', "{7}", {8}, "{9}", "{10}", "{11}")'.format(*row))
        mr_unit.commit()
    except:
        traceback.print_exc()
        mr_unit.rollback()
        assert False, "Failed to add performance results to mr_unit!"

if __name__ == '__main__':
    add_perf_results_to_mr_unit(sys.argv)
import sys, os
import csv
import MySQLdb
import traceback


def add_perf_results_to_mr_unit(args):
    mr_unit = MySQLdb.connect(host='mr-0x8', user='root', passwd=args[1], db='mr_unit')
    mr_unit.autocommit(False)
    cursor = mr_unit.cursor()
    try:
        for row in csv.reader(file(os.path.join(args[2], "perf.csv"))):
            row = [r.strip() for r in row]
            row[3] = row[3].split("/")[-1]
            cursor.execute('INSERT INTO perf(date, build_id, git_hash, git_branch, machine_ip, test_name, start_time, '
                           'end_time, pass, ncpu, os, job_name) VALUES("{0}", "{1}", "{2}", "{3}", "{4}", "{5}", "{6}"'
                           ', "{7}", "{8}", "{9}", "{10}", "{11}")'.format(*row))
        mr_unit.commit()
    except:
        traceback.print_exc()
        mr_unit.rollback()
        assert False, "Failed to add performance results to mr_unit!"

if __name__ == '__main__':
    add_perf_results_to_mr_unit(sys.argv)
apache-2.0
Python
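String-formatting values into SQL is fragile and injectable; MySQLdb can bind the parameters itself. A hedged sketch of the same insert using placeholders, with cursor and row as in the script above:

cursor.execute(
    'INSERT INTO perf(date, build_id, git_hash, git_branch, machine_ip, '
    'test_name, start_time, end_time, pass, ncpu, os, job_name) '
    'VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)',
    row)  # MySQLdb quotes and escapes each value itself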
f424001f409fd35b0e62be9a82d62b21b438e082
Add missing comma
ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,fajran/django-loginurl,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,vanschelven/cmsplugin-journal,uploadcare/django-loginurl,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website
onetime/urls.py
onetime/urls.py
from django.conf.urls.defaults import *
from django.views.generic.simple import redirect_to

from onetime.views import cleanup, login

urlpatterns = patterns('',
    (r'^cleanup/$', cleanup),
    (r'^(?P<key>[a-z0-9+])$', login),
    (r'^$', redirect_to, {'url': None}),
)
from django.conf.urls.defaults import *
from django.views.generic.simple import redirect_to

from onetime.views import cleanup, login

urlpatterns = patterns(''
    (r'^cleanup/$', cleanup),
    (r'^(?P<key>[a-z0-9+])$', login),
    (r'^$', redirect_to, {'url': None}),
)
agpl-3.0
Python
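The missing comma is not just cosmetic: without it, Python parses the empty string and the first tuple as a call expression, so the urlconf fails at import time. A minimal reproduction:

>>> ''(r'^cleanup/$', 'cleanup')
Traceback (most recent call last):
  ...
TypeError: 'str' object is not callable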
6d9bf98b5c077421b2cdaca7ae9adf39f4ed475c
Add a migration to rename "type" field to "status"
zellyn/django-kanboard,zellyn/django-kanboard
src/kanboard/migrations/0002_type_to_status.py
src/kanboard/migrations/0002_type_to_status.py
from south.db import db
from django.db import models
from kanboard.models import *

class Migration:

    def forwards(self, orm):
        db.rename_column('kanboard_phase', 'type', 'status')

    def backwards(self, orm):
        db.rename_column('kanboard_phase', 'status', 'type')

    models = {
        'kanboard.board': {
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'db_index': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '80'})
        },
        'kanboard.card': {
            'backlogged_at': ('django.db.models.fields.DateTimeField', [], {}),
            'blocked': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'blocked_because': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'board': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'cards'", 'to': "orm['kanboard.Board']"}),
            'color': ('django.db.models.fields.CharField', [], {'max_length': '7', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'done_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'order': ('django.db.models.fields.SmallIntegerField', [], {}),
            'phase': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'cards'", 'to': "orm['kanboard.Phase']"}),
            'ready': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'size': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
            'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '80'})
        },
        'kanboard.phase': {
            'board': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'phases'", 'to': "orm['kanboard.Board']"}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'limit': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
            'order': ('django.db.models.fields.SmallIntegerField', [], {}),
            'status': ('django.db.models.fields.CharField', [], {'default': "'progress'", 'max_length': '25'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '80'})
        },
        'kanboard.phaselog': {
            'Meta': {'unique_together': "(('phase', 'date'),)"},
            'count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'date': ('django.db.models.fields.DateField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'phase': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'logs'", 'to': "orm['kanboard.Phase']"})
        }
    }

    complete_apps = ['kanboard']
bsd-3-clause
Python
8d88bf0808c5249d2c1feace5b8a1db1679e44b6
Create tests_unit.py
mattjhayes/nmeta2
tests_unit.py
tests_unit.py
""" Nmeta2 Unit Tests Uses pytest, install with: sudo apt-get install python-pytest To run test, type in: py.test tests_unit.py """ #*** Testing imports: import mock import unittest #*** Ryu imports: from ryu.base import app_manager # To suppress cyclic import from ryu.controller import controller from ryu.controller import handler from ryu.ofproto import ofproto_v1_3_parser from ryu.ofproto import ofproto_v1_2_parser from ryu.ofproto import ofproto_v1_0_parser from ryu.app.wsgi import ControllerBase from ryu.app.wsgi import WSGIApplication from ryu.app.wsgi import route #*** JSON imports: import json from json import JSONEncoder #*** nmeta2 imports: import switch_abstraction import config import api #*** Instantiate Config class: _config = config.Config() #======================== tc_policy.py Unit Tests ============================ #*** Instantiate class: switches = switch_abstraction.Switches(_config) sock_mock = mock.Mock() addr_mock = mock.Mock() #*** Test Switches and Switch classes that abstract OpenFlow switches: def test_switches(): with mock.patch('ryu.controller.controller.Datapath.set_state'): #*** Set up a fake switch datapath: datapath = controller.Datapath(sock_mock, addr_mock) #*** Add a switch assert switches.add(datapath) == 1 #*** Look up by DPID: assert switches.datapath(datapath.id) == datapath #======================== api.py Unit Tests ============================ class _TestController(ControllerBase): def __init__(self, req, link, data, **config): super(_TestController, self).__init__(req, link, data, **config) eq_(data['test_param'], 'foo') class Test_wsgi(unittest.TestCase): """ Test case for running WSGI controller for API testing """ def setUp(self): wsgi = WSGIApplication() #*** Instantiate API class: self.api = api.Api(self, _config, wsgi) def test_decode_JSON(): #*** The JSON_Body class is in the api.py module. Good JSON: good_json = '{\"foo\": \"123\"}' good = api.JSON_Body(good_json) assert not good.error assert good.error == "" assert good.json == {'foo': '123'} assert good['foo'] == '123' assert good['bar'] == 0 #*** Bad JSON: bad_json = "foo, bar=99" bad = api.JSON_Body(bad_json) assert bad.json == {} assert bad.error == '{\"Error\": \"Bad JSON\"}'
apache-2.0
Python
cba429780061bcdafde6f2bc799e74106e2cc336
Create textevolve.py
nsaquib/slc-mathcsworkshop
textevolve.py
textevolve.py
'''
Evolve a piece of text with a simple evolutionary algorithm
Author: Saquib
7/27/13
'''

import random

def fitness(source, target):
    fitval = 0
    for i in range(0, len(source)):
        fitval += (ord(target[i]) - ord(source[i])) ** 2
    return(fitval)

def mutate(source):
    charpos = random.randint(0, len(source) - 1)
    parts = list(source)
    parts[charpos] = chr(ord(parts[charpos]) + random.randint(-1,1))
    return(''.join(parts))

source = ";wql* opqlq"
target = "hello world"
fitval = fitness(source, target)
i = 0
while True:
    i += 1
    m = mutate(source)
    fitval_m = fitness(m, target)
    if fitval_m < fitval:
        fitval = fitval_m
        source = m
        print "%5i %5i %14s" % (i, fitval_m, m)
    if fitval == 0:
        break
mit
Python
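A worked example of the fitness function: each character contributes the squared distance between its code point and the target's, so the score reaches 0 only on an exact match:

>>> fitness("h", "h")
0
>>> fitness(";", "h")          # (104 - 59) ** 2
2025
>>> fitness("hellp", "hello")  # 'p' is one code point past 'o'
1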
ff98bdf9ce263648de784183ad5984864f9d387a
Add ref create api test
virtool/virtool,igboyes/virtool,virtool/virtool,igboyes/virtool
tests/api/test_refs.py
tests/api/test_refs.py
async def test_create(spawn_client, test_random_alphanumeric, static_time):
    client = await spawn_client(authorize=True, permissions=["create_ref"])

    data = {
        "name": "Test Viruses",
        "description": "A bunch of viruses used for testing",
        "data_type": "genome",
        "organism": "virus",
        "public": True
    }

    resp = await client.post("/api/refs", data)

    assert resp.status == 201

    assert resp.headers["Location"] == "/api/refs/" + test_random_alphanumeric.history[0]

    assert await resp.json() == dict(
        data,
        id=test_random_alphanumeric.history[0],
        created_at=static_time.iso,
        user={
            "id": "test"
        },
        users=[{
            "build": True,
            "id": "test",
            "modify": True,
            "modify_kind": True,
            "remove": True
        }]
    )
mit
Python
7d21b55f2de7cd2c34cd3cd985824178d382398d
add 'stages' code
olebole/astrometry.net,olebole/astrometry.net,olebole/astrometry.net,olebole/astrometry.net,olebole/astrometry.net,olebole/astrometry.net,olebole/astrometry.net,olebole/astrometry.net
util/stages.py
util/stages.py
import os

from astrometry.util.file import *


class CallGlobal(object):
    def __init__(self, pattern, *args, **kwargs):
        self.pat = pattern
        self.args = args
        self.kwargs = kwargs
    def __call__(self, stage, **kwargs):
        # The pattern names a global function (eg 'stage%i'); resolve the
        # name to the function object before calling it.
        func = eval(self.pat % stage)
        kwa = self.kwargs.copy()
        kwa.update(kwargs)
        return func(*self.args, **kwa)

def runstage(stage, picklepat, stagefunc, force=[], prereqs={}, **kwargs):
    print 'Runstage', stage
    pfn = picklepat % stage
    if os.path.exists(pfn):
        if stage in force:
            print 'Ignoring pickle', pfn, 'and forcing stage', stage
        else:
            print 'Reading pickle', pfn
            R = unpickle_from_file(pfn)
            return R

    if stage <= 0:
        P = {}
    else:
        prereq = prereqs.get(stage, stage - 1)
        if prereq is not None:
            P = runstage(prereq, picklepat, stagefunc,
                         force=force, prereqs=prereqs, **kwargs)
        else:
            # A prerequisite of None means this stage starts from scratch.
            P = {}

    print 'Running stage', stage
    R = stagefunc(stage, **P)
    print 'Stage', stage, 'finished'

    print 'Saving pickle', pfn
    pickle_to_file(R, pfn)
    print 'Saved', pfn
    return R
bsd-3-clause
Python
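A sketch of how these pieces compose; the stage functions and pickle pattern here are hypothetical, and CallGlobal assumes the named functions are resolvable where the pattern is evaluated:

def stage0(**kwargs):
    return dict(img='loaded')

def stage1(img=None, **kwargs):
    return dict(img=img, srcs='detected')

stagefunc = CallGlobal('stage%i')
# Runs stage0 as the prerequisite of stage1, pickling each result to
# demo-stage0.pickle / demo-stage1.pickle for reuse on the next run.
R = runstage(1, 'demo-stage%i.pickle', stagefunc)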
6a9447b6fb92369496178b1a379c724dfa9eb7aa
add management command to bootstrap Twilio gateway fees for incoming messages
puttarajubr/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq
corehq/apps/smsbillables/management/commands/bootstrap_twilio_gateway_incoming.py
corehq/apps/smsbillables/management/commands/bootstrap_twilio_gateway_incoming.py
import logging

from django.core.management.base import LabelCommand

from corehq.apps.accounting.models import Currency
from corehq.apps.twilio.models import TwilioBackend
from corehq.apps.sms.models import INCOMING
from corehq.apps.smsbillables.models import SmsGatewayFee, SmsGatewayFeeCriteria

logger = logging.getLogger('accounting')


def bootstrap_twilio_gateway_incoming(orm):
    currency_class = orm['accounting.Currency'] if orm else Currency
    sms_gateway_fee_class = orm['smsbillables.SmsGatewayFee'] if orm else SmsGatewayFee
    sms_gateway_fee_criteria_class = orm['smsbillables.SmsGatewayFeeCriteria'] if orm else SmsGatewayFeeCriteria

    # https://www.twilio.com/sms/pricing/us
    SmsGatewayFee.create_new(
        TwilioBackend.get_api_id(),
        INCOMING,
        0.0075,
        country_code=None,
        currency=currency_class.objects.get(code="USD"),
        fee_class=sms_gateway_fee_class,
        criteria_class=sms_gateway_fee_criteria_class,
    )

    logger.info("Updated INCOMING Twilio gateway fees.")


class Command(LabelCommand):
    help = "bootstrap incoming Twilio gateway fees"
    args = ""
    label = ""

    def handle(self, *args, **options):
        bootstrap_twilio_gateway_incoming(None)
bsd-3-clause
Python
adee3f0763a1119cfac212ce0eca88a08f7c65fa
Create masterStock.py
supthunder/premeStock
masterStock.py
masterStock.py
import requests
from bs4 import BeautifulSoup
import json

def loadMasterStock():
    url = "http://www.supremenewyork.com/mobile_stock.json"
    user = {"User-Agent": "Mozilla/5.0 (iPhone; CPU iPhone OS 10_2_1 like Mac OS X) AppleWebKit/602.4.6 (KHTML, like Gecko) Version/10.0 Mobile/14D27 Safari/602.1"}
    # user = {"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.95 Safari/537.36"}
    r = requests.get(url, headers=user)
    masterStock = json.loads(r.text)
    with open("masterstock.txt", 'w') as outfile:
        json.dump(masterStock, outfile, indent=4, sort_keys=True)
    print("Saved to masterstock.txt")

if __name__ == '__main__':
    loadMasterStock()
mit
Python
8397cd87fc05949f2f1b8d24505ae2b817f5dda1
Add unittest for classification_summary
okuta/chainer,jnishi/chainer,keisuke-umezawa/chainer,niboshi/chainer,okuta/chainer,wkentaro/chainer,hvy/chainer,pfnet/chainer,chainer/chainer,ktnyt/chainer,ktnyt/chainer,hvy/chainer,okuta/chainer,keisuke-umezawa/chainer,jnishi/chainer,niboshi/chainer,tkerola/chainer,wkentaro/chainer,cupy/cupy,ysekky/chainer,chainer/chainer,wkentaro/chainer,ktnyt/chainer,kashif/chainer,okuta/chainer,delta2323/chainer,niboshi/chainer,hvy/chainer,rezoo/chainer,ronekko/chainer,chainer/chainer,chainer/chainer,jnishi/chainer,keisuke-umezawa/chainer,wkentaro/chainer,keisuke-umezawa/chainer,kiyukuta/chainer,cupy/cupy,aonotas/chainer,niboshi/chainer,anaruse/chainer,ktnyt/chainer,cupy/cupy,cupy/cupy,hvy/chainer,jnishi/chainer
tests/chainer_tests/functions_tests/evaluation_tests/test_classification_summary.py
tests/chainer_tests/functions_tests/evaluation_tests/test_classification_summary.py
import unittest

import numpy
import six

import chainer
from chainer import cuda
from chainer import functions as F
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
from chainer.testing import condition
from chainer.utils import type_check


def recall(preds, ts, dtype, label_num):
    tp = numpy.zeros((label_num,), dtype=numpy.int32)
    support = numpy.zeros((label_num,), dtype=numpy.int32)
    for p, t in zip(preds.ravel(), ts.ravel()):
        support[t] += 1
        if p == t:
            tp[t] += 1
    return dtype(tp) / support


def precision(preds, ts, dtype, label_num):
    tp = numpy.zeros((label_num,), dtype=numpy.int32)
    relevant = numpy.zeros((label_num,), dtype=numpy.int32)
    for p, t in zip(preds.ravel(), ts.ravel()):
        relevant[p] += 1
        if p == t:
            tp[p] += 1
    return dtype(tp) / relevant


def f1_score(precision, recall, beta=1.0):
    beta_square = beta * beta
    return ((1 + beta_square) * precision * recall /
            (beta_square * precision + recall))


def support(ts, dtype, label_num):
    ret = numpy.zeros((label_num,), dtype=numpy.int32)
    for t in ts.ravel():
        ret[t] += 1
    return ret


@testing.parameterize(
    *testing.product_dict(
        [{'y_shape': (30, 3), 't_shape': (30,)},
         {'y_shape': (30, 3, 5), 't_shape': (30, 5)}],
        [{'dtype': numpy.float16},
         {'dtype': numpy.float32},
         {'dtype': numpy.float64}],
        [{'beta': 1.0},
         {'beta': 2.0}]
    )
)
class TestClassificationSummary(unittest.TestCase):

    def setUp(self):
        self.label_num = 3
        self.y = numpy.random.uniform(-1, 1, self.y_shape).astype(self.dtype)
        self.t = numpy.random.randint(
            0, self.label_num, self.t_shape).astype(numpy.int32)
        self.check_forward_options = {}
        if self.dtype == numpy.float16:
            self.check_forward_options = {'atol': 1e-4, 'rtol': 1e-3}

    def check_forward(self, xp):
        y = chainer.Variable(xp.asarray(self.y))
        t = chainer.Variable(xp.asarray(self.t))
        p_actual, r_actual, f1_actual, s_actual = F.classification_summary(
            y, t, self.label_num, self.beta)
        pred = self.y.argmax(axis=1).reshape(self.t.shape)
        p_expect = precision(pred, self.t, self.dtype, self.label_num)
        r_expect = recall(pred, self.t, self.dtype, self.label_num)
        f1_expect = f1_score(p_expect, r_expect, self.beta)
        s_expect = support(self.t, self.dtype, self.label_num)
        chainer.testing.assert_allclose(f1_actual.data, f1_expect,
                                        **self.check_forward_options)

    def test_forward_cpu(self):
        self.check_forward(numpy)

    @attr.gpu
    def test_forward_gpu(self):
        self.check_forward(cuda.cupy)
mit
Python
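A worked example of the reference metrics above on a tiny prediction set with three labels:

import numpy

preds = numpy.array([0, 1, 1, 2])
truth = numpy.array([0, 1, 2, 2])

# label 1 is predicted twice but correct once, so its precision is 0.5
precision(preds, truth, numpy.float32, 3)  # -> [1.0, 0.5, 1.0]
# label 2 appears twice in the truth but is recalled once, so its recall is 0.5
recall(preds, truth, numpy.float32, 3)     # -> [1.0, 1.0, 0.5]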
570fdc71697fba6180787b9309d3a2d49f512ed2
Add queueing python script
barbarahui/harvester,mredar/harvester,barbarahui/harvester,ucldc/harvester,mredar/harvester,ucldc/harvester
scripts/queue_sync_to_solr.py
scripts/queue_sync_to_solr.py
#! /bin/env python
# -*- coding: utf-8 -*-
import sys
import os
import logbook

import harvester.scripts.sync_couch_collection_to_solr  # target for the queued job
from harvester.config import config as config_harvest
from redis import Redis
from rq import Queue

EMAIL_RETURN_ADDRESS = os.environ.get('EMAIL_RETURN_ADDRESS',
                                      'example@example.com')
# csv delim email addresses
EMAIL_SYS_ADMIN = os.environ.get('EMAIL_SYS_ADMINS', None)
IMAGE_HARVEST_TIMEOUT = 144000


def def_args():
    import argparse
    parser = argparse.ArgumentParser(description='Harvest a collection')
    parser.add_argument('rq_queue', type=str, help='RQ Queue to put job in')
    parser.add_argument(
        'collection_key',
        type=int,
        help='URL for the collection Django tastypie api resource')
    return parser


def queue_image_harvest(redis_host, redis_port, redis_password, redis_timeout,
                        rq_queue, collection_key,
                        url_couchdb=None, object_auth=None,
                        get_if_object=False,
                        harvest_timeout=IMAGE_HARVEST_TIMEOUT):
    rQ = Queue(
        rq_queue,
        connection=Redis(
            host=redis_host,
            port=redis_port,
            password=redis_password,
            socket_connect_timeout=redis_timeout))
    job = rQ.enqueue_call(
        func=harvester.scripts.sync_couch_collection_to_solr.main,
        kwargs=dict(
            collection_key=collection_key,
        ))
    return job


def main(user_email, collection_key,
         log_handler=None, mail_handler=None,
         config_file='akara.ini', rq_queue=None, **kwargs):
    '''Runs a UCLDC sync to solr for collection key'''
    emails = [user_email]
    if EMAIL_SYS_ADMIN:
        emails.extend([u for u in EMAIL_SYS_ADMIN.split(',')])
    if not mail_handler:
        mail_handler = logbook.MailHandler(
            EMAIL_RETURN_ADDRESS, emails, level='ERROR', bubble=True)
    mail_handler.push_application()
    config = config_harvest(config_file=config_file)

    if not log_handler:
        log_handler = logbook.StderrHandler(level='DEBUG')
    log_handler.push_application()
    print config
    # the image_harvest should be a separate job, with a long timeout
    job = queue_image_harvest(
        config['redis_host'],
        config['redis_port'],
        config['redis_password'],
        config['redis_connect_timeout'],
        rq_queue=rq_queue,
        collection_key=collection_key,
        **kwargs)

    log_handler.pop_application()
    mail_handler.pop_application()


if __name__ == '__main__':
    parser = def_args()
    args = parser.parse_args(sys.argv[1:])
    if not args.rq_queue or not args.collection_key:
        parser.print_help()
        sys.exit(27)
    # The parser defines only rq_queue and collection_key, so notify the
    # configured return address rather than a per-user email.
    main(EMAIL_RETURN_ADDRESS,
         args.collection_key,
         rq_queue=args.rq_queue)

# Copyright © 2016, Regents of the University of California
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# - Redistributions of source code must retain the above copyright notice,
#   this list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright notice,
#   this list of conditions and the following disclaimer in the documentation
#   and/or other materials provided with the distribution.
# - Neither the name of the University of California nor the names of its
#   contributors may be used to endorse or promote products derived from this
#   software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
bsd-3-clause
Python
d06b3b41f786fc7cd2c05a6215fed026eef1cb8f
Add misc plugin.
SunDwarf/curiosity
curiosity/plugins/misc.py
curiosity/plugins/misc.py
import curio
import sys

import curious
from curious.commands import command
from curious.commands.context import Context
from curious.commands.plugin import Plugin
from curious.dataclasses.embed import Embed


class Misc(Plugin):
    """
    Miscellaneous commands.
    """
    @command()
    async def info(self, ctx: Context):
        """
        Shows info about the bot.
        """
        em = Embed(title=ctx.guild.me.user.name,
                   description="The official bot for the curious library")
        em.add_field(name="Curious version", value=curious.__version__)
        em.add_field(name="Curio version", value=curio.__version__)
        em.add_field(name="CPython version",
                     value="{}.{}.{}".format(*sys.version_info[0:3]))

        # bot stats
        em.add_field(name="Shard ID", value=ctx.event_context.shard_id)
        em.add_field(name="Shard count", value=ctx.event_context.shard_count)
        em.add_field(name="Heartbeats",
                     value=ctx.bot._gateways[ctx.event_context.shard_id].heartbeats)

        await ctx.channel.send(embed=em)
mit
Python
3d7a1ad963a11c8fc425c7d82f5e0f8f877dc861
Add Python benchmark
stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib
lib/node_modules/@stdlib/math/base/special/atan2/benchmark/python/benchmark.py
lib/node_modules/@stdlib/math/base/special/atan2/benchmark/python/benchmark.py
#!/usr/bin/env python
"""Benchmark atan2."""

import timeit

name = "atan2"
repeats = 3
iterations = 1000000


def print_version():
    """Print the TAP version."""
    print("TAP version 13")


def print_summary(total, passing):
    """Print the benchmark summary.

    # Arguments

    * `total`: total number of tests
    * `passing`: number of passing tests

    """
    print("#")
    print("1.." + str(total))  # TAP plan
    print("# total " + str(total))
    print("# pass " + str(passing))
    print("#")
    print("# ok")


def print_results(elapsed):
    """Print benchmark results.

    # Arguments

    * `elapsed`: elapsed time (in seconds)

    # Examples

    ``` python
    python> print_results(0.131009101868)
    ```
    """
    rate = iterations / elapsed

    print(" ---")
    print(" iterations: " + str(iterations))
    print(" elapsed: " + str(elapsed))
    print(" rate: " + str(rate))
    print(" ...")


def benchmark():
    """Run the benchmark and print benchmark results."""
    setup = "from math import atan2; from random import random;"
    stmt = "y = atan2(100.0*random()-0.0, 100.0*random()-0.0)"

    t = timeit.Timer(stmt, setup=setup)

    print_version()

    for i in xrange(3):
        print("# python::" + name)
        elapsed = t.timeit(number=iterations)
        print_results(elapsed)
        print("ok " + str(i+1) + " benchmark finished")

    print_summary(repeats, repeats)


def main():
    """Run the benchmark."""
    benchmark()


if __name__ == "__main__":
    main()
apache-2.0
Python
14160c8ee729a094b6a980ed7c94b37d11f6dfba
Create xor_recursive.py
rdustinb/GAPy
tests/xor_recursive.py
tests/xor_recursive.py
import sys

def xor(*store):
    print("---------------recursive call----------------")
    print(len(store))
    if(len(store) == 2):
        print("lowest level")
        b = store[0]
        a = store[1]
        print(b)
        print(a)
        return bool((a or b) and not(a and b))
    else:
        print("middle level")
        b = store[0]
        remaining = store[1:]
        print(b)
        print(remaining)
        return bool((xor(*remaining) or b) and not(xor(*remaining) and b))

if __name__ == '__main__':
    print("This is a testfile only, not to be used in production.")
    sys.exit()
    print("Expecting False: %s"%xor(0, 0, 0, 0)) # False
    print("Expecting True : %s"%xor(0, 0, 0, 1)) # True
    print("Expecting True : %s"%xor(0, 0, 1, 0)) # True
    print("Expecting False: %s"%xor(0, 0, 1, 1)) # False
    print("Expecting True : %s"%xor(0, 1, 0, 0)) # True
    print("Expecting False: %s"%xor(0, 1, 0, 1)) # False
    print("Expecting False: %s"%xor(0, 1, 1, 0)) # False
    print("Expecting True : %s"%xor(0, 1, 1, 1)) # True
    print("Expecting True : %s"%xor(1, 0, 0, 0)) # True
    print("Expecting False: %s"%xor(1, 0, 0, 1)) # False
    print("Expecting False: %s"%xor(1, 0, 1, 0)) # False
    print("Expecting True : %s"%xor(1, 0, 1, 1)) # True
    print("Expecting False: %s"%xor(1, 1, 0, 0)) # False
    print("Expecting True : %s"%xor(1, 1, 0, 1)) # True
    print("Expecting True : %s"%xor(1, 1, 1, 0)) # True
    print("Expecting False: %s"%xor(1, 1, 1, 1)) # False
mit
Python
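Tracing one call shows the structure: each level peels off the first argument and XORs it with the XOR of the rest (the base case needs at least two arguments):

# xor(1, 0, 1)
#   -> (xor(0, 1) or 1) and not (xor(0, 1) and 1)
#   -> (True or 1) and not (True and 1)
#   -> False    # consistent with 1 ^ 0 ^ 1 == 0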
d003babe55d8b7a202a50bc6eeb2e1113ef8247f
Add oeis plugin
thomasleese/smartbot-old,tomleese/smartbot,Cyanogenoid/smartbot,Muzer/smartbot
plugins/oeis.py
plugins/oeis.py
import requests
import re

import smartbot.utils.web  # provides the shared requests session used below


class Plugin:
    limit = 5

    def on_command(self, bot, msg, stdin, stdout, reply):
        session = smartbot.utils.web.requests_session()
        url = "http://oeis.org/search"
        payload = {
            "fmt": "text",
            "q": " ".join(msg["args"][1:]),
        }

        response = session.get(url, params=payload)
        if response.status_code == 200:
            self.i = -1
            # only process lines starting with a percent symbol
            for line in filter(lambda l: l.startswith("%"),
                               response.text.split("\n")):
                # content default is set to None
                flag, identifier, content, *_ = line.split(" ", 2) + [None]
                # process the line
                self.process(flag, identifier, content, stdout)
                # stop when limit is reached
                if self.i >= self.limit:
                    print("...", file=stdout)
                    break

    def process(self, flag, identifier, content, stdout):
        # increase the sequence number
        if flag[1] == "I":
            self.i += 1
        # print formatted sequence
        elif flag[1] == "S":
            sequence = re.sub(",", ", ", content)
            print("[{}] {}: {}...".format(self.i, identifier, sequence),
                  file=stdout)
        # print sequence name
        elif flag[1] == "N":
            print(content, file=stdout)

    def on_help(self):
        return "Usage: oeis <query> (see https://oeis.org/hints.html)"
mit
Python
c122db5ceda59d786bd550f586ea87d808595ab6
Add a script to reimport the LGA boundaries from the GADM.org data
mysociety/pombola,mysociety/pombola,mysociety/pombola,mysociety/pombola,mysociety/pombola,mysociety/pombola
pombola/nigeria/management/commands/nigeria_update_lga_boundaries_from_gadm.py
pombola/nigeria/management/commands/nigeria_update_lga_boundaries_from_gadm.py
from django.contrib.gis.gdal import DataSource
from django.core.management import BaseCommand
from django.db import transaction

from mapit.management.command_utils import save_polygons, fix_invalid_geos_geometry
from mapit.models import Area, Type


class Command(BaseCommand):
    help = "Update the Nigeria boundaries from GADM"
    args = '<SHP FILENAME>'

    def get_lga_area(self, lga_name, state_name):
        lga_name_in_db = {
            'Eastern Obolo': 'Eastern O bolo',
        }.get(lga_name, lga_name)
        # print "state:", state_name
        kwargs = {
            'type': self.lga_type,
            'name__iexact': lga_name_in_db,
            'parent_area__name': state_name,
        }
        try:
            area = Area.objects.get(**kwargs)
        except Area.DoesNotExist:
            del kwargs['parent_area__name']
            area = Area.objects.get(**kwargs)
        return area

    def fix_geometry(self, g):
        # Make a GEOS geometry only to check for validity:
        geos_g = g.geos
        if not geos_g.valid:
            geos_g = fix_invalid_geos_geometry(geos_g)
            if geos_g is None:
                print "The geometry was invalid and couldn't be fixed"
                g = None
            else:
                g = geos_g.ogr
        return g

    def handle(self, filename, **options):
        with transaction.atomic():
            self.lga_type = Type.objects.get(code='LGA')
            ds = DataSource(filename)
            layer = ds[0]
            for feature in layer:
                lga_name = unicode(feature['NAME_2'])
                state_name = unicode(feature['NAME_1'])
                print "Updating LGA {0} in state {1}".format(
                    lga_name, state_name
                )
                area = self.get_lga_area(lga_name, state_name)
                g = feature.geom.transform('4326', clone=True)
                g = self.fix_geometry(g)
                if g is None:
                    continue
                poly = [g]
                save_polygons({area.id: (area, poly)})
agpl-3.0
Python
8c176349d064db3dbc4db505cdc8a2d6a162dd56
Create a consolidated broker initialization script
chambers-brian/SIG_Digital-Strategy_SI_ODP_Backend,chambers-brian/SIG_Digital-Strategy_SI_ODP_Backend,fedspendingtransparency/data-act-broker-backend,fedspendingtransparency/data-act-broker-backend
dataactcore/scripts/initialize.py
dataactcore/scripts/initialize.py
import argparse
import logging
import os

from flask_bcrypt import Bcrypt

from dataactvalidator.app import createApp
from dataactbroker.scripts.setupEmails import setupEmails
from dataactcore.models.userModel import User
from dataactcore.interfaces.function_bag import createUserWithPassword
from dataactcore.scripts.setupAllDB import setupAllDB
from dataactbroker.handlers.aws.session import SessionTable
from dataactcore.interfaces.db import GlobalDB
from dataactcore.config import CONFIG_BROKER, CONFIG_DB
from dataactvalidator.scripts.loadTas import loadTas
from dataactvalidator.filestreaming.sqlLoader import SQLLoader
from dataactvalidator.filestreaming.schemaLoader import SchemaLoader
from dataactvalidator.scripts.loadFile import loadDomainValues
from dataactvalidator.scripts.loadSf133 import loadAllSf133


logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)

basePath = CONFIG_BROKER["path"]
validator_config_path = os.path.join(basePath, "dataactvalidator", "config")


def setupDB():
    """Set up broker database and initialize data."""
    logger.info('Setting up databases')
    print('setting up db')
    setupAllDB()
    setupEmails()


def createAdmin():
    """Create initial admin user."""
    logger.info('Creating admin user')
    adminEmail = CONFIG_BROKER['admin_email']
    adminPass = CONFIG_BROKER['admin_password']
    with createApp().app_context():
        sess = GlobalDB.db().session
        user = sess.query(User).filter(User.email == adminEmail).one_or_none()
        if not user:
            # once the rest of the setup scripts are updated to use
            # GlobalDB instead of databaseSession, move the app_context
            # creation up to initialize()
            user = createUserWithPassword(
                adminEmail, adminPass, Bcrypt(), permission=2)
    return user


def setupSessionTable():
    """Create Dynamo session table."""
    logger.info('Setting up DynamoDB session table')
    SessionTable.createTable(CONFIG_BROKER['local'], CONFIG_DB['dynamo_port'])


def loadTasLookup():
    """Load/update the TAS table to reflect the latest list."""
    logger.info('Loading TAS')
    loadTas()


def loadSqlRules():
    """Load the SQL-based validation rules."""
    logger.info('Loading SQL-based validation rules')
    SQLLoader.loadSql("sqlRules.csv")


def loadDomainValueFiles(basePath):
    """Load domain values (e.g., CGAC codes, object class, SF-133)."""
    logger.info('Loading domain values')
    loadDomainValues(basePath)


def loadSf133():
    logger.info('Loading SF-133')
    # Unlike other domain value files, SF 133 data is stored
    # on S3. If the application's 'use_aws' option is turned
    # off, tell the SF 133 load to look for files in the
    # validator's local config file instead
    if CONFIG_BROKER['use_aws']:
        loadAllSf133()
    else:
        loadAllSf133(validator_config_path)


def loadValidatorSchema():
    """Load file-level .csv schemas into the broker database."""
    logger.info('Loading validator schemas')
    SchemaLoader.loadAllFromPath(validator_config_path)


parser = argparse.ArgumentParser(description='Initialize the DATA Act Broker.')
parser.add_argument('-db', '--setup_db', help='Create broker database and helper tables', action='store_true')
parser.add_argument('-a', '--create_admin', help='Create an admin user', action='store_true')
parser.add_argument('-r', '--load_rules', help='Load SQL-based validation rules', action='store_true')
parser.add_argument('-t', '--update_tas', help='Update broker TAS list', action='store_true')
parser.add_argument('-s', '--update_sf133', help='Update broker SF-133 reports', action='store_true')
parser.add_argument('-v', '--update_validator', help='Update validator schema', action='store_true')
args = parser.parse_args()
print(args)

if args.setup_db:
    logger.info('Setting up databases')
    setupAllDB()
    setupEmails()
    setupSessionTable()

if args.create_admin:
    createAdmin()

if args.load_rules:
    loadSqlRules()

if args.update_tas:
    loadTas()

if args.update_sf133:
    loadSf133()

if args.update_validator:
    loadValidatorSchema()
cc0-1.0
Python
e60f13ab304c04e17af91bc87edc1891948a6f7a
Add validation function for python schema events
Parsely/parsely_raw_data,Parsely/parsely_raw_data
parsely_raw_data/validate.py
parsely_raw_data/validate.py
from __future__ import print_function

import logging
import pprint
from collections import defaultdict

from six import string_types

from .schema import SCHEMA

"""
Data Pipeline validation functions
"""

SCHEMA_DICT = None
REQ_FIELDS = None
CHECKS = {'req': 'Fields "{}" are required. ({} are present)',
          'size': 'Field "{}" is too large (size limit {})',
          'type': 'Field "{}" should be {}',
          'not_in_schema': 'Field "{}" not in schema. {}'}

log = logging.getLogger(__name__)


def _create_schema_dict():
    global SCHEMA_DICT, REQ_FIELDS
    SCHEMA_DICT = defaultdict(dict)
    for field in SCHEMA:
        conditions = {k: field.get(k) for k, _ in CHECKS.items()}
        if conditions['type'] == object:
            conditions['type'] = dict
        if conditions['type'] == str:
            conditions['type'] = string_types
        SCHEMA_DICT[field['key']] = conditions
    REQ_FIELDS = set([k for k, v in SCHEMA_DICT.items() if v['req']])

_create_schema_dict()


def _handle_warning(check_type, field, value, cond, raise_error=True):
    """If raise, raise an error. Otherwise just log."""
    msg = CHECKS[check_type].format(field, cond)
    if raise_error:
        raise ValueError(msg, value, type(value))
    else:
        log.warn(msg, value, type(value))
    return False


def validate(event, raise_error=True):
    """Checks whether an event matches the given schema.

    :param raise_error: let errors/exceptions bubble up.
    """
    present = REQ_FIELDS.intersection(set(event.keys()))
    if len(present) != len(REQ_FIELDS):
        return _handle_warning('req', list(REQ_FIELDS), '', list(present),
                               raise_error=raise_error)

    for field, value in event.items():
        try:
            field_reqs = SCHEMA_DICT[field]
            check_type = field_reqs['type']
            check_size = field_reqs['size']
            # verify type based on schema
            if value is not None and not isinstance(value, check_type):
                return _handle_warning('type', field, value, check_type,
                                       raise_error=raise_error)
            # verify size of string values
            if isinstance(value, string_types) and check_size is not None \
                    and len(value) > check_size:
                return _handle_warning('size', field, value, check_size,
                                       raise_error=raise_error)
        except KeyError as exc:
            return _handle_warning('not_in_schema', field, value, '',
                                   raise_error=raise_error)

    return True  # event passes tests


if __name__ == "__main__":
    log.warn = print

    # non schema fields
    d = {k: "test" for k in REQ_FIELDS}
    d['test'] = "test"
    assert validate(d, raise_error=False) != True

    # fields too long
    d = {k: "test" for k in REQ_FIELDS}
    d['utm_term'] = 'd' * 90
    assert validate(d, raise_error=False) != True

    # fields wrong type
    d = {k: "test" for k in REQ_FIELDS}
    d['timestamp_info_nginx_ms'] = 123456
    d['extra_data'] = "not a dict"
    assert validate(d, raise_error=False) != True
    d['visitor'] = "true"
    assert validate(d, raise_error=False) != True
    d['ip_lat'] = 4
    assert validate(d, raise_error=False) != True

    # not all required fields
    d = {}
    assert validate(d, raise_error=False) != True

    # error catching
    d = {}
    err = False
    try:
        validate(d)
    except Exception as e:
        err = True
    assert err == True
apache-2.0
Python
271be0bf16692aae2736d40e96447262e75c4a0f
add missing web.py
Mustard-Systems-Ltd/pyzmq,dash-dash/pyzmq,swn1/pyzmq,yyt030/pyzmq,caidongyun/pyzmq,caidongyun/pyzmq,Mustard-Systems-Ltd/pyzmq,ArvinPan/pyzmq,caidongyun/pyzmq,swn1/pyzmq,ArvinPan/pyzmq,yyt030/pyzmq,yyt030/pyzmq,dash-dash/pyzmq,swn1/pyzmq,ArvinPan/pyzmq,Mustard-Systems-Ltd/pyzmq,dash-dash/pyzmq
zmq/web.py
zmq/web.py
#-----------------------------------------------------------------------------
#  Copyright (C) 2013 Brian Granger, Min Ragan-Kelley
#
#  This file is part of pyzmq
#
#  Distributed under the terms of the New BSD License.  The full license is in
#  the file COPYING.BSD, distributed as part of this software.
#-----------------------------------------------------------------------------

raise ImportError('\n'.join([
    "zmq.web is now maintained separately as zmqweb,",
    "which can be found at https://github.com/ellisonbg/zmqweb"
]))
bsd-3-clause
Python
0a7f1695f9155bbe10b933e47637e4df0e2e31d4
Create HttpAndWeb.py
jimkiiru/james-kiiru-bc17-week1,jimkiiru/james-kiiru-bc17-week1
day3/HttpAndWeb.py
day3/HttpAndWeb.py
import requests


def Get(url, PostId):
    try:
        isinstance(int(PostId), int)
        if int(PostId) <= 100 and int(PostId) > 0:
            r = requests.get(url + PostId)
            return r
        else:
            print("Number must be between 1 and 100")
    except ValueError as err:
        raise(err)
    return "No Results"


def Post(PostUrl, title, body, userId=11):
    # payload for the POST request
    Postdata = {
        'title': title,
        'body': body,
        'userId': userId
    }
    request = requests.post(PostUrl, data=Postdata)
    return request


def main():
    print("Python HTTP API command line app %s\n" % ("-"*31))
    print("Simple Python HTTP API command line app")
    url = "https://jsonplaceholder.typicode.com/posts/"
    PostId = input("Enter a number between 1 and 100: ")
    get = Get(url, PostId)
    print("GET Response data\n\t%s\n%s\n\tStatus code\n\t%s\n%s\n\tHeaders\n\t%s\n%s"
          % ("-"*17, get.text, "-"*11, get.status_code, "-"*7, get.headers))
    title = input("Enter a title for your post: ")
    body = input("Enter a body for your post: ")
    post = Post(url, title, body)
    print("\tPOST Response data\n\t%s\n%s\n\tStatus code\n\t%s\n%s\n\tHeaders\n\t%s\n%s"
          % ("-"*17, post.text, "-"*11, post.status_code, "-"*7, post.headers))


if __name__ == '__main__':
    main()
mit
Python
9fa6ec498d70afdb4f28410d4ac7c29780c60861
Add first stab at origen submodule
ergs/transmutagen,ergs/transmutagen
transmutagen/origen.py
transmutagen/origen.py
from subprocess import run

from pyne.origen22 import (nlibs, write_tape5_irradiation, write_tape4,
                           parse_tape6, parse_tape9, merge_tape9, write_tape9)
from pyne.material import from_atom_frac

if __name__ == '__main__':
    ORIGEN = '/home/origen22/code/o2_therm_linux.exe'
    xs_TAPE9 = "/Users/aaronmeurer/Documents/origen/C371ALLCP.03/CCC371.03/origen22/libs/pwru50.lib"
    decay_TAPE9 = "/Users/aaronmeurer/Documents/origen/C371ALLCP.03/CCC371.03/origen22/libs/decay.lib"

    parsed_xs_tape9 = parse_tape9(xs_TAPE9)
    parsed_decay_tape9 = parse_tape9(decay_TAPE9)

    merged_tape9 = merge_tape9([parsed_decay_tape9, parsed_xs_tape9])

    # Can set outfile to change directory, but the file name needs to be
    # TAPE9.INP.
    write_tape9(merged_tape9)

    xsfpy_nlb = nlibs(parsed_xs_tape9)

    time = 2.6e6

    # Can set outfile, but the file name should be called TAPE5.INP.
    write_tape5_irradiation("IRF", time/(60*60*24), 4e14,
                            xsfpy_nlb=xsfpy_nlb, cut_off=0)

    M = from_atom_frac({"": 1}, mass=1, atoms_per_molecule=1)

    write_tape4(M)

    run(ORIGEN)

    data = parse_tape6()
    print(data)
bsd-3-clause
Python