commit
stringlengths 40
40
| subject
stringlengths 4
1.73k
| repos
stringlengths 5
127k
| old_file
stringlengths 2
751
| new_file
stringlengths 2
751
| new_contents
stringlengths 1
8.98k
| old_contents
stringlengths 0
6.59k
| license
stringclasses 13
values | lang
stringclasses 23
values |
---|---|---|---|---|---|---|---|---|
027632cbac1cfa42e2f66b53ae6c3f22bcce630e
|
Fix cdd package definition, dotted is a property (#2054)
|
tmerrick1/spack,iulian787/spack,EmreAtes/spack,matthiasdiener/spack,LLNL/spack,lgarren/spack,krafczyk/spack,EmreAtes/spack,skosukhin/spack,mfherbst/spack,mfherbst/spack,lgarren/spack,lgarren/spack,tmerrick1/spack,TheTimmy/spack,EmreAtes/spack,krafczyk/spack,krafczyk/spack,TheTimmy/spack,mfherbst/spack,TheTimmy/spack,mfherbst/spack,skosukhin/spack,skosukhin/spack,lgarren/spack,mfherbst/spack,LLNL/spack,skosukhin/spack,LLNL/spack,tmerrick1/spack,krafczyk/spack,EmreAtes/spack,EmreAtes/spack,iulian787/spack,iulian787/spack,matthiasdiener/spack,LLNL/spack,iulian787/spack,skosukhin/spack,krafczyk/spack,matthiasdiener/spack,iulian787/spack,TheTimmy/spack,tmerrick1/spack,TheTimmy/spack,tmerrick1/spack,matthiasdiener/spack,matthiasdiener/spack,LLNL/spack,lgarren/spack
|
var/spack/repos/builtin/packages/cdd/package.py
|
var/spack/repos/builtin/packages/cdd/package.py
|
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Cdd(Package):
"""The program cdd+ (cdd, respectively) is a C++ (ANSI C)
implementation of the Double Description Method [MRTT53] for
generating all vertices (i.e. extreme points) and extreme rays of
a general convex polyhedron given by a system of linear
inequalities"""
homepage = "https://www.inf.ethz.ch/personal/fukudak/cdd_home/cdd.html"
url = "ftp://ftp.ifor.math.ethz.ch/pub/fukuda/cdd/cdd-061a.tar.gz"
def url_for_version(self, version):
return ("ftp://ftp.ifor.math.ethz.ch/pub/fukuda/cdd/cdd-%s.tar.gz" %
str(version.dotted).replace('.', ''))
version('0.61a', '22c24a7a9349dd7ec0e24531925a02d9')
depends_on("libtool", type="build")
patch("Makefile.spack.patch")
def install(self, spec, prefix):
# The Makefile isn't portable; use our own instead
makeargs = ["-f", "Makefile.spack", "PREFIX=%s" % prefix]
make(*makeargs)
make("install", *makeargs)
|
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Cdd(Package):
"""The program cdd+ (cdd, respectively) is a C++ (ANSI C)
implementation of the Double Description Method [MRTT53] for
generating all vertices (i.e. extreme points) and extreme rays of
a general convex polyhedron given by a system of linear
inequalities"""
homepage = "https://www.inf.ethz.ch/personal/fukudak/cdd_home/cdd.html"
url = "ftp://ftp.ifor.math.ethz.ch/pub/fukuda/cdd/cdd-061a.tar.gz"
def url_for_version(self, version):
return ("ftp://ftp.ifor.math.ethz.ch/pub/fukuda/cdd/cdd-%s.tar.gz" %
str(version.dotted()).replace('.', ''))
version('0.61a', '22c24a7a9349dd7ec0e24531925a02d9')
depends_on("libtool", type="build")
patch("Makefile.spack.patch")
def install(self, spec, prefix):
# The Makefile isn't portable; use our own instead
makeargs = ["-f", "Makefile.spack", "PREFIX=%s" % prefix]
make(*makeargs)
make("install", *makeargs)
|
lgpl-2.1
|
Python
|
59d7854b8988c6525df8eb1ef7f2cde58c63eb0a
|
add build_directory, remove old hack (#10112)
|
LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack,iulian787/spack,LLNL/spack,iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack
|
var/spack/repos/builtin/packages/clp/package.py
|
var/spack/repos/builtin/packages/clp/package.py
|
# Copyright 2013-2018 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Clp(AutotoolsPackage):
"""Clp (Coin-or linear programming) is an open-source
linear programming solver written in C++."""
homepage = "https://projects.coin-or.org/Clp"
url = "https://www.coin-or.org/download/source/Clp/Clp-1.16.11.tgz"
version('1.16.11', sha256='b525451423a9a09a043e6a13d9436e13e3ee7a7049f558ad41a110742fa65f39')
build_directory = 'spack-build'
|
# Copyright 2013-2018 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Clp(AutotoolsPackage):
"""Clp (Coin-or linear programming) is an open-source
linear programming solver written in C++."""
homepage = "https://projects.coin-or.org/Clp"
url = "https://www.coin-or.org/download/source/Clp/Clp-1.16.11.tgz"
version('1.16.11', sha256='b525451423a9a09a043e6a13d9436e13e3ee7a7049f558ad41a110742fa65f39')
def configure_args(self):
return [
'--with-clp-datadir={0}/Data'.format(self.build_directory),
]
|
lgpl-2.1
|
Python
|
090ca0e14621d287e9f5d1d301589d99d95cd224
|
Remove unused function
|
frostidaho/dynmen
|
src/dynmen/cmd/__init__.py
|
src/dynmen/cmd/__init__.py
|
# -*- coding: utf-8 -*-
from collections import namedtuple as _namedtuple
ProcStatus = _namedtuple('ProcStatus', 'stdout stderr returncode')
|
# -*- coding: utf-8 -*-
from collections import namedtuple as _namedtuple
ProcStatus = _namedtuple('ProcStatus', 'stdout stderr returncode')
def _to_bytes(obj, entry_sep=b''):
if isinstance(obj, bytes):
return obj
return entry_sep.join(obj)
|
mit
|
Python
|
5fbb4ff5d3427c8f4050fc5b75d4a6a2c15351c6
|
Set pygments style to monokai.
|
donnemartin/outdated-donnemartin.github.io,donnemartin/outdated-donnemartin.github.io,donnemartin/outdated-donnemartin.github.io,donnemartin/outdated-donnemartin.github.io,donnemartin/outdated-donnemartin.github.io
|
pelicanconf.py
|
pelicanconf.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = 'Donne Martin'
SITENAME = 'Donne Martin'
SITEURL = ''
PATH = 'content'
TIMEZONE = 'America/New_York'
DEFAULT_LANG = 'en'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
HIDE_SIDEBAR = True
PYGMENTS_STYLE = 'monokai'
# Blogroll
LINKS = (('Pelican', 'http://getpelican.com/'),
('Python.org', 'http://python.org/'),
('Jinja2', 'http://jinja.pocoo.org/'),
('You can modify those links in your config file', '#'),)
# Social widget
SOCIAL = (('You can add links in your config file', '#'),
('Another social link', '#'),)
STATIC_PATHS = [
'images',
'extra/favicon.ico'
]
EXTRA_PATH_METADATA = {
'extra/favicon.ico': {'path': 'favicon.ico'}
}
DEFAULT_PAGINATION = 10
# Uncomment following line if you want document-relative URLs when developing
RELATIVE_URLS = True
# THEME = 'pelican-bootstrap3'
# BOOTSTRAP_THEME = 'readable'
THEME = 'startbootstrap-agency'
BOOTSTRAP_THEME = ''
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = 'Donne Martin'
SITENAME = 'Donne Martin'
SITEURL = ''
PATH = 'content'
TIMEZONE = 'America/New_York'
DEFAULT_LANG = 'en'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
HIDE_SIDEBAR = True
# Blogroll
LINKS = (('Pelican', 'http://getpelican.com/'),
('Python.org', 'http://python.org/'),
('Jinja2', 'http://jinja.pocoo.org/'),
('You can modify those links in your config file', '#'),)
# Social widget
SOCIAL = (('You can add links in your config file', '#'),
('Another social link', '#'),)
STATIC_PATHS = [
'images',
'extra/favicon.ico'
]
EXTRA_PATH_METADATA = {
'extra/favicon.ico': {'path': 'favicon.ico'}
}
DEFAULT_PAGINATION = 10
# Uncomment following line if you want document-relative URLs when developing
RELATIVE_URLS = True
# THEME = 'pelican-bootstrap3'
# BOOTSTRAP_THEME = 'readable'
THEME = 'startbootstrap-agency'
BOOTSTRAP_THEME = ''
|
mit
|
Python
|
cde48aff408c0d2c413e4316a1ef5c808d2f444c
|
add DISPLAY_NAVBAR to configuration
|
fly/burrito.sh,fly/burrito.sh,bsdlp/burrito.sh,bsdlp/burrito.sh
|
pelicanconf.py
|
pelicanconf.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = u'jchen'
SITENAME = u'BURRITO 4 LYFE'
SITEURL = ''
TIMEZONE = 'ETC/UTC'
DEFAULT_LANG = u'en'
CSS_FILE = 'style.css'
# theme stuff
THEME = '/Users/jchen/git/pelican/burrito'
# plugins
PLUGIN_PATH = '/Users/jchen/git/pelican/plugins-pelican'
PLUGINS = ['gravatar']
# gravatar email
AUTHOR_EMAIL = 'fly@sjchen.net'
# social
TWITTER_USERNAME = 's_jchen'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
DEFAULT_PAGINATION = 10
DISPLAY_CATEGORIES_ON_MENU = False
DISPLAY_MENUITEMS_ON_MENU = False
DISPLAY_NAVBAR = False
DISPLAY_PAGES_ON_MENU = False
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = u'jchen'
SITENAME = u'BURRITO 4 LYFE'
SITEURL = ''
TIMEZONE = 'ETC/UTC'
DEFAULT_LANG = u'en'
CSS_FILE = 'style.css'
# theme stuff
THEME = '/Users/jchen/git/pelican/burrito'
# plugins
PLUGIN_PATH = '/Users/jchen/git/pelican/plugins-pelican'
PLUGINS = ['gravatar']
# gravatar email
AUTHOR_EMAIL = 'fly@sjchen.net'
# social
TWITTER_USERNAME = 's_jchen'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
DEFAULT_PAGINATION = 10
DISPLAY_CATEGORIES_ON_MENU = False
DISPLAY_MENUITEMS_ON_MENU = False
DISPLAY_PAGES_ON_MENU = False
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
|
bsd-3-clause
|
Python
|
a6ea16ded53bb688113012114a25b7d75e6460c0
|
Fix PVM dependencies (#20951)
|
LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack
|
var/spack/repos/builtin/packages/pvm/package.py
|
var/spack/repos/builtin/packages/pvm/package.py
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
import subprocess
class Pvm(MakefilePackage):
"""PVM (Parallel Virtual Machine) is a software package that permits a
heterogeneous collection of Unix and/or Windows computers hooked together
by a network to be used as a single large parallel computer."""
homepage = "http://www.csm.ornl.gov/pvm/pvm_home.html"
url = "http://www.netlib.org/pvm3/pvm3.4.6.tgz"
version('3.4.6', sha256='482665e9bc975d826bcdacf1df1d42e43deda9585a2c430fd3b7b7ed08eada44')
depends_on('m4', type='build')
depends_on('libtirpc', type='link')
parallel = False
@property
def pvm_arch(self):
"""Returns the appropriate PVM_ARCH."""
process = subprocess.Popen(['lib/pvmgetarch'], stdout=subprocess.PIPE)
return process.communicate()[0].strip().decode()
def edit(self, spec, prefix):
# Before building PVM, you must set the environment
# variable "PVM_ROOT" to the path where PVM resides
env['PVM_ROOT'] = self.stage.source_path
def setup_build_environment(self, env):
tirpc = self.spec['libtirpc'].prefix
env.prepend_path(
'SPACK_INCLUDE_DIRS',
tirpc.include.tirpc,
)
env.set('SPACK_LDLIBS', '-ltirpc')
def install(self, spec, prefix):
pvm_arch = self.pvm_arch
install_tree(join_path('bin', pvm_arch), prefix.bin)
install_tree('include', prefix.include)
install_tree(join_path('lib', pvm_arch), prefix.lib)
install_tree('man', prefix.man)
def setup_run_environment(self, env):
# Before running PVM, you must set the environment
# variable "PVM_ROOT" to the path where PVM resides
env.set('PVM_ROOT', self.prefix)
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
import subprocess
class Pvm(MakefilePackage):
"""PVM (Parallel Virtual Machine) is a software package that permits a
heterogeneous collection of Unix and/or Windows computers hooked together
by a network to be used as a single large parallel computer."""
homepage = "http://www.csm.ornl.gov/pvm/pvm_home.html"
url = "http://www.netlib.org/pvm3/pvm3.4.6.tgz"
version('3.4.6', sha256='482665e9bc975d826bcdacf1df1d42e43deda9585a2c430fd3b7b7ed08eada44')
parallel = False
@property
def pvm_arch(self):
"""Returns the appropriate PVM_ARCH."""
process = subprocess.Popen(['lib/pvmgetarch'], stdout=subprocess.PIPE)
return process.communicate()[0].strip()
def edit(self, spec, prefix):
# Before building PVM, you must set the environment
# variable "PVM_ROOT" to the path where PVM resides
env['PVM_ROOT'] = self.stage.source_path
def install(self, spec, prefix):
pvm_arch = self.pvm_arch
install_tree(join_path('bin', pvm_arch), prefix.bin)
install_tree('include', prefix.include)
install_tree(join_path('lib', pvm_arch), prefix.lib)
install_tree('man', prefix.man)
def setup_run_environment(self, env):
# Before running PVM, you must set the environment
# variable "PVM_ROOT" to the path where PVM resides
env.set('PVM_ROOT', self.prefix)
|
lgpl-2.1
|
Python
|
0ee55bd73369c69638a401f4beef551a79bc820c
|
remove unneeded conf options
|
fly/burrito.sh,bsdlp/burrito.sh,bsdlp/burrito.sh,fly/burrito.sh
|
pelicanconf.py
|
pelicanconf.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = u'jchen'
SITENAME = u'BURRITO 4 LYFE'
SITEURL = ''
TIMEZONE = 'ETC/UTC'
DEFAULT_LANG = u'en'
CSS_FILE = 'style.css'
# theme stuff
THEME = '/Users/jchen/git/pelican/burrito'
# plugins
PLUGIN_PATH = '/Users/jchen/git/pelican/plugins-pelican'
PLUGINS = ['gravatar']
# gravatar email
AUTHOR_EMAIL = 'fly@sjchen.net'
# social
TWITTER_USERNAME = 's_jchen'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
DEFAULT_PAGINATION = 10
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = u'jchen'
SITENAME = u'BURRITO 4 LYFE'
SITEURL = ''
TIMEZONE = 'ETC/UTC'
DEFAULT_LANG = u'en'
CSS_FILE = 'style.css'
# theme stuff
THEME = '/Users/jchen/git/pelican/burrito'
# plugins
PLUGIN_PATH = '/Users/jchen/git/pelican/plugins-pelican'
PLUGINS = ['gravatar']
# gravatar email
AUTHOR_EMAIL = 'fly@sjchen.net'
# social
TWITTER_USERNAME = 's_jchen'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
# Blogroll
LINKS = (('Pelican', 'http://getpelican.com/'),
('Python.org', 'http://python.org/'),
('Jinja2', 'http://jinja.pocoo.org/'),
('You can modify those links in your config file', '#'),)
# Social widget
SOCIAL = (('You can add links in your config file', '#'),
('Another social link', '#'),)
# PLUGINS = ['pelican_youtube',]
DEFAULT_PAGINATION = 10
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
|
bsd-3-clause
|
Python
|
41dea616b22379fa2b7c1d00e21fe0288dc61622
|
Fix whitespace incoherence, causing pylint errors.
|
riannucci/rietveldv2,riannucci/rietveldv2
|
codereview/middleware.py
|
codereview/middleware.py
|
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Custom middleware. Some of this may be generally useful."""
import logging
from google.appengine.api import users
from google.appengine.runtime import apiproxy_errors
from google.appengine.runtime import DeadlineExceededError
from django.http import HttpResponse
from django.template import Context, loader
import models
class AddUserToRequestMiddleware(object):
"""Add a user object and a user_is_admin flag to each request."""
def process_request(self, request):
request.user = users.get_current_user()
request.user_is_admin = users.is_current_user_admin()
# Update the cached value of the current user's Account
account = None
if request.user is not None:
account = models.Account.get_account_for_user(request.user)
models.Account.current_user_account = account
class PropagateExceptionMiddleware(object):
"""Catch exceptions, log them and return a friendly error message."""
def _text_requested(self, request):
"""Returns True if a text/plain response is requested."""
# We could use a better heuristics that takes multiple
# media_ranges and quality factors into account. For now we return
# True iff 'text/plain' is the only media range the request
# accepts.
media_ranges = request.META.get('HTTP_ACCEPT', '').split(',')
return len(media_ranges) == 1 and media_ranges[0] == 'text/plain'
def process_exception(self, request, exception):
if isinstance(exception, apiproxy_errors.CapabilityDisabledError):
msg = ('Rietveld: App Engine is undergoing maintenance. '
'Please try again in a while.')
status = 503
elif isinstance(exception, (DeadlineExceededError, MemoryError)):
msg = ('Rietveld is too hungry at the moment.'
'Please try again in a while.')
status = 503
else:
msg = 'Unhandled exception.'
status = 500
logging.exception('%s: ' % exception.__class__.__name__)
technical = '%s [%s]' % (exception, exception.__class__.__name__)
if self._text_requested(request):
content = '%s\n\n%s\n' % (msg, technical)
content_type = 'text/plain'
else:
tpl = loader.get_template('exception.html')
ctx = Context({'msg': msg, 'technical': technical})
content = tpl.render(ctx)
content_type = 'text/html'
return HttpResponse(content, status=status, content_type=content_type)
|
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Custom middleware. Some of this may be generally useful."""
import logging
from google.appengine.api import users
from google.appengine.runtime import apiproxy_errors
from google.appengine.runtime import DeadlineExceededError
from django.http import HttpResponse
from django.template import Context, loader
import models
class AddUserToRequestMiddleware(object):
"""Add a user object and a user_is_admin flag to each request."""
def process_request(self, request):
request.user = users.get_current_user()
request.user_is_admin = users.is_current_user_admin()
# Update the cached value of the current user's Account
account = None
if request.user is not None:
account = models.Account.get_account_for_user(request.user)
models.Account.current_user_account = account
class PropagateExceptionMiddleware(object):
"""Catch exceptions, log them and return a friendly error message."""
def _text_requested(self, request):
"""Returns True if a text/plain response is requested."""
# We could use a better heuristics that takes multiple
# media_ranges and quality factors into account. For now we return
# True iff 'text/plain' is the only media range the request
# accepts.
media_ranges = request.META.get('HTTP_ACCEPT', '').split(',')
return len(media_ranges) == 1 and media_ranges[0] == 'text/plain'
def process_exception(self, request, exception):
if isinstance(exception, apiproxy_errors.CapabilityDisabledError):
msg = ('Rietveld: App Engine is undergoing maintenance. '
'Please try again in a while.')
status = 503
elif isinstance(exception, (DeadlineExceededError, MemoryError)):
msg = ('Rietveld is too hungry at the moment.'
'Please try again in a while.')
status = 503
else:
msg = 'Unhandled exception.'
status = 500
logging.exception('%s: ' % exception.__class__.__name__)
technical = '%s [%s]' % (exception, exception.__class__.__name__)
if self._text_requested(request):
content = '%s\n\n%s\n' % (msg, technical)
content_type = 'text/plain'
else:
tpl = loader.get_template('exception.html')
ctx = Context({'msg': msg, 'technical': technical})
content = tpl.render(ctx)
content_type = 'text/html'
return HttpResponse(content, status=status, content_type=content_type)
|
apache-2.0
|
Python
|
5dc4641a40ff25b439541f6c3c02639a53346985
|
Fix name of 'Betty' comic
|
klette/comics,jodal/comics,jodal/comics,datagutten/comics,datagutten/comics,klette/comics,datagutten/comics,datagutten/comics,klette/comics,jodal/comics,jodal/comics
|
comics/crawlers/betty.py
|
comics/crawlers/betty.py
|
from comics.crawler.base import BaseComicsComComicCrawler
from comics.crawler.meta import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Betty'
language = 'en'
url = 'http://comics.com/betty/'
start_date = '1991-01-01'
history_capable_date = '2008-10-13'
schedule = 'Mo,Tu,We,Th,Fr,Sa,Su'
time_zone = -5
rights = 'Delainey & Gerry Rasmussen'
class ComicCrawler(BaseComicsComComicCrawler):
def _get_url(self):
self._get_url_helper('Betty')
|
from comics.crawler.base import BaseComicsComComicCrawler
from comics.crawler.meta import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Get Fuzzy'
language = 'en'
url = 'http://comics.com/betty/'
start_date = '1991-01-01'
history_capable_date = '2008-10-13'
schedule = 'Mo,Tu,We,Th,Fr,Sa,Su'
time_zone = -5
rights = 'Delainey & Gerry Rasmussen'
class ComicCrawler(BaseComicsComComicCrawler):
def _get_url(self):
self._get_url_helper('Betty')
|
agpl-3.0
|
Python
|
4180c1aa0314ee86356f99a7f209459189fb4a5d
|
Correct dicts comparison
|
pombredanne/django-jet,geex-arts/django-jet,geex-arts/django-jet,geex-arts/django-jet,pombredanne/django-jet,rcotrina94/django-jet,rcotrina94/django-jet,SalahAdDin/django-jet,rcotrina94/django-jet,pombredanne/django-jet,SalahAdDin/django-jet,SalahAdDin/django-jet
|
jet/tests/test_utils.py
|
jet/tests/test_utils.py
|
import json
from django.test import TestCase
from jet.tests.models import TestModel
from jet.utils import JsonResponse, get_model_instance_label
class UtilsTestCase(TestCase):
def test_json_response(self):
response = JsonResponse({'str': 'string', 'int': 1})
response_dict = json.loads(response.content)
expected_dict = {"int": 1, "str": "string"}
self.assertEqual(response_dict, expected_dict)
self.assertEqual(response.get('Content-Type'), 'application/json')
def test_get_model_instance_label(self):
field1 = 'value'
field2 = 2
pinned_application = TestModel.objects.create(field1=field1, field2=field2)
self.assertEqual(get_model_instance_label(pinned_application), '%s%d' % (field1, field2))
|
from django.test import TestCase
from jet.tests.models import TestModel
from jet.utils import JsonResponse, get_model_instance_label
class UtilsTestCase(TestCase):
def test_json_response(self):
response = JsonResponse({'str': 'string', 'int': 1})
self.assertEqual(response.content, '{"int": 1, "str": "string"}')
self.assertEqual(response.get('Content-Type'), 'application/json')
def test_get_model_instance_label(self):
field1 = 'value'
field2 = 2
pinned_application = TestModel.objects.create(field1=field1, field2=field2)
self.assertEqual(get_model_instance_label(pinned_application), '%s%d' % (field1, field2))
|
agpl-3.0
|
Python
|
73866e1ea0066fe45fbbf8a654b438b554d18a26
|
update hedwig version
|
ofpiyush/hedwig-py
|
hedwig/__init__.py
|
hedwig/__init__.py
|
"""
(@,@)
_ _
/\ /\___ __| |_ _(_) __ _
/ /_/ / _ \/ _` \ \ /\ / / |/ _` |
/ __ / __/ (_| |\ V V /| | (_| |
\/ /_/ \___|\__,_| \_/\_/ |_|\__, |
|___/
"""
__title__ = 'Hedwig Python'
__version__ = '0.1.6'
__author__ = 'Piyush'
# Version synonym
VERSION = __version__
import logging
try:
# not available in python 2.6
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Add NullHandler to prevent logging warnings
logging.getLogger(__name__).addHandler(NullHandler())
|
"""
(@,@)
_ _
/\ /\___ __| |_ _(_) __ _
/ /_/ / _ \/ _` \ \ /\ / / |/ _` |
/ __ / __/ (_| |\ V V /| | (_| |
\/ /_/ \___|\__,_| \_/\_/ |_|\__, |
|___/
"""
__title__ = 'Hedwig Python'
__version__ = '0.1.5'
__author__ = 'Piyush'
# Version synonym
VERSION = __version__
import logging
try:
# not available in python 2.6
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Add NullHandler to prevent logging warnings
logging.getLogger(__name__).addHandler(NullHandler())
|
mit
|
Python
|
d8dda85c786c38ad2fbb2e40d44567b917431cb8
|
Update zero_one_normalization.py
|
greenelab/adage,greenelab/adage,greenelab/adage
|
Data_collection_processing/zero_one_normalization.py
|
Data_collection_processing/zero_one_normalization.py
|
'''
Linearly scale the expression range of one gene to be between 0 and 1.
If a reference dataset is provided, then the scaling of one gene in the
target dataset in done using the minimun and range of that gene in the
reference dataset.
'''
import sys
import argparse
sys.path.insert(0, 'Data_collection_processing/')
from pcl import PCLfile
parser = argparse.ArgumentParser(description="Linearly scale the expression range\
of one gene to be between 0 and 1. If a reference dataset is provided, then \
the scaling of one gene in the target dataset in done using the minimum and \
range of that gene in the reference dataset.")
parser.add_argument('tar', help='the target file for zero one normalization')
parser.add_argument('out', help='the output file after zero one normalization')
parser.add_argument('ref', help='the reference file. If reference file\
is \'None\', then zero one normalization will be done based on\
target file itself.')
args = parser.parse_args()
def zero_one_normal(tar=None, out=None, ref=None):
'''
tar: the target file for zero one normalization
out: the output file after zero one normalization
ref: the reference file. If reference file is 'None',
then zero one normalization will be done based on
target file itself.
'''
if ref == 'None':
tar_data = PCLfile(tar, skip_col=0)
tar_data.zero_one_normalization()
tar_data.write_pcl(out)
else:
ref_data = PCLfile(ref, skip_col=0)
tar_data = PCLfile(tar, skip_col=0)
for i in xrange(ref_data.data_matrix.shape[0]):
row_minimum = ref_data.data_matrix[i, :].min()
row_maximum = ref_data.data_matrix[i, :].max()
row_range = row_maximum - row_minimum
tar_data.data_matrix[i, :] =\
(tar_data.data_matrix[i, :] - row_minimum)/row_range
# bound the values to be between 0 and 1
tar_data.data_matrix[i, :] =\
[0 if x < 0 else x for x in tar_data.data_matrix[i, :]]
tar_data.data_matrix[i, :] =\
[1 if x > 1 else x for x in tar_data.data_matrix[i, :]]
tar_data.write_pcl(out)
zero_one_normal(tar=args.tar, out=args.out, ref=args.ref)
|
'''
Linearly scale the expression range of one gene to be between 0 and 1.
If a reference dataset is provided, then the scaling of one gene in the
target dataset in done using the minimun and range of that gene in the
reference dataset.
'''
import sys
import argparse
sys.path.insert(0, 'Data_collection_processing/')
from pcl import PCLfile
parser = argparse.ArgumentParser(description="Linearly scale the expression range\
of one gene to be between 0 and 1. If a reference dataset is provided, then \
the scaling of one gene in the target dataset in done using the minimum and \
range of that gene in the reference dataset.")
parser.add_argument('tar', help='the target file for zero one normalization')
parser.add_argument('out', help='the output file after zero one normalization')
parser.add_argument('ref', help='the reference file. If reference file\
is \'None\', then zero one normalization will be done based on\
target file itself.')
args = parser.parse_args()
def zero_one_normal(tar=None, out=None, ref=None):
'''
tar: the target file for zero one normalization
out: the output file after zero one normalization
ref: the reference file. If reference file is 'None',
then zero one normalization will be done based on
target file itself.
'''
if ref == 'None':
tar_data = PCLfile(tar, skip_col=0)
tar_data.zero_one_normalization()
tar_data.write_pcl(out)
else:
ref_data = PCLfile(ref, skip_col=0)
tar_data = PCLfile(tar, skip_col=0)
for i in xrange(ref_data.data_matrix.shape[0]):
row_minimum = ref_data.data_matrix[i, :].min()
row_maximum = ref_data.data_matrix[i, :].max()
row_range = row_maximum - row_minimum
tar_data.data_matrix[i, :] =\
(tar_data.data_matrix[i, :] - row_minimum)/row_range
# bound the values to be between 0 and 1
tar_data.data_matrix[i, :] =\
[0 if x < 0 else x for x in tar_data.data_matrix[i, :]]
tar_data.data_matrix[i, :] =\
[1 if x > 1 else x for x in tar_data.data_matrix[i, :]]
tar_data.write_pcl(out)
zero_one_normal(tar=args.tar, out=args.out, ref=args.ref)
|
bsd-3-clause
|
Python
|
3b212f6ea6ba1ed355250b587f3d1c1cc462c7ed
|
send ctrl+c signal
|
wufeifei/cobra,wufeifei/cobra,braveghz/cobra,LiGhT1EsS/cobra,LiGhT1EsS/cobra,wufeifei/cobra,wufeifei/cobra,40huo/cobra,40huo/cobra,40huo/cobra,wufeifei/cobra,40huo/cobra,wufeifei/cobra,LiGhT1EsS/cobra,LiGhT1EsS/cobra,LiGhT1EsS/cobra,braveghz/cobra,40huo/cobra,40huo/cobra,braveghz/cobra,LiGhT1EsS/cobra,braveghz/cobra,braveghz/cobra,braveghz/cobra
|
tests/test_apiserver.py
|
tests/test_apiserver.py
|
# -*- coding: utf-8 -*-
"""
tests.apiserver
~~~~~~~~~~~~
Tests cobra.api
:author: 40huo <git@40huo.cn>
:homepage: https://github.com/wufeifei/cobra
:license: MIT, see LICENSE for more details.
:copyright: Copyright (c) 2017 Feei. All rights reserved
"""
import requests
import json
import subprocess
import time
import os
import shutil
import signal
from cobra.config import cobra_main, project_directory
p = subprocess.Popen(['python', cobra_main, '-H', '127.0.0.1', '-P', '5000'])
time.sleep(1)
config_path = os.path.join(project_directory, 'config')
template_path = os.path.join(project_directory, 'config.template')
shutil.copyfile(template_path, config_path)
def test_add_job():
url = "http://127.0.0.1:5000/api/add"
post_data = {
"key": "your_secret_key",
"target": ["https://github.com/shadowsocks/shadowsocks.git"],
}
headers = {
"Content-Type": "application/json",
}
re = requests.post(url=url, data=json.dumps(post_data), headers=headers)
assert "1001" in re.text
assert "Add scan job successfully" in re.text
assert "sid" in re.text
def test_job_status():
url = "http://127.0.0.1:5000/api/status"
post_data = {
"key": "your_secret_key",
"sid": 24,
}
headers = {
"Content-Type": "application/json",
}
re = requests.post(url=url, data=json.dumps(post_data), headers=headers)
assert "1001" in re.text
assert "msg" in re.text
assert "sid" in re.text
assert "status" in re.text
assert "report" in re.text
def test_close_api():
os.remove(config_path)
p.send_signal(signal=signal.SIGINT)
|
# -*- coding: utf-8 -*-
"""
tests.apiserver
~~~~~~~~~~~~
Tests cobra.api
:author: 40huo <git@40huo.cn>
:homepage: https://github.com/wufeifei/cobra
:license: MIT, see LICENSE for more details.
:copyright: Copyright (c) 2017 Feei. All rights reserved
"""
import requests
import json
import subprocess
import time
import os
import shutil
from cobra.config import cobra_main, project_directory
p = subprocess.Popen(['python', cobra_main, '-H', '127.0.0.1', '-P', '5000'])
time.sleep(1)
config_path = os.path.join(project_directory, 'config')
template_path = os.path.join(project_directory, 'config.template')
shutil.copyfile(template_path, config_path)
def test_add_job():
url = "http://127.0.0.1:5000/api/add"
post_data = {
"key": "your_secret_key",
"target": ["https://github.com/shadowsocks/shadowsocks.git"],
}
headers = {
"Content-Type": "application/json",
}
re = requests.post(url=url, data=json.dumps(post_data), headers=headers)
assert "1001" in re.text
assert "Add scan job successfully" in re.text
assert "sid" in re.text
def test_job_status():
url = "http://127.0.0.1:5000/api/status"
post_data = {
"key": "your_secret_key",
"sid": 24,
}
headers = {
"Content-Type": "application/json",
}
re = requests.post(url=url, data=json.dumps(post_data), headers=headers)
assert "1001" in re.text
assert "msg" in re.text
assert "sid" in re.text
assert "status" in re.text
assert "report" in re.text
def test_close_api():
os.remove(config_path)
p.terminate()
|
mit
|
Python
|
759e50abb963fd854989cb79592f7cf6f9d0bc13
|
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/41b9d692dc31b2b95c47e8c56cb9cd4872adc511.
|
paolodedios/tensorflow,frreiss/tensorflow-fred,paolodedios/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,gautam1858/tensorflow,frreiss/tensorflow-fred,yongtang/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,tensorflow/tensorflow,gautam1858/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,Intel-Corporation/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,paolodedios/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,Intel-Corporation/tensorflow,gautam1858/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,tensorflow/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,frreiss/tensorflow-fred,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,Intel-tensorflow/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,karllessard/tensorflow,frreiss/tensorflow-fred,karllessard/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,frreiss/tensorflow-fred,gautam1858/tensorflow,yongtang/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,frreiss/tensorflow-fred,Intel-Corporation/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow,gautam1858/tensorflow,frreiss/tensorflow-fred,frreiss/tensorflow-fred,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,frreiss/tensorflow-fred,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow
|
third_party/tf_runtime/workspace.bzl
|
third_party/tf_runtime/workspace.bzl
|
"""Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "41b9d692dc31b2b95c47e8c56cb9cd4872adc511"
TFRT_SHA256 = "b7ba9c34b81e53428a745453dc0ae0108a9510c6a7bd25bfc2b01694976ef636"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
)
|
"""Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "cbc81d9ef4bcea56e841c1b8a84d07db04a926bf"
TFRT_SHA256 = "804f98fab72aa814701e9bc2b56435fce5b0f64af01ccfc5e4783d0e07e7b656"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
)
|
apache-2.0
|
Python
|
b17700b9878a3999c2ad64f622d2ca8566043b2e
|
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/fb895960cc796437c6d516cc19027c94f2319b4d.
|
gautam1858/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,frreiss/tensorflow-fred,Intel-tensorflow/tensorflow,frreiss/tensorflow-fred,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,gautam1858/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,frreiss/tensorflow-fred,yongtang/tensorflow,yongtang/tensorflow,karllessard/tensorflow,Intel-Corporation/tensorflow,frreiss/tensorflow-fred,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,frreiss/tensorflow-fred,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow,Intel-Corporation/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,frreiss/tensorflow-fred,Intel-Corporation/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,frreiss/tensorflow-fred,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,frreiss/tensorflow-fred,Intel-tensorflow/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-Corporation/tensorflow,Intel-tensorflow/tensorflow,Intel-Corporation/tensorflow,karllessard/tensorflow,frreiss/tensorflow-fred,paolodedios/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow,gautam1858/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-Corporation/tensorflow,yongtang/tensorflow,gautam1858/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,tensorflow/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,gautam1858/tensorflow,paolodedios/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,paolodedios/tensorflow
|
third_party/tf_runtime/workspace.bzl
|
third_party/tf_runtime/workspace.bzl
|
"""Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "fb895960cc796437c6d516cc19027c94f2319b4d"
TFRT_SHA256 = "9c5c10fa7b516554c8a6725e13c3d959609ad5d8fab2c66b1292f1f1ec447a1f"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
)
|
"""Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "1b5317be3621f2f9b6415696edb912ef75f54187"
TFRT_SHA256 = "7f430ad049f7270b7dad98787f04d1259b8b2cd1a068c5ad38387b4574ffd9cc"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
)
|
apache-2.0
|
Python
|
a3afc651ba0efeef17293fa1e688a901453e1f05
|
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/379d33d44c64d5bd83eb4ca4d7a0928eb79b5f54.
|
tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,paolodedios/tensorflow,Intel-Corporation/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,gautam1858/tensorflow,paolodedios/tensorflow,Intel-Corporation/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,Intel-Corporation/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-Corporation/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-Corporation/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,karllessard/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-Corporation/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-Corporation/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow
|
third_party/tf_runtime/workspace.bzl
|
third_party/tf_runtime/workspace.bzl
|
"""Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "379d33d44c64d5bd83eb4ca4d7a0928eb79b5f54"
TFRT_SHA256 = "7be6d4bc2d8ac5f2420249f2c2df645ad945ba5f51d2ef18b87668783a22b8df"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
|
"""Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "ae8fb48248557568109089e96d1aaab760dada21"
TFRT_SHA256 = "fd64d9151ce25cda33a12ad1addbf52e853d2d13a740586f40684947e845df27"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
|
apache-2.0
|
Python
|
e6d4c2bb4ac83a5f573a2a6fc5ef77356cd70ef3
|
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/78c6401faaebc33df9cc2c78f21e5c235d1670f4.
|
tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,yongtang/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,karllessard/tensorflow,karllessard/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once
|
third_party/tf_runtime/workspace.bzl
|
third_party/tf_runtime/workspace.bzl
|
"""Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "78c6401faaebc33df9cc2c78f21e5c235d1670f4"
TFRT_SHA256 = "6a17b2490d44b392ece0015bbc4d5166003651319906603e19ef000da0686f69"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
|
"""Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "6e9c9d0a5c1bea5568c7fd837da88da67e71e3c3"
TFRT_SHA256 = "ec37cf4d732794f4a4032e5f25e9ee4e3dd99580350ce3eed5c1369fcc2ce496"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
|
apache-2.0
|
Python
|
5c823e1197f73701811cf449337c0c8bb970bc5c
|
Improve readability of tests
|
timtroendle/pytus2000
|
tests/test_datadicts.py
|
tests/test_datadicts.py
|
"""Tests for autogenerated data dictionaries.
A data dictionary contains the following:
* Variable Enum: An enum that contains all variables of the data dict as its members.
* Variable Value Enum: An enum containing all values of one variable. Only exists for variables
with a closed set of values.
"""
import pytest
from pytus2000 import diary, diaryepisode, individual, household, weightdiary, worksheet
@pytest.fixture(params=[
diary,
diaryepisode,
individual,
household,
weightdiary,
worksheet
])
def datadict_module(request):
return request.param
@pytest.fixture(params=[
diary,
diaryepisode,
individual,
household,
worksheet
])
def datadict_module_with_variable_value_enums(request):
return request.param
@pytest.fixture
def variable_enum_members(datadict_module):
return [member for member in datadict_module.Variable]
@pytest.fixture
def variable_value_enums(datadict_module_with_variable_value_enums):
enums = []
for member in datadict_module_with_variable_value_enums.Variable:
try:
datadict_module_with_variable_value_enums.__dict__[member.name]
enums.append(datadict_module_with_variable_value_enums.__dict__[member.name])
except KeyError:
pass # nothing to do; there is no enum
return enums
def test_module_has_variable_enum(datadict_module):
datadict_module.Variable
def test_variable_enum_is_not_empty(variable_enum_members):
assert len(variable_enum_members) > 0
def test_variable_enum_has_position(variable_enum_members):
for variable_enum_member in variable_enum_members:
variable_enum_member.position
def test_variable_enum_has_label(variable_enum_members):
for variable_enum_member in variable_enum_members:
variable_enum_member.label
def test_variable_enum_is_ordered_by_position(variable_enum_members):
assert ((variable_enum_members[0] < variable_enum_members[-1]) ==
(variable_enum_members[0].position < variable_enum_members[-1].position))
def test_module_has_variable_value_enums(variable_value_enums):
assert len(variable_value_enums) > 0
def test_variable_value_enums_are_ordered_by_value(variable_value_enums):
for enum in variable_value_enums:
members = [member for member in enum]
assert (members[0] < members[-1]) == (members[0].value < members[-1].value)
|
import pytest
from pytus2000 import diary, diaryepisode, individual, household, weightdiary, worksheet
@pytest.fixture(params=[
diary,
diaryepisode,
individual,
household,
weightdiary,
worksheet
])
def datadict_module(request):
return request.param
@pytest.fixture(params=[
diary,
diaryepisode,
individual,
household,
worksheet
])
def datadict_module_with_variable_enums(request):
return request.param
@pytest.fixture
def variable_enum_members(datadict_module):
return [member for member in datadict_module.Variable]
@pytest.fixture
def variable_enums(datadict_module_with_variable_enums):
enums = []
for member in datadict_module_with_variable_enums.Variable:
try:
datadict_module_with_variable_enums.__dict__[member.name]
enums.append(datadict_module_with_variable_enums.__dict__[member.name])
except KeyError:
pass # nothing to do; there is no enum
return enums
def test_module_has_variable_enum(datadict_module):
datadict_module.Variable
def test_variable_enum_is_not_empty(variable_enum_members):
assert len(variable_enum_members) > 0
def test_variable_enum_has_position(variable_enum_members):
for variable_enum_member in variable_enum_members:
variable_enum_member.position
def test_variable_enum_has_label(variable_enum_members):
for variable_enum_member in variable_enum_members:
variable_enum_member.label
def test_variable_enum_is_ordered_by_position(variable_enum_members):
assert ((variable_enum_members[0] < variable_enum_members[-1]) ==
(variable_enum_members[0].position < variable_enum_members[-1].position))
def test_module_has_variable_enums(variable_enums):
assert len(variable_enums) > 0
def test_variable_enums_are_ordered_by_value(variable_enums):
for enum in variable_enums:
members = [member for member in enum]
assert (members[0] < members[-1]) == (members[0].value < members[-1].value)
|
mit
|
Python
|
89844b7615e704410a9fa0dc6ae4cfe61ab8fefe
|
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/24d3f64ad78415f008af4bdde57079e331545dbf.
|
tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,karllessard/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,karllessard/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow
|
third_party/tf_runtime/workspace.bzl
|
third_party/tf_runtime/workspace.bzl
|
"""Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "24d3f64ad78415f008af4bdde57079e331545dbf"
TFRT_SHA256 = "00afec2e99d50a817a9a3bdea6f7fdc88fd2c656bdb22def5c20e96308487656"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
|
"""Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "af2213b972c9f736109d4249089c9398c09a8add"
TFRT_SHA256 = "ce588cd5aa843316c6b759315cbc0f018be3c4535ad7ef7d83a6bb49e6d3051b"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
|
apache-2.0
|
Python
|
1ef4cc5ca233eabc8b7002b70b865aa0ed9ff409
|
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/f09caec27d101ea44157f209bcfdc62ab62da4e4.
|
karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,Intel-tensorflow/tensorflow,Intel-Corporation/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,Intel-Corporation/tensorflow,Intel-Corporation/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,Intel-Corporation/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-Corporation/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,karllessard/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,Intel-Corporation/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,Intel-Corporation/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,Intel-Corporation/tensorflow,paolodedios/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,yongtang/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,tensorflow/tensorflow
|
third_party/tf_runtime/workspace.bzl
|
third_party/tf_runtime/workspace.bzl
|
"""Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "f09caec27d101ea44157f209bcfdc62ab62da4e4"
TFRT_SHA256 = "cda2b7652b72ecdd55f1a33a6c3126f403f3e0ac007014b011c8a8280c1f28bf"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
|
"""Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "ed84e5f6b49b52f7801e6030fb5ab26f41f2af28"
TFRT_SHA256 = "080d122bb57e5b5b51a8c8f3b1916d2f7ea768e09d9a29e67f04a1c34b3a8504"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
|
apache-2.0
|
Python
|
d1bb3815f694da6bf867d7b171f02554f874a737
|
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/553df8c12e9ba5930b9b8065f1d012ea07c6044c.
|
tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,karllessard/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model
|
third_party/tf_runtime/workspace.bzl
|
third_party/tf_runtime/workspace.bzl
|
"""Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "553df8c12e9ba5930b9b8065f1d012ea07c6044c"
TFRT_SHA256 = "477d0374b044c60cd018fdb17e7c6054e190e59e36e1a442eb5d1628efb2341d"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
|
"""Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "2db6a815bd262da6d96d2398c4049230548d8c33"
TFRT_SHA256 = "dcc0b3bc54740cabb370d0d773d8026a5113ceab346d85ed22a9f6e46d9db850"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
|
apache-2.0
|
Python
|
c4a4776a78b9f59bc52e799ffe28d17d0340278f
|
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/0af176c97ea8e57671df14444c4c759211c70b1a.
|
tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,tensorflow/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,gautam1858/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,karllessard/tensorflow,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,yongtang/tensorflow
|
third_party/tf_runtime/workspace.bzl
|
third_party/tf_runtime/workspace.bzl
|
"""Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "0af176c97ea8e57671df14444c4c759211c70b1a"
TFRT_SHA256 = "be0251342fe9408261e10638787b4eddfeece73c446d12d75692bff26dc5d5c5"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
|
"""Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "2f6de37d68a4c69e2ff9eec3cebbf1369e496940"
TFRT_SHA256 = "cf180c95c54d1132366996482a5c853f41695be88a495ff9215a0ee20300e870"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
|
apache-2.0
|
Python
|
f56dd27d3e94d15af6ca82e3e5a5c4fbaf34771d
|
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/d543fafbb8dfa546945c5eced829accec1b70b46.
|
tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow
|
third_party/tf_runtime/workspace.bzl
|
third_party/tf_runtime/workspace.bzl
|
"""Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "d543fafbb8dfa546945c5eced829accec1b70b46"
TFRT_SHA256 = "370f57dc668b4a44b7a0caa6a078cff1d17118356a665699e116f4cf585eb679"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
|
"""Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "8678704dfcf48b2a7039e56fde0e9bd58bce7828"
TFRT_SHA256 = "46cd465aab34eec5f21f1ff746076b494cae3f3295b20265376a96745de29da8"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
|
apache-2.0
|
Python
|
4e0e42544237ce612d5ec3e4dc6a6a8ab8e58df2
|
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/06855ca4832377a2bdc8fdb3200415a219906c02.
|
tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow
|
third_party/tf_runtime/workspace.bzl
|
third_party/tf_runtime/workspace.bzl
|
"""Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "06855ca4832377a2bdc8fdb3200415a219906c02"
TFRT_SHA256 = "e0ca743f255e4f24e1a84b0fe60fcce59833b5145c25369f350fae121ea3eb67"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
|
"""Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "f66f87bad3576356f286662b0f4a742ffed33c0d"
TFRT_SHA256 = "b35a52bcd37a7aca08b0446b96eb0a6f0276ea303779b9ce1c9292267996b5a3"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
|
apache-2.0
|
Python
|
190b9ac1aa92c172b102ae7b0d6ff06b823c5a78
|
add missing imports
|
schriftgestalt/NotePalettes
|
FontNote.glyphsPalette/Contents/Resources/plugin.py
|
FontNote.glyphsPalette/Contents/Resources/plugin.py
|
# encoding: utf-8
#######################################################################################
#
# Palette Plugin
#
# Read the docs:
# https://github.com/schriftgestalt/GlyphsSDK/tree/master/Python%20Templates/Palette
#
#######################################################################################
import objc
from GlyphsApp import *
from GlyphsApp.plugins import *
from Foundation import NSLog
import traceback
class FontNote (PalettePlugin):
dialogName = "com.mekkablue.FontNote"
dialog = objc.IBOutlet()
noteTextField = objc.IBOutlet()
def settings(self):
self.name = Glyphs.localize({'en': u'Font Note', 'de': u'Schriftnotizen'})
"""
The minimum/maximum height of the view in pixels. 'max' must be bigger than 'min'.
"""
self.min = 30
self.max = 700
# Load .nib dialog (without .extension)
self.loadNib('IBdialog', __file__)
def start(self):
# Adding a callback:
NSNotificationCenter.defaultCenter().addObserver_selector_name_object_(self, self.update, UPDATEINTERFACE, objc.nil)
def __del__(self):
NSNotificationCenter.defaultCenter().removeObserver_(self)
@objc.IBAction
def setNote_(self, sender):
try:
thisFont = self.windowController().document().font
thisFont.note = self.noteTextField.stringValue()
except Exception as e:
self.logError(traceback.format_exc())
def update(self, sender):
try:
# only update if there is a window:
if self.windowController():
thisFont = self.windowController().document().font
if thisFont:
thisFontNote = thisFont.note
if not thisFontNote:
thisFontNote = ""
self.noteTextField.setStringValue_(thisFontNote)
except:
self.logError(traceback.format_exc())
def __file__(self):
"""Please leave this method unchanged"""
return __file__
# Temporary Fix
# Sort ID for compatibility with v919 to v976
def setSortID_(self, id):
pass
def sortID(self):
return 0
|
# encoding: utf-8
#######################################################################################
#
# Palette Plugin
#
# Read the docs:
# https://github.com/schriftgestalt/GlyphsSDK/tree/master/Python%20Templates/Palette
#
#######################################################################################
from GlyphsApp.plugins import *
from Foundation import NSLog
class FontNote (PalettePlugin):
dialogName = "com.mekkablue.FontNote"
dialog = objc.IBOutlet()
noteTextField = objc.IBOutlet()
def settings(self):
self.name = Glyphs.localize({'en': u'Font Note', 'de': u'Schriftnotizen'})
"""
The minimum/maximum height of the view in pixels. 'max' must be bigger than 'min'.
"""
self.min = 30
self.max = 700
# Load .nib dialog (without .extension)
self.loadNib('IBdialog', __file__)
def start(self):
# Adding a callback:
NSNotificationCenter.defaultCenter().addObserver_selector_name_object_(self, self.update, UPDATEINTERFACE, objc.nil)
def __del__(self):
NSNotificationCenter.defaultCenter().removeObserver_(self)
@objc.IBAction
def setNote_(self, sender):
try:
thisFont = self.windowController().document().font
thisFont.note = self.noteTextField.stringValue()
except Exception as e:
self.logError(traceback.format_exc())
def update(self, sender):
try:
# only update if there is a window:
if self.windowController():
thisFont = self.windowController().document().font
if thisFont:
thisFontNote = thisFont.note
if not thisFontNote:
thisFontNote = ""
self.noteTextField.setStringValue_(thisFontNote)
except:
self.logError(traceback.format_exc())
def __file__(self):
"""Please leave this method unchanged"""
return __file__
# Temporary Fix
# Sort ID for compatibility with v919 to v976
def setSortID_(self, id):
pass
def sortID(self):
return 0
|
apache-2.0
|
Python
|
1759e2bec03935e33fe15950e0b5457a0001aaa5
|
fix conflict
|
Fresnoy/kart,Fresnoy/kart
|
school/migrations/0005_studentapplication.py
|
school/migrations/0005_studentapplication.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import sortedm2m.fields
class Migration(migrations.Migration):
dependencies = [
('assets', '0001_initial'),
('people', '0002_updatefresnoyprofile'),
('school', '0004_rename_newstudent_model'),
]
operations = [
migrations.CreateModel(
name='StudentApplication',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('current_year_application_count', models.CharField(default=None, help_text='Auto generated field (current year - increment number)', max_length=8, blank=True)),
('first_time', models.BooleanField(default=True, help_text=b"If the first time the Artist's applying")),
('last_application_year', models.PositiveSmallIntegerField(null=True, blank=True)),
('created_on', models.DateTimeField(auto_now_add=True)),
('updated_on', models.DateTimeField(auto_now=True)),
('remote_interview', models.BooleanField(default=False)),
('remote_interview_type', models.CharField(help_text='Skype / Gtalk / FaceTime / AppearIn / Other', max_length=50, blank=True)),
('remote_interview_info', models.CharField(help_text=b'ID / Number / ... ', max_length=50, blank=True)),
('selected_for_interview', models.BooleanField(default=False, help_text=b'Is the candidat selected for the Interview')),
('administrative_galleries', sortedm2m.fields.SortedManyToManyField(help_text=None, related_name='certificates', to='assets.Gallery', blank=True)),
('artist', models.ForeignKey(related_name='student_application', to='people.Artist')),
('artwork_galleries', sortedm2m.fields.SortedManyToManyField(help_text=None, related_name='artworks', to='assets.Gallery', blank=True)),
],
),
migrations.AlterField(
model_name='student',
name='artist',
field=models.OneToOneField(related_name='student', to='people.Artist'),
),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import sortedm2m.fields
class Migration(migrations.Migration):
dependencies = [
('assets', '0001_initial'),
('people', '0002_updatefresnoyprofile'),
('people', '0001_initial'),
('school', '0004_rename_newstudent_model'),
]
operations = [
migrations.CreateModel(
name='StudentApplication',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('current_year_application_count', models.CharField(default=None, help_text='Auto generated field (current year - increment number)', max_length=8, blank=True)),
('first_time', models.BooleanField(default=True, help_text=b"If the first time the Artist's applying")),
('last_application_year', models.PositiveSmallIntegerField(null=True, blank=True)),
('created_on', models.DateTimeField(auto_now_add=True)),
('updated_on', models.DateTimeField(auto_now=True)),
('remote_interview', models.BooleanField(default=False)),
('remote_interview_type', models.CharField(help_text='Skype / Gtalk / FaceTime / AppearIn / Other', max_length=50, blank=True)),
('remote_interview_info', models.CharField(help_text=b'ID / Number / ... ', max_length=50, blank=True)),
('selected_for_interview', models.BooleanField(default=False, help_text=b'Is the candidat selected for the Interview')),
('administrative_galleries', sortedm2m.fields.SortedManyToManyField(help_text=None, related_name='certificates', to='assets.Gallery', blank=True)),
('artist', models.ForeignKey(related_name='student_application', to='people.Artist')),
('artwork_galleries', sortedm2m.fields.SortedManyToManyField(help_text=None, related_name='artworks', to='assets.Gallery', blank=True)),
],
),
migrations.AlterField(
model_name='student',
name='artist',
field=models.OneToOneField(related_name='student', to='people.Artist'),
),
]
|
agpl-3.0
|
Python
|
fbdaeff6f01ffaf0ac4f9a0d0d962a19c2865b32
|
Add docstring documenting the intended use of LabHubApp.
|
jupyter/jupyterlab,jupyter/jupyterlab,jupyter/jupyterlab,jupyter/jupyterlab,jupyter/jupyterlab
|
jupyterlab/labhubapp.py
|
jupyterlab/labhubapp.py
|
import os
from traitlets import default
from .labapp import LabApp
try:
from jupyterhub.singleuser import SingleUserNotebookApp
except ImportError:
SingleUserLabApp = None
raise ImportError('You must have jupyterhub installed for this to work.')
else:
class SingleUserLabApp(SingleUserNotebookApp, LabApp):
"""
A sublcass of JupyterHub's SingleUserNotebookApp which includes LabApp
as a mixin. This makes the LabApp configurables available to the spawned
jupyter server.
If you don't need to change any of the configurables from their default
values, then this class is not necessary, and you can deploy JupyterLab
by ensuring that its server extension is enabled and setting the
`Spawner.default_url` to '/lab'.
If you do need to configure JupyterLab, then use this application by
setting `Spawner.cmd = ['jupyter-labhub']`.
"""
@default("default_url")
def _default_url(self):
"""when using jupyter-labhub, jupyterlab is default ui"""
return "/lab"
def init_webapp(self, *args, **kwargs):
super().init_webapp(*args, **kwargs)
def main(argv=None):
return SingleUserLabApp.launch_instance(argv)
if __name__ == "__main__":
main()
|
import os
import warnings
from traitlets import default
from .labapp import LabApp
try:
from jupyterhub.singleuser import SingleUserNotebookApp
except ImportError:
SingleUserLabApp = None
raise ImportError('You must have jupyterhub installed for this to work.')
else:
class SingleUserLabApp(SingleUserNotebookApp, LabApp):
@default("default_url")
def _default_url(self):
"""when using jupyter-labhub, jupyterlab is default ui"""
return "/lab"
def init_webapp(self, *args, **kwargs):
warnings.warn(
"SingleUserLabApp is deprecated, use SingleUserNotebookApp and set " + \
"c.Spawner.default_url = '/lab' in jupyterhub_config.py", DeprecationWarning
)
super().init_webapp(*args, **kwargs)
def main(argv=None):
return SingleUserLabApp.launch_instance(argv)
if __name__ == "__main__":
main()
|
bsd-3-clause
|
Python
|
8cf24b479ca3602ac4471d29f90821c4edc56ad7
|
Update CondenseLabel.py
|
DigitalSlideArchive/HistomicsTK,DigitalSlideArchive/HistomicsTK
|
histomicstk/segmentation/label/CondenseLabel.py
|
histomicstk/segmentation/label/CondenseLabel.py
|
import numpy as np
import scipy.ndimage.measurements as ms
def CondenseLabel(Label):
"""
Shifts labels in a label image to fill in gaps corresponding to missing
values.
Parameters
----------
Label : array_like
A label image generated by segmentation methods.
Returns
-------
Condensed : array_like
A label image where all values > 0 are shifted down to fill gaps.
See Also
--------
ShuffleLabel
"""
# get list of unique object labels
Unique = np.unique(Label.flatten())
# remove background objects (Label == 0)
Unique = np.delete(Unique, (Unique == 0).nonzero())
# initialize output
Condensed = Label.copy()
# get extent of each object
Locations = ms.find_objects(Condensed)
# initialize counter
Counter = 1
# fill in new values
for i in np.arange(1, len(Locations)+1):
if Locations[i-1] is not None:
Patch = Label[Locations[i-1]]
Patch[Patch == i] = Counter
Counter += 1
return Condensed
|
import numpy as np
from skimage import measure as ms
def CondenseLabel(Label):
"""
Shifts labels in a label image to fill in gaps corresponding to missing
values.
Parameters
----------
Label : array_like
A label image generated by segmentation methods.
Returns
-------
Condensed : array_like
A label image where all values > 0 are shifted down to fill gaps.
See Also
--------
ShuffleLabel
"""
# get list of unique object labels
Unique = np.unique(Label.flatten())
# remove background objects (Label == 0)
Unique = np.delete(Unique, (Unique == 0).nonzero())
# initialize output
Condensed = np.zeros(Label.shape, dtype=np.uint32)
# get pixel list for each object
Props = ms.regionprops(Label)
# fill in new values
for i in range(len(Unique)):
Coords = Props[i].coords
Condensed[Coords[:, 0], Coords[:, 1]] = i+1
return Condensed
|
apache-2.0
|
Python
|
87f1f5c5198ae8511c7936130f27b0361c5b3187
|
Update logging.py
|
MCPEBukkit/Python_PE,MCPEBukkit/Python_PE
|
src/logging.py
|
src/logging.py
|
import logging
class logger(object):
def info(self, message):
logging.info("[INFO] "+message)
def warning(self, message):
logging.warning("[WARNING] "+message)
def error(self, message):
logging.error("[ERROR] "+message)
def debug(self, message):
logging.debug("[DEBUG] "+message)
def critical(self, message)
logging.critical("[CRITICAL] "+message)
logger = logger()
|
import logging
class logger:
def info(message):
logging.info("[INFO] "+message)
def warning(message):
logging.warning("[WARNING] "+message)
def error(message):
logging.error("[ERROR] "+message)
def debug(message):
logging.debug("[DEBUG] "+message)
def critical(message)
logging.critical("[CRITICAL] "+message)
|
mit
|
Python
|
de9f9c07c6f1dde8d7ad314b6a6fb58a963e1558
|
Return as many results as possible
|
6/GeoDJ,6/GeoDJ
|
geodj/youtube.py
|
geodj/youtube.py
|
from gdata.youtube.service import YouTubeService, YouTubeVideoQuery
from django.utils.encoding import smart_str
import re
class YoutubeMusic:
def __init__(self):
self.service = YouTubeService()
def search(self, artist):
query = YouTubeVideoQuery()
query.vq = artist
query.orderby = 'relevance'
query.racy = 'exclude'
query.format = '5'
query.max_results = 50
query.categories.append("/Music")
feed = self.service.YouTubeQuery(query)
results = []
for entry in feed.entry:
if not self.is_valid_entry(artist, entry):
continue
results.append({
'url': entry.media.player.url,
'title': smart_str(entry.media.title.text),
'duration': int(entry.media.duration.seconds),
})
return {'artist': artist, 'results': results}
def is_valid_entry(self, artist, entry):
duration = int(entry.media.duration.seconds)
title = smart_str(entry.media.title.text).lower()
if entry.rating is not None and float(entry.rating.average) < 3.5:
return False
if duration < (2 * 60) or duration > (9 * 60):
return False
if artist.lower() not in title:
return False
if re.search("\b(concert|cover)\b", title):
return False
return True
|
from gdata.youtube.service import YouTubeService, YouTubeVideoQuery
from django.utils.encoding import smart_str
import re
class YoutubeMusic:
def __init__(self):
self.service = YouTubeService()
def search(self, artist):
query = YouTubeVideoQuery()
query.vq = artist
query.orderby = 'relevance'
query.racy = 'exclude'
query.format = '5'
query.categories.append("/Music")
feed = self.service.YouTubeQuery(query)
results = []
for entry in feed.entry:
if not self.is_valid_entry(artist, entry):
continue
results.append({
'url': entry.media.player.url,
'title': smart_str(entry.media.title.text),
'duration': int(entry.media.duration.seconds),
})
return {'artist': artist, 'results': results}
def is_valid_entry(self, artist, entry):
duration = int(entry.media.duration.seconds)
title = smart_str(entry.media.title.text).lower()
if entry.rating is not None and float(entry.rating.average) < 3.5:
return False
if duration < (2 * 60) or duration > (9 * 60):
return False
if artist.lower() not in title:
return False
if re.search("\b(concert|cover)\b", title):
return False
return True
|
mit
|
Python
|
3eb9891b4671900b90a400c0b18513c2964d22fe
|
Add check to detect if a buildbot slave is running.
|
eunchong/build,eunchong/build,eunchong/build,eunchong/build
|
scripts/tools/swarm_bootstrap/start_slave.py
|
scripts/tools/swarm_bootstrap/start_slave.py
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Returns a swarming bot dimensions and setups automatic startup if needed.
This file is uploaded the swarming server so the swarming bots can declare their
dimensions and startup method easily.
"""
import logging
import os
import sys
import os_utilities # pylint: disable-msg=F0401
import zipped_archive # pylint: disable-msg=F0401
def is_buildbot_slave_running():
"""Returns True if a buildbot slave process is detected.
Sometimes human error occurs, and a bot is double-booked to be both a
buildbot slave and a Swarming bot.
"""
if sys.platform == 'win32':
for drive in ('c', 'e'):
if os.path.isfile(drive + ':\\b\\build\\slave\\twistd.pid'):
return True
else:
if os.path.isfile('/b/build/slave/twistd.pid'):
return True
return False
def get_attributes():
"""Returns the attributes for this machine."""
attributes = os_utilities.get_attributes(None)
if is_buildbot_slave_running():
# Make sure no task is triggered on this bot until this is resolved.
attributes['dimensions'] = {
'error': 'Detected a buildbot slave process!',
'id': attributes['id'],
}
return attributes
def setup_bot():
"""Sets up the bot so it will survive an host restart.
Returns True if it's fine to start the bot right away.
"""
root_dir = os.getcwd()
command = [
sys.executable,
os.path.abspath(zipped_archive.get_main_script_path()),
'start_bot',
]
if sys.platform == 'cygwin':
# Replace the cygwin python command for the native one.
# Find a depot_tools installation at a known location if it exists.
for letter in ('c', 'e'):
path = '/cygdrive/%s/b/depot_tools/python.bat' % letter
if os.path.isfile(path):
command[0] = path
break
else:
logging.error('Unable to find python.bat')
command[0] = 'python'
os_utilities.setup_auto_startup_win(command, root_dir, 'run_swarm_bot.bat')
# Because it was started in cygwin but we want only the bot to run on
# native python, invariably force a reboot. #thisiswindows.
return False
elif sys.platform in 'win32':
# Find a depot_tools installation at a known location if it exists.
for letter in ('c', 'e'):
path = letter + ':\\b\\depot_tools\\python.bat'
if os.path.isfile(path):
command[0] = path
break
else:
logging.error('Unable to find python.bat')
command[0] = 'python'
os_utilities.setup_auto_startup_win(command, root_dir, 'run_swarm_bot.bat')
# Invariably force a reboot. #thisiswindows.
return False
elif sys.platform == 'darwin':
os_utilities.setup_auto_startup_osx(
command, root_dir, 'org.swarm.bot.plist')
# Restart so it is properly started by launchd. setup_bot() could be run via
# ssh, which would break tests requiring UI.
return False
# No need to restart on Ubuntu since the bot is started via initd.
return True
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Returns a swarming bot dimensions and setups automatic startup if needed.
This file is uploaded the swarming server so the swarming bots can declare their
dimensions and startup method easily.
"""
import logging
import os
import sys
import os_utilities # pylint: disable-msg=F0401
import zipped_archive # pylint: disable-msg=F0401
def get_attributes():
"""Returns the attributes for this machine."""
return os_utilities.get_attributes(None)
def setup_bot():
"""Sets up the bot so it will survive an host restart.
Returns True if it's fine to start the bot right away.
"""
root_dir = os.getcwd()
command = [
sys.executable,
os.path.abspath(zipped_archive.get_main_script_path()),
'start_bot',
]
if sys.platform == 'cygwin':
# Replace the cygwin python command for the native one.
# Find a depot_tools installation at a known location if it exists.
for letter in ('c', 'e'):
path = '/cygdrive/%s/b/depot_tools/python.bat' % letter
if os.path.isfile(path):
command[0] = path
break
else:
logging.error('Unable to find python.bat')
command[0] = 'python'
os_utilities.setup_auto_startup_win(command, root_dir, 'run_swarm_bot.bat')
# Because it was started in cygwin but we want only the bot to run on
# native python, invariably force a reboot. #thisiswindows.
return False
elif sys.platform in 'win32':
# Find a depot_tools installation at a known location if it exists.
for letter in ('c', 'e'):
path = letter + ':\\b\\depot_tools\\python.bat'
if os.path.isfile(path):
command[0] = path
break
else:
logging.error('Unable to find python.bat')
command[0] = 'python'
os_utilities.setup_auto_startup_win(command, root_dir, 'run_swarm_bot.bat')
# Invariably force a reboot. #thisiswindows.
return False
elif sys.platform == 'darwin':
os_utilities.setup_auto_startup_osx(
command, root_dir, 'org.swarm.bot.plist')
# Restart so it is properly started by launchd. setup_bot() could be run via
# ssh, which would break tests requiring UI.
return False
# No need to restart on Ubuntu since the bot is started via initd.
return True
|
bsd-3-clause
|
Python
|
498be7a6d3700322aa470a00791a8b0be849cf0c
|
Fix shebang declaration.
|
zolech/zabbix-mesos-template
|
getMesosStats.py
|
getMesosStats.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import urllib2
import json
import argparse
def get_metric(host, port, metric):
response = urllib2.urlopen(
'http://' + host + ':' + port + '/metrics/snapshot')
data = json.load(response)
# print json.dumps(data, indent=4, sort_keys=True)
print data[metric]
if __name__ == '__main__':
arg_parser = argparse.ArgumentParser(
description='Mesos metrics')
arg_parser.add_argument(
'-H', '--host', help="Specify host or ip address", required=True)
arg_parser.add_argument(
'-p', '--port', help="Specify mesos api port", required=True)
arg_parser.add_argument(
'-m', '--metric', help="Specify metric's name", required=True)
arguments = arg_parser.parse_args()
get_metric(arguments.host, arguments.port, arguments.metric)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import urllib2
import json
import argparse
def get_metric(host, port, metric):
response = urllib2.urlopen(
'http://' + host + ':' + port + '/metrics/snapshot')
data = json.load(response)
# print json.dumps(data, indent=4, sort_keys=True)
print data[metric]
if __name__ == '__main__':
arg_parser = argparse.ArgumentParser(
description='Mesos metrics')
arg_parser.add_argument(
'-H', '--host', help="Specify host or ip address", required=True)
arg_parser.add_argument(
'-p', '--port', help="Specify mesos api port", required=True)
arg_parser.add_argument(
'-m', '--metric', help="Specify metric's name", required=True)
arguments = arg_parser.parse_args()
get_metric(arguments.host, arguments.port, arguments.metric)
|
mit
|
Python
|
c49cf131ddb16157ea1a1663eef63133efe0068d
|
Implement read operations for queue.file
|
tilezen/tilequeue,mapzen/tilequeue
|
tilequeue/queue/file.py
|
tilequeue/queue/file.py
|
from tilequeue.tile import serialize_coord
class OutputFileQueue(object):
def __init__(self, fp):
self.fp = fp
def enqueue(self, coord):
payload = serialize_coord(coord)
self.fp.write(payload + '\n')
def enqueue_batch(self, coords):
n = 0
for coord in coords:
self.enqueue(coord)
n += 1
return n, 0
def read(self, max_to_read=1, timeout_seconds=20):
coords = []
for _ in range(max_to_read):
try:
coords.append(next(self.fp))
except StopIteration:
break
return coords
def job_done(self, coord_message):
pass
def clear(self):
self.fp.seek(0)
self.fp.truncate()
return -1
def close(self):
remaining_queue = "".join([ln for ln in self.fp])
self.clear()
self.fp.write(remaining_queue)
self.fp.close()
|
from tilequeue.tile import serialize_coord
class OutputFileQueue(object):
def __init__(self, fp):
self.fp = fp
def enqueue(self, coord):
payload = serialize_coord(coord)
self.fp.write(payload + '\n')
def enqueue_batch(self, coords):
n = 0
for coord in coords:
self.enqueue(coord)
n += 1
return n, 0
def read(self, max_to_read=1, timeout_seconds=20):
raise NotImplementedError
def job_done(self, coord_message):
raise NotImplementedError
def clear(self):
self.fp.truncate()
return -1
def close(self):
self.fp.close()
|
mit
|
Python
|
5418907b13d6a00190cd85c5b9b73d4053be34ed
|
Add licence
|
sunliwen/alfred-mailto,deanishe/alfred-mailto,deanishe/alfred-mailto,sunliwen/alfred-mailto,deanishe/alfred-mailto,sunliwen/alfred-mailto
|
src/compose.py
|
src/compose.py
|
#!/usr/bin/env python
# encoding: utf-8
#
# Copyright © 2013 deanishe@deanishe.net.
#
# MIT Licence. See http://opensource.org/licenses/MIT
#
# Created on 2013-11-01
#
"""
Compose new email to specified recipients (if any) in selected client.
Client is selected using mailto.py
"""
from __future__ import print_function
import sys
import os
from subprocess import check_call
import alfred
from mailto import MailApps
from contacts import get_contacts
# import logging
# logging.basicConfig(filename=os.path.join(os.path.dirname(__file__), u'debug.log'),
# level=logging.DEBUG)
# log = logging.getLogger(u'compose')
def main():
args = alfred.args()
recipients = []
if len(args):
emails = [s.strip() for s in args[0].split(u',') if s.strip()]
contacts = dict(get_contacts()[0]) # email : name
for email in emails:
name = contacts.get(email)
if name and name != email:
recipients.append(u'{} <{}>'.format(name, email))
else:
log.debug(u'Not found : {}'.format(email))
recipients.append(email)
recipients = u','.join(recipients)
else:
recipients = u''
# log.debug(u'args : {} recipients : {}'.format(args, recipients))
# build and execute command
url = u'mailto:{}'.format(recipients)
appname, path = MailApps().default_app
command = [u'open']
if appname is not None:
command.append(u'-a')
command.append(appname)
command.append(url)
# log.debug(u'command : {}'.format(command))
retcode = check_call(command)
return retcode
if __name__ == '__main__':
sys.exit(main())
|
#!/usr/bin/env python
# encoding: utf-8
"""
Compose new email to specified recipients (if any) in selected client.
Client is selected using mailto.py
"""
from __future__ import print_function
import sys
import os
from subprocess import check_call
import alfred
from mailto import MailApps
from contacts import get_contacts
# import logging
# logging.basicConfig(filename=os.path.join(os.path.dirname(__file__), u'debug.log'),
# level=logging.DEBUG)
# log = logging.getLogger(u'compose')
def main():
args = alfred.args()
recipients = []
if len(args):
emails = [s.strip() for s in args[0].split(u',') if s.strip()]
contacts = dict(get_contacts()[0]) # email : name
for email in emails:
name = contacts.get(email)
if name and name != email:
recipients.append(u'{} <{}>'.format(name, email))
else:
log.debug(u'Not found : {}'.format(email))
recipients.append(email)
recipients = u','.join(recipients)
else:
recipients = u''
# log.debug(u'args : {} recipients : {}'.format(args, recipients))
# build and execute command
url = u'mailto:{}'.format(recipients)
appname, path = MailApps().default_app
command = [u'open']
if appname is not None:
command.append(u'-a')
command.append(appname)
command.append(url)
# log.debug(u'command : {}'.format(command))
retcode = check_call(command)
return retcode
if __name__ == '__main__':
sys.exit(main())
|
mit
|
Python
|
d8b163a58941b130ba9903ab26463716d52eeaa0
|
Fix running on patchlevel versions below the highest of that minor version
|
github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql
|
java/kotlin-extractor/kotlin_plugin_versions.py
|
java/kotlin-extractor/kotlin_plugin_versions.py
|
import platform
import re
import subprocess
import sys
def is_windows():
'''Whether we appear to be running on Windows'''
if platform.system() == 'Windows':
return True
if platform.system().startswith('CYGWIN'):
return True
return False
many_versions = [ '1.4.32', '1.5.31', '1.6.10', '1.6.20' ]
def get_single_version():
# TODO: `shell=True` is a workaround to get CI working on Windows. It breaks the build on Linux.
versionOutput = subprocess.run(['kotlinc', '-version'], capture_output=True, text=True, shell=is_windows())
m = re.match(r'.* kotlinc-jvm ([0-9]+)\.([0-9]+)\.([0-9]+) .*', versionOutput.stderr)
if m is None:
raise Exception('Cannot detect version of kotlinc (got ' + str(versionOutput) + ')')
major = m.group(1)
minor = m.group(2)
patch = m.group(3)
current_version = f'{major}.{minor}.{patch}'
matching_minor_versions = [ version for version in many_versions if version.startswith(f'{major}.{minor}') ]
if len(matching_minor_versions) == 0:
raise Exception(f'Cannot find a matching minor version for kotlinc version {current_version} (got {versionOutput}; know about {str(many_versions)})')
matching_minor_versions.sort()
for version in matching_minor_versions:
if current_version >= version:
return version
return matching_minor_versions[-1]
raise Exception(f'No suitable kotlinc version found for {current_version} (got {versionOutput}; know about {str(many_versions)})')
def get_latest_url():
version = many_versions[-1]
url = 'https://github.com/JetBrains/kotlin/releases/download/v' + version + '/kotlin-compiler-' + version + '.zip'
return url
if __name__ == "__main__":
args = sys.argv
if len(args) != 2:
raise Exception("Bad arguments")
command = args[1]
if command == 'latest-url':
print(get_latest_url())
else:
raise Exception("Unknown command: " + command)
|
import platform
import re
import subprocess
import sys
def is_windows():
'''Whether we appear to be running on Windows'''
if platform.system() == 'Windows':
return True
if platform.system().startswith('CYGWIN'):
return True
return False
many_versions = [ '1.4.32', '1.5.31', '1.6.10', '1.6.20' ]
def get_single_version():
# TODO: `shell=True` is a workaround to get CI working on Windows. It breaks the build on Linux.
versionOutput = subprocess.run(['kotlinc', '-version'], capture_output=True, text=True, shell=is_windows())
m = re.match(r'.* kotlinc-jvm ([0-9]+)\.([0-9]+\.)([0-9]+) .*', versionOutput.stderr)
if m is None:
raise Exception('Cannot detect version of kotlinc (got ' + str(versionOutput) + ')')
major = m.group(1)
minor = m.group(2)
patch = m.group(3)
current_version = f'{major}.{minor}.{patch}'
matching_minor_versions = [ version for version in many_versions if version.startswith(f'{major}.{minor}') ]
if len(matching_minor_versions) == 0:
raise Exception(f'Cannot find a matching minor version for kotlinc version {current_version} (got {versionOutput}; know about {str(many_versions)})')
matching_minor_versions.sort()
for version in matching_minor_versions:
if current_version >= version:
return version
return matching_minor_versions[-1]
raise Exception(f'No suitable kotlinc version found for {current_version} (got {versionOutput}; know about {str(many_versions)})')
def get_latest_url():
version = many_versions[-1]
url = 'https://github.com/JetBrains/kotlin/releases/download/v' + version + '/kotlin-compiler-' + version + '.zip'
return url
if __name__ == "__main__":
args = sys.argv
if len(args) != 2:
raise Exception("Bad arguments")
command = args[1]
if command == 'latest-url':
print(get_latest_url())
else:
raise Exception("Unknown command: " + command)
|
mit
|
Python
|
39a9131a0d0ce1d59c970b089acc1da4af006b2b
|
bump version number
|
Jackevansevo/taggy,Jackevansevo/taggy
|
taggy/__init__.py
|
taggy/__init__.py
|
__version__ = "0.2.1"
|
__version__ = "0.2.0"
|
mit
|
Python
|
dfec00fb9d14ec0689dd0378793c4dcaac071c6e
|
Optimize imports
|
bit0001/trajectory_tracking,bit0001/trajectory_tracking
|
src/control.py
|
src/control.py
|
#!/usr/bin/env python
import rospy
from gazebo_msgs.msg import ModelStates
from geometry_msgs.msg import Twist
from constants import DELTA_T, STEPS
from controller import create_controller
from plotter import Plotter
def get_pose(message):
global current_pose
current_pose = message.pose[2]
def compute_control_actions(pose):
global i
controller.compute_control_actions(pose, i)
plotter.add_point(pose)
twist = Twist()
twist.linear.x = controller.v_n
twist.angular.z = controller.w_n
twist_publisher.publish(twist)
i += 1
if __name__ == '__main__':
rospy.init_node('control')
current_pose = None
subscriber = rospy.Subscriber('gazebo/model_states', ModelStates, get_pose)
twist_publisher = rospy.Publisher('computed_control_actions', Twist, queue_size=1)
while current_pose is None:
pass
i = 0
plotter = Plotter()
controller = create_controller()
rate = rospy.Rate(int(1 / DELTA_T))
while not rospy.is_shutdown() and i < STEPS:
compute_control_actions(current_pose)
rate.sleep()
plotter.plot_results()
rospy.spin()
|
#!/usr/bin/env python
import rospy
from gazebo_msgs.msg import ModelStates
from geometry_msgs.msg import Twist, Pose
from constants import DELTA_T, STEPS
from controller import EulerMethodController, create_controller
from plotter import Plotter
def get_pose(message):
global current_pose
current_pose = message.pose[2]
def compute_control_actions(pose):
global i
controller.compute_control_actions(pose, i)
plotter.add_point(pose)
twist = Twist()
twist.linear.x = controller.v_n
twist.angular.z = controller.w_n
twist_publisher.publish(twist)
i += 1
if __name__ == '__main__':
rospy.init_node('control')
current_pose = None
subscriber = rospy.Subscriber('gazebo/model_states', ModelStates, get_pose)
twist_publisher = rospy.Publisher('computed_control_actions', Twist, queue_size=1)
while current_pose is None:
pass
i = 0
plotter = Plotter()
controller = create_controller()
rate = rospy.Rate(int(1 / DELTA_T))
while not rospy.is_shutdown() and i < STEPS:
compute_control_actions(current_pose)
rate.sleep()
plotter.plot_results()
rospy.spin()
|
mit
|
Python
|
bec7885f9ab3314e8627f6b3c384a9dc457fca0f
|
Update markdown to fix /r/RivenMains
|
festinuz/cmcb,festinuz/cmcb
|
cmcb/static_data.py
|
cmcb/static_data.py
|
SECOND = 1
MINUTE = 60*SECOND
HOUR = 60*MINUTE
DAY = 24*HOUR
WEEK = 7*DAY
REDDIT_UPDATE_TIMEOUT = MINUTE
LEAGUE_UPDATE_TIMEOUT = HOUR
TEXT_HEAD = '''
Hello, /r/{subreddit}! This post updates automatically to help you find
desired club or fill your club with some folks! You can find additional info
at the end of the post.\n\n--------\n
'''
TEXT_REGION_TABLE = '''
\n## Available **{region}** clubs:
Club name|Club tag|Owner IGN|Last time online
:--------|:-------|:--------|:---------------
'''
TEXT_CLUB_ROW = '[{}]({} "Go to comment")|{}|[{}]({} "Check on op.gg")|{}\n'
TEXT_EMPTY_REGIONS = '''
Unfortunately, there are no clubs available for following regions at the moment
: {empty_regions}\n'''
TEXT_BOTTOM = '''\n\n----------\n
### How to **join a club**
+ Find a club that you want to join
+ Add club owner to friends in League of legends
+ Ask club owner for a club invite
### How to **add your club** to a list
Write a new comment that looks like an example below:
club
REGION CODE ({regions})
YOUR IGN (used by people to send a friend request)
CLUB NAME
CLUB TAG (leave '$' if you dont have one yet)
You can always update club information by updating your comment, as well as you
can delete your comment when your club is full.
\n--------\n
At the moment, the bot is set to update the post every {redditRevision}
seconds. The "Last time online" column updates every {leagueRevision} minutes.
The bot is currently hosted on Heroku and should be working 24/7!
'''
|
SECOND = 1
MINUTE = 60*SECOND
HOUR = 60*MINUTE
DAY = 24*HOUR
WEEK = 7*DAY
REDDIT_UPDATE_TIMEOUT = MINUTE
LEAGUE_UPDATE_TIMEOUT = HOUR
TEXT_HEAD = '''
Hello, /r/{subreddit}! This post updates automatically to help you find
desired club or fill your club with some folks! You can find additional info
at the end of the post.\n\n--------\n
'''
TEXT_REGION_TABLE = '''
\n# Available {region} clubs:
Club name|Club tag|Owner IGN|Last time online
:--------|:-------|:--------|:---------------
'''
TEXT_CLUB_ROW = '[{}]({} "Go to comment")|{}|[{}]({} "Check on op.gg")|{}\n'
TEXT_EMPTY_REGIONS = '''
Unfortunately, there are no clubs available for following regions at the moment
: {empty_regions}\n'''
TEXT_BOTTOM = '''\n\n----------\n
## How to **join a club**
+ Find a club that you want to join
+ Add club owner to friends in League of legends
+ Ask club owner for a club invite
## How to **add your club** to a list
Write a new comment that looks like an example below:
club
REGION CODE ({regions})
YOUR IGN (used by people to send a friend request)
CLUB NAME
CLUB TAG (leave '$' if you dont have one yet)
You can always update club information by updating your comment, as well as you
can delete your comment when your club is full.
\n--------\n
At the moment, the bot is set to update the post every {redditRevision}
seconds. The "Last time online" column updates every {leagueRevision} minutes.
The bot is currently hosted on Heroku and should be working 24/7!
'''
|
mit
|
Python
|
b4e765a674b5ecaa10d233cd7dca8696bc381589
|
Add default tensorboard docker image
|
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
|
polyaxon/polyaxon/config_settings/spawner.py
|
polyaxon/polyaxon/config_settings/spawner.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from polyaxon.utils import config
# Roles
ROLE_LABELS_WORKER = config.get_string('POLYAXON_ROLE_LABELS_WORKER')
ROLE_LABELS_DASHBOARD = config.get_string('POLYAXON_ROLE_LABELS_DASHBOARD')
ROLE_LABELS_LOG = config.get_string('POLYAXON_ROLE_LABELS_LOG')
ROLE_LABELS_API = config.get_string('POLYAXON_ROLE_LABELS_API')
K8S_SERVICE_ACCOUNT_NAME = config.get_string('POLYAXON_K8S_SERVICE_ACCOUNT_NAME')
K8S_RBAC_ENABLED = config.get_boolean('POLYAXON_K8S_RBAC_ENABLED')
K8S_INGRESS_ENABLED = config.get_boolean('POLYAXON_K8S_INGRESS_ENABLED')
K8S_INGRESS_ANNOTATIONS = config.get_string(
'POLYAXON_K8S_INGRESS_ANNOTATIONS', is_optional=True)
TENSORBOARD_PORT_RANGE = [5500, 6500]
# Types
TYPE_LABELS_CORE = config.get_string('POLYAXON_TYPE_LABELS_CORE')
TYPE_LABELS_EXPERIMENT = config.get_string('POLYAXON_TYPE_LABELS_EXPERIMENT')
# Selectors
NODE_SELECTORS_EXPERIMENTS = config.get_string(
'POLYAXON_NODE_SELECTORS_EXPERIMENTS', is_optional=True)
JOB_CONTAINER_NAME = config.get_string('POLYAXON_JOB_CONTAINER_NAME')
JOB_SIDECAR_CONTAINER_NAME = config.get_string('POLYAXON_JOB_SIDECAR_CONTAINER_NAME')
JOB_DOCKER_NAME = config.get_string(
'POLYAXON_JOB_DOCKER_NAME', is_optional=True) or 'polyaxon/polyaxon-lib'
JOB_SIDECAR_DOCKER_IMAGE = config.get_string(
'POLYAXON_JOB_SIDECAR_DOCKER_IMAGE', is_optional=True) or 'polyaxon/polyaxon-sidecar'
TENSORBOARD_DOCKER_IMAGE = config.get_string(
'POLYAXON_TENSORBOARD_DOCKER_IMAGE', is_optional=True) or 'tensorflow/tensorflow:1.4.1-py3'
JOB_SIDECAR_LOG_SLEEP_INTERVAL = config.get_int(
'POLYAXON_JOB_SIDECAR_LOG_SLEEP_INTERVAL', is_optional=True)
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from polyaxon.utils import config
# Roles
ROLE_LABELS_WORKER = config.get_string('POLYAXON_ROLE_LABELS_WORKER')
ROLE_LABELS_DASHBOARD = config.get_string('POLYAXON_ROLE_LABELS_DASHBOARD')
ROLE_LABELS_LOG = config.get_string('POLYAXON_ROLE_LABELS_LOG')
ROLE_LABELS_API = config.get_string('POLYAXON_ROLE_LABELS_API')
K8S_SERVICE_ACCOUNT_NAME = config.get_string('POLYAXON_K8S_SERVICE_ACCOUNT_NAME')
K8S_RBAC_ENABLED = config.get_boolean('POLYAXON_K8S_RBAC_ENABLED')
K8S_INGRESS_ENABLED = config.get_boolean('POLYAXON_K8S_INGRESS_ENABLED')
K8S_INGRESS_ANNOTATIONS = config.get_string('POLYAXON_K8S_INGRESS_ANNOTATIONS', is_optional=True)
TENSORBOARD_PORT_RANGE = [5500, 6500]
# Types
TYPE_LABELS_CORE = config.get_string('POLYAXON_TYPE_LABELS_CORE')
TYPE_LABELS_EXPERIMENT = config.get_string('POLYAXON_TYPE_LABELS_EXPERIMENT')
# Selectors
NODE_SELECTORS_EXPERIMENTS = config.get_string('POLYAXON_NODE_SELECTORS_EXPERIMENTS',
is_optional=True)
JOB_CONTAINER_NAME = config.get_string('POLYAXON_JOB_CONTAINER_NAME')
JOB_SIDECAR_CONTAINER_NAME = config.get_string('POLYAXON_JOB_SIDECAR_CONTAINER_NAME')
JOB_DOCKER_NAME = config.get_string('POLYAXON_JOB_DOCKER_NAME',
is_optional=True) or 'polyaxon/polyaxon-lib'
JOB_SIDECAR_DOCKER_IMAGE = config.get_string('POLYAXON_JOB_SIDECAR_DOCKER_IMAGE',
is_optional=True) or 'polyaxon/polyaxon-sidecar'
JOB_SIDECAR_LOG_SLEEP_INTERVAL = config.get_int('POLYAXON_JOB_SIDECAR_LOG_SLEEP_INTERVAL',
is_optional=True)
|
apache-2.0
|
Python
|
db1653c551f71092a7eca96e6a4d1c96ef17e06a
|
Remove unused attributes; also, empty responses after it's flushed.
|
rapidsms/rapidsms-legacy,rapidsms/rapidsms-legacy,rapidsms/rapidsms-legacy
|
lib/rapidsms/message.py
|
lib/rapidsms/message.py
|
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
import copy
class Message(object):
def __init__(self, backend, caller=None, text=None):
self._backend = backend
self.caller = caller
self.text = text
self.responses = []
def __unicode__(self):
return self.text
@property
def backend(self):
# backend is read-only, since it's an
# immutable property of this object
return self._backend
def send(self):
"""Send this message via self.backend, returning
True if the message was sent successfully."""
return self.backend.router.outgoing(self)
def flush_responses (self):
for response in self.responses:
response.send()
self.responses.remove(response)
def respond(self, text):
"""Send the given text back to the original caller of this
message on the same route that it came in on"""
if self.caller:
response = copy.copy(self)
response.text = text
self.responses.append(response)
return True
else:
return False
|
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
import copy
class Message(object):
def __init__(self, backend, caller=None, text=None):
self._backend = backend
self.caller = caller
self.text = text
# initialize some empty attributes
self.received = None
self.sent = None
self.responses = []
def __unicode__(self):
return self.text
@property
def backend(self):
# backend is read-only, since it's an
# immutable property of this object
return self._backend
def send(self):
"""Send this message via self.backend, returning
True if the message was sent successfully."""
return self.backend.router.outgoing(self)
def flush_responses (self):
for response in self.responses:
response.send()
def respond(self, text):
"""Send the given text back to the original caller of this
message on the same route that it came in on"""
if self.caller:
response = copy.copy(self)
response.text = text
self.responses.append(response)
return True
else:
return False
|
bsd-3-clause
|
Python
|
2b299ea1a62c61dbabbe7e27e75d7a566c138e9e
|
remove onchange and set compute right. (#1327)
|
avanzosc/odoo-addons,avanzosc/odoo-addons
|
product_vat_price/models/product_template.py
|
product_vat_price/models/product_template.py
|
# Copyright 2021 Berezi - AvanzOSC
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
from odoo import models, fields, api
class ProductTemplate(models.Model):
_inherit = 'product.template'
vat_price = fields.Float(string='VAT price', compute='_compute_vat_price')
@api.depends("list_price", "taxes_id")
def _compute_vat_price(self):
for product in self:
if product.list_price:
product.vat_price = product.list_price
for taxes in product.taxes_id:
vat = product.list_price * taxes.amount / 100
product.vat_price += vat
|
# Copyright 2021 Berezi - AvanzOSC
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
from odoo import models, fields, api
class ProductTemplate(models.Model):
_inherit = 'product.template'
vat_price = fields.Float(string='VAT price', compute='_compute_vat_price')
@api.depends("list_price", "taxes_id")
def _compute_vat_price(self):
for product in self:
if product.list_price:
self.vat_price = product.list_price
for taxes in product.taxes_id:
vat = product.list_price * taxes.amount / 100
self.vat_price += vat
@api.onchange("list_price", "taxes_id")
def onchange_vat_price(self):
if self.list_price and self.taxes_id:
self._compute_vat_price()
|
agpl-3.0
|
Python
|
8fe0ec8f85e933d9aa64ff619937aba52945ea6e
|
add LOGGING_LEVEL settings
|
reorx/torext,reorx/torext
|
torext/base_settings.py
|
torext/base_settings.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# variables in this module are essential basements of settings,
# the priority sequence of base_settings.py, settings.py(in project), and cmd options is::
# 1. commandline arguments
# 2. settings.py
# 3. base_settings.py
#############
# essential #
#############
PROJECT = None
LOCALE = 'en_US'
PROCESSES = 1
PORT = 8000
DEBUG = True
TESTING = False
LOGGERS = {
'': {
'level': 'INFO',
'fmt': '%(color)s[%(fixed_levelname)s %(asctime)s %(module)s:%(lineno)d]%(end_color)s %(message)s',
#'fmt': '[%(fixed_levelname)s %(asctime)s %(module)s:%(lineno)d] %(message)s',
'datefmt': '%Y-%m-%d %H:%M:%S',
}
}
LOGGING_LEVEL = None
LOGGING_IGNORE_URLS = [
'/favicon.ico',
]
LOG_REQUEST = False
LOG_RESPONSE = False
LOG_RESPONSE_LINE_LIMIT = 0
TIME_ZONE = 'Asia/Shanghai'
STATIC_PATH = 'static'
TEMPLATE_PATH = 'template'
TEMPLATE_ENGINE = 'tornado'
############
# optional #
############
# COOKIE_SECRET = 'P0UTa5iuRaaVlV8QZF2uVR7hHwTOSkQhg2Fol18OKwc='
# SECURE_COOKIE = ''
# SECURE_HEADER = ''
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# variables in this module are essential basements of settings,
# the priority sequence of base_settings.py, settings.py(in project), and cmd options is::
# 1. commandline arguments
# 2. settings.py
# 3. base_settings.py
#############
# essential #
#############
PROJECT = None
LOCALE = 'en_US'
PROCESSES = 1
PORT = 8000
DEBUG = True
TESTING = False
LOGGERS = {
'': {
'level': 'INFO',
'fmt': '%(color)s[%(fixed_levelname)s %(asctime)s %(module)s:%(lineno)d]%(end_color)s %(message)s',
#'fmt': '[%(fixed_levelname)s %(asctime)s %(module)s:%(lineno)d] %(message)s',
'datefmt': '%Y-%m-%d %H:%M:%S',
}
}
LOG_REQUEST = False
LOG_RESPONSE = False
LOG_RESPONSE_LINE_LIMIT = 0
TIME_ZONE = 'Asia/Shanghai'
STATIC_PATH = 'static'
TEMPLATE_PATH = 'template'
TEMPLATE_ENGINE = 'tornado'
LOGGING_IGNORE_URLS = [
'/favicon.ico',
]
############
# optional #
############
# COOKIE_SECRET = 'P0UTa5iuRaaVlV8QZF2uVR7hHwTOSkQhg2Fol18OKwc='
# SECURE_COOKIE = ''
# SECURE_HEADER = ''
|
mit
|
Python
|
388653366ee4db58ed8ce8a9c8ab071593b9fc53
|
Use correct default SSH port
|
GaretJax/lancet,GaretJax/lancet
|
lancet/contrib/dploi.py
|
lancet/contrib/dploi.py
|
from shlex import quote
import click
@click.command()
@click.option('-p', '--print/--exec', 'print_cmd', default=False,
help='Print the command instead of executing it.')
@click.argument('environment')
@click.pass_obj
def ssh(lancet, print_cmd, environment):
"""
SSH into the given environment, based on the dploi configuration.
"""
namespace = {}
with open('deployment.py') as fh:
code = compile(fh.read(), 'deployment.py', 'exec')
exec(code, {}, namespace)
config = namespace['settings'][environment]
host = '{}@{}'.format(config['user'], config['hosts'][0])
cmd = ['ssh', '-p', str(config.get('port', 22)), host]
if print_cmd:
click.echo(' '.join(quote(s) for s in cmd))
else:
lancet.defer_to_shell(*cmd)
|
from shlex import quote
import click
@click.command()
@click.option('-p', '--print/--exec', 'print_cmd', default=False,
help='Print the command instead of executing it.')
@click.argument('environment')
@click.pass_obj
def ssh(lancet, print_cmd, environment):
"""
SSH into the given environment, based on the dploi configuration.
"""
namespace = {}
with open('deployment.py') as fh:
code = compile(fh.read(), 'deployment.py', 'exec')
exec(code, {}, namespace)
config = namespace['settings'][environment]
host = '{}@{}'.format(config['user'], config['hosts'][0])
cmd = ['ssh', '-p', str(config.get('port', 20)), host]
if print_cmd:
click.echo(' '.join(quote(s) for s in cmd))
else:
lancet.defer_to_shell(*cmd)
|
mit
|
Python
|
d859bfde2fb6aca986857d4e0460a65e24ee6029
|
fix remaining tests to reflect new behavior of sign(nan)
|
chadnetzer/numpy-gaurdro,teoliphant/numpy-refactor,illume/numpy3k,teoliphant/numpy-refactor,jasonmccampbell/numpy-refactor-sprint,efiring/numpy-work,efiring/numpy-work,jasonmccampbell/numpy-refactor-sprint,illume/numpy3k,efiring/numpy-work,chadnetzer/numpy-gaurdro,Ademan/NumPy-GSoC,teoliphant/numpy-refactor,Ademan/NumPy-GSoC,efiring/numpy-work,chadnetzer/numpy-gaurdro,jasonmccampbell/numpy-refactor-sprint,illume/numpy3k,teoliphant/numpy-refactor,illume/numpy3k,Ademan/NumPy-GSoC,Ademan/NumPy-GSoC,teoliphant/numpy-refactor,jasonmccampbell/numpy-refactor-sprint,chadnetzer/numpy-gaurdro
|
numpy/lib/tests/test_ufunclike.py
|
numpy/lib/tests/test_ufunclike.py
|
"""
>>> import numpy.core as nx
>>> import numpy.lib.ufunclike as U
Test fix:
>>> a = nx.array([[1.0, 1.1, 1.5, 1.8], [-1.0, -1.1, -1.5, -1.8]])
>>> U.fix(a)
array([[ 1., 1., 1., 1.],
[-1., -1., -1., -1.]])
>>> y = nx.zeros(a.shape, float)
>>> U.fix(a, y)
array([[ 1., 1., 1., 1.],
[-1., -1., -1., -1.]])
>>> y
array([[ 1., 1., 1., 1.],
[-1., -1., -1., -1.]])
Test isposinf, isneginf, sign
>>> a = nx.array([nx.Inf, -nx.Inf, nx.NaN, 0.0, 3.0, -3.0])
>>> U.isposinf(a)
array([ True, False, False, False, False, False], dtype=bool)
>>> U.isneginf(a)
array([False, True, False, False, False, False], dtype=bool)
>>> olderr = nx.seterr(invalid='ignore')
>>> nx.sign(a)
array([ 1., -1., NaN, 0., 1., -1.])
>>> olderr = nx.seterr(**olderr)
Same thing with an output array:
>>> y = nx.zeros(a.shape, bool)
>>> U.isposinf(a, y)
array([ True, False, False, False, False, False], dtype=bool)
>>> y
array([ True, False, False, False, False, False], dtype=bool)
>>> U.isneginf(a, y)
array([False, True, False, False, False, False], dtype=bool)
>>> y
array([False, True, False, False, False, False], dtype=bool)
>>> olderr = nx.seterr(invalid='ignore')
>>> nx.sign(a, y)
array([ True, True, True, False, True, True], dtype=bool)
>>> olderr = nx.seterr(**olderr)
>>> y
array([ True, True, True, False, True, True], dtype=bool)
Now log2:
>>> a = nx.array([4.5, 2.3, 6.5])
>>> U.log2(a)
array([ 2.169925 , 1.20163386, 2.70043972])
>>> 2**_
array([ 4.5, 2.3, 6.5])
>>> y = nx.zeros(a.shape, float)
>>> U.log2(a, y)
array([ 2.169925 , 1.20163386, 2.70043972])
>>> y
array([ 2.169925 , 1.20163386, 2.70043972])
"""
from numpy.testing import *
def test():
return rundocs()
if __name__ == "__main__":
run_module_suite()
|
"""
>>> import numpy.core as nx
>>> import numpy.lib.ufunclike as U
Test fix:
>>> a = nx.array([[1.0, 1.1, 1.5, 1.8], [-1.0, -1.1, -1.5, -1.8]])
>>> U.fix(a)
array([[ 1., 1., 1., 1.],
[-1., -1., -1., -1.]])
>>> y = nx.zeros(a.shape, float)
>>> U.fix(a, y)
array([[ 1., 1., 1., 1.],
[-1., -1., -1., -1.]])
>>> y
array([[ 1., 1., 1., 1.],
[-1., -1., -1., -1.]])
Test isposinf, isneginf, sign
>>> a = nx.array([nx.Inf, -nx.Inf, nx.NaN, 0.0, 3.0, -3.0])
>>> U.isposinf(a)
array([ True, False, False, False, False, False], dtype=bool)
>>> U.isneginf(a)
array([False, True, False, False, False, False], dtype=bool)
>>> olderr = nx.seterr(invalid='ignore')
>>> nx.sign(a)
array([ 1., -1., NaN, 0., 1., -1.])
>>> olderr = nx.seterr(**olderr)
Same thing with an output array:
>>> y = nx.zeros(a.shape, bool)
>>> U.isposinf(a, y)
array([ True, False, False, False, False, False], dtype=bool)
>>> y
array([ True, False, False, False, False, False], dtype=bool)
>>> U.isneginf(a, y)
array([False, True, False, False, False, False], dtype=bool)
>>> y
array([False, True, False, False, False, False], dtype=bool)
>>> olderr = nx.seterr(invalid='ignore')
>>> nx.sign(a, y)
array([ True, True, False, False, True, True], dtype=bool)
>>> olderr = nx.seterr(**olderr)
>>> y
array([ True, True, False, False, True, True], dtype=bool)
Now log2:
>>> a = nx.array([4.5, 2.3, 6.5])
>>> U.log2(a)
array([ 2.169925 , 1.20163386, 2.70043972])
>>> 2**_
array([ 4.5, 2.3, 6.5])
>>> y = nx.zeros(a.shape, float)
>>> U.log2(a, y)
array([ 2.169925 , 1.20163386, 2.70043972])
>>> y
array([ 2.169925 , 1.20163386, 2.70043972])
"""
from numpy.testing import *
def test():
return rundocs()
if __name__ == "__main__":
run_module_suite()
|
bsd-3-clause
|
Python
|
83e2d010ff2c05ddc172ccfa0caf1efe941f3184
|
Update combine module namespace
|
redmatter/combine
|
combine/__init__.py
|
combine/__init__.py
|
# Copyright (c) 2010 John Reese
# Licensed under the MIT license
class CombineError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
from combine.formats import Archive, File
from combine.config import Config
from combine.package import Package
from combine.manifest import Manifest
from combine.diff import Diff
|
# Copyright (c) 2010 John Reese
# Licensed under the MIT license
class CombineError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
from combine.manifest import Manifest
from combine.diff import Diff
|
mit
|
Python
|
9b603d07df9be3cbac32d08d0e17ae5c5f9c76ae
|
Update sphinx_runner to use Sphinx >= 2
|
allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community
|
python/helpers/rest_runners/sphinx_runner.py
|
python/helpers/rest_runners/sphinx_runner.py
|
if __name__ == "__main__":
import sys
try:
import sphinx
except ImportError:
raise NameError("Cannot find sphinx in selected interpreter.")
version = sphinx.version_info
if (version[0] >= 1 and version[1] >= 7) or version[0] >= 2:
from sphinx.cmd import build
build.main(sys.argv[1:])
else:
from sphinx import cmdline
cmdline.main(sys.argv)
|
if __name__ == "__main__":
import sys
try:
import sphinx
except ImportError:
raise NameError("Cannot find sphinx in selected interpreter.")
version = sphinx.version_info
if version[0] >= 1 and version[1] >= 7:
from sphinx.cmd import build
build.main(sys.argv[1:])
else:
from sphinx import cmdline
cmdline.main(sys.argv)
|
apache-2.0
|
Python
|
bc837143d79faca5de0d1919dd63ceb3800c68e3
|
Fix the memory function: remove 'X'.
|
AngelTerrones/Algol,AngelTerrones/Algol
|
Algol/memIO.py
|
Algol/memIO.py
|
#!/usr/bin/env python
# Copyright (c) 2015 Angel Terrones (<angelterrones@gmail.com>)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from myhdl import Signal
from myhdl import modbv
class MemoryOpConstant:
SZ_MT = 3
MT_X = 0
MT_B = 1
MT_H = 2
MT_W = 3
MT_BU = 4
MT_HU = 5
SZ_M = 1
M_X = 0
M_RD = 0
M_WR = 1
class MemPortIO:
def __init__(self):
self.req = MemReq()
self.resp = MemResp()
class MemReq:
def __init__(self):
self.addr = Signal(modbv(0)[32:])
self.data = Signal(modbv(0)[32:])
self.fcn = Signal(modbv(0)[3:])
self.typ = Signal(False)
self.ready = Signal(False)
self.valid = Signal(False)
class MemResp:
def __init__(self):
self.data = Signal(modbv(0)[32:])
self.valid = Signal(False)
# Local Variables:
# flycheck-flake8-maximum-line-length: 120
# flycheck-flake8rc: ".flake8rc"
# End:
|
#!/usr/bin/env python
# Copyright (c) 2015 Angel Terrones (<angelterrones@gmail.com>)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from myhdl import Signal
from myhdl import modbv
class MemoryOpConstant:
SZ_MT = 3
MT_X = 0
MT_B = 1
MT_H = 2
MT_W = 3
MT_BU = 4
MT_HU = 5
SZ_M = 1
M_X = 0
M_XRD = 0
M_XWR = 1
class MemPortIO:
def __init__(self):
self.req = MemReq()
self.resp = MemResp()
class MemReq:
def __init__(self):
self.addr = Signal(modbv(0)[32:])
self.data = Signal(modbv(0)[32:])
self.fcn = Signal(modbv(0)[3:])
self.typ = Signal(False)
self.ready = Signal(False)
self.valid = Signal(False)
class MemResp:
def __init__(self):
self.data = Signal(modbv(0)[32:])
self.valid = Signal(False)
# Local Variables:
# flycheck-flake8-maximum-line-length: 120
# flycheck-flake8rc: ".flake8rc"
# End:
|
mit
|
Python
|
332e4a8a35e065fd5535cab716f4e9009ea0bcd1
|
Set default filter backend for rest framework
|
bjoernricks/trex,bjoernricks/trex
|
trex/settings_global.py
|
trex/settings_global.py
|
# -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
"""
Django settings for trex project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
with open(os.path.join(BASE_DIR, "secret.key"), "r") as secret:
SECRET_KEY = secret.read()
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'trex',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'trex.urls'
WSGI_APPLICATION = 'trex.wsgi.application'
AUTH_USER_MODEL = "trex.User"
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'de-de'
TIME_ZONE = 'Europe/Berlin'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
# Session handling
# As a minimum, session expires when the browser is closed.
# We expect this to be once a day for common users.
# If needed this can be enhanced by a time limit.
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
# REST Framework settings
REST_FRAMEWORK = {
# add django-filters backend
'DEFAULT_FILTER_BACKENDS': ('rest_framework.filters.DjangoFilterBackend',)
}
|
# -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
"""
Django settings for trex project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
with open(os.path.join(BASE_DIR, "secret.key"), "r") as secret:
SECRET_KEY = secret.read()
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'trex',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'trex.urls'
WSGI_APPLICATION = 'trex.wsgi.application'
AUTH_USER_MODEL = "trex.User"
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'de-de'
TIME_ZONE = 'Europe/Berlin'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
# Session handling
# As a minimum, session expires when the browser is closed.
# We expect this to be once a day for common users.
# If needed this can be enhanced by a time limit.
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
|
mit
|
Python
|
e4cff9c25bb768f0068e9e007e919ba52d7efb6f
|
Fix name prefix for host builds (#8767)
|
nestlabs/connectedhomeip,project-chip/connectedhomeip,project-chip/connectedhomeip,nestlabs/connectedhomeip,project-chip/connectedhomeip,nestlabs/connectedhomeip,project-chip/connectedhomeip,nestlabs/connectedhomeip,nestlabs/connectedhomeip,project-chip/connectedhomeip,nestlabs/connectedhomeip,project-chip/connectedhomeip,nestlabs/connectedhomeip
|
scripts/build/builders/host.py
|
scripts/build/builders/host.py
|
# Copyright (c) 2021 Project CHIP Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
from platform import uname, release
from enum import Enum, auto
from .gn import GnBuilder
class HostApp(Enum):
ALL_CLUSTERS = auto()
CHIP_TOOL = auto()
def ExamplePath(self):
if self == HostApp.ALL_CLUSTERS:
return 'all-clusters-app/linux'
elif self == HostApp.CHIP_TOOL:
return 'chip-tool'
else:
raise Exception('Unknown app type: %r' % self)
def BinaryName(self):
if self == HostApp.ALL_CLUSTERS:
return 'chip-all-clusters-app'
elif self == HostApp.CHIP_TOOL:
return 'chip-tool'
else:
raise Exception('Unknown app type: %r' % self)
def ConcretePlatformName():
uname_result = uname()
return '-'.join([uname_result.system.lower(), release(), uname_result.machine])
class HostBuilder(GnBuilder):
def __init__(self, root, runner, output_prefix: str, app: HostApp):
super(HostBuilder, self).__init__(
root=os.path.join(root, 'examples', app.ExamplePath()),
runner=runner,
output_prefix=output_prefix)
self.app_name = app.BinaryName()
self.map_name = self.app_name + '.map'
def outputs(self):
return {
self.app_name: os.path.join(self.output_dir, self.app_name),
self.map_name : os.path.join(self.output_dir, self.map_name)
}
def SetIdentifier(self, platform: str, board: str, app: str):
super(HostBuilder, self).SetIdentifier(ConcretePlatformName(), board, app)
|
# Copyright (c) 2021 Project CHIP Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
from platform import uname, release
from enum import Enum, auto
from .gn import GnBuilder
class HostApp(Enum):
ALL_CLUSTERS = auto()
CHIP_TOOL = auto()
def ExamplePath(self):
if self == HostApp.ALL_CLUSTERS:
return 'all-clusters-app/linux'
elif self == HostApp.CHIP_TOOL:
return 'chip-tool'
else:
raise Exception('Unknown app type: %r' % self)
def BinaryName(self):
if self == HostApp.ALL_CLUSTERS:
return 'all-clusters-app'
elif self == HostApp.CHIP_TOOL:
return 'chip-tool'
else:
raise Exception('Unknown app type: %r' % self)
def ConcretePlatformName():
uname_result = uname()
return '-'.join([uname_result.system.lower(), release(), uname_result.machine])
class HostBuilder(GnBuilder):
def __init__(self, root, runner, output_prefix: str, app: HostApp):
super(HostBuilder, self).__init__(
root=os.path.join(root, 'examples', app.ExamplePath()),
runner=runner,
output_prefix=output_prefix)
self.app_name = app.BinaryName()
self.map_name = self.app_name + '.map'
def outputs(self):
return {
self.app_name: os.path.join(self.output_dir, self.app_name),
self.map_name : os.path.join(self.output_dir, self.map_name)
}
def SetIdentifier(self, platform: str, board: str, app: str):
super(HostBuilder, self).SetIdentifier(ConcretePlatformName(), board, app)
|
apache-2.0
|
Python
|
e13be048b4a1582b3579a7d007f06c5c2b98a664
|
Fix whitespace incoherence, causing pylint errors.
|
ligthyear/quick-check,ligthyear/quick-check
|
codereview/middleware.py
|
codereview/middleware.py
|
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Custom middleware. Some of this may be generally useful."""
import logging
from google.appengine.api import users
from google.appengine.runtime import apiproxy_errors
from google.appengine.runtime import DeadlineExceededError
from django.http import HttpResponse
from django.template import Context, loader
import models
class AddUserToRequestMiddleware(object):
"""Add a user object and a user_is_admin flag to each request."""
def process_request(self, request):
request.user = users.get_current_user()
request.user_is_admin = users.is_current_user_admin()
# Update the cached value of the current user's Account
account = None
if request.user is not None:
account = models.Account.get_account_for_user(request.user)
models.Account.current_user_account = account
class PropagateExceptionMiddleware(object):
"""Catch exceptions, log them and return a friendly error message."""
def _text_requested(self, request):
"""Returns True if a text/plain response is requested."""
# We could use a better heuristics that takes multiple
# media_ranges and quality factors into account. For now we return
# True iff 'text/plain' is the only media range the request
# accepts.
media_ranges = request.META.get('HTTP_ACCEPT', '').split(',')
return len(media_ranges) == 1 and media_ranges[0] == 'text/plain'
def process_exception(self, request, exception):
if isinstance(exception, apiproxy_errors.CapabilityDisabledError):
msg = ('Rietveld: App Engine is undergoing maintenance. '
'Please try again in a while.')
status = 503
elif isinstance(exception, (DeadlineExceededError, MemoryError)):
msg = ('Rietveld is too hungry at the moment.'
'Please try again in a while.')
status = 503
else:
msg = 'Unhandled exception.'
status = 500
logging.exception('%s: ' % exception.__class__.__name__)
technical = '%s [%s]' % (exception, exception.__class__.__name__)
if self._text_requested(request):
content = '%s\n\n%s\n' % (msg, technical)
content_type = 'text/plain'
else:
tpl = loader.get_template('exception.html')
ctx = Context({'msg': msg, 'technical': technical})
content = tpl.render(ctx)
content_type = 'text/html'
return HttpResponse(content, status=status, content_type=content_type)
|
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Custom middleware. Some of this may be generally useful."""
import logging
from google.appengine.api import users
from google.appengine.runtime import apiproxy_errors
from google.appengine.runtime import DeadlineExceededError
from django.http import HttpResponse
from django.template import Context, loader
import models
class AddUserToRequestMiddleware(object):
"""Add a user object and a user_is_admin flag to each request."""
def process_request(self, request):
request.user = users.get_current_user()
request.user_is_admin = users.is_current_user_admin()
# Update the cached value of the current user's Account
account = None
if request.user is not None:
account = models.Account.get_account_for_user(request.user)
models.Account.current_user_account = account
class PropagateExceptionMiddleware(object):
"""Catch exceptions, log them and return a friendly error message."""
def _text_requested(self, request):
"""Returns True if a text/plain response is requested."""
# We could use a better heuristics that takes multiple
# media_ranges and quality factors into account. For now we return
# True iff 'text/plain' is the only media range the request
# accepts.
media_ranges = request.META.get('HTTP_ACCEPT', '').split(',')
return len(media_ranges) == 1 and media_ranges[0] == 'text/plain'
def process_exception(self, request, exception):
if isinstance(exception, apiproxy_errors.CapabilityDisabledError):
msg = ('Rietveld: App Engine is undergoing maintenance. '
'Please try again in a while.')
status = 503
elif isinstance(exception, (DeadlineExceededError, MemoryError)):
msg = ('Rietveld is too hungry at the moment.'
'Please try again in a while.')
status = 503
else:
msg = 'Unhandled exception.'
status = 500
logging.exception('%s: ' % exception.__class__.__name__)
technical = '%s [%s]' % (exception, exception.__class__.__name__)
if self._text_requested(request):
content = '%s\n\n%s\n' % (msg, technical)
content_type = 'text/plain'
else:
tpl = loader.get_template('exception.html')
ctx = Context({'msg': msg, 'technical': technical})
content = tpl.render(ctx)
content_type = 'text/html'
return HttpResponse(content, status=status, content_type=content_type)
|
apache-2.0
|
Python
|
ed38e4b98b3a5a06761aecb6a977958a2fdd892e
|
Increment version number.
|
rinman24/ucsd_ch
|
coimbra_chamber/__version__.py
|
coimbra_chamber/__version__.py
|
VERSION = (0, 0, 4)
__version__ = '.'.join(map(str, VERSION))
|
VERSION = (0, 0, 3)
__version__ = '.'.join(map(str, VERSION))
|
mit
|
Python
|
7d34b407a35fe917e919fc01b3a6c736a7bdc372
|
Remove admin prefix from url
|
rosti-cz/django-emailsupport
|
helpdesk/urls.py
|
helpdesk/urls.py
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'helpdesk.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'', include(admin.site.urls)),
)
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'helpdesk.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
)
|
mit
|
Python
|
e2018c1c344e9482a99a3d187d740b32c0fdd7ec
|
Update server.py
|
mikelambson/tcid,mikelambson/tcid,mikelambson/tcid,mikelambson/tcid
|
src/server.py
|
src/server.py
|
#Import flask libraries
import json, re, os, datetime, logging;#Import general libraries
from flask import Flask, jsonify, request, render_template, send_from_directory, redirect;
from flask_socketio import SocketIO, send, emit, join_room, leave_room, close_room;
from flask_mail import Mail, Message;
from flask_socketio import join_room;
from flask_sqlalchemy import SQLAlchemy;
from sqlalchemy import create_engine;#Engine handler
#from PIL import Image;
#from logging.handlers import RotatingFileHandler;
#from logging import Formatter;
#import environment, recorder;#Import custom libraries
FlaskServer = Flask(__name__);#Dynamic web server
Interactive = SocketIO(FlaskServer);#Socket handler
#FlaskServer.config.from_object(os.getenv("SERVER_ENV") if os.getenv("SERVER_ENV") else "environment.Testing");
Mailer = Mail(FlaskServer);#Mail handler
DB = SQLAlchemy(FlaskServer);#Sqlalchemy database handler
import models;
engine = create_engine('mysql://tcid:tcid@localhost/tcid?charset=utf8mb4_unicode_520_ci')
@FlaskServer.route('/')
def index():
return render_template('index.html');
@FlaskServer.route('/assets/<path:resource>')
def serveStaticResource(resource):# Serves misc. resources: css, js.
return send_from_directory('assets/', resource);
if __name__ == "__main__":
FlaskServer.run(host="0.0.0.0", port=int("3000"), debug=True);
|
#Import flask libraries
import json, re, os, datetime, logging;#Import general libraries
from flask import Flask, jsonify, request, render_template, send_from_directory, redirect;
from flask_socketio import SocketIO, send, emit, join_room, leave_room, close_room;
from flask_mail import Mail, Message;
from flask_socketio import join_room;
from flask_sqlalchemy import SQLAlchemy;
from sqlalchemy import create_engine;#Engine handler
#from PIL import Image;
#from logging.handlers import RotatingFileHandler;
#from logging import Formatter;
#import environment, recorder;#Import custom libraries
FlaskServer = Flask(__name__);#Dynamic web server
Interactive = SocketIO(FlaskServer);#Socket handler
#FlaskServer.config.from_object(os.getenv("SERVER_ENV") if os.getenv("SERVER_ENV") else "environment.Testing");
Mailer = Mail(FlaskServer);#Mail handler
DB = SQLAlchemy(FlaskServer);#Sqlalchemy database handler
import models;
engine = create_engine('mysql://tcid:tcid@localhost/tcid?charset=utf8mb4_unicode_520_ci')
@FlaskServer.route('/')
def index():
return render_template('index.html');
@FlaskServer.route('/assets/<path:resource>')
def serveStaticResource(resource):# Serves misc. resources: css, js.
return send_from_directory('assets/', resource);
@FlaskServer.route('/templates/<path:app>')
def serveAppResource(app):
return send_from_directory('templates/', templates);
if __name__ == "__main__":
FlaskServer.run(host="0.0.0.0", port=int("3000"), debug=True);
|
bsd-3-clause
|
Python
|
566f30b14f018b66fe800cdb56dfb3e52b7c15c9
|
Update ipc_lista1.13.py
|
any1m1c/ipc20161
|
lista1/ipc_lista1.13.py
|
lista1/ipc_lista1.13.py
|
#ipc_lista1.13
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Tendo como dados de entrada a altura e o sexo de uma pessoa, constru um algoritmo que calcule seu peso ideal, utilizando as seguintes fórmulas:
#Para homens: (72.7*h) - 58
#Para mulheres: (62.1*h) - 44.7 (h = altura)
#Peça o peso da pessoa e informe se ela está dentro, acima ou abaixo do peso.
h = input("Entre com sua altura: ")
sexo = str(raw_input(Sexo M ou F: "))
peso - input("Qual seu peso: ")
if ("F" == sexo):
resultado = (62.1*h) - 44.7
else:
resultado = (72.7*h) - 58
if(peso > resultado):
print "Você está acima do peso, seu peso ideal e %.1f kg" % (resultado)
else:
print "Voce está abaxio do peso, seu peso ideal e %.1f kg" %
|
#ipc_lista1.13
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Tendo como dados de entrada a altura e o sexo de uma pessoa, constru um algoritmo que calcule seu peso ideal, utilizando as seguintes fórmulas:
#Para homens: (72.7*h) - 58
#Para mulheres: (62.1*h) - 44.7 (h = altura)
#Peça o peso da pessoa e informe se ela está dentro, acima ou abaixo do peso.
h = input("Entre com sua altura: ")
sexo = str(raw_input(Sexo M ou F: "))
peso - input("Qual seu peso: ")
if ("F" == sexo):
resultado = (62.1*h) - 44.7
else:
resultado = (72.7*h) - 58
if(peso > resultado):
print "Você está acima do peso, seu peso ideal e %.1f kg" % (resultado)
else:
print "Voce está abaxio do peso,
|
apache-2.0
|
Python
|
801d2847631daa21325cfbb49e5315e903fcbeb1
|
Update ipc_lista1.14.py
|
any1m1c/ipc20161
|
lista1/ipc_lista1.14.py
|
lista1/ipc_lista1.14.py
|
#ipc_lista1.14
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#
#João Papo-de-Pescador, homem de bem, comprou um microcomputador para controlar o rendimento diário de seu trabalho. Toda vez que ele traz um peso de peixes maior que o estabelecido pelo regulamento de pesca do estado de São Paulo (50 quilos) deve pagar uma multa de R$ 4,00 por quilo excedente. João precisa que você faça um programa que leia a variável peso (peso de peixes) e verifique se há excesso. Se houver, gravar na variável excesso e na variável multa o valor da multa que João deverá pagar. Caso contrário mostrar tais variáveis com o conteúdo ZERO.
#Controle do rendimentoi diário de João
if peso > 50:
excesso = peso - 50
multa =
print
|
#ipc_lista1.14
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#
#João Papo-de-Pescador, homem de bem, comprou um microcomputador para controlar o rendimento diário de seu trabalho. Toda vez que ele traz um peso de peixes maior que o estabelecido pelo regulamento de pesca do estado de São Paulo (50 quilos) deve pagar uma multa de R$ 4,00 por quilo excedente. João precisa que você faça um programa que leia a variável peso (peso de peixes) e verifique se há excesso. Se houver, gravar na variável excesso e na variável multa o valor da multa que João deverá pagar. Caso contrário mostrar tais variáveis com o conteúdo ZERO.
#Controle do rendimentoi diário de João
if peso > 50:
excesso
multa =
print
|
apache-2.0
|
Python
|
cd2b1d7b062d292182df1dde57637878cc5b3cb6
|
Update ipc_lista2.01.py
|
any1m1c/ipc20161
|
lista2/ipc_lista2.01.py
|
lista2/ipc_lista2.01.py
|
#ipc_lista2.1
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um programa que peça dois números e imprima o maior deles.
num1 = float(input("Informe um número: "
|
#ipc_lista2.1
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um programa que peça dois números e imprima o maior deles.
num1 = float(input("Informe um número
|
apache-2.0
|
Python
|
d7091b43fd483de9f6993a070698f24067bebbba
|
Update ipc_lista4.08.py
|
any1m1c/ipc20161
|
lista4/ipc_lista4.08.py
|
lista4/ipc_lista4.08.py
|
"""
lista 4 questao 8:
Faça um Programa que peça a idade e a altura de 5 pessoas,
armazene cada informação no seu respectivo vetor.
Imprima a idade e a altura na ordem inversa a ordem lida.
"""
# EQUIPE 2
#ANA BEATRIZ FROTA - 1615310027
#
#
#
#
#
#Luiz Gustavo Rocha Melo - 1615310015
altura = [] #vetor para altura
alturainv = [] #vetor para a altura na ordem inversa
idade = [] #vetor para idade
idadeinv = [] #vetor para idade na ordem inversa
v = 5 #variável para o indice
c1 = #contador
while (c1 < v):
x = int(input("A idade da pessoa: ")) # X RECEBE O VALOR DA IDADE
idade.append(x) #VETOR RECEBE DO VALOR DE X
y = float(input("A altura da pessoa: ")) # Y RECEBE O VALOR DA ALTURA/
altura.append(y) # VETOR ALTURA RECEBE O VALOR DE Y
c1 += 1 # CONTADOR MAIS 1
while (v > 0):
v -= 1
w = idade[v]
z = altura [v]
idadeinv.append(w)
alturainv.append(z)
print("A ordem inversa da idade",idadeinv)
print("A ordem inversa da altura",alturainv)
|
"""
lista 4 questao 8:
Faça um Programa que peça a idade e a altura de 5 pessoas,
armazene cada informação no seu respectivo vetor.
Imprima a idade e a altura na ordem inversa a ordem lida.
"""
# EQUIPE 2
#ANA BEATRIZ FROTA - 1615310027
#
#
#Luiz Gustavo Rocha Melo - 1615310015
altura = [] #vetor para altura
alturainv = [] #vetor para a altura na ordem inversa
idade = [] #vetor para idade
idadeinv = [] #vetor para idade na ordem inversa
v = 5 #variável para o indice
c1 = #contador
while (c1 < v):
x = int(input("A idade da pessoa: "))
idade.append(x)
y = float(input("A altura da pessoa: "))
altura.append(y)
c1 += 1
while (v > 0):
v -= 1
w = idade[v]
z = altura [v]
idadeinv.append(w)
alturainv.append(z)
print("A ordem inversa da idade",idadeinv)
print("A ordem inversa da altura",alturainv)
|
apache-2.0
|
Python
|
90a9cee8349ccc9ec024b25f17f7d29f75c70524
|
Bump version number
|
TheKysek/MiNode,TheKysek/MiNode
|
src/shared.py
|
src/shared.py
|
# -*- coding: utf-8 -*-
import logging
import os
import queue
import threading
listening_port = 8444
send_outgoing_connections = True
data_directory = 'minode_data/'
source_directory = os.path.dirname(os.path.realpath(__file__))
log_level = logging.DEBUG
magic_bytes = b'\xe9\xbe\xb4\xd9'
protocol_version = 3
services = 3 # NODE_NETWORK, NODE_SSL
stream = 1
nonce = os.urandom(8)
user_agent = b'MiNode-v0.2.0'
timeout = 600
header_length = 24
nonce_trials_per_byte = 1000
payload_length_extra_bytes = 1000
shutting_down = False
vector_advertise_queue = queue.Queue()
address_advertise_queue = queue.Queue()
connections = set()
connections_lock = threading.Lock()
hosts = set()
core_nodes = set()
node_pool = set()
unchecked_node_pool = set()
outgoing_connections = 8
objects = {}
objects_lock = threading.Lock()
|
# -*- coding: utf-8 -*-
import logging
import os
import queue
import threading
listening_port = 8444
send_outgoing_connections = True
data_directory = 'minode_data/'
source_directory = os.path.dirname(os.path.realpath(__file__))
log_level = logging.DEBUG
magic_bytes = b'\xe9\xbe\xb4\xd9'
protocol_version = 3
services = 3 # NODE_NETWORK, NODE_SSL
stream = 1
nonce = os.urandom(8)
user_agent = b'MiNode-v0.1.0'
timeout = 600
header_length = 24
nonce_trials_per_byte = 1000
payload_length_extra_bytes = 1000
shutting_down = False
vector_advertise_queue = queue.Queue()
address_advertise_queue = queue.Queue()
connections = set()
connections_lock = threading.Lock()
hosts = set()
core_nodes = set()
node_pool = set()
unchecked_node_pool = set()
outgoing_connections = 8
objects = {}
objects_lock = threading.Lock()
|
mit
|
Python
|
d2ac7fdc28d3aeede4021fad6b9f51a8d79fe0a9
|
add pre-save check
|
mylokin/mongoext
|
mongoext/models.py
|
mongoext/models.py
|
from __future__ import absolute_import
import mongoext.collection
import mongoext.fields
class MetaModel(type):
def __new__(cls, name, bases, attrs):
fields = {}
for base in bases:
for attr, obj in vars(base).iteritems():
if issubclass(type(obj), mongoext.fields.Field):
fields[attr] = obj
for attr, obj in attrs.iteritems():
if issubclass(type(obj), mongoext.fields.Field):
fields[attr] = obj
attrs['FIELDS'] = fields
return super(MetaModel, cls).__new__(cls, name, bases, attrs)
def __init__(cls, name, bases, attrs):
for attr, obj in vars(cls).iteritems():
if issubclass(type(obj), mongoext.collection.Collection):
obj.model = cls
super(MetaModel, cls).__init__(name, bases, attrs)
class Model(object):
__metaclass__ = MetaModel
FIELDS = None
_id = mongoext.fields.Field()
def __init__(self, **kw):
for name, obj in self.FIELDS.iteritems():
if name in kw:
setattr(self, name, obj(kw[name]))
else:
setattr(self, name, None)
def save(self):
self.__init__(**vars(self))
def __repr__(self):
return '<{}: {}>'.format(type(self).__name__, self._id)
|
from __future__ import absolute_import
import mongoext.collection
import mongoext.fields
class MetaModel(type):
def __new__(cls, name, bases, attrs):
fields = {}
for base in bases:
for attr, obj in vars(base).iteritems():
if issubclass(type(obj), mongoext.fields.Field):
fields[attr] = obj
for attr, obj in attrs.iteritems():
if issubclass(type(obj), mongoext.fields.Field):
fields[attr] = obj
attrs['FIELDS'] = fields
return super(MetaModel, cls).__new__(cls, name, bases, attrs)
def __init__(cls, name, bases, attrs):
for attr, obj in vars(cls).iteritems():
if issubclass(type(obj), mongoext.collection.Collection):
obj.model = cls
super(MetaModel, cls).__init__(name, bases, attrs)
class Model(object):
__metaclass__ = MetaModel
FIELDS = None
_id = mongoext.fields.Field()
def __init__(self, **kw):
for name, obj in self.FIELDS.iteritems():
if name in kw:
setattr(self, name, obj(kw[name]))
else:
setattr(self, name, None)
def __repr__(self):
return '<{}: {}>'.format(type(self).__name__, self._id)
|
mit
|
Python
|
c97abd1aca254b1ede05ef33ecefa8402fdcb0ac
|
add message for notice types creation
|
SoPR/horas,SoPR/horas,SoPR/horas,SoPR/horas
|
apps/core/management/commands/create_notice_types.py
|
apps/core/management/commands/create_notice_types.py
|
from django.core.management.base import BaseCommand
from django.utils.translation import ugettext_noop as _
from django.db.models import signals
from notification import models as notification
class Command(BaseCommand):
def handle(self, *args, **options):
notification.create_notice_type("create_meeting_slot", _("Meeting slot created"), _("your new slot is ready"))
notification.create_notice_type("reserved_meeting_slot", _("Meeting has been accepted"), _("your meeting has accepted"))
notification.create_notice_type("cancel_meeting", _("Meeting cancelled"), _("your meeting has been cancelled"))
notification.create_notice_type("pre_meeting_reminder", _("Your upcoming meeting"), _("your meetings starts in 24 hours"))
notification.create_notice_type("post_meeting_feedback_request", _("Let us know"), _("how did your meeting go?"))
self.stdout.write('--> Created notice types')
|
from django.core.management.base import BaseCommand
from django.utils.translation import ugettext_noop as _
from django.db.models import signals
from notification import models as notification
class Command(BaseCommand):
def handle(self, *args, **options):
notification.create_notice_type("create_meeting_slot", _("Meeting slot created"), _("your new slot is ready"))
notification.create_notice_type("reserved_meeting_slot", _("Meeting has been accepted"), _("your meeting has accepted"))
notification.create_notice_type("cancel_meeting", _("Meeting cancelled"), _("your meeting has been cancelled"))
notification.create_notice_type("pre_meeting_reminder", _("Your upcoming meeting"), _("your meetings starts in 24 hours"))
notification.create_notice_type("post_meeting_feedback_request", _("Let us know"), _("how did your meeting go?"))
|
mit
|
Python
|
9befb6021a55cdf584c39a3f5e9fa3191a415a50
|
Fix code health issues reported by Landscape.io
|
lukeyeager/compare-versions,lukeyeager/compare-versions
|
compare_versions/core.py
|
compare_versions/core.py
|
from . import schemes
VALID_COMPARISONS=['eq','ne','gt','lt','ge','le']
def is_valid(version):
try:
schemes.schemes['semver'](version)
except schemes.InvalidVersionError:
return False
return True
def verify_list(versions, comparison='lt', scheme='semver'):
"""
Verify that a list of versions all match comparison
Returns True if the versions are in order
Arguments:
versions -- a list of version strings
comparison -- the comparison to evaluate on the list
scheme -- the versioning scheme to use
"""
if len(versions) < 2:
raise ValueError('You must provide at least two versions to compare')
if comparison not in VALID_COMPARISONS:
raise ValueError('Invalid comparison "%s" - options are %s' % (comparison, '/'.join(c for c in core.VALID_COMPARISONS)))
if scheme not in schemes.schemes:
raise ValueError('Invalid scheme "%s" - options are %s' % (scheme, '/'.join(s for s in schemes.schemes)))
prev = schemes.schemes[scheme](versions[0])
for curr in versions[1:]:
curr = schemes.schemes[scheme](curr)
if comparison == 'eq':
res = prev == curr
elif comparison == 'ne':
res = prev != curr
elif comparison == 'gt':
res = prev > curr
elif comparison == 'lt':
res = prev < curr
elif comparison == 'ge':
res = prev >= curr
elif comparison == 'le':
res = prev <= curr
if not res:
print('ERROR: %s %s %s' % (prev, comparison_symbol(prev, curr), curr))
return False
prev = curr
return True
def comparison_symbol(v1, v2):
"""
Returns a character representation of the relationship between two objects
"""
if v1 == v2:
return '=='
elif v1 > v2:
return '>'
elif v1 < v2:
return '<'
else:
raise RuntimeError('Could not compare "%s" and "%s"' % (v1, v2))
|
from . import schemes
VALID_COMPARISONS=['eq','ne','gt','lt','ge','le']
def is_valid(version):
try:
schemes.schemes['semver'](version)
except schemes.InvalidVersionError:
return False
return True
def verify_list(versions, comparison='lt', scheme='semver'):
"""
Verify that a list of versions all match comparison
Returns True if the versions are in order
Arguments:
versions -- a list of version strings
comparison -- the comparison to evaluate on the list
scheme -- the versioning scheme to use
"""
if len(versions) < 2:
raise ValueError('You must provide at least two versions to compare')
if comparison not in VALID_COMPARISONS:
raise ValueError('Invalid comparison "%s" - options are %s' % (args.comparison, '/'.join(c for c in core.VALID_COMPARISONS)))
if scheme not in schemes.schemes:
raise ValueError('Invalid scheme "%s" - options are %s' % (args.scheme, '/'.join(s for s in schemes.schemes)))
prev = schemes.schemes[scheme](versions[0])
for curr in versions[1:]:
curr = schemes.schemes[scheme](curr)
if comparison == 'eq':
res = prev == curr
elif comparison == 'ne':
res = prev != curr
elif comparison == 'gt':
res = prev > curr
elif comparison == 'lt':
res = prev < curr
elif comparison == 'ge':
res = prev >= curr
elif comparison == 'le':
res = prev <= curr
if not res:
print('ERROR: %s %s %s' % (prev, comparison_symbol(prev, curr), curr))
return False
prev = curr
return True
def comparison_symbol(v1, v2):
"""
Returns a character representation of the relationship between two objects
"""
if v1 == v2:
return '=='
elif v1 > v2:
return '>'
elif v1 < v2:
return '<'
else:
raise RuntimeError('Could not compare "%s" and "%s"' % (v1, v2))
|
mit
|
Python
|
7568b5c4869a5ab4c5e483393f901b77b70ebced
|
Fix typo in word API test.
|
PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge
|
spec/data/word_api/wordnet/_word_api_spec.py
|
spec/data/word_api/wordnet/_word_api_spec.py
|
from data.word_api import word_api
from spec.mamba import *
_word_api = None
with _description('_word_api'):
with before.all:
global _word_api
_word_api = word_api.get_api('wordnet')
with description('base_form'):
with it('handles plurals'):
expect(_word_api.base_form('snails')).to(equal('snail'))
with description('expand'):
with it('expands fiery'):
expanded = _word_api.expand('fiery')
expect(expanded).to(have_keys('ardent', 'flaming', 'igneous'))
expect(expanded).not_to(have_key('fiery'))
with description('tag'):
with it('tags empty sentences'):
expect(_word_api.tag('')).to(equal([]))
with it('tags real sentence'):
expect(_word_api.tag(
'The positions of CEO, analyst, and accountant'
' are held by Alex, Sheila, and Sarah.')
).to(equal([
('The', 'DT'),
('positions', 'NNS'),
('of', 'IN'),
('CEO', 'NNP'),
(',', ','),
('analyst', 'NN'),
(',', ','),
('and', 'CC'),
('accountant', 'NN'),
('are', 'VBP'),
('held', 'VBN'),
('by', 'IN'),
('Alex', 'NNP'),
(',', ','),
('Sheila', 'NNP'),
(',', ','),
('and', 'CC'),
('Sarah', 'NNP'),
('.', '.'),
]))
with description('synonyms'):
with it('executes without error'):
expect(calling(_word_api.synonyms, 'string')).not_to(raise_error)
with it('returns synonyms'):
results = _word_api.synonyms('string')
expect(results).to(have_keys('fiber', 'cord', 'thread'))
with description('hypernyms'):
with it('executes without error'):
expect(calling(_word_api.hypernyms, 'orange')).not_to(raise_error)
with it('returns hypernyms'):
results = _word_api.hypernyms('orange')
expect(results).to(have_keys('pigment', 'color', 'edible fruit'))
with description('cryptic examples'):
with it('GREENBELT: Inexperienced band intended to limit urban sprawl'):
# expect(_word_api.expand('inexperienced')).to(have_key('green'))
expect(_word_api.expand('band')).to(have_key('belt'))
with it('PASTRY: Fathers attempt to get pie-crust'):
expect(_word_api.expand('fathers')).to(have_key('pas'))
expect(_word_api.expand('attempt')).to(have_key('try'))
|
from data.word_api import word_api
from spec.mamba import *
_word_api = None
with _description('_word_api'):
with before.all:
global _word_api
_word_api = word_api.get_api('wordnet')
with description('base_form'):
with it('handles plurals'):
expect(_word_api.base_form('snails')).to(equal('snail'))
with description('expand'):
with it('expands fiery'):
expanded = _word_api.expand('fiery')
expect(expanded).to(have_keys('ardent', 'flaming', 'igneous'))
expect(expanded).not_to(have_key('fiery'))
with description('tag'):
with it('tags empty sentences'):
expect(_word_api.tag('')).to(equal([]))
with it('tags real sentence'):
expect(_word_api.tag(
'The positions of CEO, analyst, and accountant'
' are held by Alex, Sheila, and Sarah.')
).to(equal([
('The', 'DT'),
('positions', 'NNS'),
('of', 'IN'),
('CEO', 'NNP'),
(',', ','),
('analyst', 'NN'),
(',', ','),
('and', 'CC'),
('accountant', 'NN'),
('are', 'VBP'),
('held', 'VBN'),
('by', 'IN'),
('Alex', 'NNP'),
(',', ','),
('Sheila', 'NNP'),
(',', ','),
('and', 'CC'),
('Sarah', 'NNP'),
('.', '.'),
]))
with description('synonyms'):
with it('executes without error'):
expect(calling(_word_api.synonyms, 'string')).not_to(raise_error)
with it('returns synonyms'):
results = _word_api.synonyms('string')
expect(results).to(have_keys('fiber', 'cord', 'thread'))
with description('hypernyms'):
with it('executes without error'):
expect(calling(_word_api.hypernyms, 'orange')).not_to(raise_error)
with it('returns synonyms'):
results = _word_api.hypernyms('orange')
expect(results).to(have_keys('pigment', 'color', 'edible fruit'))
with description('cryptic examples'):
with it('GREENBELT: Inexperienced band intended to limit urban sprawl'):
# expect(_word_api.expand('inexperienced')).to(have_key('green'))
expect(_word_api.expand('band')).to(have_key('belt'))
with it('PASTRY: Fathers attempt to get pie-crust'):
expect(_word_api.expand('fathers')).to(have_key('pas'))
expect(_word_api.expand('attempt')).to(have_key('try'))
|
mit
|
Python
|
e5c81f533099fc21d1da67ffdd91a2dafda08429
|
fix for both envs?
|
ibiwan/ibcomics,ibiwan/ibcomics
|
ibcomics/wsgi.py
|
ibcomics/wsgi.py
|
"""
WSGI config for ibcomics project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os, sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../../")))
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../")))
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "ibcomics.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
"""
WSGI config for ibcomics project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "ibcomics.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
apache-2.0
|
Python
|
4af2d1286a6a6b8d6bf91f0d5f707b3d999b53d7
|
Set null in the "Elevation" field
|
nkoech/csacompendium,nkoech/csacompendium,nkoech/csacompendium
|
csacompendium/locations/models.py
|
csacompendium/locations/models.py
|
from __future__ import unicode_literals
from django.conf import settings
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.db import models
# from django.db.models.signals import pre_save
# from django.dispatch import receiver
# from django.utils.text import slugify
class LocationManager(models.Manager):
"""
Location model manager
"""
def filter_by_instance(self, instance):
"""
Query a related location object/record from another model's object
:param instance: Object instance
:return: Query result from content type/model
:rtye: object/record
"""
content_type = ContentType.objects.get_for_model(instance.__class__)
obj_id = instance.id
qs = super(LocationManager, self).filter(content_type=content_type, object_id=obj_id)
return qs
class Location(models.Model):
"""
Location model. Creates location entity.
"""
user = models.ForeignKey(settings.AUTH_USER_MODEL, default=1)
modified_by = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='%(app_label)s_%(class)s', default=1)
content_type = models.ForeignKey(ContentType, on_delete=models.PROTECT)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey('content_type', 'object_id')
location_name = models.CharField(max_length=256, blank=True)
latitude = models.DecimalField(max_digits=8, decimal_places=6, unique=True)
longitude = models.DecimalField(max_digits=9, decimal_places=6, unique=True)
elevation = models.FloatField(blank=True, null=True)
last_update = models.DateTimeField(auto_now=True, auto_now_add=False)
time_created = models.DateTimeField(auto_now=False, auto_now_add=True)
objects = LocationManager()
def __unicode__(self):
return self.location_name
def __str__(self):
return self.location_name
class Meta:
unique_together = ['latitude', 'longitude']
ordering = ['-time_created', '-last_update']
verbose_name_plural = 'Locations'
|
from __future__ import unicode_literals
from django.conf import settings
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.db import models
# from django.db.models.signals import pre_save
# from django.dispatch import receiver
# from django.utils.text import slugify
class LocationManager(models.Manager):
"""
Location model manager
"""
def filter_by_instance(self, instance):
"""
Query a related location object/record from another model's object
:param instance: Object instance
:return: Query result from content type/model
:rtye: object/record
"""
content_type = ContentType.objects.get_for_model(instance.__class__)
obj_id = instance.id
qs = super(LocationManager, self).filter(content_type=content_type, object_id=obj_id)
return qs
class Location(models.Model):
"""
Location model. Creates location entity.
"""
user = models.ForeignKey(settings.AUTH_USER_MODEL, default=1)
modified_by = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='%(app_label)s_%(class)s', default=1)
content_type = models.ForeignKey(ContentType, on_delete=models.PROTECT)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey('content_type', 'object_id')
location_name = models.CharField(max_length=256, blank=True)
latitude = models.DecimalField(max_digits=8, decimal_places=6, unique=True)
longitude = models.DecimalField(max_digits=9, decimal_places=6, unique=True)
elevation = models.FloatField(blank=True)
last_update = models.DateTimeField(auto_now=True, auto_now_add=False)
time_created = models.DateTimeField(auto_now=False, auto_now_add=True)
objects = LocationManager()
def __unicode__(self):
return self.location_name
def __str__(self):
return self.location_name
class Meta:
unique_together = ['latitude', 'longitude']
ordering = ['-time_created', '-last_update']
verbose_name_plural = 'Locations'
|
mit
|
Python
|
0079d87a51267e73d1083c339f10df8f31712968
|
Fix Python 2
|
rollforbugs/cscslackbot,rollforbugs/cscslackbot
|
cscslackbot/logconfig/__init__.py
|
cscslackbot/logconfig/__init__.py
|
import logging
import logging.config
import logging.handlers
import six
import sys
from ..utils import from_human_readable
def configure(config):
format = config.get('format', None)
datefmt = config.get('datefmt', None)
fmtstyle = config.get('fmtstyle', '%')
if six.PY2:
formatter = logging.Formatter(format, datefmt)
else:
formatter = logging.Formatter(format, datefmt, fmtstyle)
handlers = []
# Console handler
h = logging.StreamHandler(sys.stdout)
h.setLevel(config['console']['level'])
h.setFormatter(formatter)
handlers.append(h)
# File handlers
for f in config['files']:
file_config = config['files'][f]
maxsize = file_config.get('maxsize', '1M')
maxsize = from_human_readable(str(maxsize))
count = file_config.get('count', 1)
h = logging.handlers.RotatingFileHandler(f, maxBytes=maxsize, backupCount=count)
h.setLevel(file_config['level'])
h.setFormatter(formatter)
handlers.append(h)
logging.getLogger().setLevel(logging.DEBUG)
for h in handlers:
logging.getLogger().addHandler(h)
print(h)
|
import logging
import logging.config
import logging.handlers
import sys
from ..utils import from_human_readable
def configure(config):
format = config.get('format', None)
datefmt = config.get('datefmt', None)
fmtstyle = config.get('fmtstyle', '%')
formatter = logging.Formatter(format, datefmt, fmtstyle)
handlers = []
# Console handler
h = logging.StreamHandler(sys.stdout)
h.setLevel(config['console']['level'])
h.setFormatter(formatter)
handlers.append(h)
# File handlers
for f in config['files']:
file_config = config['files'][f]
maxsize = file_config.get('maxsize', '1M')
maxsize = from_human_readable(str(maxsize))
count = file_config.get('count', 1)
h = logging.handlers.RotatingFileHandler(f, maxBytes=maxsize, backupCount=count)
h.setLevel(file_config['level'])
h.setFormatter(formatter)
handlers.append(h)
logging.getLogger().setLevel(logging.DEBUG)
for h in handlers:
logging.getLogger().addHandler(h)
print(h)
|
mit
|
Python
|
0275ababbed41a6c051938f8cf3a2defe1962fe1
|
Fix wrong finally clause
|
znerol/py-idlk
|
idlk/__init__.py
|
idlk/__init__.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
import sys
import idlk.base41
if sys.version_info[0] == 3:
_get_byte = lambda c: c
else:
_get_byte = ord
def hash_macroman(data):
h = 0
for c in data:
h = ((h << 8) + h) + _get_byte(c)
return h % 0xFFFEECED
def idlk(filename):
# Convert to lowercase first.
filename = filename.lower()
# The original algorithm seems to prefer Mac Roman encoding as long as
# there are no non-mappable characters in the file name.
try:
macroman_name = filename.encode("macroman")
except UnicodeEncodeError:
pass
else:
hashed = base41.encode(hash_macroman(macroman_name))
base, ext = os.path.splitext(macroman_name)
return "~{:s}~{:s}.idlk".format(base[0:18].decode("macroman"), hashed)
# Regrettably the encoding / hashing algorithm for unicode filenames is
# not currently known. Please file a feature request/patch if you
# discover a working implementation.
return False
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
import sys
import idlk.base41
if sys.version_info[0] == 3:
_get_byte = lambda c: c
else:
_get_byte = ord
def hash_macroman(data):
h = 0
for c in data:
h = ((h << 8) + h) + _get_byte(c)
return h % 0xFFFEECED
def idlk(filename):
# Convert to lowercase first.
filename = filename.lower()
# The original algorithm seems to prefer Mac Roman encoding as long as
# there are no non-mappable characters in the file name.
try:
macroman_name = filename.encode("macroman")
except UnicodeEncodeError:
pass
finally:
hashed = base41.encode(hash_macroman(macroman_name))
base, ext = os.path.splitext(macroman_name)
return "~{:s}~{:s}.idlk".format(base[0:18].decode("macroman"), hashed)
# Regrettably the encoding / hashing algorithm for unicode filenames is
# not currently known. Please file a feature request/patch if you
# discover a working implementation.
return False
|
mit
|
Python
|
d6caf2f1eb407eb63e0e6dc7e1375a81fcd5ff81
|
Implement data coordinator in CalFlora
|
jnfrye/local_plants_book
|
scripts/observations/scrape/CalFloraScraper.py
|
scripts/observations/scrape/CalFloraScraper.py
|
from selenium import webdriver
import pandas as pd
import argparse
import PyFloraBook.web.communication as scraping
import PyFloraBook.input_output.data_coordinator as dc
# ---------------- INPUT ----------------
# Parse arguments
parser = argparse.ArgumentParser(
description='Scrape CalFlora for species counts for given family')
parser.add_argument("-f", "--families", nargs='+',
help="Names of the families to be analyzed.")
args = parser.parse_args()
families = args.families
# ---------------- SCRAPING ----------------
print("Opening browser...")
browser = webdriver.Firefox()
browser.set_window_size(500, 300)
browser.set_window_position(200, 200)
SITE_NAME = "CalFlora"
OUTPUT_PATH = dc.locate_raw_data_folder() / SITE_NAME
for family in families:
# Load the webpage
try:
browser.get(
"http://www.calflora.org/entry/wgh.html#srch=t&family="
+ family +
"&group=none&fmt=simple&y=39.493&x=-119.6979&z=5&rid=rs940")
except:
pass # lol
scraping.wait_for_load(browser, "CLASS_NAME", "familyColumn")
# Download the rows in the species data table
# Next we skip the first three rows because they contain nonsense
data_table = browser.find_element_by_id("resultSlot")
data_rows = data_table.find_elements_by_tag_name("tr")[3:]
# Extract the species counts
species_list = [
(row.find_element_by_class_name("column1Simple").text,
int(row.find_element_by_class_name("observColumn").text.split()[0]))
for row in data_rows
]
# ---------------- ANALYSIS ----------------
# Convert to friendly format for writing CSV
family_results_path = str(OUTPUT_PATH / (family + "_raw_data.csv"))
all_species = pd.DataFrame(species_list, columns=["full_name", "count"])
all_species.to_csv(
family_results_path,
columns=['full_name', 'count'], index=False
)
# For whatever reason, it won't load the next page unless I do this
browser.get("about:blank")
browser.quit()
|
from selenium import webdriver
import pandas as pd
import argparse
import PyFloraBook.web.communication as scraping
# ---------------- INPUT ----------------
# Parse arguments
parser = argparse.ArgumentParser(
description='Scrape CalFlora for species counts for given family')
parser.add_argument("-f", "--families", nargs='+',
help="Names of the families to be analyzed.")
args = parser.parse_args()
families = args.families
# ---------------- SCRAPING ----------------
print("Opening browser...")
browser = webdriver.Firefox()
browser.set_window_size(500, 300)
browser.set_window_position(200, 200)
browser.set_page_load_timeout(8)
for family in families:
# Load the webpage
try:
browser.get(
"http://www.calflora.org/entry/wgh.html#srch=t&family="
+ family +
"&group=none&fmt=simple&y=39.493&x=-119.6979&z=5&rid=rs940")
except:
pass # lol
scraping.wait_for_load(browser, "CLASS_NAME", "familyColumn")
# Download the rows in the species data table
# Next we skip the first three rows because they contain nonsense
data_table = browser.find_element_by_id("resultSlot")
data_rows = data_table.find_elements_by_tag_name("tr")[3:]
# Extract the species counts
species_list = [
(row.find_element_by_class_name("column1Simple").text,
int(row.find_element_by_class_name("observColumn").text.split()[0]))
for row in data_rows
]
# ---------------- ANALYSIS ----------------
# Convert to friendly format for writing CSV
all_species = pd.DataFrame(species_list, columns=["full_name", "count"])
all_species.to_csv(
"./CalFlora/" + family + "_raw_data.csv",
columns=['full_name', 'count'], index=False
)
# For whatever reason, it won't load the next page unless I do this
browser.get("about:blank")
browser.quit()
# ---------------- OUTPUT ----------------
|
mit
|
Python
|
b1e35e8eea2e91013967b0544088036d56014c34
|
fix style errors
|
trbs/bucky,trbs/bucky,jsiembida/bucky3
|
contrib/statsd_perfomance_test.py
|
contrib/statsd_perfomance_test.py
|
#!/usr/bin/env python
import multiprocessing
import bucky.statsd
import time
import timeit
l10 = range(10)
l100 = range(100)
l1000 = range(1000)
# try:
# import queue
# except ImportError:
# import Queue as queue
queue = multiprocessing.Queue()
handler = bucky.statsd.StatsDHandler(queue, bucky.cfg)
def fill_and_compute_timers(handler):
# Fill timers
for x in l100: # timer name
for y in l1000: # timer value, using random value is not good idea there
handler.handle_timer("timer-%s" % (x), [y])
# Compute metrics
stime = int(time.time())
handler.enqueue_timers(stime)
# Clear queue
while not queue.empty():
queue.get()
# Warmup
print("Warmup")
for i in l10:
fill_and_compute_timers(handler)
print("Test")
trun = timeit.timeit('fill_and_compute_timers(handler)',
'from __main__ import fill_and_compute_timers, handler',
number=100)
print("Result:", trun)
queue.close
|
#!/usr/bin/env python
import multiprocessing
import bucky.statsd
import time
import timeit
l10 = range(10)
l100 = range(100)
l1000 = range(1000)
# try:
# import queue
# except ImportError:
# import Queue as queue
queue = multiprocessing.Queue()
handler = bucky.statsd.StatsDHandler(queue, bucky.cfg)
def fill_and_compute_timers(handler):
# Fill timers
for x in l100: # timer name
for y in l1000: # timer value, using random value is not good idea there
handler.handle_timer("timer-%s" % (x), [y])
# Compute metrics
stime = int(time.time())
handler.enqueue_timers(stime)
# Clear queue
while not queue.empty():
queue.get()
# Warmup
print("Warmup")
for i in l10:
fill_and_compute_timers(handler)
print("Test")
time = timeit.timeit('fill_and_compute_timers(handler)',
'from __main__ import fill_and_compute_timers, handler',
number=100)
print("Result:", time)
queue.close
|
apache-2.0
|
Python
|
52d76647b1fa50a2649335b65f22f88d7877e9d3
|
Return to old setting of repetitions for fast testing
|
bees4ever/spotpy,bees4ever/spotpy,thouska/spotpy,thouska/spotpy,bees4ever/spotpy,thouska/spotpy
|
spotpy/unittests/test_fast.py
|
spotpy/unittests/test_fast.py
|
import unittest
try:
import spotpy
except ImportError:
import sys
sys.path.append(".")
import spotpy
from spotpy.examples.spot_setup_hymod_python import spot_setup
class TestFast(unittest.TestCase):
def setUp(self):
self.spot_setup = spot_setup()
self.rep = 200 # REP must be a multiply of amount of parameters which are in 7 if using hymod
self.timeout = 10 # Given in Seconds
def test_fast(self):
sampler = spotpy.algorithms.fast(self.spot_setup, parallel="seq", dbname='test_FAST', dbformat="ram",
sim_timeout=self.timeout)
results = []
sampler.sample(self.rep)
results = sampler.getdata()
self.assertEqual(203,len(results))
if __name__ == '__main__':
unittest.main()
|
import unittest
try:
import spotpy
except ImportError:
import sys
sys.path.append(".")
import spotpy
from spotpy.examples.spot_setup_hymod_python import spot_setup
class TestFast(unittest.TestCase):
def setUp(self):
self.spot_setup = spot_setup()
self.rep = 200 # REP must be a multiply of amount of parameters which are in 7 if using hymod
self.timeout = 10 # Given in Seconds
def test_fast(self):
sampler = spotpy.algorithms.fast(self.spot_setup, parallel="seq", dbname='test_FAST', dbformat="ram",
sim_timeout=self.timeout)
results = []
sampler.sample(self.rep)
results = sampler.getdata()
self.assertEqual(200,len(results))
if __name__ == '__main__':
unittest.main()
|
mit
|
Python
|
06ef27c5767947c324d787c23c0acb887ea7f914
|
Remove an useless shebang form non-executable file (#1073)
|
PKRoma/httpie,PKRoma/httpie,jakubroztocil/httpie,jakubroztocil/httpie,jakubroztocil/httpie,jkbrzt/httpie,jkbrzt/httpie,jkbrzt/httpie
|
httpie/__main__.py
|
httpie/__main__.py
|
"""The main entry point. Invoke as `http' or `python -m httpie'.
"""
import sys
def main():
try:
from httpie.core import main
exit_status = main()
except KeyboardInterrupt:
from httpie.status import ExitStatus
exit_status = ExitStatus.ERROR_CTRL_C
sys.exit(exit_status.value)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
"""The main entry point. Invoke as `http' or `python -m httpie'.
"""
import sys
def main():
try:
from httpie.core import main
exit_status = main()
except KeyboardInterrupt:
from httpie.status import ExitStatus
exit_status = ExitStatus.ERROR_CTRL_C
sys.exit(exit_status.value)
if __name__ == '__main__':
main()
|
bsd-3-clause
|
Python
|
0afb9c02a63a4d96fa21f825a98139878df06dfc
|
add a-game-of-stones
|
EdisonCodeKeeper/hacker-rank,EdisonCodeKeeper/hacker-rank,zeyuanxy/hacker-rank,EdisonAlgorithms/HackerRank,zeyuanxy/hacker-rank,EdisonCodeKeeper/hacker-rank,zeyuanxy/hacker-rank,EdisonAlgorithms/HackerRank,EdisonCodeKeeper/hacker-rank,zeyuanxy/hacker-rank,EdisonAlgorithms/HackerRank,EdisonAlgorithms/HackerRank,zeyuanxy/hacker-rank,EdisonAlgorithms/HackerRank,EdisonCodeKeeper/hacker-rank,zeyuanxy/hacker-rank,EdisonCodeKeeper/hacker-rank,EdisonAlgorithms/HackerRank
|
contest/5-days-of-game-theory/a-game-of-stones/a-game-of-stones.py
|
contest/5-days-of-game-theory/a-game-of-stones/a-game-of-stones.py
|
# -*- coding: utf-8 -*-
# @Author: Zeyuan Shang
# @Date: 2016-05-13 13:42:03
# @Last Modified by: Zeyuan Shang
# @Last Modified time: 2016-05-13 13:42:08
T = input()
for _ in xrange(T):
n = input()
dp = [False] * (n + 1)
for i in xrange(n + 1):
res = False
for j in [2, 3, 5]:
if i >= j and not dp[i - j]:
res = True
break
dp[i] = res
if dp[n]:
print 'First'
else:
print 'Second'
|
mit
|
Python
|
|
5c91a2c8dda69d37fd3cd0989ff6c3883851eaef
|
Introduce templatetag for fetching image thumbnails
|
tfroehlich82/saleor,itbabu/saleor,UITools/saleor,maferelo/saleor,tfroehlich82/saleor,tfroehlich82/saleor,car3oon/saleor,KenMutemi/saleor,KenMutemi/saleor,jreigel/saleor,maferelo/saleor,car3oon/saleor,KenMutemi/saleor,mociepka/saleor,car3oon/saleor,HyperManTT/ECommerceSaleor,HyperManTT/ECommerceSaleor,maferelo/saleor,UITools/saleor,HyperManTT/ECommerceSaleor,jreigel/saleor,UITools/saleor,itbabu/saleor,mociepka/saleor,jreigel/saleor,UITools/saleor,UITools/saleor,mociepka/saleor,itbabu/saleor
|
saleor/product/templatetags/product_images.py
|
saleor/product/templatetags/product_images.py
|
import logging
import warnings
from django.template.context_processors import static
from django import template
from django.conf import settings
logger = logging.getLogger(__name__)
register = template.Library()
# cache available sizes at module level
def get_available_sizes():
all_sizes = set()
keys = settings.VERSATILEIMAGEFIELD_RENDITION_KEY_SETS
for size_group, sizes in keys.items():
for size_name, size in sizes:
all_sizes.add(size)
return all_sizes
AVAILABLE_SIZES = get_available_sizes()
@register.simple_tag()
def product_image(instance, size, method='crop'):
if instance:
size_name = '%s__%s' % (method, size)
if (size_name not in AVAILABLE_SIZES and not
settings.VERSATILEIMAGEFIELD_SETTINGS['create_images_on_demand']):
msg = ('Thumbnail size %s is not defined in settings '
'and it won\'t be generated automatically' % size_name)
warnings.warn(msg)
try:
if method == 'crop':
thumbnail = instance.crop[size]
else:
thumbnail = instance.thumbnail[size]
except:
logger.exception('Thumbnail fetch failed',
extra={'instance': instance, 'size': size})
else:
return thumbnail.url
return static('dist/images/product-image-placeholder.png')
|
bsd-3-clause
|
Python
|
|
304e8d68e114eda8fe420e64f0255a816fbc5009
|
Add a very basic test, #1
|
lord63/py-spin
|
test_pyspin.py
|
test_pyspin.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import time
from pyspin import spin
def test_spinner():
spinner = spin.Spinner(spin.Spin9)
assert spinner.length == 4
assert spinner.frames == spin.Spin9
assert spinner.current() == u'←'
assert spinner.next() == u'←'
assert spinner.next() == u'↑'
assert spinner.next() == u'→'
assert spinner.next() == u'↓'
assert spinner.next() == u'←'
assert spinner.next() == u'↑'
spinner.reset()
assert spinner.position == 0
def test_make_spin():
@spin.make_spin(spin.Default, 'Downloading...')
def fake_download():
time.sleep(5)
fake_download()
|
mit
|
Python
|
|
9c1ee652684fec9dc3b9ed487bfd980e886ec9fc
|
Add regression test for #1698
|
explosion/spaCy,spacy-io/spaCy,explosion/spaCy,recognai/spaCy,aikramer2/spaCy,spacy-io/spaCy,honnibal/spaCy,recognai/spaCy,spacy-io/spaCy,explosion/spaCy,recognai/spaCy,aikramer2/spaCy,honnibal/spaCy,spacy-io/spaCy,recognai/spaCy,explosion/spaCy,aikramer2/spaCy,recognai/spaCy,honnibal/spaCy,spacy-io/spaCy,honnibal/spaCy,explosion/spaCy,recognai/spaCy,aikramer2/spaCy,aikramer2/spaCy,aikramer2/spaCy,explosion/spaCy,spacy-io/spaCy
|
spacy/tests/regression/test_issue1698.py
|
spacy/tests/regression/test_issue1698.py
|
# coding: utf8
from __future__ import unicode_literals
import pytest
@pytest.mark.parametrize('text', ['test@example.com', 'john.doe@example.co.uk'])
def test_issue1698(en_tokenizer, text):
doc = en_tokenizer(text)
assert len(doc) == 1
assert not doc[0].like_url
|
mit
|
Python
|
|
42a18ef9030f883563c4459aec46563877274794
|
Add test for #1868: Vocab.__contains__ with ints
|
honnibal/spaCy,explosion/spaCy,aikramer2/spaCy,aikramer2/spaCy,spacy-io/spaCy,spacy-io/spaCy,explosion/spaCy,recognai/spaCy,aikramer2/spaCy,spacy-io/spaCy,honnibal/spaCy,explosion/spaCy,aikramer2/spaCy,spacy-io/spaCy,explosion/spaCy,honnibal/spaCy,aikramer2/spaCy,recognai/spaCy,explosion/spaCy,spacy-io/spaCy,honnibal/spaCy,aikramer2/spaCy,recognai/spaCy,spacy-io/spaCy,recognai/spaCy,explosion/spaCy,recognai/spaCy,recognai/spaCy
|
spacy/tests/regression/test_issue1868.py
|
spacy/tests/regression/test_issue1868.py
|
'''Test Vocab.__contains__ works with int keys'''
from __future__ import unicode_literals
from ... vocab import Vocab
def test_issue1868():
vocab = Vocab()
lex = vocab['hello']
assert lex.orth in vocab
assert lex.orth_ in vocab
assert 'some string' not in vocab
int_id = vocab.strings.add('some string')
assert int_id not in vocab
|
mit
|
Python
|
|
2a5b8283bf653e7691b91217c2fe225ab0699571
|
update finalization rainfall
|
spatialexplore/idn_vam_wfp
|
python/finalization_shp_dbf.py
|
python/finalization_shp_dbf.py
|
# sample input = python finalization_shp_dbf.py /var/lib/opengeo/geoserver/data/IDN_GIS/05_Analysis/03_Early_Warning/Rainfall_Anomaly_test/ 2016-07
import shapefile
import sys
import datetime
import dbf
location_geoserver = str(sys.argv[1])
filename = location_geoserver.split('/')[-2]
period = str(sys.argv[2])
period_join = period.replace('-','')
period_year = period.split('-')[0]
period_month = period.split('-')[1]
fileshp = location_geoserver + filename + '.shp'
fileshx = location_geoserver + filename + '.shx'
filedbf = location_geoserver + filename + '.dbf'
filetif = 'idn_cli_chirps-%s.ratio_anom.tif' % period_join
table = dbf.Table(filedbf)
print fileshp, filedbf, filetif
def update_shp():
try:
r = shapefile.Reader(fileshp)
w = shapefile.Writer(r.shapeType)
w.fields = list(r.fields)
w.records.extend(r.records())
w._shapes.extend(r.shapes())
w.poly(parts=[[[94.95000405604239, -11.050000982838242], [94.95000405604239, 5.949999016941131], [141.1500040554428, 5.949999016941131], [141.1500040554428, -11.050000982838242], [94.95000405604239, -11.050000982838242]]])
w.saveShp(fileshp)
w.saveShx(fileshx)
print 'Successed update shp and shx'
except:
print 'Failed update shp'
def update_dbf():
try:
with table:
datum = (filetif, datetime.date(int(period_year), int(period_month), 1))
table.append(datum)
print 'Successed update dbf'
except:
print 'Failed update dbf'
def main():
update_shp()
update_dbf()
if __name__ == '__main__':
main()
|
mit
|
Python
|
|
39e31d6dd129d4acd9adc95ce0bb7a5c9c45dd42
|
Create Dictionary_example.py
|
Jeevan-J/Python_Funcode
|
Python3-5/Dictionary_example.py
|
Python3-5/Dictionary_example.py
|
#With a given integral number n, write a program to generate a dictionary that contains (i, i*i) such that is an integral number between 1 and n (both included). and then the program should print the dictionary.
n=int(input("Please enter a number")); # takes an integer from user
d=dict(); # Creates an empty dictionary
for i in range(1,n+1): # Runs loop for n-times with 'i' varying from 1 to n.
d[i]=i*i; # Maps every element to its square of index.
print (d); # Prints dictionary 'd'
# For example, If input is 8, then output should look like,
# {1:1,2:4,3:9,4:16,5:25,6:36,7:49,8:64}
|
bsd-2-clause
|
Python
|
|
cf8744e8f9d3f4d77093ecf1cce119161f395b78
|
add tests
|
iDevy/rdp,mznkcyr/rdp,fhirschmann/rdp
|
tests.py
|
tests.py
|
import unittest
import numpy as np
from numpy.testing import assert_array_equal as assertAE
from rdp import rdp
class RDPTest(unittest.TestCase):
def test_two(self):
assertAE(rdp(np.array([[0, 0], [4, 4]])),
np.array([[0, 0], [4, 4]]))
def test_hor(self):
assertAE(rdp(np.array([0, 0, 1, 0, 2, 0, 3, 0, 4, 0]).reshape(5, 2)),
np.array([0, 0, 4, 0]).reshape(2, 2))
def test_ver(self):
assertAE(rdp(np.array([0, 0, 0, 1, 0, 2, 0, 3, 0, 4]).reshape(5, 2)),
np.array([0, 0, 0, 4]).reshape(2, 2))
def test_diag(self):
assertAE(rdp(np.array([0, 0, 1, 1, 2, 2, 3, 3, 4, 4]).reshape(5, 2)),
np.array([0, 0, 4, 4]).reshape(2, 2))
def test_eps0(self):
assertAE(rdp(np.array([0, 0, 5, 1, 10, 1]).reshape(3, 2)),
np.array([0, 0, 5, 1, 10, 1]).reshape(3, 2))
def test_eps1(self):
assertAE(rdp(np.array([0, 0, 5, 1, 10, 1]).reshape(3, 2), 1),
np.array([0, 0, 10, 1]).reshape(2, 2))
|
mit
|
Python
|
|
a7bff3fdc7e328fb0c11fbf0450db78997d2e307
|
Create contour.py
|
metissec/contour
|
contour.py
|
contour.py
|
#!/usr/bin/python
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import geotiler
from scipy.stats import gaussian_kde
class Contour():
def __init__(self, in_f,pix_size=2000,inch_size=10,dpi=200,zoom=14):
#init varibles
self.in_f = in_f
self.np = np
self.plt = plt
self.zoom = zoom
self.pix_size = pix_size
self.inch_size = inch_size
self.dpi = dpi
#start fig
self.fig = self.plt.figure(figsize=(self.inch_size, self.inch_size), dpi=self.dpi)
self.ax = self.plt.subplot(111)
#init func.
self.cor_array()
def bounds(self):
#min/max
xmin = self.a[:,0].min()
xmax = self.a[:,0].max()
ymin = self.a[:,1].min()
ymax = self.a[:,1].max()
#center
midx = (xmax + xmin)/2
midy = (ymax + ymin)/2
self.center = (float(midx), float(midy))
self.map_l()
def map_l(self):
#download map and plot map
self.mm = geotiler.Map(center=self.center, zoom=self.zoom, size =(self.pix_size,self.pix_size))
self.img = geotiler.render_map(self.mm)
self.ax.imshow(self.img)
self.contour_l()
def cor_array(self):
#open file
f = open(self.in_f,"r")
#init array
initline = f.readline()
initline = initline.rstrip('\n')
initline = tuple(float(x) for x in initline.split(","))
self.a = np.array((initline[1],initline[0]))
#put rest of data into array
for line in f:
if line == '\n':
break
line = line.rstrip('\n')
line = tuple(float(x) for x in line.split(","))
b = np.array((line[1],line[0]))
self.a = np.vstack((self.a,b))
self.bounds()
def contour_l(self):
#lat,long to x,y
x, y = zip(*(self.mm.rev_geocode(p) for p in self.a))
self.a = np.array([x,y])
#data shaping
X, Y = np.mgrid[0:self.pix_size:100j, 0:self.pix_size:100j]
positions = np.vstack([X.ravel(),Y.ravel()])
kernel = gaussian_kde(self.a)
Z = np.reshape(kernel(positions).T, X.shape)
#plot
self.ax.contourf(X,Y,Z,cmap='rainbow',alpha=.5,linewidth=.4)
self.ax.scatter(x, y, c='black', edgecolor='white', s=10, alpha=0.9)
self.layer_out()
def layer_out(self):
#remove tics
self.plt.gca().xaxis.set_major_locator(plt.NullLocator())
self.plt.gca().yaxis.set_major_locator(plt.NullLocator())
#output png
self.plt.savefig('test.png', bbox_inches='tight')
self.plt.close()
Contour('pnts_ny.txt')
|
mit
|
Python
|
|
bb197fcee1c809e377d235346fcb0a670f35d918
|
Create counter.py
|
praveendareddy21/my-repo,praveendareddy21/my-repo
|
counter.py
|
counter.py
|
from collections import Counter
l=[12,3,4,2,4,2,4,23,4,1,2]
c=Counter(iterable=l)
print c.most_common(2)
print list(c.elements())
c.clear()
|
mit
|
Python
|
|
cb85810364a235426147a440da797d35d114c5a6
|
Test Commit
|
SharedKnowledge/SharkPython,SharedKnowledge/SharkPython,SharedKnowledge/SharkPython
|
raspberry/asip/RelationSemanticTag.py
|
raspberry/asip/RelationSemanticTag.py
|
from SemanticTag import *
#Test
|
agpl-3.0
|
Python
|
|
ef335362b5f601da41377984b8d9cc675d9ed669
|
Create ddns_sync.py
|
sachiel/devops-tools
|
ddns_sync.py
|
ddns_sync.py
|
#!/usr/bin/env python3
import boto3
from get import getjson
query = "http://evolutiva.mx/getip/"
data = getjson(query)
if not data:
exit()
new_ip = dict(data)['ip']
old_ip = None
r53 = boto3.client('route53') #.connect_to_region('us-west-2')
try:
for res in r53.list_resource_record_sets(HostedZoneId='/hostedzone/Z2XLK91YNO8JY8')['ResourceRecordSets']:
if res['Type'] == 'A' and res['Name'] == 'cuchulainn.evolutiva.mx.':
old_ip = res['ResourceRecords'][0]['Value']
except:
pass
if new_ip == old_ip:
print('Sin Cambios')
else:
# Ex: {'ResourceRecords': [{'Value': '187.207.0.253'}], 'TTL': 300, 'Name': 'cuchulainn.evolutiva.mx.', 'Type': 'A'}
CB = {
'Changes':[{
'Action': 'UPSERT',
'ResourceRecordSet': {
'Name': 'cuchulainn.evolutiva.mx',
'Type': 'A',
'TTL':300,
'ResourceRecords': [
{
'Value': new_ip
}
]
}
}]
}
response = r53.change_resource_record_sets(HostedZoneId='/hostedzone/Z2XLK91YNO8JY8', ChangeBatch=CB)
print(response)
|
mit
|
Python
|
|
5cf79e395802ae5db7d21d07cb6e8042793f5c26
|
Add easycrud versions of generic views
|
dekkers/django-easycrud,dekkers/django-easycrud
|
easycrud/views.py
|
easycrud/views.py
|
from django.views.generic import (ListView as DjangoListView, DetailView as DjangoDetailView,
UpdateView as DjangoUpdateView, CreateView as DjangoCreateView,
DeleteView as DjangoDeleteView)
from django.contrib.auth.decorators import login_required
from django.forms import ModelChoiceField
from .models import EasyCrudModel
class EasyCrudMixin(object):
def dispatch(self, request, *args, **kwargs):
self.owner_ref = self.model._easycrud_meta.owner_ref
if self.owner_ref:
# This is a hack to be able to conditionally use the login_required
# decorator.
ret = login_required(lambda request: False)(request)
if ret:
return ret
profile = request.user.get_profile()
self.owner_ref_obj = getattr(profile, self.owner_ref)
return super(EasyCrudMixin, self).dispatch(request, *args, **kwargs)
def get_queryset(self):
queryset = super(EasyCrudMixin, self).get_queryset()
if self.owner_ref:
kwargs = {self.owner_ref: self.owner_ref_obj}
queryset = queryset.filter(**kwargs)
return queryset
def get_context_data(self, **kwargs):
context = super(EasyCrudMixin, self).get_context_data(**kwargs)
context['model_name'] = self.model.model_name
# Implement https://code.djangoproject.com/ticket/16744 here
if 'view' not in context:
context['view'] = self
return context
def get_form_class(self):
form_class = super(EasyCrudMixin, self).get_form_class()
if self.owner_ref:
# Never display the owner field, as we always set it on the object
# in get_form() below
if self.owner_ref in form_class.base_fields:
del form_class.base_fields[self.owner_ref]
# Changing all ModelChoiceFields so the queryset only contains
# objects owned by the current user. This will only list those items
# on the form and also accept only those values during validation.
for field in form_class.base_fields.values():
if (isinstance(field, ModelChoiceField) and
issubclass(field.queryset.model, EasyCrudModel) and
field.queryset.model._easycrud_meta.owner_ref == self.owner_ref):
kwargs = {self.owner_ref: self.owner_ref_obj}
field.queryset = field.queryset.filter(**kwargs)
return form_class
def get_form(self, form_class):
form = super(EasyCrudMixin, self).get_form(form_class)
if self.owner_ref:
setattr(form.instance, self.owner_ref, self.owner_ref_obj)
return form
class ListView(EasyCrudMixin, DjangoListView):
def get_template_names(self):
names = super(ListView, self).get_template_names()
names.append("easycrud/list.html")
return names
class CreateView(EasyCrudMixin, DjangoCreateView):
def get_template_names(self):
names = super(CreateView, self).get_template_names()
names.append("easycrud/createupdate.html")
return names
class DetailView(EasyCrudMixin, DjangoDetailView):
def get_template_names(self):
names = super(DetailView, self).get_template_names()
names.append("easycrud/detail.html")
return names
class UpdateView(EasyCrudMixin, DjangoUpdateView):
def get_template_names(self):
names = super(UpdateView, self).get_template_names()
names.append("easycrud/createupdate.html")
return names
class DeleteView(EasyCrudMixin, DjangoDeleteView):
def get_template_names(self):
names = super(DeleteView, self).get_template_names()
names.append("easycrud/delete.html")
return names
|
bsd-2-clause
|
Python
|
|
bc68c04b9be33329e4c28689053300360b6393b4
|
create Clusters class
|
jesford/cluster-lensing
|
clusters.py
|
clusters.py
|
import numpy as np
import pandas as pd
from astropy.cosmology import Planck13 as cosmo
from astropy import units
import sys
sys.path.insert(1,'/Users/jesford/astrophysics/cofm') #temporary path adjust
from cofm import c_DuttonMaccio
try:
from IPython.display import display
notebook_display = True
except:
notebook_display = False
#default parameters
h = cosmo.H0.value
Om_M = cosmo.Om0
Om_L = 1. - Om_M
class Clusters():
"""Ensemble of galaxy clusters and their properties."""
def __init__(self, redshifts):
if type(redshifts) != np.ndarray:
redshifts = np.array(redshifts)
if redshifts.ndim != 1:
raise ValueError("Input redshift array must have 1 dimension.")
self.describe = "Ensemble of galaxy clusters and their properties."
self.number = redshifts.shape[0]
self.z = redshifts
self._rho_crit = cosmo.critical_density(self.z)
self._massrich_norm = 2.7*10**13
self._massrich_slope = 1.4
self._df = pd.DataFrame(self.z, columns=['z'])
def update_richness(self, richness):
if type(richness) != np.ndarray:
richness = np.array(richness)
if richness.ndim != 1:
raise ValueError("Input richness array must have 1 dimension.")
if richness.shape[0] == self.number:
self.n200 = richness
self._df['n200'] = pd.Series(self.n200, index = self._df.index)
self._update_depends_on_richness()
else:
raise ValueError("Input richness array must be same \
length as current cluster ensemble.")
def _update_depends_on_richness(self):
self._richness_to_mass()
def update_z(self, redshifts):
self.z = redshifts
self._df['z'] = pd.Series(self.z, index = self._df.index)
self._rho_crit = cosmo.critical_density(self.z)
self._update_dependant_variables()
def _update_dependant_variables(self):
self._r200()
self._c200()
self._rs()
#what else depends on z or m or?
def _richness_to_mass(self):
"""Calculate M_200 for simple power-law scaling relation
(with default parameters from arXiv:1409.3571)."""
self.m200 = self._massrich_norm * (self.n200 ** self._massrich_slope)
self._df['m200'] = pd.Series(self.m200, index = self._df.index)
self._update_dependant_variables()
def massrich_parameters(self):
print "\nMass-Richness Power Law: M200 = norm * N200^slope"
print " norm:", self._massrich_norm
print " slope:", self._massrich_slope
def update_massrichrelation(self, norm = None, slope = None):
if norm != None:
self._massrich_norm = norm
if slope != None:
self._massrich_slope = slope
self._richness_to_mass()
def view(self, notebook = notebook_display):
print "\nCluster Ensemble:"
if notebook == True:
display(self._df)
elif notebook == False:
print self._df
self.massrich_parameters()
def _r200(self):
self.r200 = (3.*self.m200 / (800.*np.pi*self._rho_crit))**(1./3.)
self._df['r200'] = pd.Series(self.r200, index = self._df.index)
def _c200(self):
"""Use c(M) from Dutton & Maccio 2014."""
self.c200 = c_DuttonMaccio(self.z,self.m200)
self._df['c200'] = pd.Series(self.c200, index = self._df.index)
def _rs(self):
"""Cluster scale radius."""
self.rs = self.r200 / self.c200
self._df['rs'] = pd.Series(self.rs, index = self._df.index)
|
mit
|
Python
|
|
8821fd5e4678dd8a2baf78d3ed068b652a10d1cd
|
Add initial games unit
|
Harmon758/Harmonbot,Harmon758/Harmonbot
|
units/games.py
|
units/games.py
|
import random
def eightball():
responses = ["It is certain", "It is decidedly so", "Without a doubt", "Yes, definitely", "You may rely on it", "As I see it, yes", "Most likely", "Outlook good", "Yes", "Signs point to yes", "Reply hazy try again", "Ask again later", "Better not tell you now", "Cannot predict now", "Concentrate and ask again", "Don't count on it", "My reply is no", "My sources say no", "Outlook not so good", "Very doubtful"]
return random.choice(responses)
|
mit
|
Python
|
|
0420aa1bf7bb8027379de52de783da87ce253f62
|
add batch upload script
|
ViDA-NYU/genotet,ViDA-NYU/genotet,ViDA-NYU/genotet,ViDA-NYU/genotet,ViDA-NYU/genotet
|
uploadBatch.py
|
uploadBatch.py
|
# This is a python script for uploading batch data to Genotet server.
# The user may write a *.tsv file, with each line as:
# file_path data_name file_type description
# The command line would be:
# python uploadBatch.py username *.tsv
# And then enter your password for Genotet.
from requests_toolbelt import MultipartEncoder
import requests
import sys
import getpass
import json
def upload_file(file_path, data_name, file_type, description, cookies):
upload_url = 'http://localhost:3000/genotet/upload'
file_path_parts = file_path.split('\/')
file_name = file_path_parts[len(file_path_parts) - 1]
params = MultipartEncoder(
fields={'type': file_type,
'name': data_name,
'description': description,
'username': 'anonymous',
'file': (file_name, open(file_path, 'rb'), 'text/plain')})
headers = {'Content-Type': params.content_type}
cookie = {'genotet-session': cookies['genotet-session']}
response = requests.post(upload_url, data=params, headers=headers, cookies=cookie)
print response.status_code
return True
def auth(username, password):
auth_url = 'http://localhost:3000/genotet/user'
params = {
'type': 'sign-in',
'username': username,
'password': password
}
params = {'data': json.dumps(params)}
response = requests.get(auth_url, params=params)
if response.status_code != 200:
return False
return response.cookies, True
def main(argv):
if len(argv) < 3:
print 'input not enough'
return
username = argv[1]
password = getpass.getpass('Password:')
cookies, auth_result = auth(username, password)
if not auth_result:
print 'username/password not correct'
return
else:
print 'sign in success'
file_path = argv[2]
tsv_file = open(file_path, 'r')
for line in tsv_file:
parts = line.split(' ')
result = upload_file(parts[0], parts[1], parts[2], parts[3], cookies)
if not result:
print 'failed to upload ' + parts[0]
return
if __name__ == '__main__':
main(sys.argv)
|
apache-2.0
|
Python
|
|
22ae0ef7d2dfe793c9deb7b6c2027d5b5f69b3e0
|
add lamper script to control colors
|
nuxis/lamper,nuxis/lamper
|
lamper.py
|
lamper.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import requests
import os
import sys
global url
#url = ''
url = os.environ['LAMPER_URL']
global universe
universe = '1'
def web_post(url, payload):
r = requests.post(
url,
data=payload,
headers={'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8'}
)
return r
def payload_creator(data):
empty = {
1: 0,
2: 0,
3: 0,
4: 0,
5: 0,
6: 0,
7: 0,
8: 0,
9: 0,
10: 0,
11: 0,
12: 0,
13: 0,
14: 0,
15: 0,
16: 0,
}
payload = {**empty, **data}
return ','.join(map(str, payload.values()))
def get_dmx():
extra_url = url + '/get_dmx?u=' + universe
print(web_post(extra_url, '').content)
def set_dmx(data):
extra_url = url + '/set_dmx'
payload = {
'u': universe,
'd': payload_creator(data),
}
import pprint
pprint.pprint(payload)
r = web_post(extra_url, payload)
print(r.content)
if __name__ == '__main__':
# 7 er taket, enkanals
# 8-9-10 er noen på pila, trekanals
# 11-12-13-14 er den siste på pila, firekanals
colors = {
'red': {
7: 20, # rød på enkanals
8: 20, # rød på enkanals (trekanals bruker samme)
9: 255, # styrke på trekanals
10: 0, # trekanals. 0 er fint. 255 er fast strobe...
11: 255, # R # rød på firekanals
12: 0, # G
13: 0, # B
14: 0, # White
},
'green': {
7: 40, # grønn på enkanals
8: 40, # grønn på enkanals (trekanals bruker samme)
9: 255, # styrke på trekanals
10: 0, # trekanals. 0 er fint. 255 er fast strobe...
11: 0, # R
12: 255, # G
13: 0, # B
14: 0, # White
},
'yellow': {
7: 80, # gul på enkanals
8: 80, # gul på enkanals (trekanals bruker samme)
9: 255, # styrke på trekanals
10: 0, # trekanals. 0 er fint. 255 er fast strobe...
11: 255, # R
12: 255, # G
13: 0, # B
14: 0, # White
},
}
if len(sys.argv) != 2:
print('yellow, green or red?')
sys.exit()
if sys.argv[1] not in colors.keys():
print('yellow, green or red?')
sys.exit()
color = sys.argv[1]
set_dmx(colors[color])
get_dmx()
|
mit
|
Python
|
|
da373b924cf4dffe639e29543b5fc0e728be1ed9
|
Add orgviz.randomnodes
|
tkf/orgviz
|
orgviz/randomnodes.py
|
orgviz/randomnodes.py
|
import random
import datetime
class RandomDatetime(object):
def __init__(self, datewidth=7):
self.datewidth = datewidth
self.now = datetime.datetime.now()
def datetime(self):
delta = datetime.timedelta(random.randrange(- self.datewidth,
self.datewidth + 1))
return self.now + delta
def date(self):
return datetime.date(*self.datetime().timetuple()[:3])
def node(level, heading, scheduled=None, deadline=None, closed=None,
clock=None):
datestr = lambda x: x.strftime('<%Y-%m-%d %a>')
yield '*' * level
yield ' '
yield heading
yield '\n'
if scheduled or deadline or closed:
yield ' ' * level
for (name, date) in [('CLOSED', closed),
('DEADLINE', deadline),
('SCHEDULED', scheduled)]:
if date:
yield ' '
yield name
yield ': '
yield datestr(date)
if scheduled or deadline or closed:
yield '\n'
def makeorg(num):
heading_pops = ['aaa', 'bbb', 'ccc']
true_or_false = [True, False]
rd = RandomDatetime()
for i in range(num):
kwds = {}
if i == 0:
kwds['level'] = 1
else:
kwds['level'] = random.randrange(1, 4)
kwds['heading'] = random.choice(heading_pops)
for sdc in ['scheduled', 'deadline', 'closed']:
if random.choice(true_or_false):
kwds[sdc] = rd.date()
for s in node(**kwds):
yield s
def writeorg(file, *args, **kwds):
file.writelines(makeorg(*args, **kwds))
def run(num):
import sys
writeorg(sys.stdout, num)
|
mit
|
Python
|
|
a5bffdaa29d2f270a6f8781c34a2756a66a00a87
|
Bump version
|
tobinjt/Flexget,Danfocus/Flexget,JorisDeRieck/Flexget,gazpachoking/Flexget,Flexget/Flexget,Flexget/Flexget,crawln45/Flexget,malkavi/Flexget,ianstalk/Flexget,Danfocus/Flexget,tobinjt/Flexget,Danfocus/Flexget,Flexget/Flexget,malkavi/Flexget,malkavi/Flexget,malkavi/Flexget,Flexget/Flexget,Danfocus/Flexget,crawln45/Flexget,ianstalk/Flexget,JorisDeRieck/Flexget,ianstalk/Flexget,tobinjt/Flexget,JorisDeRieck/Flexget,crawln45/Flexget,crawln45/Flexget,tobinjt/Flexget,gazpachoking/Flexget,JorisDeRieck/Flexget
|
flexget/_version.py
|
flexget/_version.py
|
"""
Current FlexGet version.
This is contained in a separate file so that it can be easily read by setup.py, and easily edited and committed by
release scripts in continuous integration. Should (almost) never be set manually.
The version should always be set to the <next release version>.dev
The jenkins release job will automatically strip the .dev for release,
and update the version again for continued development.
"""
__version__ = '2.20.0.dev'
|
"""
Current FlexGet version.
This is contained in a separate file so that it can be easily read by setup.py, and easily edited and committed by
release scripts in continuous integration. Should (almost) never be set manually.
The version should always be set to the <next release version>.dev
The jenkins release job will automatically strip the .dev for release,
and update the version again for continued development.
"""
__version__ = '2.19.5.dev'
|
mit
|
Python
|
6b5c46238975eb63b36f43eb79002946a744fd68
|
Prepare v2.10.47.dev
|
LynxyssCZ/Flexget,crawln45/Flexget,LynxyssCZ/Flexget,OmgOhnoes/Flexget,JorisDeRieck/Flexget,qk4l/Flexget,gazpachoking/Flexget,Flexget/Flexget,ianstalk/Flexget,LynxyssCZ/Flexget,ianstalk/Flexget,ianstalk/Flexget,JorisDeRieck/Flexget,tobinjt/Flexget,crawln45/Flexget,LynxyssCZ/Flexget,Danfocus/Flexget,malkavi/Flexget,crawln45/Flexget,Danfocus/Flexget,OmgOhnoes/Flexget,jawilson/Flexget,JorisDeRieck/Flexget,Flexget/Flexget,qk4l/Flexget,tobinjt/Flexget,Flexget/Flexget,malkavi/Flexget,crawln45/Flexget,tobinjt/Flexget,qk4l/Flexget,tobinjt/Flexget,jawilson/Flexget,jawilson/Flexget,malkavi/Flexget,jawilson/Flexget,malkavi/Flexget,gazpachoking/Flexget,Danfocus/Flexget,Flexget/Flexget,JorisDeRieck/Flexget,Danfocus/Flexget,OmgOhnoes/Flexget
|
flexget/_version.py
|
flexget/_version.py
|
"""
Current FlexGet version.
This is contained in a separate file so that it can be easily read by setup.py, and easily edited and committed by
release scripts in continuous integration. Should (almost) never be set manually.
The version should always be set to the <next release version>.dev
The jenkins release job will automatically strip the .dev for release,
and update the version again for continued development.
"""
__version__ = '2.10.47.dev'
|
"""
Current FlexGet version.
This is contained in a separate file so that it can be easily read by setup.py, and easily edited and committed by
release scripts in continuous integration. Should (almost) never be set manually.
The version should always be set to the <next release version>.dev
The jenkins release job will automatically strip the .dev for release,
and update the version again for continued development.
"""
__version__ = '2.10.46'
|
mit
|
Python
|
ea41e4cdc515ca8514c3613a1f474fb3627b7dda
|
Remove autosynth / tweaks for 'README.rst' / 'setup.py'. (#5957)
|
tswast/google-cloud-python,jonparrott/google-cloud-python,dhermes/google-cloud-python,GoogleCloudPlatform/gcloud-python,GoogleCloudPlatform/gcloud-python,tswast/google-cloud-python,googleapis/google-cloud-python,jonparrott/gcloud-python,jonparrott/gcloud-python,dhermes/gcloud-python,tseaver/gcloud-python,tseaver/google-cloud-python,googleapis/google-cloud-python,tswast/google-cloud-python,tseaver/google-cloud-python,dhermes/google-cloud-python,dhermes/gcloud-python,tseaver/gcloud-python,dhermes/google-cloud-python,jonparrott/google-cloud-python,tseaver/google-cloud-python
|
tasks/synth.py
|
tasks/synth.py
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This script is used to synthesize generated parts of this library."""
import synthtool as s
import synthtool.gcp as gcp
import logging
logging.basicConfig(level=logging.DEBUG)
gapic = gcp.GAPICGenerator()
common = gcp.CommonTemplates()
excludes = [
'README.rst',
'setup.py',
'docs/conf.py',
'docs/index.rst',
]
for version in ['v2beta2', 'v2beta3']:
library = gapic.py_library(
'tasks', version,
config_path=f'artman_cloudtasks_{version}.yaml')
s.copy(library, excludes=excludes)
# Fix unindentation of bullet list second line
s.replace(
f'google/cloud/tasks_{version}/gapic/cloud_tasks_client.py',
'( \* .*\n )([^\s*])',
'\g<1> \g<2>')
s.replace(
f'google/cloud/tasks_{version}/gapic/cloud_tasks_client.py',
'(Google IAM .*?_) ',
'\g<1>_ ')
# Issues with Anonymous ('__') links. Change to named.
s.replace(
f"google/cloud/tasks_{version}/proto/*.py",
">`__",
">`_")
# Issue in v2beta2
s.replace(
f'google/cloud/tasks_v2beta2/gapic/cloud_tasks_client.py',
r'(Sample filter \\"app_engine_http_target: )\*\\".',
'\g<1>\\*\\".')
# Wrapped link fails due to space in link (v2beta2)
s.replace(
f"google/cloud/tasks_v2beta2/proto/queue_pb2.py",
'(uests in queue.yaml/xml) <\n\s+',
'\g<1>\n <')
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This script is used to synthesize generated parts of this library."""
import synthtool as s
import synthtool.gcp as gcp
import logging
logging.basicConfig(level=logging.DEBUG)
gapic = gcp.GAPICGenerator()
common = gcp.CommonTemplates()
for version in ['v2beta2', 'v2beta3']:
library = gapic.py_library(
'tasks', version,
config_path=f'artman_cloudtasks_{version}.yaml')
s.copy(library, excludes=['docs/conf.py', 'docs/index.rst'])
# Fix unindentation of bullet list second line
s.replace(
f'google/cloud/tasks_{version}/gapic/cloud_tasks_client.py',
'( \* .*\n )([^\s*])',
'\g<1> \g<2>')
s.replace(
f'google/cloud/tasks_{version}/gapic/cloud_tasks_client.py',
'(Google IAM .*?_) ',
'\g<1>_ ')
# Issues with Anonymous ('__') links. Change to named.
s.replace(
f"google/cloud/tasks_{version}/proto/*.py",
">`__",
">`_")
# Issue in v2beta2
s.replace(
f'google/cloud/tasks_v2beta2/gapic/cloud_tasks_client.py',
r'(Sample filter \\"app_engine_http_target: )\*\\".',
'\g<1>\\*\\".')
# Wrapped link fails due to space in link (v2beta2)
s.replace(
f"google/cloud/tasks_v2beta2/proto/queue_pb2.py",
'(uests in queue.yaml/xml) <\n\s+',
'\g<1>\n <')
# Set Release Status
release_status = 'Development Status :: 3 - Alpha'
s.replace('setup.py',
'(release_status = )(.*)$',
f"\\1'{release_status}'")
# Add Dependencies
s.replace('setup.py',
'dependencies = \[\n*(^.*,\n)+',
"\\g<0> 'grpc-google-iam-v1<0.12dev,>=0.11.4',\n")
# Fix the enable API link
s.replace(
'README.rst',
r'.. _Enable the Cloud Tasks API.: https://cloud.google.com/tasks',
'.. _Enable the Cloud Tasks API.: https://console.cloud.google.com/apis/'
'library/cloudtasks.googleapis.com')
|
apache-2.0
|
Python
|
761ec2bd6492b041eb658ee836a63ffb877469d5
|
Add management command to load all version fixtures
|
refreshoxford/django-cbv-inspector,refreshoxford/django-cbv-inspector,refreshoxford/django-cbv-inspector,refreshoxford/django-cbv-inspector
|
cbv/management/commands/load_all_django_versions.py
|
cbv/management/commands/load_all_django_versions.py
|
import os
import re
from django.conf import settings
from django.core.management import call_command, BaseCommand
class Command(BaseCommand):
"""Load the Django project fixtures and all version fixtures"""
def handle(self, **options):
fixtures_dir = os.path.join(settings.DIRNAME, 'cbv', 'fixtures')
self.stdout.write('Loading project.json')
call_command('loaddata', 'cbv/fixtures/project.json')
version_fixtures = [re.match(r'((?:\d+\.){2,3}json)', filename) for filename in os.listdir(fixtures_dir)]
for match in version_fixtures:
try:
fixture = match.group()
except AttributeError:
continue
self.stdout.write('Loading {}'.format(fixture))
call_command('loaddata', 'cbv/fixtures/{}'.format(fixture))
|
bsd-2-clause
|
Python
|
|
9e2fe5de082c736ec44dbf150d8350a0e164d2ae
|
Create beta_which_operator.py
|
Orange9000/Codewars,Orange9000/Codewars
|
Solutions/beta/beta_which_operator.py
|
Solutions/beta/beta_which_operator.py
|
def whichOper(a, b, oper):
return {'a':lambda x,y: x+y,
's':lambda x,y: x-y,
'm':lambda x,y: x*y,
'd':lambda x,y: x/y}[oper[0]](a,b)
|
mit
|
Python
|
|
e3dcc7ef44bbc8772fd5ad4f0941e5d98bf1ccdd
|
add migration
|
citationfinder/scholarly_citation_finder
|
scholarly_citation_finder/apps/tasks/migrations/0003_auto_20160224_1349.py
|
scholarly_citation_finder/apps/tasks/migrations/0003_auto_20160224_1349.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('tasks', '0002_task_starttime'),
]
operations = [
migrations.RemoveField(
model_name='task',
name='taskmeta',
),
migrations.AddField(
model_name='task',
name='taskmeta_id',
field=models.CharField(default='', max_length=100),
preserve_default=False,
),
]
|
mit
|
Python
|
|
0711b2eee10e5e48186d78144697a35640a33cb1
|
Add a passthrough manager
|
educreations/django-mysql-fuzzycount
|
mysql_fuzzycount/managers.py
|
mysql_fuzzycount/managers.py
|
from model_utils.managers import PassThroughManager
from mysql_fuzzycount.queryset import FuzzyCountQuerySet
FuzzyCountManager = PassThroughManager.for_queryset_class(FuzzyCountQuerySet)
|
mit
|
Python
|
|
56b2897655940962a8cfa06cc8a9fcfe22262412
|
Create config_local.py
|
jancelin/geo-poppy,jancelin/geo-poppy
|
pgadmin4/config_local.py
|
pgadmin4/config_local.py
|
# -*- coding: utf-8 -*-
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2016, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
# config_local.py - Core application configuration settings
#
##########################################################################
import os
from distutils.util import strtobool
from logging import *
# Data directory for storage of config settings etc. This shouldn't normally
# need to be changed - it's here as various other settings depend on it.
DATA_DIR = os.getenv('PG_ADMIN_DATA_DIR', '/pgadmin/')
##########################################################################
# Log settings
##########################################################################
DEBUG = strtobool(os.getenv('DEBUG', "False"))
# Log to stdout so that logging is handled by Docker logging drivers
LOG_FILE = '/dev/stdout'
##########################################################################
# Server settings
##########################################################################
SERVER_MODE = False
DEFAULT_SERVER = '0.0.0.0'
DEFAULT_SERVER_PORT = int(os.getenv('PG_ADMIN_PORT', 5050))
##########################################################################
# User account and settings storage
##########################################################################
SQLITE_PATH = os.path.join(DATA_DIR, 'config', 'pgadmin4.db')
SESSION_DB_PATH = '/dev/shm/pgAdmin4_session'
##########################################################################
# Upgrade checks
##########################################################################
# Disable upgrade checks; container should be immutable
UPGRADE_CHECK_ENABLED = False
##########################################################################
# Storage Manager storage url config settings
# If user sets STORAGE_DIR to empty it will show all volumes if platform
# is Windows, '/' if it is Linux, Mac or any other unix type system.
# For example:
# 1. STORAGE_DIR = get_drive("C") or get_drive() # return C:/ by default
# where C can be any drive character such as "D", "E", "G" etc
# 2. Set path manually like
# STORAGE_DIR = "/path/to/directory/"
##########################################################################
STORAGE_DIR = os.path.join(DATA_DIR, 'storage')
|
agpl-3.0
|
Python
|
|
1de5b9746c33add889837e5e0feaf1796fb00eb8
|
add script to generate breakseq index
|
bioinform/breakseq2
|
scripts/breakseq2_gen_bplib.py
|
scripts/breakseq2_gen_bplib.py
|
#!/usr/bin/env python
import argparse
from breakseq2 import breakseq_index, _version
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Generate breakpoint library FASTA from breakpoint GFF",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
breakseq_index.add_options(parser)
parser.add_argument("--reference", help="Reference FASTA", required=True)
parser.add_argument("--output", help="Output FASTA to generate. Leave unspecified for stdout")
parser.add_argument('--version', action='version', version='%(prog)s ' + _version.__version__)
args = parser.parse_args()
breakseq_index.generate_bplib(args.bplib_gff, args.reference, args.output, args.junction_length, args.format_version)
|
bsd-2-clause
|
Python
|
|
4dde2e3718ecd2e3e8b0ffc7e38433e3a9cda546
|
add beta_vae script
|
probml/pyprobml,probml/pyprobml,probml/pyprobml,probml/pyprobml
|
scripts/vae/models/beta_vae.py
|
scripts/vae/models/beta_vae.py
|
# -*- coding: utf-8 -*-
import torch
import torch.nn as nn
import torch.nn.functional as F
from typing import Optional
def kl_divergence(mean, logvar):
return -0.5 * torch.mean(1 + logvar - torch.square(mean) - torch.exp(logvar))
def loss(config, x, x_hat, z, mu, logvar):
recons_loss = F.mse_loss(x_hat, x, reduction='mean')
kld_loss = kl_divergence(mu, logvar)
loss = recons_loss + config["kl_coeff"] * kld_loss
return loss
class Encoder(nn.Module):
def __init__(self,
in_channels: int = 3,
hidden_dims: Optional[list] = None,
latent_dim: int = 256):
super(Encoder, self).__init__()
modules = []
if hidden_dims is None:
hidden_dims = [32, 64, 128, 256, 512]
# Build Encoder
for h_dim in hidden_dims:
modules.append(
nn.Sequential(
nn.Conv2d(in_channels, out_channels=h_dim,
kernel_size= 3, stride= 2, padding = 1),
nn.BatchNorm2d(h_dim),
nn.LeakyReLU())
)
in_channels = h_dim
self.encoder = nn.Sequential(*modules)
self.fc_mu = nn.Linear(hidden_dims[-1]*4, latent_dim)
self.fc_var = nn.Linear(hidden_dims[-1]*4, latent_dim)
def forward(self, x):
x = self.encoder(x)
x = torch.flatten(x, start_dim=1)
mu = self.fc_mu(x)
log_var = self.fc_var(x)
return mu, log_var
class Decoder(nn.Module):
def __init__(self,
hidden_dims: Optional[list] = None,
latent_dim: int = 256):
super(Decoder, self).__init__()
# Build Decoder
modules = []
if hidden_dims is None:
hidden_dims = [32, 64, 128, 256, 512]
hidden_dims.reverse()
self.decoder_input = nn.Linear(latent_dim, hidden_dims[0] * 4)
for i in range(len(hidden_dims) - 1):
modules.append(
nn.Sequential(
nn.ConvTranspose2d(hidden_dims[i],
hidden_dims[i + 1],
kernel_size=3,
stride = 2,
padding=1,
output_padding=1),
nn.BatchNorm2d(hidden_dims[i + 1]),
nn.LeakyReLU())
)
self.decoder = nn.Sequential(*modules)
self.final_layer = nn.Sequential(
nn.ConvTranspose2d(hidden_dims[-1],
hidden_dims[-1],
kernel_size=3,
stride=2,
padding=1,
output_padding=1),
nn.BatchNorm2d(hidden_dims[-1]),
nn.LeakyReLU(),
nn.Conv2d(hidden_dims[-1], out_channels= 3,
kernel_size= 3, padding= 1),
nn.Sigmoid())
def forward(self, z):
result = self.decoder_input(z)
result = result.view(-1, 512, 2, 2)
result = self.decoder(result)
result = self.final_layer(result)
return result
|
mit
|
Python
|
|
33f455cba56c7ae557cfe9f5494b6a045c68f1d2
|
add simple hello world
|
benoitc/flower,benoitc/flower
|
examples/hello.py
|
examples/hello.py
|
from flower import run, schedule, tasklet
def say(s):
for i in range(5):
schedule()
print(s)
def main():
tasklet(say)("world")
say("hello")
run()
if __name__ == '__main__':
main()
|
mit
|
Python
|
|
152dafeeb35647dbcfb25549f7f1e73a397428a0
|
Add urls for the demo
|
django-blog-zinnia/zinnia-url-shortener-bitly
|
demo_zinnia_bitly/urls.py
|
demo_zinnia_bitly/urls.py
|
"""Urls for the zinnia-bitly demo"""
from django.conf import settings
from django.contrib import admin
from django.conf.urls import url
from django.conf.urls import include
from django.conf.urls import patterns
from django.views.generic.base import RedirectView
from zinnia.sitemaps import TagSitemap
from zinnia.sitemaps import EntrySitemap
from zinnia.sitemaps import CategorySitemap
from zinnia.sitemaps import AuthorSitemap
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^$', RedirectView.as_view(url='/blog/')),
url(r'^blog/', include('zinnia.urls', namespace='zinnia')),
url(r'^comments/', include('django.contrib.comments.urls')),
url(r'^i18n/', include('django.conf.urls.i18n')),
url(r'^admin/', include(admin.site.urls)),
)
sitemaps = {
'tags': TagSitemap,
'blog': EntrySitemap,
'authors': AuthorSitemap,
'categories': CategorySitemap
}
urlpatterns += patterns(
'django.contrib.sitemaps.views',
url(r'^sitemap.xml$', 'index',
{'sitemaps': sitemaps}),
url(r'^sitemap-(?P<section>.+)\.xml$', 'sitemap',
{'sitemaps': sitemaps}),
)
urlpatterns += patterns(
'',
url(r'^400/$', 'django.views.defaults.bad_request'),
url(r'^403/$', 'django.views.defaults.permission_denied'),
url(r'^404/$', 'django.views.defaults.page_not_found'),
url(r'^500/$', 'django.views.defaults.server_error'),
)
if settings.DEBUG:
urlpatterns += patterns(
'',
url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT})
)
|
bsd-3-clause
|
Python
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.