commit
stringlengths 40
40
| subject
stringlengths 4
1.73k
| repos
stringlengths 5
127k
| old_file
stringlengths 2
751
| new_file
stringlengths 2
751
| new_contents
stringlengths 1
8.98k
| old_contents
stringlengths 0
6.59k
| license
stringclasses 13
values | lang
stringclasses 23
values |
---|---|---|---|---|---|---|---|---|
64b14b64c00bc6885acc1ff4d9b76898f66a8a86
|
add new package (#15737)
|
iulian787/spack,iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack,LLNL/spack,LLNL/spack,iulian787/spack
|
var/spack/repos/builtin/packages/opendx/package.py
|
var/spack/repos/builtin/packages/opendx/package.py
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
class Opendx(AutotoolsPackage):
    """Open Visualization Data Explorer."""

    # Project page and git repository Spack clones from.
    homepage = "https://github.com/Mwoolsey/OpenDX"
    git = "https://github.com/Mwoolsey/OpenDX.git"

    # Only a rolling 'master' version is provided; no tagged releases here.
    version('master', branch='master')

    depends_on('motif')  # lesstif also works, but exhibits odd behaviors
    depends_on('gl')

    @run_before('autoreconf')
    def distclean(self):
        # Run 'make distclean' in the source tree before autoreconf,
        # discarding any pre-existing build products shipped in the repo.
        make('distclean')
|
lgpl-2.1
|
Python
|
|
7c56318cb545011e64e3a491058054ad3d7cd9c0
|
Create new package. (#5987)
|
tmerrick1/spack,iulian787/spack,mfherbst/spack,iulian787/spack,EmreAtes/spack,LLNL/spack,skosukhin/spack,matthiasdiener/spack,krafczyk/spack,mfherbst/spack,iulian787/spack,skosukhin/spack,tmerrick1/spack,lgarren/spack,krafczyk/spack,lgarren/spack,mfherbst/spack,tmerrick1/spack,LLNL/spack,LLNL/spack,matthiasdiener/spack,skosukhin/spack,krafczyk/spack,mfherbst/spack,skosukhin/spack,skosukhin/spack,matthiasdiener/spack,LLNL/spack,matthiasdiener/spack,EmreAtes/spack,iulian787/spack,EmreAtes/spack,lgarren/spack,mfherbst/spack,matthiasdiener/spack,krafczyk/spack,EmreAtes/spack,tmerrick1/spack,lgarren/spack,krafczyk/spack,iulian787/spack,EmreAtes/spack,tmerrick1/spack,LLNL/spack,lgarren/spack
|
var/spack/repos/builtin/packages/r-aims/package.py
|
var/spack/repos/builtin/packages/r-aims/package.py
|
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class RAims(RPackage):
    """This package contains the AIMS implementation. It contains
    necessary functions to assign the five intrinsic molecular
    subtypes (Luminal A, Luminal B, Her2-enriched, Basal-like,
    Normal-like). Assignments could be done on individual samples
    as well as on dataset of gene expression data."""

    homepage = "http://bioconductor.org/packages/AIMS/"
    # Fetched straight from the Bioconductor git repository (no tarball URL).
    url = "https://git.bioconductor.org/packages/AIMS"

    # Version pinned to an exact commit of the Bioconductor repo.
    version('1.8.0', git='https://git.bioconductor.org/packages/AIMS', commit='86b866c20e191047492c51b43e3f73082c3f8357')

    # This release only builds against the R 3.4.x series.
    depends_on('r@3.4.0:3.4.9', when='@1.8.0')
    depends_on('r-e1071', type=('build', 'run'))
    depends_on('r-biobase', type=('build', 'run'))
|
lgpl-2.1
|
Python
|
|
eb429be1fdc7335bec5ba036fcece309778b23f0
|
Add an example that uses filterReactions AND pdep at the same time
|
pierrelb/RMG-Py,nyee/RMG-Py,nickvandewiele/RMG-Py,nyee/RMG-Py,chatelak/RMG-Py,chatelak/RMG-Py,pierrelb/RMG-Py,nickvandewiele/RMG-Py
|
examples/rmg/heptane-filterReactions/input.py
|
examples/rmg/heptane-filterReactions/input.py
|
# RMG input file: n-heptane pyrolysis with reaction filtering AND
# pressure dependence enabled at the same time.

# Data sources
database(
    thermoLibraries = ['primaryThermoLibrary'],
    reactionLibraries = [],
    seedMechanisms = [],
    kineticsDepositories = ['training'],
    kineticsFamilies = 'default',
    kineticsEstimator = 'rate rules',
)

# Constraints on generated species
generatedSpeciesConstraints(
    maximumCarbonAtoms = 7,
)

# List of species
species(
    label='n-heptane',
    structure=SMILES("CCCCCCC"),
)
species(
    label='Ar',
    reactive=False,  # Ar is an inert bath gas
    structure=SMILES("[Ar]"),
)

# Two reactor conditions at the same pressure (400 Pa), 1600 K and 2000 K;
# both stop at 99% heptane conversion or 1e6 s, whichever comes first.
simpleReactor(
    temperature=(1600,'K'),
    pressure=(400,'Pa'),
    initialMoleFractions={
        "n-heptane": 0.02,
        "Ar": 0.98,
    },
    terminationConversion={
        'n-heptane': 0.99,
    },
    terminationTime=(1e6,'s'),
)
simpleReactor(
    temperature=(2000,'K'),
    pressure=(400,'Pa'),
    initialMoleFractions={
        "n-heptane": 0.02,
        "Ar": 0.98,
    },
    terminationConversion={
        'n-heptane': 0.99,
    },
    terminationTime=(1e6,'s'),
)

# ODE solver tolerances
simulator(
    atol=1e-16,
    rtol=1e-8,
)

# Model enlargement tolerances; filterReactions=True is the feature this
# example demonstrates in combination with pressureDependence below.
model(
    toleranceMoveToCore=0.01,
    toleranceInterruptSimulation=0.01,
    filterReactions=True,
)

pressureDependence(
    method='modified strong collision',
    maximumGrainSize=(0.5,'kcal/mol'),
    minimumNumberOfGrains=250,
    temperatures=(300,3000,'K',8),
    pressures=(0.001,100,'bar',5),
    interpolation=('Chebyshev', 6, 4),
)
|
mit
|
Python
|
|
e5a14054e1e9e95b04baf5ec7c92a2fdde51703b
|
Update __openerp__.py
|
ingadhoc/odoo-personalizations,adhoc-dev/odoo-personalizations
|
scanterra_modifcations/__openerp__.py
|
scanterra_modifcations/__openerp__.py
|
# -*- coding: utf-8 -*-
# Odoo (openerp) addon manifest for the Scanterra customisations module.
{
    'name': 'Scanterra Modifications',
    'version': '8.0.1.1.0',
    'category': 'Sales Management',
    'sequence': 14,
    'summary': 'Sales, Product, Category, Clasification',
    'description': """
Scanterra Modifications
=======================
* Restringir que las tareas creadas por un usuario, no las pueda eliminar otro usuario. Es decir que cada usuario solo pueda eliminar las tareas creadas por si mismo.
* Que se registre automáticamente como una nota cuando se cambia alguna de los siguientes campos de la tarea (Resumen de la tarea (titulo), fecha limite, horas iniciales planificadas, fecha de inicio y fecha final) (actualmente solo registra en forma automática los cambios de estado).
* Ocultar el campo probabilidad en crm lead tree view
""",
    'author': 'Ingenieria ADHOC',
    'website': 'www.ingadhoc.com',
    # Modules that must be installed before this one.
    'depends': [
        'project',
        'project_issue',
        'crm',
    ],
    # XML files loaded on install/update (security rules + view changes).
    'data': [
        'security/project_security.xml',
        'crm_lead_view.xml',
        'phonecall_view.xml',
        'project_task_view.xml',
    ],
    'demo': [
    ],
    'test': [
    ],
    'installable': True,
    'auto_install': False,
    'application': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
# -*- coding: utf-8 -*-
# Odoo (openerp) addon manifest for the Scanterra customisations module.
{
    'name': 'Scanterra Modifications',
    'version': '8.0.1.0.0',
    'category': 'Sales Management',
    'sequence': 14,
    'summary': 'Sales, Product, Category, Clasification',
    'description': """
Scanterra Modifications
=======================
* Restringir que las tareas creadas por un usuario, no las pueda eliminar otro usuario. Es decir que cada usuario solo pueda eliminar las tareas creadas por si mismo.
* Que se registre automáticamente como una nota cuando se cambia alguna de los siguientes campos de la tarea (Resumen de la tarea (titulo), fecha limite, horas iniciales planificadas, fecha de inicio y fecha final) (actualmente solo registra en forma automática los cambios de estado).
* Ocultar el campo probabilidad en crm lead tree view
""",
    'author': 'Ingenieria ADHOC',
    'website': 'www.ingadhoc.com',
    # Modules that must be installed before this one.
    'depends': [
        'project',
        'project_issue',
        'crm',
    ],
    # XML files loaded on install/update (security rules + view changes).
    'data': [
        'security/project_security.xml',
        'crm_lead_view.xml',
        'phonecall_view.xml',
        'project_task_view.xml',
    ],
    'demo': [
    ],
    'test': [
    ],
    'installable': True,
    'auto_install': False,
    'application': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
Python
|
559ba309a72d277c3b5a78614889d19b8866b7ea
|
add parallel by Alexandre Gramford (mne-python)
|
bert9bert/statsmodels,kiyoto/statsmodels,astocko/statsmodels,bert9bert/statsmodels,rgommers/statsmodels,statsmodels/statsmodels,astocko/statsmodels,jstoxrocky/statsmodels,Averroes/statsmodels,YihaoLu/statsmodels,adammenges/statsmodels,detrout/debian-statsmodels,wkfwkf/statsmodels,wdurhamh/statsmodels,detrout/debian-statsmodels,josef-pkt/statsmodels,musically-ut/statsmodels,jseabold/statsmodels,jstoxrocky/statsmodels,phobson/statsmodels,edhuckle/statsmodels,saketkc/statsmodels,ChadFulton/statsmodels,bsipocz/statsmodels,cbmoore/statsmodels,hainm/statsmodels,wwf5067/statsmodels,edhuckle/statsmodels,cbmoore/statsmodels,bert9bert/statsmodels,yarikoptic/pystatsmodels,huongttlan/statsmodels,saketkc/statsmodels,yarikoptic/pystatsmodels,yl565/statsmodels,wdurhamh/statsmodels,statsmodels/statsmodels,nvoron23/statsmodels,wdurhamh/statsmodels,nvoron23/statsmodels,bavardage/statsmodels,pprett/statsmodels,gef756/statsmodels,jseabold/statsmodels,wdurhamh/statsmodels,alekz112/statsmodels,bert9bert/statsmodels,saketkc/statsmodels,wwf5067/statsmodels,alekz112/statsmodels,yl565/statsmodels,bavardage/statsmodels,yl565/statsmodels,phobson/statsmodels,DonBeo/statsmodels,nguyentu1602/statsmodels,DonBeo/statsmodels,phobson/statsmodels,rgommers/statsmodels,pprett/statsmodels,bsipocz/statsmodels,gef756/statsmodels,kiyoto/statsmodels,gef756/statsmodels,musically-ut/statsmodels,bsipocz/statsmodels,YihaoLu/statsmodels,hainm/statsmodels,bert9bert/statsmodels,DonBeo/statsmodels,edhuckle/statsmodels,bashtage/statsmodels,wzbozon/statsmodels,wkfwkf/statsmodels,Averroes/statsmodels,hainm/statsmodels,josef-pkt/statsmodels,ChadFulton/statsmodels,astocko/statsmodels,bashtage/statsmodels,huongttlan/statsmodels,gef756/statsmodels,nguyentu1602/statsmodels,yl565/statsmodels,jstoxrocky/statsmodels,hlin117/statsmodels,waynenilsen/statsmodels,wzbozon/statsmodels,YihaoLu/statsmodels,bzero/statsmodels,jseabold/statsmodels,bashtage/statsmodels,edhuckle/statsmodels,bashtage/statsmodels,ChadFulton/statsmodels,jseab
old/statsmodels,wkfwkf/statsmodels,hlin117/statsmodels,bsipocz/statsmodels,cbmoore/statsmodels,ChadFulton/statsmodels,jstoxrocky/statsmodels,detrout/debian-statsmodels,kiyoto/statsmodels,hlin117/statsmodels,huongttlan/statsmodels,bashtage/statsmodels,hainm/statsmodels,wdurhamh/statsmodels,rgommers/statsmodels,wwf5067/statsmodels,astocko/statsmodels,pprett/statsmodels,bavardage/statsmodels,bzero/statsmodels,huongttlan/statsmodels,YihaoLu/statsmodels,josef-pkt/statsmodels,ChadFulton/statsmodels,bashtage/statsmodels,bavardage/statsmodels,nguyentu1602/statsmodels,josef-pkt/statsmodels,ChadFulton/statsmodels,adammenges/statsmodels,wzbozon/statsmodels,phobson/statsmodels,bzero/statsmodels,Averroes/statsmodels,detrout/debian-statsmodels,alekz112/statsmodels,alekz112/statsmodels,nvoron23/statsmodels,edhuckle/statsmodels,musically-ut/statsmodels,hlin117/statsmodels,cbmoore/statsmodels,YihaoLu/statsmodels,statsmodels/statsmodels,bzero/statsmodels,DonBeo/statsmodels,gef756/statsmodels,wkfwkf/statsmodels,statsmodels/statsmodels,rgommers/statsmodels,wkfwkf/statsmodels,nguyentu1602/statsmodels,adammenges/statsmodels,kiyoto/statsmodels,nvoron23/statsmodels,phobson/statsmodels,josef-pkt/statsmodels,jseabold/statsmodels,yl565/statsmodels,Averroes/statsmodels,kiyoto/statsmodels,yarikoptic/pystatsmodels,saketkc/statsmodels,wzbozon/statsmodels,rgommers/statsmodels,bzero/statsmodels,waynenilsen/statsmodels,wwf5067/statsmodels,wzbozon/statsmodels,nvoron23/statsmodels,bavardage/statsmodels,waynenilsen/statsmodels,pprett/statsmodels,saketkc/statsmodels,statsmodels/statsmodels,waynenilsen/statsmodels,josef-pkt/statsmodels,adammenges/statsmodels,cbmoore/statsmodels,statsmodels/statsmodels,DonBeo/statsmodels,musically-ut/statsmodels
|
scikits/statsmodels/tools/parallel.py
|
scikits/statsmodels/tools/parallel.py
|
"""Parralle util function
"""
# Author: Alexandre Gramfort <gramfort@nmr.mgh.harvard.edu>
#
# License: Simplified BSD
def parallel_func(func, n_jobs, verbose=5):
"""Return parallel instance with delayed function
Util function to use joblib only if available
Parameters
----------
func: callable
A function
n_jobs: int
Number of jobs to run in parallel
verbose: int
Verbosity level
Returns
-------
parallel: instance of joblib.Parallel or list
The parallel object
my_func: callable
func if not parallel or delayed(func)
n_jobs: int
Number of jobs >= 0
"""
try:
from sklearn.externals.joblib import Parallel, delayed
parallel = Parallel(n_jobs, verbose=verbose)
my_func = delayed(func)
if n_jobs == -1:
try:
import multiprocessing
n_jobs = multiprocessing.cpu_count()
except ImportError:
print "multiprocessing not installed. Cannot run in parallel."
n_jobs = 1
except ImportError:
print "joblib not installed. Cannot run in parallel."
n_jobs = 1
my_func = func
parallel = list
return parallel, my_func, n_jobs
|
bsd-3-clause
|
Python
|
|
2c6be657e0024a1a2e162a6a508d2d5716736121
|
add wrapper class for adiabatic approximation
|
jobovy/galpy,followthesheep/galpy,followthesheep/galpy,followthesheep/galpy,jobovy/galpy,jobovy/galpy,jobovy/galpy,followthesheep/galpy
|
galpy/actionAngle_src/actionAngleAdiabatic.py
|
galpy/actionAngle_src/actionAngleAdiabatic.py
|
###############################################################################
# actionAngle: a Python module to calculate actions, angles, and frequencies
#
# class: actionAngleAdiabatic
#
# wrapper around actionAngleAxi (adiabatic approximation) to do
# this for any (x,v)
#
# methods:
# JR
# Jphi
# Jz
# angleR
# anglez
# TR
# Tphi
# Tz
# I
# calcRapRperi
# calcEL
###############################################################################
import math as m
import numpy as nu
from actionAngleAxi import actionAngleAxi
from actionAngle import actionAngle
class actionAngleAdiabatic():
    """Action-angle formalism for axisymmetric potentials using the adiabatic approximation"""

    def __init__(self, *args, **kwargs):
        """
        NAME:
           __init__
        PURPOSE:
           initialize an actionAngleAdiabatic object
        INPUT:
           pot= potential or list of potentials (planarPotentials)
        OUTPUT:
        HISTORY:
           2012-07-26 - Written - Bovy (IAS@MPIA)
        """
        # dict.has_key() was removed in Python 3; a membership test is
        # equivalent and works on both Python 2 and 3.
        if 'pot' not in kwargs:
            raise IOError("Must specify pot= for actionAngleAxi")
        self._pot = kwargs['pot']
        return None

    def __call__(self, *args, **kwargs):
        """
        NAME:
           __call__
        PURPOSE:
           evaluate the actions (jr,lz,jz)
        INPUT:
           Either:
              a) R,vR,vT,z,vz
              b) Orbit instance: initial condition used if that's it, orbit(t)
                 if there is a time given as well
           scipy.integrate.quadrature keywords
        OUTPUT:
           (jr,lz,jz), where jr=[jr,jrerr], and jz=[jz,jzerr]
        HISTORY:
           2012-07-26 - Written - Bovy (IAS@MPIA)
        """
        # Set up the actionAngleAxi object: parse the input into a meta
        # object, then build planar and vertical potentials from self._pot
        # (which may be a single potential or a list of them).
        meta = actionAngle(*args)
        if isinstance(self._pot, list):
            thispot = [p.toPlanar() for p in self._pot]
        else:
            thispot = self._pot.toPlanar()
        if isinstance(self._pot, list):
            thisverticalpot = [p.toVertical(meta._R) for p in self._pot]
        else:
            thisverticalpot = self._pot.toVertical(meta._R)
        aAAxi = actionAngleAxi(*args, pot=thispot,
                               verticalPot=thisverticalpot)
        return (aAAxi.JR(**kwargs), aAAxi._R * aAAxi._vT, aAAxi.Jz(**kwargs))
|
bsd-3-clause
|
Python
|
|
3a240005142da25aa49938a15d39ddf68dd7cead
|
Add functional test to verify presence of policy
|
mahak/nova,rahulunair/nova,klmitch/nova,openstack/nova,mikalstill/nova,rahulunair/nova,gooddata/openstack-nova,mikalstill/nova,gooddata/openstack-nova,rahulunair/nova,klmitch/nova,mikalstill/nova,klmitch/nova,mahak/nova,openstack/nova,klmitch/nova,openstack/nova,mahak/nova,gooddata/openstack-nova,gooddata/openstack-nova
|
nova/tests/functional/api/openstack/placement/test_verify_policy.py
|
nova/tests/functional/api/openstack/placement/test_verify_policy.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from nova.api.openstack.placement import direct
from nova.api.openstack.placement import handler
from nova.tests.functional.api.openstack.placement import base
CONF = cfg.CONF
class TestVerifyPolicy(base.TestCase):
    """Verify that all defined placement routes have a policy."""

    # Paths that don't need a policy check
    EXCEPTIONS = ['/', '']

    def _test_request_403(self, client, method, route):
        # The request body is empty on purpose: policy enforcement happens
        # before any other processing, so the body content never matters.
        caller = getattr(client, method.lower())
        response = caller(route, data='',
                          headers={'x-auth-token': 'user',
                                   'content-type': 'application/json'})
        failure_message = (
            'method %s on route %s is open for user, status: %s'
            % (method, route, response.status_code))
        self.assertEqual(403, response.status_code, failure_message)

    def test_verify_policy(self):
        # Walk every declared route/method pair and confirm a plain user
        # token is rejected with 403 (i.e. some policy guards the route).
        with direct.PlacementDirect(CONF, latest_microversion=True) as client:
            for route, methods in handler.ROUTE_DECLARATIONS.items():
                if route not in self.EXCEPTIONS:
                    for method in methods:
                        self._test_request_403(client, method, route)
|
apache-2.0
|
Python
|
|
b416de22866f6ebc05fcb256d5ab97f391481ddc
|
Create CSVStreamReader.py
|
chriswebb/CSVStreamReader,chriswebb/CSVStreamReader
|
CSVStreamReader.py
|
CSVStreamReader.py
|
# The MIT License (MIT)
# Copyright (c) 2016 Chris Webb
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
class CSVStreamReader:
    """Read delimiter-separated records from a seekable text stream.

    Fields may be wrapped in ``text_field_identifier`` quotes, in which
    case embedded delimiters and the quote-doubling escape ("" -> ") are
    honoured.
    """

    # Class-level defaults; replaced per instance in __init__.
    delimiter = '\t'
    text_field_identifier = '"'

    def __init__(self, delimiter, textfieldIdentifier):
        self.delimiter = delimiter
        self.text_field_identifier = textfieldIdentifier

    def getTotalColumns(self, delimiter, textfieldIdentifier, stream, enc):
        """Count the columns of the first record of *stream*.

        Returns False when *stream* is not seekable (the read position
        could not be restored afterwards).  *enc* is accepted for
        backward compatibility and is unused here.
        """
        if not stream.seekable():
            return False
        curPosition = stream.tell()
        quoteCount = 0
        columnCount = 0
        # Bug fix: read(1) returns '' at end of stream, never 0, so the
        # original ``while curChar != 0`` never terminated at EOF.
        ch = stream.read(1)
        while ch != '':
            if ch == textfieldIdentifier:
                quoteCount += 1
            elif quoteCount % 2 == 0:
                # Only separators outside quoted fields delimit columns.
                if ch == delimiter:
                    columnCount += 1
                elif ch == '\n':
                    break
            ch = stream.read(1)
        # N separators mean N + 1 columns.
        columnCount += 1
        stream.seek(curPosition)
        return columnCount

    def readLine(self, stream, startPosition):
        """Read one record starting at offset *startPosition*.

        Returns ``{"output": [field, ...], "endPosition": offset}`` where
        ``endPosition`` is the offset just past the consumed newline, or
        None when *startPosition* is at/past the end of the stream.
        """
        if not startPosition:
            startPosition = 0
        if stream.seekable():
            stream.seek(startPosition)
        # Bug fixes vs. the original: attributes accessed through self
        # (bare ``delimiter``/``textfieldIdentifier`` were NameErrors),
        # list.append/''.join instead of the JavaScript push/record.join,
        # and EOF detected via '' instead of 0.
        quote = self.text_field_identifier
        sep = self.delimiter
        quoteCount = 0
        record = []
        output = []
        endPosition = startPosition
        lastChar = None
        ch = stream.read(1)
        if ch == '':
            return None
        while ch != '':
            endPosition += 1
            if ch == quote:
                if lastChar == ch:
                    # Doubled quote -> one literal quote character.
                    record.append(ch)
                quoteCount += 1
            elif quoteCount % 2 == 0:
                if ch == sep:
                    output.append(''.join(record))
                    record = []
                elif ch == '\r':
                    pass  # swallow the CR of CRLF line endings
                elif ch == '\n':
                    break  # endPosition already counts the newline
                else:
                    record.append(ch)
            else:
                # Inside a quoted field every character is literal.
                record.append(ch)
            lastChar = ch
            ch = stream.read(1)
        output.append(''.join(record))
        return {"output": output, "endPosition": endPosition}

    def readLines(self, stream):
        """Read every record of *stream*; returns a list of field lists."""
        pos = 0
        output = []
        # Bug fix: the original never re-read inside the loop (and called
        # readLine on the builtin ``input``), so it looped forever.
        result = self.readLine(stream, pos)
        while result:
            output.append(result["output"])
            pos = result["endPosition"]
            result = self.readLine(stream, pos)
        return output
|
mit
|
Python
|
|
da09de30b376f1ab9e687e8064423499b4cf8d50
|
Add missing file
|
michaelaye/vispy,michaelaye/vispy,ghisvail/vispy,dchilds7/Deysha-Star-Formation,sbtlaarzc/vispy,QuLogic/vispy,inclement/vispy,Eric89GXL/vispy,jdreaver/vispy,sh4wn/vispy,bollu/vispy,sh4wn/vispy,sbtlaarzc/vispy,jay3sh/vispy,jdreaver/vispy,srinathv/vispy,srinathv/vispy,RebeccaWPerry/vispy,kkuunnddaannkk/vispy,dchilds7/Deysha-Star-Formation,michaelaye/vispy,drufat/vispy,sbtlaarzc/vispy,dchilds7/Deysha-Star-Formation,ghisvail/vispy,jay3sh/vispy,srinathv/vispy,inclement/vispy,hronoses/vispy,julienr/vispy,sh4wn/vispy,jay3sh/vispy,drufat/vispy,drufat/vispy,jdreaver/vispy,julienr/vispy,kkuunnddaannkk/vispy,kkuunnddaannkk/vispy,Eric89GXL/vispy,RebeccaWPerry/vispy,hronoses/vispy,QuLogic/vispy,RebeccaWPerry/vispy,ghisvail/vispy,bollu/vispy,julienr/vispy,QuLogic/vispy,bollu/vispy,hronoses/vispy,inclement/vispy,Eric89GXL/vispy
|
vispy/util/context.py
|
vispy/util/context.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2014, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
Functionality to deal with GL Contexts in vispy. This module is not in
app, because we want to make it possible to use parts of vispy without
relying on app.
The GLContext object is more like a placeholder on which different parts
of vispy (or other systems) can keep track of information related to
an OpenGL context.
"""
from copy import deepcopy
import weakref
# Default GL context configuration: framebuffer channel bit depths,
# double-buffering, stereo and multisample settings used when a context
# is created without an explicit config.
_default_dict = dict(red_size=8, green_size=8, blue_size=8, alpha_size=8,
                     depth_size=16, stencil_size=0, double_buffer=True,
                     stereo=False, samples=0)


def get_default_config():
    """Get the default OpenGL context configuration

    Returns
    -------
    config : dict
        Dictionary of config values.
    """
    # Deep copy so callers may mutate the result without touching the
    # module-level defaults.
    return deepcopy(_default_dict)
class GLContext(object):
    """An object encapsulating data necessary for a shared OpenGL context

    The data are backend dependent.
    """

    def __init__(self, config=None):
        self._value = None  # Used by vispy.app to store a ref
        self._taken = None  # Used by vispy.app to say what backend owns it
        self._config = deepcopy(_default_dict)
        self._config.update(config or {})
        # Check the config dict: only known keys, with the same type as
        # the corresponding default value.
        for key, val in self._config.items():
            if key not in _default_dict:
                raise KeyError('Key %r is not a valid GL config key.' % key)
            if not isinstance(val, type(_default_dict[key])):
                raise TypeError('Context value of %r has invalid type.' % key)

    def take(self, value, who, weak=False):
        """ Claim ownership of this context. Can only be done if the
        context is not yet taken. The value should be a reference to
        the actual GL context (which is stored on this object using a
        weak reference). The string ``who`` should specify who took it.
        """
        if self.istaken:
            raise RuntimeError('This GLContext is already taken by %s.' %
                               self.istaken)
        if not weak:
            # Also keep a strong reference so the context object stays
            # alive for as long as this GLContext does.
            self._value_nonweak = value
        self._taken = str(who)
        self._value = weakref.ref(value)

    @property
    def istaken(self):
        """ Whether the context is owned by a GUI system. If taken, this
        returns the string name of the system that took it.
        """
        return self._taken

    @property
    def value(self):
        """ The value that the GUI system set when it took this context.
        This is stored with a weakref, so it can be None if the value
        has been cleaned up.
        """
        if self._value:
            return self._value()

    @property
    def config(self):
        """ A dictionary describing the configuration of this GL context.
        """
        return self._config

    def __repr__(self):
        # Bug fix: the old code read self._backend, an attribute that is
        # never assigned anywhere in this class, so repr() always raised
        # AttributeError.  The owner name recorded by take() is what was
        # meant.
        backend = self.istaken or 'no'
        return "<GLContext of %s backend at 0x%x>" % (backend, id(self))
|
bsd-3-clause
|
Python
|
|
7c623e1368f92155edafdbf822b8c1512aebbfaa
|
Create conf.py
|
JohnGriffiths/ConWhAt,JohnGriffiths/ConWhAt
|
docs/conf.py
|
docs/conf.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

# Sphinx configuration for the ConWhAt documentation (nbsphinx-based).
# Use sphinx-quickstart to create your own conf.py file!
# After that, you have to edit a few things. See below.

# Select nbsphinx and, if needed, add a math extension (mathjax or pngmath):
extensions = [
    'nbsphinx',
    'sphinx.ext.mathjax',
    'jupyter_alabaster_theme',
]

# Exclude build directory and Jupyter backup files:
exclude_patterns = ['_build', '**.ipynb_checkpoints']

# Default language for syntax highlighting in reST and Markdown cells
highlight_language = 'none'

# Don't add .txt suffix to source files (available for Sphinx >= 1.5):
html_sourcelink_suffix = ''

# Execute notebooks before conversion: 'always', 'never', 'auto' (default)
#nbsphinx_execute = 'never'

# Use this kernel instead of the one stored in the notebook metadata:
#nbsphinx_kernel_name = 'python3'

# List of arguments to be passed to the kernel that executes the notebooks:
#nbsphinx_execute_arguments = ['--InlineBackend.figure_formats={"png", "pdf"}']

# If True, the build process is continued even if an exception occurs:
#nbsphinx_allow_errors = True

# Controls when a cell will time out (defaults to 30; use -1 for no timeout):
#nbsphinx_timeout = 60

# Default Pygments lexer for syntax highlighting in code cells:
#nbsphinx_codecell_lexer = 'ipython3'

# Width of input/output prompts used in CSS:
#nbsphinx_prompt_width = '8ex'

# If window is narrower than this, input/output prompts are on separate lines:
#nbsphinx_responsive_width = '700px'

# This is processed by Jinja2 and inserted before each notebook
# NOTE(review): reST indentation inside this template reconstructed to the
# conventional nbsphinx layout — confirm against the rendered output.
nbsphinx_prolog = r"""
{% set docname = env.doc2path(env.docname, base='doc') %}

.. only:: html

    .. role:: raw-html(raw)
        :format: html

    .. nbinfo::

        This page was generated from `{{ docname }}`__.
        Interactive online version:
        :raw-html:`<a href="https://mybinder.org/v2/gh/spatialaudio/nbsphinx/{{ env.config.release }}?filepath={{ docname }}"><img alt="Binder badge" src="https://mybinder.org/badge.svg" style="vertical-align:text-bottom"></a>`

    __ https://github.com/spatialaudio/nbsphinx/blob/
        {{ env.config.release }}/{{ docname }}

.. raw:: latex

    \vfil\penalty-1\vfilneg
    \vspace{\baselineskip}
    \textcolor{gray}{The following section was generated from
    \texttt{\strut{}{{ docname }}}\\[-0.5\baselineskip]
    \noindent\rule{\textwidth}{0.4pt}}
    \vspace{-2\baselineskip}
"""

# This is processed by Jinja2 and inserted after each notebook
nbsphinx_epilog = r"""
.. raw:: latex

    \textcolor{gray}{\noindent\rule{\textwidth}{0.4pt}\\
    \hbox{}\hfill End of
    \texttt{\strut{}{{ env.doc2path(env.docname, base='doc') }}}}
    \vfil\penalty-1\vfilneg
"""

# -- The settings below this line are not specific to nbsphinx ------------

master_doc = 'index'

project = 'ConWhAt'
author = 'John Griffiths'
copyright = '2018, ' + author

linkcheck_ignore = [r'http://localhost:\d+/']

pygments_style = 'sphinx'

# -- Get version information from Git -------------------------------------

try:
    from subprocess import check_output
    release = check_output(['git', 'describe', '--tags', '--always'])
    release = release.decode().strip()
except Exception:
    # Best effort: outside a git checkout (e.g. a source tarball), fall
    # back to a placeholder rather than failing the whole build.
    release = '<unknown>'

# -- Options for HTML output ----------------------------------------------

html_title = project + ' version ' + release
html_theme = 'jupyter_alabaster_theme'

# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    'papersize': 'a4paper',
    'printindex': '',
    'preamble': r"""
\usepackage[sc,osf]{mathpazo}
\linespread{1.05} % see http://www.tug.dk/FontCatalogue/urwpalladio/
\renewcommand{\sfdefault}{pplj} % Palatino instead of sans serif
\IfFileExists{zlmtt.sty}{
    \usepackage[light,scaled=1.05]{zlmtt} % light typewriter font from lmodern
}{
    \renewcommand{\ttdefault}{lmtt} % typewriter font from lmodern
}
""",
}

latex_documents = [
    (master_doc, 'nbsphinx.tex', project, author, 'howto'),
]

latex_show_urls = 'footnote'
|
bsd-3-clause
|
Python
|
|
783e7f644e2fc659d432d447bbbe6a01f2ac74c1
|
Fix #390 #450
|
kappataumu/cookiecutter-django,ingenioustechie/cookiecutter-django-openshift,calculuscowboy/cookiecutter-django,drxos/cookiecutter-django-dokku,bopo/cookiecutter-django,drxos/cookiecutter-django-dokku,ingenioustechie/cookiecutter-django-openshift,drxos/cookiecutter-django-dokku,hairychris/cookiecutter-django,ddiazpinto/cookiecutter-django,pydanny/cookiecutter-django,schacki/cookiecutter-django,ddiazpinto/cookiecutter-django,trungdong/cookiecutter-django,mistalaba/cookiecutter-django,luzfcb/cookiecutter-django,hackebrot/cookiecutter-django,aleprovencio/cookiecutter-django,bopo/cookiecutter-django,trungdong/cookiecutter-django,thisjustin/cookiecutter-django,ryankanno/cookiecutter-django,calculuscowboy/cookiecutter-django,kappataumu/cookiecutter-django,luzfcb/cookiecutter-django,pydanny/cookiecutter-django,topwebmaster/cookiecutter-django,Parbhat/cookiecutter-django-foundation,webspired/cookiecutter-django,bopo/cookiecutter-django,gappsexperts/cookiecutter-django,asyncee/cookiecutter-django,mistalaba/cookiecutter-django,thisjustin/cookiecutter-django,hairychris/cookiecutter-django,webyneter/cookiecutter-django,calculuscowboy/cookiecutter-django,hairychris/cookiecutter-django,aleprovencio/cookiecutter-django,pydanny/cookiecutter-django,luzfcb/cookiecutter-django,Parbhat/cookiecutter-django-foundation,gappsexperts/cookiecutter-django,Parbhat/cookiecutter-django-foundation,trungdong/cookiecutter-django,bopo/cookiecutter-django,asyncee/cookiecutter-django,schacki/cookiecutter-django,ryankanno/cookiecutter-django,ryankanno/cookiecutter-django,webyneter/cookiecutter-django,hackebrot/cookiecutter-django,thisjustin/cookiecutter-django,ad-m/cookiecutter-django,luzfcb/cookiecutter-django,hackebrot/cookiecutter-django,pydanny/cookiecutter-django,hackebrot/cookiecutter-django,aleprovencio/cookiecutter-django,ryankanno/cookiecutter-django,webyneter/cookiecutter-django,thisjustin/cookiecutter-django,trungdong/cookiecutter-django,ad-m/cookiecutter-django,schacki/cookiecutter-django,k
appataumu/cookiecutter-django,drxos/cookiecutter-django-dokku,hairychris/cookiecutter-django,schacki/cookiecutter-django,kappataumu/cookiecutter-django,ingenioustechie/cookiecutter-django-openshift,topwebmaster/cookiecutter-django,ad-m/cookiecutter-django,topwebmaster/cookiecutter-django,calculuscowboy/cookiecutter-django,webyneter/cookiecutter-django,asyncee/cookiecutter-django,ddiazpinto/cookiecutter-django,mistalaba/cookiecutter-django,asyncee/cookiecutter-django,gappsexperts/cookiecutter-django,mistalaba/cookiecutter-django,webspired/cookiecutter-django,gappsexperts/cookiecutter-django,ad-m/cookiecutter-django,webspired/cookiecutter-django,webspired/cookiecutter-django,ddiazpinto/cookiecutter-django,topwebmaster/cookiecutter-django,aleprovencio/cookiecutter-django,ingenioustechie/cookiecutter-django-openshift,Parbhat/cookiecutter-django-foundation
|
{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/admin.py
|
{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/admin.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django import forms
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin as AuthUserAdmin
from django.contrib.auth.forms import UserChangeForm, UserCreationForm
from .models import User
class MyUserChangeForm(UserChangeForm):
    # Stock auth change form, bound to this project's User model.
    class Meta(UserChangeForm.Meta):
        model = User
class MyUserCreationForm(UserCreationForm):
    """Creation form for the project User model that rejects duplicate
    usernames with a custom error message."""

    # Bug fix: the original did
    #   error_message = UserCreationForm.error_messages.update({...})
    # which binds None (dict.update returns None) and, worse, mutates the
    # error_messages dict shared by UserCreationForm and every subclass.
    # Build a subclass-local copy with the extra entry instead.
    error_messages = dict(
        UserCreationForm.error_messages,
        duplicate_username='This username has already been taken.',
    )

    class Meta(UserCreationForm.Meta):
        model = User

    def clean_username(self):
        # Accept the username only if no existing user already has it.
        username = self.cleaned_data["username"]
        try:
            User.objects.get(username=username)
        except User.DoesNotExist:
            return username
        raise forms.ValidationError(self.error_messages['duplicate_username'])
@admin.register(User)
class MyUserAdmin(AuthUserAdmin):
    # Plug the customised forms into the stock auth admin and surface the
    # extra 'name' field in the form, list view and search.
    form = MyUserChangeForm
    add_form = MyUserCreationForm
    fieldsets = (
        ('User Profile', {'fields': ('name',)}),
    ) + AuthUserAdmin.fieldsets
    list_display = ('username', 'name', 'is_superuser')
    search_fields = ['name']
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django import forms
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin as AuthUserAdmin
from django.contrib.auth.forms import UserChangeForm, UserCreationForm
from .models import User


class MyUserChangeForm(UserChangeForm):
    """Admin change form bound to the project's custom User model."""

    class Meta(UserChangeForm.Meta):
        model = User


class MyUserCreationForm(UserCreationForm):
    """Admin creation form that rejects already-taken usernames."""

    # NOTE(review): dict.update() returns None; the useful effect is the
    # registration of 'duplicate_username' on the shared error_messages dict.
    error_message = UserCreationForm.error_messages.update({
        'duplicate_username': 'This username has already been taken.'
    })

    class Meta(UserCreationForm.Meta):
        model = User

    def clean_username(self):
        # Accept the username only if no existing User already has it.
        username = self.cleaned_data['username']
        try:
            User.objects.get(username=username)
        except User.DoesNotExist:
            return username
        raise forms.ValidationError(self.error_messages['duplicate_username'])


@admin.register(User)
class UserAdmin(AuthUserAdmin):
    """User admin wired to the custom change/creation forms above."""

    form = MyUserChangeForm
    add_form = MyUserCreationForm
|
bsd-3-clause
|
Python
|
207f8ffaffa1eb6c7ed8a5f4c884f91ec3e971f3
|
Format : Add compatibility conversion for old serialised Formats with (0,0) min values.
|
boberfly/gaffer,GafferHQ/gaffer,chippey/gaffer,andrewkaufman/gaffer,johnhaddon/gaffer,lucienfostier/gaffer,chippey/gaffer,appleseedhq/gaffer,johnhaddon/gaffer,lucienfostier/gaffer,hradec/gaffer,andrewkaufman/gaffer,ImageEngine/gaffer,johnhaddon/gaffer,GafferHQ/gaffer,johnhaddon/gaffer,chippey/gaffer,GafferHQ/gaffer,lucienfostier/gaffer,boberfly/gaffer,ivanimanishi/gaffer,hradec/gaffer,hradec/gaffer,ImageEngine/gaffer,hradec/gaffer,andrewkaufman/gaffer,appleseedhq/gaffer,andrewkaufman/gaffer,johnhaddon/gaffer,appleseedhq/gaffer,lucienfostier/gaffer,GafferHQ/gaffer,boberfly/gaffer,ImageEngine/gaffer,ivanimanishi/gaffer,hradec/gaffer,GafferHQ/gaffer,andrewkaufman/gaffer,chippey/gaffer,appleseedhq/gaffer,ivanimanishi/gaffer,ivanimanishi/gaffer,boberfly/gaffer,ImageEngine/gaffer
|
startup/GafferImage/formatCompatibility.py
|
startup/GafferImage/formatCompatibility.py
|
##########################################################################
#
# Copyright (c) 2015, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import re
import inspect

import IECore

import Gaffer
import GafferImage

# The ScriptNode currently executing load(), or None when no load is in
# progress.  Set by the ScriptNode.load wrapper below so that the Format
# conversion is only applied while deserialising a script.
__currentlyLoadingScript = None


def __getSerialisedGafferVersion( scriptNode ) :
	# Returns the Gaffer version the script was serialised with, as a
	# (milestone, major, minor, patch) tuple read from script metadata.
	return (
		Gaffer.Metadata.nodeValue( scriptNode, "serialiser:milestoneVersion" ),
		Gaffer.Metadata.nodeValue( scriptNode, "serialiser:majorVersion" ),
		Gaffer.Metadata.nodeValue( scriptNode, "serialiser:minorVersion" ),
		Gaffer.Metadata.nodeValue( scriptNode, "serialiser:patchVersion" )
	)


def __convertFormat( fmt ):
	# Converts a Format serialised by an older Gaffer (exclusive max bounds)
	# to the newer inclusive-bound convention.  No-op outside a script load.
	if not __currentlyLoadingScript :
		return fmt
	gafferVersion = __getSerialisedGafferVersion( __currentlyLoadingScript )
	# TODO : Determine which version of Gaffer inclusive image bounds
	# will be included in. Presuming 0.16.0.0 for now.
	if ( gafferVersion < ( 0, 16, 0, 0 ) ) :
		displayWindow = fmt.getDisplayWindow()
		# Grow the max corner by one pixel to express the same window
		# with inclusive bounds.
		displayWindow.max += IECore.V2i( 1 )
		return GafferImage.Format( displayWindow, fmt.getPixelAspect() )
	return fmt


def __FormatPlug__setValue( self, *args, **kwargs ) :
	# Wrapper for FormatPlug.setValue : converts a Format first argument
	# before delegating to the original implementation.
	if args and isinstance( args[0], GafferImage.Format ) :
		args = ( __convertFormat( args[0] ), ) + args[1:]
	return self.__originalSetValue( *args, **kwargs )


def __Format__registerFormat( *args, **kwargs ) :
	# Wrapper for Format.registerFormat : same Format conversion as above.
	if args and isinstance( args[0], GafferImage.Format ) :
		args = ( __convertFormat( args[0] ), ) + args[1:]
	return GafferImage.Format.__originalRegisterFormat( *args, **kwargs )


def __ScriptNode__load( self, *args, **kwargs ) :
	# Wrapper for ScriptNode.load : records the loading script for the
	# duration of the load so __convertFormat() knows to apply.
	global __currentlyLoadingScript
	__currentlyLoadingScript = self
	try:
		self.__originalLoad( *args, **kwargs )
	finally:
		# Always clear, even if the load raised.
		__currentlyLoadingScript = None


# Monkey-patch the wrappers in, keeping references to the originals so the
# wrappers can delegate to them.
Gaffer.ScriptNode.__originalLoad = Gaffer.ScriptNode.load
Gaffer.ScriptNode.load = __ScriptNode__load

GafferImage.FormatPlug.__originalSetValue = GafferImage.FormatPlug.setValue
GafferImage.FormatPlug.setValue = __FormatPlug__setValue

GafferImage.Format.__originalRegisterFormat = GafferImage.Format.registerFormat
GafferImage.Format.registerFormat = __Format__registerFormat
|
bsd-3-clause
|
Python
|
|
9d51e2ef626ce61dd3ae563681477b12a2352881
|
Add test_sprint
|
mslw/sprinttesting,mslw/sprinttesting
|
test_sprint.py
|
test_sprint.py
|
def inc(val):
    """Return *val* incremented by one."""
    # Was `val - 1`, which contradicts both the function name and the
    # accompanying test (`inc(5) == 6`).
    return val + 1


def test_inc():
    assert inc(5) == 6
|
mit
|
Python
|
|
8f6db5945348879a7340f8a4c7da6111a06cd062
|
Add new module to statically host an arbitrary directory
|
Purg/SMQTK,Purg/SMQTK,Purg/SMQTK,kfieldho/SMQTK,kfieldho/SMQTK,kfieldho/SMQTK,Purg/SMQTK,Purg/SMQTK,kfieldho/SMQTK,kfieldho/SMQTK,Purg/SMQTK,Purg/SMQTK,Purg/SMQTK,Purg/SMQTK,kfieldho/SMQTK,kfieldho/SMQTK,kfieldho/SMQTK,kfieldho/SMQTK
|
python/smqtk/web/search_app/modules/static_host.py
|
python/smqtk/web/search_app/modules/static_host.py
|
import flask

__author__ = 'paul.tunison@kitware.com'


class StaticDirectoryHost (flask.Blueprint):
    """
    Module that will host a given directory to the given URL prefix (relative to
    the parent module's prefix).

    Instances of this class will have nothing set to their static URL path, as a
    blank string is used. Please reference the URL prefix value.
    """

    def __init__(self, name, static_dir, url_prefix):
        # make sure URL prefix starts with a slash
        if not url_prefix.startswith('/'):
            url_prefix = '/' + url_prefix
        # Empty static_url_path serves files directly under url_prefix
        # instead of under an additional "/static" segment.
        super(StaticDirectoryHost, self).__init__(name, __name__,
                                                  static_folder=static_dir,
                                                  static_url_path="",
                                                  url_prefix=url_prefix)
|
bsd-3-clause
|
Python
|
|
fdb8f36fd4eed11d5d757d8477b3c2b8619aae8a
|
Add management command to populate last_modified fields
|
qedsoftware/commcare-hq,SEL-Columbia/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,SEL-Columbia/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,SEL-Columbia/commcare-hq
|
corehq/apps/commtrack/management/commands/product_program_last_modified.py
|
corehq/apps/commtrack/management/commands/product_program_last_modified.py
|
from django.core.management.base import BaseCommand
from corehq.apps.commtrack.models import Product, Program
from dimagi.utils.couch.database import iter_docs
from datetime import datetime
import json


class Command(BaseCommand):
    # One-off backfill: stamps last_modified on couch Product/Program docs
    # that are missing the field.  (Python 2 code -- print statements.)
    help = 'Populate last_modified field for products and programs'

    def handle(self, *args, **options):
        self.stdout.write("Processing products...\n")
        # Collect all Product doc ids via the couch view (ids only, no docs).
        relevant_ids = set([r['id'] for r in Product.get_db().view(
            'commtrack/products',
            reduce=False,
        ).all()])
        to_save = []
        for product in iter_docs(Product.get_db(), relevant_ids):
            if 'last_modified' not in product or not product['last_modified']:
                print product['_id']
                # NOTE(review): json.dumps() wraps the ISO string in quotes,
                # so the stored value is '"2014-..."' -- confirm consumers
                # expect the quoted form.
                product['last_modified'] = json.dumps(datetime.now().isoformat())
                to_save.append(product)
                # Flush in batches of ~500 to bound memory use.
                if len(to_save) > 500:
                    Product.get_db().bulk_save(to_save)
                    to_save = []
        if to_save:
            Product.get_db().bulk_save(to_save)

        # Same backfill for Program docs.
        self.stdout.write("Processing programs...\n")
        relevant_ids = set([r['id'] for r in Program.get_db().view(
            'commtrack/programs',
            reduce=False,
        ).all()])
        to_save = []
        for program in iter_docs(Program.get_db(), relevant_ids):
            if 'last_modified' not in program or not program['last_modified']:
                print program['_id']
                program['last_modified'] = json.dumps(datetime.now().isoformat())
                to_save.append(program)
                if len(to_save) > 500:
                    Program.get_db().bulk_save(to_save)
                    to_save = []
        if to_save:
            Program.get_db().bulk_save(to_save)
|
bsd-3-clause
|
Python
|
|
cf6d2e732ab4b7131312323632761561f2aa3a86
|
add field user_email to DynamicSaveInputs
|
zrisher/webapp-public,OpenSourcePolicyCenter/webapp-public,OpenSourcePolicyCenter/PolicyBrain,OpenSourcePolicyCenter/webapp-public,zrisher/webapp-public,OpenSourcePolicyCenter/webapp-public,OpenSourcePolicyCenter/PolicyBrain,zrisher/webapp-public,OpenSourcePolicyCenter/webapp-public,OpenSourcePolicyCenter/PolicyBrain,zrisher/webapp-public,OpenSourcePolicyCenter/PolicyBrain
|
webapp/apps/dynamic/migrations/0002_dynamicsaveinputs_user_email.py
|
webapp/apps/dynamic/migrations/0002_dynamicsaveinputs_user_email.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):
    """Add an optional user_email column to DynamicSaveInputs."""

    dependencies = [
        ('dynamic', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='dynamicsaveinputs',
            name='user_email',
            # Nullable and blank so existing rows need no value.
            field=models.CharField(default=None, max_length=50, null=True, blank=True),
        ),
    ]
|
mit
|
Python
|
|
6118b05f0efd1c2839eb8bc4de36723af1fcc364
|
Convert snake_case to camelCase or PascalCase (#7028) (#7034)
|
TheAlgorithms/Python
|
strings/snake_case_to_camel_pascal_case.py
|
strings/snake_case_to_camel_pascal_case.py
|
def snake_to_camel_case(input: str, use_pascal: bool = False) -> str:
    """
    Transforms a snake_case given string to camelCase (or PascalCase if indicated)
    (defaults to not use Pascal)

    >>> snake_to_camel_case("some_random_string")
    'someRandomString'

    >>> snake_to_camel_case("some_random_string", use_pascal=True)
    'SomeRandomString'

    >>> snake_to_camel_case("some_random_string_with_numbers_123")
    'someRandomStringWithNumbers123'

    >>> snake_to_camel_case("some_random_string_with_numbers_123", use_pascal=True)
    'SomeRandomStringWithNumbers123'

    >>> snake_to_camel_case(123)
    Traceback (most recent call last):
        ...
    ValueError: Expected string as input, found <class 'int'>

    >>> snake_to_camel_case("some_string", use_pascal="True")
    Traceback (most recent call last):
        ...
    ValueError: Expected boolean as use_pascal parameter, found <class 'str'>
    """
    if not isinstance(input, str):
        raise ValueError(f"Expected string as input, found {type(input)}")
    if not isinstance(use_pascal, bool):
        raise ValueError(
            f"Expected boolean as use_pascal parameter, found {type(use_pascal)}"
        )

    words = input.split("_")

    # In camelCase the first word keeps its original form; in PascalCase
    # every word is capitalized.
    start_index = 0 if use_pascal else 1
    words_to_capitalize = words[start_index:]

    # word[:1] instead of word[0]: empty segments (e.g. "a__b" or a trailing
    # underscore) previously raised IndexError; slicing handles them safely.
    capitalized_words = [word[:1].upper() + word[1:] for word in words_to_capitalize]

    initial_word = "" if use_pascal else words[0]

    return "".join([initial_word] + capitalized_words)


if __name__ == "__main__":
    from doctest import testmod

    testmod()
|
mit
|
Python
|
|
4a71e883a469f22995775cbc1eeb6489a2dd71d1
|
add integration test
|
uber/ludwig,uber/ludwig,uber/ludwig
|
tests/integration_tests/test_contrib_wandb.py
|
tests/integration_tests/test_contrib_wandb.py
|
import logging
import os
import shutil
import sys

import ludwig.contrib
from tests.integration_tests.test_experiment import run_experiment
from tests.integration_tests.utils import image_feature
from tests.integration_tests.utils import category_feature
from tests.integration_tests.utils import generate_data
import wandb

logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
logging.getLogger("ludwig").setLevel(logging.INFO)


def test_wandb_experiment(csv_filename):
    # Test W&B integration

    # add wandb arg and detect flag
    # NOTE(review): mutates sys.argv globally; other tests in the same
    # process will also see the --wandb flag.
    sys.argv.append('--wandb')
    ludwig.contrib.contrib_import()

    # disable sync to cloud
    os.environ['WANDB_MODE'] = 'dryrun'

    # Image Inputs
    image_dest_folder = os.path.join(os.getcwd(), 'generated_images')

    # Inputs & Outputs
    input_features = [image_feature(folder=image_dest_folder)]
    output_features = [category_feature()]
    rel_path = generate_data(input_features, output_features, csv_filename)

    # Run experiment
    run_experiment(input_features, output_features, data_csv=rel_path)

    # Check a W&B run was created
    assert wandb.run is not None

    # End session
    wandb.join()

    # Delete the temporary data created
    shutil.rmtree(image_dest_folder)


if __name__ == '__main__':
    """
    To run tests individually, run:
    ```python -m pytest tests/integration_tests/test_contrib_wandb.py::test_name```
    """
    pass
|
apache-2.0
|
Python
|
|
da5afa7e96bfa72d6fd0906ed1a42c5f230112f5
|
Add tests for update_taxon_assignments method
|
kbaseapps/GenomeFileUtil,kbaseapps/GenomeFileUtil,kbaseapps/GenomeFileUtil,kbaseapps/GenomeFileUtil
|
test/core/update_taxon_assignments_test.py
|
test/core/update_taxon_assignments_test.py
|
import os
import time
import unittest
from configparser import ConfigParser
from uuid import uuid4

from GenomeFileUtil.GenomeFileUtilImpl import GenomeFileUtil
from GenomeFileUtil.GenomeFileUtilServer import MethodContext
from installed_clients.WorkspaceClient import Workspace as workspaceService

# NOTE: These tests must run against https://ci.kbase.us
# Hard-coded workspace/object on the CI server that holds the test Genome.
_WORKSPACE_ID = 33192
_OBJECT_ID = 33


class UpdateTaxonAssignmentsTest(unittest.TestCase):
    """Integration tests for GenomeFileUtil.update_taxon_assignments."""

    @classmethod
    def setUpClass(cls):
        # Build a service context and workspace client from the deploy config.
        token = os.environ.get('KB_AUTH_TOKEN', None)
        # WARNING: don't call any logging methods on the context object,
        # it'll result in a NoneType error
        cls.ctx = MethodContext(None)
        cls.ctx.update({'token': token,
                        'provenance': [
                            {'service': 'GenomeFileUtil',
                             'method': 'please_never_use_it_in_production',
                             'method_params': []
                             }],
                        'authenticated': 1})
        config_file = os.environ['KB_DEPLOYMENT_CONFIG']
        cls.cfg = {}
        config = ConfigParser()
        config.read(config_file)
        for nameval in config.items('GenomeFileUtil'):
            cls.cfg[nameval[0]] = nameval[1]
        cls.wsURL = cls.cfg['workspace-url']
        cls.wsClient = workspaceService(cls.wsURL, token=token)
        cls.serviceImpl = GenomeFileUtil(cls.cfg)
        # Uniquely-named scratch workspace for this test run.
        suffix = int(time.time() * 1000)
        cls.wsName = "test_GenomeFileUtil_" + str(suffix)
        cls.wsClient.create_workspace({'workspace': cls.wsName})

    def test_update_taxon_assignments_valid(self):
        """
        Test a valid call to the update_taxon_assignments method.
        """
        # Random keys/values so repeated runs don't collide on the shared object.
        taxon_key = str(uuid4())
        taxon_val = str(uuid4())
        taxon_val_new = str(uuid4())
        # Fetch only the taxon_assignments subtree of the object.
        get_obj_params = {
            'wsid': _WORKSPACE_ID,
            'objid': _OBJECT_ID,
            'included': ['/taxon_assignments']
        }

        # Add a new assignment
        self.serviceImpl.update_taxon_assignments(self.ctx, {
            'workspace_id': _WORKSPACE_ID,
            'object_id': _OBJECT_ID,
            'taxon_assignments': {
                taxon_key: taxon_val
            }
        })
        # Fetch the object and check the mapping
        obj = self.wsClient.get_objects2({'objects': [get_obj_params]})['data'][0]['data']
        self.assertTrue(taxon_key in obj['taxon_assignments'])
        print(obj['taxon_assignments'])
        self.assertEqual(obj['taxon_assignments'][taxon_key], taxon_val)

        # Update the assignment we just added
        self.serviceImpl.update_taxon_assignments(self.ctx, {
            'workspace_id': _WORKSPACE_ID,
            'object_id': _OBJECT_ID,
            'taxon_assignments': {
                taxon_key: taxon_val_new
            }
        })
        # Fetch the object and check the mapping
        obj = self.wsClient.get_objects2({'objects': [get_obj_params]})['data'][0]['data']
        self.assertTrue(taxon_key in obj['taxon_assignments'])
        self.assertEqual(obj['taxon_assignments'][taxon_key], taxon_val_new)

        # Remove the assignment we just added
        self.serviceImpl.update_taxon_assignments(self.ctx, {
            'workspace_id': _WORKSPACE_ID,
            'object_id': _OBJECT_ID,
            'remove_assignments': [taxon_key]
        })
        # Fetch the object and check the mapping
        obj = self.wsClient.get_objects2({'objects': [get_obj_params]})['data'][0]['data']
        self.assertTrue(taxon_key not in obj['taxon_assignments'])
        self.assertEqual(obj['taxon_assignments'].get(taxon_key), None)
|
mit
|
Python
|
|
bbc4351a5611a035bbee1f18cb55b74d9583cdcd
|
Create a state for add an object
|
WalkingMachine/sara_behaviors,WalkingMachine/sara_behaviors
|
sara_flexbe_states/src/sara_flexbe_states/Wonderland_Add_Object.py
|
sara_flexbe_states/src/sara_flexbe_states/Wonderland_Add_Object.py
|
#!/usr/bin/env python
# encoding=utf8

import json

import requests
from flexbe_core import EventState, Logger


class Wonderland_Add_Object(EventState):
    '''
    Add an object to Wonderland.
    For the room, enter only ID or Name, not both.
    Return the ID of the added human.

    ># name string name of the human
    ># roomID string ID on the BDD or name of the room
    ># x_pos int Position on X
    ># y_pos int Position on Y
    ># z_pos int Position on Z

    #> id int ID on the BDD of the human

    <= done data sent correctly
    <= error error while data is reading
    '''

    def __init__(self):
        super(Wonderland_Add_Object, self).__init__(outcomes=['done', 'error'],
                                                    output_keys=['id'],
                                                    input_keys=['name', 'roomID', 'x_pos', 'y_pos', 'z_pos'])
        # generate post key for authentication
        self._header = {'api-key': 'asdf'}

    def execute(self, userdata):
        # Generate URL to contact
        # A numeric roomID is sent as 'roomID'; any other value is assumed
        # to be a room name and sent as 'roomName'.  (Python 2: `long`.)
        if isinstance(userdata.roomID, (int, long)):
            dataPost = {'name': userdata.name, 'x': userdata.x_pos, 'y': userdata.y_pos, 'z': userdata.z_pos,
                        'roomID': userdata.roomID}
        else:
            dataPost = {'name': userdata.name, 'x': userdata.x_pos, 'y': userdata.y_pos, 'z': userdata.z_pos,
                        'roomName': userdata.roomID}

        # try the request
        # NOTE(review): hard-coded LAN address -- consider making it configurable.
        try:
            response = requests.post("http://192.168.0.46:8000/api/object/", headers=self._header, data=dataPost)
        except requests.exceptions.RequestException as e:
            print e
            return 'error'

        # read response
        data_response = json.loads(response.content)

        # have a response
        if not data_response["entity"]:
            return 'error'

        # have an id to read
        if 'id' not in data_response["entity"]:
            # continue to Error
            return 'error'

        # return the ID
        userdata.id = data_response["entity"]['id']
        return 'done'
|
bsd-3-clause
|
Python
|
|
3ce9dcb1ae21ac35ddd97d648ae3ff4b5877adc5
|
add script for downloading and conversion of bhuman dataset after conversion the dataset is in the same format as the berlin-united one
|
BerlinUnited/NaoTH,BerlinUnited/NaoTH,BerlinUnited/NaoTH,BerlinUnited/NaoTH,BerlinUnited/NaoTH,BerlinUnited/NaoTH,BerlinUnited/NaoTH
|
Utils/py/BallDetection/RegressionNetwork/generate_image_db_bhuman.py
|
Utils/py/BallDetection/RegressionNetwork/generate_image_db_bhuman.py
|
"""
Converts the b-human 2019 dataset to the naoth format so we can run performance comparisons
"""
import pickle
import numpy as np
import h5py
from pathlib import Path
from urllib.request import urlretrieve
from urllib.error import HTTPError, URLError
from utility_functions.loader import calculate_mean, subtract_mean
def download_bhuman2019(origin, target):
def dl_progress(count, block_size, total_size):
print('\r', 'Progress: {0:.2%}'.format(min((count * block_size) / total_size, 1.0)), sep='', end='', flush=True)
if not Path(target).exists():
target_folder = Path(target).parent
target_folder.mkdir(parents=True, exist_ok=True)
else:
return
error_msg = 'URL fetch failure on {} : {} -- {}'
try:
try:
urlretrieve(origin, target, dl_progress)
print('\nFinished')
except HTTPError as e:
raise Exception(error_msg.format(origin, e.code, e.reason))
except URLError as e:
raise Exception(error_msg.format(origin, e.errno, e.reason))
except (Exception, KeyboardInterrupt):
if Path(target).exists():
Path(target).unlink()
raise
if __name__ == '__main__':
download_bhuman2019("https://sibylle.informatik.uni-bremen.de/public/datasets/b-alls-2019/b-alls-2019.hdf5",
"data/bhuman/b-alls-2019.hdf5")
download_bhuman2019("https://sibylle.informatik.uni-bremen.de/public/datasets/b-alls-2019/",
"data/bhuman/readme.txt")
# get data
f = h5py.File('data/bhuman/b-alls-2019.hdf5', 'r')
negative_labels = np.array(f.get('negatives/labels'))
positive_labels = np.array(f.get('positives/labels'))
negative_data = np.array(f.get('negatives/data'))
positive_data = np.array(f.get('positives/data'))
labels = np.append(negative_labels, positive_labels, axis=0)
# swap dimensions to convert b-human format to berlin-united format
new_labels = np.copy(labels)
radii = labels[:, 0]
classes = labels[:, -1]
new_labels[:, 0] = classes
new_labels[:, -1] = radii
images = np.append(negative_data, positive_data, axis=0)
mean = calculate_mean(images)
mean_images = subtract_mean(images, mean)
with open("data/bhuman.pkl", "wb") as f:
pickle.dump(mean, f)
pickle.dump(mean_images, f)
pickle.dump(new_labels, f)
|
apache-2.0
|
Python
|
|
78bc96307fb52d95e36eab1da6fa57a66af736e8
|
Add script to delete couch phone numbers
|
qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
corehq/apps/sms/management/commands/delete_messaging_couch_phone_numbers.py
|
corehq/apps/sms/management/commands/delete_messaging_couch_phone_numbers.py
|
from corehq.apps.sms.mixin import VerifiedNumber
from corehq.apps.sms.models import PhoneNumber
from dimagi.utils.couch.database import iter_docs_with_retry, iter_bulk_delete_with_doc_type_verification
from django.core.management.base import BaseCommand
from optparse import make_option


class Command(BaseCommand):
    # One-off cleanup: bulk-deletes VerifiedNumber docs (active and
    # soft-deleted) from couch.  (Python 2 code -- print statements.)
    args = ""
    help = ("Deletes all messaging phone numbers stored in couch")
    option_list = BaseCommand.option_list + (
        make_option("--delete-interval",
                    action="store",
                    dest="delete_interval",
                    type="int",
                    default=5,
                    help="The number of seconds to wait between each bulk delete."),
    )

    def get_couch_ids(self):
        # Ids of all active VerifiedNumber docs, across all domains.
        result = VerifiedNumber.view(
            'phone_numbers/verified_number_by_domain',
            include_docs=False,
            reduce=False,
        ).all()
        return [row['id'] for row in result]

    def get_soft_deleted_couch_ids(self):
        # Ids of soft-deleted docs, identified by their '-Deleted' doc_type.
        result = VerifiedNumber.view(
            'all_docs/by_doc_type',
            startkey=['VerifiedNumber-Deleted'],
            endkey=['VerifiedNumber-Deleted', {}],
            include_docs=False,
            reduce=False,
        ).all()
        return [row['id'] for row in result]

    def delete_models(self, delete_interval):
        # Doc-type verification guards against deleting unrelated docs if a
        # view returns unexpected ids.
        print 'Deleting VerifiedNumbers...'
        count = iter_bulk_delete_with_doc_type_verification(
            VerifiedNumber.get_db(),
            self.get_couch_ids(),
            'VerifiedNumber',
            wait_time=delete_interval,
            max_fetch_attempts=5
        )
        print 'Deleted %s documents' % count

        print 'Deleting Soft-Deleted VerifiedNumbers...'
        count = iter_bulk_delete_with_doc_type_verification(
            VerifiedNumber.get_db(),
            self.get_soft_deleted_couch_ids(),
            'VerifiedNumber-Deleted',
            wait_time=delete_interval,
            max_fetch_attempts=5
        )
        print 'Deleted %s documents' % count

    def handle(self, *args, **options):
        self.delete_models(options['delete_interval'])
|
bsd-3-clause
|
Python
|
|
678ef4a2ccd9bbfc12ed9a6077d666c4daebf243
|
remove like indexes from phonelog
|
qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq
|
corehq/ex-submodules/phonelog/migrations/0008_remove_like_indexes.py
|
corehq/ex-submodules/phonelog/migrations/0008_remove_like_indexes.py
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):
    # Drops the varchar_pattern_ops ("_like") indexes Django/Postgres
    # creates alongside db_index'd char fields; they are unused here.

    def forwards(self, orm):
        db.execute("DROP INDEX IF EXISTS phonelog_devicereportentry_device_id_like")
        db.execute("DROP INDEX IF EXISTS phonelog_devicereportentry_domain_like")
        db.execute("DROP INDEX IF EXISTS phonelog_devicereportentry_type_like")
        db.execute("DROP INDEX IF EXISTS phonelog_devicereportentry_user_id_like")
        db.execute("DROP INDEX IF EXISTS phonelog_devicereportentry_username_like")
        db.execute("DROP INDEX IF EXISTS phonelog_userentry_username_like")
        db.execute("DROP INDEX IF EXISTS phonelog_userentry_xform_id_like")

    def backwards(self, orm):
        # don't add it back
        pass

    # South's frozen model state at the time of this migration.
    models = {
        u'phonelog.devicereportentry': {
            'Meta': {'unique_together': "[('xform_id', 'i')]", 'object_name': 'DeviceReportEntry'},
            'app_version': ('django.db.models.fields.TextField', [], {'null': 'True'}),
            'date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
            'device_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'db_index': 'True'}),
            'domain': ('django.db.models.fields.CharField', [], {'max_length': '100', 'db_index': 'True'}),
            'i': ('django.db.models.fields.IntegerField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'msg': ('django.db.models.fields.TextField', [], {}),
            'type': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
            'user_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'db_index': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'db_index': 'True'}),
            'xform_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
        },
        u'phonelog.userentry': {
            'Meta': {'unique_together': "[('xform_id', 'i')]", 'object_name': 'UserEntry'},
            'i': ('django.db.models.fields.IntegerField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'sync_token': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'user_id': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'username': ('django.db.models.fields.CharField', [], {'max_length': '100', 'db_index': 'True'}),
            'xform_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
        }
    }

    complete_apps = ['phonelog']
|
bsd-3-clause
|
Python
|
|
20c6f14d4cc76771290f7ce6fc4f3dd5abed07b4
|
write symbol calculus with sympy.
|
DO-CV/sara,DO-CV/sara,DO-CV/sara,DO-CV/sara,DO-CV/sara,DO-CV/sara
|
cpp/src/DO/Sara/MultiViewGeometry/Estimators/five_point_algorithm.py
|
cpp/src/DO/Sara/MultiViewGeometry/Estimators/five_point_algorithm.py
|
from sympy import *

# Symbolic 3x3 matrices X, Y, Z, W -- the basis of the essential-matrix
# nullspace in the five-point relative pose problem.
X = Matrix(symbols(' '.join(['X{}'.format(i) for i in range(9)]))).reshape(3, 3)
Y = Matrix(symbols(' '.join(['Y{}'.format(i) for i in range(9)]))).reshape(3, 3)
Z = Matrix(symbols(' '.join(['Z{}'.format(i) for i in range(9)]))).reshape(3, 3)
W = Matrix(symbols(' '.join(['W{}'.format(i) for i in range(9)]))).reshape(3, 3)

# Unknown mixing coefficients.
x, y, z = symbols('x y z')

# Candidate essential matrix as a linear combination of the basis, and the
# determinant constraint det(E) = 0 expanded symbolically.
E = x * X + y * Y + z * Z + W
a = det(E)
|
mpl-2.0
|
Python
|
|
25b0544c2f2c78dbc3bf971d955fda651d7ed5e9
|
fix is_open_for_signup missing param
|
hairychris/cookiecutter-django,hackebrot/cookiecutter-django,jondelmil/cookiecutter-django,topwebmaster/cookiecutter-django,schacki/cookiecutter-django,jondelmil/cookiecutter-django,aleprovencio/cookiecutter-django,hackebrot/cookiecutter-django,Parbhat/cookiecutter-django-foundation,drxos/cookiecutter-django-dokku,ryankanno/cookiecutter-django,ddiazpinto/cookiecutter-django,crdoconnor/cookiecutter-django,crdoconnor/cookiecutter-django,mistalaba/cookiecutter-django,calculuscowboy/cookiecutter-django,bopo/cookiecutter-django,ryankanno/cookiecutter-django,asyncee/cookiecutter-django,calculuscowboy/cookiecutter-django,webspired/cookiecutter-django,andresgz/cookiecutter-django,drxos/cookiecutter-django-dokku,webyneter/cookiecutter-django,hairychris/cookiecutter-django,ad-m/cookiecutter-django,bopo/cookiecutter-django,schacki/cookiecutter-django,drxos/cookiecutter-django-dokku,webspired/cookiecutter-django,schacki/cookiecutter-django,ingenioustechie/cookiecutter-django-openshift,webspired/cookiecutter-django,thisjustin/cookiecutter-django,ad-m/cookiecutter-django,webspired/cookiecutter-django,drxos/cookiecutter-django-dokku,aeikenberry/cookiecutter-django-rest-babel,gappsexperts/cookiecutter-django,calculuscowboy/cookiecutter-django,gappsexperts/cookiecutter-django,ddiazpinto/cookiecutter-django,webyneter/cookiecutter-django,luzfcb/cookiecutter-django,pydanny/cookiecutter-django,Parbhat/cookiecutter-django-foundation,Parbhat/cookiecutter-django-foundation,thisjustin/cookiecutter-django,nunchaks/cookiecutter-django,bopo/cookiecutter-django,trungdong/cookiecutter-django,thisjustin/cookiecutter-django,hairychris/cookiecutter-django,aleprovencio/cookiecutter-django,aeikenberry/cookiecutter-django-rest-babel,bopo/cookiecutter-django,ad-m/cookiecutter-django,hairychris/cookiecutter-django,kappataumu/cookiecutter-django,asyncee/cookiecutter-django,ryankanno/cookiecutter-django,aeikenberry/cookiecutter-django-rest-babel,luzfcb/cookiecutter-django,nunchaks/cookiecutter-django,asyn
cee/cookiecutter-django,crdoconnor/cookiecutter-django,aleprovencio/cookiecutter-django,pydanny/cookiecutter-django,ddiazpinto/cookiecutter-django,pydanny/cookiecutter-django,luzfcb/cookiecutter-django,trungdong/cookiecutter-django,andresgz/cookiecutter-django,hackebrot/cookiecutter-django,ryankanno/cookiecutter-django,andresgz/cookiecutter-django,andresgz/cookiecutter-django,asyncee/cookiecutter-django,mistalaba/cookiecutter-django,nunchaks/cookiecutter-django,trungdong/cookiecutter-django,gappsexperts/cookiecutter-django,ingenioustechie/cookiecutter-django-openshift,topwebmaster/cookiecutter-django,thisjustin/cookiecutter-django,mistalaba/cookiecutter-django,webyneter/cookiecutter-django,ddiazpinto/cookiecutter-django,hackebrot/cookiecutter-django,gappsexperts/cookiecutter-django,Parbhat/cookiecutter-django-foundation,crdoconnor/cookiecutter-django,pydanny/cookiecutter-django,kappataumu/cookiecutter-django,topwebmaster/cookiecutter-django,aeikenberry/cookiecutter-django-rest-babel,ingenioustechie/cookiecutter-django-openshift,schacki/cookiecutter-django,jondelmil/cookiecutter-django,trungdong/cookiecutter-django,kappataumu/cookiecutter-django,luzfcb/cookiecutter-django,ad-m/cookiecutter-django,aleprovencio/cookiecutter-django,topwebmaster/cookiecutter-django,webyneter/cookiecutter-django,mistalaba/cookiecutter-django,calculuscowboy/cookiecutter-django,ingenioustechie/cookiecutter-django-openshift,kappataumu/cookiecutter-django,jondelmil/cookiecutter-django,nunchaks/cookiecutter-django
|
{{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/users/adapter.py
|
{{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/users/adapter.py
|
# -*- coding: utf-8 -*-
from django.conf import settings
from allauth.account.adapter import DefaultAccountAdapter
from allauth.socialaccount.adapter import DefaultSocialAccountAdapter


class AccountAdapter(DefaultAccountAdapter):
    def is_open_for_signup(self, request):
        # Registration is gated by the ACCOUNT_ALLOW_REGISTRATION setting
        # (defaults to open).
        return getattr(settings, 'ACCOUNT_ALLOW_REGISTRATION', True)


class SocialAccountAdapter(DefaultSocialAccountAdapter):
    def is_open_for_signup(self, request, sociallogin):
        # Same gate for social-auth signups; sociallogin is unused but
        # required by the allauth adapter signature.
        return getattr(settings, 'ACCOUNT_ALLOW_REGISTRATION', True)
|
# -*- coding: utf-8 -*-
from django.conf import settings
from allauth.account.adapter import DefaultAccountAdapter
from allauth.socialaccount.adapter import DefaultSocialAccountAdapter


class AccountAdapter(DefaultAccountAdapter):
    def is_open_for_signup(self, request):
        # Registration is gated by the ACCOUNT_ALLOW_REGISTRATION setting.
        return getattr(settings, 'ACCOUNT_ALLOW_REGISTRATION', True)


class SocialAccountAdapter(DefaultSocialAccountAdapter):
    # NOTE(review): allauth's DefaultSocialAccountAdapter.is_open_for_signup
    # takes (self, request, sociallogin); this override omits sociallogin
    # and will raise TypeError when allauth calls it.
    def is_open_for_signup(self, request):
        return getattr(settings, 'ACCOUNT_ALLOW_REGISTRATION', True)
|
bsd-3-clause
|
Python
|
c8143c9f3eca422e48625700aeef11e528131caf
|
add zeroOutDisk.py
|
marshki/pyWipe,marshki/pyWipe
|
zeroOutDisk.py
|
zeroOutDisk.py
|
#!/usr/bin/env python
def zeroOutDisk(device, passes=1):
    """Overwrite *device* with zeros using ``dd``.

    The original body was not valid Python (``pass`` used as a variable,
    malformed format string, undefined ``wipe``/``device`` globals), so the
    target device and pass count are now explicit parameters.

    Args:
        device: path to the block device to wipe, e.g. ``/dev/sdb``.
        passes: number of full zero-fill passes to perform (default 1).

    Raises:
        subprocess.CalledProcessError: if ``dd`` exits non-zero.
    """
    import subprocess
    for current in range(1, passes + 1):
        print('Wiping drive: pass %d of %d' % (current, passes))
        # Argument list (shell=False) avoids shell injection via the
        # device path, unlike the original os.system() call.
        subprocess.run(['dd', 'if=/dev/zero', 'of=%s' % device], check=True)
|
mit
|
Python
|
|
8b26888dea4e825d06b6ebfed628a1762f6a5455
|
Solve 48.
|
klen/euler
|
048/solution.py
|
048/solution.py
|
# coding: utf-8
""" Project Euler problem #48. """
def problem():
    """Solve Project Euler problem #48.

    The series is 1**1 + 2**2 + 3**3 + ... + 1000**1000; the answer is its
    last ten digits.

    Returns:
        str: the last ten digits of the series.
    """
    # Python ints are arbitrary precision, so the Py2-only ``long`` is
    # unnecessary.  Returning the value (instead of printing inside the
    # function and returning None) makes the function testable; a modular
    # variant (pow(n, n, 10**10)) would be faster but this is instant.
    total = sum(n ** n for n in range(1, 1001))
    return str(total)[-10:]


if __name__ == '__main__':
    print(problem())
|
mit
|
Python
|
|
e0770c4a671c650f4569036350b4047fcf925506
|
Add a demo app to illustrate the result
|
plumer/codana,plumer/codana
|
AnalysisDemo.py
|
AnalysisDemo.py
|
import wx
#import matplotlib
class AnalysisDemo(wx.Frame):
    """Demo frame: radio buttons pick package/class organisation and a
    button will (eventually) render the corresponding figure into the
    canvas area.  Figure creation itself is not implemented yet."""

    def __init__(self, *args, **kw):
        super(AnalysisDemo, self).__init__(*args, **kw)
        self.initMain()

    def initMain(self):
        """Build the widget tree, lay it out, and show the frame."""
        pn = wx.Panel(self)
        # Mutually exclusive display options (wx groups sibling radio buttons).
        self.showPackage = wx.RadioButton(pn, label='Organize in package')
        self.showClass = wx.RadioButton(pn, label='Organize in class')
        # self.canvas = matplotlib.figure.Figure()
        # Text control stands in for the future matplotlib canvas (see the
        # commented-out line above and the matplotlib import at file top).
        self.canvas = wx.TextCtrl(pn, style=wx.TE_MULTILINE | wx.HSCROLL)
        self.create = wx.Button(pn, label='Create Figure')
        self.create.Bind(wx.EVT_BUTTON, self.createFigure)
        # Option controls stacked vertically in a right-hand column.
        optionBoxSizer = wx.BoxSizer(wx.VERTICAL)
        optionBoxSizer.Add(self.showPackage, proportion=0, flag=wx.TOP, border=5)
        optionBoxSizer.Add(self.showClass, proportion=0, flag=wx.TOP, border=5)
        optionBoxSizer.Add(self.create, proportion=0, flag=wx.TOP, border=5)
        # Horizontal main sizer: canvas takes all remaining space
        # (proportion=1), the option column keeps its natural width.
        mainBoxSizer = wx.BoxSizer()
        mainBoxSizer.Add(self.canvas, proportion=1, flag=wx.EXPAND | wx.ALL, border=5)
        mainBoxSizer.Add(optionBoxSizer, proportion=0, flag=wx.EXPAND | wx.TOP | wx.BOTTOM | wx.RIGHT, border=5)
        pn.SetSizer(mainBoxSizer)
        self.SetTitle('Analysis Demo')
        self.SetSize((600,400))
        self.Centre()
        self.Show(True)

    def createFigure(self, event):
        # TODO: not implemented — clicking "Create Figure" is a no-op.
        pass
def main():
    """Create the wx application, show the demo frame, and run the event loop."""
    app = wx.App()
    AnalysisDemo(None)
    app.MainLoop()


if __name__ == '__main__':
    main()
|
mit
|
Python
|
|
b19d6a63d80919b3e7a2f3c40cd026085a526614
|
Create Rehash.py
|
UmassJin/Leetcode
|
LintCode/Rehash.py
|
LintCode/Rehash.py
|
'''
Medium Rehashing Show result
25% Accepted
The size of the hash table is not determinate at the very beginning. If the total size of keys is too large (e.g. size >= capacity / 10), we should double the size of the hash table and rehash every keys. Say you have a hash table looks like below:
size=3, capacity=4
[null, 21, 14, null]
↓ ↓
9 null
↓
null
The hash function is:
int hashcode(int key, int capacity) {
return key % capacity;
}
here we have three numbers, 9, 14 and 21, where 21 and 9 share the same position as they all have the same hashcode 1 (21 % 4 = 9 % 4 = 1). We store them in the hash table by linked list.
rehashing this hash table, double the capacity, you will get:
size=3, capacity=8
index: 0 1 2 3 4 5 6 7
hash : [null, 9, null, null, null, 21, 14, null]
Given the original hash table, return the new hash table after rehashing .
Have you met this question in a real interview? Yes
Example
Given [null, 21->9->null, 14->null, null],
return [null, 9->null, null, null, null, 21->null, 14->null, null]
Note
For negative integer in hash table, the position can be calculated as follow:
C++/Java: if you directly calculate -4 % 3 you will get -1. You can use function: a % b = (a % b + b) % b to make it is a non negative integer.
Python: you can directly use -1 % 3, you will get 2 automatically.
'''
"""
Definition of ListNode
class ListNode(object):
def __init__(self, val, next=None):
self.val = val
self.next = next
"""
class Solution:
    """
    @param hashTable: A list of The first node of linked list
    @return: A list of The first node of linked list which have twice size
    """
    def rehashing(self, hashTable):
        """Double the table's capacity and redistribute every node.

        Nodes are re-inserted in traversal order, so relative order inside
        each new bucket matches the original chains (as the problem's
        example requires).

        Returns:
            list: the new table of bucket heads (None for empty buckets);
            an empty/None input is returned unchanged instead of the
            original's bare ``return`` (which produced None for ``[]``).
        """
        if not hashTable:
            return hashTable
        new_size = len(hashTable) * 2
        new_table = [None] * new_size
        # Track each bucket's tail so appending is O(1); the original
        # re-walked the chain for every insert (O(k^2) per bucket).
        tails = [None] * new_size
        for head in hashTable:
            node = head
            while node is not None:
                index = node.val % new_size
                fresh = ListNode(node.val)
                if tails[index] is None:
                    new_table[index] = fresh
                else:
                    tails[index].next = fresh
                tails[index] = fresh
                node = node.next
        return new_table
|
mit
|
Python
|
|
af4a5e92dc7ef8bffa96c6e556671e1c49116a70
|
Test commit
|
kmuehlbauer/wradlib,kmuehlbauer/wradlib,heistermann/wradlib,wradlib/wradlib,heistermann/wradlib,wradlib/wradlib
|
radproc.py
|
radproc.py
|
#-------------------------------------------------------------------------------
# Name: module1
# Purpose:
#
# Author: heistermann
#
# Created: 26.10.2011
# Copyright: (c) heistermann 2011
# Licence: <your licence>
#-------------------------------------------------------------------------------
#!/usr/bin/env python
def main():
    """Entry point placeholder — module logic not yet implemented."""
    pass


if __name__ == '__main__':
    main()
|
mit
|
Python
|
|
217b95f7ecb42dd4a9a671703c86a2b83838bb28
|
rename to .py
|
joelstanner/codeeval,joelstanner/codeeval
|
python_solutions/PENULTIMATE_WORD/PENULTIMATE_WORD.py
|
python_solutions/PENULTIMATE_WORD/PENULTIMATE_WORD.py
|
"""
Write a program which finds the next-to-last word in a string.
INPUT SAMPLE:
Your program should accept as its first argument a path to a filename. Input
example is the following:
some line with text
another line
Each line has more than one word.
OUTPUT SAMPLE:
Print the next-to-last word in the following way:
with
another
"""
from sys import argv
def penultimate(input_file):
    """Print the next-to-last word of every line in *input_file*.

    Args:
        input_file: path to a text file; per the challenge spec every line
            contains more than one word.

    Returns:
        list: the penultimate word of each qualifying line, in file order
        (also printed, preserving the original behaviour).
    """
    results = []
    with open(input_file, 'r') as file:
        for line in file:
            words = line.rstrip().split()
            # Guard so a stray blank or one-word line does not raise
            # IndexError (the original crashed on such input).
            if len(words) >= 2:
                results.append(words[-2])
                print(words[-2])
    return results


if __name__ == '__main__':
    penultimate(argv[1])
|
mit
|
Python
|
|
bc10094ef6250558d713f3636dbd01f295503f15
|
Create NovelAPI.py
|
GetRektByMe/NovelAPI,rachmadaniHaryono/Raitonoberu,GetRektByMe/Raitonoberu
|
NovelAPI.py
|
NovelAPI.py
|
import aiohttp
from bs4 import BeautifulSoup
onlysession = aiohttp.ClientSession()
class NovelUpdatesAPI:
    """Scraper for novelupdates.com search results and series pages.

    NOTE(review): this class has several latent bugs, flagged inline below.
    The code is kept as-is and only annotated.
    """

    def __init__(self):
        # Base endpoint; search terms are passed as GET parameters.
        self.baseurl = 'http://www.novelupdates.com/'

    async def search_novel_updates(self, term: str):
        """Return the URL of the first series matching *term*."""
        term = term.replace(' ', '+')
        params = {'s': term, 'post_type': 'seriesplan'}
        # NOTE(review): ``with onlysession`` closes the module-level
        # ClientSession on exit, so any later request on it fails; the usual
        # pattern is ``async with aiohttp.ClientSession()`` per task, or
        # keeping the shared session open — confirm intent.
        with onlysession as session:
            async with session.get(self.baseurl, params=params) as response:
                assert isinstance(response, aiohttp.ClientResponse)
                assert response.status == 200
                search = BeautifulSoup(await response.text(), 'lxml')
                # First search hit is assumed to be the best match.
                parsedsearch = search.find('a', class_='w-blog-entry-link').get('href')
                return parsedsearch

    async def page_info_parser(self, term):
        """Scrape the series page found for *term* into a metadata dict."""
        to_parse = await self.search_novel_updates(term)
        # NOTE(review): see the session-reuse note above — this second use of
        # the already-closed ``onlysession`` will fail.
        with onlysession as session:
            async with session.get(to_parse) as response:
                assert isinstance(response, aiohttp.ClientResponse)
                assert response.status == 200
                parse_info = BeautifulSoup(await response.text(), 'lxml')
                # BUG: ``data`` is never initialised — the first assignment
                # below raises NameError; it needs ``data = {}`` first.
                # BUG: BeautifulSoup exposes ``.text`` as a *property*, so
                # every ``.text()`` call below raises TypeError; ``find_all``
                # returns a ResultSet, which has no ``.text`` at all; and
                # ``id_=`` is not a recognised keyword (should be ``id=``).
                data['title'] = parse_info.find(class_='seriestitle new')
                data['cover'] = parse_info.find('img').get('src')
                data['type'] = parse_info.find('a', class_='genre type').text()
                data['genre'] = parse_info.find_all('a', class_='genre').text()
                data['tags'] = parse_info.find_all('a', class_='genre odd').text()
                data['rating'] = parse_info.find('span', class_='votetext').text()
                data['language'] = parse_info.find('a', class_='genre lang').text()
                data['author'] = parse_info.find('a', class_='authtag').text()
                data['artist'] = parse_info.find('a', class_='artiststag').text()
                data['year'] = parse_info.find('div', id_='edityear').text()
                data['novel_status'] = parse_info.find('div', id_='editstatus').text()
                data['licensed'] = parse_info.find('div', id_='showlicensed').text()
                data['completely_translated'] = parse_info.find('div', id_='showtranslated').text()
                data['publisher'] = parse_info.find('a', class_='genre', id_='myopub').text()
                data['english_publisher'] = parse_info.find('span', class_='seriesna').text()
                data['frequency'] = parse_info.find('h5', class_='seriesother').text()
                data['description'] = parse_info.find('div', id_='editdescription').text().strip()
                data['aliases'] = parse_info.find('div', id_='editassociated').text()
                data['related'] = parse_info.find('h5', class_='seriesother').text()
                return data
|
mit
|
Python
|
|
ea3f995775b42784d99ebc19effce949a700eb28
|
Update blocks.py
|
adityaatluri/Urutu,urutu/Urutu
|
Urutu/cl/blocks.py
|
Urutu/cl/blocks.py
|
## OpenCL blocks are initialized here!
## Created by: Aditya Atluri
## Date: Mar 03 2014
def bx(blocks_dec, kernel):
    """Append the OpenCL declaration of block index ``bx`` to *kernel*.

    The declaration is emitted at most once: when *blocks_dec* is False the
    line is appended and the flag flips to True; otherwise the kernel text
    is returned untouched.
    """
    if blocks_dec == False:
        decl = "int bx = (get_global_id(0) - get_local_id(0)) / get_local_size(0);\n"
        return kernel + decl, True
    return kernel, blocks_dec
def by(blocks_dec, kernel):
if blocks_dec == False:
string = "int by = (get_global_id(1) - get_local_id(1)) / get_local_size(1);\n"
kernel = kernel + string
blocks_dec = True
return kernel, blocks_dec
def bz(blocks_dec, kernel):
if blocks_dec == False:
string = "int bz = (get_global_id(2) - get_local_id(2)) / get_local_size(2);\n"
kernel = kernel + string
blocks_dec = True
return kernel, blocks_dec
def blocks_decl(stmt, var_nam, var_val, blocks):
    """Record the Bx/By/Bz block-size declarations found in *stmt*.

    Args:
        stmt: tokenised statement, e.g. ``['Bx', '=', '2']``; values sit at
            ``index(name) + 1 + index('=')``, matching the original layout.
        var_nam: list of already-declared names (appended to in place).
        var_val: parallel list of integer values (appended to in place).
        blocks: 3-element list of block sizes, indexed x/y/z (mutated).

    Returns:
        tuple: ``(var_nam, var_val, blocks)`` — the same (mutated) objects.
    """
    equ = stmt.index('=')
    # One loop replaces three copy-pasted stanzas; the ``name in stmt``
    # guard fixes the original's ValueError when a dimension was absent
    # (it checked var_nam but then called stmt.index unconditionally).
    for axis, name in enumerate(('Bx', 'By', 'Bz')):
        if var_nam.count(name) < 1 and name in stmt:
            pos = stmt.index(name)
            value = int(stmt[pos + 1 + equ])
            var_nam.append(stmt[pos])
            var_val.append(value)
            blocks[axis] = value
    return var_nam, var_val, blocks
|
## OpenCL blocks are initialized here!
## Created by: Aditya Atluri
## Date: Mar 03 2014
def bx(blocks_dec, kernel):
if blocks_dec == False:
string = "int bx = get_group_id(0);\n"
kernel = kernel + string
blocks_dec = True
return kernel, blocks_dec
def by(blocks_dec, kernel):
if blocks_dec == False:
string = "int by = get_group_id(1);\n"
kernel = kernel + string
blocks_dec = True
return kernel, blocks_dec
def bz(blocks_dec, kernel):
if blocks_dec == False:
string = "int bz = get_group_id(2);\n"
kernel = kernel + string
blocks_dec = True
return kernel, blocks_dec
def blocks_decl(stmt, var_nam, var_val, blocks):
equ = stmt.index('=')
if var_nam.count('Bx') < 1:
pos = stmt.index('Bx')
pos_val = stmt[pos + 1 + equ]
var_nam.append(stmt[pos])
var_val.append(int(pos_val))
blocks[0] = int(pos_val)
if var_nam.count('By') < 1:
pos = stmt.index('By')
pos_val = stmt[pos + 1 + equ]
var_nam.append(stmt[pos])
var_val.append(int(pos_val))
blocks[1] = int(pos_val)
if var_nam.count('Bz') < 1:
pos = stmt.index('Bz')
pos_val = stmt[pos + 1 + equ]
var_nam.append(stmt[pos])
var_val.append(int(pos_val))
blocks[2] = int(pos_val)
return var_nam, var_val, blocks
|
apache-2.0
|
Python
|
f9bad030fffbf6b50ec5833c4024656c725c7679
|
Rename and add item3.
|
Vayne-Lover/Effective
|
Python/item3.py
|
Python/item3.py
|
#!/usr/local/bin/python
# -*- coding:utf-8 -*-
import sys
def to_unicode(unicode_or_str):
    """Return *unicode_or_str* as a unicode object (Python 2 idiom).

    Byte strings are decoded as UTF-8; unicode input is returned unchanged.
    NOTE(review): Python 2 only — ``str.decode`` does not exist on Python 3
    text strings.
    """
    if isinstance(unicode_or_str,str):
        v=unicode_or_str.decode('utf-8')
    else:
        v=unicode_or_str
    return v
def to_str(unicode_or_str):
    """Return *unicode_or_str* as a UTF-8 byte string (Python 2 idiom).

    Unicode input is encoded; byte-string input is returned unchanged.
    NOTE(review): Python 2 only — the ``unicode`` builtin does not exist
    on Python 3.
    """
    if isinstance(unicode_or_str,unicode):
        v=unicode_or_str.encode('utf-8')
    else:
        v=unicode_or_str
    return v
str='十大'
#print to_unicode(str)
print sys.getdefaultencoding()
|
mit
|
Python
|
|
09f8a25a80924a5f0b29969ffe3f89ad798ec1b4
|
Create default.py
|
BorgesGabo/gaia,BorgesGabo/gaia
|
default.py
|
default.py
|
# -*- coding: utf-8 -*-
# this file is released under public domain and you can use without limitations
# -------------------------------------------------------------------------
# This is a sample controller
# - index is the default action of any application
# - user is required for authentication and authorization
# - download is for downloading files uploaded in the db (does streaming)
# -------------------------------------------------------------------------
def order():
#this function uploads and handles the form from db.po's table also uploads a query which select in reverse order all data in db.po's table
ordenes=db(db.po.id>0).select(orderby=~db.po.id)
form=SQLFORM(db.po, buttons =[TAG.button('save', _type="submit"),TAG.button('update', _type="button", _onClick ="parent.location='%s'" %URL(order)), TAG.button('next',_type="button", _onClick=" parent.location='%s'" %URL(orderd))])
if form.process().accepted:
response.flash='order accepted'
elif form.errors:
response.flash= 'check the data inserted'
else:
response.flash= 'please fill out the form'
return dict(ordenes=ordenes, form=form)
def orderd():
#this function uploads and handles the form from db.po_detail's table also uploads a query which select in reverse order all data in db.po_detail table
ordenes=db(db.po_detail.id>0).select(orderby=~db.po_detail.po_id)
form=SQLFORM(db.po_detail, buttons = [TAG.button('save',_type="submit"),TAG.button('update',_type="button",_onClick = "parent.location='%s' " % URL(orderd))])
if form.process().accepted:
response.flash = 'form accepted'
elif form.errors:
response.flash = 'form has errors'
else:
response.flash = 'please fill out the form'
return dict(ordenes=ordenes, form=form)
def form1():
#This function creates a form from db.customer's table
form = SQLFORM(db.customer,buttons = [TAG.button('save',_type="submit"),TAG.button('next',_type="button",_onClick = "parent.location='%s' " % URL(form2))])
if form.process().accepted:
response.flash = 'form accepted'
elif form.errors:
response.flash = 'form has errors'
else:
response.flash = 'please fill out the form'
return dict(form=form)
def form2():
#This function creates a form from db.po's table
form = SQLFORM(db.po,buttons = [TAG.button('save',_type="submit"),TAG.button('next',_type="button",_onClick = "parent.location='%s' " % URL(form3))])
if form.process().accepted:
response.flash = 'form accepted'
elif form.errors:
response.flash = 'form has errors'
else:
response.flash = 'please fill out the form'
return dict(form=form)
def form3():
#This function creates a form db.po_detail's form
form = SQLFORM(db.po_detail)
if form.process().accepted:
response.flash = 'form accepted'
elif form.errors:
response.flash = 'form has errors'
else:
response.flash = 'please fill out the form'
return dict(form=form)
def form4():
#This function creates a form from db.product's table
form = SQLFORM(db.product)
if form.process().accepted:
response.flash = 'form accepted'
elif form.errors:
response.flash = 'form has errors'
else:
response.flash = 'please fill out the form'
return dict(form=form)
def form5():
#This function creates a grid form from db.product's table
grid = SQLFORM.grid(db.po_detail, user_signature=False)
return locals()
def index():
"""
example action using the internationalization operator T and flash
rendered by views/default/index.html or views/generic.html
if you need a simple wiki simply replace the two lines below with:
return auth.wiki()
"""
response.flash = T("Hello World")
return dict(message=T('Welcome to web2py!'))
def user():
"""
exposes:
http://..../[app]/default/user/login
http://..../[app]/default/user/logout
http://..../[app]/default/user/register
http://..../[app]/default/user/profile
http://..../[app]/default/user/retrieve_password
http://..../[app]/default/user/change_password
http://..../[app]/default/user/bulk_register
use @auth.requires_login()
@auth.requires_membership('group name')
@auth.requires_permission('read','table name',record_id)
to decorate functions that need access control
also notice there is http://..../[app]/appadmin/manage/auth to allow administrator to manage users
"""
return dict(form=auth())
@cache.action()
def download():
"""
allows downloading of uploaded files
http://..../[app]/default/download/[filename]
"""
return response.download(request, db)
def call():
"""
exposes services. for example:
http://..../[app]/default/call/jsonrpc
decorate with @services.jsonrpc the functions to expose
supports xml, json, xmlrpc, jsonrpc, amfrpc, rss, csv
"""
return service()
|
unlicense
|
Python
|
|
2caf6ab1e43ad29864e4dc652a60e445adfa31bb
|
Add session merger tool
|
DynamoDS/Coulomb,DynamoDS/Coulomb,DynamoDS/Coulomb
|
SessionTools/session_merger.py
|
SessionTools/session_merger.py
|
import gzip
import os
import random
import time
import sys
VERBOSE = True
def log(s):
if VERBOSE:
print (time.strftime("%Y-%m-%d %H:%M:%S") + " " + str(s))
if len(sys.argv) != 3:
print ("Usage: python session_merger.py PathToInSessions PathToTargetSessions")
exit(1)
inSessionsPath = sys.argv[1]
print(inSessionsPath)
outSessionsPath = sys.argv[2]
print(outSessionsPath)
inChecksumMap = {}
outChecksumMap = {}
def countLinesInGzipFile(path):
    """Return the number of lines in the gzip-compressed file at *path*."""
    with gzip.open(path) as handle:
        return sum(1 for _ in handle)
def checksumFiles(paths):
    """Map each path to its gzip line count (0 for missing files).

    Args:
        paths: iterable of filesystem paths; may be a one-shot iterator
            (the caller passes ``dict.itervalues()``).

    Returns:
        dict: path -> line count of the gzip file, or 0 if it is not a file.
    """
    # Materialise first: the original called len() on the iterable inside
    # the loop, which fails (or silently misbehaves) for iterators.
    paths = list(paths)
    total = len(paths)
    checksumMap = {}
    for c, path in enumerate(paths, start=1):
        if c % 1000 == 0:
            # BUG FIX: the original concatenated str + int ('checksumming' + c),
            # raising TypeError the first time this branch was taken.
            log('checksumming ' + str(c) + ":" + str(float(c) / total))
        checksumMap[path] = countLinesInGzipFile(path) if os.path.isfile(path) else 0
    return checksumMap
inPaths = []
i = 0
log('Enumerating and checksumming the input files')
for root, subdirs, files in os.walk(inSessionsPath):
for ff in files:
i = i + 1
if i % 1000 == 0:
log (i)
path = os.path.join(root, ff)
if (not path.endswith('.gz')):
continue
inPaths.append(path)
log('Paths to process: ' + str(len(inPaths)))
outPaths = {}
for inPath in inPaths:
outPaths[inPath] = inPath.replace(inSessionsPath, outSessionsPath)
log('Computing file length checksums')
inChecksumMap = checksumFiles(inPaths)
outChecksumMap = checksumFiles(outPaths.itervalues())
i = 0
log('Moving input files')
for inPath in inPaths:
fin = gzip.open(inPath)
outPath = outPaths[inPath]
if not os.path.exists(os.path.dirname(outPath)):
os.makedirs(os.path.dirname(outPath))
fout = gzip.open(outPath, 'ab')
for ln in fin:
fout.write(ln)
# check that the checksum fails when it should by introducing an extra line in a file
# if (outPath == 'session_merger_test_out/a/1/100.gz'):
# fout.write('checksum test\n')
fin.close()
fout.close()
log('Verifying the checksum of the output files')
newOutChecksumMap = checksumFiles(outPaths.itervalues())
for inPath in inPaths:
inChecksum = inChecksumMap[inPath]
outChecksum = outChecksumMap[outPaths[inPath]]
newOutChecksum = newOutChecksumMap[outPaths[inPath]]
if (newOutChecksum != inChecksum + outChecksum):
log("Checksum doesn't match for file " + outPaths[inPath] + ": expected " + str(inChecksum + outChecksum) + ", got " + str(newOutChecksum))
else:
os.remove(inPath)
|
mit
|
Python
|
|
b6531b3712a5c6e42f4cd1241e740f46fd448696
|
add send_setpoint
|
AlexisTM/Indoor_Position_lasers,AlexisTM/Indoor_Position_lasers,AlexisTM/Indoor_Position_lasers,AlexisTM/Indoor_Position_lasers,AlexisTM/Indoor_Position_lasers
|
laserpack/bin/send_setpoint.py
|
laserpack/bin/send_setpoint.py
|
#!/usr/bin/env python
# vim:set ts=4 sw=4 et:
import rospy
import mavros
import time
import tf
import numpy as np
from laserpack.getch import *
from threading import Thread
from geometry_msgs.msg import PoseStamped
from sensor_msgs.msg import Imu
from mavros_msgs.srv import SetMode
from mavros_msgs.msg import State
from mavros_msgs.srv import CommandBool
from mavros.utils import *
def sendSetpoint():
global xSetPoint
global ySetPoint
global zSetPoint
global setPointsCount
setPointsCount = 0
local_setpoint_pub = rospy.Publisher('mavros/setpoint_position/local', PoseStamped, queue_size=10)
rate = rospy.Rate(20.0)
while not rospy.is_shutdown():
msg = PoseStamped()
msg.pose.position.x = xSetPoint
msg.pose.position.y = ySetPoint
msg.pose.position.z = zSetPoint
msg.pose.orientation.x = 0.0
msg.pose.orientation.y = 0.0
msg.pose.orientation.z = 0.0
msg.pose.orientation.w = 1.0
local_setpoint_pub.publish(msg)
rate.sleep()
setPointsCount = setPointsCount + 1
def State_Callback(data):
global state
state = data
def Pose_Callback(data):
global pose
pose = data
def IMU_Callback(data):
global imu
imu = data
def InterfaceKeyboard():
global zSetPoint
global pose
global imu
global disarm
global arming_client
global set_mode_client
what = getch()
if what == "z":
xSetPoint = xSetPoint + 0.1
if what == "s":
xSetPoint = xSetPoint - 0.1
if what == "q":
ySetPoint = ySetPoint + 0.1
if what == "d":
ySetPoint = ySetPoint - 0.1
if what == "u":
zSetPoint = zSetPoint + 0.1
if what == "j":
zSetPoint = zSetPoint - 0.1
if what == "q":
arming_client(False)
if what == "a":
arming_client(True)
if what == "e":
set_mode_client(custom_mode = "OFFBOARD")
if what == "m":
exit()
Q = (
imu.orientation.x,
imu.orientation.y,
imu.orientation.z,
imu.orientation.w)
euler = tf.transformations.euler_from_quaternion(Q)
rospy.loginfo("Positions sent : %i, Setpoints sent : %i",PositionsCount, setPointsCount )
rospy.loginfo("Manual position %s", zPosition)
rospy.loginfo("Position is %s", pose.pose.position.z)
rospy.loginfo("Setpoint is now %s", zSetPoint)
rospy.loginfo("IMU :")
rospy.loginfo("roll : %s", euler[0])
rospy.loginfo("pitch : %s", euler[1])
rospy.loginfo("yaw : %s", euler[2])
def init():
global state
global disarm
global zSetPoint
global setPointsCount
global PositionsCount
global arming_client
global set_mode_client
setPointsCount = 0
PositionsCount = 0
zSetPoint = 0
state = State()
disarm = False
rospy.init_node('laserpack_main')
rate = rospy.Rate(20.0)
# On récupère des informations
# - Pose
# - Imu
# - Etat
pose_sub = rospy.Subscriber('mavros/local_position/pose', PoseStamped, Pose_Callback)
imu_sub = rospy.Subscriber('mavros/imu/data', Imu, IMU_Callback)
state_sub = rospy.Subscriber('mavros/state', State, State_Callback)
rospy.wait_for_service('mavros/cmd/arming')
arming_client = rospy.ServiceProxy('mavros/cmd/arming', CommandBool)
rospy.wait_for_service('mavros/set_mode')
set_mode_client = rospy.ServiceProxy('mavros/set_mode', SetMode)
tSetPoints = Thread(target=sendSetpoint).start()
while not rospy.is_shutdown():
InterfaceKeyboard()
if __name__ == '__main__':
rospy.loginfo("We are ready")
try:
init()
except rospy.ROSInterruptException:
rospy.loginfo("init failed")
pass
|
mit
|
Python
|
|
e9861532a3aa420b68bc797ee54f7429ea73c0e7
|
fix #1
|
Wen777/justKode,Wen777/justKode,Wen777/justKode
|
rest.py
|
rest.py
|
from flask import Flask
app = Flask(__name__)
@app.route("/")
def ping():
    """Health-check endpoint: confirm the server received the request."""
    return "python server received 'ping' request!"

if __name__ == "__main__":
    # Debug mode enables the reloader and interactive traceback pages;
    # not suitable for production deployments.
    app.run(debug=True)
|
mit
|
Python
|
|
e40091a2105fc4e04bcce9b2c6ebcc7cf28f22ca
|
add module uwsgi
|
dozymoe/fireh_runner
|
modules/uwsgi.py
|
modules/uwsgi.py
|
"""uwsgi-for-Django module.
Website: https://uwsgi-docs.readthedocs.io/en/latest/
"""
import os
def uwsgi(loader, *args):
    """Exec the virtualenv's ``uwsgi`` binary, forwarding *args* verbatim.

    Note: ``os.execvp`` replaces the current process image and never returns.
    """
    loader.setup_virtualenv()
    venv_dir = loader.get_virtualenv_dir()
    binargs = [os.path.join(venv_dir, 'bin', 'uwsgi')] + list(args)
    os.execvp(binargs[0], binargs)
def uwsgi_run(loader, project=None, variant=None, *args): #pylint:disable=keyword-arg-before-vararg
    """Run uWSGI for a project: set up env, cd to its work dir, then exec.

    Args:
        loader: runner object providing env/virtualenv/config helpers.
        project: project name; resolved by ``setup_project_env`` if None.
        variant: configuration variant; likewise resolved if None.
        *args: extra command-line arguments appended to the uwsgi call.

    Note: ``os.execvp`` replaces the current process and never returns.
    """
    project, variant = loader.setup_project_env(project, variant)
    loader.setup_virtualenv()
    loader.setup_shell_env()
    config = loader.get_project_config()
    venv_dir = loader.get_virtualenv_dir()
    # --master/--die-on-term are standard for supervised deployments.
    binargs = [os.path.join(venv_dir, 'bin', 'uwsgi'), '--master',
            '--die-on-term']
    if not loader.is_production():
        # Keep stdin usable for interactive debugging outside production.
        binargs.append('--honour-stdin')
    # Work dir defaults to the project name when the config omits it.
    work_dir = config.get('work_dir', project)
    work_dir = loader.expand_path(work_dir)
    os.chdir(work_dir)
    binargs += list(args)
    os.execvp(binargs[0], binargs)
commands = (uwsgi, uwsgi_run)
|
mit
|
Python
|
|
a1d523c2daf563444761cb7315e18e1e0ee1b506
|
Add ARM program (forgot to add)
|
windelbouwman/ppci-mirror,windelbouwman/ppci-mirror,windelbouwman/ppci-mirror,windelbouwman/ppci-mirror,windelbouwman/ppci-mirror,windelbouwman/ppci-mirror
|
ppci/arch/arm/program.py
|
ppci/arch/arm/program.py
|
from ppci.programs import MachineProgram
class ArmProgram(MachineProgram):
    """ Machine code for most mobile devices and e.g. the Raspberry Pi.
    """

    def _check_items(self, items):
        # Accept items unchanged; no ARM-specific validation yet.
        return items

    def _copy(self):
        # Copying a machine-code program is not supported.
        raise NotImplementedError()

    def _get_report(self, html):
        """Render a plain-text dump of the first object's sections, symbols
        and relocations.  NOTE(review): the *html* flag is ignored here —
        confirm whether an HTML report was intended."""
        obj = self._items[0]
        lines = []
        lines.append(repr(obj))
        for section in obj.sections:
            lines.append(repr(section))
        for symbol in obj.symbols:
            lines.append(repr(symbol))
        for reloc in obj.relocations:
            lines.append(repr(reloc))
        return '\n'.join(lines)

    # todo: does this make sense for arm?
    def as_object(self):
        """ Export as binary code object (bytes)
        """
        obj = self._items[0]
        return bytes(obj.get_section('code').data)
|
bsd-2-clause
|
Python
|
|
10af3d7ee13afda37e8ecf76927bc2fedfe22b6f
|
add expand module
|
biothings/biothings_explorer,biothings/biothings_explorer
|
biothings_explorer/expand/__init__.py
|
biothings_explorer/expand/__init__.py
|
from collections import defaultdict
from ..smartapi_kg import MetaKG
from ..call_apis import APIQueryDispatcher
from ..query.utils import annotateEdgesWithInput
class Expander:
    """Expand biomedical entities to their subclass children via BTE APIs."""

    def __init__(self):
        # Meta knowledge graph built from the locally bundled SmartAPI specs.
        self.kg = MetaKG()
        self.kg.constructMetaKG(source="local")

    def __getEdges(self, semanticType):
        """
        Get a list of smart-api edges based on semantic type.
        :param semanticType: Type of bioentities to expand
        :returns: list of smartapi edges expanding the semantic type
        """
        # Self-loop edges only: same input/output type with "has_subclass".
        return self.kg.filter(
            {
                "input_type": semanticType,
                "output_type": semanticType,
                "predicate": "has_subclass",
            }
        )

    @staticmethod
    def __parseResponse(res):
        """Index API records by their resolved output identifier.

        Returns None when *res* is empty/falsy; otherwise a dict mapping
        identifier -> resolved-ids record.  Records lacking the
        ``$output_id_mapping``/``resolved_ids`` keys are skipped.
        """
        if not res:
            return
        result = {}
        for rec in res:
            if (
                "$output_id_mapping" in rec
                and "resolved_ids" in rec["$output_id_mapping"]
            ):
                result[
                    rec["$output_id_mapping"]["resolved_ids"]["id"]["identifier"]
                ] = rec["$output_id_mapping"]["resolved_ids"]
        return result

    @staticmethod
    def __groupIDsbySemanticType(output_ids):
        """Bucket resolved-id records by their semantic ``type`` field."""
        result = defaultdict(list)
        for resolved_ids in output_ids:
            result[resolved_ids.get("type")].append(resolved_ids)
        return result

    def expand(self, inputs):
        """
        Expand input biomedical objects to its children
        :param semanticType: semantic type of the inputs
        :param inputs: list of resolved identifiers
        """
        grpedIDs = self.__groupIDsbySemanticType(inputs)
        bte_edges = []
        for semanticType, resolvedIDs in grpedIDs.items():
            smartapi_edges = self.__getEdges(semanticType)
            if not smartapi_edges:
                continue
            # Attach concrete input IDs to each candidate edge; skip types
            # that yield no executable edges.
            tmp_edges = annotateEdgesWithInput(smartapi_edges, resolvedIDs)
            if not tmp_edges:
                continue
            bte_edges += tmp_edges
        if not bte_edges:
            # Nothing to expand: returns None — callers must check for it.
            return
        dp = APIQueryDispatcher(bte_edges)
        res = dp.syncQuery()
        return self.__parseResponse(res)
|
apache-2.0
|
Python
|
|
b55e27e7420443b4d9a48da7c4e5501e1de66f44
|
Add some code
|
quantbucket/quantbucket-repo
|
modules/descriptive_statistics/app.py
|
modules/descriptive_statistics/app.py
|
class DescriptiveStatistics():
    """Container for descriptive-statistics computations over *data*."""

    def __init__(self, data):
        # BUG FIX: ``__init__`` must return None — the original
        # ``return self.main()`` raised TypeError at construction time.
        # Keep the data and run main() for its effects instead.
        self.data = data
        self.main()

    def main(self):
        """Pipeline entry point; placeholder that currently always succeeds."""
        return True
|
mit
|
Python
|
|
c505b95c3affe3805bd9274c3aedd1c6640c5ff5
|
Create solution.py
|
lilsweetcaligula/Online-Judges,lilsweetcaligula/Online-Judges,lilsweetcaligula/Online-Judges
|
leetcode/medium/linked_list_cycle_ii/py/solution.py
|
leetcode/medium/linked_list_cycle_ii/py/solution.py
|
# Definition for singly-linked list.
# class ListNode(object):
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution(object):
    def detectCycle(self, head):
        """Return the node where the list's cycle begins, or None.

        Floyd's tortoise-and-hare: once the two pointers meet inside the
        cycle, restarting one pointer from the head and stepping both by
        one lands them together on the cycle's entry node.

        :type head: ListNode
        :rtype: ListNode
        """
        slow = fast = head
        while fast is not None and fast.next is not None:
            slow = slow.next
            fast = fast.next.next
            if slow == fast:
                # Phase 2: walk a fresh pointer from head in lockstep.
                probe = head
                while probe != slow:
                    probe = probe.next
                    slow = slow.next
                return probe
        return None
|
mit
|
Python
|
|
ebd410b5f7afe4c38977ecf24735a8803e8bb3fb
|
Advance Version of : Example->Basic->Objects->MultipleCons
|
jdf/processing.py,mashrin/processing.py,jdf/processing.py,mashrin/processing.py,jdf/processing.py,tildebyte/processing.py,tildebyte/processing.py,tildebyte/processing.py,mashrin/processing.py
|
mode/examples/Basics/Objects/MultipleConstructors/MultipleConstructors.pyde
|
mode/examples/Basics/Objects/MultipleConstructors/MultipleConstructors.pyde
|
'''
Multiple Constructors
A class can have multiple constructors that assign the fields in different ways.
Sometimes it's beneficial to specify every aspect of an object's data by assigning
parameters to the fields, but other times it might be appropriate to define only
one or a few.
Advanced example written in Python Mode by: Prabhjot Singh (NITH)
Original example in Java Mode: Example->Basic->Objects->MultipleConstructors
'''
def setup():
size(640, 360)
smooth(4)
noLoop()
ellipseMode(CENTER)
strokeWeight(2.5)
stroke(0)
fill('#FFFF00')
global spots
spots = Spot(), Spot(radius=58), Spot(x=120, y=70),\
Spot(width / 2, height / 2, 120)
def draw():
background(0300)
for sp in spots:
sp.display()
class Spot:
def __init__(self, x=0, y=0, radius=40):
self.x, self.y = x or width / 4, y or height / 2
self.radius, self.diam = radius, radius * 2
def display(self):
ellipse(self.x, self.y, self.diam, self.diam)
|
'''
Multiple constructors
A class can have multiple constructors that assign the fields in different ways.
Sometimes it's beneficial to specify every aspect of an object's data by assigning
parameters to the fields, but other times it might be appropriate to define only
one or a few.
Example written in Python by : Prabhjot Singh (NITH)
original example in java mode : Example->Basic->Objects->MultipleConstructors
'''
def setup():
size(640, 360)
background(204)
noLoop()
global sp1, sp2, sp3, sp0
# Run the constructor without parameters
sp0 = Spot()
# Run the constructor with one parameters
sp1 = Spot(radius=58)
# Run the constructor with two parameters
sp2 = Spot(x=130, y=70)
# Run the constructor with three parameters
sp3 = Spot(width * 0.5, height * 0.5, 120)
def draw():
global sp0, sp1, sp2, sp3
sp1.display()
sp2.display()
sp3.display()
sp0.display()
class Spot:
# First version of the Spot constructor
# the fields are assigned default values
# Second version of the Spot constructor
# the fields are assigned with parameters
def __init__(self, x=None, y=None, radius=None):
''' Constructor for the Spot class'''
if x is None:
self.x = width * 0.25
else:
self.x = x
if y is None:
self.y = height * 0.5
else:
self.y = y
if radius is None:
self.radius = 40
else:
self.radius = radius
def display(self):
ellipse(self.x, self.y, self.radius * 2, self.radius * 2)
|
apache-2.0
|
Python
|
2b7fffd3f6f358df5613b9aa304f9a70f9b04bc2
|
add bloom filter script
|
kgrodzicki/cloud-computing-specialization,kgrodzicki/cloud-computing-specialization,kgrodzicki/cloud-computing-specialization
|
cloud-computing-concepts-part1/scripts/bloom-filter.py
|
cloud-computing-concepts-part1/scripts/bloom-filter.py
|
__author__ = 'grokrz'
m = 32
def hash_function(x, i, m):
    """i-th toy hash of *x* into *m* buckets: (x**2 * x**3 * i) mod m."""
    return (x ** 2 * x ** 3 * i) % m
def show_bits_set_to_1(val):
for i in range(1, 4):
print "Bit set to 1: " + str(hash_function(val, i, m))
show_bits_set_to_1(2013)
|
mit
|
Python
|
|
c3a83ed6158fcd9335f9253417ca4b24e9ab7934
|
Add test for fqdn thread leak
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
tests/pytests/unit/modules/test_network.py
|
tests/pytests/unit/modules/test_network.py
|
import threading
import pytest
import salt.modules.network as networkmod
from tests.support.mock import patch
@pytest.fixture
def configure_loader_modules():
return {networkmod: {}}
@pytest.fixture
def socket_errors():
# Not sure what kind of errors could be returned by getfqdn or
# gethostbyaddr, but we have reports that thread leaks are happening
with patch("socket.getfqdn", autospec=True, side_effect=Exception), patch(
"socket.gethostbyaddr", autospec=True, side_effect=Exception
):
yield
@pytest.mark.xfail
def test_when_errors_happen_looking_up_fqdns_threads_should_not_leak(socket_errors):
before_threads = threading.active_count()
networkmod.fqdns()
after_threads = threading.active_count()
assert (
before_threads == after_threads
), "Difference in thread count means the thread pool is not correctly cleaning up."
|
apache-2.0
|
Python
|
|
f58589f4bcb2aa233ebbd71831e31b1b9505e2c4
|
Create wiki_img_extractor.py
|
xadahiya/wiki_img_extractor
|
wiki_img_extractor.py
|
wiki_img_extractor.py
|
##/**
## * xa2.js
## * @author Akshay Dahiya - @xadahiya
## * @description Typingeek's tutor script
## */
import wikipedia, requests, lxml.html
###images and caption from wikipedia
##
##main image from table@class="infobox"
## img/@src,img/@alt
##
##wikipedia images from div@class="thumb tright"
## img@class = "thumbimage"/@src
## div@class = "thumbcaption"
## all text including link texts
##
def extract(query):
search_result = wikipedia.search(query)[0]
page = wikipedia.page(search_result)
page_url = page.url
page_title = page.title
print page_title
## doc = lxml.html.parse(page_url)
res = requests.get(page_url)
## doc = lxml.html.parse(res.content)
doc = lxml.html.fromstring(res.content)
## to get images
##Get main image and its alt
print "Main Image and its alt"
main_images = doc.xpath('//table[1][contains(concat(" ",@class," "),"infobox")]/tr/td//img/@src')
main_images_alt = doc.xpath('//table[1][contains(concat(" ",@class," "),"vcard")]/tr/td//img/@alt')
for link in main_images:
print "https:" + link
for alt in main_images_alt:
print alt
##Get thumbimages url and caption
print "\nThumbimage urls"
thumb_imgs = doc.xpath('//img[@class="thumbimage"]/@src')
for link in thumb_imgs:
print "https:" + link
## thumb_caption = doc.xpath('string(//div[@class="thumbcaption"])')
print "\nThumbimage captions"
thumb_caption = doc.xpath('//div[@class="thumbcaption"]')
for a in thumb_caption:
print ' '.join(a.xpath('string()').split())
## print a.xpath('string()')
|
mit
|
Python
|
|
f7328b96275bad6c3d8d9a4f844d47a65fe2bf4b
|
Create test_wrap.py
|
humbhenri/katas,humbhenri/katas,humbhenri/katas
|
wordwrap/test_wrap.py
|
wordwrap/test_wrap.py
|
import unittest
from wrap import wrap
class TestWW(unittest.TestCase):
def test_empty_string(self):
self.assertEqual('', wrap('', 1))
def test_string_smaller_than_col(self):
self.assertEqual('ab', wrap('ab', 3))
def test_string_without_spaces(self):
self.assertEqual('ab\ncd', wrap('abcd', 2))
def test_big_string_without_spaces(self):
self.assertEqual('ab\ncd\nef\ngh', wrap('abcdefgh', 2))
def test_string_with_space_at_column(self):
self.assertEqual('word\nword', wrap('word word', 5))
def test_after_word_boundary(self):
self.assertEqual('word\nword', wrap('word word', 6))
def test_three_words_after_first_space(self):
self.assertEqual('word\nword\nword', wrap('word word word', 6))
def test_three_words_after_second_space(self):
self.assertEqual('word word\nword', wrap('word word word', 10))
|
unlicense
|
Python
|
|
7922f168c1f844d7f1b69dfb383918d051cc312f
|
test when no server is present on the connection
|
jku/telepathy-gabble,jku/telepathy-gabble,Ziemin/telepathy-gabble,Ziemin/telepathy-gabble,Ziemin/telepathy-gabble,mlundblad/telepathy-gabble,jku/telepathy-gabble,Ziemin/telepathy-gabble,mlundblad/telepathy-gabble,mlundblad/telepathy-gabble
|
tests/twisted/search/no-server-property.py
|
tests/twisted/search/no-server-property.py
|
"""
Tests Contact Search channels to a simulated XEP-0055 service, without
passing the Server property
"""
import dbus
from twisted.words.xish import xpath
from gabbletest import exec_test, sync_stream, make_result_iq, acknowledge_iq, elem_iq, elem
from servicetest import EventPattern
from search_helper import call_create, answer_field_query
import constants as cs
import ns
JUD_SERVER = 'jud.localhost'
def server_discovered(q, bus, conn, stream):
conn.Connect()
_, iq_event, disco_event = q.expect_many(
EventPattern('dbus-signal', signal='StatusChanged',
args=[cs.CONN_STATUS_CONNECTED, cs.CSR_REQUESTED]),
EventPattern('stream-iq', to=None, query_ns='vcard-temp',
query_name='vCard'),
EventPattern('stream-iq', to='localhost', query_ns=ns.DISCO_ITEMS))
acknowledge_iq(stream, iq_event.stanza)
requests = dbus.Interface(conn, cs.CONN_IFACE_REQUESTS)
# no search server has been discovered yet. Requesting a search channel
# without specifying the Server will fail
call_create(q, requests, server=None)
e = q.expect('dbus-error', method='CreateChannel')
assert e.error.get_dbus_name() == cs.INVALID_ARGUMENT
# reply to IQ query
reply = make_result_iq(stream, disco_event.stanza)
query = xpath.queryForNodes('/iq/query', reply)[0]
item = query.addElement((None, 'item'))
item['jid'] = JUD_SERVER
stream.send(reply)
# wait for the disco#info query
event = q.expect('stream-iq', to=JUD_SERVER, query_ns=ns.DISCO_INFO)
reply = elem_iq(stream, 'result', id=event.stanza['id'], from_=JUD_SERVER)(
elem(ns.DISCO_INFO, 'query')(
elem('identity', category='directory', type='user', name='vCard User Search')(),
elem('feature', var=ns.SEARCH)()))
stream.send(reply)
# Make sure Gabble's received the reply
sync_stream(q, stream)
call_create(q, requests, server=None)
# JUD_SERVER is used as default
answer_field_query(q, stream, JUD_SERVER)
def no_server_discovered(q, bus, conn, stream):
conn.Connect()
_, iq_event, disco_event = q.expect_many(
EventPattern('dbus-signal', signal='StatusChanged',
args=[cs.CONN_STATUS_CONNECTED, cs.CSR_REQUESTED]),
EventPattern('stream-iq', to=None, query_ns='vcard-temp',
query_name='vCard'),
EventPattern('stream-iq', to='localhost', query_ns=ns.DISCO_ITEMS))
acknowledge_iq(stream, iq_event.stanza)
requests = dbus.Interface(conn, cs.CONN_IFACE_REQUESTS)
# reply to IQ query. No search server is present
reply = make_result_iq(stream, disco_event.stanza)
stream.send(reply)
# Make sure Gabble's received the reply
sync_stream(q, stream)
# This server doesn't have a search server. We can't create Search channel
# without specifying a Server property
call_create(q, requests, server=None)
e = q.expect('dbus-error', method='CreateChannel')
assert e.error.get_dbus_name() == cs.INVALID_ARGUMENT
if __name__ == '__main__':
exec_test(server_discovered)
exec_test(no_server_discovered)
|
"""
Tests Contact Search channels to a simulated XEP-0055 service, without
passing the Server property
"""
import dbus
from twisted.words.xish import xpath
from gabbletest import exec_test, sync_stream, make_result_iq, acknowledge_iq, elem_iq, elem
from servicetest import EventPattern
from search_helper import call_create, answer_field_query
import constants as cs
import ns
JUD_SERVER = 'jud.localhost'
def test(q, bus, conn, stream):
conn.Connect()
_, iq_event, disco_event = q.expect_many(
EventPattern('dbus-signal', signal='StatusChanged',
args=[cs.CONN_STATUS_CONNECTED, cs.CSR_REQUESTED]),
EventPattern('stream-iq', to=None, query_ns='vcard-temp',
query_name='vCard'),
EventPattern('stream-iq', to='localhost', query_ns=ns.DISCO_ITEMS))
acknowledge_iq(stream, iq_event.stanza)
requests = dbus.Interface(conn, cs.CONN_IFACE_REQUESTS)
# no search server has been discovered yet. Requesting a search channel
# without specifying the Server will fail
call_create(q, requests, server=None)
e = q.expect('dbus-error', method='CreateChannel')
assert e.error.get_dbus_name() == cs.INVALID_ARGUMENT
# reply to IQ query
reply = make_result_iq(stream, disco_event.stanza)
query = xpath.queryForNodes('/iq/query', reply)[0]
item = query.addElement((None, 'item'))
item['jid'] = JUD_SERVER
stream.send(reply)
# wait for the disco#info query
event = q.expect('stream-iq', to=JUD_SERVER, query_ns=ns.DISCO_INFO)
reply = elem_iq(stream, 'result', id=event.stanza['id'], from_=JUD_SERVER)(
elem(ns.DISCO_INFO, 'query')(
elem('identity', category='directory', type='user', name='vCard User Search')(),
elem('feature', var=ns.SEARCH)()))
stream.send(reply)
# Make sure Gabble's received the reply
sync_stream(q, stream)
call_create(q, requests, server=None)
# JUD_SERVER is used as default
answer_field_query(q, stream, JUD_SERVER)
if __name__ == '__main__':
exec_test(test)
|
lgpl-2.1
|
Python
|
0a02faf18fd3f05156df1b59dce83cee49a149f5
|
set version in finstance
|
awemulya/fieldsight-kobocat,awemulya/fieldsight-kobocat,awemulya/fieldsight-kobocat,awemulya/fieldsight-kobocat
|
onadata/apps/fsforms/management/commands/save_version_in_finstance.py
|
onadata/apps/fsforms/management/commands/save_version_in_finstance.py
|
from django.core.management.base import BaseCommand
from onadata.apps.fsforms.models import FInstance
class Command(BaseCommand):
help = 'Set version in FInstance for given user'
def add_arguments(self, parser):
parser.add_argument('username', type=str)
def handle(self, *args, **options):
# xls_directory = "/home/xls"
batchsize = options.get("batchsize", 100)
username = options['username']
stop = False
offset = 0
while stop is not True:
limit = offset + batchsize
instances = FInstance.objects.filter(instance__xform__user__username=username, version='')
if instances:
for instance in instances:
instance.set_version()
self.stdout.write(_("Updating instances from #{} to #{}\n").format(
instances[0].id,
instances[-1].id))
else:
stop = True
offset += batchsize
|
bsd-2-clause
|
Python
|
|
0e51e1b32fd51ea2f55ee64c762dcbc87159caa0
|
Add test_visibility_uvfits
|
SKA-ScienceDataProcessor/algorithm-reference-library,SKA-ScienceDataProcessor/algorithm-reference-library,SKA-ScienceDataProcessor/algorithm-reference-library,SKA-ScienceDataProcessor/algorithm-reference-library,SKA-ScienceDataProcessor/algorithm-reference-library
|
tests/processing_components/test_visibility_uvfits.py
|
tests/processing_components/test_visibility_uvfits.py
|
""" Unit tests for visibility operations
"""
import sys
import unittest
import logging
import numpy
from data_models.parameters import arl_path
from data_models.polarisation import PolarisationFrame
from processing_components.visibility.base import create_blockvisibility_from_uvfits, create_visibility_from_uvfits
from processing_components.visibility.operations import integrate_visibility_by_channel
from processing_components.imaging.base import invert_2d, create_image_from_visibility
from processing_components.visibility.coalesce import convert_visibility_to_blockvisibility, \
convert_blockvisibility_to_visibility
log = logging.getLogger(__name__)
log.setLevel(logging.DEBUG)
log.addHandler(logging.StreamHandler(sys.stdout))
log.addHandler(logging.StreamHandler(sys.stderr))
class TestCreateMS(unittest.TestCase):
def setUp(self):
return
# def test_create_list(self):
# uvfitsfile = arl_path("data/vis/xcasa.fits")
# self.vis = create_blockvisibility_from_uvfits(uvfitsfile)
# for v in self.vis:
# assert v.vis.data.shape[-1] == 4
# assert v.polarisation_frame.type == "circular"
def test_create_list_spectral(self):
uvfitsfile = arl_path("data/vis/ASKAP_example.fits")
vis_by_channel = list()
nchan_ave = 16
nchan = 192
for schan in range(0, nchan, nchan_ave):
max_chan = min(nchan, schan + nchan_ave)
v = create_visibility_from_uvfits(uvfitsfile, range(schan, max_chan))
vis_by_channel.append(v[0])
assert len(vis_by_channel) == 12
for v in vis_by_channel:
assert v.vis.data.shape[-1] == 4
assert v.polarisation_frame.type == "linear"
def test_create_list_spectral_average(self):
uvfitsfile = arl_path("data/vis/ASKAP_example.fits")
vis_by_channel = list()
nchan_ave = 16
nchan = 192
for schan in range(0, nchan, nchan_ave):
max_chan = min(nchan, schan + nchan_ave)
v = create_blockvisibility_from_uvfits(uvfitsfile, range(schan, max_chan))
vis_by_channel.append(integrate_visibility_by_channel(v[0]))
assert len(vis_by_channel) == 12
for v in vis_by_channel:
assert v.vis.data.shape[-1] == 4
assert v.vis.data.shape[-2] == 1
assert v.polarisation_frame.type == "linear"
def test_invert(self):
uvfitsfile = arl_path("data/vis/ASKAP_example.fits")
nchan_ave = 32
nchan = 192
for schan in range(0, nchan, nchan_ave):
max_chan = min(nchan, schan + nchan_ave)
bv = create_blockvisibility_from_uvfits(uvfitsfile, range(schan, max_chan))[0]
vis = convert_blockvisibility_to_visibility(bv)
from processing_components.visibility.operations import convert_visibility_to_stokesI
vis = convert_visibility_to_stokesI(vis)
print(vis)
model = create_image_from_visibility(vis, npixel=256, polarisation_frame=PolarisationFrame('stokesI'))
dirty, sumwt = invert_2d(vis, model, context='2d')
assert (numpy.max(numpy.abs(dirty.data))) > 0.0
assert dirty.shape == (nchan_ave, 1, 256, 256)
if __name__ == '__main__':
unittest.main()
|
apache-2.0
|
Python
|
|
b7f9bbd7afd64c702a2ea296b9e47cb5f563a4a2
|
Create valid-square.py
|
yiwen-luo/LeetCode,yiwen-luo/LeetCode,jaredkoontz/leetcode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,jaredkoontz/leetcode,tudennis/LeetCode---kamyu104-11-24-2015,yiwen-luo/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,jaredkoontz/leetcode,yiwen-luo/LeetCode,jaredkoontz/leetcode,jaredkoontz/leetcode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,kamyu104/LeetCode,kamyu104/LeetCode,kamyu104/LeetCode,yiwen-luo/LeetCode
|
Python/valid-square.py
|
Python/valid-square.py
|
# Time: O(1)
# Space: O(1)
# Given the coordinates of four points in 2D space,
# return whether the four points could construct a square.
#
# The coordinate (x,y) of a point is represented by an integer array with two integers.
#
# Example:
# Input: p1 = [0,0], p2 = [1,1], p3 = [1,0], p4 = [0,1]
# Output: True
# Note:
#
# All the input integers are in the range [-10000, 10000].
# A valid square has four equal sides with positive length
# and four equal angles (90-degree angles).
# Input points have no order.
class Solution(object):
def validSquare(self, p1, p2, p3, p4):
"""
:type p1: List[int]
:type p2: List[int]
:type p3: List[int]
:type p4: List[int]
:rtype: bool
"""
def dist(p1, p2):
return (p1[0] - p2[0]) ** 2 + (p1[1] - p2[1]) ** 2
counter = collections.Counter([dist(p1, p2), dist(p1, p3),\
dist(p1, p4), dist(p2, p3),\
dist(p2, p4), dist(p3, p4)])
return 0 not in counter and len(counter) == 2
|
mit
|
Python
|
|
9925f3a677b7a855a2242176139bde4ab9d62ba0
|
Add script which will compute the number of 'bonnes boites' per rome
|
StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite
|
labonneboite/scripts/nb_hirings/rome_nb_bonne_boite.py
|
labonneboite/scripts/nb_hirings/rome_nb_bonne_boite.py
|
import pandas as pd
if __name__ == '__main__':
df = pd.read_csv('prediction_per_company_per_rome2019-11-08.csv')
df_rome_nb_bonne_boite = df.groupby(['rome'])['is a bonne boite ?'].sum()
df_rome_nb_bonne_boite.to_csv('nb_bonne_boite_per_rome2019-11-089.csv')
|
agpl-3.0
|
Python
|
|
8f488365c9a4f14bf96eab089d6ac869b675c1b4
|
Add system.version functional test
|
photo/openphoto-python,photo/openphoto-python
|
tests/functional/test_system.py
|
tests/functional/test_system.py
|
import logging
import unittest
import trovebox
from tests.functional import test_base
class TestSystem(test_base.TestBase):
testcase_name = "system"
def setUp(self):
"""
Override the default setUp, since we don't need a populated database
"""
logging.info("\nRunning %s...", self.id())
def test_system_version(self):
"""
Check that the API version string is returned correctly
"""
client = trovebox.Trovebox(config_file=self.config_file)
version = client.system.version()
self.assertEqual(version["api"], "v%s" % trovebox.LATEST_API_VERSION)
@unittest.skip("Diagnostics don't work with the hosted site")
def test_system_diagnostics(self):
"""
Check that the system diagnostics can be performed
"""
client = trovebox.Trovebox(config_file=self.config_file)
diagnostics = client.system.diagnostics()
self.assertIn(diagnostics, "database")
|
apache-2.0
|
Python
|
|
234d53ed185976a65042c136426d7f05022a698d
|
add memnn test
|
EderSantana/seya
|
tests/test_memnn.py
|
tests/test_memnn.py
|
from __future__ import print_function
import unittest
from seya.layers.memnn2 import MemN2N
from keras.models import Sequential
from keras.layers.core import Lambda
from keras import backend as K
import numpy as np
class TestMemNN(unittest.TestCase):
"""Test seya.layers.memnn layer"""
def test_memnn(self):
def identity_init(shape, name=None):
dim = max(shape)
I = np.identity(dim)[:shape[0], :shape[1]]
return K.variable(I, name=name)
input_dim = 20
output_dim = 64
input_length = 9
memory_length = 7
facts = Sequential()
facts.add(Lambda(lambda x: x, input_shape=(memory_length, input_dim),
output_shape=(memory_length, input_dim)))
question = Sequential()
question.add(Lambda(lambda x: x, input_shape=(1, input_dim),
output_shape=(1, input_dim)))
memnn = MemN2N([facts, question], output_dim, input_dim,
input_length, memory_length,
output_shape=(output_dim,))
memnn.build()
model = Sequential()
model.add(memnn)
model.compile("sgd", "mse")
inp = np.random.randint(0, input_dim,
(1, memory_length, input_length))
que = np.random.randint(0, input_dim, (1, 1, input_length))
print(model.predict([inp, que]).shape)
if __name__ == '__main__':
unittest.main(verbosity=2)
|
bsd-3-clause
|
Python
|
|
b15bf5758539c0682ba607e74e34ef8869431a49
|
add tests for TETRA calculations
|
widdowquinn/pyani
|
tests/test_tetra.py
|
tests/test_tetra.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""test_tetra.py
Test tetra.py module.
These tests are intended to be run from the repository root using:
nosetests -v
print() statements will be caught by nosetests unless there is an
error. They can also be recovered with the -s option.
(c) The James Hutton Institute 2017
Author: Leighton Pritchard
Contact:
leighton.pritchard@hutton.ac.uk
Leighton Pritchard,
Information and Computing Sciences,
James Hutton Institute,
Errol Road,
Invergowrie,
Dundee,
DD6 9LH,
Scotland,
UK
The MIT License
Copyright (c) 2017 The James Hutton Institute
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import json
import os
import unittest
import pandas as pd
from nose.tools import (assert_equal, assert_false, assert_true)
from pandas.util.testing import (assert_frame_equal,)
from pyani import (tetra, )
def ordered(obj):
if isinstance(obj, dict):
return sorted((k, ordered(v)) for k, v in obj.items())
elif isinstance(obj, list):
return sorted(ordered(x) for x in obj)
else:
return obj
class TestTETRA(unittest.TestCase):
"""Class defining tests of TETRA algorithm."""
def setUp(self):
"""Define parameters and values for tests."""
self.indir = os.path.join('tests', 'test_input', 'tetra')
self.tgtdir = os.path.join('tests', 'test_targets', 'tetra')
self.seqdir = os.path.join('tests', 'test_input', 'sequences')
self.infile = os.path.join(self.seqdir, 'NC_002696.fna')
self.infiles = [os.path.join(self.seqdir, fname) for fname in
os.listdir(self.seqdir)]
def test_tetraclean(self):
"""detects unambiguous IUPAC symbols correctly."""
assert_false(tetra.tetra_clean('ACGTYACGTACNGTACGWTACGT'))
assert_true(tetra.tetra_clean('ACGTACGTACGTACGTACGTAC'))
def test_zscore(self):
"""TETRA Z-score calculated correctly."""
tetra_z = tetra.calculate_tetra_zscore(self.infile)
with open(os.path.join(self.tgtdir, 'zscore.json'), 'r') as ifh:
target = json.load(ifh)
assert_equal(ordered(tetra_z), ordered(target))
def test_correlations(self):
"""TETRA correlation calculated correctly."""
infiles = ordered(self.infiles)[:2] # only test a single correlation
corr = tetra.calculate_correlations(tetra.calculate_tetra_zscores(infiles))
target = pd.read_csv(os.path.join(self.tgtdir, 'correlation.tab'), sep='\t',
index_col=0)
assert_frame_equal(corr, target)
|
mit
|
Python
|
|
737ac4ddbc3d047fbf41e3d9f7cde20a53d8974a
|
add management command to decrypt eval data
|
project-callisto/callisto-core,SexualHealthInnovations/callisto-core,project-callisto/callisto-core,SexualHealthInnovations/callisto-core
|
evaluation/management/commands/decrypt_eval_data.py
|
evaluation/management/commands/decrypt_eval_data.py
|
from django.core.management.base import BaseCommand
import gnupg
import json
from django.conf import settings
import environ
env = environ.Env()
from evaluation.models import EvalRow
class Command(BaseCommand):
help='decrypts eval data. can only be run in local environments (import data from prod)'
def handle(self, *args, **options):
if not settings.DEBUG:
raise RuntimeError("Don't run this in production!!! Import encrypted prod data to your local environment")
eval_key = env('CALLISTO_EVAL_PRIVATE_KEY')
decrypted_eval_data = []
for row in EvalRow.objects.all():
decrypted_row = {'pk': row.pk,
'user': row.user_identifier,
'record': row.record_identifier,
'action': row.action,
'timestamp': row.timestamp.timestamp()}
gpg = gnupg.GPG()
gpg.import_keys(eval_key)
decrypted_eval_row = str(gpg.decrypt(row.row))
if decrypted_eval_row:
decrypted_row.update(json.loads(decrypted_eval_row))
decrypted_eval_data.append(decrypted_row)
with open('eval_data.json','w') as output:
json.dump(decrypted_eval_data, output)
self.stdout.write("Decrypted eval data written to eval_data.json")
|
agpl-3.0
|
Python
|
|
b977b1e6732255732843aaaad3e5c1f8e2b4d0e0
|
add unit tests for openquake/utils/general.py
|
gem/oq-engine,gem/oq-engine,gem/oq-engine,gem/oq-engine,gem/oq-engine
|
tests/utils_general_unittest.py
|
tests/utils_general_unittest.py
|
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2010-2011, GEM Foundation.
#
# OpenQuake is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License version 3
# only, as published by the Free Software Foundation.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License version 3 for more details
# (a copy is included in the LICENSE file that accompanied this code).
#
# You should have received a copy of the GNU Lesser General Public License
# version 3 along with OpenQuake. If not, see
# <http://www.gnu.org/licenses/lgpl-3.0.txt> for a copy of the LGPLv3 License.
"""
Test related to code in openquake/utils/general.py
"""
|
agpl-3.0
|
Python
|
|
e4e4a52318b857ce315a6f673e72b018e6501a83
|
Add a plotting example.
|
lesserwhirls/scipy-cwt,jasonmccampbell/scipy-refactor,scipy/scipy-svn,scipy/scipy-svn,lesserwhirls/scipy-cwt,scipy/scipy-svn,jasonmccampbell/scipy-refactor,scipy/scipy-svn,jasonmccampbell/scipy-refactor,lesserwhirls/scipy-cwt,jasonmccampbell/scipy-refactor,lesserwhirls/scipy-cwt
|
Lib/sandbox/pyem/examples/plotexamples.py
|
Lib/sandbox/pyem/examples/plotexamples.py
|
#! /usr/bin/env python
# Last Change: Mon Jun 11 03:00 PM 2007 J
# This is a simple test to check whether plotting ellipsoides of confidence and
# isodensity contours match
import numpy as N
from numpy.testing import set_package_path, restore_path
import pylab as P
set_package_path()
import pyem
restore_path()
# Generate a simple mixture model, plot its confidence ellipses + isodensity
# curves for both diagonal and full covariance matrices
d = 3
k = 3
dim = [0, 2]
# diag model
w, mu, va = pyem.GM.gen_param(d, k)
dgm = pyem.GM.fromvalues(w, mu, va)
# full model
w, mu, va = pyem.GM.gen_param(d, k, 'full', spread = 1)
fgm = pyem.GM.fromvalues(w, mu, va)
def plot_model(gm, dim):
X, Y, Z, V = gm.density_on_grid(dim = dim)
h = gm.plot(dim = dim)
[i.set_linestyle('-.') for i in h]
P.contour(X, Y, Z, V)
data = gm.sample(200)
P.plot(data[:, dim[0]], data[:,dim[1]], '.')
# Plot the contours and the ellipsoids of confidence
P.subplot(2, 1, 1)
plot_model(dgm, dim)
P.subplot(2, 1, 2)
plot_model(fgm, dim)
P.show()
|
bsd-3-clause
|
Python
|
|
0caef8ed3bcf369ffd61f83b06f971b31ae0fb70
|
test unittest
|
ojab/bnw,un-def/bnw,un-def/bnw,ojab/bnw,un-def/bnw,ojab/bnw,un-def/bnw,stiletto/bnw,stiletto/bnw,ojab/bnw,stiletto/bnw,stiletto/bnw
|
bnw_core/test_delayed_global.py
|
bnw_core/test_delayed_global.py
|
# coding: utf-8
from delayed_global import DelayedGlobal
def test_delayed_global():
a = DelayedGlobal()
b = dict({100:200})
try:
c = a.get(100)
except AttributeError:
pass
else:
assert 0, "Got result from empty DelayedGlobal"
a.register(b)
assert a.get(100) == 200
|
bsd-2-clause
|
Python
|
|
ae6f5556cc37d72fff49c76932b94ac8a65bcfbf
|
make an example python'
|
Alonreznik/dynamodb-json
|
example.py
|
example.py
|
import time
import uuid
from datetime import datetime
from decimal import Decimal
from json_util import dumps, loads
json_ = {"MyString": "a",
"num": 4,
"MyBool": False,
"my_dict": {"my_date": datetime.utcnow()},
"MyNone": None,
"MyZero": 0,
"myDecimal": Decimal("19.2"), # converts Decimal to float, load it as float
"myLong": long(1938475658493),
"MyNestedDict": {
"my_other_nested": {
"name": "John",
"surname": "Lennon",
"MyOtherNone": None,
"floaty": float(29.4),
"myList": [1, 3, 4, 5, 6, "This Is Sparta!"],
"mySet": {1, 3, 4, 5, 6}, # converts set to list, returns as list
"myUUID": uuid.uuid4(), # converts uuid to string, loads it as string
"time": time.time() # converts it to seconds python float, loads it as float
}
}
}
dynamodb_json = dumps(json_)
print dynamodb_json
print loads(dynamodb_json)
|
mpl-2.0
|
Python
|
|
405d33789a70a04ebdcca491ca2a749e9e48ddfd
|
Add example
|
ABorgna/BluePoV-PC,ABorgna/BluePoV-PC,ABorgna/BluePoV-PC
|
example.py
|
example.py
|
import pygame
import bluePoV
# Varia el color regularmente
x,y = (480,64)
pendiente = 4
# Pygame inits & variables
pygame.init()
pygame.display.set_mode((x,y))
disp = pygame.display.get_surface()
clock = pygame.time.Clock()
# BluePoV init & variables
print ("Port? (default /dev/ttyUSB0)")
port = input()
if not port:
port = "/dev/ttyUSB0"
sckt = bluePoV.SerialSocket()
sckt.connect(port,115200)
driver = bluePoV.Driver(sckt,[x,y],depth=1)
# Colores
r = 0
g = 255
b = 0
# # Pendientes
pR = pendiente
pG = 0
pB = 0
while True:
for event in pygame.event.get():
if event.type == pygame.QUIT:
quit()
elif event.type == pygame.KEYDOWN:
if event.key == pygame.K_q:
quit()
r += pR
g += pG
b += pB
if 255 < r or r < 0 or 255 < g or g < 0 or 255 < b or b < 0:
r = 255 if r >= 255 else 0
g = 255 if g >= 255 else 0
b = 255 if b >= 255 else 0
pTemp = pB
pB = -pG
pG = -pR
pR = -pTemp
disp.fill([r,g,b])
driver.blit(disp)
pygame.display.flip()
clock.tick(10)
|
mit
|
Python
|
|
3a200bbc447ee05c650bbd592a331b2817c9a498
|
Update create_address_doc_from_address_field_in_company.py
|
indictranstech/erpnext,indictranstech/erpnext,geekroot/erpnext,indictranstech/erpnext,indictranstech/erpnext,Aptitudetech/ERPNext,geekroot/erpnext,gsnbng/erpnext,gsnbng/erpnext,geekroot/erpnext,gsnbng/erpnext,geekroot/erpnext,gsnbng/erpnext
|
erpnext/patches/v8_0/create_address_doc_from_address_field_in_company.py
|
erpnext/patches/v8_0/create_address_doc_from_address_field_in_company.py
|
# Copyright (c) 2017, Frappe and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
# new field address_html is created in place of address field for the company's address in PR #8754 (without patch)
# so here is the patch for moving the address details in the address doc
company_list = []
if 'address' in frappe.db.get_table_columns('Company'):
company_list = frappe.db.sql('''select name, address from `tabCompany`
where address is not null and address != ""''', as_dict=1)
for company in company_list:
add_list = company.address.split(" ")
if ',' in company.address:
add_list = company.address.rpartition(',')
elif ' ' in company.address:
add_list = company.address.rpartition(' ')
else:
add_list = [company.address, None, company.address]
doc = frappe.get_doc({
"doctype":"Address",
"address_line1": add_list[0],
"city": add_list[2],
"links": [{
"link_doctype": "Company",
"link_name": company.name
}]
})
doc.save()
|
# Copyright (c) 2017, Frappe and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
# new field address_html is created in place of address field for the company's address in PR #8754 (without patch)
# so here is the patch for moving the address details in the address doc
company_list = []
if 'address' in frappe.db.get_table_columns('Company'):
company_list = frappe.db.sql('''select name, address from `tabCompany` where address is not null''', as_dict=1)
for company in company_list:
add_list = company.address.split(" ")
if ',' in company.address:
add_list = company.address.rpartition(',')
elif ' ' in company.address:
add_list = company.address.rpartition(' ')
else:
add_list = [company.address, None, company.address]
doc = frappe.get_doc({
"doctype":"Address",
"address_line1": add_list[0],
"city": add_list[2],
"links": [{
"link_doctype": "Company",
"link_name": company.name
}]
})
doc.save()
|
agpl-3.0
|
Python
|
a6c8176e3f4602e846888293093fc64b7b20233b
|
Add cmd to force send of cancelled repeat records
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
corehq/motech/repeaters/management/commands/send_cancelled_records.py
|
corehq/motech/repeaters/management/commands/send_cancelled_records.py
|
import csv
import datetime
import re
import time
from django.core.management.base import BaseCommand
from corehq.motech.repeaters.const import RECORD_CANCELLED_STATE
from corehq.motech.repeaters.dbaccessors import iter_repeat_records_by_domain
class Command(BaseCommand):
help = """
Send cancelled repeat records. You may optionally specify a regex to
filter records using --include or --exclude, an a sleep time with --sleep
"""
def add_arguments(self, parser):
parser.add_argument('domain')
parser.add_argument('repeater_id')
parser.add_argument(
'--include',
dest='include_regex',
help=("Regex that will be applied to a record's 'failure_reason' to "
"determine whether to include it."),
)
parser.add_argument(
'--exclude',
dest='exclude_regex',
help=("Regex that will be applied to a record's 'failure_reason' to "
"determine whether to exclude it."),
)
parser.add_argument(
'--sleep',
dest='sleep_time',
help="Time in seconds to sleep between each request.",
)
def handle(self, domain, repeater_id, *args, **options):
sleep_time = options.get('sleep_time')
include_regex = options.get('include_regex')
exclude_regex = options.get('exclude_regex')
if include_regex and exclude_regex:
print "You may not specify both include and exclude"
def meets_filter(record):
if include_regex:
if not record.failure_reason:
return False
return bool(re.search(include_regex, record.failure_reason))
elif exclude_regex:
if not record.failure_reason:
return True
return not bool(re.search(exclude_regex, record.failure_reason))
return True # No filter applied
records = filter(
meets_filter,
iter_repeat_records_by_domain(domain, repeater_id=repeater_id, state=RECORD_CANCELLED_STATE)
)
total_records = len(records)
print "Found {} matching records. Requeue them?".format(total_records)
if not raw_input("(y/n)") == 'y':
print "Aborting"
return
log = [('record_id', 'payload_id', 'state', 'failure_reason')]
for i, record in enumerate(records):
try:
record.fire(force_send=True)
except Exception as e:
print "{}/{}: {} {}".format(i, total_records, 'EXCEPTION', repr(e))
log.append((record._id, record.payload_id, record.state, repr(e)))
else:
print "{}/{}: {}".format(i, total_records, record.state)
log.append((record._id, record.payload_id, record.state, record.failure_reason))
if sleep_time:
time.sleep(float(sleep_time))
filename = "sent_repeat_records-{}.csv".format(datetime.datetime.utcnow().isoformat())
print "Writing log of changes to {}".format(filename)
with open(filename, 'w') as f:
writer = csv.writer(f)
writer.writerows(log)
|
bsd-3-clause
|
Python
|
|
bf66372b2b5b49ba4a93d8ac4f573ceb7857f5b8
|
Fix attach in case of multiple threads.
|
kool79/intellij-community,da1z/intellij-community,retomerz/intellij-community,blademainer/intellij-community,SerCeMan/intellij-community,retomerz/intellij-community,ol-loginov/intellij-community,holmes/intellij-community,kool79/intellij-community,lucafavatella/intellij-community,xfournet/intellij-community,adedayo/intellij-community,alphafoobar/intellij-community,fitermay/intellij-community,petteyg/intellij-community,adedayo/intellij-community,lucafavatella/intellij-community,lucafavatella/intellij-community,youdonghai/intellij-community,caot/intellij-community,asedunov/intellij-community,fnouama/intellij-community,youdonghai/intellij-community,TangHao1987/intellij-community,asedunov/intellij-community,clumsy/intellij-community,FHannes/intellij-community,da1z/intellij-community,samthor/intellij-community,pwoodworth/intellij-community,TangHao1987/intellij-community,idea4bsd/idea4bsd,retomerz/intellij-community,xfournet/intellij-community,clumsy/intellij-community,pwoodworth/intellij-community,youdonghai/intellij-community,alphafoobar/intellij-community,mglukhikh/intellij-community,ol-loginov/intellij-community,da1z/intellij-community,wreckJ/intellij-community,caot/intellij-community,allotria/intellij-community,clumsy/intellij-community,vladmm/intellij-community,adedayo/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,orekyuu/intellij-community,kdwink/intellij-community,hurricup/intellij-community,tmpgit/intellij-community,youdonghai/intellij-community,blademainer/intellij-community,MER-GROUP/intellij-community,tmpgit/intellij-community,petteyg/intellij-community,fitermay/intellij-community,diorcety/intellij-community,MER-GROUP/intellij-community,apixandru/intellij-community,apixandru/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,nicolargo/intellij-community,suncycheng/intellij-community,signed/intellij-community,supersven/intellij-community,blademainer/intellij-community,FHannes/intellij-community,amith01994/in
tellij-community,gnuhub/intellij-community,lucafavatella/intellij-community,semonte/intellij-community,ThiagoGarciaAlves/intellij-community,SerCeMan/intellij-community,blademainer/intellij-community,wreckJ/intellij-community,Lekanich/intellij-community,muntasirsyed/intellij-community,ivan-fedorov/intellij-community,xfournet/intellij-community,TangHao1987/intellij-community,ivan-fedorov/intellij-community,kool79/intellij-community,lucafavatella/intellij-community,clumsy/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,TangHao1987/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,ryano144/intellij-community,SerCeMan/intellij-community,dslomov/intellij-community,FHannes/intellij-community,Lekanich/intellij-community,tmpgit/intellij-community,alphafoobar/intellij-community,retomerz/intellij-community,petteyg/intellij-community,da1z/intellij-community,gnuhub/intellij-community,salguarnieri/intellij-community,mglukhikh/intellij-community,akosyakov/intellij-community,blademainer/intellij-community,vladmm/intellij-community,MichaelNedzelsky/intellij-community,allotria/intellij-community,supersven/intellij-community,amith01994/intellij-community,gnuhub/intellij-community,Lekanich/intellij-community,diorcety/intellij-community,dslomov/intellij-community,fnouama/intellij-community,fitermay/intellij-community,supersven/intellij-community,kool79/intellij-community,idea4bsd/idea4bsd,samthor/intellij-community,holmes/intellij-community,MichaelNedzelsky/intellij-community,signed/intellij-community,youdonghai/intellij-community,salguarnieri/intellij-community,da1z/intellij-community,robovm/robovm-studio,fengbaicanhe/intellij-community,tmpgit/intellij-community,Lekanich/intellij-community,holmes/intellij-community,diorcety/intellij-community,adedayo/intellij-community,alphafoobar/intellij-community,caot/intellij-community,semonte/intellij-community,hurricup/intellij-community,amith01994/intellij-community,clumsy/intellij-c
ommunity,supersven/intellij-community,mglukhikh/intellij-community,kool79/intellij-community,ThiagoGarciaAlves/intellij-community,alphafoobar/intellij-community,ibinti/intellij-community,idea4bsd/idea4bsd,samthor/intellij-community,gnuhub/intellij-community,adedayo/intellij-community,amith01994/intellij-community,ftomassetti/intellij-community,nicolargo/intellij-community,robovm/robovm-studio,tmpgit/intellij-community,samthor/intellij-community,xfournet/intellij-community,ol-loginov/intellij-community,michaelgallacher/intellij-community,suncycheng/intellij-community,robovm/robovm-studio,Lekanich/intellij-community,dslomov/intellij-community,SerCeMan/intellij-community,lucafavatella/intellij-community,akosyakov/intellij-community,da1z/intellij-community,michaelgallacher/intellij-community,hurricup/intellij-community,MER-GROUP/intellij-community,ol-loginov/intellij-community,izonder/intellij-community,nicolargo/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,holmes/intellij-community,robovm/robovm-studio,pwoodworth/intellij-community,asedunov/intellij-community,orekyuu/intellij-community,ol-loginov/intellij-community,kool79/intellij-community,fnouama/intellij-community,holmes/intellij-community,muntasirsyed/intellij-community,semonte/intellij-community,xfournet/intellij-community,retomerz/intellij-community,ibinti/intellij-community,samthor/intellij-community,izonder/intellij-community,xfournet/intellij-community,wreckJ/intellij-community,caot/intellij-community,muntasirsyed/intellij-community,Distrotech/intellij-community,ahb0327/intellij-community,wreckJ/intellij-community,slisson/intellij-community,FHannes/intellij-community,ryano144/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,ryano144/intellij-community,TangHao1987/intellij-community,fengbaicanhe/intellij-community,fitermay/intellij-community,fnouama/intellij-community,Distrotech/intellij-community,supersven/intellij-community,ivan-fedor
ov/intellij-community,ahb0327/intellij-community,dslomov/intellij-community,ftomassetti/intellij-community,fitermay/intellij-community,samthor/intellij-community,semonte/intellij-community,allotria/intellij-community,muntasirsyed/intellij-community,ryano144/intellij-community,samthor/intellij-community,ryano144/intellij-community,MER-GROUP/intellij-community,vvv1559/intellij-community,pwoodworth/intellij-community,Lekanich/intellij-community,supersven/intellij-community,MER-GROUP/intellij-community,da1z/intellij-community,da1z/intellij-community,akosyakov/intellij-community,vvv1559/intellij-community,ivan-fedorov/intellij-community,petteyg/intellij-community,TangHao1987/intellij-community,Lekanich/intellij-community,samthor/intellij-community,wreckJ/intellij-community,signed/intellij-community,akosyakov/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,izonder/intellij-community,michaelgallacher/intellij-community,akosyakov/intellij-community,izonder/intellij-community,MichaelNedzelsky/intellij-community,Distrotech/intellij-community,ftomassetti/intellij-community,muntasirsyed/intellij-community,FHannes/intellij-community,ftomassetti/intellij-community,MER-GROUP/intellij-community,samthor/intellij-community,Distrotech/intellij-community,adedayo/intellij-community,michaelgallacher/intellij-community,mglukhikh/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,allotria/intellij-community,nicolargo/intellij-community,vvv1559/intellij-community,retomerz/intellij-community,orekyuu/intellij-community,michaelgallacher/intellij-community,tmpgit/intellij-community,amith01994/intellij-community,wreckJ/intellij-community,diorcety/intellij-community,fitermay/intellij-community,youdonghai/intellij-community,Lekanich/intellij-community,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,petteyg/intellij-community,fnouama/intellij-community,ThiagoGarciaAlves/intellij-community,amith01994/intellij-community,nic
olargo/intellij-community,dslomov/intellij-community,ol-loginov/intellij-community,SerCeMan/intellij-community,hurricup/intellij-community,suncycheng/intellij-community,allotria/intellij-community,slisson/intellij-community,mglukhikh/intellij-community,michaelgallacher/intellij-community,ivan-fedorov/intellij-community,adedayo/intellij-community,pwoodworth/intellij-community,asedunov/intellij-community,ivan-fedorov/intellij-community,kool79/intellij-community,FHannes/intellij-community,MichaelNedzelsky/intellij-community,fitermay/intellij-community,ryano144/intellij-community,retomerz/intellij-community,amith01994/intellij-community,suncycheng/intellij-community,akosyakov/intellij-community,tmpgit/intellij-community,robovm/robovm-studio,blademainer/intellij-community,jagguli/intellij-community,signed/intellij-community,MichaelNedzelsky/intellij-community,apixandru/intellij-community,allotria/intellij-community,clumsy/intellij-community,blademainer/intellij-community,akosyakov/intellij-community,youdonghai/intellij-community,jagguli/intellij-community,ftomassetti/intellij-community,petteyg/intellij-community,apixandru/intellij-community,idea4bsd/idea4bsd,ivan-fedorov/intellij-community,robovm/robovm-studio,FHannes/intellij-community,wreckJ/intellij-community,vladmm/intellij-community,mglukhikh/intellij-community,TangHao1987/intellij-community,hurricup/intellij-community,alphafoobar/intellij-community,diorcety/intellij-community,dslomov/intellij-community,hurricup/intellij-community,mglukhikh/intellij-community,nicolargo/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,holmes/intellij-community,salguarnieri/intellij-community,MichaelNedzelsky/intellij-community,youdonghai/intellij-community,MER-GROUP/intellij-community,FHannes/intellij-community,MER-GROUP/intellij-community,ryano144/intellij-community,SerCeMan/intellij-community,allotria/intellij-community,alphafoobar/intellij-community,pwoodworth/intellij-community,michaelgallacher/intellij-com
munity,izonder/intellij-community,petteyg/intellij-community,ahb0327/intellij-community,jagguli/intellij-community,da1z/intellij-community,hurricup/intellij-community,vvv1559/intellij-community,diorcety/intellij-community,kool79/intellij-community,wreckJ/intellij-community,dslomov/intellij-community,dslomov/intellij-community,Distrotech/intellij-community,Distrotech/intellij-community,vladmm/intellij-community,michaelgallacher/intellij-community,slisson/intellij-community,idea4bsd/idea4bsd,signed/intellij-community,TangHao1987/intellij-community,pwoodworth/intellij-community,idea4bsd/idea4bsd,clumsy/intellij-community,retomerz/intellij-community,xfournet/intellij-community,alphafoobar/intellij-community,robovm/robovm-studio,vladmm/intellij-community,ftomassetti/intellij-community,slisson/intellij-community,ivan-fedorov/intellij-community,blademainer/intellij-community,SerCeMan/intellij-community,ahb0327/intellij-community,jagguli/intellij-community,robovm/robovm-studio,MER-GROUP/intellij-community,orekyuu/intellij-community,ryano144/intellij-community,suncycheng/intellij-community,TangHao1987/intellij-community,retomerz/intellij-community,ryano144/intellij-community,blademainer/intellij-community,ibinti/intellij-community,Distrotech/intellij-community,clumsy/intellij-community,vladmm/intellij-community,gnuhub/intellij-community,asedunov/intellij-community,fnouama/intellij-community,ibinti/intellij-community,ibinti/intellij-community,fengbaicanhe/intellij-community,TangHao1987/intellij-community,fengbaicanhe/intellij-community,amith01994/intellij-community,kdwink/intellij-community,ol-loginov/intellij-community,samthor/intellij-community,pwoodworth/intellij-community,retomerz/intellij-community,ThiagoGarciaAlves/intellij-community,fitermay/intellij-community,amith01994/intellij-community,amith01994/intellij-community,da1z/intellij-community,fengbaicanhe/intellij-community,slisson/intellij-community,kool79/intellij-community,jagguli/intellij-community,supersven/intell
ij-community,vvv1559/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,signed/intellij-community,salguarnieri/intellij-community,ahb0327/intellij-community,fnouama/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,Lekanich/intellij-community,apixandru/intellij-community,kdwink/intellij-community,suncycheng/intellij-community,gnuhub/intellij-community,fnouama/intellij-community,ol-loginov/intellij-community,signed/intellij-community,diorcety/intellij-community,Lekanich/intellij-community,ahb0327/intellij-community,akosyakov/intellij-community,asedunov/intellij-community,fnouama/intellij-community,lucafavatella/intellij-community,semonte/intellij-community,gnuhub/intellij-community,izonder/intellij-community,SerCeMan/intellij-community,tmpgit/intellij-community,kdwink/intellij-community,orekyuu/intellij-community,slisson/intellij-community,ftomassetti/intellij-community,salguarnieri/intellij-community,pwoodworth/intellij-community,fengbaicanhe/intellij-community,ibinti/intellij-community,diorcety/intellij-community,youdonghai/intellij-community,hurricup/intellij-community,samthor/intellij-community,hurricup/intellij-community,apixandru/intellij-community,kdwink/intellij-community,FHannes/intellij-community,akosyakov/intellij-community,SerCeMan/intellij-community,kdwink/intellij-community,asedunov/intellij-community,holmes/intellij-community,kdwink/intellij-community,Distrotech/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,MichaelNedzelsky/intellij-community,robovm/robovm-studio,youdonghai/intellij-community,ol-loginov/intellij-community,semonte/intellij-community,diorcety/intellij-community,nicolargo/intellij-community,idea4bsd/idea4bsd,michaelgallacher/intellij-community,jagguli/intellij-community,orekyuu/intellij-community,slisson/intellij-community,ahb0327/intellij-community,alphafoobar/intellij-community,blademainer/intellij-community,orekyuu/intellij-community,Lekanich/intellij-commu
nity,orekyuu/intellij-community,retomerz/intellij-community,mglukhikh/intellij-community,jagguli/intellij-community,signed/intellij-community,pwoodworth/intellij-community,semonte/intellij-community,salguarnieri/intellij-community,adedayo/intellij-community,hurricup/intellij-community,kdwink/intellij-community,youdonghai/intellij-community,holmes/intellij-community,allotria/intellij-community,fnouama/intellij-community,MichaelNedzelsky/intellij-community,vvv1559/intellij-community,supersven/intellij-community,xfournet/intellij-community,ibinti/intellij-community,vladmm/intellij-community,idea4bsd/idea4bsd,robovm/robovm-studio,idea4bsd/idea4bsd,MER-GROUP/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,supersven/intellij-community,kool79/intellij-community,pwoodworth/intellij-community,ahb0327/intellij-community,slisson/intellij-community,slisson/intellij-community,MichaelNedzelsky/intellij-community,ahb0327/intellij-community,semonte/intellij-community,caot/intellij-community,apixandru/intellij-community,signed/intellij-community,Distrotech/intellij-community,salguarnieri/intellij-community,FHannes/intellij-community,nicolargo/intellij-community,vladmm/intellij-community,vladmm/intellij-community,fengbaicanhe/intellij-community,vladmm/intellij-community,retomerz/intellij-community,fitermay/intellij-community,muntasirsyed/intellij-community,xfournet/intellij-community,petteyg/intellij-community,dslomov/intellij-community,hurricup/intellij-community,fengbaicanhe/intellij-community,vladmm/intellij-community,kool79/intellij-community,suncycheng/intellij-community,fengbaicanhe/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,nicolargo/intellij-community,robovm/robovm-studio,amith01994/intellij-community,caot/intellij-community,fitermay/intellij-community,tmpgit/intellij-community,FHannes/intellij-community,akosyakov/intellij-community,blademainer/intellij-community,signed/intellij-community,muntasirsyed/intellij-commun
ity,xfournet/intellij-community,semonte/intellij-community,apixandru/intellij-community,asedunov/intellij-community,ol-loginov/intellij-community,signed/intellij-community,hurricup/intellij-community,ftomassetti/intellij-community,muntasirsyed/intellij-community,salguarnieri/intellij-community,jagguli/intellij-community,blademainer/intellij-community,supersven/intellij-community,izonder/intellij-community,SerCeMan/intellij-community,Lekanich/intellij-community,petteyg/intellij-community,ryano144/intellij-community,alphafoobar/intellij-community,caot/intellij-community,orekyuu/intellij-community,MichaelNedzelsky/intellij-community,Distrotech/intellij-community,salguarnieri/intellij-community,jagguli/intellij-community,ivan-fedorov/intellij-community,ivan-fedorov/intellij-community,jagguli/intellij-community,MichaelNedzelsky/intellij-community,muntasirsyed/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,diorcety/intellij-community,lucafavatella/intellij-community,ahb0327/intellij-community,fnouama/intellij-community,michaelgallacher/intellij-community,alphafoobar/intellij-community,adedayo/intellij-community,diorcety/intellij-community,petteyg/intellij-community,lucafavatella/intellij-community,idea4bsd/idea4bsd,dslomov/intellij-community,akosyakov/intellij-community,ftomassetti/intellij-community,wreckJ/intellij-community,gnuhub/intellij-community,orekyuu/intellij-community,gnuhub/intellij-community,dslomov/intellij-community,tmpgit/intellij-community,pwoodworth/intellij-community,idea4bsd/idea4bsd,nicolargo/intellij-community,clumsy/intellij-community,ThiagoGarciaAlves/intellij-community,ftomassetti/intellij-community,supersven/intellij-community,izonder/intellij-community,adedayo/intellij-community,Distrotech/intellij-community,wreckJ/intellij-community,michaelgallacher/intellij-community,apixandru/intellij-community,izonder/intellij-community,idea4bsd/idea4bsd,da1z/intellij-community,izonder/intellij-community,allotria/intellij-comm
unity,fitermay/intellij-community,michaelgallacher/intellij-community,FHannes/intellij-community,ol-loginov/intellij-community,kdwink/intellij-community,holmes/intellij-community,lucafavatella/intellij-community,caot/intellij-community,holmes/intellij-community,izonder/intellij-community,nicolargo/intellij-community,semonte/intellij-community,dslomov/intellij-community,ivan-fedorov/intellij-community,ol-loginov/intellij-community,wreckJ/intellij-community,supersven/intellij-community,alphafoobar/intellij-community,caot/intellij-community,semonte/intellij-community,muntasirsyed/intellij-community,gnuhub/intellij-community,tmpgit/intellij-community,adedayo/intellij-community,SerCeMan/intellij-community,ftomassetti/intellij-community,hurricup/intellij-community,caot/intellij-community,vvv1559/intellij-community,FHannes/intellij-community,orekyuu/intellij-community,ibinti/intellij-community,slisson/intellij-community,lucafavatella/intellij-community,TangHao1987/intellij-community,MichaelNedzelsky/intellij-community,MER-GROUP/intellij-community,jagguli/intellij-community,slisson/intellij-community,akosyakov/intellij-community,wreckJ/intellij-community,fitermay/intellij-community,ryano144/intellij-community,jagguli/intellij-community,diorcety/intellij-community,gnuhub/intellij-community,muntasirsyed/intellij-community,apixandru/intellij-community,ibinti/intellij-community,fitermay/intellij-community,amith01994/intellij-community,retomerz/intellij-community,allotria/intellij-community,asedunov/intellij-community,holmes/intellij-community,vladmm/intellij-community,muntasirsyed/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,nicolargo/intellij-community,lucafavatella/intellij-community,kool79/intellij-community,salguarnieri/intellij-community,apixandru/intellij-community,kdwink/intellij-community,fnouama/intellij-community,lucafavatella/intellij-community,idea4bsd/idea4bsd,kdwink/intellij-community,ibinti/intellij-community,signed/intellij-community
,orekyuu/intellij-community,holmes/intellij-community,MER-GROUP/intellij-community,ftomassetti/intellij-community,clumsy/intellij-community,asedunov/intellij-community,clumsy/intellij-community,ryano144/intellij-community,ahb0327/intellij-community,tmpgit/intellij-community,ivan-fedorov/intellij-community,samthor/intellij-community,asedunov/intellij-community,youdonghai/intellij-community,apixandru/intellij-community,ibinti/intellij-community,da1z/intellij-community,Distrotech/intellij-community,petteyg/intellij-community,TangHao1987/intellij-community,youdonghai/intellij-community,slisson/intellij-community,ahb0327/intellij-community,suncycheng/intellij-community,gnuhub/intellij-community,signed/intellij-community,ThiagoGarciaAlves/intellij-community,salguarnieri/intellij-community,salguarnieri/intellij-community,vvv1559/intellij-community,fengbaicanhe/intellij-community,fengbaicanhe/intellij-community,robovm/robovm-studio,fengbaicanhe/intellij-community,clumsy/intellij-community,ibinti/intellij-community,caot/intellij-community,kdwink/intellij-community,adedayo/intellij-community,caot/intellij-community,petteyg/intellij-community,izonder/intellij-community,SerCeMan/intellij-community
|
python/helpers/pydev/pydevd_attach_to_process/linux/lldb_threads_settrace.py
|
python/helpers/pydev/pydevd_attach_to_process/linux/lldb_threads_settrace.py
|
# This file is meant to be run inside lldb as a command after
# the attach_linux.dylib dll has already been loaded to settrace for all threads.
def __lldb_init_module(debugger, internal_dict):
    """Entry point invoked by LLDB when this file is imported as a
    command script.

    Iterates over every thread of the currently selected process and
    evaluates ``SetSysTraceFunc`` (provided by the attach library that,
    per the file header, has already been loaded) in each thread's
    selected frame, so the trace function is installed on all threads.

    :param debugger: ``lldb.SBDebugger`` instance supplied by LLDB.
    :param internal_dict: script-session dict supplied by LLDB (unused).
    """
    # Command Initialization code goes here
    print('Startup LLDB in Python!')
    import lldb
    try:
        # Arguments forwarded to SetSysTraceFunc (ints used as flags).
        show_debug_info = 1
        is_debug = 0
        target = debugger.GetSelectedTarget()
        if target:
            process = target.GetProcess()
            if process:
                for thread in process:
                    # Get the first frame
                    print('Thread %s, suspended %s\n'%(thread, thread.IsStopped()))
                    process.SetSelectedThread(thread)
                    if not thread.IsStopped():
                        # Force the thread to stop before evaluating the
                        # expression in its frame; the error (if any) is
                        # only printed, not acted upon.
                        error = process.Stop()
                        print(error)
                    if thread:
                        frame = thread.GetSelectedFrame()
                        if frame:
                            print('Will settrace in: %s' % (frame,))
                            # eDynamicCanRunTarget: LLDB evaluation option
                            # allowing the expression to run code in the
                            # target process.
                            res = frame.EvaluateExpression("(int) SetSysTraceFunc(%s, %s)" % (
                                show_debug_info, is_debug), lldb.eDynamicCanRunTarget)
                            error = res.GetError()
                            if error:
                                print(error)
                    # Let the thread run again after the trace is set.
                    thread.Resume()
    except:
        # Never let an exception escape into LLDB's command loader;
        # just dump the traceback to the console.
        import traceback;traceback.print_exc()
|
# This file is meant to be run inside lldb as a command after
# the attach_linux.dylib dll has already been loaded to settrace for all threads.
def __lldb_init_module(debugger, internal_dict):
    """Entry point invoked by LLDB when this file is imported as a
    command script.

    For every thread of the selected process, evaluates
    ``SetSysTraceFunc`` (from the attach library that, per the file
    header, has already been loaded) inside the frame at index
    ``GetNumFrames() - 1`` — i.e. the last frame in LLDB's ordering.

    :param debugger: ``lldb.SBDebugger`` instance supplied by LLDB.
    :param internal_dict: script-session dict supplied by LLDB (unused).
    """
    # Command Initialization code goes here
    print('Startup LLDB in Python!')
    try:
        # Arguments forwarded to SetSysTraceFunc (ints used as flags).
        show_debug_info = 0
        is_debug = 0
        target = debugger.GetSelectedTarget()
        if target:
            process = target.GetProcess()
            if process:
                for t in process:
                    # Get the first frame
                    frame = t.GetFrameAtIndex (t.GetNumFrames()-1)
                    if frame:
                        print('Will settrace in: %s' % (frame,))
                        # Result/error of the evaluation is deliberately
                        # ignored here (unlike the newer variant of this
                        # script).
                        frame.EvaluateExpression("expr (int) SetSysTraceFunc(%s, %s);" % (
                            show_debug_info, is_debug))
    except:
        # Never let an exception escape into LLDB's command loader;
        # just dump the traceback to the console.
        import traceback;traceback.print_exc()
|
apache-2.0
|
Python
|
1f6595acc01c6dfda899886388b4309f3d8c855b
|
add index to fixed_ips
|
phenoxim/nova,cloudbau/nova,leilihh/nova,yrobla/nova,hanlind/nova,zzicewind/nova,maoy/zknova,rajalokan/nova,shail2810/nova,redhat-openstack/nova,sridevikoushik31/openstack,takeshineshiro/nova,Tehsmash/nova,blueboxgroup/nova,bclau/nova,berrange/nova,plumgrid/plumgrid-nova,badock/nova,SUSE-Cloud/nova,jeffrey4l/nova,bigswitch/nova,varunarya10/nova_test_latest,aristanetworks/arista-ovs-nova,saleemjaveds/https-github.com-openstack-nova,double12gzh/nova,qwefi/nova,OpenAcademy-OpenStack/nova-scheduler,shahar-stratoscale/nova,TwinkleChawla/nova,zzicewind/nova,ewindisch/nova,dawnpower/nova,bgxavier/nova,angdraug/nova,apporc/nova,gspilio/nova,petrutlucian94/nova,projectcalico/calico-nova,BeyondTheClouds/nova,watonyweng/nova,yosshy/nova,sebrandon1/nova,sacharya/nova,berrange/nova,viggates/nova,Brocade-OpenSource/OpenStack-DNRM-Nova,sridevikoushik31/nova,isyippee/nova,Juniper/nova,zhimin711/nova,jianghuaw/nova,tanglei528/nova,tianweizhang/nova,Stavitsky/nova,fajoy/nova,aristanetworks/arista-ovs-nova,rajalokan/nova,gspilio/nova,mandeepdhami/nova,shahar-stratoscale/nova,vladikr/nova_drafts,petrutlucian94/nova_dev,houshengbo/nova_vmware_compute_driver,ruslanloman/nova,MountainWei/nova,zaina/nova,mandeepdhami/nova,Metaswitch/calico-nova,rickerc/nova_audit,Triv90/Nova,belmiromoreira/nova,devendermishrajio/nova_test_latest,Tehsmash/nova,CEG-FYP-OpenStack/scheduler,akash1808/nova,tudorvio/nova,adelina-t/nova,Yusuke1987/openstack_template,mmnelemane/nova,Yuriy-Leonov/nova,cernops/nova,fajoy/nova,tudorvio/nova,dims/nova,houshengbo/nova_vmware_compute_driver,sridevikoushik31/nova,jianghuaw/nova,imsplitbit/nova,blueboxgroup/nova,watonyweng/nova,mikalstill/nova,tealover/nova,edulramirez/nova,affo/nova,Triv90/Nova,dstroppa/openstack-smartos-nova-grizzly,tangfeixiong/nova,shootstar/novatest,cloudbase/nova-virtualbox,noironetworks/nova,maheshp/novatest,alaski/nova,BeyondTheClouds/nova,orbitfp7/nova,maheshp/novatest,devendermishrajio/nova,eonpatapon/nova,eayunstack/nova,scripnichenko/nova,open
stack/nova,luogangyi/bcec-nova,mgagne/nova,cernops/nova,sridevikoushik31/openstack,dims/nova,dstroppa/openstack-smartos-nova-grizzly,whitepages/nova,Francis-Liu/animated-broccoli,LoHChina/nova,tangfeixiong/nova,DirectXMan12/nova-hacking,luogangyi/bcec-nova,rajalokan/nova,gooddata/openstack-nova,NeCTAR-RC/nova,jianghuaw/nova,iuliat/nova,ted-gould/nova,cyx1231st/nova,JioCloud/nova_test_latest,alvarolopez/nova,vmturbo/nova,yrobla/nova,raildo/nova,mmnelemane/nova,barnsnake351/nova,spring-week-topos/nova-week,jeffrey4l/nova,Triv90/Nova,devoid/nova,Metaswitch/calico-nova,gooddata/openstack-nova,mahak/nova,rahulunair/nova,yrobla/nova,varunarya10/nova_test_latest,CiscoSystems/nova,barnsnake351/nova,citrix-openstack-build/nova,citrix-openstack-build/nova,sebrandon1/nova,gooddata/openstack-nova,hanlind/nova,bclau/nova,vmturbo/nova,cyx1231st/nova,kimjaejoong/nova,eayunstack/nova,alexandrucoman/vbox-nova-driver,gooddata/openstack-nova,eonpatapon/nova,edulramirez/nova,nikesh-mahalka/nova,klmitch/nova,tealover/nova,belmiromoreira/nova,mahak/nova,alaski/nova,ntt-sic/nova,silenceli/nova,cloudbase/nova,mikalstill/nova,alvarolopez/nova,eharney/nova,virtualopensystems/nova,jianghuaw/nova,adelina-t/nova,leilihh/novaha,spring-week-topos/nova-week,orbitfp7/nova,fnordahl/nova,projectcalico/calico-nova,plumgrid/plumgrid-nova,rahulunair/nova,NeCTAR-RC/nova,houshengbo/nova_vmware_compute_driver,sridevikoushik31/openstack,DirectXMan12/nova-hacking,felixma/nova,redhat-openstack/nova,aristanetworks/arista-ovs-nova,j-carpentier/nova,maoy/zknova,DirectXMan12/nova-hacking,silenceli/nova,cloudbase/nova-virtualbox,sacharya/nova,devoid/nova,cloudbau/nova,leilihh/nova,devendermishrajio/nova,mikalstill/nova,Juniper/nova,CloudServer/nova,bigswitch/nova,zaina/nova,isyippee/nova,MountainWei/nova,noironetworks/nova,rrader/nova-docker-plugin,sridevikoushik31/nova,fnordahl/nova,affo/nova,iuliat/nova,thomasem/nova,LoHChina/nova,CiscoSystems/nova,Juniper/nova,joker946/nova,yatinkumbhare/openstack-nova,rahuluna
ir/nova,maheshp/novatest,felixma/nova,gspilio/nova,zhimin711/nova,virtualopensystems/nova,JianyuWang/nova,Juniper/nova,klmitch/nova,sridevikoushik31/nova,rajalokan/nova,JianyuWang/nova,yosshy/nova,raildo/nova,TieWei/nova,leilihh/novaha,vmturbo/nova,yatinkumbhare/openstack-nova,scripnichenko/nova,rrader/nova-docker-plugin,TwinkleChawla/nova,openstack/nova,SUSE-Cloud/nova,cloudbase/nova,Brocade-OpenSource/OpenStack-DNRM-Nova,hanlind/nova,JioCloud/nova_test_latest,shail2810/nova,joker946/nova,vladikr/nova_drafts,OpenAcademy-OpenStack/nova-scheduler,double12gzh/nova,mahak/nova,Yuriy-Leonov/nova,qwefi/nova,dawnpower/nova,ted-gould/nova,CCI-MOC/nova,TieWei/nova,BeyondTheClouds/nova,phenoxim/nova,viggates/nova,whitepages/nova,Yusuke1987/openstack_template,CEG-FYP-OpenStack/scheduler,Francis-Liu/animated-broccoli,apporc/nova,ntt-sic/nova,CloudServer/nova,maelnor/nova,nikesh-mahalka/nova,JioCloud/nova,tianweizhang/nova,takeshineshiro/nova,maoy/zknova,petrutlucian94/nova,openstack/nova,eharney/nova,sebrandon1/nova,fajoy/nova,vmturbo/nova,tanglei528/nova,ewindisch/nova,rickerc/nova_audit,alexandrucoman/vbox-nova-driver,badock/nova,maelnor/nova,Stavitsky/nova,angdraug/nova,bgxavier/nova,akash1808/nova_test_latest,kimjaejoong/nova,mgagne/nova,klmitch/nova,ruslanloman/nova,saleemjaveds/https-github.com-openstack-nova,petrutlucian94/nova_dev,imsplitbit/nova,CCI-MOC/nova,cernops/nova,dstroppa/openstack-smartos-nova-grizzly,klmitch/nova,akash1808/nova,thomasem/nova,shootstar/novatest,akash1808/nova_test_latest,JioCloud/nova,j-carpentier/nova,cloudbase/nova,devendermishrajio/nova_test_latest
|
nova/db/sqlalchemy/migrate_repo/versions/139_add_indexes_to_fixed_ips.py
|
nova/db/sqlalchemy/migrate_repo/versions/139_add_indexes_to_fixed_ips.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Index, MetaData, Table
from sqlalchemy.exc import IntegrityError
def upgrade(migrate_engine):
    """Create the fixed_ips_deleted_allocated_idx index on fixed_ips.

    The column set (address, deleted, allocated) mirrors the filter used
    by fixed_ip_delete_associate in nova/db/sqlalchemy/api.py.
    """
    metadata = MetaData()
    metadata.bind = migrate_engine
    fixed_ips = Table('fixed_ips', metadata, autoload=True)
    index = Index('fixed_ips_deleted_allocated_idx',
                  fixed_ips.c.address,
                  fixed_ips.c.deleted,
                  fixed_ips.c.allocated)
    try:
        index.create(migrate_engine)
    except IntegrityError:
        # Index already present (e.g. migration re-run): nothing to do.
        pass
def downgrade(migrate_engine):
    """Drop the fixed_ips_deleted_allocated_idx index from fixed_ips."""
    metadata = MetaData()
    metadata.bind = migrate_engine
    fixed_ips = Table('fixed_ips', metadata, autoload=True)
    index = Index('fixed_ips_deleted_allocated_idx',
                  fixed_ips.c.address,
                  fixed_ips.c.deleted,
                  fixed_ips.c.allocated)
    index.drop(migrate_engine)
|
apache-2.0
|
Python
|
|
5e8ce7c9fb31d76ec5f372bb0b62e7b846304966
|
Create exerc-4.py
|
rafa-impacta/Exercicio
|
exerc-4.py
|
exerc-4.py
|
# Exercise 4: write a text file, echo it, copy it in chunks, and echo
# the copy.  Each step uses ``with`` so every file handle is closed
# deterministically even if an error occurs.

# Step 1: write the source file (the original looped ``range(1)``, which
# is a single iteration — a plain write does the same thing).
with open("arquivo.txt", "w") as arquivo:
    arquivo.write("Atividade 4 ")

# Step 2: echo the source file line by line.
with open("arquivo.txt", "r") as arquivo:
    for linha in arquivo:
        print("Texto: ", linha)

# Step 3: copy the source file in fixed-size chunks of 50 characters.
with open("arquivo.txt", "r") as arquivo, open("copia.txt", "w") as copia:
    while True:
        texto = arquivo.read(50)
        if texto == "":
            break
        copia.write(texto)

# Step 4: echo the copy.  BUG FIX: the original called arquivo.close()
# here a second time and never closed ``copia``; the context manager
# now guarantees the copy handle is released.
with open("copia.txt", "r") as copia:
    for linha in copia:
        print("Texto copiado: ", linha)
|
apache-2.0
|
Python
|
|
a18ef3eb9128ee27d4f14e7952ba8545b510c4ac
|
add e2e test file
|
sup/fill,sup/fill,sup/fill
|
e2e_test.py
|
e2e_test.py
|
# Copyright 2015, Google, Inc.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use
# this file except in compliance with the License. You may obtain a copy of the
# License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable
# law or agreed to in writing, software distributed under the License is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and
# limitations under the License.
import urllib2
import logging
# Base URL of the deployed application exercised by this end-to-end check.
HOST = 'http://fill-app.appspot.com'

# [START e2e]
# Fetch the author for "ulysses" and verify the expected payload.
url = "{}/get_author/ulysses".format(HOST)
html = urllib2.urlopen(url).read()
assert(html == "James Joyce")
# [END e2e]
|
mit
|
Python
|
|
5695a468f8619ea6bb7c9a01857a375f827de6a1
|
Add parser for 0.4.6 XML report.
|
owtf/ptp,DoomTaper/ptp
|
libptp/tools/arachni/parser.py
|
libptp/tools/arachni/parser.py
|
from libptp.exceptions import NotSupportedVersionError
from libptp.info import Info
from libptp.parser import AbstractParser
class ArachniXMLParser(AbstractParser):
    """Parser for Arachni XML reports (supported version: 0.4.6)."""

    __tool__ = 'arachni'
    __format__ = 'xml'
    __version__ = ['0.4.6']

    def __init__(self, *args, **kwargs):
        # Delegate straight to the abstract base parser.
        AbstractParser.__init__(self, *args, **kwargs)

    @classmethod
    def is_mine(cls, stream):
        """Check if it is a supported report."""
        return cls.__tool__ in stream.tag

    def parse_metadata(self, stream):
        """Parse the metadatas of the report."""
        # Find the version of Arachni.
        version = stream.find('.//version')
        # Reconstruct the metadata
        # TODO: Retrieve the other metadata likes the date, etc.
        metadata = {version.tag: version.text}
        if not self.check_version(metadata):
            raise NotSupportedVersionError(
                'PTP does NOT support this version of Arachni.')
        return metadata

    def parse_report(self, stream, scale):
        """Parse the report."""
        issues = stream.find('.//issues')
        # One Info per issue, with its severity mapped through the
        # unified ranking scale.
        return [
            Info(ranking=scale[issue.find('.//severity').text])
            for issue in issues.findall('.//issue')
        ]
|
bsd-3-clause
|
Python
|
|
ddad199df01cf41dda188d501ddb74e17c38d94f
|
add symbiotic3 tool
|
ultimate-pa/benchexec,IljaZakharov/benchexec,IljaZakharov/benchexec,martin-neuhaeusser/benchexec,IljaZakharov/benchexec,sosy-lab/benchexec,dbeyer/benchexec,dbeyer/benchexec,sosy-lab/benchexec,martin-neuhaeusser/benchexec,sosy-lab/benchexec,martin-neuhaeusser/benchexec,IljaZakharov/benchexec,dbeyer/benchexec,sosy-lab/benchexec,martin-neuhaeusser/benchexec,ultimate-pa/benchexec,ultimate-pa/benchexec,ultimate-pa/benchexec,sosy-lab/benchexec,dbeyer/benchexec,ultimate-pa/benchexec,ultimate-pa/benchexec,sosy-lab/benchexec
|
benchexec/tools/symbiotic3.py
|
benchexec/tools/symbiotic3.py
|
"""
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
import subprocess
from os.path import dirname
from os.path import join as joinpath
import benchexec.util as util
import benchexec.tools.template
import benchexec.result as result
class Tool(benchexec.tools.template.BaseTool):
"""
Symbiotic tool wrapper object
"""
def executable(self):
"""
Find the path to the executable file that will get executed.
This method always needs to be overridden,
and most implementations will look similar to this one.
The path returned should be relative to the current directory.
"""
return util.find_executable('symbiotic')
def version(self, executable):
"""
Determine a version string for this tool, if available.
"""
return self._version_from_tool(executable)
def name(self):
"""
Return the name of the tool, formatted for humans.
"""
return 'symbiotic'
def cmdline(self, executable, options, tasks, propertyfile=None, rlimits={}):
"""
Compose the command line to execute from the name of the executable
"""
# only one task is supported
assert len(tasks) == 1
if not propertyfile is None:
options.append('--prp={0}'.format(propertyfile))
return [executable] + options + tasks
def determine_result(self, returncode, returnsignal, output, isTimeout):
if isTimeout:
return 'timeout'
output = output.strip()
if output is None:
return 'error (no output)'
if output == 'TRUE':
return result.RESULT_TRUE_PROP
elif output == 'UNKNOWN':
return result.RESULT_UNKNOWN
elif output == 'FALSE':
return result.RESULT_FALSE_REACH
if returncode != 0:
return 'Failed with returncode: '\
'{0} (signal: {1})'.format(returncode, returnsignal)
else:
return 'error (unknown)'
def program_files(self, executable):
folder = dirname(executable)
def make_path(f, folder):
return joinpath('.', folder, f)
files = [make_path(executable, ''),
make_path('build-fix.sh',folder),
make_path('path_to_ml.pl', folder),
make_path('bin/klee', folder),
make_path('bin/opt', folder),
make_path('bin/clang', folder),
make_path('bin/llvm-link', folder),
make_path('bin/llvm-slicer', folder),
make_path('lib.c', folder),
make_path('lib/libllvmdg.so', folder),
make_path('lib/LLVMsvc15.so', folder),
make_path('lib/klee/runtime/kleeRuntimeIntrinsic.bc', folder),
make_path('lib32/klee/runtime/kleeRuntimeIntrinsic.bc', folder)]
return files
|
apache-2.0
|
Python
|
|
8e9c2a3c31184e789bf2788f5fa0ab06e0db988f
|
Add utilities module
|
inodb/sufam
|
sufam/utils.py
|
sufam/utils.py
|
import errno
import os
import shutil
import sys
def mkdir_p(path):
try:
os.makedirs(path)
except OSError as exc:
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
def rm_rf(path):
if os.path.isdir(path):
shutil.rmtree(path)
elif os.path.exists(path):
os.remove(path)
|
mit
|
Python
|
|
efb6072e097a816bb46fdd83541c763e222816c9
|
Add initial tests for the concordance view
|
CentreForResearchInAppliedLinguistics/clic,CentreForResearchInAppliedLinguistics/clic,CentreForCorpusResearch/clic,CentreForCorpusResearch/clic,CentreForResearchInAppliedLinguistics/clic,CentreForCorpusResearch/clic
|
clic/dickens/test_concordance.py
|
clic/dickens/test_concordance.py
|
import unittest
from concordance_new import Concordancer_New
class TestConcordancerNewChapterIndex(unittest.TestCase):
def test_create_concordance(self):
"""
This is a very naive test to run whilst reviewing the create
concordance code. It's goal is simply to evaluate whether that
function is still up an running.
For that purpose it uses a hard-coded example
"""
concordance = Concordancer_New()
fog = concordance.create_concordance(terms="fog",
idxName="chapter-idx",
Materials=["dickens"],
selectWords="whole")
assert len(fog) == 95 # 94 hits + one variable total_count in the list
class TestConcordancerNewQuoteIndex(unittest.TestCase):
def test_create_concordance(self):
"""
This is another naive test focusing on searching in quotes
It also uses a hard-coded example
"""
concordance = Concordancer_New()
maybe = concordance.create_concordance(terms="maybe",
idxName="quote-idx",
Materials=["dickens"],
selectWords="whole")
assert len(maybe) == 46 # 45 hits + one variable total_count in the list
if __name__ == '__main__':
unittest.main()
|
mit
|
Python
|
|
915f94433171c9ee73fa5cf5941c695590e6ee16
|
add fabric config file
|
ianzhengnan/blog-python3,ianzhengnan/blog-python3,ianzhengnan/blog-python3
|
fabfile.py
|
fabfile.py
|
import os, re
from datetime import datetime
from fabric.api import *
#server user name
env.user = 'zhengnan'
# sudo user
env.sudo_user = 'root'
# server address
env.hosts = ['192.168.56.103']
db_user = 'www-data'
db_password = 'www-data'
_TAR_FILE = 'dist-awesome.tar.gz'
def build():
includes = ['static', 'templates', 'favicon.ico', '*.py']
excludes = ['test', '.*', '*.pyc', '*.pyo']
local('rm -f dist/%s' % _TAR_FILE)
with lcd(os.path.join(os.path.abspath('.'), 'www')):
cmd = ['tar', '--dereference', '-czvf', '../dist/%s' % _TAR_FILE]
cmd.extend(['--exclude=\'%s\'' % ex for ex in excludes])
cmd.extend(includes)
local(' '.join(cmd))
_REMOTE_TMP_TAR = '/tmp/%s' % _TAR_FILE
_REMOTE_BASE_DIR = '/srv/awesome'
def deploy():
newdir = 'www-%s' % datetime.now().strftime('%y-%m-%d_%H.%M.%S')
# remove exist tar file
run('rm -f %s' % _REMOTE_TMP_TAR)
# upload new tar file
put('dist/%s' % _TAR_FILE, _REMOTE_TMP_TAR)
# make new dir
with cd(_REMOTE_BASE_DIR):
sudo('mkdir %s' % newdir)
# unzip tar to new directory
with cd('%s/%s' % _REMOTE_TMP_TAR):
sudo('tar -xzvf %s' % _REMOTE_TMP_TAR)
# re-set the soft link
with cd(_REMOTE_BASE_DIR):
sudo('rm -f www')
sudo('ln -s %s www' % newdir)
sudo('chown www-data:www-data www')
sudo('chown -R www-data:www-data %s' % newdir)
# restart python and nginx service
with settings(warn_only=True):
sudo('supervisorctl stop awesome')
sudo('supervisorctl start awesome')
sudo('/etc/init.d/nginx reload')
|
apache-2.0
|
Python
|
|
7e8d1220ef0032c505d7d66838fc9fc9999da810
|
add some python test code
|
natemara/CSE-278,natemara/CSE-278
|
project_02/tests.py
|
project_02/tests.py
|
from unittest import TestCase, main
import subprocess
import re
NUM_PATTERN = re.compile(r'\w+\s*=\s*(?P<num>[10]+)')
def run(num1, num2, operation):
p = subprocess.Popen(
'./proj2.out',
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
stdin=subprocess.PIPE,
bufsize=0
)
p.stdin.write('{}\n{}\n{}\n'.format(num1, num2, operation))
out, err = p.communicate()
search = NUM_PATTERN.search(out)
return search.group('num')
class TestStuff(TestCase):
def test_two_positive_addd(self):
num1 = '00000000000000000000000001011111'
num2 = '00000000000000000000000000110110'
operation = 'add'
expect = '00000000000000000000000010010101'
actual = run(num1, num2, operation)
self.assertEqual(actual, expect)
def test_two_positive_sub_negative_result(self):
num1 = '00000000000000000000000000110110'
num2 = '00000000000000000000000001011111'
operation = 'sub'
expect = '11111111111111111111111111010111'
actual = run(num1, num2, operation)
self.assertEqual(actual, expect)
def test_two_positive_mul(self):
num1 = '00000000000000000000000000110110'
num2 = '00000000000000000000000001011111'
operation = 'mul'
expect = '00000000000000000001010000001010'
actual = run(num1, num2, operation)
self.assertEqual(actual, expect)
def test_two_positive_div(self):
num1 = '00000000000000000000000100101100'
num2 = '00000000000000000000000000000101'
operation = 'div'
expect = '00000000000000000000000000111100'
actual = run(num1, num2, operation)
self.assertEqual(actual, expect)
def test_two_negative_add(self):
num1 = '11111111111111111111111111111110'
num2 = '11111111111111111111111111111100'
operation = 'add'
expect = '11111111111111111111111111111010'
actual = run(num1, num2, operation)
self.assertEqual(actual, expect)
def test_two_negative_sub(self):
num1 = '11111111111111111111111111111110'
num2 = '11111111111111111111111111111100'
operation = 'sub'
expect = '00000000000000000000000000000010'
actual = run(num1, num2, operation)
self.assertEqual(actual, expect)
def test_two_negative_mul(self):
num1 = '11111111111111111111111111111110'
num2 = '11111111111111111111111111111100'
operation = 'mul'
expect = '00000000000000000000000000001000'
actual = run(num1, num2, operation)
self.assertEqual(actual, expect)
def test_two_negative_div(self):
num1 = '11111111111111111111111111111110'
num2 = '11111111111111111111111111111100'
operation = 'div'
expect = '00000000000000000000000000000001'
actual = run(num1, num2, operation)
self.assertEqual(actual, expect)
if __name__ == '__main__':
main()
|
mit
|
Python
|
|
9f5bc55f7cfc5b6d0ee3ee9d6ad8a5317e1fa62b
|
Move all constants here
|
hackoregon/urbandev-backend,hackoregon/urbandev-backend
|
consts.py
|
consts.py
|
## Constants
# Lat / long bounding box of City of Portland
# (-123.0, 44.0, -122.0, 45.0) ??
PDX_BOUNDING_BOX = (-122.9, 45.35, -122.4, 45.7)
|
mit
|
Python
|
|
7185136cf4322397803eabe805ab4e818197edf8
|
Add wsgi startup script
|
18F/regulations-core,ascott1/regulations-core,EricSchles/regulations-core,adderall/regulations-core,willbarton/regulations-core,cmc333333/regulations-core,grapesmoker/regulations-core,eregs/regulations-core
|
core.wsgi
|
core.wsgi
|
import os
import sys
root = os.path.dirname(__file__)
sys.path.insert(0, root)
from core import app as application
from core import index
from core.handlers.search import *
from core.handlers.regulation import *
index.init_schema()
|
cc0-1.0
|
Python
|
|
a76f2bfea735f6b452785185ebc257d1da179ec4
|
Add tests (#5451)
|
souravbadami/oppia,kevinlee12/oppia,brianrodri/oppia,prasanna08/oppia,kevinlee12/oppia,brianrodri/oppia,oppia/oppia,souravbadami/oppia,brianrodri/oppia,souravbadami/oppia,kevinlee12/oppia,prasanna08/oppia,kevinlee12/oppia,prasanna08/oppia,oppia/oppia,oppia/oppia,brianrodri/oppia,souravbadami/oppia,oppia/oppia,oppia/oppia,brianrodri/oppia,prasanna08/oppia,kevinlee12/oppia,prasanna08/oppia,souravbadami/oppia
|
core/platform/taskqueue/gae_taskqueue_services_test.py
|
core/platform/taskqueue/gae_taskqueue_services_test.py
|
# coding: utf-8
#
# Copyright 2018 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the GAE taskqueue API wrapper."""
import json
import operator
from core.platform.taskqueue import gae_taskqueue_services as taskqueue_services
from core.tests import test_utils
import feconf
from google.appengine.ext import deferred
class TaskQueueTests(test_utils.GenericTestBase):
"""Tests for taskqueue-related operations."""
def test_defer(self):
taskqueue_services.defer(
operator.add, taskqueue_services.QUEUE_NAME_DEFAULT, 1, 2)
tasks = self.taskqueue_stub.get_filtered_tasks()
self.assertEqual(len(tasks), 1)
result = deferred.run(tasks[0].payload)
self.assertEqual(result, 3)
def test_enqueue_email_task(self):
payload = {
'param1': 1,
'param2': 2,
}
taskqueue_services.enqueue_email_task(
feconf.TASK_URL_FLAG_EXPLORATION_EMAILS, payload, 0)
tasks = self.taskqueue_stub.get_filtered_tasks(
queue_names=taskqueue_services.QUEUE_NAME_EMAILS)
self.assertEqual(len(tasks), 1)
self.assertEqual(tasks[0].payload, json.dumps(payload))
|
apache-2.0
|
Python
|
|
caa29a940673d42d23d70b31eddd2ec61fc3fb73
|
Add a simple script which monitors the paused domains on a host, checks them against the xapi database, logs anomalies, and optionally destroys the domain if it has been in an error state for longer than a threshold (currently 60s)
|
johnelse/xcp-rrdd,koushikcgit/xcp-rrdd,koushikcgit/xcp-networkd,simonjbeaumont/xcp-rrdd,sharady/xcp-networkd,djs55/xcp-networkd,koushikcgit/xcp-rrdd,koushikcgit/xcp-rrdd,djs55/xcp-rrdd,djs55/xcp-networkd,simonjbeaumont/xcp-rrdd,djs55/squeezed,djs55/xcp-rrdd,robhoes/squeezed,johnelse/xcp-rrdd,koushikcgit/xcp-networkd,sharady/xcp-networkd
|
scripts/examples/python/monitor-unwanted-domains.py
|
scripts/examples/python/monitor-unwanted-domains.py
|
#!/usr/bin/env python
import subprocess, XenAPI, inventory, time, sys
# Script which monitors the domains running on a host, looks for
# paused domains which don't correspond to VMs which are running here
# or are about to run here, logs them and optionally destroys them.
# Return a list of (domid, uuid) tuples, one per paused domain on this host
def list_paused_domains():
results = []
all = subprocess.Popen(["/opt/xensource/bin/list_domains"], stdout=subprocess.PIPE).communicate()[0]
lines = all.split("\n")
for domain in lines[1:]:
bits = domain.split()
if bits <> []:
domid = bits[0]
uuid = bits[2]
state = bits[4]
if 'P' in state:
results.append( (domid, uuid) )
return results
# Given localhost's uuid and a (domid, uuid) tuple, return True if the domain
# be somewhere else i.e. we think it may have leaked here
def should_domain_be_somewhere_else(localhost_uuid, (domid, uuid)):
try:
x = XenAPI.xapi_local()
x.xenapi.login_with_password("root", "")
try:
try:
vm = x.xenapi.VM.get_by_uuid(uuid)
resident_on = x.xenapi.VM.get_resident_on(vm)
current_operations = x.xenapi.VM.get_current_operations(vm)
result = current_operations == {} and resident_on <> localhost_uuid
if result:
log("domid %s uuid %s: is not being operated on and is not resident here" % (domid, uuid))
return result
except XenAPI.Failure, e:
if e.details[0] == "UUID_INVALID":
# VM is totally bogus
log("domid %s uuid %s: is not in the xapi database" % (domid, uuid))
return True
# fail safe for now
return False
finally:
x.xenapi.logout()
except:
return False
def log(str):
print str
# Destroy the given domain
def destroy_domain((domid, uuid)):
log("destroying domid %s uuid %s" % (domid, uuid))
all = subprocess.Popen(["/opt/xensource/debug/destroy_domain", "-domid", domid], stdout=subprocess.PIPE).communicate()[0]
# Keep track of when a domain first looked like it should be here
domain_first_noticed = {}
# Number of seconds after which we conclude that a domain really shouldn't be here
threshold = 60
if __name__ == "__main__":
localhost_uuid = inventory.get_localhost_uuid ()
while True:
time.sleep(1)
paused = list_paused_domains ()
# GC the domain_first_noticed map
for d in domain_first_noticed.keys():
if d not in paused:
log("domid %s uuid %s: looks ok now, forgetting about it" % d)
del domain_first_noticed[d]
for d in list_paused_domains():
if should_domain_be_somewhere_else(localhost_uuid, d):
if d not in domain_first_noticed:
domain_first_noticed[d] = time.time()
noticed_for = time.time() - domain_first_noticed[d]
if noticed_for > threshold:
log("domid %s uuid %s: has been in bad state for over threshold" % d)
if "-destroy" in sys.argv:
destroy_domain(d)
|
lgpl-2.1
|
Python
|
|
ab0868b777101a8442cec80009b84b40e79a3a08
|
add a sample script
|
beetbox/audioread
|
decode.py
|
decode.py
|
"""Command-line tool to decode audio files to raw PCM."""
import audioread
import sys
import os
def decode(filename):
filename = os.path.abspath(os.path.expanduser(filename))
with audioread.audio_open(filename) as f:
print 'Input file: %i channels at %i Hz; %.1f seconds.' % \
(f.channels, f.samplerate, f.duration)
with open(filename + '.pcm', 'wb') as of:
for buf in f:
of.write(buf)
if __name__ == '__main__':
decode(sys.argv[1])
|
mit
|
Python
|
|
b5b22086efe745f5d14207c1103483189e0387ef
|
Add game_state.py
|
lipk/pyzertz
|
pyzertz/game_state.py
|
pyzertz/game_state.py
|
import table
import os
class Player:
def __init__(self, name):
self.marbles = [0,0,0] #white, gray, black
self.name = name
def __str__(self):
return self.name
pl1 = Player("Odon")
pl2 = Player("Bela")
winner = None
act = pl1
t=table.Table(3)
t.get(0,0).type=1
t.get(-3,0).type=1
t.get(-2,-1).type=2
print(t)
def isValidPlace(move, t):
x = move[0]
y = move[1]
color = move[2]
if not (-3<=x<=3 and -3<=y<=3 and -3<=x+y<=3):
return False
if t.marbles[color]<=0:
return False
if t.get(x,y).type != 0:
return False
return True
def place(move, t):
t.marbles[move[2]] -= 1
t.get(move[0],move[1]).type = move[2]+1
def isValidRemove(move,t):
x = move[0]
y = move[1]
if not (-3<=x<=3 and -3<=y<=3 and -3<=x+y<=3):
return False
if t.get(x,y).type != 0:
return False
return True
def remove(move,t):
t.get(move[0],move[1]).type=-1
while (not winner):
os.system('cls')
print(t)
print(act.name + " moves")
print("What do you want? Place a marble, and remove a ring (p), or capture some marbles(c)?")
if input('') == 'p':
print('Where and which marble do you want to place? (4 3 2 means put marble type 2 to (4,3))')
move = input('').split(' ')
if (len(move)>2):
move = list(map(int, move))
if isValidPlace(move,t):
place(move,t)
else:
print('You can not cheat!')
break
else:
print("You will not cheat!")
break
print('Which ring do you want to remove? (4 3 means remove ring (4,3))')
move = input('').split(' ')
if (len(move)>1):
move = list(map(int, move))
if isValidRemove(move,t):
remove(move,t)
else:
print('You can not cheat!')
break
else:
print("You will not cheat!")
break
else:
print('capture')
if (sum(t.marbles) == 0):
winner = "draw"
if (act.marbles[0]==4 or act.marbles[1]==5 or act.marbles[2]==6):
winner = act
if (act.marbles[0]>2 and act.marbles[1]>2 and act.marbles[2]>2):
winner = act
if (act == pl1):
act = pl2
else:
act = pl1
print("The winner is: " + winner)
|
apache-2.0
|
Python
|
|
51055b6fbd81ffc88215b88f9a63d702535bdd93
|
Add missing file
|
sassoftware/jobslave,sassoftware/jobslave,sassoftware/jobslave
|
jobslave/job_data.py
|
jobslave/job_data.py
|
#
# Copyright (c) 2011 rPath, Inc.
#
BUILD_DEFAULTS = {
'autoResolve': False,
'maxIsoSize': '681574400',
'bugsUrl': 'http://issues.rpath.com/',
'natNetworking': False,
'vhdDiskType': 'dynamic',
'anacondaCustomTrove': '',
'stringArg': '',
'mediaTemplateTrove': '',
'baseFileName': '',
'vmSnapshots': False,
'swapSize': 128,
'betaNag': False,
'anacondaTemplatesTrove': '',
'enumArg': '2',
'vmMemory': 256,
'installLabelPath': '',
'intArg': 0,
'freespace': 250,
'boolArg': False,
'mirrorUrl': '',
'zisofs': True,
'diskAdapter': 'lsilogic',
'unionfs': False,
'showMediaCheck': False,
'amiHugeDiskMountpoint': '',
'platformName': '',
}
class JobData(dict):
def getBuildData(self, key):
value = self.get('data', {}).get(key)
if value is None:
value = BUILD_DEFAULTS.get(key)
return value
|
apache-2.0
|
Python
|
|
41cc4d54d5eddcf30e2c9a98179ff1e745a12f90
|
Add missing migration
|
springload/madewithwagtail,springload/madewithwagtail,springload/madewithwagtail,springload/madewithwagtail
|
core/migrations/0009_wagtail112upgrade.py
|
core/migrations/0009_wagtail112upgrade.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2017-11-21 23:14
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0008_wagtailcompanypage_sites_ordering'),
]
operations = [
migrations.AlterField(
model_name='submitformfield',
name='field_type',
field=models.CharField(choices=[('singleline', 'Single line text'), ('multiline', 'Multi-line text'), ('email', 'Email'), ('number', 'Number'), ('url', 'URL'), ('checkbox', 'Checkbox'), ('checkboxes', 'Checkboxes'), ('dropdown', 'Drop down'), ('multiselect', 'Multiple select'), ('radio', 'Radio buttons'), ('date', 'Date'), ('datetime', 'Date/time')], max_length=16, verbose_name='field type'),
),
]
|
mit
|
Python
|
|
9bc05db6bc3b6b2570c8e37d528639c310a6f162
|
Add login required decorator
|
LibCrowds/libcrowds-analyst,alexandermendes/pybossa-analyst,alexandermendes/pybossa-analyst,alexandermendes/pybossa-analyst
|
pybossa_analyst/login.py
|
pybossa_analyst/login.py
|
# -*- coding: utf8 -*-
from functools import wraps
from flask import session, request, redirect, url_for
def login_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if session.get('api_key') is None:
return redirect(url_for('home.login', next=request.url))
return f(*args, **kwargs)
return decorated_function
|
unknown
|
Python
|
|
24bdb8217a921f3f1833a8e9c1bcd7cc1615ea9b
|
add mgmt command to migrate testruns between projects
|
terceiro/squad,terceiro/squad,terceiro/squad,terceiro/squad
|
squad/core/management/commands/migrate_test_runs.py
|
squad/core/management/commands/migrate_test_runs.py
|
import sys
from django.core.exceptions import ObjectDoesNotExist
from django.core.management.base import BaseCommand
from squad.core.models import Project, Build, Environment
from squad.core.tasks import UpdateProjectStatus
class Command(BaseCommand):
help = """Move test runs identified by environment slug
from one project to another. This action preserves
datetime of the objects and statuses."""
def add_arguments(self, parser):
parser.add_argument(
'--old-project-slug',
dest="old_project_slug",
help="Slug of the project from which to migrate test runs"
)
parser.add_argument(
'--new-project-slug',
dest="new_project_slug",
help="Slug of the project to which to migrate test runs"
)
parser.add_argument(
'--env-slug',
dest="env_slug",
help="Slug of the environment to migrate to new project"
)
def handle(self, *args, **options):
self.options = options
if not self.options['old_project_slug']:
print("ERROR: old_project_slug missing")
sys.exit(1)
if not self.options['new_project_slug']:
print("ERROR: new_project_slug missing")
sys.exit(1)
if not self.options['env_slug']:
print("ERROR: env_slug missing")
sys.exit(1)
old_project = None
new_project = None
env = None
try:
old_project = Project.objects.get(slug=self.options['old_project_slug'])
except ObjectDoesNotExist:
print("Project: %s not found. Exiting" % self.options['old_project_slug'])
sys.exit(0)
try:
new_project = Project.objects.get(slug=self.options['new_project_slug'])
except ObjectDoesNotExist:
print("Project: %s not found. Exiting" % self.options['new_project_slug'])
sys.exit(0)
try:
env = Environment.objects.get(project=old_project, slug=self.options['env_slug'])
except ObjectDoesNotExist:
print("Environment: %s not found. Exiting" % self.options['env_slug'])
sys.exit(0)
print("Migrating testruns from project %s to %s" % (old_project.slug, new_project.slug))
print("All test runs with environment name: %s will be migrated" % env.slug)
for build in old_project.builds.all():
if build.test_runs.filter(environment=env):
print("moving build: %s" % build)
new_build, _ = Build.objects.get_or_create(
version=build.version,
project=new_project,
created_at=build.created_at)
for testrun in build.test_runs.filter(environment=env):
testrun.build = new_build
testrun.save()
testrun.environment.project = new_project
testrun.environment.save()
for testjob in testrun.test_jobs.all():
testjob.target = new_project
testjob.save()
UpdateProjectStatus()(testrun)
new_build.status.created_at = build.status.created_at
new_build.status.last_updated = build.status.last_updated
new_build.status.save()
else:
print("No matching test runs found in build: %s" % build)
|
agpl-3.0
|
Python
|
|
0b09eeede5f8a1533c166fe97844834ceb2ee178
|
remove some hard-coded paths
|
pgdouyon/vim-nim,zah/nim.vim
|
autoload/nimrod_vim.py
|
autoload/nimrod_vim.py
|
import threading, Queue, subprocess, signal, os
try:
import vim
except ImportError:
class Vim:
def command(self, x):
print("Executing vim command: " + x)
vim = Vim()
def disable_sigint():
# Ignore the SIGINT signal by setting the handler to the standard
# signal handler SIG_IGN.
signal.signal(signal.SIGINT, signal.SIG_IGN)
class NimrodThread(threading.Thread):
def __init__(self, project_path):
super(NimrodThread, self).__init__()
self.tasks = Queue.Queue()
self.responses = Queue.Queue()
self.nim = subprocess.Popen(
["nimrod", "serve", "--server.type:stdin", project_path],
cwd = os.path.dirname(project_path),
stdin = subprocess.PIPE,
stdout = subprocess.PIPE,
stderr = subprocess.STDOUT,
universal_newlines = True,
preexec_fn = disable_sigint,
bufsize = 1)
def postNimCmd(self, msg, async = True):
self.tasks.put((msg, async))
if not async:
return self.responses.get()
def run(self):
while True:
(msg, async) = self.tasks.get()
if msg == "quit":
self.nim.terminate()
break
self.nim.stdin.write(msg + "\n")
result = ""
while True:
line = self.nim.stdout.readline()
result += line
if line == "\n":
if not async:
self.responses.put(result)
else:
self.asyncOpComplete(msg, result)
break
def vimEscapeExpr(expr):
return expr.replace("\\", "\\\\").replace('"', "\\\"").replace("\n", "\\n")
class NimrodVimThread(NimrodThread):
def asyncOpComplete(self, msg, result):
cmd = "/usr/local/bin/mvim --remote-expr 'NimrodAsyncCmdComplete(1, \"" + vimEscapeExpr(result) + "\")'"
os.system (cmd)
projects = {}
log = open("/tmp/nim-log.txt", "w")
def execNimCmd(project, cmd, async = True):
target = None
if projects.has_key(project):
target = projects[project]
else:
target = NimrodVimThread(project)
projects[project] = target
target.start()
result = target.postNimCmd(cmd, async)
if result != None:
log.write(result)
log.flush()
if not async:
vim.command('let l:py_res = "' + vimEscapeExpr(result) + '"')
|
import threading, Queue, subprocess, signal, os
try:
import vim
except ImportError:
class Vim:
def command(self, x):
print("Executing vim command: " + x)
vim = Vim()
def disable_sigint():
# Ignore the SIGINT signal by setting the handler to the standard
# signal handler SIG_IGN.
signal.signal(signal.SIGINT, signal.SIG_IGN)
class NimrodThread(threading.Thread):
def __init__(self):
super(NimrodThread, self).__init__()
self.tasks = Queue.Queue()
self.responses = Queue.Queue()
self.nim = subprocess.Popen(
["nimrod", "serve", "--server.type:stdin", "nimrod.nim"],
cwd = "/Users/zahary/Projects/nim/compiler",
stdin = subprocess.PIPE,
stdout = subprocess.PIPE,
stderr = subprocess.STDOUT,
universal_newlines = True,
preexec_fn = disable_sigint,
bufsize = 1)
def postNimCmd(self, msg, async = True):
self.tasks.put((msg, async))
if not async:
return self.responses.get()
def run(self):
while True:
(msg, async) = self.tasks.get()
if msg == "quit":
self.nim.terminate()
break
self.nim.stdin.write(msg + "\n")
result = ""
while True:
line = self.nim.stdout.readline()
result += line
if line == "\n":
if not async:
self.responses.put(result)
else:
self.asyncOpComplete(msg, result)
break
def vimEscapeExpr(expr):
return expr.replace("\\", "\\\\").replace('"', "\\\"").replace("\n", "\\n")
class NimrodVimThread(NimrodThread):
def asyncOpComplete(self, msg, result):
cmd = "/usr/local/bin/mvim --remote-expr 'NimrodAsyncCmdComplete(1, \"" + vimEscapeExpr(result) + "\")'"
os.system (cmd)
projects = {}
log = open("/tmp/nim-log.txt", "w")
def execNimCmd(project, cmd, async = True):
target = None
if projects.has_key(project):
target = projects[project]
else:
target = NimrodVimThread()
projects[project] = target
target.start()
result = target.postNimCmd(cmd, async)
if result != None:
log.write(result)
log.flush()
if not async:
vim.command('let l:py_res = "' + vimEscapeExpr(result) + '"')
|
mit
|
Python
|
0ba2b5f63aeb39a1d1faf6d0f9eb2626bcc86d3e
|
Create urls.py
|
minigun/likes,minigun/likes
|
urls.py
|
urls.py
|
from django.conf.urls import url
from django.contrib import admin
from django.conf.urls.static import static
from django.conf import settings
from likes import *
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^$', index),
url(r'^like', uri_manager, name='likes'),
url(r'^auth', authorize, name='auth'),
]
|
mit
|
Python
|
|
5f7dfe4b83f318cc712bc1a7ed955d6c6374f7f5
|
Create es4_windows.py
|
tjmaher/elemental-selenium-tips,tourdedave/elemental-selenium-tips,tjmaher/elemental-selenium-tips,tjmaher/elemental-selenium-tips,tjmaher/elemental-selenium-tips,tjmaher/elemental-selenium-tips,tourdedave/elemental-selenium-tips,tourdedave/elemental-selenium-tips,tourdedave/elemental-selenium-tips,tourdedave/elemental-selenium-tips,tourdedave/elemental-selenium-tips,tjmaher/elemental-selenium-tips
|
04-work-with-multiple-windows/python/es4_windows.py
|
04-work-with-multiple-windows/python/es4_windows.py
|
# http://elementalselenium.com/tips/4-work-with-multiple-windows
import os
import unittest
from selenium import webdriver
from selenium.webdriver.common.keys import Keys # for send_keys
class ES4_Windows(unittest.TestCase):
def setUp(self):
self.driver = webdriver.Firefox()
def test_example_1(self):
self.driver.get('http://the-internet.herokuapp.com/windows')
self.driver.find_element_by_css_selector('.example a').click()
self.driver.switch_to_window(self.driver.window_handles[0])
assert(self.driver.title != "New Window")
self.driver.switch_to_window(self.driver.window_handles[-1])
assert(self.driver.title == "New Window")
def test_example_2(self):
self.driver.get('http://the-internet.herokuapp.com/windows')
first_window = self.driver.window_handles[0]
self.driver.find_element_by_css_selector('.example a').click()
all_windows = self.driver.window_handles
for window in all_windows:
if window != first_window:
new_window = window
self.driver.switch_to_window(first_window)
assert(self.driver.title != "New Window")
self.driver.switch_to_window(new_window)
assert(self.driver.title == "New Window")
def tearDown(self):
self.driver.quit()
|
mit
|
Python
|
|
8c0612232a79c55b3c58a6230f590e5d6d4e36f9
|
Implement taking info parser
|
skystar-p/graduate-adventure,Jhuni0123/graduate-adventure,Jhuni0123/graduate-adventure,dnsdhrj/graduate-adventure,skystar-p/graduate-adventure,MKRoughDiamond/graduate-adventure,MKRoughDiamond/graduate-adventure,LastOne817/graduate-adventure,LastOne817/graduate-adventure,skystar-p/graduate-adventure,dnsdhrj/graduate-adventure,Jhuni0123/graduate-adventure,LastOne817/graduate-adventure,Jhuni0123/graduate-adventure,skystar-p/graduate-adventure
|
backend/core/parser.py
|
backend/core/parser.py
|
def parse_credit(text):
semester_name = {
'U000200001U000300001': '1',
'U000200001U000300002': 'S',
'U000200002U000300001': '2',
'U000200002U000300002': 'W',
}
lines = text.split('\n')[:-1]
columns = []
infos = []
for line in lines:
line = line.split('\t')
if len(columns) != len(line):
columns = line
continue
dic = to_dict(columns, line)
# 26 : course taking info
# 13 : semester info
# 9 : total info
if dic.haskey('SBJT_CD'):
taking_info = {
'year': int(dic['SCHYY']),
'semester': semester_name[dic['SHTM_FG'] + dic['DETA_SHTM_FG']],
'code': dic['SBJT_CD'],
'number': dic['LT_NO'],
'title': dic['SBJT_NM'],
'credit': int(dic['ACQ_PNT']),
'grade': dic['MRKS_GRD_CD'],
'category': dic['CPTN_SUBMATT_FG_CD_NM']
}
infos.append(taking_info)
return infos
def to_dict(columns, row):
assert len(columns) == len(row)
dic = {}
for column, content in zip(columns, row):
dic[column] = content
return dic
|
mit
|
Python
|
|
e895050634f5161d93520aaea7c7e54e329fadbe
|
add oncotator
|
raonyguimaraes/ngs_metrics,raonyguimaraes/ngs_metrics
|
bam/exome_oncotator.py
|
bam/exome_oncotator.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Filter an exome gVCF down to on-target, good-quality variants and
annotate them with Oncotator.

Pipeline:
  1. Extract plain variant records from the input gVCF
     (gvcftools ``extract_variants``).
  2. Keep only variants inside the target regions whose QUAL and FORMAT/DP
     exceed the requested thresholds (bcftools filter).
  3. Strip the gVCF-specific ``<NON_REF>`` symbolic ALT allele so the file
     is a conventional VCF that Oncotator accepts.
  4. Run Oncotator against hg19, producing ``<sample>.tsv``.
"""
from multiprocessing import Pool
import subprocess
import shlex
import logging
import argparse
import datetime
import os
from subprocess import call

parser = argparse.ArgumentParser()
parser.add_argument("-i", "--input", help="VCF file")
parser.add_argument("-t", "--target", help="VCF file")
parser.add_argument("-o", "--output", help="Output folder")
parser.add_argument("-q", "--quality", help="Quality Score Threshold")
parser.add_argument("-d", "--depth", help="Depth of Coverage Threshold")
args = parser.parse_args()

vcf_file = args.input
target_file = args.target
output = args.output
quality = int(args.quality)
depth = int(args.depth)

# All artifacts are written into the requested output folder.
if not os.path.exists(output):
    os.makedirs(output)
os.chdir(output)

# Sample name: the input file name without its extension.
base = os.path.basename(vcf_file)
base_name = os.path.splitext(base)[0]

# Tool locations.  NOTE(review): host-specific absolute paths; confirm per
# deployment (ideally these would be CLI options or environment variables).
gvcftools_path = "/home/ubuntu/projects/programs/gvcftools-0.16/bin"
vcftools_path = "/home/ubuntu/projects/programs/vcftools/vcftools-0.1.14/src"
bcftools_path = "/home/ubuntu/projects/programs/bcftools/bcftools-1.3.1"
snpeff_path = "/home/ubuntu/projects/programs/snpeff/snpEff"
oncotator_path = '/home/ubuntu/projects/input/oncotator/oncotator_v1_ds_Jan262014'

# 1) Extract plain variants from the gVCF.
print('extract vcf from gvcf')
command = """cat %s | %s/extract_variants > %s.variants.vcf""" % (vcf_file, gvcftools_path, base_name)
ret = call(command, shell=True)
print(ret)

# 2) Keep on-target variants passing the QUAL / FORMAT-DP thresholds.
command = "%s/bcftools filter -T %s -i'QUAL>%s && FMT/DP>%s' %s.variants.vcf > %s.filtered.exons.q%s.dp%s.vcf" % (bcftools_path, target_file, quality, depth, base_name, base_name, quality, depth)
ret = call(command, shell=True)
print(ret)

# 3) Remove the ',<NON_REF>' suffix from the ALT column.  Both files are now
#    closed deterministically via ``with`` (the reader was previously left
#    open).  NOTE(review): an ALT consisting of only '<NON_REF>' (no leading
#    comma) is left untouched, matching the original behavior.
filtered_vcf = "%s.filtered.exons.q%s.dp%s.vcf" % (base_name, quality, depth)
with open(filtered_vcf) as vcf_reader, open("%s.oncotator.vcf" % (base_name), 'w') as vcf_writer:
    for line in vcf_reader:
        if line.startswith("#"):
            vcf_writer.write(line)
        else:
            variant = line.split('\t')
            # Column 4 is ALT; drop the gVCF placeholder allele.
            variant[4] = variant[4].replace(',<NON_REF>', '')
            vcf_writer.write("\t".join(variant))

# 4) Annotate with Oncotator (hg19).
command = 'oncotator -v --db-dir %s -i VCF %s.oncotator.vcf %s.tsv hg19' % (oncotator_path, base_name, base_name)
ret = call(command, shell=True)
print(ret)
|
mit
|
Python
|
|
8e16ac329d459d11b971216b0eab032e81b5557e
|
Add flowline benchmarks (#489)
|
TimoRoth/oggm,OGGM/oggm,bearecinos/oggm,TimoRoth/oggm,anoukvlug/oggm,juliaeis/oggm,bearecinos/oggm,OGGM/oggm,juliaeis/oggm,anoukvlug/oggm
|
benchmarks/flowline.py
|
benchmarks/flowline.py
|
# Write the benchmarking functions here.
# See "Writing benchmarks" in the asv docs for more information.
import os
import shutil
import numpy as np
import oggm
from oggm.tests.funcs import init_hef, get_test_dir
from oggm import utils, tasks
from oggm.core import massbalance, flowline
# Scratch working directory for the benchmark glacier data; wiped and
# recreated every time this module is imported.
testdir = os.path.join(get_test_dir(), 'benchmarks')
utils.mkdir(testdir, reset=True)

# Altitude grid (m a.s.l.) and calendar-year axis for benchmark runs.
# NOTE(review): neither name is referenced in this chunk — presumably used
# by benchmarks outside this view; confirm before removing.
heights = np.linspace(2200, 3600, 120)
years = np.arange(151) + 1850
def teardown():
    """Delete the benchmark scratch directory, if it exists."""
    if not os.path.exists(testdir):
        return
    shutil.rmtree(testdir)
def setup():
    """(Re)create the Hintereisferner benchmark glacier directory.

    Stores the prepared glacier directory in the module-level ``gdir`` so
    the ``time_*`` benchmark functions can read it.  ``init_hef`` is called
    first (it may build into its own cache dir), then the result is copied
    into the scratch ``testdir`` after a clean ``teardown``.
    """
    global gdir
    gdir = init_hef(border=80)
    teardown()
    gdir = tasks.copy_to_basedir(gdir, base_dir=testdir, setup='all')
    flowline.init_present_time_glacier(gdir)
def time_hef_run_until():
    """Benchmark a single 200-year FluxBasedModel integration."""
    mb = massbalance.RandomMassBalance(gdir, bias=0, seed=0)
    flowlines = gdir.read_pickle('model_flowlines')
    flowline.FluxBasedModel(flowlines, mb_model=mb, y0=0.).run_until(200)
def time_hef_run_until_in_steps():
    """Benchmark the same 200-year run, split into 400 incremental steps."""
    mb = massbalance.RandomMassBalance(gdir, bias=0, seed=0)
    flowlines = gdir.read_pickle('model_flowlines')
    model = flowline.FluxBasedModel(flowlines, mb_model=mb, y0=0.)
    for target_year in np.linspace(0, 200, 400):
        model.run_until(target_year)
def time_hef_run_until_and_store():
    """Benchmark run_until_and_store, keeping diagnostics in memory."""
    mb = massbalance.RandomMassBalance(gdir, bias=0, seed=0)
    flowlines = gdir.read_pickle('model_flowlines')
    flowline.FluxBasedModel(flowlines, mb_model=mb, y0=0.).run_until_and_store(200)
def time_hef_run_until_and_store_with_nc():
    """Benchmark run_until_and_store while writing run/diag NetCDF files."""
    mb = massbalance.RandomMassBalance(gdir, bias=0, seed=0)
    flowlines = gdir.read_pickle('model_flowlines')
    model = flowline.FluxBasedModel(flowlines, mb_model=mb, y0=0.)
    run_nc = os.path.join(testdir, 'run.nc')
    diag_nc = os.path.join(testdir, 'diag.nc')
    model.run_until_and_store(200, run_path=run_nc, diag_path=diag_nc)
# asv benchmark hooks: give every timing function a freshly prepared glacier
# directory before each run, and clean the scratch dir up afterwards.
time_hef_run_until.setup = setup
time_hef_run_until.teardown = teardown
time_hef_run_until_in_steps.setup = setup
time_hef_run_until_in_steps.teardown = teardown
time_hef_run_until_and_store.setup = setup
time_hef_run_until_and_store.teardown = teardown
time_hef_run_until_and_store_with_nc.setup = setup
time_hef_run_until_and_store_with_nc.teardown = teardown
|
bsd-3-clause
|
Python
|
|
59c173bf4179fc77eacf97f3f08359498d35a635
|
Add switch class for good utility
|
stvreumi/electronic-blackboard,SWLBot/electronic-blackboard,chenyang14/electronic-blackboard,SWLBot/electronic-blackboard,Billy4195/electronic-blackboard,stvreumi/electronic-blackboard,Billy4195/electronic-blackboard,stvreumi/electronic-blackboard,chenyang14/electronic-blackboard,stvreumi/electronic-blackboard,Billy4195/electronic-blackboard,SWLBot/electronic-blackboard,chenyang14/electronic-blackboard,Billy4195/electronic-blackboard,SWLBot/electronic-blackboard
|
util.py
|
util.py
|
# coding=utf-8
"""
The switch class refers to PEP 275 (Marc-André Lemburg <mal at lemburg.com>)
(https://www.python.org/dev/peps/pep-0275/)
"""
class switch(object):
    """Switch/case emulation after PEP 275.

    Usage::

        for case in switch(value):
            if case('a'):
                ...
                break
            if case():  # default
                ...
    """

    def __init__(self, value):
        self.value = value
        # Set to True once a case has matched, enabling fall-through:
        # every subsequent case() call then returns True.
        self.fall = False

    def __iter__(self):
        """Yield the match method once, then stop."""
        yield self.match
        # Bug fix: the original ``raise StopIteration`` here becomes a
        # RuntimeError under PEP 479 (Python >= 3.7) whenever no case
        # breaks out of the loop.  A plain return ends the generator
        # cleanly and raises StopIteration for the caller implicitly.
        return

    def match(self, *args):
        """Indicate whether or not to enter a case suite."""
        if self.fall or not args:
            # Fall-through from a previous match, or the default case().
            return True
        elif self.value in args:
            self.fall = True
            return True
        else:
            return False
|
apache-2.0
|
Python
|
|
3cb47114dc40ebbbcc946c79946ec59813a7af7f
|
add email module
|
texastribune/salesforce-stripe,texastribune/salesforce-stripe,MinnPost/salesforce-stripe,MinnPost/salesforce-stripe,MinnPost/salesforce-stripe,texastribune/salesforce-stripe
|
emails.py
|
emails.py
|
import smtplib
from config import MAIL_SERVER
from config import MAIL_PORT
from config import MAIL_USERNAME
from config import MAIL_PASSWORD
from config import DEFAULT_MAIL_SENDER
def send_email(recipient, subject, body, sender=None):
    """Send a plain-text email through the configured SMTP server over TLS.

    Args:
        recipient: a single address (str) or a list of addresses.
        subject: message subject line.
        body: plain-text message body.
        sender: optional From address; defaults to DEFAULT_MAIL_SENDER.

    Failures are reported by printing a message; no exception propagates.
    """
    if sender is None:
        FROM = DEFAULT_MAIL_SENDER
    else:
        FROM = sender
    TO = recipient if type(recipient) is list else [recipient]

    # Build an RFC 2822-style message: headers, blank line, then the body.
    # Bug fix: the previous literal began with ``\From:`` — ``\F`` is not a
    # valid escape, so a stray backslash corrupted the From header.
    message = "From: %s\nTo: %s\nSubject: %s\n\n%s" % (
        FROM, ", ".join(TO), subject, body)

    try:
        server = smtplib.SMTP(MAIL_SERVER, MAIL_PORT)
        server.ehlo()
        server.starttls()
        server.login(MAIL_USERNAME, MAIL_PASSWORD)
        server.sendmail(FROM, TO, message)
        server.close()
        print('successfully sent the mail')
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # are no longer swallowed; behavior otherwise unchanged.
        print('failed to send mail')
|
mit
|
Python
|
|
b3959854da150cf734cda3521efd14ee0565d352
|
Create Rotor.py
|
pkeating/Enigma-Machine
|
EnigmaMachine/Rotor.py
|
EnigmaMachine/Rotor.py
|
class Rotor(object):
    """One Enigma-machine rotor: a 26-way letter substitution that can be
    stepped (rotated) and signals when its turnover notch is reached.

    Letters are represented as integers 0-25 throughout.  NOTE(review):
    this is Python 2 code — it relies on ``print`` statements and on
    ``map`` returning a list.
    """
    def __init__(self, rotor_config, starting_position, config_file):
        # ``rotor_config``: section name in ``config_file`` describing this
        # rotor; ``starting_position``: initial offset 0-25.
        # If starting_position is not between 0 and 25, a ValueError is raised.
        if starting_position < 0 or starting_position > 25:
            raise ValueError(
                "starting_position in Rotor must be between 0 and 25"
            )
        # If rotor_config is not in the configuration file, a value error
        # is raised.
        if config_file.has_section(rotor_config) == False:
            print config_file.sections()
            raise ValueError('Rotor config %r not found' % rotor_config)
        # Sets rotor_id, mainly used for debugging.
        self.rotor_id = config_file.getint(rotor_config, 'rotor_id')
        # Imports the rotor map, converts it to a list, and converts
        # each element of the list to an integer.
        # (ord(x) - 65 maps 'A'..'Z' to 0..25.)
        self.rotor_map = map(
            lambda x: ord(x) - 65,
            list(config_file.get(rotor_config, 'rotor_map'))
        )
        # Imports the turnover positions, converts them to a list and
        # converts each element of the list to an integer.
        self.turnover_positions = map(
            lambda x: ord(x) - 65,
            config_file.get(rotor_config, 'turnover_positions').split(',')
        )
        # Sets current_position equal to starting_position.
        self.current_position = starting_position
        # Rotates the rotor into the starting position. Rotor_map set above
        # assumes a current position equal to zero.  Each iteration performs
        # the same pop/insert + offset shift that rotate() applies per step.
        for i in range(0, self.current_position):
            self.rotor_map.insert(0, self.rotor_map.pop())
            self.rotor_map = map(lambda x: (x + 1) % 26, self.rotor_map)
    def convertFwd(self, letter):
        """Map ``letter`` (0-25) on the signal's forward pass through the rotor."""
        # Returns the output location of the electrical current on its initial
        # trip through the rotor.
        return self.rotor_map[letter]
    def convertRev(self, letter):
        """Map ``letter`` (0-25) on the signal's return pass (inverse lookup)."""
        # Returns the output location of the electical current on its return
        # trip through the rotor.
        return self.rotor_map.index(letter)
    def rotate(self): # Rotates the rotor one position.
        """Advance the rotor one step; return True if the next rotor should step."""
        # Adds one to the current position. This is used later on in the
        # rotate() method to decide if the next rotor should be rotated.
        self.current_position = (self.current_position + 1) % 26
        # Removes the last item of rotor_map and moves it to the front of the
        # list. This simulates the circular nature of the rotors.
        self.rotor_map.insert(0, self.rotor_map.pop())
        # Adds one to each element in rotor_map. This simulates the physical
        # movement of the rotors.
        self.rotor_map = map(lambda x: (x + 1) % 26, self.rotor_map)
        # If the current position is a turnover position, then True is returned
        # otherwise False is returned. This is used to determine if the next
        # rotor needs to be rotated.
        if self.current_position in self.turnover_positions:
            return True
        else:
            return False
    def currentState(self):
        """Print this rotor's full internal state (debug aid)."""
        # Used for debugging. Prints rotor_id, current_position,
        # rotor_map, and turnover_position.
        print 'rotor_id = %r' % self.rotor_id
        print 'current_position = %r' % self.current_position
        print 'turnover_positions = %r' % self.turnover_positions
        print 'rotor_map = %r' % self.rotor_map
|
mit
|
Python
|
|
f301aa5ecdc3f9d821a72bb6a03d9857c43e72fc
|
add missing file
|
lxc-webpanel/lxc-rest
|
app/fields/auth.py
|
app/fields/auth.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from flask_restplus import fields
from app import api
# Response model for successful authentication: the issued access token.
auth_fields_get = api.model('AuthGet', { 'access_token': fields.String })

# Request model for the login endpoint: both credentials are mandatory.
auth_fields_post = api.model('AuthPost', {
    'username': fields.String(required=True),
    'password': fields.String(required=True)
    }
)
|
mit
|
Python
|
|
2785b8d5b1338126b6887b23fefec106ed877601
|
Add first 16 hexagrams
|
urschrei/hexagrams,urschrei/hexagrams
|
hexagrams.py
|
hexagrams.py
|
# -*- coding: utf-8 -*-
# I Ching hexagrams 1-16, keyed by Wade-Giles romanisation.
# Each value is the hexagram's six lines: 1 = solid (yang), 0 = broken (yin).
# NOTE(review): whether the tuple runs bottom-to-top or top-to-bottom is not
# established here — confirm against the consumer before relying on order.
#
# Bug fix: hexagrams 1 and 15 share the romanisation "Ch'ien", so the
# duplicate dict key made entry 15 silently overwrite entry 1, losing the
# all-yang hexagram.  Entry 15 (Modesty) is now keyed "Ch'ien (Modesty)".
# The trailing space in the "T'ung Jên" key has also been removed.
hexagrams = {
    "Ch'ien": (1, 1, 1, 1, 1, 1),
    "K'un": (0, 0, 0, 0, 0, 0),
    "Chun": (0, 1, 0, 0, 0, 1),
    "Mêng": (1, 0, 0, 0, 1, 0),
    "Hsü": (0, 1, 0, 1, 1, 1),
    "Sung": (1, 1, 1, 0, 1, 0),
    "Shih": (0, 0, 0, 0, 1, 0),
    "Pi": (0, 1, 0, 0, 0, 0),
    "Hsiao Ch'u": (1, 1, 0, 1, 1, 1),
    "Lü": (1, 1, 1, 0, 1, 1),
    "T'ai": (0, 0, 0, 1, 1, 1),
    "P'i": (1, 1, 1, 0, 0, 0),
    "T'ung Jên": (1, 1, 1, 1, 0, 1),
    "Ta Yu": (1, 0, 1, 1, 1, 1),
    "Ch'ien (Modesty)": (0, 0, 0, 1, 0, 0),
    "Yü": (0, 0, 1, 0, 0, 0)
}
|
mit
|
Python
|
|
11cc94f4a6545b49317042df4edf1804ccc3bdd8
|
add a python script to export the links
|
mhansen/nzwirelessmap,mhansen/nzwirelessmap,mhansen/nzwirelessmap
|
export.py
|
export.py
|
#!/usr/bin/env python
"""
A quick script to turn the database into KML showing pairs of point-to-point links
"""
import sys
import sqlite3
from xml.sax.saxutils import escape

# NOTE(review): Python 2 script (print statements).  Reads link pairs from a
# local SQLite database and writes a KML document to stdout.
conn = sqlite3.connect("prism.sqlite3")
c = conn.cursor()
# The pairing query lives in a separate SQL file kept alongside the script.
sql = open("getpairs.sql").read()
c.execute(sql)
# KML preamble: document metadata plus the shared line/polygon style.
print """<?xml version="1.0" encoding="UTF-8"?>
<kml xmlns="http://www.opengis.net/kml/2.2">
<Document>
<name>Paths</name>
<description>Examples of paths. Note that the tessellate tag is by default
set to 0. If you want to create tessellated lines, they must be authored
(or edited) directly in KML.</description>
<Style id="yellowLineGreenPoly">
<LineStyle>
<color>7f00ffff</color>
<width>4</width>
</LineStyle>
<PolyStyle>
<color>7f00ff00</color>
</PolyStyle>
</Style>"""
# One Placemark per licence: a line from receiver to transmitter.
# Names are XML-escaped; \x12 bytes (stray control chars in the source
# data, presumably) are stripped.
for licenceid, rxlng, rxlat, rxalt, txlng, txlat, txalt, rxname, txname in c:
    print """
    <Placemark>
      <name>Licence ID %s</name>
      <description>rx: %s, tx: %s</description>
      <styleUrl>#yellowLineGreenPoly</styleUrl>
      <LineString>
        <extrude>1</extrude>
        <altitudeMode>absolute</altitudeMode>
        <coordinates>
            %s,%s,%s
            %s,%s,%s
        </coordinates>
      </LineString>
    </Placemark>""" % (licenceid, escape(rxname).replace("\x12",""), escape(txname).replace("\x12",""), rxlng, rxlat, rxalt, txlng, txlat, txalt)
print """
</Document>
</kml>
"""
|
mit
|
Python
|
|
2d7d4b533999c029a33d7802d7ddfba631e07604
|
Add Django 1.4 style wsgi app settings file
|
rinfo/fst,kamidev/autobuild_fst,rinfo/fst,kamidev/autobuild_fst,kamidev/autobuild_fst,rinfo/fst,kamidev/autobuild_fst,rinfo/fst
|
wsgi.py
|
wsgi.py
|
""" WSGI application settings for FST instance
The default setup at Domstolsverket assumes every instance of FST is
running under it's own virtualenv.
"""
import os
import sys
import site
#VIRTUALENV_PATH = '/path/of/your/virtualenv'
PYTHON_SITE_PACKAGES = 'lib/python2.6/site-packages'
# Specify the site-packages folder of your virtualenv
# ALLDIRS = ['/opt/rinfo/fst/instances/fffs/lib/python2.6/site-packages']
ALLDIRS = [os.path.join(VIRTUALENV_PATH, PYTHON_SITE_PACKAGES)]
# Redirect sys.stdout to sys.stderr for bad libraries like geopy that uses
# print statements for optional import exceptions.
sys.stdout = sys.stderr
prev_sys_path = list(sys.path)
# Add all third-party libraries from your virtualenv
for directory in ALLDIRS:
site.addsitedir(directory)
# Reorder sys.path so new directories come first.
new_sys_path = []
for item in list(sys.path):
if item not in prev_sys_path:
new_sys_path.append(item)
sys.path.remove(item)
sys.path[:0] = new_sys_path
# Activate the virtualenv
activate_this = os.path.join(VIRTUALENV_PATH, 'bin/activate_this.py')
execfile(activate_this, dict(__file__=activate_this))
from os.path import abspath, dirname, join
# Some more path trickery...
sys.path.insert(0, abspath(join(dirname(__file__), "../../")))
# Now we can run our Django app under WSGI!
from django.conf import settings
os.environ["DJANGO_SETTINGS_MODULE"] = "fst_web.settings"
path = os.path.dirname(__file__)
subpath = path + os.sep + "fst_web"
if subpath not in sys.path:
sys.path.insert(0,subpath)
if path not in sys.path:
sys.path.insert(0,path)
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
bsd-3-clause
|
Python
|
|
c131e6108a72c57af4d3bdbe67d182d6c0ddb1eb
|
Add migration to modify on_delete
|
GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin
|
geotrek/feedback/migrations/0008_auto_20200326_1252.py
|
geotrek/feedback/migrations/0008_auto_20200326_1252.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.27 on 2020-03-26 12:52
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration switching three nullable Report foreign keys
    (activity, category, problem_magnitude) to ``on_delete=SET_NULL``, so
    deleting a referenced row clears the reference on the report instead of
    affecting the report row itself."""

    dependencies = [
        ('feedback', '0007_auto_20200324_1412'),
    ]

    operations = [
        migrations.AlterField(
            model_name='report',
            name='activity',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='feedback.ReportActivity', verbose_name='Activity'),
        ),
        migrations.AlterField(
            model_name='report',
            name='category',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='feedback.ReportCategory', verbose_name='Category'),
        ),
        migrations.AlterField(
            model_name='report',
            name='problem_magnitude',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='feedback.ReportProblemMagnitude', verbose_name='Problem magnitude'),
        ),
    ]
|
bsd-2-clause
|
Python
|
|
a8f3cb864b39ef3178af652bfee4dcb6775baa71
|
Make sure tags/attributes are converted correctly
|
feend78/evennia,jamesbeebop/evennia,feend78/evennia,feend78/evennia,jamesbeebop/evennia,feend78/evennia,jamesbeebop/evennia
|
evennia/typeclasses/migrations/0009_rename_player_cmdsets_typeclasses.py
|
evennia/typeclasses/migrations/0009_rename_player_cmdsets_typeclasses.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-09 21:33
from __future__ import unicode_literals
import re
from django.db import migrations
# Words whose tail keeps its original case when substituted (empty here, so
# the "special cases" branch below never fires in this migration).
CASE_WORD_EXCEPTIONS = []


def _case_sensitive_replace(string, old, new):
    """
    Replace text, retaining exact case.

    Args:
        string (str): String in which to perform replacement.
        old (str): Word or substring to replace.
        new (str): What to replace `old` with.

    Returns:
        repl_string (str): Version of string where instances of
        `old` has been replaced with `new`, retaining case.

    Used below only for the single-word replacement 'player' -> 'account'.
    """
    def repl(match):
        current = match.group()
        # treat multi-word sentences word-by-word
        old_words = current.split(" ")
        new_words = new.split(" ")
        out = []
        for old_word, new_word in zip(old_words, new_words):
            result = []
            all_upper = True
            # Copy the case pattern of old_word onto new_word, char by char.
            # NOTE(review): ``chr`` shadows the builtin, and ``ind`` is
            # reused after the loop — if old_word were empty this would
            # raise NameError; never hit by the 'player' input below.
            for ind, chr in enumerate(old_word):
                # NOTE(review): compares against len(new) (the whole
                # replacement string), not len(new_word) — equivalent for
                # single-word replacements like this migration's, but looks
                # like a latent bug for multi-word ones; confirm intent.
                if ind >= len(new):
                    break
                if chr.isupper():
                    result.append(new_word[ind].upper())
                else:
                    result.append(new_word[ind].lower())
                    all_upper = False
            # special cases - keep remaing case)
            if new_word.lower() in CASE_WORD_EXCEPTIONS:
                result.append(new_word[ind+1:])
            # append any remaining characters from new
            elif all_upper:
                result.append(new_word[ind+1:].upper())
            else:
                result.append(new_word[ind+1:].lower())
            out.append("".join(result))
        # if we have more new words than old ones, just add them verbatim
        out.extend([new_word for ind, new_word in enumerate(new_words) if ind >= len(old_words)])
        return " ".join(out)
    if string is None:
        return None
    # Case-insensitive match on ``old``; repl() restores the case per hit.
    regex = re.compile(re.escape(old), re.I)
    return regex.sub(repl, string)
def update_typeclasses(apps, schema_editor):
    """Data migration: rename 'player' to 'account' (case-preserving) in
    typeclass paths, cmdset storage and lock strings of Object, Account,
    Script and Channel rows, and retarget Attribute/Tag rows from the old
    'playerdb' model label to 'accountdb'.

    Uses the historical models from ``apps`` (never the live ones), as
    required inside migrations.
    """
    ObjectDB = apps.get_model('objects', 'ObjectDB')
    AccountDB = apps.get_model('accounts', 'AccountDB')
    ScriptDB = apps.get_model('scripts', 'ScriptDB')
    ChannelDB = apps.get_model('comms', 'ChannelDB')
    Attributes = apps.get_model('typeclasses', 'Attribute')
    Tags = apps.get_model('typeclasses', 'Tag')
    # Objects and Accounts carry all three fields; Scripts and Channels
    # have no cmdset storage.
    for obj in ObjectDB.objects.all():
        obj.db_typeclass_path = _case_sensitive_replace(obj.db_typeclass_path, 'player', 'account')
        obj.db_cmdset_storage = _case_sensitive_replace(obj.db_cmdset_storage, 'player', 'account')
        obj.db_lock_storage = _case_sensitive_replace(obj.db_lock_storage, 'player', 'account')
        obj.save(update_fields=['db_typeclass_path', 'db_cmdset_storage', 'db_lock_storage'])
    for obj in AccountDB.objects.all():
        obj.db_typeclass_path = _case_sensitive_replace(obj.db_typeclass_path, 'player', 'account')
        obj.db_cmdset_storage = _case_sensitive_replace(obj.db_cmdset_storage, 'player', 'account')
        obj.db_lock_storage = _case_sensitive_replace(obj.db_lock_storage, 'player', 'account')
        obj.save(update_fields=['db_typeclass_path', 'db_cmdset_storage', 'db_lock_storage'])
    for obj in ScriptDB.objects.all():
        obj.db_typeclass_path = _case_sensitive_replace(obj.db_typeclass_path, 'player', 'account')
        obj.db_lock_storage = _case_sensitive_replace(obj.db_lock_storage, 'player', 'account')
        obj.save(update_fields=['db_typeclass_path', 'db_lock_storage'])
    for obj in ChannelDB.objects.all():
        obj.db_typeclass_path = _case_sensitive_replace(obj.db_typeclass_path, 'player', 'account')
        obj.db_lock_storage = _case_sensitive_replace(obj.db_lock_storage, 'player', 'account')
        obj.save(update_fields=['db_typeclass_path', 'db_lock_storage'])
    # Attributes/Tags store the owning model's label as a plain string.
    for obj in Attributes.objects.filter(db_model='playerdb'):
        obj.db_model = 'accountdb'
        obj.save(update_fields=['db_model'])
    for obj in Tags.objects.filter(db_model='playerdb'):
        obj.db_model = 'accountdb'
        obj.save(update_fields=['db_model'])
class Migration(migrations.Migration):
    """Run the player->account data rewrite defined above.

    NOTE(review): no reverse_code is supplied, so this migration is
    irreversible."""

    dependencies = [
        ('typeclasses', '0008_lock_and_perm_rename'),
    ]

    operations = [
        migrations.RunPython(update_typeclasses),
    ]
|
bsd-3-clause
|
Python
|
|
b42c13de01a49e7fe3fb7caa22089ea1cd87f7bf
|
Add sanity tests for baremetal power state commands
|
openstack/python-ironicclient,openstack/python-ironicclient
|
ironicclient/tests/functional/osc/v1/test_baremetal_node_power_states.py
|
ironicclient/tests/functional/osc/v1/test_baremetal_node_power_states.py
|
# Copyright (c) 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ironicclient.tests.functional.osc.v1 import base
class PowerStateTests(base.TestCase):
    """Functional tests for baremetal node power state commands."""

    def setUp(self):
        super(PowerStateTests, self).setUp()
        self.node = self.node_create()

    def _power_state(self):
        """Fetch the node's current ``power_state`` field via node show."""
        return self.node_show(self.node['uuid'], ['power_state'])['power_state']

    def test_off_reboot_on(self):
        """Power the test node off, reboot it, and check it ends up on."""
        uuid = self.node['uuid']
        self.openstack('baremetal node power off {0}'.format(uuid))
        self.assertEqual('power off', self._power_state())
        self.openstack('baremetal node reboot {0}'.format(uuid))
        self.assertEqual('power on', self._power_state())

    def test_on_reboot_on(self):
        """Power the test node on, reboot it, and check it is still on."""
        uuid = self.node['uuid']
        self.openstack('baremetal node power on {0}'.format(uuid))
        self.assertEqual('power on', self._power_state())
        self.openstack('baremetal node reboot {0}'.format(uuid))
        self.assertEqual('power on', self._power_state())
|
apache-2.0
|
Python
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.