repo_name: string (lengths 5 to 92) | path: string (lengths 4 to 232) | copies: string (19 classes) | size: string (lengths 4 to 7) | content: string (lengths 721 to 1.04M) | license: string (15 classes) | hash: int64 (-9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean: float64 (6.51 to 99.9) | line_max: int64 (15 to 997) | alpha_frac: float64 (0.25 to 0.97) | autogenerated: bool (1 class) |
---|---|---|---|---|---|---|---|---|---|---|
egabancho/invenio | invenio/modules/formatter/models.py | 1 | 4792 |
# -*- coding: utf-8 -*-
#
## This file is part of Invenio.
## Copyright (C) 2013 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
invenio.modules.formatter.models
-----------------------------------
Database access related functions for Formatter engine and
administration pages.
"""
from invenio.ext.sqlalchemy import db
from invenio.modules.records.models import Record as Bibrec
class Format(db.Model):
"""Represents a Format record."""
__tablename__ = 'format'
id = db.Column(
db.MediumInteger(9, unsigned=True),
primary_key=True,
autoincrement=True)
name = db.Column(db.String(255), nullable=False)
code = db.Column(db.String(20), nullable=False, unique=True)
description = db.Column(db.String(255), server_default='')
content_type = db.Column(db.String(255), server_default='')
mime_type = db.Column(db.String(255), unique=True, nullable=True)
visibility = db.Column(
db.TinyInteger(4),
nullable=False,
server_default='1')
last_updated = db.Column(db.DateTime, nullable=True)
@classmethod
def get_export_formats(cls):
return cls.query.filter(db.and_(
Format.content_type != 'text/html',
Format.visibility == 1)
).order_by(Format.name).all()
def set_name(self, name, lang="generic", type='ln'):
"""
Sets the name of an output format.
If 'type' is different from 'ln' or 'sn', do nothing.
If 'name' exceeds 256 chars, 'name' is truncated to its first 256 chars.
The localized names of output formats are located in the formatname table.
:param type: either 'ln' (for long name) or 'sn' (for short name)
:param lang: the language in which the name is given
:param name: the name to give to the output format
"""
if len(name) > 256:
name = name[:256]
if type.lower() != "sn" and type.lower() != "ln":
return
if lang == "generic" and type.lower() == "ln":
self.name = name
else:
# Save inside formatname table for name variations
fname = db.session.query(Formatname)\
.get((self.id, lang, type.lower()))
if not fname:
fname = db.session.merge(Formatname())
fname.id_format = self.id
fname.ln = lang
fname.type = type.lower()
fname.value = name
db.session.save(fname)
db.session.add(self)
db.session.commit()
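# Illustrative sketch, not part of the original Invenio module: how set_name()
# above is typically driven. The format object, names and language codes here
# are hypothetical values chosen only for the example.
def _example_set_format_names(fmt):
    """Exercise Format.set_name() with a generic and a localized name."""
    # A generic long name is stored directly on the Format row.
    fmt.set_name("HTML brief", lang="generic", type="ln")
    # A localized short name goes into the formatname table instead.
    fmt.set_name("hb", lang="en", type="sn")
    # Anything other than 'ln'/'sn' is silently ignored by set_name().
    fmt.set_name("ignored", lang="en", type="xx")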
class Formatname(db.Model):
"""Represents a Formatname record."""
__tablename__ = 'formatname'
id_format = db.Column(
db.MediumInteger(9, unsigned=True),
db.ForeignKey(Format.id),
primary_key=True)
ln = db.Column(
db.Char(5),
primary_key=True,
server_default='')
type = db.Column(
db.Char(3),
primary_key=True,
server_default='sn')
value = db.Column(db.String(255), nullable=False)
format = db.relationship(Format, backref='names')
#TODO add association proxy with key (id_format, ln, type)
class Bibfmt(db.Model):
"""Represents a Bibfmt record."""
__tablename__ = 'bibfmt'
id_bibrec = db.Column(
db.MediumInteger(8, unsigned=True),
db.ForeignKey(Bibrec.id),
nullable=False,
server_default='0',
primary_key=True,
autoincrement=False)
format = db.Column(
db.String(10),
nullable=False,
server_default='',
primary_key=True,
index=True)
kind = db.Column(
db.String(10),
nullable=False,
server_default='',
index=True
)
last_updated = db.Column(
db.DateTime,
nullable=False,
server_default='1900-01-01 00:00:00',
index=True)
value = db.Column(db.iLargeBinary)
needs_2nd_pass = db.Column(db.TinyInteger(1), server_default='0')
bibrec = db.relationship(Bibrec, backref='bibfmt')
__all__ = [
'Format',
'Formatname',
'Bibfmt',
]
| gpl-2.0 | -4,194,392,146,217,412,000 | 27.023392 | 78 | 0.600167 | false |
dlage/spotifykodiweb | spotifykodiweb/settings/defaults.py | 1 | 2986 |
"""
Django settings for spotifykodiweb project.
Generated by 'django-admin startproject' using Django 1.8.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
from .secrets import *
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.admindocs',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'social.apps.django_app.default',
'bootstrap3',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'spotifykodiweb.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, '../templates')]
,
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'social.apps.django_app.context_processors.backends',
'social.apps.django_app.context_processors.login_redirect',
],
},
},
]
WSGI_APPLICATION = 'spotifykodiweb.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, '../db.sqlite3'),
}
}
# Authentication Back-ends
AUTHENTICATION_BACKENDS = (
'social.backends.spotify.SpotifyOAuth2',
'django.contrib.auth.backends.ModelBackend',
)
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
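# Illustrative sketch, not part of the original settings module: a hypothetical
# production settings file would import these defaults and override the
# development-only values. The module name and host below are assumptions.
#
#     # spotifykodiweb/settings/production.py
#     from .defaults import *
#
#     DEBUG = False
#     ALLOWED_HOSTS = ['kodi.example.org']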
| mit | -7,200,818,063,775,890,000 | 25.900901 | 75 | 0.689216 | false |
wichert/pyramid_jwt | tests/test_policy.py | 1 | 9249 |
# vim: fileencoding=utf-8
import warnings
from datetime import timedelta
from webob import Request
from zope.interface.verify import verifyObject
from pyramid.security import forget
from pyramid.security import remember
from pyramid.testing import testConfig
from pyramid.testing import DummyRequest
from pyramid.testing import DummySecurityPolicy
from pyramid.interfaces import IAuthenticationPolicy
from pyramid_jwt.policy import (
JWTAuthenticationPolicy,
PyramidJSONEncoderFactory,
JWTCookieAuthenticationPolicy,
)
import uuid
import pytest
from json.encoder import JSONEncoder
from uuid import UUID
def test_interface():
verifyObject(IAuthenticationPolicy, JWTAuthenticationPolicy("secret"))
def test_token_must_be_str():
policy = JWTAuthenticationPolicy("secret")
token = policy.create_token(15)
assert isinstance(token, str)
def test_minimal_roundtrip():
policy = JWTAuthenticationPolicy("secret")
request = Request.blank("/")
request.authorization = ("JWT", policy.create_token(15))
request.jwt_claims = policy.get_claims(request)
assert policy.unauthenticated_userid(request) == 15
def test_audience_valid():
policy = JWTAuthenticationPolicy("secret", audience="example.org")
token = policy.create_token(15, name="Jöhn", admin=True, audience="example.org")
request = Request.blank("/")
request.authorization = ("JWT", token)
jwt_claims = policy.get_claims(request)
assert jwt_claims["aud"] == "example.org"
def test_audience_invalid():
policy = JWTAuthenticationPolicy("secret", audience="example.org")
token = policy.create_token(15, name="Jöhn", admin=True, audience="example.com")
request = Request.blank("/")
request.authorization = ("JWT", token)
jwt_claims = policy.get_claims(request)
assert jwt_claims == {}
def test_algorithm_unsupported():
policy = JWTAuthenticationPolicy("secret", algorithm="SHA1")
with pytest.raises(NotImplementedError):
token = policy.create_token(15, name="Jöhn", admin=True)
def test_extra_claims():
policy = JWTAuthenticationPolicy("secret")
token = policy.create_token(15, name="Jöhn", admin=True)
request = Request.blank("/")
request.authorization = ("JWT", token)
jwt_claims = policy.get_claims(request)
assert jwt_claims["name"] == "Jöhn"
assert jwt_claims["admin"]
def test_wrong_auth_scheme():
policy = JWTAuthenticationPolicy("secret")
request = Request.blank("/")
request.authorization = ("Other", policy.create_token(15))
request.jwt_claims = policy.get_claims(request)
assert policy.unauthenticated_userid(request) is None
def test_invalid_authorization_header():
policy = JWTAuthenticationPolicy("secret")
request = Request.blank("/")
request.environ["HTTP_AUTHORIZATION"] = "token"
request.jwt_claims = policy.get_claims(request)
assert policy.unauthenticated_userid(request) is None
def test_other_header():
policy = JWTAuthenticationPolicy("secret", http_header="X-Token")
request = Request.blank("/")
request.headers["X-Token"] = policy.create_token(15)
request.jwt_claims = policy.get_claims(request)
assert policy.unauthenticated_userid(request) == 15
def test_expired_token():
policy = JWTAuthenticationPolicy("secret", expiration=-1)
request = Request.blank("/")
request.authorization = ("JWT", policy.create_token(15))
request.jwt_claims = policy.get_claims(request)
assert policy.unauthenticated_userid(request) is None
policy.leeway = 5
request.jwt_claims = policy.get_claims(request)
assert policy.unauthenticated_userid(request) == 15
def test_dynamic_expired_token():
policy = JWTAuthenticationPolicy("secret", expiration=-1)
request = Request.blank("/")
request.authorization = ("JWT", policy.create_token(15, expiration=5))
request.jwt_claims = policy.get_claims(request)
assert policy.unauthenticated_userid(request) == 15
policy = JWTAuthenticationPolicy("secret")
request.authorization = ("JWT", policy.create_token(15, expiration=-1))
request.jwt_claims = policy.get_claims(request)
assert policy.unauthenticated_userid(request) is None
request.authorization = ("JWT", policy.create_token(15))
request.jwt_claims = policy.get_claims(request)
assert policy.unauthenticated_userid(request) == 15
def test_remember_warning():
policy = JWTAuthenticationPolicy("secret", http_header="X-Token")
with testConfig() as config:
config.set_authorization_policy(DummySecurityPolicy())
config.set_authentication_policy(policy)
request = DummyRequest()
with warnings.catch_warnings(record=True) as w:
remember(request, 15)
assert len(w) == 1
assert issubclass(w[-1].category, UserWarning)
assert "JWT tokens" in str(w[-1].message)
assert w[-1].filename.endswith("test_policy.py")
def test_forget_warning():
policy = JWTAuthenticationPolicy("secret", http_header="X-Token")
with testConfig() as config:
config.set_authorization_policy(DummySecurityPolicy())
config.set_authentication_policy(policy)
request = DummyRequest()
with warnings.catch_warnings(record=True) as w:
forget(request)
assert len(w) == 1
assert issubclass(w[-1].category, UserWarning)
assert "JWT tokens" in str(w[-1].message)
assert w[-1].filename.endswith("test_policy.py")
def test_default_json_encoder():
policy = JWTAuthenticationPolicy("secret")
assert isinstance(policy.json_encoder, PyramidJSONEncoderFactory)
assert isinstance(policy.json_encoder(), JSONEncoder)
class MyCustomJsonEncoder(JSONEncoder):
def default(self, o):
if type(o) is UUID:
return str(o)
# Let the base class default method raise the TypeError
return JSONEncoder.default(self, o)
def test_custom_json_encoder():
policy = JWTAuthenticationPolicy("secret")
principal_id = uuid.uuid4()
claim_value = uuid.uuid4()
with pytest.raises(TypeError):
token = policy.create_token("subject", uuid_value=claim_value)
policy = JWTAuthenticationPolicy("secret", json_encoder=MyCustomJsonEncoder)
request = Request.blank("/")
request.authorization = (
"JWT",
policy.create_token(principal_id, uuid_value=claim_value),
)
request.jwt_claims = policy.get_claims(request)
assert policy.unauthenticated_userid(request) == str(principal_id)
assert request.jwt_claims.get("uuid_value") == str(claim_value)
def test_cookie_policy_creation():
token_policy = JWTAuthenticationPolicy("secret")
request = Request.blank("/")
cookie_policy = JWTCookieAuthenticationPolicy.make_from(token_policy)
headers = cookie_policy.remember(request, "user")
assert isinstance(headers, list)
assert len(headers) == 1
def test_cookie_policy_creation_fail():
with pytest.raises(ValueError) as e:
JWTCookieAuthenticationPolicy.make_from(object())
assert "Invalid policy type" in str(e.value)
def test_cookie_policy_remember():
policy = JWTCookieAuthenticationPolicy("secret")
request = Request.blank("/")
headers = policy.remember(request, "user")
header, cookie = headers[0]
assert header.lower() == "set-cookie"
chunks = cookie.split("; ")
assert chunks[0].startswith(f"{policy.cookie_name}=")
assert "HttpOnly" in chunks
assert "secure" in chunks
def test_cookie_policy_forget():
policy = JWTCookieAuthenticationPolicy("secret")
request = Request.blank("/")
headers = policy.forget(request)
header, cookie = headers[0]
assert header.lower() == "set-cookie"
chunks = cookie.split("; ")
cookie_values = [c for c in chunks if "=" in c]
assert cookie_values[0].startswith(f"{policy.cookie_name}=")
assert "Max-Age=0" in chunks
assert hasattr(request, "_jwt_cookie_reissue_revoked")
def test_cookie_policy_custom_domain_list():
policy = JWTCookieAuthenticationPolicy("secret")
request = Request.blank("/")
domains = [request.domain, "other"]
headers = policy.remember(request, "user", domains=domains)
assert len(headers) == 2
_, cookie1 = headers[0]
_, cookie2 = headers[1]
assert f"Domain={request.domain}" in cookie1
assert f"Domain=other" in cookie2
def test_insecure_cookie_policy():
policy = JWTCookieAuthenticationPolicy("secret", https_only=False)
request = Request.blank("/")
headers = policy.forget(request)
_, cookie = headers[0]
chunks = cookie.split("; ")
assert "secure" not in chunks
@pytest.mark.freeze_time
def test_cookie_policy_max_age():
expiry = timedelta(seconds=10)
policy = JWTCookieAuthenticationPolicy("secret", expiration=expiry)
request = Request.blank("/")
headers = policy.forget(request)
_, cookie = headers[0]
chunks = cookie.split("; ")
assert "Max-Age=10" not in chunks
| bsd-2-clause | 6,635,171,839,500,509,000 | 31.780142 | 84 | 0.700563 | false |
lukovnikov/teafacto | teafacto/blocks/lang/wordembed.py | 1 | 3771 |
from teafacto.blocks.seq.rnn import SeqEncoder, MaskMode
from teafacto.blocks.seq.rnu import GRU
from teafacto.blocks.basic import IdxToOneHot, Embedder, VectorEmbed
from teafacto.blocks.lang.wordvec import Glove
from teafacto.core.base import *
from teafacto.core.base import tensorops as T
class WordEmbed(Embedder):
def __init__(self, indim=1000, outdim=50, trainfrac=0.0, **kw):
super(WordEmbed, self).__init__(indim, outdim, **kw)
self.emb = VectorEmbed(indim=indim, dim=outdim, trainfrac=trainfrac)
def apply(self, idxs):
return self.emb(idxs)
class WordEmbedGlove(Embedder):
def __init__(self, indim=1000, outdim=50, trainfrac=0.0, **kw):
super(WordEmbedGlove, self).__init__(indim, outdim, **kw)
self.emb = Glove(outdim, vocabsize=indim, trainfrac=trainfrac).block
def apply(self, idxs):
return self.emb(idxs)
class WordEncoder(Block):
def __init__(self, indim=220, outdim=200, maskid=0, **kw): # indim is number of characters
super(WordEncoder, self).__init__(**kw)
self.enc = SeqEncoder(IdxToOneHot(indim),
GRU(dim=indim, innerdim=outdim)).maskoptions(maskid, MaskMode.AUTO)
def apply(self, seq): # seq: (batsize, maxwordlen) of character idxs
enco = self.enc(seq) # enco: (batsize, outdim) of floats
return enco
class WordEmbedPlusGlove(Embedder):
def __init__(self, indim=4000, outdim=100, embdim=50, embtrainfrac=0.0, **kw):
super(WordEmbedPlusGlove, self).__init__(indim, outdim+embdim, **kw)
self.glove = Glove(embdim, vocabsize=indim, trainfrac=embtrainfrac).block
self.emb = VectorEmbed(indim=indim, dim=outdim)
def apply(self, idxs): # (batsize,) word idxs
gemb = self.glove(idxs) # (batsize, embdim)
oemb = self.emb(idxs) # (batsize, outdim),
return T.concatenate([gemb, oemb], axis=1) # (batsize, outdim+embdim)
class WordEncoderPlusGlove(Block):
def __init__(self, numchars=220,
numwords=4e5,
encdim=100,
embdim=50,
embtrainfrac=0.0,
maskid=0,
glovepath=None, **kw):
super(WordEncoderPlusGlove, self).__init__(**kw)
self.glove = Glove(embdim, vocabsize=numwords, trainfrac=embtrainfrac, path=glovepath).block
self.enc = WordEncoder(indim=numchars, outdim=encdim, maskid=maskid)
def apply(self, seq): # seq: (batsize, 1+maxwordlen): first column: Glove idxs, subsequent cols: char ids
if seq.ndim == 2:
emb = self.glove(seq[:, 0]) # (batsize, embdim)
enc = self.enc(seq[:, 1:]) # (batsize, encdim)
return T.concatenate([emb, enc], axis=1) # (batsize, embdim + encdim)
elif seq.ndim == 3: # (batsize, seqlen, 1+maxwordlen)
emb = self.glove(seq[:, :, 0])
o = T.scan(fn=self.recenc, sequences=seq[:, :, 1:].dimswap(1, 0), outputs_info=None)
enc = o.dimswap(1, 0)
return T.concatenate([emb, enc], axis=2) # (batsize, seqlen, embdim + encdim)
def recenc(self, seq): # (batsize, 1+maxwordlen)
return self.enc(seq)
'''
class WordEncoderPlusEmbed(Block):
def __init__(self, numchars=220, numwords=4e5, encdim=100, embdim=50, embtrainfrac=0.0, **kw):
super(WordEncoderPlusEmbed, self).__init__(**kw)
self.emb = VectorEmbed(indim=numwords, dim=embdim)
self.enc = WordEncoder(indim=numchars, outdim=encdim)
def apply(self, seq):
emb = self.emb(seq[:, 0])
enc = self.enc(seq[:, 1:])
return T.concatenate([emb, enc], axis=1)
'''
| mit | -9,127,946,652,017,896,000 | 42.344828 | 115 | 0.603023 | false |
kvaps/vdsm | vdsm/virt/sampling.py | 1 | 27239 |
#
# Copyright 2008-2015 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
"""
Support for VM and host statistics sampling.
"""
from collections import defaultdict, deque, namedtuple
import errno
import logging
import os
import re
import threading
import time
from vdsm.constants import P_VDSM_RUN, P_VDSM_CLIENT_LOG
from vdsm import ipwrapper
from vdsm import netinfo
from vdsm import utils
from vdsm.config import config
import v2v
from .utils import ExpiringCache
import caps
_THP_STATE_PATH = '/sys/kernel/mm/transparent_hugepage/enabled'
if not os.path.exists(_THP_STATE_PATH):
_THP_STATE_PATH = '/sys/kernel/mm/redhat_transparent_hugepage/enabled'
JIFFIES_BOUND = 2 ** 32
NETSTATS_BOUND = 2 ** 32
class InterfaceSample(object):
"""
A network interface sample.
The sample is set at the time of initialization and can't be updated.
"""
def readIfaceStat(self, ifid, stat):
"""
Get an interface's stat.
.. note::
Really ugly implementation; from time to time, Linux returns an
empty line. TODO: understand why this happens and fix it!
:param ifid: The ID of the interface you want to query.
:param stat: The type of statistic you want get.
:returns: The value of the statistic you asked for.
:type: int
"""
f = '/sys/class/net/%s/statistics/%s' % (ifid, stat)
tries = 5
while tries:
tries -= 1
try:
with open(f) as fi:
s = fi.read()
except IOError as e:
# silently ignore missing wifi stats
if e.errno != errno.ENOENT:
logging.debug("Could not read %s", f, exc_info=True)
return 0
try:
return int(s)
except:
if s != '':
logging.warning("Could not parse statistics (%s) from %s",
f, s, exc_info=True)
logging.debug('bad %s: (%s)', f, s)
if not tries:
raise
def __init__(self, link):
ifid = link.name
self.rx = self.readIfaceStat(ifid, 'rx_bytes')
self.tx = self.readIfaceStat(ifid, 'tx_bytes')
self.rxDropped = self.readIfaceStat(ifid, 'rx_dropped')
self.txDropped = self.readIfaceStat(ifid, 'tx_dropped')
self.rxErrors = self.readIfaceStat(ifid, 'rx_errors')
self.txErrors = self.readIfaceStat(ifid, 'tx_errors')
self.operstate = 'up' if link.oper_up else 'down'
self.speed = _getLinkSpeed(link)
self.duplex = _getDuplex(ifid)
_LOGGED_ATTRS = ('operstate', 'speed', 'duplex')
def _log_attrs(self, attrs):
return ' '.join(
'%s:%s' % (attr, getattr(self, attr)) for attr in attrs)
def to_connlog(self):
return self._log_attrs(self._LOGGED_ATTRS)
def connlog_diff(self, other):
"""Return a textual description of the interesting stuff new to self
and missing in 'other'."""
return self._log_attrs(
attr for attr in self._LOGGED_ATTRS
if getattr(self, attr) != getattr(other, attr))
class TotalCpuSample(object):
"""
A sample of total CPU consumption.
The sample is taken at initialization time and can't be updated.
"""
def __init__(self):
with open('/proc/stat') as f:
self.user, userNice, self.sys, self.idle = \
map(int, f.readline().split()[1:5])
self.user += userNice
class CpuCoreSample(object):
"""
A sample of the CPU consumption of each core
The sample is taken at initialization time and can't be updated.
"""
CPU_CORE_STATS_PATTERN = re.compile(r'cpu(\d+)\s+(.*)')
def __init__(self):
self.coresSample = {}
with open('/proc/stat') as src:
for line in src:
match = self.CPU_CORE_STATS_PATTERN.match(line)
if match:
coreSample = {}
user, userNice, sys, idle = \
map(int, match.group(2).split()[0:4])
coreSample['user'] = user
coreSample['userNice'] = userNice
coreSample['sys'] = sys
coreSample['idle'] = idle
self.coresSample[match.group(1)] = coreSample
def getCoreSample(self, coreId):
strCoreId = str(coreId)
if strCoreId in self.coresSample:
return self.coresSample[strCoreId]
class NumaNodeMemorySample(object):
"""
A sample of the memory stats of each numa node
The sample is taken at initialization time and can't be updated.
"""
def __init__(self):
self.nodesMemSample = {}
numaTopology = caps.getNumaTopology()
for nodeIndex in numaTopology:
nodeMemSample = {}
if len(numaTopology) < 2:
memInfo = caps.getUMAHostMemoryStats()
else:
memInfo = caps.getMemoryStatsByNumaCell(int(nodeIndex))
nodeMemSample['memFree'] = memInfo['free']
nodeMemSample['memPercent'] = 100 - \
int(100.0 * int(memInfo['free']) / int(memInfo['total']))
self.nodesMemSample[nodeIndex] = nodeMemSample
class PidCpuSample(object):
"""
A sample of the CPU consumption of a process.
The sample is taken at initialization time and can't be updated.
"""
def __init__(self, pid):
with open('/proc/%s/stat' % pid) as stat:
self.user, self.sys = \
map(int, stat.read().split()[13:15])
class TimedSample(object):
def __init__(self):
self.timestamp = time.time()
_PROC_STAT_PATH = '/proc/stat'
def getBootTime():
"""
Returns the boot time of the machine in seconds since epoch.
Raises IOError if file access fails, or ValueError if boot time not
present in file.
"""
with open(_PROC_STAT_PATH) as proc_stat:
for line in proc_stat:
if line.startswith('btime'):
parts = line.split()
if len(parts) > 1:
return int(parts[1])
else:
break
raise ValueError('Boot time not present')
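# Illustrative sketch, not part of the original module: the parsing that
# getBootTime() performs on a /proc/stat line such as "btime 1419247511"
# (the timestamp shown here is made up).
def _example_parse_btime(line='btime 1419247511'):
    parts = line.split()
    return int(parts[1]) if line.startswith('btime') and len(parts) > 1 else None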
def _get_interfaces_and_samples():
links_and_samples = {}
for link in ipwrapper.getLinks():
try:
links_and_samples[link.name] = InterfaceSample(link)
except IOError as e:
# this handles a race condition where the device no longer
# exists and netlink fails to fetch it
if e.errno == errno.ENODEV:
continue
raise
return links_and_samples
class HostSample(TimedSample):
"""
A sample of host-related statistics.
Contains the state of the host at the time of initialization.
"""
MONITORED_PATHS = ['/tmp', '/var/log', '/var/log/core', P_VDSM_RUN]
def _getDiskStats(self):
d = {}
for p in self.MONITORED_PATHS:
free = 0
try:
stat = os.statvfs(p)
free = stat.f_bavail * stat.f_bsize / (2 ** 20)
except:
pass
d[p] = {'free': str(free)}
return d
def __init__(self, pid):
"""
Initialize a HostSample.
:param pid: The PID of this vdsm host.
:type pid: int
"""
super(HostSample, self).__init__()
self.interfaces = _get_interfaces_and_samples()
self.pidcpu = PidCpuSample(pid)
self.totcpu = TotalCpuSample()
meminfo = utils.readMemInfo()
freeOrCached = (meminfo['MemFree'] +
meminfo['Cached'] + meminfo['Buffers'])
self.memUsed = 100 - int(100.0 * (freeOrCached) / meminfo['MemTotal'])
self.anonHugePages = meminfo.get('AnonHugePages', 0) / 1024
try:
with open('/proc/loadavg') as loadavg:
self.cpuLoad = loadavg.read().split()[1]
except:
self.cpuLoad = '0.0'
self.diskStats = self._getDiskStats()
try:
with open(_THP_STATE_PATH) as f:
s = f.read()
self.thpState = s[s.index('[') + 1:s.index(']')]
except:
self.thpState = 'never'
self.cpuCores = CpuCoreSample()
self.numaNodeMem = NumaNodeMemorySample()
ENGINE_DEFAULT_POLL_INTERVAL = 15
try:
self.recentClient = (
self.timestamp - os.stat(P_VDSM_CLIENT_LOG).st_mtime <
2 * ENGINE_DEFAULT_POLL_INTERVAL)
except OSError as e:
if e.errno == errno.ENOENT:
self.recentClient = False
else:
raise
def to_connlog(self):
text = ', '.join(
('%s:(%s)' % (ifid, ifacesample.to_connlog()))
for (ifid, ifacesample) in self.interfaces.iteritems())
return ('recent_client:%s, ' % self.recentClient) + text
def connlog_diff(self, other):
text = ''
for ifid, sample in self.interfaces.iteritems():
if ifid in other.interfaces:
diff = sample.connlog_diff(other.interfaces[ifid])
if diff:
text += '%s:(%s) ' % (ifid, diff)
else:
text += 'new %s:(%s) ' % (ifid, sample.to_connlog())
for ifid, sample in other.interfaces.iteritems():
if ifid not in self.interfaces:
text += 'dropped %s:(%s) ' % (ifid, sample.to_connlog())
if self.recentClient != other.recentClient:
text += 'recent_client:%s' % self.recentClient
return text
_MINIMUM_SAMPLES = 1
class SampleWindow(object):
"""Keep sliding window of samples."""
def __init__(self, size, timefn=time.time):
if size < _MINIMUM_SAMPLES:
raise ValueError("window size must be not less than %i" %
_MINIMUM_SAMPLES)
self._samples = deque(maxlen=size)
self._timefn = timefn
def append(self, value):
"""
Record the current time and append new sample, removing the oldest
sample if needed.
"""
timestamp = self._timefn()
self._samples.append((timestamp, value))
def stats(self):
"""
Return a tuple in the format: (first, last, difftime), containing
the first and the last samples in the defined 'window' and the
time difference between them.
"""
if len(self._samples) < 2:
return None, None, None
first_timestamp, first_sample = self._samples[0]
last_timestamp, last_sample = self._samples[-1]
elapsed_time = last_timestamp - first_timestamp
return first_sample, last_sample, elapsed_time
def last(self, nth=1):
"""
Return the nth-last collected sample.
"""
if len(self._samples) < nth:
return None
_, sample = self._samples[-nth]
return sample
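# Illustrative sketch, not part of the original module: basic SampleWindow
# usage. The fake clock below is an assumption made only to keep the example
# deterministic.
def _example_sample_window():
    ticks = iter([0.0, 1.0])
    win = SampleWindow(size=2, timefn=lambda: next(ticks))
    win.append('a')                      # recorded at t=0.0
    win.append('b')                      # recorded at t=1.0
    first, last, elapsed = win.stats()   # ('a', 'b', 1.0)
    return first, last, elapsed, win.last()   # last() -> 'b'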
StatsSample = namedtuple('StatsSample',
['first_value', 'last_value',
'interval', 'stats_age'])
EMPTY_SAMPLE = StatsSample(None, None, None, None)
class StatsCache(object):
"""
Cache for bulk stats samples.
Provide facilities to retrieve per-vm samples, and the glue code to deal
with disappearing per-vm samples.
Rationale for the 'clock()' method and for the odd API of the 'put()'
method with explicit 'monotonic_ts' argument:
QEMU processes can go rogue and block on a sampling operation, most
likely, but not only, because storage becomes unavailable.
In turn, that means that libvirt API that VDSM uses can get stuck,
but eventually those calls can unblock.
VDSM has countermeasures for those cases. Stuck threads are replaced,
thanks to Executor. But still, before being destroyed, a replaced
thread can mistakenly try to add a sample to a StatsCache.
Because of worker thread replacement, that sample from a stuck thread
can be stale.
So, under the assumption that at stable state stats collection has
a time cost negligible with respect to the collection interval, we need
to take the sample timestamp BEFORE starting the possibly-blocking call.
If we take the timestamp after the call, we have no means to distinguish
between a well-behaving call and an unblocked stuck call.
"""
_log = logging.getLogger("virt.sampling.StatsCache")
def __init__(self, clock=utils.monotonic_time):
self._clock = clock
self._lock = threading.Lock()
self._samples = SampleWindow(size=2, timefn=self._clock)
self._last_sample_time = 0
self._vm_last_timestamp = defaultdict(int)
def add(self, vmid):
"""
Warm up the cache for the given VM.
This is to avoid races during the first sampling and the first
reporting, which may result in a VM wrongly reported as unresponsive.
"""
with self._lock:
self._vm_last_timestamp[vmid] = self._clock()
def remove(self, vmid):
"""
Remove any data from the cache related to the given VM.
"""
with self._lock:
del self._vm_last_timestamp[vmid]
def get(self, vmid):
"""
Return the available StatSample for the given VM.
"""
with self._lock:
first_batch, last_batch, interval = self._samples.stats()
if first_batch is None:
return EMPTY_SAMPLE
first_sample = first_batch.get(vmid)
last_sample = last_batch.get(vmid)
if first_sample is None or last_sample is None:
return EMPTY_SAMPLE
stats_age = self._clock() - self._vm_last_timestamp[vmid]
return StatsSample(first_sample, last_sample,
interval, stats_age)
def clock(self):
"""
Provide timestamp compatible with what put() expects
"""
return self._clock()
def put(self, bulk_stats, monotonic_ts):
"""
Add a new bulk sample to the collection.
`monotonic_ts' is the sample time which must be associated with
the sample.
Silently discard out-of-order samples, which are assumed to be
returned by unblocked stuck calls, to avoid overwriting fresh data
with stale data.
"""
with self._lock:
last_sample_time = self._last_sample_time
if monotonic_ts >= last_sample_time:
self._samples.append(bulk_stats)
self._last_sample_time = monotonic_ts
self._update_ts(bulk_stats, monotonic_ts)
else:
self._log.warning(
'dropped stale old sample: sampled %f stored %f',
monotonic_ts, last_sample_time)
def _update_ts(self, bulk_stats, monotonic_ts):
# FIXME: this is expected to be costly performance-wise.
for vmid in bulk_stats:
self._vm_last_timestamp[vmid] = monotonic_ts
stats_cache = StatsCache()
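# Illustrative sketch, not part of the original module: the put()/get() pattern
# described in the StatsCache docstring, with the timestamp taken before the
# (possibly blocking) sampling call. The vm id and payloads are hypothetical.
def _example_stats_cache_usage(cache):
    cache.add('vm-1')                         # warm up before the first sample
    ts = cache.clock()                        # timestamp taken BEFORE sampling
    cache.put({'vm-1': {'cpu': 10}}, ts)      # first bulk sample
    ts = cache.clock()
    cache.put({'vm-1': {'cpu': 12}}, ts)      # second bulk sample; stale ones are dropped
    sample = cache.get('vm-1')                # StatsSample(first, last, interval, stats_age)
    return sample.first_value, sample.last_value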
# this value can be tricky to tune.
# we should avoid as much as we can to trigger
# false positive fast flows (getAllDomainStats call).
# to do so, we should avoid this value to be
# a multiple of known timeout and period:
# - vm sampling period is 15s (we control that)
# - libvirt (default) qemu monitor timeout is 30s (we DON'T control this)
_TIMEOUT = 40.0
class VMBulkSampler(object):
def __init__(self, conn, get_vms, stats_cache,
stats_flags=0, timeout=_TIMEOUT):
self._conn = conn
self._get_vms = get_vms
self._stats_cache = stats_cache
self._stats_flags = stats_flags
self._skip_doms = ExpiringCache(timeout)
self._sampling = threading.Semaphore() # used as glorified counter
self._log = logging.getLogger("virt.sampling.VMBulkSampler")
def __call__(self):
timestamp = self._stats_cache.clock()
# we are deep in the hot path. bool(ExpiringCache)
# *is* costly so we should avoid it if we can.
fast_path = (
self._sampling.acquire(blocking=False) and not self._skip_doms)
try:
if fast_path:
# This is expected to be the common case.
# If everything's ok, we can skip all the costly checks.
bulk_stats = self._conn.getAllDomainStats(self._stats_flags)
else:
# A previous call got stuck, or not every domain
# has properly recovered. Thus we must whitelist domains.
doms = self._get_responsive_doms()
self._log.debug('sampling %d domains', len(doms))
if doms:
bulk_stats = self._conn.domainListGetStats(
doms, self._stats_flags)
else:
bulk_stats = []
except Exception:
self._log.exception("vm sampling failed")
else:
self._stats_cache.put(_translate(bulk_stats), timestamp)
finally:
self._sampling.release()
def _get_responsive_doms(self):
vms = self._get_vms()
doms = []
for vm_id, vm_obj in vms.iteritems():
to_skip = self._skip_doms.get(vm_id, False)
if to_skip:
continue
elif not vm_obj.isDomainReadyForCommands():
self._skip_doms[vm_id] = True
else:
doms.append(vm_obj._dom._dom)
return doms
class HostStatsThread(threading.Thread):
"""
A thread that periodically samples host statistics.
"""
AVERAGING_WINDOW = 5
_CONNLOG = logging.getLogger('connectivity')
def __init__(self, log):
self.startTime = time.time()
threading.Thread.__init__(self)
self.daemon = True
self._log = log
self._stopEvent = threading.Event()
self._samples = SampleWindow(size=self.AVERAGING_WINDOW)
self._pid = os.getpid()
self._ncpus = max(os.sysconf('SC_NPROCESSORS_ONLN'), 1)
self._sampleInterval = \
config.getint('vars', 'host_sample_stats_interval')
def stop(self):
self._stopEvent.set()
def run(self):
import vm
try:
# wait a bit before starting to sample
time.sleep(self._sampleInterval)
while not self._stopEvent.isSet():
try:
sample = HostSample(self._pid)
self._samples.append(sample)
second_last = self._samples.last(nth=2)
if second_last is None:
self._CONNLOG.debug('%s', sample.to_connlog())
else:
diff = sample.connlog_diff(second_last)
if diff:
self._CONNLOG.debug('%s', diff)
except vm.TimeoutError:
self._log.exception("Timeout while sampling stats")
self._stopEvent.wait(self._sampleInterval)
except:
if not self._stopEvent.isSet():
self._log.exception("Error while sampling stats")
@utils.memoized
def _boot_time(self):
# Try to get boot time only once, if N/A just log the error and never
# include it in the response.
try:
return getBootTime()
except (IOError, ValueError):
self._log.exception('Failed to get boot time')
return None
def get(self):
stats = self._empty_stats()
first_sample, last_sample, _ = self._samples.stats()
if first_sample is None:
return stats
stats.update(self._getInterfacesStats())
interval = last_sample.timestamp - first_sample.timestamp
jiffies = (
last_sample.pidcpu.user - first_sample.pidcpu.user
) % JIFFIES_BOUND
stats['cpuUserVdsmd'] = jiffies / interval
jiffies = (
last_sample.pidcpu.sys - first_sample.pidcpu.sys
) % JIFFIES_BOUND
stats['cpuSysVdsmd'] = jiffies / interval
jiffies = (
last_sample.totcpu.user - first_sample.totcpu.user
) % JIFFIES_BOUND
stats['cpuUser'] = jiffies / interval / self._ncpus
jiffies = (
last_sample.totcpu.sys - first_sample.totcpu.sys
) % JIFFIES_BOUND
stats['cpuSys'] = jiffies / interval / self._ncpus
stats['cpuIdle'] = max(0.0,
100.0 - stats['cpuUser'] - stats['cpuSys'])
stats['memUsed'] = last_sample.memUsed
stats['anonHugePages'] = last_sample.anonHugePages
stats['cpuLoad'] = last_sample.cpuLoad
stats['diskStats'] = last_sample.diskStats
stats['thpState'] = last_sample.thpState
if self._boot_time():
stats['bootTime'] = self._boot_time()
stats['numaNodeMemFree'] = last_sample.numaNodeMem.nodesMemSample
stats['cpuStatistics'] = self._getCpuCoresStats()
stats['v2vJobs'] = v2v.get_jobs_status()
return stats
def _getCpuCoresStats(self):
"""
:returns: a dict with the following format:
{'<cpuId>': {'numaNodeIndex': int, 'cpuSys': 'str',
'cpuIdle': 'str', 'cpuUser': 'str'}, ...}
"""
cpuCoreStats = {}
for nodeIndex, numaNode in caps.getNumaTopology().iteritems():
cpuCores = numaNode['cpus']
for cpuCore in cpuCores:
coreStat = {}
coreStat['nodeIndex'] = int(nodeIndex)
hs0, hs1, _ = self._samples.stats()
interval = hs1.timestamp - hs0.timestamp
jiffies = (hs1.cpuCores.getCoreSample(cpuCore)['user'] -
hs0.cpuCores.getCoreSample(cpuCore)['user']) % \
JIFFIES_BOUND
coreStat['cpuUser'] = ("%.2f" % (jiffies / interval))
jiffies = (hs1.cpuCores.getCoreSample(cpuCore)['sys'] -
hs0.cpuCores.getCoreSample(cpuCore)['sys']) % \
JIFFIES_BOUND
coreStat['cpuSys'] = ("%.2f" % (jiffies / interval))
coreStat['cpuIdle'] = ("%.2f" %
max(0.0, 100.0 -
float(coreStat['cpuUser']) -
float(coreStat['cpuSys'])))
cpuCoreStats[str(cpuCore)] = coreStat
return cpuCoreStats
def _getInterfacesStats(self):
stats = {}
hs0, hs1, _ = self._samples.stats()
if hs0 is None:
return stats
interval = hs1.timestamp - hs0.timestamp
rx = tx = rxDropped = txDropped = 0
stats['network'] = {}
total_rate = 0
for ifid in hs1.interfaces:
# skip hot-plugged devices when we don't have enough information
# to compute stats for them
if ifid not in hs0.interfaces:
continue
ifrate = hs1.interfaces[ifid].speed or 1000
Mbps2Bps = (10 ** 6) / 8
thisRx = (hs1.interfaces[ifid].rx - hs0.interfaces[ifid].rx) % \
NETSTATS_BOUND
thisTx = (hs1.interfaces[ifid].tx - hs0.interfaces[ifid].tx) % \
NETSTATS_BOUND
rxRate = 100.0 * thisRx / interval / ifrate / Mbps2Bps
txRate = 100.0 * thisTx / interval / ifrate / Mbps2Bps
if txRate > 100 or rxRate > 100:
txRate = min(txRate, 100.0)
rxRate = min(rxRate, 100.0)
self._log.debug('Rate above 100%%.')
iface = hs1.interfaces[ifid]
stats['network'][ifid] = {'name': ifid, 'speed': str(ifrate),
'rxDropped': str(iface.rxDropped),
'txDropped': str(iface.txDropped),
'rxErrors': str(iface.rxErrors),
'txErrors': str(iface.txErrors),
'state': iface.operstate,
'rxRate': '%.1f' % rxRate,
'txRate': '%.1f' % txRate,
'rx': str(iface.rx),
'tx': str(iface.tx),
'sampleTime': hs1.timestamp,
}
rx += thisRx
tx += thisTx
rxDropped += hs1.interfaces[ifid].rxDropped
txDropped += hs1.interfaces[ifid].txDropped
total_rate += ifrate
total_bytes_per_sec = (total_rate or 1000) * (10 ** 6) / 8
stats['rxRate'] = 100.0 * rx / interval / total_bytes_per_sec
stats['txRate'] = 100.0 * tx / interval / total_bytes_per_sec
if stats['txRate'] > 100 or stats['rxRate'] > 100:
stats['txRate'] = min(stats['txRate'], 100.0)
stats['rxRate'] = min(stats['rxRate'], 100.0)
logging.debug(stats)
stats['rxDropped'] = rxDropped
stats['txDropped'] = txDropped
return stats
def _empty_stats(self):
return {
'cpuUser': 0.0,
'cpuSys': 0.0,
'cpuIdle': 100.0,
'rxRate': 0.0, # REQUIRED_FOR: engine < 3.6
'txRate': 0.0, # REQUIRED_FOR: engine < 3.6
'cpuSysVdsmd': 0.0,
'cpuUserVdsmd': 0.0,
'elapsedTime': int(time.time() - self.startTime)
}
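# Illustrative worked example, not part of the original module: the
# "% JIFFIES_BOUND" used in HostStatsThread.get() keeps CPU deltas correct even
# when a 32-bit counter wraps between two samples. The numbers are made up.
def _example_jiffies_delta(first=JIFFIES_BOUND - 10, last=5):
    return (last - first) % JIFFIES_BOUND     # 15 jiffies, not a huge negative delta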
def _getLinkSpeed(dev):
if dev.isNIC():
speed = netinfo.nicSpeed(dev.name)
elif dev.isBOND():
speed = netinfo.bondSpeed(dev.name)
elif dev.isVLAN():
speed = netinfo.vlanSpeed(dev.name)
else:
speed = 0
return speed
def _getDuplex(ifid):
"""Return whether a device is connected in full-duplex. Return 'unknown' if
duplex state is not known"""
try:
with open('/sys/class/net/%s/duplex' % ifid) as src:
return src.read().strip()
except IOError:
return 'unknown'
def _translate(bulk_stats):
return dict((dom.UUIDString(), stats)
for dom, stats in bulk_stats)
| gpl-2.0 | -3,896,407,745,218,376,000 | 33.832481 | 79 | 0.556408 | false |
nrz/ylikuutio | external/bullet3/examples/pybullet/gym/pybullet_envs/agents/configs.py | 2 | 4587 |
# Copyright 2017 The TensorFlow Agents Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Example configurations using the PPO algorithm."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
from . import ppo
from . import networks
from pybullet_envs.bullet import minitaur_gym_env
from pybullet_envs.bullet import minitaur_duck_gym_env
from pybullet_envs.bullet import minitaur_env_randomizer
import pybullet_envs.bullet.minitaur_gym_env as minitaur_gym_env
import pybullet_envs
try:
import tensorflow.compat.v1 as tf
except Exception:
import tensorflow as tf
def default():
"""Default configuration for PPO."""
# General
algorithm = ppo.PPOAlgorithm
num_agents = 30
eval_episodes = 30
use_gpu = False
# Network
network = networks.feed_forward_gaussian
weight_summaries = dict(all=r'.*', policy=r'.*/policy/.*', value=r'.*/value/.*')
policy_layers = 200, 100
value_layers = 200, 100
init_mean_factor = 0.1
init_logstd = -1
# Optimization
update_every = 30
update_epochs = 25
optimizer = tf.train.AdamOptimizer
update_epochs_policy = 64
update_epochs_value = 64
learning_rate = 1e-4
# Losses
discount = 0.995
kl_target = 1e-2
kl_cutoff_factor = 2
kl_cutoff_coef = 1000
kl_init_penalty = 1
return locals()
def pybullet_pendulum():
locals().update(default())
env = 'InvertedPendulumBulletEnv-v0'
max_length = 200
steps = 5e7 # 50M
return locals()
def pybullet_doublependulum():
locals().update(default())
env = 'InvertedDoublePendulumBulletEnv-v0'
max_length = 1000
steps = 5e7 # 50M
return locals()
def pybullet_pendulumswingup():
locals().update(default())
env = 'InvertedPendulumSwingupBulletEnv-v0'
max_length = 1000
steps = 5e7 # 50M
return locals()
def pybullet_cheetah():
"""Configuration for MuJoCo's half cheetah task."""
locals().update(default())
# Environment
env = 'HalfCheetahBulletEnv-v0'
max_length = 1000
steps = 1e8 # 100M
return locals()
def pybullet_ant():
locals().update(default())
env = 'AntBulletEnv-v0'
max_length = 1000
steps = 5e7 # 50M
return locals()
def pybullet_kuka_grasping():
"""Configuration for Bullet Kuka grasping task."""
locals().update(default())
# Environment
env = 'KukaBulletEnv-v0'
max_length = 1000
steps = 1e7 # 10M
return locals()
def pybullet_racecar():
"""Configuration for Bullet MIT Racecar task."""
locals().update(default())
# Environment
env = 'RacecarBulletEnv-v0' #functools.partial(racecarGymEnv.RacecarGymEnv, isDiscrete=False, renders=True)
max_length = 10
steps = 1e7 # 10M
return locals()
def pybullet_humanoid():
locals().update(default())
randomizer = (minitaur_env_randomizer.MinitaurEnvRandomizer())
env = 'HumanoidBulletEnv-v0'
max_length = 1000
steps = 3e8 # 300M
return locals()
def pybullet_minitaur():
"""Configuration specific to minitaur_gym_env.MinitaurBulletEnv class."""
locals().update(default())
randomizer = (minitaur_env_randomizer.MinitaurEnvRandomizer())
env = functools.partial(minitaur_gym_env.MinitaurBulletEnv,
accurate_motor_model_enabled=True,
motor_overheat_protection=True,
pd_control_enabled=True,
env_randomizer=randomizer,
render=False)
max_length = 1000
steps = 3e7 # 30M
return locals()
def pybullet_duck_minitaur():
"""Configuration specific to minitaur_gym_env.MinitaurBulletDuckEnv class."""
locals().update(default())
randomizer = (minitaur_env_randomizer.MinitaurEnvRandomizer())
env = functools.partial(minitaur_gym_env.MinitaurBulletDuckEnv,
accurate_motor_model_enabled=True,
motor_overheat_protection=True,
pd_control_enabled=True,
env_randomizer=randomizer,
render=False)
max_length = 1000
steps = 3e7 # 30M
return locals()
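# Illustrative sketch, not part of the original module: every configuration
# function above returns locals() as a plain dict, so a caller would typically
# consume it along these (hypothetical) lines.
#
#     config = pybullet_pendulum()
#     algorithm = config['algorithm']          # ppo.PPOAlgorithm
#     max_length, steps = config['max_length'], config['steps']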
| agpl-3.0 | 6,645,876,306,263,930,000 | 27.141104 | 110 | 0.683453 | false |
wolcomm/prngmgr | setup.py | 1 | 1795 |
#!/usr/bin/env python
# Copyright 2016-2017 Workonline Communications (Pty) Ltd. All rights reserved.
#
# The contents of this file are licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Setup configuration script for prngmgr."""
from setuptools import find_packages, setup
version = open('packaging/VERSION').read().strip()
requirements = open('packaging/requirements.txt').read().split("\n")
description = 'Django webapp for peering session management \
using PeeringDB API'
def readme():
"""Read README file."""
with open('README.rst') as f:
return f.read()
setup(
name='prngmgr',
version=version,
author='Workonline Communications',
author_email='communications@workonkonline.co.za',
description=description,
long_description=readme(),
license='LICENSE',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet',
],
packages=find_packages(),
include_package_data=True,
url='https://github.com/wolcomm/prngmgr',
download_url='https://github.com/prngmgr/%s' % version,
install_requires=requirements,
)
| apache-2.0 | -2,536,337,764,862,797,000 | 32.240741 | 79 | 0.698607 | false |
assaron/pynlc | tests/test_board.py | 1 | 9235 |
# -*- coding: utf-8 -*-
#
# Copyright 2010 Aleksey Sergushichev <alsergbox@gmail.com>
#
# This file is part of pynlc.
# Pynlc is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Pynlc is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with pynlc. If not, see <http://www.gnu.org/licenses/>.
# Adding parent dir to syspath
import sys
from os.path import join, realpath, pardir
sys.path.append(
realpath(join(__file__, pardir, pardir)))
from datetime import date, timedelta
from nose.tools import eq_, assert_raises
from board import *
from util import SimpleServer
from threading import Thread
def test_board():
board = Board()
channels = [ (0, "#Channel0", "This is the zeroth channel"),
(2, "#Channel2", "This is the zeroth channel"),
(3, "#Channel3", "This is the zeroth channel"),
(1, u"#1 канал", u"Первый канал представляет"),]
update_message = "dchannels\t" + "\t\r".join(
["\t".join([unicode(field) for field in channel])
for channel in channels] + [''])
board.handle_update(update_message.encode("cp1251"))
board_channels = [
tuple([getattr(channel,field)()
for field, __ in CHANNEL_PROPERTIES])
for channel in board.channels_list()]
eq_(channels, board_channels)
eq_(None, board.get_channel(10))
eq_(0, board.last_time_id())
messages = [[1, 0, -1, 0, "127.0.0.1", "host1", "nick1", "Hello world",
0, 1, 0, "ff-ff-ff-ff-ff-ff", 0, 0, 0, 0, 1, 0, 0],
[2, 0, 1, 0, "127.0.0.2", "host2", "nick2", u"Привет и тебе",
0, 1, 0, "ff-ff-ff-ff-ff-fe", 0, 0, 0, 0, 2, 1, 0],
[3, 0, 1, 0, "255.255.255.255", "255", u"ник255", u"бугога",
0, 1, 0, "00-00-00-00-00-00", 0, 0, 0, 0, 5, 0, 0],
[4, 0, -1, 0, "127.0.0.2", "host2", "nick2", "Hello<br>\nchannel<br>\n#2",
0, 2, 0, "ff-ff-ff-ff-ff-fe", 0, 0, 0, 0, 3, 1, 0], ]
update_message = "dmagic\r" + ("\t\r".join(
["\t".join([unicode(field) for field in message])
for message in messages] + [''])).replace("\n","\x01")
board.handle_update(update_message.encode("cp1251"))
eq_(5, board.last_time_id())
eq_([len(board.get_channel(i).replies()) for i in xrange(4)],
[0, 1, 0, 0])
eq_([len(board.get_message(i).replies()) for i in xrange(1, 5)],
[1, 0, 0, 0])
eq_([len(board.get_channel(i).replies(True)) for i in xrange(4)],
[0, 1, 1, 0])
eq_([len(board.get_message(i).replies(True)) for i in xrange(1, 5)],
[2, 0, 0, 0])
for message in messages:
board_message = board.get_message(message[0])
for i, (field, decoder) in enumerate(MESSAGE_PROPERTIES):
if not decoder:
def identity(x):
return x
decoder = identity
eq_(decoder(unicode(message[i]).encode("cp1251")), getattr(board_message, field)())
eq_(None, board.get_message(10))
simple_server = SimpleServer()
board.set_sender(simple_server.send)
message_id = 3
board.delete_message(message_id)
eq_("Ddel\t%d\t%s\t\t\n" % (message_id, messages[message_id - 1][6]),
simple_server.recieve().decode('cp1251'))
board.delete_comments(message_id)
eq_("Ddel\t%d\t%s\tReplyOnly\t\n" % (message_id, messages[message_id - 1][6]),
simple_server.recieve().decode('cp1251'))
expiration_date = (date.today() + timedelta(50) - date(1899, 12, 30)).days
new_message = u"измененное\nсообщение"
new_nick = u"измененный ник"
board.edit_message(message_id, new_message, new_nick)
eq_("Dedit\t%d\t%d\t%d\t%d\t%s\t%s\t\t" %
(message_id, messages[message_id - 1][9], expiration_date,
messages[message_id - 1][2], new_nick,
new_message.replace("\n","\r")),
simple_server.recieve().decode('cp1251'))
eq_(messages[2][17], 0)
messages[2][17] = 1
update_message = "dmagic\r" + (
"\t\r".join(
["\t".join([unicode(field) for field in messages[2]])
] + ['']
)
).replace("\n","\x01")
board.handle_update(update_message.encode("cp1251"))
eq_([len(board.get_channel(i).replies()) for i in xrange(4)],
[0, 1, 0, 0])
eq_([len(board.get_message(i).replies()) for i in xrange(1, 5)],
[0, 0, 0, 0])
eq_([len(board.get_channel(i).replies(True)) for i in xrange(4)],
[0, 1, 1, 0])
eq_([len(board.get_message(i).replies(True)) for i in xrange(1, 5)],
[2, 0, 0, 0])
def test_board_reply():
simple_server = SimpleServer()
board = Board(simple_server.send)
reply = u"чё смеешься?"
nick = u"user_Вася"
parent_id = 4
board.reply(parent_id, reply, nick)
eq_("Dreply\t%d\t%s\t%s\t\t\n" % (parent_id, nick, reply),
simple_server.recieve().decode('cp1251'))
def test_board_new():
simple_server = SimpleServer()
board = Board(simple_server.send)
channel_id = 0
message = u"Внимание!"
nick = u"user_Вася"
board.new(channel_id, message, nick)
expiration_date = (date.today() + timedelta(50) - date(1899, 12, 30)).days
eq_("Dadd\t%d\t%d\t%s\t%s\n" % (channel_id, expiration_date, nick, message),
simple_server.recieve().decode('cp1251'))
channel_id = 1
message = u"Срочный вопрос!"
nick = u"user_Петя"
actuality_period = 1
board.new(channel_id, message, nick, actuality_period)
expiration_date = (date.today() +
timedelta(actuality_period) - date(1899, 12, 30)).days
eq_("Dadd\t%d\t%d\t%s\t%s\n" % (channel_id, expiration_date, nick, message),
simple_server.recieve().decode('cp1251'))
def test_board_up_message():
simple_server = SimpleServer()
board = Board(simple_server.send)
message_id = 4
board.up_message(message_id)
eq_("Dup\t%d\n" % message_id, simple_server.recieve().decode('cp1251'))
def test_wait_for_channels():
board = Board()
waiting_thread = Thread(target = board.wait_for_channels)
waiting_thread.start()
eq_(waiting_thread.isAlive(), True)
channels = [ (0, "#Channel0", "This is the zeroth channel"),
(2, "#Channel2", "This is the zeroth channel"),
(3, "#Channel3", "This is the zeroth channel"),
(1, u"#1 канал", u"Первый канал представляет"),]
eq_(waiting_thread.isAlive(), True)
update_message = "dchannels\t" + "\t\r".join(
["\t".join([unicode(field) for field in channel])
for channel in channels] + [''])
eq_(waiting_thread.isAlive(), True)
board.handle_update(update_message.encode("cp1251"))
waiting_thread.join()
def test_iternews():
board = Board(with_iternews=False)
assert_raises(Exception, board.iternews)
board = Board()
news = board.iternews()
channels = [ (0, "#Channel0", "This is the zeroth channel"), ]
update_message = "dchannels\t" + "\t\r".join(
["\t".join([unicode(field) for field in channel])
for channel in channels] + [''])
board.handle_update(update_message)
messages = [[1, 0, -1, 0, "127.0.0.1", "host1", "nick1", "Hello world",
0, 0, 0, "ff-ff-ff-ff-ff-ff", 0, 0, 0, 0, 1, 0, 0],
[2, 0, 1, 0, "127.0.0.2", "host2", "nick2", u"Привет и тебе",
0, 0, 0, "ff-ff-ff-ff-ff-fe", 0, 0, 0, 0, 2, 1, 0],
[3, 0, 1, 0, "255.255.255.255", "255", u"ник255", u"бугога",
0, 0, 0, "00-00-00-00-00-00", 0, 0, 0, 0, 5, 0, 0],
[4, 0, -1, 0, "127.0.0.2", "host2", "nick2", "Hello<br>\nchannel<br>\n#2",
0, 0, 0, "ff-ff-ff-ff-ff-fe", 0, 0, 0, 0, 3, 1, 0], ]
update_message = "dmagic\r" + ("\t\r".join(
["\t".join([unicode(field) for field in message])
for message in messages[0:3]] + [''])).replace("\n","\x01")
board.handle_update(update_message.encode("cp1251"))
eq_(news.next().id(), 1)
update_message = "dmagic\r" + ("\t\r".join(
["\t".join([unicode(field) for field in message])
for message in messages[3:4]] + [''])).replace("\n","\x01")
board.handle_update(update_message.encode("cp1251"))
eq_(news.next().id(), 2)
eq_(news.next().id(), 3)
eq_(news.next().id(), 4)
def test_auto_update():
simple_server = SimpleServer()
board = Board(simple_server.send)
board.handle_update("dnew\n")
eq_("Dlast\t0", simple_server.recieve()[:7])
| gpl-3.0 | -5,950,442,493,698,498,000 | 35.401606 | 95 | 0.566748 | false |
robocomp/robocomp | tools/robocompdsl/templates/templateCPP/templatecpp.py | 1 | 1432 |
import datetime
import os
from ..common.abstracttemplatesmanager import ComponentTemplatesManager
from ..common.plugin_collection import PluginCollection
from . import plugins
FILE_PATH = os.path.dirname(os.path.realpath(__file__))
class TemplatesManagerCpp(ComponentTemplatesManager):
def __init__(self, component):
self.files = {
'regular': [
'CMakeLists.txt', 'DoxyFile', 'README-RCNODE.txt', 'README.md', 'etc/config', 'src/main.cpp',
'src/CMakeLists.txt', 'src/CMakeListsSpecific.txt', 'src/commonbehaviorI.h', 'src/commonbehaviorI.cpp',
'src/genericmonitor.h', 'src/genericmonitor.cpp', 'src/config.h', 'src/specificmonitor.h',
'src/specificmonitor.cpp', 'src/genericworker.h', 'src/genericworker.cpp', 'src/specificworker.h',
'src/specificworker.cpp', 'src/mainUI.ui'
],
'avoid_overwrite': [
'src/specificworker.h', 'src/specificworker.cpp', 'src/CMakeListsSpecific.txt',
'src/mainUI.ui', 'src/specificmonitor.h', 'src/specificmonitor.cpp', 'README.md',
'etc/config'
],
'servant_files': ["SERVANT.H", "SERVANT.CPP"],
'template_path': "templateCPP/files/"
}
current_plugins = PluginCollection(plugins.__name__)
super(TemplatesManagerCpp, self).__init__(component, current_plugins)
| gpl-3.0 | -8,428,258,205,923,799,000 | 39.914286 | 119 | 0.621508 | false |
auscompgeek/chickenpie | chickenpie/debugger.py | 1 | 2507 |
#!/usr/bin/env python
from __future__ import print_function
import sys
import traceback
from chickenpie import opcodes
from chickenpie.vm import Machine
def boot(argv=sys.argv):
m = Machine()
m.load_file(argv[1])
if len(argv) >= 3:
m.load_input(argv[2])
return m
def input_reader():
EXIT_COMMANDS = 'bye', 'exit', 'quit', 'q'
read = raw_input if sys.version_info[0] < 3 else input # noqa
try:
inp = read('> ')
while inp not in EXIT_COMMANDS:
yield inp
inp = read('> ')
except EOFError:
print('quit', file=sys.stderr)
def main():
if len(sys.argv) < 2:
print('Usage: chicken-debug SCRIPT.CH [INPUT]', file=sys.stderr)
return
machina = boot()
breakpoints = set()
for line in input_reader():
if line:
if ' ' in line:
cmd, args = line.split(maxsplit=1)
else:
cmd, args = line, ''
if cmd == 'py':
try:
try:
v = eval(args)
if v is not None:
print(v)
except SyntaxError:
exec(args)
except Exception:
traceback.print_exc()
elif cmd == 'r':
for ip, opcode in machina:
lineno = ip - 1
if lineno in breakpoints:
print('breakpoint at line {0}, last instruction: {1}'.format(
lineno, opcodes.get_name(opcode)), file=sys.stderr)
break
else:
print(machina.get_output())
elif cmd == 'restart':
machina = boot()
elif cmd in ('s', 'step', 'n', 'next'):
v = machina.step()
if v:
print('{0}. {1}'.format(v[0] - 2, opcodes.get_name(v[1])))
else:
print('Program has finished executing.', file=sys.stderr)
elif cmd in ('b', 'break'):
breakpoints.add(int(args))
elif cmd in ('ip', 'sp', 'stack'):
print(getattr(machina, cmd))
elif cmd in Machine.__dict__:
f = getattr(machina, cmd)
                try:
                    v = f(*eval(args + ','))
                except Exception:
                    traceback.print_exc()
                else:
                    # only print a result when the call actually succeeded
                    if v is not None:
                        print(v)
else:
print('huh?', file=sys.stderr)
if __name__ == '__main__':
main()
|
mit
| -2,740,945,060,612,117,000 | 23.578431 | 81 | 0.462704 | false |
fireeye/flare-ida
|
python/flare/objc2_xrefs_helper.py
|
1
|
21650
|
############################################
##Author: James T. Bennett
##Objective-C xrefs helper IDApython script
############################################
########################################################################
# Copyright 2019 FireEye
#
# FireEye licenses this file to you under the Apache License, Version
# 2.0 (the "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
########################################################################
import struct
from ida_bytes import get_bytes
from idaapi import add_dref
from idaapi import get_inf_structure
from idautils import Segments
from idautils import XrefsTo
from idc import BADADDR
from idc import Dword,Qword
from idc import GetFunctionName
from idc import GetMnem
from idc import GetString
from idc import Message
from idc import PatchDword
from idc import SegEnd
from idc import SegName
from idc import XREF_USER
from idc import dr_I
DWORD_SIZE=4
QWORD_SIZE=8
POINTER_SIZE=8
ARCH_X86_64=0
ARCH_ARM64=1
ARCH_UNKNOWN=100
class AArch64LDRInstruction(object):
"""A class to decode and patch arm64 LDR (literal) instructions
Decodes instruction byte string into opcode, offset, and register
Provides method to patch offset and regenerate instruction bytestring
"""
ENDIANNESS_BIG=0
ENDIANNESS_LITTLE=1
def __init__(self,instruction_bytes,endianness=1):
"""Decode the byte string for an arm64 LDR (literal) instruction
Arguments:
instruction_bytes {string} -- Four-byte string that represents the instruction
Keyword Arguments:
endianness {number} -- Whether the instruction should be decoded as big or little endian (default: ENDIANNESS_LITTLE )
Raises:
Exception -- [Invalid instruction length]
Exception -- [Invalid endianness]
"""
if not len(instruction_bytes)==4:
raise Exception("Invalid instruction length: %d" % len(instruction_bytes))
if endianness != self.ENDIANNESS_BIG and endianness != self.ENDIANNESS_LITTLE:
raise Exception("Invalid endianness value.")
self.endianness=endianness
self.instruction_bytes=instruction_bytes
self.instruction_int=self.__unpack(instruction_bytes)
self.__decode_ldr()
def __unpack(self,bytes):
if self.endianness==self.ENDIANNESS_LITTLE:
fmt="<I"
else:
fmt=">I"
return struct.unpack(fmt,bytes)[0]
def __pack(self,number):
if self.endianness==self.ENDIANNESS_LITTLE:
fmt="<I"
else:
fmt=">I"
return struct.pack(fmt,number)
def __shiftL32(self,num,count):
return (num<<count)&0xffffffff
def __shiftR32(self,num,count):
return (num>>count)&0xffffffff
def __decode_ldr(self):
ldr_literal_64_op=0b01011000
op=self.__shiftR32(self.instruction_int,24)
if not op==ldr_literal_64_op:
raise Exception("Not a valid LDR (literal) instruction)")
self.op=op
imm19_mask=self.__shiftL32(self.__shiftR32(0x00ffffff,5),5)
imm19=self.__shiftR32((self.instruction_int&imm19_mask),5)
offset=imm19*4 #shift imm19<<2
self.offset=offset
rt_mask=0b11111
rt=self.instruction_int&rt_mask
self.rt=rt
def patch_offset(self,new_offset):
"""Change the memory offset for this instruction
Update the memory offset this instruction should load from and regenerate the byte string.
Arguments:
            new_offset {number} -- New offset to be encoded into the LDR instruction
"""
new_offset=new_offset&0xffffffff
imm19_mask=self.__shiftL32(self.__shiftR32(0x00ffffff,5),5)
imm19=self.__shiftR32(new_offset,2) #rshift 2 because imm19=offset/4
imm19=self.__shiftL32(imm19,5)
imm19=imm19&imm19_mask
op=self.op
op_shifted=self.__shiftL32(op,24)
instruction_int=(op_shifted|imm19|self.rt)&0xffffffff
instruction_bytes=self.__pack(instruction_int)
self.instruction_int=instruction_int
self.instruction_bytes=instruction_bytes
self.__decode_ldr()
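# Minimal usage sketch for AArch64LDRInstruction (illustrative bytes, not taken
# from a real IDB): 0x58000141 encodes "LDR X1, #0x28" and arrives little-endian.
#   ldr = AArch64LDRInstruction("\x41\x01\x00\x58")
#   assert ldr.rt == 1 and ldr.offset == 0x28
#   ldr.patch_offset(0x40)            # re-encode with a new literal offset
#   patched = ldr.instruction_bytes   # bytes to write back into the IDB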
class ObjCException(Exception):
pass
class ObjcClass(object):
"""Class to parse an Objective-C class structure
"""
OBJC2_CLASS_RO_OFFSET=0x20 #offset into a _class_t of the ro member
OBJC2_CLASS_RO_BASE_METHODS_OFFSET=0x20 #offset into a _class_ro_t of the baseMethods member
OBJC2_CLASS_RO_NAME_OFFSET=0x18
def __init__(self,objc_class_va,segment_map,arch=ARCH_X86_64):
"""Create a new ObjcClass instance
Arguments:
objc_class_va {number} -- Virtual address of the Objective-C class to parse
segment_map {dictionary} -- A dictionary mapping segment names to a start/end virtual address tuple
Keyword Arguments:
arch {number} -- CPU architecture. Either ARCH_X86_64 or ARM64 (default: {ARCH_X86_64})
"""
self.arch=arch
self.segment_map=segment_map
class_ro_va=Qword(objc_class_va+self.OBJC2_CLASS_RO_OFFSET)
self.name_pointer=Qword(class_ro_va+self.OBJC2_CLASS_RO_NAME_OFFSET)
self.method_list=[]
if class_ro_va == BADADDR or class_ro_va==0:
self.class_ro_va=None
return
self.class_ro_va=class_ro_va
class_methods_va=Qword(class_ro_va+self.OBJC2_CLASS_RO_BASE_METHODS_OFFSET)
if class_methods_va == BADADDR or class_methods_va==0:
self.class_methods_va=None
return
self.class_methods_va=class_methods_va
Message("Class found at virtual address: 0x%x\n" % objc_class_va)
Message("Class name: %s\n" % GetString(self.name_pointer))
#Parse the method_list_t struct and build a list of methods
self.method_list=ObjcMethodList(class_methods_va,segment_map,arch=arch)
def patched_xrefs(self):
if len(self.method_list)>0:
return self.method_list.patched_xrefs()
else:
return []
class ObjCMethodAbstract(object):
"""Abstract class to parse Objective-C method structures
This class cannot be instantiated as-is. It must be extended to override add_method_xref()
"""
OBJC_METHOD_SIZE=0x18 #sizeof(struct _objc_method)
OBJC_METHOD_TYPE_OFFSET=8
OBJC_METHOD_IMP_OFFSET=0x10
#TODO: override for other architectures
CALL_MNEMONIC="call"
def __init__(self,method_va,segment_map):
"""Do not instantiate directly
Arguments:
method_va
segment_map
"""
Message("Found method at virtual address: 0x%x\n" % method_va)
self.method_va=method_va
self.segment_map=segment_map
self.name_pointer=Qword(method_va)
self.method_type=Qword(method_va+self.OBJC_METHOD_TYPE_OFFSET)
self.method_pointer_va=method_va+self.OBJC_METHOD_IMP_OFFSET
self.method_pointer=Qword(self.method_pointer_va)
self.patched_xrefs=[]
objc_selrefs = segment_map["__objc_selrefs"]
objc_msgrefs = segment_map["__objc_msgrefs"]
objc_const = segment_map["__objc_const"]
Message("Method name: %s\n" % GetFunctionName(self.method_pointer))
is_msg_ref,selector_ref,const_ref_count=self.get_xref(objc_selrefs,objc_msgrefs,objc_const)
self.is_msg_ref=is_msg_ref
self.const_ref_count=const_ref_count
if not selector_ref:
Message("No selref found.\n")
self.selector_ref=None
return
if const_ref_count == 1:
#We can only work with unambiguous situations where there is exactly one const reference
#to the selector.
self.selector_ref=selector_ref
else:
Message("Selector ref count not exactly 1. Potentially ambiguous: %d" % const_ref_count)
# Otherwise this same selector is used by more than one class. (Or none at all)
self.selector_ref=None
return
self.sel_ref_va=self.selector_ref.frm
if is_msg_ref:
# adjust pointer to beginning of message ref struct to get xrefs
self.sel_ref_va-=POINTER_SIZE
Message("selref VA: 0x%X - function VA: 0x%X\n" % (self.sel_ref_va, self.method_pointer))
#Find all the references to this *selref* (note: not the string itself but the selref)
#These should precede calls to the method
#Patch the references to the selref with a reference to the method implementation
self.walk_selector_refs()
def get_xref(self,objc_selrefs,objc_msgrefs,objc_const):
#We're looking for references to the selector string (think char **)
#Which is either a selref, a msgref, or a pointer to the selector from the class's const method list
name_ptr = self.name_pointer
is_msg_ref=False
selector_ref=None
#how many references from __objc_const are there? This indicates how many classes
#reference this selector
const_ref_count=0
for xref in XrefsTo(name_ptr):
#Is this cross reference in the range of selector references?
if objc_selrefs and xref.frm >= objc_selrefs[0] and xref.frm < objc_selrefs[1]:
is_msg_ref=False
selector_ref=xref
#else, is this cross reference in the range of msg references?
elif objc_msgrefs and xref.frm >= objc_msgrefs[0] and xref.frm < objc_msgrefs[1]:
is_msg_ref=True
selector_ref=xref
#else, is this cross reference a pointer from a (const) method list?
elif objc_const and xref.frm >= objc_const[0] and xref.frm < objc_const[1]:
const_ref_count += 1
return (is_msg_ref,selector_ref,const_ref_count)
def walk_selector_refs(self):
#sel_ref_va is the address of the selref, which itself is a pointer to the selector string
        #we're looking for cross references *to* the address of the selref
        #If we find ones we like, we replace them with a cross reference to the actual method implementation, rather than the selector
for xref in XrefsTo(self.sel_ref_va):
if GetMnem(xref.frm) == self.CALL_MNEMONIC:
continue
#We found a xref *from* somewhere *to* our selref. We need to replace that with a reference
#To the actual method implementation
method_xref=self.add_method_xref(xref)
self.patched_xrefs.append(method_xref)
def add_method_xref(self,xref):
raise Exception("Unimplemented. Use architecture specific class that overrides add_method_xref()")
class ObjCMethodX86_64(ObjCMethodAbstract):
"""x86_64-specific class to parse Objective-C method structures
Provides x86_64-specific implementation to patch method references in code.
Extends:
ObjCMethodAbstract
"""
X86_64_MOV_INSTRUCTION_SIZE=7
def __init__(self,method_va,segment_map):
"""
Create an x86-64-specific Objective-C method object
Arguments:
method_va {number} -- Virtual address of the method structure to parse.
segment_map {dictionary} -- A dictionary mapping segment names to a start/end virtual address tuple
"""
super(ObjCMethodX86_64,self).__init__(method_va,segment_map)
def add_method_xref(self,xref):
Message("Adding cross reference to method implementation for %s\n" % GetFunctionName(self.method_pointer))
#TODO: clean this up so it's more clear how we're parsing and patching the instruction
#TODO: handle other potential instructions that could place a method selref into a register
#TODO: sanity check what instruction we're actually working with before blindly deciding
# it's a 7-byte mov instruction
add_dref(xref.frm,self.method_pointer,dr_I|XREF_USER)
#offset is a rip-relative offset that gets added to rip and dereferenced
#when this instruction is executed, rip will be pointing to the next instruction
#meaning it has been incremented by 7 (the length of the mov instruction)
offset=self.method_pointer-xref.frm-self.X86_64_MOV_INSTRUCTION_SIZE
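        #Illustrative example (hypothetical addresses): with the mov at
        #0x100001000 and the method implementation at 0x100002000, the patched
        #rel32 operand becomes 0x100002000 - 0x100001000 - 7 = 0xff9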
#this replaces mov RSI, &selector with:
# mov RSI, &method
#xref.frm is the address of the mov instruction
#+3 (4th byte of the instruction)
#is where the RIP-relative operand is that
#will get dereferenced as a pointer
PatchDword(xref.frm+3,offset)
return ObjcMethodXref(xref.frm,self.method_pointer,xref.to)
class ObjCMethodArm64(ObjCMethodAbstract):
"""Arm64-specific class to parse Objective-C method structures
Provides Arm64-specific implementation to patch method references in code.
Extends:
ObjCMethodAbstract
"""
ARM64_INSTRUCTION_SIZE=4
def __init__(self,method_va,segment_map):
"""
Create an Arm64-specific Objective-C method object
Arguments:
method_va {number} -- Virtual address of the method structure to parse.
segment_map {dictionary} -- A dictionary mapping segment names to a start/end virtual address tuple
"""
super(ObjCMethodArm64,self).__init__(method_va,segment_map)
def add_method_xref(self,xref):
Message("Adding cross reference to method implementation for %s\n" % GetFunctionName(self.method_pointer))
add_dref(xref.frm,self.method_pointer,dr_I|XREF_USER)
offset=self.method_pointer-xref.frm
instruction_bytes=get_bytes(xref.frm,self.ARM64_INSTRUCTION_SIZE)
#TODO: are there other instructions that could reference a method selector
#and then move the selector reference into a register?
arm64_ldr=AArch64LDRInstruction(instruction_bytes)
arm64_ldr.patch_offset(offset)
PatchDword(xref.frm,arm64_ldr.instruction_int)
return ObjcMethodXref(xref.frm,self.method_pointer,xref.to)
class ObjcMethodList(list):
"""A class to parse Objective-C method list structures
Creates an iterable list of Object-C Method objects
Extends:
list
"""
METHOD_LIST_OFFSET=DWORD_SIZE*2 # method_list array starts after entsize and method_count (both ints)
OBJC_METHOD_CLASSES=[ObjCMethodX86_64,ObjCMethodArm64]
def __init__(self,method_list_va,segment_map,arch=ARCH_X86_64):
"""Create a new list of Objective-C method objects
Arguments:
method_list_va {number} -- Virtual address where the Objective-C method list structure is located
segment_map {dictionary} -- A dictionary mapping segment names to a start/end virtual address tuple
Keyword Arguments:
arch {number} -- CPU architecture. Either ARCH_X86_64 or ARM64 (default: {ARCH_X86_64})
"""
super(ObjcMethodList,self).__init__()
self.ObjCMethod=self.OBJC_METHOD_CLASSES[arch]
self.method_list_va=method_list_va
self.segment_map=segment_map
objc_selrefs = segment_map["__objc_selrefs"]
objc_msgrefs = segment_map["__objc_msgrefs"]
objc_const = segment_map["__objc_const"]
#Walk the method_list_t struct and parse out each _objc_method
self.walk_methods(objc_selrefs,objc_msgrefs,objc_const)
def walk_methods(self,objc_selrefs,objc_msgrefs,objc_const):
Message("Walking methods starting at virtual address: 0x%x\n" % self.method_list_va)
class_methods_va=self.method_list_va
#deref the method list struct to get method count:
count=Dword(class_methods_va+DWORD_SIZE)
method_size=self.ObjCMethod.OBJC_METHOD_SIZE #sizeof(struct _objc_method)
#skip first two dwords in the method_list struct
class_methods_start=class_methods_va+self.METHOD_LIST_OFFSET
class_methods_end=class_methods_start+(method_size*count)
for va in range(class_methods_start,class_methods_end,method_size):
#Parse this method struct and create a method object
#If possible, the method will patch the IDB to replace references to its selector
#with a reference to its implementation
objc_method=self.ObjCMethod(va,self.segment_map)
self.append(objc_method)
def patched_xrefs(self):
# this is equivalent to:
# for method in self:
# for xref in method.patch_xrefs:
# ...add xref to a list...
return [xref for method in self for xref in method.patched_xrefs]
class ObjcMethodXref(object):
"""A class to represent patched method crosss references
"""
def __init__(self,frm_va,to_va,old_to_va):
"""Create a new ObjcMethodXref object
Arguments:
frm_va {number} -- Virtual address location of the reference
to_va {[type]} -- Virtual address that is pointed to by the reference
old_to_va {[type]} -- Virtual address that was pointed to by the reference prior to patching
"""
self.frm_va=frm_va
self.to_va=to_va
self.old_to_va=old_to_va
self.method_name=GetFunctionName(self.to_va)
def __str__(self):
return "[0x%x] --> %s"%(self.frm_va,self.method_name)
class ObjCMethodXRefs(list):
"""A class to parse Objective-C class and method structures
Parses class and method structures and locates cross-references to the method selectors.
If the class that references the selectors is unambiguous, all code references to the selectors
are replaced with references to the method implementation.
What results is an iterable list of all cross references that were patched.
Extends:
list
"""
objc2ClassSize = 0x28
def __init__(self,arch=ARCH_X86_64):
"""
Create a new list of of method cross-references
Keyword Arguments:
arch {number} -- CPU architecture. Either ARCH_X86_64 or ARM64 (default: {ARCH_X86_64})
"""
super(ObjCMethodXRefs,self).__init__()
self.arch=arch
segment_names=["__objc_data","__objc_selrefs","__objc_msgrefs","__objc_const"]
segment_map=self.find_all_segments(segment_names)
# Segment map looks like:
# {
# "__objc_data":(obc_data_start_va,objc_data_end_va),
# ...
# }
for name in segment_names:
if name not in segment_map:
raise ObjCException("Couldn't find segment %s" % name)
#Walk __objc_data and build a list of classes
self.walk_classes(segment_map)
def find_all_segments(self,segment_names):
segments={name:None for name in segment_names}
for seg_va in Segments():
seg_name=SegName(seg_va)
if seg_name in segment_names:
segments[seg_name]=(seg_va,SegEnd(seg_va))
return segments
def walk_classes(self,segment_map):
Message("Walking classes\n")
classes=[]
objc_data_start,objc_data_end=segment_map["__objc_data"]
for va in range(objc_data_start,objc_data_end,self.objc2ClassSize):
objc_class=ObjcClass(va,segment_map,arch=self.arch)
classes.append(objc_class)
self.extend(objc_class.patched_xrefs())
self.classes=classes
def detect_arch():
#heuristically determine what architecture we're on
#only x86-64 and arm64 are supported
is_le=False
bits=0
info = get_inf_structure()
arch=ARCH_UNKNOWN
if info.is_64bit():
bits=64
elif info.is_32bit():
bits=32
else:
bits=16
if not info.is_be():
is_le=True
procname=info.procName
if bits==64 and is_le:
if procname=="ARM":
Message("Detected architecture: arm64\n")
arch=ARCH_ARM64
elif procname=="metapc":
Message("Detected architecture: x86_64\n")
arch=ARCH_X86_64
return arch
def main():
arch=detect_arch()
xref_list=ObjCMethodXRefs(arch=arch)
Message("Patched the following method references:\n")
for xref in xref_list:
Message("%s\n" % str(xref))
if __name__ == '__main__':
main()
|
apache-2.0
| 5,568,455,069,248,737,000 | 35.586806 | 134 | 0.618152 | false |
frastlin/PyAudioGame
|
pyaudiogame/console.py
|
1
|
4516
|
"""Simple example showing how to get keyboard events.
Note that the mouse events don't work very well. Something is wrong with the pipe process that keeps the mouse event process from exiting in the inputs script. The bug has been reported, and as soon as it is fixed, uncommenting the run_mouse() function will work.
print(event.ev_type, event.code, event.state)
"""
import sys
from .inputs import get_key, get_mouse
from pyaudiogame.event_handler import EventHandler, EventTemplate
mod_keys = [
'capslock',
'left shift',
'right shift',
'left ctrl',
'right ctrl',
'f15',
'right meta',
'left meta'
]
event_names = {
'esc': 'escape',
'enter': 'return',
'f15': 'help',
'102nd': '\\',
'minus': '-',
'apostrophe': "`",
'equal': '=',
'backslash': "'",
'semicolon': ';',
'rightbrace': ']',
'leftbrace': '[',
}
duplicated_keys = [
'backspace',
'delete',
'up',
'down',
'right',
'left',
'page up',
'page down',
'home',
'end',
]
mouse_types = {
'Absolute': 'mousemotion',
'Key': 'mousebutton',
}
class Console(EventHandler):
def __init__(self, **kwargs):
EventHandler.__init__(self, **kwargs)
self.mod_keys = mod_keys
self.pressed = set()
self.activated_already = set()
self.last_mouse_coords = (0, 0)
self.event_queue = []
		# the lists below are queues for each event type, because the get_* event functions are blocking.
self.mouse_events = []
self.keyboard_events = []
def run(self):
self.event_queue = []
self.run_key(self.event_queue)
# self.run_mouse(self.event_queue)
[self.run_handlers(e) for e in self.event_queue if e.input]
def processCode(self, code):
"""makes the key code the same as pygame's"""
code = code.split("_")[-1].lower()
mod_code = code.replace('left', '').replace('right', '')
if 'page' in code:
code = 'page ' + code[4:]
if mod_code in ['ctrl', 'shift', 'meta']:
code = code.replace('left', 'left ').replace('right', 'right ')
code = event_names.get(code, code)
return code
def run_mouse(self, queue):
events = get_mouse(False)
coords = [0,0]
mousemotion_event = None
for event in events:
event_type, code, state = event.ev_type, event.code.lower(), event.state
event_obj = EventTemplate(event)
event.mods = self.pressed
if '_x' in code:
coords[0] = state
mousemotion_event = event
continue
elif '_y' in code:
coords[1] = state
continue
elif 'btn_' in code:
event_obj.set_type('mousebutton')
event_obj.button = code[4:]
event_obj.state = state
if event.state == 1:
self.pressed.add(event)
elif event in self.pressed:
self.pressed.remove(event)
else:
event_obj.set_type(event_type)
event_obj.state = state
queue.append(event_obj)
if coords[0] or coords[1]:
event_obj = EventTemplate(mousemotion_event)
event_obj.set_type('mousemotion')
event.mods = self.pressed
event_obj.mouse_x, event_obj.mouse_y = coords
event_obj.mouse_rel = (self.last_mouse_coords[0] - coords[0], self.last_mouse_coords[1] - coords[1])
self.last_mouse_coords = coords
queue.append(event_obj)
def run_key(self, queue):
events = get_key(False)
for event in events:
event = self.convert_key_event(event)
if not event: # this is to make sure the duplicated events don't run
continue
queue.append(event)
if event.state == 1 and event.input and not event.key in [e.key for e in self.pressed]:
self.pressed.add(event)
def convert_key_event(self, event):
ev_type, code, state = [event.ev_type, event.code, event.state]
code = self.processCode(code)
# make sure no duplicate keys are pressed
if code in duplicated_keys and (code, state) in self.activated_already:
self.activated_already.remove((code, state))
return False
else:
self.activated_already.add((code, state))
in_mods = [e for e in self.pressed if code == e.key]
if not state and in_mods:
self.pressed.remove(in_mods[0])
event_obj = EventTemplate()
event_obj.event = event
event_obj.set_type(ev_type.lower())
event_obj.state = state
event_obj.mods = list(self.pressed)
if event_obj.type == 'key':
event_obj.key = code
return event_obj
if __name__ == "__main__":
def main():
"""Just print out some event infomation when keys are pressed."""
def run_func(event):
if event.type == 'key':
print(event.key)
print("mods are: %s" % event.mods)
elif event.type == "mousebutton" and event.state:
print("clicked!")
console = Console(run_func)
while 1:
console.run()
main()
|
mit
| -3,174,759,603,819,471,400 | 26.375758 | 263 | 0.658547 | false |
SuriyaaKudoIsc/olympia
|
apps/devhub/tests/test_views_versions.py
|
1
|
33530
|
import re
from datetime import datetime, timedelta
import mock
from nose.tools import eq_
from pyquery import PyQuery as pq
from django.core.files import temp
import amo
import amo.tests
from amo.urlresolvers import reverse
from amo.tests import formset, initial
from addons.models import Addon
from applications.models import AppVersion
from devhub.models import ActivityLog
from files.models import File
from users.models import UserProfile
from versions.models import ApplicationsVersions, Version
class TestVersion(amo.tests.TestCase):
fixtures = ['base/users', 'base/addon_3615']
def setUp(self):
super(TestVersion, self).setUp()
self.client.login(username='del@icio.us', password='password')
self.user = UserProfile.objects.get(email='del@icio.us')
self.addon = self.get_addon()
self.version = Version.objects.get(id=81551)
self.url = self.addon.get_dev_url('versions')
self.disable_url = self.addon.get_dev_url('disable')
self.enable_url = self.addon.get_dev_url('enable')
self.delete_url = reverse('devhub.versions.delete', args=['a3615'])
self.delete_data = {'addon_id': self.addon.pk,
'version_id': self.version.pk}
def get_addon(self):
return Addon.objects.get(id=3615)
def get_doc(self):
res = self.client.get(self.url)
eq_(res.status_code, 200)
return pq(res.content)
def test_version_status_public(self):
doc = self.get_doc()
assert doc('.addon-status')
self.addon.update(status=amo.STATUS_DISABLED, disabled_by_user=True)
doc = self.get_doc()
assert doc('.addon-status .status-admin-disabled')
eq_(doc('.addon-status .status-admin-disabled').text(),
'Disabled by Mozilla')
self.addon.update(disabled_by_user=False)
doc = self.get_doc()
eq_(doc('.addon-status .status-admin-disabled').text(),
'Disabled by Mozilla')
self.addon.update(status=amo.STATUS_PUBLIC, disabled_by_user=True)
doc = self.get_doc()
eq_(doc('.addon-status .status-disabled').text(),
'You have disabled this add-on')
def test_no_validation_results(self):
doc = self.get_doc()
v = doc('td.file-validation').text()
eq_(re.sub(r'\s+', ' ', v),
'All Platforms Not validated. Validate now.')
eq_(doc('td.file-validation a').attr('href'),
reverse('devhub.file_validation',
args=[self.addon.slug, self.version.all_files[0].id]))
def test_upload_link_label_in_edit_nav(self):
url = reverse('devhub.versions.edit',
args=(self.addon.slug, self.version.pk))
r = self.client.get(url)
doc = pq(r.content)
eq_(doc('.addon-status>.addon-upload>strong>a').text(),
'Upload a new file')
def test_delete_message(self):
"""Make sure we warn our users of the pain they will feel."""
r = self.client.get(self.url)
doc = pq(r.content)
eq_(doc('#modal-delete p').eq(0).text(),
'Deleting your add-on will permanently remove it from the site '
'and prevent its GUID from being submitted ever again, even by '
'you. The existing users of your add-on will remain on this '
'update channel and never receive updates again.')
def test_delete_message_if_bits_are_messy(self):
"""Make sure we warn krupas of the pain they will feel."""
self.addon.highest_status = amo.STATUS_NULL
self.addon.status = amo.STATUS_UNREVIEWED
self.addon.save()
r = self.client.get(self.url)
doc = pq(r.content)
eq_(doc('#modal-delete p').eq(0).text(),
'Deleting your add-on will permanently remove it from the site '
'and prevent its GUID from being submitted ever again, even by '
'you. The existing users of your add-on will remain on this '
'update channel and never receive updates again.')
def test_delete_message_incomplete(self):
"""
If an addon has highest_status = 0, they shouldn't be bothered with a
blacklisting threat if they hit delete.
"""
self.addon.highest_status = amo.STATUS_NULL
self.addon.status = amo.STATUS_NULL
self.addon.save()
r = self.client.get(self.url)
doc = pq(r.content)
# Normally 2 paragraphs, one is the warning which we should take out.
eq_(doc('#modal-delete p.warning').length, 0)
def test_delete_version(self):
self.client.post(self.delete_url, self.delete_data)
assert not Version.objects.filter(pk=81551).exists()
eq_(ActivityLog.objects.filter(action=amo.LOG.DELETE_VERSION.id)
.count(), 1)
def test_delete_version_then_detail(self):
version, file = self._extra_version_and_file(amo.STATUS_LITE)
self.client.post(self.delete_url, self.delete_data)
res = self.client.get(reverse('addons.detail', args=[self.addon.slug]))
eq_(res.status_code, 200)
def test_cant_delete_version(self):
self.client.logout()
res = self.client.post(self.delete_url, self.delete_data)
eq_(res.status_code, 302)
assert Version.objects.filter(pk=81551).exists()
def test_version_delete_status_null(self):
res = self.client.post(self.delete_url, self.delete_data)
eq_(res.status_code, 302)
eq_(self.addon.versions.count(), 0)
eq_(Addon.objects.get(id=3615).status, amo.STATUS_NULL)
def _extra_version_and_file(self, status):
version = Version.objects.get(id=81551)
version_two = Version(addon=self.addon,
license=version.license,
version='1.2.3')
version_two.save()
file_two = File(status=status, version=version_two)
file_two.save()
return version_two, file_two
def test_version_delete_status(self):
self._extra_version_and_file(amo.STATUS_PUBLIC)
res = self.client.post(self.delete_url, self.delete_data)
eq_(res.status_code, 302)
eq_(self.addon.versions.count(), 1)
eq_(Addon.objects.get(id=3615).status, amo.STATUS_PUBLIC)
    def test_version_delete_status_unreviewed(self):
self._extra_version_and_file(amo.STATUS_BETA)
res = self.client.post(self.delete_url, self.delete_data)
eq_(res.status_code, 302)
eq_(self.addon.versions.count(), 1)
eq_(Addon.objects.get(id=3615).status, amo.STATUS_UNREVIEWED)
@mock.patch('files.models.File.hide_disabled_file')
def test_user_can_disable_addon(self, hide_mock):
self.addon.update(status=amo.STATUS_PUBLIC,
disabled_by_user=False)
res = self.client.post(self.disable_url)
eq_(res.status_code, 302)
addon = Addon.objects.get(id=3615)
eq_(addon.disabled_by_user, True)
eq_(addon.status, amo.STATUS_PUBLIC)
assert hide_mock.called
entry = ActivityLog.objects.get()
eq_(entry.action, amo.LOG.USER_DISABLE.id)
msg = entry.to_string()
assert self.addon.name.__unicode__() in msg, ("Unexpected: %r" % msg)
def test_user_get(self):
eq_(self.client.get(self.enable_url).status_code, 405)
def test_user_can_enable_addon(self):
self.addon.update(status=amo.STATUS_PUBLIC, disabled_by_user=True)
res = self.client.post(self.enable_url)
self.assertRedirects(res, self.url, 302)
addon = self.get_addon()
eq_(addon.disabled_by_user, False)
eq_(addon.status, amo.STATUS_PUBLIC)
entry = ActivityLog.objects.get()
eq_(entry.action, amo.LOG.USER_ENABLE.id)
msg = entry.to_string()
assert unicode(self.addon.name) in msg, ("Unexpected: %r" % msg)
def test_unprivileged_user_cant_disable_addon(self):
self.addon.update(disabled_by_user=False)
self.client.logout()
res = self.client.post(self.disable_url)
eq_(res.status_code, 302)
eq_(Addon.objects.get(id=3615).disabled_by_user, False)
def test_non_owner_cant_disable_addon(self):
self.addon.update(disabled_by_user=False)
self.client.logout()
assert self.client.login(username='regular@mozilla.com',
password='password')
res = self.client.post(self.disable_url)
eq_(res.status_code, 403)
eq_(Addon.objects.get(id=3615).disabled_by_user, False)
def test_non_owner_cant_enable_addon(self):
self.addon.update(disabled_by_user=False)
self.client.logout()
assert self.client.login(username='regular@mozilla.com',
password='password')
res = self.client.get(self.enable_url)
eq_(res.status_code, 403)
eq_(Addon.objects.get(id=3615).disabled_by_user, False)
def test_show_disable_button(self):
self.addon.update(disabled_by_user=False)
res = self.client.get(self.url)
doc = pq(res.content)
assert doc('#modal-disable')
assert doc('.disable-addon')
assert not doc('.enable-addon')
def test_not_show_disable(self):
self.addon.update(status=amo.STATUS_DISABLED, disabled_by_user=False)
res = self.client.get(self.url)
doc = pq(res.content)
assert not doc('#modal-disable')
assert not doc('.disable-addon')
def test_show_enable_button(self):
self.addon.update(disabled_by_user=True)
res = self.client.get(self.url)
doc = pq(res.content)
a = doc('.enable-addon')
assert a, "Expected Enable addon link"
eq_(a.attr('href'), self.enable_url)
assert not doc('#modal-disable')
assert not doc('.disable-addon')
def test_cancel_get(self):
cancel_url = reverse('devhub.addons.cancel', args=['a3615'])
eq_(self.client.get(cancel_url).status_code, 405)
def test_cancel_wrong_status(self):
cancel_url = reverse('devhub.addons.cancel', args=['a3615'])
for status in Addon.STATUS_CHOICES:
if status in amo.UNDER_REVIEW_STATUSES + (amo.STATUS_DELETED,):
continue
self.addon.update(status=status)
self.client.post(cancel_url)
eq_(Addon.objects.get(id=3615).status, status)
def test_cancel(self):
cancel_url = reverse('devhub.addons.cancel', args=['a3615'])
self.addon.update(status=amo.STATUS_LITE_AND_NOMINATED)
self.client.post(cancel_url)
eq_(Addon.objects.get(id=3615).status, amo.STATUS_LITE)
for status in (amo.STATUS_UNREVIEWED, amo.STATUS_NOMINATED):
self.addon.update(status=status)
self.client.post(cancel_url)
eq_(Addon.objects.get(id=3615).status, amo.STATUS_NULL)
def test_not_cancel(self):
self.client.logout()
cancel_url = reverse('devhub.addons.cancel', args=['a3615'])
eq_(self.addon.status, amo.STATUS_PUBLIC)
res = self.client.post(cancel_url)
eq_(res.status_code, 302)
eq_(Addon.objects.get(id=3615).status, amo.STATUS_PUBLIC)
def test_cancel_button(self):
for status in Addon.STATUS_CHOICES:
if status not in amo.UNDER_REVIEW_STATUSES:
continue
self.addon.update(status=status)
res = self.client.get(self.url)
doc = pq(res.content)
assert doc('#cancel-review')
assert doc('#modal-cancel')
def test_not_cancel_button(self):
for status in Addon.STATUS_CHOICES:
if status in amo.UNDER_REVIEW_STATUSES:
continue
self.addon.update(status=status)
res = self.client.get(self.url)
doc = pq(res.content)
assert not doc('#cancel-review')
assert not doc('#modal-cancel')
def test_purgatory_request_review(self):
self.addon.update(status=amo.STATUS_PURGATORY)
doc = pq(self.client.get(self.url).content)
buttons = doc('.version-status-actions form button').text()
eq_(buttons, 'Request Preliminary Review Request Full Review')
def test_incomplete_request_review(self):
self.addon.update(status=amo.STATUS_NULL)
doc = pq(self.client.get(self.url).content)
buttons = doc('.version-status-actions form button').text()
eq_(buttons, 'Request Preliminary Review Request Full Review')
def test_rejected_request_review(self):
self.addon.update(status=amo.STATUS_NULL)
self.addon.latest_version.files.update(status=amo.STATUS_DISABLED)
doc = pq(self.client.get(self.url).content)
buttons = doc('.version-status-actions form button').text()
eq_(buttons, None)
def test_days_until_full_nomination(self):
f = File.objects.create(status=amo.STATUS_LITE, version=self.version)
f.update(datestatuschanged=datetime.now() - timedelta(days=4))
self.addon.update(status=amo.STATUS_LITE)
doc = pq(self.client.get(self.url).content)
eq_(doc('.version-status-actions .warning').text(),
'Full nomination will be available in 6 days')
def test_add_version_modal(self):
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)
# Make sure checkboxes are visible:
eq_(doc('.supported-platforms input.platform').length, 5)
eq_(set([i.attrib['type'] for i in doc('input.platform')]),
set(['checkbox']))
class TestVersionEditMixin(object):
def get_addon(self):
return Addon.objects.no_cache().get(id=3615)
def get_version(self):
return self.get_addon().current_version
def formset(self, *args, **kw):
defaults = {'approvalnotes': 'xxx'}
defaults.update(kw)
return formset(*args, **defaults)
class TestVersionEditBase(TestVersionEditMixin, amo.tests.TestCase):
fixtures = ['base/users', 'base/addon_3615', 'base/thunderbird']
def setUp(self):
super(TestVersionEditBase, self).setUp()
self.client.login(username='del@icio.us', password='password')
self.addon = self.get_addon()
self.version = self.get_version()
self.url = reverse('devhub.versions.edit',
args=['a3615', self.version.id])
self.v1, _created = AppVersion.objects.get_or_create(
application=amo.FIREFOX.id, version='1.0')
self.v5, _created = AppVersion.objects.get_or_create(
application=amo.FIREFOX.id, version='5.0')
class TestVersionEditMobile(TestVersionEditBase):
def setUp(self):
super(TestVersionEditMobile, self).setUp()
self.version.apps.all().delete()
app_vr = AppVersion.objects.create(application=amo.ANDROID.id,
version='1.0')
ApplicationsVersions.objects.create(version=self.version,
application=amo.ANDROID.id,
min=app_vr, max=app_vr)
self.version.files.update(platform=amo.PLATFORM_ANDROID.id)
def test_mobile_platform_options(self):
ctx = self.client.get(self.url).context
fld = ctx['file_form'].forms[0]['platform'].field
eq_(sorted(amo.PLATFORMS[p[0]].shortname for p in fld.choices),
['android'])
class TestVersionEditDetails(TestVersionEditBase):
def setUp(self):
super(TestVersionEditDetails, self).setUp()
ctx = self.client.get(self.url).context
compat = initial(ctx['compat_form'].forms[0])
files = initial(ctx['file_form'].forms[0])
self.initial = formset(compat, **formset(files, prefix='files'))
def formset(self, *args, **kw):
defaults = dict(self.initial)
defaults.update(kw)
return super(TestVersionEditDetails, self).formset(*args, **defaults)
def test_edit_notes(self):
d = self.formset(releasenotes='xx', approvalnotes='yy')
r = self.client.post(self.url, d)
eq_(r.status_code, 302)
version = self.get_version()
eq_(unicode(version.releasenotes), 'xx')
eq_(unicode(version.approvalnotes), 'yy')
def test_version_number_redirect(self):
url = self.url.replace(str(self.version.id), self.version.version)
r = self.client.get(url, follow=True)
self.assertRedirects(r, self.url)
def test_supported_platforms(self):
res = self.client.get(self.url)
choices = res.context['new_file_form'].fields['platform'].choices
taken = [f.platform for f in self.version.files.all()]
platforms = set(self.version.compatible_platforms()) - set(taken)
eq_(len(choices), len(platforms))
def test_can_upload(self):
self.version.files.all().delete()
r = self.client.get(self.url)
doc = pq(r.content)
assert doc('a.add-file')
def test_not_upload(self):
res = self.client.get(self.url)
doc = pq(res.content)
assert not doc('a.add-file')
def test_add(self):
res = self.client.get(self.url)
doc = pq(res.content)
assert res.context['compat_form'].extra_forms
assert doc('p.add-app')[0].attrib['class'] == 'add-app'
def test_add_not(self):
for id in [18, 52, 59, 60, 61]:
av = AppVersion(application=id, version='1')
av.save()
ApplicationsVersions(application=id, min=av, max=av,
version=self.version).save()
res = self.client.get(self.url)
doc = pq(res.content)
assert not res.context['compat_form'].extra_forms
assert doc('p.add-app')[0].attrib['class'] == 'add-app hide'
def test_should_accept_zip_source_file(self):
tdir = temp.gettempdir()
tmp_file = temp.NamedTemporaryFile
with tmp_file(suffix=".zip", dir=tdir) as source_file:
source_file.write('a' * (2 ** 21))
source_file.seek(0)
data = self.formset(source=source_file)
response = self.client.post(self.url, data)
eq_(response.status_code, 302)
version = Version.objects.get(pk=self.version.pk)
assert version.source
assert version.addon.admin_review
# Check that the corresponding automatic activity log has been created.
log = ActivityLog.objects.get(action=amo.LOG.REQUEST_SUPER_REVIEW.id)
assert log.details['comments'] == (
u'This version has been automatically flagged as admin review, as '
u'it had some source files attached when submitted.')
def test_should_not_accept_exe_source_file(self):
tdir = temp.gettempdir()
tmp_file = temp.NamedTemporaryFile
with tmp_file(suffix=".exe", dir=tdir) as source_file:
source_file.write('a' * (2 ** 21))
source_file.seek(0)
data = self.formset(source=source_file)
response = self.client.post(self.url, data)
eq_(response.status_code, 200)
assert not Version.objects.get(pk=self.version.pk).source
def test_dont_reset_admin_review_flag_if_no_new_source(self):
tdir = temp.gettempdir()
tmp_file = temp.NamedTemporaryFile
with tmp_file(suffix=".zip", dir=tdir) as source_file:
source_file.write('a' * (2 ** 21))
source_file.seek(0)
data = self.formset(source=source_file)
response = self.client.post(self.url, data)
eq_(response.status_code, 302)
version = Version.objects.get(pk=self.version.pk)
assert version.source
assert version.addon.admin_review
        # Unset the "admin review" flag, and re-save the version. It shouldn't
# reset the flag, as the source hasn't changed.
version.addon.update(admin_review=False)
data = self.formset(name='some other name')
response = self.client.post(self.url, data)
eq_(response.status_code, 302)
version = Version.objects.get(pk=self.version.pk)
assert version.source
assert not version.addon.admin_review
class TestVersionEditSearchEngine(TestVersionEditMixin,
amo.tests.BaseTestCase):
# https://bugzilla.mozilla.org/show_bug.cgi?id=605941
fixtures = ['base/users', 'base/thunderbird', 'base/addon_4594_a9.json']
def setUp(self):
super(TestVersionEditSearchEngine, self).setUp()
self.client.login(username='admin@mozilla.com', password='password')
self.url = reverse('devhub.versions.edit',
args=['a4594', 42352])
def test_search_engine_edit(self):
dd = self.formset(prefix="files", releasenotes='xx',
approvalnotes='yy')
r = self.client.post(self.url, dd)
eq_(r.status_code, 302)
version = Addon.objects.no_cache().get(id=4594).current_version
eq_(unicode(version.releasenotes), 'xx')
eq_(unicode(version.approvalnotes), 'yy')
def test_no_compat(self):
r = self.client.get(self.url)
doc = pq(r.content)
assert not doc("#id_form-TOTAL_FORMS")
def test_no_upload(self):
r = self.client.get(self.url)
doc = pq(r.content)
assert not doc('a.add-file')
@mock.patch('versions.models.Version.is_allowed_upload')
def test_can_upload(self, allowed):
allowed.return_value = True
res = self.client.get(self.url)
doc = pq(res.content)
assert doc('a.add-file')
class TestVersionEditFiles(TestVersionEditBase):
def setUp(self):
super(TestVersionEditFiles, self).setUp()
f = self.client.get(self.url).context['compat_form'].initial_forms[0]
self.compat = initial(f)
def formset(self, *args, **kw):
compat = formset(self.compat, initial_count=1)
compat.update(kw)
return super(TestVersionEditFiles, self).formset(*args, **compat)
def test_delete_file(self):
version = self.addon.current_version
version.files.all()[0].update(status=amo.STATUS_UNREVIEWED)
eq_(self.version.files.count(), 1)
forms = map(initial,
self.client.get(self.url).context['file_form'].forms)
forms[0]['DELETE'] = True
eq_(ActivityLog.objects.count(), 0)
r = self.client.post(self.url, self.formset(*forms, prefix='files'))
eq_(ActivityLog.objects.count(), 2)
log = ActivityLog.objects.order_by('created')[1]
eq_(log.to_string(), u'File delicious_bookmarks-2.1.072-fx.xpi deleted'
' from <a href="/en-US/firefox/addon/a3615'
'/versions/2.1.072">Version 2.1.072</a> of <a '
'href="/en-US/firefox/addon/a3615/">Delicious '
'Bookmarks</a>.')
eq_(r.status_code, 302)
eq_(self.version.files.count(), 0)
r = self.client.get(self.url)
eq_(r.status_code, 200)
def test_unique_platforms(self):
# Move the existing file to Linux.
f = self.version.files.get()
f.update(platform=amo.PLATFORM_LINUX.id)
# And make a new file for Mac.
File.objects.create(version=self.version,
platform=amo.PLATFORM_MAC.id)
forms = map(initial,
self.client.get(self.url).context['file_form'].forms)
forms[1]['platform'] = forms[0]['platform']
r = self.client.post(self.url, self.formset(*forms, prefix='files'))
doc = pq(r.content)
assert doc('#id_files-0-platform')
eq_(r.status_code, 200)
eq_(r.context['file_form'].non_form_errors(),
['A platform can only be chosen once.'])
def test_all_platforms(self):
version = self.addon.current_version
version.files.all()[0].update(status=amo.STATUS_UNREVIEWED)
File.objects.create(version=self.version,
platform=amo.PLATFORM_MAC.id)
forms = self.client.get(self.url).context['file_form'].forms
forms = map(initial, forms)
res = self.client.post(self.url, self.formset(*forms, prefix='files'))
eq_(res.context['file_form'].non_form_errors()[0],
'The platform All cannot be combined with specific platforms.')
def test_all_platforms_and_delete(self):
version = self.addon.current_version
version.files.all()[0].update(status=amo.STATUS_UNREVIEWED)
File.objects.create(
version=self.version, platform=amo.PLATFORM_MAC.id)
forms = self.client.get(self.url).context['file_form'].forms
forms = map(initial, forms)
        # Test that we don't check the platform for deleted files.
forms[1]['DELETE'] = 1
self.client.post(self.url, self.formset(*forms, prefix='files'))
eq_(self.version.files.count(), 1)
def add_in_bsd(self):
f = self.version.files.get()
# The default file is All, which prevents the addition of more files.
f.update(platform=amo.PLATFORM_MAC.id)
return File.objects.create(version=self.version,
platform=amo.PLATFORM_BSD.id)
def get_platforms(self, form):
return [amo.PLATFORMS[i[0]].shortname
for i in form.fields['platform'].choices]
# The unsupported platform tests are for legacy addons. We don't
# want new addons uploaded with unsupported platforms but the old files can
# still be edited.
def test_all_unsupported_platforms(self):
self.add_in_bsd()
forms = self.client.get(self.url).context['file_form'].forms
choices = self.get_platforms(forms[1])
assert 'bsd' in choices, (
'After adding a BSD file, expected its platform to be '
'available in: %r' % choices)
def test_all_unsupported_platforms_unchange(self):
bsd = self.add_in_bsd()
forms = self.client.get(self.url).context['file_form'].forms
forms = map(initial, forms)
self.client.post(self.url, self.formset(*forms, prefix='files'))
eq_(File.objects.no_cache().get(pk=bsd.pk).platform,
amo.PLATFORM_BSD.id)
def test_all_unsupported_platforms_change(self):
bsd = self.add_in_bsd()
forms = self.client.get(self.url).context['file_form'].forms
forms = map(initial, forms)
# Update the file platform to Linux:
forms[1]['platform'] = amo.PLATFORM_LINUX.id
self.client.post(self.url, self.formset(*forms, prefix='files'))
eq_(File.objects.no_cache().get(pk=bsd.pk).platform,
amo.PLATFORM_LINUX.id)
forms = self.client.get(self.url).context['file_form'].forms
choices = self.get_platforms(forms[1])
assert 'bsd' not in choices, (
'After changing BSD file to Linux, BSD should no longer be a '
'platform choice in: %r' % choices)
def test_add_file_modal(self):
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)
# Make sure radio buttons are visible:
eq_(doc('.platform ul label').text(), 'Linux Mac OS X Windows')
eq_(set([i.attrib['type'] for i in doc('input.platform')]),
set(['radio']))
def test_mobile_addon_supports_only_mobile_platforms(self):
for a in self.version.apps.all():
a.application = amo.ANDROID.id
a.save()
self.version.files.all().update(platform=amo.PLATFORM_ANDROID.id)
forms = self.client.get(self.url).context['file_form'].forms
choices = self.get_platforms(forms[0])
eq_(sorted(choices),
sorted([p.shortname for p in amo.MOBILE_PLATFORMS.values()]))
class TestPlatformSearch(TestVersionEditMixin, amo.tests.BaseTestCase):
fixtures = ['base/users', 'base/thunderbird', 'base/addon_4594_a9.json']
def setUp(self):
super(TestPlatformSearch, self).setUp()
self.client.login(username='admin@mozilla.com', password='password')
self.url = reverse('devhub.versions.edit',
args=['a4594', 42352])
self.version = Version.objects.get(id=42352)
self.file = self.version.files.all()[0]
def test_no_platform_search_engine(self):
response = self.client.get(self.url)
doc = pq(response.content)
assert not doc('#id_files-0-platform')
def test_changing_platform_search_engine(self):
dd = self.formset({'id': int(self.file.pk),
'platform': amo.PLATFORM_LINUX.id},
prefix='files', releasenotes='xx',
approvalnotes='yy')
response = self.client.post(self.url, dd)
eq_(response.status_code, 302)
file_ = Version.objects.no_cache().get(id=42352).files.all()[0]
eq_(amo.PLATFORM_ALL.id, file_.platform)
class TestVersionEditCompat(TestVersionEditBase):
def get_form(self, url=None):
if not url:
url = self.url
av = self.version.apps.get()
eq_(av.min.version, '2.0')
eq_(av.max.version, '4.0')
f = self.client.get(url).context['compat_form'].initial_forms[0]
return initial(f)
def formset(self, *args, **kw):
defaults = formset(prefix='files')
defaults.update(kw)
return super(TestVersionEditCompat, self).formset(*args, **defaults)
def test_add_appversion(self):
f = self.client.get(self.url).context['compat_form'].initial_forms[0]
d = self.formset(initial(f), dict(application=18, min=288, max=298),
initial_count=1)
r = self.client.post(self.url, d)
eq_(r.status_code, 302)
apps = self.get_version().compatible_apps.keys()
eq_(sorted(apps), sorted([amo.FIREFOX, amo.THUNDERBIRD]))
eq_(list(ActivityLog.objects.all().values_list('action')),
[(amo.LOG.MAX_APPVERSION_UPDATED.id,)])
def test_update_appversion(self):
d = self.get_form()
d.update(min=self.v1.id, max=self.v5.id)
r = self.client.post(self.url,
self.formset(d, initial_count=1))
eq_(r.status_code, 302)
av = self.version.apps.get()
eq_(av.min.version, '1.0')
eq_(av.max.version, '5.0')
eq_(list(ActivityLog.objects.all().values_list('action')),
[(amo.LOG.MAX_APPVERSION_UPDATED.id,)])
def test_ajax_update_appversion(self):
url = reverse('devhub.ajax.compat.update',
args=['a3615', self.version.id])
d = self.get_form(url)
d.update(min=self.v1.id, max=self.v5.id)
r = self.client.post(url, self.formset(d, initial_count=1))
eq_(r.status_code, 200)
av = self.version.apps.get()
eq_(av.min.version, '1.0')
eq_(av.max.version, '5.0')
eq_(list(ActivityLog.objects.all().values_list('action')),
[(amo.LOG.MAX_APPVERSION_UPDATED.id,)])
def test_delete_appversion(self):
# Add thunderbird compat so we can delete firefox.
self.test_add_appversion()
f = self.client.get(self.url).context['compat_form']
d = map(initial, f.initial_forms)
d[0]['DELETE'] = True
r = self.client.post(self.url, self.formset(*d, initial_count=2))
eq_(r.status_code, 302)
apps = self.get_version().compatible_apps.keys()
eq_(apps, [amo.THUNDERBIRD])
eq_(list(ActivityLog.objects.all().values_list('action')),
[(amo.LOG.MAX_APPVERSION_UPDATED.id,)])
def test_unique_apps(self):
f = self.client.get(self.url).context['compat_form'].initial_forms[0]
dupe = initial(f)
del dupe['id']
d = self.formset(initial(f), dupe, initial_count=1)
r = self.client.post(self.url, d)
eq_(r.status_code, 200)
# Because of how formsets work, the second form is expected to be a
# tbird version range. We got an error, so we're good.
def test_require_appversion(self):
old_av = self.version.apps.get()
f = self.client.get(self.url).context['compat_form'].initial_forms[0]
d = initial(f)
d['DELETE'] = True
r = self.client.post(self.url, self.formset(d, initial_count=1))
eq_(r.status_code, 200)
eq_(r.context['compat_form'].non_form_errors(),
['Need at least one compatible application.'])
eq_(self.version.apps.get(), old_av)
def test_proper_min_max(self):
f = self.client.get(self.url).context['compat_form'].initial_forms[0]
d = initial(f)
d['min'], d['max'] = d['max'], d['min']
r = self.client.post(self.url, self.formset(d, initial_count=1))
eq_(r.status_code, 200)
eq_(r.context['compat_form'].forms[0].non_field_errors(),
['Invalid version range.'])
def test_same_min_max(self):
f = self.client.get(self.url).context['compat_form'].initial_forms[0]
d = initial(f)
d['min'] = d['max']
r = self.client.post(self.url, self.formset(d, initial_count=1))
eq_(r.status_code, 302)
av = self.version.apps.all()[0]
eq_(av.min, av.max)
|
bsd-3-clause
| 3,534,918,667,949,309,000 | 39.642424 | 79 | 0.606174 | false |
thanm/devel-scripts
|
build-aosp-toolchain.py
|
1
|
8147
|
#!/usr/bin/python3
"""Perform a previously configured AOSP toolchain build.
This script kicks off a series of builds of the toolchain gcc compiler
for Android. It relies on previously established symbolic links set
up by hand or by "setup-aosp-toolchain-build.py".
"""
import getopt
import importlib
import multiprocessing
import os
import sys
import script_utils as u
# Path to working AOSP or NDK repo
flag_aosp_link = "/tmp/AOSP"
# Path to working AOSP or NDK repo
flag_toolchain_link = None
# Echo command before executing
flag_echo = True
# Dry run mode
flag_dryrun = False
# gcc version
flag_gcc_version = "4.9"
# isl version
flag_isl_version = None
# arches to build
flag_arches = None
# build different arches in parallel
flag_parallel = False
# Place where NDK build artifacts will be written
flag_ndk_builddir = "/ssd2/ndkbuild"
# Legal build arches
legal_arches = {"aarch64-linux-android": 1,
"arm-linux-androideabi": 1,
"x86": 1, "mipsel-linux-android": 1,
"x86_64": 1, "mips64el-linux-android": 1}
# Host option to pass to build-gcc.sh
flag_hostopt = ""
# Build a debuggable gcc
flag_build_debuggable = False
# Links in /tmp
aosp_ndk_link = None
aosp_toolchain_link = None
def docmd(cmd):
"""Execute a command."""
if flag_echo:
sys.stderr.write("executing: " + cmd + "\n")
if flag_dryrun:
return
u.docmd(cmd)
def docmdnf(cmd):
"""Execute a command allowing for failure."""
if flag_echo:
sys.stderr.write("executing: " + cmd + "\n")
if flag_dryrun:
return 0
return u.docmdnf(cmd)
def doscmd(cmd):
"""Execute a command."""
if flag_echo:
sys.stderr.write("executing: " + cmd + "\n")
if flag_dryrun:
return
u.doscmd(cmd)
def dochdir(thedir):
"""Switch to dir."""
if flag_echo:
sys.stderr.write("cd " + thedir + "\n")
if flag_dryrun:
return
try:
os.chdir(thedir)
except OSError as err:
u.error("chdir failed: %s" % err)
def set_evar(var, val):
"""Set an environment variable prior to script execution."""
os.environ[var] = val
u.verbose(0, "Setting %s to: %s" % (var, val))
def perform():
"""Perform setups."""
rc = 0
# Import correct module and collect sysroot_path method
loc = "%s/build/lib" % aosp_ndk_link
u.verbose(1, "importing %s/build_support.py" % loc)
sys.path.append(loc)
mod = importlib.import_module("build_support")
sysroot_method = getattr(mod, "sysroot_path")
if not sysroot_method:
u.error("internal error: can't find sysroot_path "
"method in %s/build_support.py" % loc)
# Environment variable settings
set_evar("NDK", aosp_ndk_link)
set_evar("ANDROID_BUILD_TOP", flag_aosp_link)
set_evar("ANDROID_NDK_ROOT", aosp_ndk_link)
set_evar("NDK_BUILDTOOLS_PATH", "%s/build/tools" % aosp_ndk_link)
set_evar("TMPDIR", "%s/tmpdir" % flag_ndk_builddir)
# The script build-gcc.sh inspects the value of GCC_VERSION
if flag_gcc_version:
set_evar("GCC_VERSION", flag_gcc_version)
tmpdir = "%s/tmpdir" % flag_ndk_builddir
ndk_temp = "%s/build" % flag_ndk_builddir
prebuilt_path = "%s/prebuilts" % flag_ndk_builddir
islv = ""
if flag_isl_version:
islv = "--isl-version=%s" % flag_isl_version
sixtyfouropt = "--try-64"
winopt = ""
if flag_hostopt == "windows":
sixtyfouropt = ""
winopt = "--mingw"
elif flag_hostopt == "windows64":
winopt = "--mingw"
dbgopt = ""
if flag_build_debuggable:
dbgopt = "--build-debuggable=yes"
# Change dir
dochdir(aosp_ndk_link)
# Create build dir if needed
docmd("mkdir -p %s" % flag_ndk_builddir)
# Clean
u.verbose(0, "... cleaning temp dirs")
docmd("rm -rf %s %s %s" % (tmpdir, ndk_temp, prebuilt_path))
docmd("mkdir %s %s %s" % (tmpdir, ndk_temp, prebuilt_path))
pool = None
if flag_parallel:
nworkers = multiprocessing.cpu_count()-1
pool = multiprocessing.Pool(processes=nworkers)
# Build
results = []
for arch in flag_arches:
sysroot_setting = sysroot_method(arch)
cmd = ("%s/gcc/build-gcc.sh %s "
"%s %s --package-dir=%s "
"--obscure-prefix=no "
"--sysroot=%s "
"--build-out=%s %s %s %s %s-%s" %
(aosp_toolchain_link, islv,
aosp_toolchain_link, aosp_ndk_link, prebuilt_path,
sysroot_setting, ndk_temp,
sixtyfouropt, winopt, dbgopt,
arch, flag_gcc_version))
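    # Illustrative expansion (hypothetical paths) for aarch64-linux-android with
    # gcc 4.9, no isl override and the default build dir:
    #   /tmp/AOSP/toolchain/gcc/build-gcc.sh  /tmp/AOSP/toolchain /tmp/AOSP/ndk
    #     --package-dir=/ssd2/ndkbuild/prebuilts --obscure-prefix=no
    #     --sysroot=<from build_support.sysroot_path> --build-out=/ssd2/ndkbuild/build
    #     --try-64   aarch64-linux-android-4.9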
u.verbose(1, "build cmd is: %s" % cmd)
if flag_parallel:
r = pool.apply_async(docmd, [cmd])
results.append(r)
else:
res = docmdnf(cmd)
if res != 0:
rc = 1
# Reap results for parallel execution
nr = len(results)
for idx in range(0, nr):
r = results[idx]
u.verbose(1, "waiting on result %d" % idx)
res = r.get(timeout=600)
if res != 0:
rc = 1
return rc
def usage(msgarg):
"""Print usage and exit."""
me = os.path.basename(sys.argv[0])
if msgarg:
sys.stderr.write("error: %s\n" % msgarg)
la = " ".join(list(legal_arches.keys()))
print("""\
usage: %s [options]
options:
-d increase debug msg verbosity level
    -b X set root build dir to X (def: /ssd2/ndkbuild)
-n X AOSP build link is X (def: /tmp/AOSP)
-t Y AOSP toolchain src link is Y (def: /tmp/AOSP/toolchain)
-q quiet mode (do not echo commands before executing)
-D dryrun mode (echo commands but do not execute)
    -g Q build for gcc version Q (def: 4.9)
    -i V build with isl version V (passed to build-gcc.sh as --isl-version)
-a A build target arch A (legal arches: %s)
[may be specified multiple times]
-w build windows 32-bit toolchain
-W build windows 64-bit toolchain
-C build a debuggable copy of GCC
-p build different toolchains in parallel (experimental)
Example 1: set up build with toolchain repo + AOSP dir
%s -t /tmp/AOSP-toolchain -n /tmp/AOSP/ndk
Example 2: set up build with just NDK repo, only aarch64 target
%s -n /tmp/AOSP -a aarch64-linux-android
Example 3: build gcc 5.2 with isl version 0.14 with just NDK repo
%s -n /tmp/AOSP -g 5.2 -i 0.14
""" % (me, la, me, me, me))
sys.exit(1)
def parse_args():
"""Command line argument parsing."""
global flag_aosp_link, flag_toolchain_link
global flag_echo, flag_dryrun, flag_ndk_builddir
global flag_gcc_version, flag_isl_version, flag_arches
global aosp_ndk_link, aosp_toolchain_link, flag_hostopt
global flag_build_debuggable, flag_parallel
try:
    optlist, args = getopt.getopt(sys.argv[1:], "da:b:n:t:g:i:pqDwCW")
except getopt.GetoptError as err:
# unrecognized option
usage(str(err))
flag_arches = list(legal_arches.keys())
specific_arches = {}
for opt, arg in optlist:
if opt == "-d":
u.increment_verbosity()
elif opt == "-q":
flag_echo = False
elif opt == "-D":
flag_dryrun = True
elif opt == "-C":
flag_build_debuggable = True
elif opt == "-n":
flag_aosp_link = arg
elif opt == "-p":
flag_parallel = True
elif opt == "-t":
flag_toolchain_link = arg
elif opt == "-b":
flag_ndk_builddir = arg
elif opt == "-g":
flag_gcc_version = arg
elif opt == "-i":
flag_isl_version = arg
elif opt == "-w":
flag_hostopt = "windows"
elif opt == "-W":
flag_hostopt = "windows64"
elif opt == "-a":
if arg not in legal_arches:
usage("specified arch %s not part of legal list" % arg)
specific_arches[arg] = 1
if specific_arches:
flag_arches = sorted(specific_arches.keys())
if args:
usage("unknown extra args")
aosp_ndk_link = "%s/ndk" % flag_aosp_link
if flag_toolchain_link:
aosp_toolchain_link = flag_toolchain_link
else:
aosp_toolchain_link = "%s/toolchain" % flag_aosp_link
if not os.path.exists(aosp_ndk_link):
usage("unable to access %s" % aosp_ndk_link)
if not os.path.exists(aosp_toolchain_link):
usage("unable to access %s" % aosp_toolchain_link)
#
#......................................................................
#
# Main portion of script
#
parse_args()
u.setdeflanglocale()
erc = perform()
if erc != 0:
print("*** BUILD FAILED")
exit(erc)
|
apache-2.0
| -7,819,423,786,514,250,000 | 25.028754 | 71 | 0.62342 | false |
madprof/alpha-hub
|
prototype/config.py
|
1
|
1644
|
# put this into ~/.alphahub/config.py and make sure it's not
# readable by anyone else (it contains passwords!)
# the host we run on and want to receive packets on; note
# that "localhost" is probably the wrong thing here, you
# want a host name that refers to an external network so you
# can receive packets from the outside
host = "the.hub.machine.tld"
# SQLite database file to use; eventually this will be a
# "real" database connection URL
database = "hub.db"
# the servers we listen to; for now each box has one port
# and secret on the hub, even if it runs multiple game
# servers; for a setup where one box runs games servers for
# multiple clans, this is not sufficient yet; note that host
# names are resolved to IPs and IPs must be unique; and yes,
# this is where sv_alphaHubHost and sv_alphaHubKey go
servers = {
"some.game.server.tld": (42, "somesecret"),
"some.other.game.tld": (543, "monkeyspam"),
}
# the hubs we listen to for gossip; same restrictions as for
# game servers for now; we probably need more stuff here,
# rate limits, trust levels, and so on
listen = {
"some.hub.server.tld": (9533, "youcantknow"),
}
# the hubs we tell gossip to; same restrictions as for game
# servers for now; same notes as for listen hubs apply
tell = {
"some.hub.server.tld": (453, "secretsofdoom"),
"another.hub.tld": (12345, "itssofun"),
}
# TODO: should there be another level, i.e. hubs that we
# don't just gossip with but keep in sync with 100% (subject
# to the limitations of using UDP that is)? seems like a
# bad idea since it would lead to multiple sources of truth
# that don't necessarily agree
|
agpl-3.0
| 1,029,538,902,745,399,200 | 33.25 | 60 | 0.720195 | false |
ejucovy/django-opendebates
|
opendebates/tests/test_registration.py
|
1
|
4946
|
import os
from django.contrib.auth import get_user_model
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.test.utils import override_settings
from .factories import UserFactory
class RegisterTest(TestCase):
def setUp(self):
self.url = reverse('registration_register')
self.data = {
'username': 'gwash',
'password1': 'secretpassword',
'password2': 'secretpassword',
'first_name': 'George',
'last_name': 'Washington',
'email': 'gwash@example.com',
'zip': '12345',
'g-recaptcha-response': 'PASSED'
}
os.environ['NORECAPTCHA_TESTING'] = 'True'
def tearDown(self):
os.environ.pop('NORECAPTCHA_TESTING', '')
def test_registration_get(self):
"GET the form successfully."
with self.assertTemplateUsed('registration/registration_form.html'):
rsp = self.client.get(self.url)
self.assertEqual(200, rsp.status_code)
def test_post_success(self):
"POST the form with all required values."
home_url = reverse('list_ideas')
rsp = self.client.post(self.url, data=self.data, follow=True)
self.assertRedirects(rsp, home_url)
new_user = get_user_model().objects.first()
self.assertEqual(new_user.first_name, self.data['first_name'])
def test_post_missing_variable(self):
"POST with a missing required value."
# delete each required key and POST
for key in self.data:
if key == 'g-recaptcha-response':
continue
data = self.data.copy()
del data[key]
rsp = self.client.post(self.url, data=data)
self.assertEqual(200, rsp.status_code)
form = rsp.context['form']
self.assertIn(key, form.errors)
self.assertIn('field is required', str(form.errors))
def test_email_must_be_unique(self):
# case insensitive
UserFactory(email=self.data['email'].upper())
rsp = self.client.post(self.url, data=self.data, follow=True)
form = rsp.context['form']
self.assertEqual(200, rsp.status_code)
self.assertIn('email', form.errors)
self.assertIn('already in use', str(form.errors))
def test_twitter_handle_gets_cleaned(self):
"Various forms of twitter_handle entries are cleaned to canonical form."
data = {}
for twitter_handle in [
'@twitter',
'twitter',
'https://twitter.com/twitter',
'http://twitter.com/twitter',
'twitter.com/twitter',
]:
data['twitter_handle'] = twitter_handle
rsp = self.client.post(self.url, data=data)
form = rsp.context['form']
self.assertEqual('twitter', form.cleaned_data['twitter_handle'])
@override_settings(USE_CAPTCHA=False)
def test_disabling_captcha(self):
del self.data['g-recaptcha-response']
del os.environ['NORECAPTCHA_TESTING']
home_url = reverse('list_ideas')
rsp = self.client.post(self.url, data=self.data, follow=True)
self.assertRedirects(rsp, home_url)
new_user = get_user_model().objects.first()
self.assertEqual(new_user.first_name, self.data['first_name'])
class LoginLogoutTest(TestCase):
def setUp(self):
self.username = 'gwash'
self.email = 'gwash@example.com'
self.password = 'secretpassword'
UserFactory(username=self.username,
email=self.email,
password=self.password)
self.login_url = reverse('auth_login')
self.home_url = reverse('list_ideas')
def test_login_with_username(self):
rsp = self.client.post(
self.login_url,
data={'username': self.username, 'password': self.password, 'next': '/'}
)
self.assertRedirects(rsp, self.home_url)
def test_login_with_email(self):
rsp = self.client.post(
self.login_url,
data={'username': self.email, 'password': self.password, 'next': '/'}
)
self.assertRedirects(rsp, self.home_url)
def test_failed_login(self):
rsp = self.client.post(
self.login_url,
data={'username': self.username, 'password': self.password + 'bad', 'next': '/'}
)
self.assertEqual(200, rsp.status_code)
form = rsp.context['form']
self.assertIn('enter a correct username and password', str(form.errors))
def test_logout(self):
self.assertTrue(self.client.login(username=self.username, password=self.password))
logout_url = reverse('auth_logout')
rsp = self.client.get(logout_url)
self.assertRedirects(rsp, self.home_url)
rsp = self.client.get(self.home_url)
self.assertIn('Log in', rsp.content)
|
apache-2.0
| 2,850,143,701,732,513,300 | 35.910448 | 92 | 0.596846 | false |
AdamDynamic/TwitterMetrics
|
TrendingTermsSentiment.py
|
1
|
1025
|
#!/usr/bin/env python
# Scans twitter for trending terms, populates a database with the results and
# then creates a json file showing trends based on the data
import CreateJson as j
import TwitterSearch as t
import KeywordSearch as k
import Log as l
import logging
ProcessResult = False
FN_NAME = "TrendingTermsSentiment"
logging.basicConfig(filename='TwitterMetrics_Sentiment.log', format='%(asctime)s %(levelname)s: %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p', level=logging.INFO)
logging.info('%s - Process Start', FN_NAME)
TimeStamp = l.CreateTimeStamp()
if t.ScanTwitter(TimeStamp) == True:
if k.SearchTweetsForKeywords(TimeStamp) == True:
ProcessResult = True
else:
logging.warning('%s - Function SearchTweetsForKeywords in KeywordSearch module failed to run correctly', FN_NAME)
else:
logging.warning('%s - Function ScanTwitter in TwitterSearch module failed to run correctly', FN_NAME)
logging.info('%s - Process complete with status %s', FN_NAME, ProcessResult)
|
gpl-2.0
| 6,986,279,648,277,726,000 | 26.702703 | 161 | 0.731707 | false |
MOOCworkbench/MOOCworkbench
|
docs_manager/views.py
|
1
|
2518
|
from django.contrib.auth.decorators import login_required
from django.http import JsonResponse
from django.shortcuts import redirect, render
from django.views import View
from experiments_manager.consumers import send_message
from experiments_manager.helper import MessageStatus
from git_manager.helpers.git_helper import GitHelper
from git_manager.helpers.github_helper import GitHubHelper
from helpers.helper import get_package_or_experiment
from helpers.helper_mixins import ExperimentPackageTypeMixin
from .tasks import task_generate_docs
class DocView(View):
"""Shows the documentation for the selected page_slug
via the attached language helper to an experiment"""
def get(self, request, object_id, object_type, page_slug=None):
exp_or_package = get_package_or_experiment(request, object_type, object_id)
language_helper = exp_or_package.language_helper()
if page_slug:
location = language_helper.get_document(page_slug)
else:
location = language_helper.get_document('index')
return redirect(to=location)
class DocStatusView(ExperimentPackageTypeMixin, View):
"""Views the Documentation status: if doc gen is enabled and
the latest docs quality messages"""
def get(self, request, object_id, object_type):
context = {}
django_object = get_package_or_experiment(request, object_type, object_id)
context['object'] = django_object
context['docs'] = django_object.docs
context['object_type'] = object_type
return render(request, 'docs_manager/docs_status.html', context)
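# The view below flips documentation generation on/off for an experiment or package;
# note (summary inferred from the code itself, not from separate docs) that re-enabling
# also clones or pulls the linked GitHub repository so later doc generation has a local
# working copy to build from.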
@login_required
def toggle_docs_status(request, object_id, object_type):
exp_or_package = get_package_or_experiment(request, object_type, object_id)
docs = exp_or_package.docs
docs.enabled = not docs.enabled
docs.save()
if docs.enabled:
github_helper = GitHubHelper(request.user, exp_or_package.git_repo.name)
git_helper = GitHelper(github_helper)
git_helper.clone_or_pull_repository()
return redirect(exp_or_package.get_absolute_url())
@login_required
def docs_generate(request, object_id, object_type):
"""Start task to regenerate documentation"""
exp_or_package = get_package_or_experiment(request, object_type, object_id)
send_message(exp_or_package.owner.user.username, MessageStatus.INFO,
'Task to regenerate documentation started.')
task_generate_docs.delay(object_type, object_id)
return JsonResponse({})
|
mit
| -6,071,420,683,983,865,000 | 38.968254 | 83 | 0.72359 | false |
AlexandrDemin/flask_app
|
dbProvider.py
|
1
|
6554
|
import json
import random
from operator import itemgetter
# absolutePath = '/home/noidea91/flask_app/'
absolutePath = ''
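# randomize() below draws items from the input list one at a time without replacement.
# When randomSeed is given, the generator is re-seeded before every draw, so the result
# is deterministic for a given list and seed (behaviour read from the code, not from docs).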
def randomize(inArray, randomSeed = None, count = None):
tempList = inArray[:]
result = []
if count == None:
count = len(tempList)
for i in range(count):
if randomSeed == None:
random.seed()
else:
random.seed(randomSeed)
rnd = random.randrange(len(tempList))
result.append(tempList[rnd])
tempList.pop(rnd)
return result
# Texts
def getTextsCache():
json_data = open(absolutePath + "configs/texts.json").read()
texts = json.loads(json_data)
return texts
_textsCache = getTextsCache()
def getText(action, stringName=""):
if stringName == "":
return _textsCache[action]
else:
return _textsCache[action].get(stringName)
def getContent(action, subdomain, serviceIdStr):
return _textsCache[action].get(subdomain).get(serviceIdStr)
def getRandomizedTexts(action, subdomain, serviceIdStr, randomSeed):
texts = getContent(action, subdomain, serviceIdStr)
return randomize(texts, randomSeed = randomSeed)
# Services
def getServicesCache():
json_data = open(absolutePath + "configs/services.json").read()
texts = json.loads(json_data)
return texts
_servicesCache = getServicesCache()
def getServices():
return _servicesCache
def getServiceByNameTranslit(serviceNameTranslit):
for service in _servicesCache:
if service['nameTranslit'] == serviceNameTranslit:
return service
return None
def getServiceById(serviceId):
for service in _servicesCache:
if service['id'] == serviceId:
return service
return None
def getServiceRandomImgNumber(service, randomSeed):
maxImgCount = service['imagesCount']
if maxImgCount == 0:
return None
random.seed(randomSeed)
rnd = random.randrange(1, maxImgCount)
return rnd
# Regions
def getRegionsCache():
json_data = open(absolutePath + "configs/regions.json").read()
regions = json.loads(json_data)
for region in regions:
region['childrenIds'] = []
for potentialChild in regions:
if region['id'] == potentialChild['parentId'] and region['name'] != potentialChild['name']:
region['childrenIds'].append(potentialChild['id'])
region['hasChildren'] = False if len(region['childrenIds']) == 0 else True
return regions
_regionsCache = getRegionsCache()
def getAllChildrenRegionIds(parentId):
parents = [parentId]
allChildren = []
while len(parents) > 0:
regions = getRegionIdsByParentIds(parents)
allChildren.extend(regions)
parents = regions
return allChildren
def isChildrenRegion(regionId, mainRegionId):
parents = [mainRegionId]
while len(parents) > 0:
regions = getRegionIdsByParentIds(parents)
if regionId in regions:
return True
parents = regions
return False
def getRegionByDativeTranslitAndMainRegion(dativeTranslit, mainRegionId):
for region in _regionsCache:
if region['dativeTranslit'] == dativeTranslit:
if isChildrenRegion(region['id'], mainRegionId):
return region
return None
def getRegionByNameTranslitAndParentId(nameTranslit, parentId):
for region in _regionsCache:
if region["parentId"] == parentId and region["nameTranslit"] == nameTranslit:
return region
return None
def getRegionIdsByParentIds(parentIds):
result = []
for regionId in parentIds:
region = getRegionById(regionId)
result.extend(region['childrenIds'])
return result
def getRegionsByParentIds(parentIds, includeParents = False):
result = []
for regionId in parentIds:
region = getRegionById(regionId)
for childId in region['childrenIds']:
child = getRegionById(childId)
result.append(child)
if includeParents:
result.append(region)
return result
def getRegionParents(regionId):
result = []
region = getRegionById(regionId)
while True:
parent = getRegionById(region['parentId'])
if parent == None:
return result
result.append(parent)
region = parent
def getRegionParentsSorted(regionId, deleteFirst = True):
parentsUnsorted = getRegionParents(regionId)
parents = sorted(parentsUnsorted, key=itemgetter('id'))
if len(parents) > 0 and deleteFirst:
parents.pop(0)
return parents
def getRegionsByLevel(levels):
result = []
for region in _regionsCache:
if region["level"] in levels:
result.append(region)
return result
def getRegionsTree(parentIds=None, depth=1):
result = []
if parentIds == None:
regions = getRegionsByLevel([1])
else:
regions = []
for pid in parentIds:
regions.append(getRegionById(pid))
if depth > 0:
for region in regions:
regionId = region['id']
children = getRegionsByParentIds([regionId])
region['hasChildren'] = False
if len(children) > 0:
region['hasChildren'] = True
if depth > 1 and region['hasChildren']:
children = getRegionsTree(parentIds = region['childrenIds'], depth = depth - 1)
sortedChildren = sorted(children, key=itemgetter('name'))
region['children'] = sortedChildren
result.append(region)
sortedResult = sorted(result, key=itemgetter('name'))
return sortedResult
def getRegionById(id):
for region in _regionsCache:
if region["id"] == id:
return region
return None
# Subdomains
def getSubdomainsCache():
json_data = open(absolutePath + "configs/subdomains.json").read()
subdomains = json.loads(json_data)
for subdomain in subdomains:
subdomain['allChildrenRegionIds'] = getAllChildrenRegionIds(subdomain['regionId'])
return subdomains
_subdomainsCache = getSubdomainsCache()
def getRegionBySubdomain(subdomainString):
if subdomainString == '':
subdomainString = 'www'
for subdomain in _subdomainsCache:
if subdomain['subdomain'] == subdomainString:
return getRegionById(subdomain['regionId'])
return None
def getSubdomainByMainRegionId(regionId):
for subdomain in _subdomainsCache:
if subdomain['regionId'] == regionId:
return subdomain['subdomain']
return None
def getSubdomainByRegionId(regionId):
for subdomain in _subdomainsCache:
if regionId in subdomain['allChildrenRegionIds']:
return subdomain['subdomain']
return None
# Phones
def getPhonesCache():
json_data = open(absolutePath + "configs/phones.json").read()
phones = json.loads(json_data)
return phones
_phonesCache = getPhonesCache()
def getDefaultPhone():
# The 8800 (toll-free) number, stored with RegionId 0
return _phonesCache["0"]
def getPhoneByRegionId(regionId):
return _phonesCache.get(str(regionId), getDefaultPhone())
# Cars
def getCarsCache():
json_data = open(absolutePath + "configs/cars.json").read()
texts = json.loads(json_data)
return texts
_carsCache = getCarsCache()
def getRandomCars(count):
return randomize(_carsCache, count=count)
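# Illustrative usage only (not part of the original module). It assumes the JSON config
# files referenced above exist next to this script; the helpers called here are the real
# functions defined in this file.
if __name__ == '__main__':
    print(getRandomCars(3))
    print(getRegionBySubdomain('www'))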
|
mit
| -3,368,693,775,385,770,500 | 24.984127 | 94 | 0.742364 | false |
mar10/wsgidav
|
wsgidav/prop_man/couch_property_manager.py
|
1
|
8148
|
# -*- coding: utf-8 -*-
# (c) 2009-2021 Martin Wendt and contributors; see WsgiDAV https://github.com/mar10/wsgidav
# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
"""
Implements a property manager based on CouchDB.
http://wiki.apache.org/couchdb/Reference
http://packages.python.org/CouchDB/views.html
Usage: add this lines to wsgidav.conf::
from wsgidav.prop_man.couch_property_manager import CouchPropertyManager
prop_man_opts = {}
property_manager = CouchPropertyManager(prop_man_opts)
Valid options are (sample shows defaults)::
opts = {"url": "http://localhost:5984/", # CouchDB server
"dbName": "wsgidav-props", # Name of DB to store the properties
}
"""
from __future__ import print_function
from uuid import uuid4
import couchdb
from wsgidav import compat, util
__docformat__ = "reStructuredText"
_logger = util.get_module_logger(__name__)
# ============================================================================
# CouchPropertyManager
# ============================================================================
class CouchPropertyManager(object):
"""Implements a property manager based on CouchDB."""
def __init__(self, options):
self.options = options
self._connect()
def __del__(self):
self._disconnect()
def _connect(self):
opts = self.options
if opts.get("url"):
self.couch = couchdb.Server(opts.get("url"))
else:
self.couch = couchdb.Server()
dbName = opts.get("dbName", "wsgidav_props")
if dbName in self.couch:
self.db = self.couch[dbName]
_logger.info(
"CouchPropertyManager connected to %s v%s"
% (self.db, self.couch.version())
)
else:
self.db = self.couch.create(dbName)
_logger.info(
"CouchPropertyManager created new db %s v%s"
% (self.db, self.couch.version())
)
# Ensure that we have a permanent view
if "_design/properties" not in self.db:
map = """
function(doc) {
if(doc.type == 'properties') {
emit(doc.url, { 'id': doc._id, 'url': doc.url });
}
}
"""
designDoc = {
"_id": "_design/properties",
# "_rev": "42351258",
"language": "javascript",
"views": {
"titles": {
"map": (
"function(doc) { emit(null, { 'id': doc._id, "
"'title': doc.title }); }"
)
},
# http://127.0.0.1:5984/wsgidav_props/_design/properties/_view/by_url
"by_url": {"map": map},
},
}
self.db.save(designDoc)
# pprint(self.couch.stats())
def _disconnect(self):
pass
def __repr__(self):
return "CouchPropertyManager(%s)" % self.db
def _sync(self):
pass
def _check(self, msg=""):
pass
def _dump(self, msg="", out=None):
pass
def _find(self, url):
"""Return properties document for path."""
# Query the permanent view to find a url
vr = self.db.view("properties/by_url", key=url, include_docs=True)
_logger.debug("find(%r) returned %s" % (url, len(vr)))
assert len(vr) <= 1, "Found multiple matches for %r" % url
for row in vr:
assert row.doc
return row.doc
return None
def _find_descendents(self, url):
"""Return properties document for url and all children."""
# Ad-hoc query for URL starting with a prefix
map_fun = """function(doc) {
var url = doc.url + "/";
if(doc.type === 'properties' && url.indexOf('%s') === 0) {
emit(doc.url, { 'id': doc._id, 'url': doc.url });
}
}""" % (
url + "/"
)
vr = self.db.query(map_fun, include_docs=True)
for row in vr:
yield row.doc
return
def get_properties(self, norm_url, environ=None):
_logger.debug("get_properties(%s)" % norm_url)
doc = self._find(norm_url)
propNames = []
if doc:
for name in doc["properties"].keys():
propNames.append(name)
return propNames
def get_property(self, norm_url, name, environ=None):
_logger.debug("get_property(%s, %s)" % (norm_url, name))
doc = self._find(norm_url)
if not doc:
return None
prop = doc["properties"].get(name)
return prop
def write_property(
self, norm_url, name, property_value, dry_run=False, environ=None
):
assert norm_url and norm_url.startswith("/")
assert name
assert property_value is not None
_logger.debug(
"write_property(%s, %s, dry_run=%s):\n\t%s"
% (norm_url, name, dry_run, property_value)
)
if dry_run:
return # TODO: can we check anything here?
doc = self._find(norm_url)
if doc:
doc["properties"][name] = property_value
else:
doc = {
"_id": uuid4().hex, # Documentation suggests to set the id
"url": norm_url,
"title": compat.quote(norm_url),
"type": "properties",
"properties": {name: property_value},
}
self.db.save(doc)
def remove_property(self, norm_url, name, dry_run=False, environ=None):
_logger.debug("remove_property(%s, %s, dry_run=%s)" % (norm_url, name, dry_run))
if dry_run:
# TODO: can we check anything here?
return
doc = self._find(norm_url)
# Specifying the removal of a property that does not exist is NOT an error.
if not doc or doc["properties"].get(name) is None:
return
del doc["properties"][name]
self.db.save(doc)
def remove_properties(self, norm_url, environ=None):
_logger.debug("remove_properties(%s)" % norm_url)
doc = self._find(norm_url)
if doc:
self.db.delete(doc)
return
def copy_properties(self, srcUrl, destUrl, environ=None):
doc = self._find(srcUrl)
if not doc:
_logger.debug(
"copy_properties(%s, %s): src has no properties" % (srcUrl, destUrl)
)
return
_logger.debug("copy_properties(%s, %s)" % (srcUrl, destUrl))
assert not self._find(destUrl)
doc2 = {
"_id": uuid4().hex,
"url": destUrl,
"title": compat.quote(destUrl),
"type": "properties",
"properties": doc["properties"],
}
self.db.save(doc2)
def move_properties(self, srcUrl, destUrl, with_children, environ=None):
_logger.debug("move_properties(%s, %s, %s)" % (srcUrl, destUrl, with_children))
if with_children:
# Match URLs that are equal to <srcUrl> or begin with '<srcUrl>/'
docList = self._find_descendents(srcUrl)
for doc in docList:
newDest = doc["url"].replace(srcUrl, destUrl)
_logger.debug("move property %s -> %s" % (doc["url"], newDest))
doc["url"] = newDest
self.db.save(doc)
else:
# Move srcUrl only
# TODO: use findAndModify()?
doc = self._find(srcUrl)
if doc:
_logger.debug("move property %s -> %s" % (doc["url"], destUrl))
doc["url"] = destUrl
self.db.save(doc)
return
# ============================================================================
#
# ============================================================================
def test():
pass
if __name__ == "__main__":
test()
|
mit
| 6,544,573,960,360,797,000 | 30.952941 | 91 | 0.4946 | false |
ah-anssi/SecuML
|
SecuML/core/Data/Predictions.py
|
1
|
1490
|
# SecuML
# Copyright (C) 2018 ANSSI
#
# SecuML is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# SecuML is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with SecuML. If not, see <http://www.gnu.org/licenses/>.
from . import Annotations
class InvalidPredictions(Exception):
def __init__(self, message):
self.message = message
def __str__(self):
return self.message
class Predictions(Annotations):
def __init__(self, labels, families, predicted_proba, ids):
Annotations.__init__(self, labels, families, ids)
self.predicted_proba = predicted_proba
self.checkValidity()
def checkValidity(self):
message = None
num_instances = self.ids.numInstances()
if len(self.predicted_proba) != num_instances:
message = 'There are ' + str(num_instances) + ' instances '
message += 'but ' + str(len(self.predicted_proba)) + \
' probabilities are provided.'
if message is not None:
raise InvalidPredictions(message)
|
gpl-2.0
| -3,126,649,620,062,743,600 | 32.863636 | 73 | 0.67651 | false |
calben/retino
|
scripts/generate_overview_figure_one.py
|
1
|
6562
|
import pickle
import retino
from retino.plot import *
from retino.utils import *
sns.set_style("ticks")
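# The function below illustrates how a growth direction is picked for an axon segment:
# a weighted sum of the "desired" direction (towards the target) and the "momentum"
# direction (along the previous segment) gives a guide angle, and candidate growth
# vectors are sampled around it with Gaussian noise in polar coordinates. This summary
# is inferred from the code itself, not from retino's documentation.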
def plot_axon_growth_direction_algorithm(origin, end, target, ax):
a_origin = origin
a_end = end
desired_direction_weight = 1.1
momentum_direction_weight = 1
desired_direction = get_unit_direction_vector(a_end, target)
momentum_direction = get_unit_direction_vector(a_origin, a_end)
desired_and_momentum = desired_direction_weight * desired_direction + momentum_direction_weight * momentum_direction
desired_and_momentum = get_unit_vector(desired_and_momentum)
prenoise_pol = cart_to_pol(desired_and_momentum)[1]
results = []
for i in range(100):
r = np.random.normal(2.0, 1.0, size=1)[0]
noise = np.random.normal(0, .4, size=1)[0]
theta = prenoise_pol + noise
cart_result = pol_to_cart(np.asarray([r, theta]))
results.append(cart_result)
desired_direction = (desired_direction * 2 + a_end)
momentum_direction = (momentum_direction * 2 + a_end)
desired_and_momentum = (desired_and_momentum * 2 + a_end)
for i in range(25):
ax.plot([a_end[0], results[i][0] + a_end[0]], [a_end[1], results[i][1] + a_end[1]], color="crimson", alpha=.2,
linewidth=1.0)
ax.plot([a_origin[0], a_end[0]], [a_origin[1], a_end[1]], color="gold", linewidth=2.0, label="Segment of Origin")
ax.plot([a_end[0], desired_direction[0]], [a_end[1], desired_direction[1]], color="seagreen", linewidth=2.0,
label="Desired Direction")
ax.plot([a_end[0], momentum_direction[0]], [a_end[1], momentum_direction[1]], color="darkorange", linewidth=2.0,
label="Momentum Direction")
ax.plot([a_end[0], desired_and_momentum[0]], [a_end[1], desired_and_momentum[1]], color="dodgerblue", linewidth=3.0,
label="Weighted Guide Direction")
ax.set_aspect(1)
ax.set_xlim([-4, 4])
ax.set_ylim([-4, 4])
fig, direction_determination = plt.subplots(1, figsize=(5, 5), dpi=300)
origin = np.asarray([-3.5, -3.5])
end = np.asarray([-1.0, -1.0])
target = np.asarray([-5.0, 5.0])
print("Plotting for growth direction fig")
plot_axon_growth_direction_algorithm(origin, end, target, direction_determination)
direction_determination.set_title("Growth Direction Determination")
sns.despine(offset=5)
plt.tight_layout()
plt.savefig("figs/growth-direction-determination.pdf")
plt.close(fig)
#############
print("Plotting axon growth demos")
with open("singleaxonmodel/20.pickle", "rb") as f:
model = pickle.load(f)
fig, growth_t1 = plt.subplots(1, figsize=(5, 5), dpi=300)
add_axon_to_axis(model.axons[0], growth_t1, axon_color="seagreen")
growth_t1.set_aspect(1)
growth_t1.set_xlim([0, retino.TECTUM_SIZE_X])
growth_t1.set_ylim([0, retino.TECTUM_SIZE_Y])
growth_t1.set_title("Axon A1 at 20 Iterations")
sns.despine(offset=5)
plt.tight_layout()
plt.savefig("figs/axon-at-20-iterations.pdf")
plt.close(fig)
#############
with open("singleaxonmodel/70.pickle", "rb") as f:
model = pickle.load(f)
fig, growth_t2 = plt.subplots(1, figsize=(5, 5), dpi=300)
add_axon_to_axis(model.axons[0], growth_t2, axon_color="seagreen")
growth_t2.set_aspect(1)
growth_t2.set_xlim([0, retino.TECTUM_SIZE_X])
growth_t2.set_ylim([0, retino.TECTUM_SIZE_Y])
growth_t2.set_title("A1 at 70 Iterations")
sns.despine(offset=5)
plt.tight_layout()
plt.savefig("figs/axon-at-70-iterations.pdf")
plt.close(fig)
#############
with open("singleaxonmodel/140.pickle", "rb") as f:
model = pickle.load(f)
fig, growth_t3 = plt.subplots(1, figsize=(5, 5), dpi=300)
postsynapses = list(set([synapse.postsynapticcell() for synapse in model.axons[0].synapses]))
add_postsynapticcell_to_axis(postsynapses, growth_t3, color="cornflowerblue", alpha=0.1, size_scale=80)
add_axon_to_axis(model.axons[0], growth_t3, axon_color="seagreen")
growth_t3.set_aspect(1)
growth_t3.set_xlim([0, retino.TECTUM_SIZE_X])
growth_t3.set_ylim([0, retino.TECTUM_SIZE_Y])
growth_t3.set_title("A1 at 140 Iterations")
sns.despine(offset=5)
plt.tight_layout()
plt.savefig("figs/axon-at-140-iterations.pdf")
plt.close(fig)
#############
with open("multipleaxonmodel/25axons.pickle", "rb") as f:
model = pickle.load(f)
fig, [synapse_color_x, synapse_color_y] = plt.subplots(1, 2, figsize=(10, 5), dpi=300)
for axon in model.axons:
add_axon_to_axis(axon, synapse_color_x, axon_color="seagreen", axon_alpha=0.2, target_alpha=0)
add_axon_to_axis(axon, synapse_color_y, axon_color="seagreen", axon_alpha=0.2, target_alpha=0)
add_synapses_to_axarr_by_axon(axon, [synapse_color_x, synapse_color_y],
retino.TECTUM_SIZE_X, retino.TECTUM_SIZE_Y)
synapse_color_x.set_aspect(1)
synapse_color_x.set_xlim([0, retino.TECTUM_SIZE_X])
synapse_color_x.set_ylim([0, retino.TECTUM_SIZE_Y])
synapse_color_x.set_title("Synapses Coloured by X Gradient")
synapse_color_y.set_aspect(1)
synapse_color_y.set_xlim([0, retino.TECTUM_SIZE_X])
synapse_color_y.set_ylim([0, retino.TECTUM_SIZE_Y])
synapse_color_y.set_title("Synapses Coloured by Y Gradient")
sns.despine(offset=5)
plt.tight_layout()
plt.savefig("figs/25axis-synapse-colouring.pdf")
#############
with open("multipleaxonmodel/625axons.pickle", "rb") as f:
model = pickle.load(f)
fig, [other1, other2] = plt.subplots(1, 2, figsize=(10, 5), dpi=500)
for axon in model.axons:
add_synapses_to_axarr_by_axon(axon, [other1, other2],
retino.TECTUM_SIZE_X, retino.TECTUM_SIZE_Y, alpha=0.2)
other1.set_aspect(1)
other1.set_xlim([0, retino.TECTUM_SIZE_X])
other1.set_ylim([0, retino.TECTUM_SIZE_Y])
other1.set_title("Synapses Coloured by X Gradient")
other2.set_aspect(1)
other2.set_xlim([0, retino.TECTUM_SIZE_X])
other2.set_ylim([0, retino.TECTUM_SIZE_Y])
other2.set_title("Synapses Coloured by Y Gradient")
sns.despine(offset=5)
plt.tight_layout()
plt.savefig("figs/625-axon-axis-synapse-colouring.png", dpi=1000)
############
fig, [postsynatic_ax_x, postsynatic_ax_y] = plt.subplots(1, 2)
for postsynapticcell in model.postsynapticcells:
add_postsynapticcell_to_axarr_by_mapping(postsynapticcell, [postsynatic_ax_x, postsynatic_ax_y], 100.0, 100.0)
for ax in [postsynatic_ax_x, postsynatic_ax_y]:
ax.set_aspect(1)
ax.set_xlim([0, retino.TECTUM_SIZE_X])
ax.set_ylim([0, retino.TECTUM_SIZE_Y])
postsynatic_ax_x.set_title("Post Synapse Mapping by X")
postsynatic_ax_y.set_title("Post Synapse Mapping by Y")
sns.despine(offset=5)
plt.tight_layout()
plt.savefig("figs/625-axon-postsynaptic-mapping-colour.pdf")
|
mit
| -5,546,933,092,631,977,000 | 33 | 120 | 0.682414 | false |
xia2/xia2
|
tests/Modules/Scaler/test_DialsScalerHelper.py
|
1
|
14958
|
import random
import pytest
from cctbx import sgtbx
from dials.algorithms.symmetry.cosym._generate_test_data import generate_intensities
from dials.array_family import flex
from dxtbx.model import Beam, Crystal, Experiment, Scan
from dxtbx.model.experiment_list import ExperimentList
from dxtbx.serialize import load
from xia2.Modules.Scaler.DialsScaler import decide_correct_lattice_using_refiner
flex.set_random_seed(42)
random.seed(42)
@pytest.fixture
def helper_directory(ccp4, tmpdir):
"""Initialise a DialsScalerHelper"""
# import kept here as the import depends on CCP4 being present
from xia2.Modules.Scaler.DialsScaler import DialsScalerHelper
helper = DialsScalerHelper()
helper.set_pname_xname("AUTOMATIC", "DEFAULT")
helper.set_working_directory(tmpdir.strpath)
return (helper, tmpdir)
def generated_exp(n=1, space_group="P 2", assign_ids=False, id_=None):
"""Generate an experiment list with two experiments."""
experiments = ExperimentList()
exp_dict = {
"__id__": "crystal",
"real_space_a": [15.0, 0.0, 0.0],
"real_space_b": [0.0, 10.0, 0.0],
"real_space_c": [0.0, 0.0, 20.0],
"space_group_hall_symbol": space_group,
}
crystal = Crystal.from_dict(exp_dict)
scan = Scan(image_range=[0, 90], oscillation=[0.0, 1.0])
beam = Beam(s0=(0.0, 0.0, 1.01))
if assign_ids:
experiments.append(
Experiment(identifier="0", beam=beam, scan=scan, crystal=crystal)
)
elif id_:
experiments.append(
Experiment(identifier=str(id_), beam=beam, scan=scan, crystal=crystal)
)
else:
experiments.append(Experiment(beam=beam, scan=scan, crystal=crystal))
if n > 1:
for i in range(1, n):
if assign_ids:
experiments.append(
Experiment(identifier=str(i), beam=beam, scan=scan, crystal=crystal)
)
else:
experiments.append(Experiment(beam=beam, scan=scan, crystal=crystal))
return experiments
def generate_reflections_in_sg(space_group, id_=0, assign_id=False):
"""Generate reflections with intensities consistent with space group"""
sgi = sgtbx.space_group_info(symbol=space_group)
cs = sgi.any_compatible_crystal_symmetry(volume=3000)
cs = cs.best_cell()
cs = cs.minimum_cell()
intensities = (
generate_intensities(cs, d_min=2.0)
.generate_bijvoet_mates()
.set_observation_type_xray_intensity()
)
intensities = intensities.expand_to_p1()
# needed to give vaguely sensible E_cc_true values
reflections = flex.reflection_table()
reflections["intensity.sum.value"] = intensities.data()
reflections["intensity.sum.variance"] = flex.pow2(intensities.sigmas())
reflections["miller_index"] = intensities.indices()
reflections["d"] = intensities.d_spacings().data()
reflections["id"] = flex.int(reflections.size(), id_)
if assign_id:
reflections.experiment_identifiers()[id_] = str(id_)
reflections.set_flags(
flex.bool(reflections.size(), True), reflections.flags.integrated
)
return reflections
def generate_test_refl(id_=0, assign_id=False):
"""Generate a small reflection table"""
reflections = flex.reflection_table()
reflections["intensity.sum.value"] = flex.double([1.0, 1.0, 2.0, 2.0, 3.0, 3.0])
reflections["variance.sum.variance"] = flex.double([1.0, 1.0, 1.0, 1.0, 1.0, 1.0])
reflections["miller_index"] = flex.miller_index(
[(1, 0, 0), (0, 0, 1), (2, 0, 0), (0, 0, 2), (0, 1, 0), (0, -1, 0)]
)
reflections["id"] = flex.int(6, id_)
if assign_id:
reflections.experiment_identifiers()[id_] = str(id_)
return reflections
symmetry_test_data = [
(
"P 2 ",
"P 2 ",
["mP", "aP", "oP"],
["P 1 2 1", "P 1"],
["P 1 2 1", "P 1 2 1", "P 2 2 2"],
),
(
"P 1 ",
"P 2 ",
["aP", "mP", "oP"],
["P 1", "P 1 2 1"],
["P 1 2 1", "P 1 2 1", "P 2 2 2"],
),
]
@pytest.mark.parametrize(
"""reflection_spacegroup, experiments_spacegroup,
expected_lattices, required_spacegroup_order, other_spacegroups""",
symmetry_test_data,
)
def test_dials_symmetry_decide_pointgroup(
reflection_spacegroup,
experiments_spacegroup,
expected_lattices,
required_spacegroup_order,
other_spacegroups,
helper_directory,
):
"""Test for the dials_symmetry_decide_pointgroup helper function"""
helper, tmpdir = helper_directory
refl_path = (tmpdir / "test.refl").strpath
exp_path = (tmpdir / "test.expt").strpath
generated_exp(space_group=experiments_spacegroup).as_file(exp_path)
generate_reflections_in_sg(reflection_spacegroup).as_file(refl_path)
symmetry_analyser = helper.dials_symmetry_decide_pointgroup([exp_path], [refl_path])
# Note : instabilities have been observed in the order of the end of the
# spacegroup list - this is likely due to the use of unseeded random number
# generation in dials.symmetry symmetry element scoring, but this only seems
# to affect the order of groups with a score near zero. Hence only assert the
# order of the spacegroups that must be in order, near the start of the list.
assert symmetry_analyser.get_possible_lattices() == expected_lattices
spacegroups = symmetry_analyser.get_likely_spacegroups()
assert spacegroups[: len(required_spacegroup_order)] == required_spacegroup_order
assert set(spacegroups[len(required_spacegroup_order) :]) == set(other_spacegroups)
def test_assign_identifiers(helper_directory):
"""Test the call to the assign identifiers wrapper"""
helper, tmpdir = helper_directory
experiments = []
reflections = []
for i in range(0, 3):
refl_path = tmpdir.join("test_%s.refl" % i).strpath
exp_path = tmpdir.join("test_%s.expt" % i).strpath
generate_test_refl().as_file(refl_path)
generated_exp().as_file(exp_path)
experiments.append(exp_path)
reflections.append(refl_path)
assigner = helper.assign_dataset_identifiers(experiments, reflections)
expts = load.experiment_list(assigner.get_output_experiments_filename())
assert len(set(expts.identifiers())) == 3
refls = flex.reflection_table.from_file(assigner.get_output_reflections_filename())
assert refls.experiment_identifiers()[0] == expts[0].identifier
assert refls.experiment_identifiers()[1] == expts[1].identifier
assert refls.experiment_identifiers()[2] == expts[2].identifier
class simple_sweep_info:
"""Simple sweep info class for testing"""
def __init__(self):
self.reflections = ""
self.experiments = ""
def get_integrater(self):
return self
def get_integrated_experiments(self):
return self.experiments
def get_integrated_reflections(self):
return self.reflections
def set_reflections(self, refl):
self.reflections = refl
def get_reflections(self):
return self.reflections
def set_experiments(self, exp):
self.experiments = exp
def get_experiments(self):
return self.experiments
class simple_sweep_handler:
"""Simple sweep handler class for testing"""
def __init__(self, number_of_experiments):
self.number_of_experiments = number_of_experiments
self.sis = [simple_sweep_info() for _ in range(number_of_experiments)]
def get_epochs(self):
"""Return a list of 0...n-1"""
return list(range(self.number_of_experiments))
def get_sweep_information(self, epoch):
"""Return the simple sweep info class for a given epoch"""
return self.sis[epoch]
@pytest.mark.parametrize("number_of_experiments", [2, 10])
def test_split_experiments(number_of_experiments, helper_directory):
"""Test the call to split experiments: should split the dataset on experiment
id, giving single datasets with unique ids from 0..n-1"""
helper, tmpdir = helper_directory
sweephandler = simple_sweep_handler(number_of_experiments)
exp_path = tmpdir.join("test.expt").strpath
refl_path = tmpdir.join("test.refl").strpath
generated_exp(number_of_experiments, assign_ids=True).as_file(exp_path)
reflections = flex.reflection_table()
for i in range(number_of_experiments):
reflections.extend(generate_test_refl(id_=i, assign_id=True))
reflections.as_file(refl_path)
# Now call split_experiments and inspect handler to check result
sweephandler = helper.split_experiments(exp_path, refl_path, sweephandler)
check_data_in_sweep_handler(sweephandler)
def check_data_in_sweep_handler(sweephandler):
"""Check that data in sweep handler has ids set correctly"""
for i, epoch in enumerate(sweephandler.get_epochs()):
si = sweephandler.get_sweep_information(epoch)
r = flex.reflection_table.from_file(si.get_reflections())
assert list(set(r["id"])) == [0]
assert list(r.experiment_identifiers().keys()) == [0]
identifiers = r.experiment_identifiers().values()
assert len(identifiers) == 1
experiment = load.experiment_list(si.get_experiments())
assert len(experiment) == 1
assert experiment[0].identifier == identifiers[0]
def test_assign_and_return_datasets(helper_directory):
"""Test the combined method of assigning ids and setting in the sweep handler"""
n = 3
helper, tmpdir = helper_directory
sweephandler = simple_sweep_handler(n)
for i in range(0, n):
si = sweephandler.get_sweep_information(i)
refl_path = tmpdir.join("test_%s.refl" % i).strpath
exp_path = tmpdir.join("test_%s.expt" % i).strpath
generate_test_refl().as_file(refl_path)
generated_exp().as_file(exp_path)
si.set_experiments(exp_path)
si.set_reflections(refl_path)
sweephandler = helper.assign_and_return_datasets(sweephandler)
check_data_in_sweep_handler(sweephandler)
class simple_refiner:
LATTICE_POSSIBLE = "LATTICE_POSSIBLE"
LATTICE_IMPOSSIBLE = "LATTICE_IMPOSSIBLE"
LATTICE_CORRECT = "LATTICE_CORRECT"
def __init__(self, refiner_lattices):
self.refiner_lattices = (
refiner_lattices # first one should be 'best' one used in refinement
)
self.indexer_done = True
self._refiner_reset = False
def get(self):
return self.refiner_lattices
def set_refiner_asserted_lattice(self, lattice):
"""Replicate asserted_lattice methods of refiner and indexer"""
# calls indexer, if not in list of lattices - returns LATTICE_IMPOSSIBLE
if lattice not in self.refiner_lattices:
return self.LATTICE_IMPOSSIBLE
if lattice == self.refiner_lattices[0]:
"""if (PhilIndex.params.xia2.settings.integrate_p1 and
asserted_lattice != self.get_indexer_lattice() and
asserted_lattice != 'aP'):
if PhilIndex.params.xia2.settings.reintegrate_correct_lattice:
self.set_indexer_done(False)
return self.LATTICE_POSSIBLE"""
return self.LATTICE_CORRECT
# else, - calls eliminate, set indexer done false
while self.get()[0] != lattice:
del self.refiner_lattices[0] # i.e. eliminate
# if (not integrate_p1) or reintegrate_correct_lattice
self.indexer_done = False
self.refiner_reset()
return self.LATTICE_POSSIBLE
def get_refiner_lattice(self):
"""Return first lattice"""
return self.refiner_lattices[0]
def refiner_reset(self):
"""Set refiner reset as True"""
self._refiner_reset = True
def get_refiner_reset(self):
"""Get refiner reset status"""
return self._refiner_reset
# get ntr if symmetry lower than refiner - resets reindex op in integrater and
# sets need_to_return = True, which then sets scaler prepare done as False
# get rerun if symmetry finds higher than refiner or no possible - then in symmetry jiffy sets the
# correct lattice in symmetry and makes it run with that.
# test_data = (refiner lattice, possible lattices, (correct, rerun, ntr))
test_data = [
(["mP", "aP", "oP"], ["mP"], ("mP", False, False)), # symmetry same as from refiner
(
["mP", "aP", "oP"],
["aP"],
("aP", False, True),
), # symmetry is lower than from refiner
(
["mP", "aP", "oP"],
["tP", "mP"],
("mP", True, False),
), # symmetry finds higher than refiner
(["mP", "aP", "oP"], ["tP", "aP"], ("aP", True, True)),
] # symmetry finds higher than refiner,
# but next best is lower than refiner
@pytest.mark.parametrize(
"refiner_lattices, possible_lattices, expected_output", test_data
)
def test_decide_correct_lattice_using_refiner(
ccp4, refiner_lattices, possible_lattices, expected_output
):
refiner = simple_refiner(refiner_lattices)
result = decide_correct_lattice_using_refiner(possible_lattices, refiner)
assert result == expected_output
# refiner lattices, (pg, ntr, pt, refiner_reset, reindex_init)
test_lattices = [
(["mP", "aP", "oP"], ("P 1 2 1", False, False, False, False)),
# symmetry finds consistent lattice, all good
(["tP", "mP", "aP", "oP"], ("P 1 2 1", True, False, True, False)),
# symmetry finds lower than refiner lattice, so need to return to rerefine
(["aP"], ("P 1", False, False, False, True)),
] # symmetry finds higher than refiner - can occur
# if pseudosymmetry, so just drop to lower symmetry of lattice and don't need to rerefine
# as already done in this space group.
@pytest.mark.parametrize("refiner_lattices, expected_output", test_lattices)
def test_dials_symmetry_indexer_jiffy(
refiner_lattices, expected_output, helper_directory
):
"""Test the jiffy"""
helper, tmpdir = helper_directory
n = 1
multisweep = False
# Create list of experiments, reflections and refiners
experiments = []
reflections = []
refiners = []
for i in range(0, n):
refl_path = tmpdir.join("test_%s.refl" % i).strpath
exp_path = tmpdir.join("test_%s.expt" % i).strpath
generate_reflections_in_sg("P 2", id_=i, assign_id=True).as_file(refl_path)
generated_exp(space_group="P 2", id_=i).as_file(exp_path)
experiments.append(exp_path)
reflections.append(refl_path)
refiners.append(simple_refiner(refiner_lattices))
result = helper.dials_symmetry_indexer_jiffy(
experiments, reflections, refiners, multisweep=multisweep
)
pg, reind_op, ntr, pt, reind_refl, reind_exp, reind_init = result
refiner_reset = refiners[0].get_refiner_reset()
assert (pg, ntr, pt, refiner_reset, reind_init) == expected_output
if expected_output[3]:
for refiner in refiners[1:]:
assert refiner.get_refiner_reset()
|
bsd-3-clause
| -598,603,767,036,162,700 | 36.582915 | 98 | 0.653563 | false |
goir/virtualenv-creator
|
create_virtualenv.py
|
1
|
7979
|
#!/usr/bin/env python
# coding=utf-8
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import absolute_import
import argparse
import hashlib
import inspect
import subprocess
import sys
import os
import urllib2
import shutil
WHEEL_PIP = 'https://pypi.python.org/packages/py2.py3/p/pip/pip-8.0.2-py2.py3-none-any.whl#md5=2056f553d5b593d3a970296f229c1b79'
WHEEL_SETUPTOOLS = 'https://pypi.python.org/packages/3.5/s/setuptools/setuptools-19.4-py2.py3-none-any.whl#md5=45ad8918e4dc2568cdefe3c4138d0760'
class Colors(object):
GREEN = '\033[32m'
YELLOW = '\033[33m'
RED = '\033[31m'
BLUE = '\033[34m'
RESET = '\033[0m'
def call(cmd):
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if args.debug:
print("Executing command: {0}".format(cmd))
print(p.stdout.read())
print(p.stderr.read())
p.wait()
p.stdout.close()
p.stderr.close()
return p.returncode
def color(text, color):
""" Color a string
:param unicode text: the text the color to apply to
:param unicode color: color to use
:return: colored output
:rtype: unicode
"""
return "{color}{text}{reset}".format(color=getattr(Colors, color.upper()),
text=text,
reset=Colors.RESET)
def check_files_exists(req_files):
""" Check if files exist. If not raise a RuntimeError
:param list req_files: files to check for existence
:return: None
:exception: RuntimeError
"""
for req_file in req_files:
if not os.path.isfile(os.path.abspath(req_file)):
raise RuntimeError(color('File {0} not found'.format(req_file), 'red'))
else:
print(color('Using requirements file {0}'.format(req_file), 'green'))
def install_from_pypy(req_files):
""" Install requirements from :param:req_files using live pypy.
:param list req_files: List of requirement filenames
:return: None
"""
call(['pip', 'install', '-U', 'pip-accel'])
for req_file in req_files:
print(color("Installing requirements from {0}".format(req_file), 'green'))
install_cmd = ['pip-accel', 'install', '-r', req_file]
if os.path.isfile('constraints.txt'):
install_cmd.extend(['-c', 'constraints.txt'])
if call(install_cmd) != 0:
raise RuntimeError(color("Installation of requirements from {0} using pypy failed".format(req_file), 'red'))
def download_wheel(url, target_dir):
""" Download a wheel file from pypy. The url must have a #md5= in the url this is used to validate the download.
This does nothing if the file already exists. And raises an Exception if the md5 checksum does not match.
:param unicode url: download url
:param unicode target_dir: Absolute path to directory to put the file in
:return: None
"""
url_split = url.split('#')
filename = os.path.basename(url_split[0])
md5_hash = url_split[1].split('md5=')[1]
destination = os.path.join(target_dir, filename)
# check if file already exists
if os.path.isfile(destination):
print(color('{0} already exists'.format(destination), 'yellow'))
else:
print(color('Downloading {0} to {1} from {2}'.format(filename, destination, url), 'green'))
response = urllib2.urlopen(url)
with open(destination, mode='wb') as fp:
data = response.read()
if md5_hash != hashlib.md5(data).hexdigest():
os.unlink(destination)
raise RuntimeError(color('md5 hash of file {0} does not match'.format(filename), 'red'))
fp.write(data)
def cleanup_wheels(url, target_dir):
filename, _ = os.path.basename(url).split('#')
files = os.listdir(target_dir)
package_name = filename.split('-', 1)[0]
for f in files:
if f.startswith(package_name) and not f == filename:
print(color('Removing old version of {0}: {1}'.format(package_name, f), 'green'))
os.unlink(os.path.join(target_dir, f))
def create_virtualenv(root_path, target, wheels_dir, copy=False):
""" setup virtualenv in :param:target. Downloads Pip and Setuptools if they dont exist in *wheels_dir*.
:param unicode root_path: Absolute path
:param unicode target: Directory name for virtualenv in :param:root_path
:param unicode wheels_dir: Absolute path where the wheels of pip and setuptools live or get downloaded to
:return: None
"""
target_dir = os.path.join(root_path, target)
# this is needed to get this filename even if executed by execfile()
this_file = inspect.getframeinfo(inspect.currentframe()).filename
venv_bin = os.path.join(os.path.abspath(os.path.dirname(this_file)), 'virtualenv.py')
if os.path.isdir(target_dir):
shutil.rmtree(target_dir)
print(color("Deleted old env in {0}".format(target_dir), 'green'))
cleanup_wheels(WHEEL_PIP, wheels_dir)
cleanup_wheels(WHEEL_SETUPTOOLS, wheels_dir)
cmd = [sys.executable, venv_bin, target_dir, '--no-wheel', '--extra-search-dir', wheels_dir]
if copy:
cmd.append('--always-copy')
if call(cmd) != 0:
# most likeley pip and setuptools wheels could not be found
# download them to wheels_dir.
download_wheel(WHEEL_PIP, wheels_dir)
download_wheel(WHEEL_SETUPTOOLS, wheels_dir)
if call(cmd) != 0:
raise RuntimeError(color('Could not setup virtualenv', 'red'))
print(color("Created virtualenv in {0}".format(target_dir), 'green'))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--debug', action='store_true', help="activate debug output")
parser.add_argument('--dev', '-d', action='store_true', help='install development requirements (requirements-dev.txt)')
parser.add_argument('--target', '-t', type=str, default="env", help="where to put the new env (default: %(default)s)")
parser.add_argument('--wheels', '-w', action='store_true', help="install from wheels. If a wheel does not exist it will be created")
parser.add_argument('--wheels-dir', type=str, default=os.path.expanduser('~/.python_wheels'), help="install from wheels. If a wheel does not exist it will be created.")
parser.add_argument('--always-copy', '-c', action='store_true', help='Don\'t create symlinks (use on windows and/or shared folders)')
args = parser.parse_args()
# --wheels and -w does nothing anymore, pip creates wheels on its own and caches them!
if not args.debug:
def a(type, value, traceback):
print(value)
sys.excepthook = a
# check if any environment is active
if hasattr(sys, 'real_prefix'):
raise RuntimeError(color('Please deactivate the current virtualenv using "deactivate"', 'red'))
if 'VIRTUAL_ENV' in os.environ:
del os.environ['VIRTUAL_ENV']
print(color("Using wheels dir {0}".format(args.wheels_dir), 'green'))
# create the wheels dir if we use wheels
try:
os.mkdir(args.wheels_dir, 0o777)
except OSError as e:
# file already exists, ignore this
pass
requirement_files = ['requirements.txt']
if args.dev:
requirement_files.append('requirements-dev.txt')
check_files_exists(requirement_files)
root_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../')
create_virtualenv(root_path, args.target, args.wheels_dir, args.always_copy)
# activate the new virtualenv
activate_this = os.path.join(root_path, "%s/bin/activate_this.py" % args.target)
exec (compile(open(activate_this).read(), activate_this, 'exec'), dict(__file__=activate_this))
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
install_from_pypy(requirement_files)
print(color('Successfully installed all requirements from {0}'.format(', '.join(requirement_files)), 'green'))
|
gpl-2.0
| -580,758,837,980,858,800 | 39.095477 | 172 | 0.652964 | false |
regebro/svg.path
|
src/svg/path/tests/test_tokenizer.py
|
1
|
2492
|
import pytest
from svg.path import parser
PATHS = [
(
"M 100 100 L 300 100 L 200 300 z",
[("M", "100 100"), ("L", "300 100"), ("L", "200 300"), ("z", "")],
[("M", 100 + 100j), ("L", 300 + 100j), ("L", 200 + 300j), ("z",)],
),
(
"M 5 1 v 7.344 A 3.574 3.574 0 003.5 8 3.515 3.515 0 000 11.5 C 0 13.421 1.579 15 3.5 15 "
"A 3.517 3.517 0 007 11.531 v -7.53 h 6 v 4.343 A 3.574 3.574 0 0011.5 8 3.515 3.515 0 008 11.5 "
"c 0 1.921 1.579 3.5 3.5 3.5 1.9 0 3.465 -1.546 3.5 -3.437 V 1 z",
[
("M", "5 1"),
("v", "7.344"),
("A", "3.574 3.574 0 003.5 8 3.515 3.515 0 000 11.5"),
("C", "0 13.421 1.579 15 3.5 15"),
("A", "3.517 3.517 0 007 11.531"),
("v", "-7.53"),
("h", "6"),
("v", "4.343"),
("A", "3.574 3.574 0 0011.5 8 3.515 3.515 0 008 11.5"),
("c", "0 1.921 1.579 3.5 3.5 3.5 1.9 0 3.465 -1.546 3.5 -3.437"),
("V", "1"),
("z", ""),
],
[
("M", 5 + 1j),
("v", 7.344),
("A", 3.574, 3.574, 0, False, False, 3.5 + 8j),
("A", 3.515, 3.515, 0, False, False, 0 + 11.5j),
("C", 0 + 13.421j, 1.579 + 15j, 3.5 + 15j),
("A", 3.517, 3.517, 0, False, False, 7 + 11.531j),
("v", -7.53),
("h", 6),
("v", 4.343),
("A", 3.574, 3.574, 0, False, False, 11.5 + 8j),
("A", 3.515, 3.515, 0, False, False, 8 + 11.5j),
("c", 0 + 1.921j, 1.579 + 3.5j, 3.5 + 3.5j),
("c", 1.9 + 0j, 3.465 - 1.546j, 3.5 - 3.437j),
("V", 1),
("z",),
],
),
(
"M 600,350 L 650,325 A 25,25 -30 0,1 700,300 L 750,275",
[
("M", "600,350"),
("L", "650,325"),
("A", "25,25 -30 0,1 700,300"),
("L", "750,275"),
],
[
("M", 600 + 350j),
("L", 650 + 325j),
("A", 25, 25, -30, False, True, 700 + 300j),
("L", 750 + 275j),
],
),
]
@pytest.mark.parametrize("path, commands, tokens", PATHS)
def test_commandifier(path, commands, tokens):
assert list(parser._commandify_path(path)) == commands
assert list(parser._tokenize_path(path)) == tokens
@pytest.mark.parametrize("path, commands, tokens", PATHS)
def test_parser(path, commands, tokens):
path = parser.parse_path(path)
|
mit
| 8,679,883,281,088,874,000 | 33.611111 | 105 | 0.400482 | false |
JarbasAI/jarbas-core
|
mycroft/jarbas-skills/skill_roll_dice/__init__.py
|
1
|
1300
|
from os.path import dirname
from adapt.intent import IntentBuilder
from mycroft.skills.core import MycroftSkill
from mycroft.util.log import getLogger
import random
__author__ = 'paul'
LOGGER = getLogger(__name__)
class RollDiceSkill(MycroftSkill):
def __init__(self):
super(RollDiceSkill, self).__init__(name="RollDiceSkill")
self.possibilities = ["1", "2", "3", "4", "5", "6"]
def initialize(self):
roll_a_dice_intent = IntentBuilder("RollADiceIntent").\
require("RollADiceKeyword").build()
self.register_intent(roll_a_dice_intent, self.handle_roll_a_dice_intent)
roll_two_dice_intent = IntentBuilder("RollTwoDiceIntent").\
require("RollTwoDiceKeyword").build()
self.register_intent(roll_two_dice_intent, self.handle_roll_two_dice_intent)
def handle_roll_a_dice_intent(self, message):
roll = random.choice(self.possibilities)
self.speak_dialog("roll.a.dice", {"roll": roll})
def handle_roll_two_dice_intent(self, message):
roll1 = random.choice(self.possibilities)
roll2 = random.choice(self.possibilities)
self.speak_dialog("roll.two.dice", {"roll1": roll1, "roll2": roll2})
def stop(self):
pass
def create_skill():
return RollDiceSkill()
|
gpl-3.0
| 3,298,990,753,935,752,000 | 29.232558 | 84 | 0.662308 | false |
karimbahgat/PythonGis
|
pythongis/vector/saver.py
|
1
|
6720
|
# import builtins
import itertools
import math
# import fileformats
import shapefile as pyshp
import pygeoj
# PY3 fix
try:
str = unicode # in py2, make str synonymous with unicode
zip = itertools.izip
except:
pass
NaN = float("nan")
def is_missing(value):
return value in (None,"") or (isinstance(value, float) and math.isnan(value))
def to_file(fields, rows, geometries, filepath, encoding="utf8", maxprecision=12, **kwargs):
def encode(value):
if isinstance(value, int):
# ints are kept as ints
return value
elif isinstance(value, float):
if value.is_integer():
return int(value)
elif math.isnan(value):
return None
else:
# floats are rounded
return round(value, maxprecision)
elif isinstance(value, str):
# unicode is custom encoded into bytestring
return value.encode(encoding)
elif value is None:
return value
else:
# brute force anything else to string representation
return bytes(value)
def detect_fieldtypes(fields, rows):
# TODO: allow other data types such as dates etc...
# set fields with correct fieldtype
fieldtypes = []
for fieldindex,fieldname in enumerate(fields):
fieldlen = 1
decimals = 0
fieldtype = "N" # assume number until proven otherwise
for row in rows:
value = row[fieldindex]
if is_missing(value):
# empty value, so just keep assuming same type
pass
else:
try:
# make nr fieldtype if content can be made into nr
# convert to nr or throw exception if text
value = float(value)
# rare special case where text is 'nan', is a valid input to float, so raise exception to treat as text
if math.isnan(value):
raise ValueError()
# TODO: also how to handle inf? math.isinf(). Treat as text or nr?
if math.isinf(value):
raise NotImplementedError("Saving infinity values not yet implemented")
# detect nr type
if value.is_integer():
_strnr = bytes(value)
else:
# get max decimals, capped to max precision
_strnr = format(value, ".%sf"%maxprecision).rstrip("0")
decimals = max(( len(_strnr.split(".")[1]), decimals ))
fieldlen = max(( len(_strnr), fieldlen ))
except ValueError:
# but turn to text if any of the cells cannot be made to float bc they are txt
fieldtype = "C"
value = value if isinstance(value, str) else bytes(value)
fieldlen = max(( len(value), fieldlen ))
if fieldtype == "N" and decimals == 0:
fieldlen -= 2 # bc above we measure lengths for ints as if they were floats, ie with an additional ".0"
func = lambda v: "" if is_missing(v) else int(float(v))
elif fieldtype == "N" and decimals:
func = lambda v: "" if is_missing(v) else float(v)
elif fieldtype == "C":
func = lambda v: v #encoding are handled later
else:
raise Exception("Unexpected bug: Detected field should be always N or C")
fieldtypes.append( (fieldtype,func,fieldlen,decimals) )
return fieldtypes
# shapefile
if filepath.endswith(".shp"):
shapewriter = pyshp.Writer(filepath, encoding='utf8')
fieldtypes = detect_fieldtypes(fields,rows)
# set fields with correct fieldtype
for fieldname,(fieldtype,func,fieldlen,decimals) in zip(fields, fieldtypes):
fieldname = fieldname.replace(" ","_")[:10]
shapewriter.field(fieldname, fieldtype, fieldlen, decimals)
# iterate through original shapes
for row,geoj in zip(rows, geometries):
shapewriter.shape(geoj)
row = [func(value) for (typ,func,length,deci),value in zip(fieldtypes,row)]
shapewriter.record(*row)
# save
shapewriter.close()
# geojson file
elif filepath.endswith((".geojson",".json")):
geojwriter = pygeoj.new()
fieldtypes = detect_fieldtypes(fields,rows)
for row,geom in zip(rows,geometries):
# encode row values
row = (func(value) for (typ,func,length,deci),value in zip(fieldtypes,row))
row = (encode(value) for value in row)
rowdict = dict(zip(fields, row))
# create and add feature
geojwriter.add_feature(properties=rowdict,
geometry=geom)
# save
geojwriter.save(filepath, encoding=encoding)
# normal table file without geometry
elif filepath.endswith((".txt",".csv")):
import csv
# TODO: Add option of saving geoms as strings in separate fields
with open(filepath, "wb") as fileobj:
csvopts = dict()
csvopts["delimiter"] = kwargs.get("delimiter", ";") # tab is best for automatically opening in excel...
writer = csv.writer(fileobj, **csvopts)
writer.writerow([f.encode(encoding) for f in fields])
for row,geometry in zip(rows, geometries):
writer.writerow([encode(val) for val in row])
elif filepath.endswith(".xls"):
import xlwt
with open(filepath, "wb") as fileobj:
wb = xlwt.Workbook(encoding=encoding) # module takes care of encoding for us
sheet = wb.add_sheet("Data")
# fields
for c,f in enumerate(fields):
sheet.write(0, c, f)
# rows
for r,(row,geometry) in enumerate(zip(rows, geometries)):
for c,val in enumerate(row):
# TODO: run val through encode() func, must spit out dates as well
sheet.write(r+1, c, val)
# save
wb.save(filepath)
else:
raise Exception("Could not save the vector data to the given filepath: the filetype extension is either missing or not supported")
|
mit
| -1,759,641,893,836,171,000 | 37.4 | 138 | 0.536756 | false |
ereOn/azmq
|
azmq/mechanisms/plain_server.py
|
1
|
2073
|
"""
The PLAIN server mechanism.
"""
from ..log import logger
from .base import Mechanism
class PlainServer(object):
def __call__(self):
return PlainServerMechanism()
class PlainServerMechanism(Mechanism):
name = b'PLAIN'
as_server = True
@classmethod
async def _read_plain_hello(cls, reader):
buffer = await cls._expect_command(
reader=reader,
name=b'HELLO',
)
username_len = buffer[0]
username = buffer[1:username_len + 1]
password_len = buffer[username_len + 1]
password = buffer[username_len + 2:username_len + 2 + password_len]
return username, password
@classmethod
async def _read_plain_initiate(cls, reader):
raw_metadata = await cls._expect_command(
reader=reader,
name=b'INITIATE',
)
return cls._buffer_to_metadata(buffer=raw_metadata)
@classmethod
def _write_plain_ready(cls, writer, metadata):
cls.write_command(
writer=writer,
name=b'READY',
buffers=cls._metadata_to_buffers(metadata)
)
async def negotiate(self, writer, reader, metadata, address, zap_client):
logger.debug("Negotiating PLAIN parameters as server.")
# Wait for a HELLO.
username, password = await self._read_plain_hello(reader=reader)
self.write_command(writer=writer, name=b'WELCOME')
remote_metadata = await self._read_plain_initiate(reader=reader)
if zap_client:
user_id, auth_metadata = await zap_client.authenticate(
domain='',
address=address,
identity=remote_metadata.get(b'identity', b''),
mechanism=self.name,
credentials=[
username,
password,
],
)
else:
user_id, auth_metadata = None, None
self._write_plain_ready(writer=writer, metadata=metadata)
return remote_metadata, user_id, auth_metadata
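# Handshake summary (informational sketch, not part of the original file): the
# ZMTP PLAIN exchange implemented above proceeds as
#   client -> HELLO(username, password)
#   server -> WELCOME
#   client -> INITIATE(client metadata)
#   server -> READY(server metadata)
# with the optional ZAP authentication call made between INITIATE and READY.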
|
gpl-3.0
| 2,243,719,306,831,642,600 | 27.013514 | 77 | 0.581283 | false |
RasaHQ/rasa_nlu
|
rasa/nlu/featurizers/regex_featurizer.py
|
1
|
5392
|
import io
import logging
import numpy as np
import os
import re
import typing
from typing import Any, Dict, Optional, Text
from rasa.nlu import utils
from rasa.nlu.config import RasaNLUModelConfig
from rasa.nlu.featurizers import Featurizer
from rasa.nlu.training_data import Message, TrainingData
logger = logging.getLogger(__name__)
if typing.TYPE_CHECKING:
from rasa.nlu.model import Metadata
class RegexFeaturizer(Featurizer):
provides = ["text_features"]
requires = ["tokens"]
def __init__(self, component_config=None, known_patterns=None, lookup_tables=None):
super(RegexFeaturizer, self).__init__(component_config)
self.known_patterns = known_patterns if known_patterns else []
lookup_tables = lookup_tables or []
self._add_lookup_table_regexes(lookup_tables)
def train(
self, training_data: TrainingData, config: RasaNLUModelConfig, **kwargs: Any
) -> None:
self.known_patterns = training_data.regex_features
self._add_lookup_table_regexes(training_data.lookup_tables)
for example in training_data.training_examples:
updated = self._text_features_with_regex(example)
example.set("text_features", updated)
def process(self, message: Message, **kwargs: Any) -> None:
updated = self._text_features_with_regex(message)
message.set("text_features", updated)
def _text_features_with_regex(self, message):
if self.known_patterns:
extras = self.features_for_patterns(message)
return self._combine_with_existing_text_features(message, extras)
else:
return message.get("text_features")
def _add_lookup_table_regexes(self, lookup_tables):
# appends the regex features from the lookup tables to
# self.known_patterns
for table in lookup_tables:
regex_pattern = self._generate_lookup_regex(table)
lookup_regex = {"name": table["name"], "pattern": regex_pattern}
self.known_patterns.append(lookup_regex)
def features_for_patterns(self, message):
"""Checks which known patterns match the message.
Given a sentence, returns a vector of {1,0} values indicating which
regexes did match. Furthermore, if the
message is tokenized, the function will mark all tokens with a dict
relating the name of the regex to whether it was matched."""
found_patterns = []
for exp in self.known_patterns:
matches = re.finditer(exp["pattern"], message.text)
matches = list(matches)
found_patterns.append(False)
for token_index, t in enumerate(message.get("tokens", [])):
patterns = t.get("pattern", default={})
patterns[exp["name"]] = False
for match in matches:
if t.offset < match.end() and t.end > match.start():
patterns[exp["name"]] = True
found_patterns[-1] = True
t.set("pattern", patterns)
return np.array(found_patterns).astype(float)
def _generate_lookup_regex(self, lookup_table):
"""creates a regex out of the contents of a lookup table file"""
lookup_elements = lookup_table["elements"]
elements_to_regex = []
# if it's a list, it should be the elements directly
if isinstance(lookup_elements, list):
elements_to_regex = lookup_elements
# otherwise it's a file path.
else:
try:
f = io.open(lookup_elements, "r", encoding="utf-8")
except IOError:
raise ValueError(
"Could not load lookup table {}"
"Make sure you've provided the correct path".format(lookup_elements)
)
with f:
for line in f:
new_element = line.strip()
if new_element:
elements_to_regex.append(new_element)
# sanitize the regex, escape special characters
elements_sanitized = [re.escape(e) for e in elements_to_regex]
# regex matching elements with word boundaries on either side
regex_string = "(?i)(\\b" + "\\b|\\b".join(elements_sanitized) + "\\b)"
return regex_string
@classmethod
def load(
cls,
meta: Dict[Text, Any],
model_dir: Optional[Text] = None,
model_metadata: Optional["Metadata"] = None,
cached_component: Optional["RegexFeaturizer"] = None,
**kwargs: Any
) -> "RegexFeaturizer":
file_name = meta.get("file")
regex_file = os.path.join(model_dir, file_name)
if os.path.exists(regex_file):
known_patterns = utils.read_json_file(regex_file)
return RegexFeaturizer(meta, known_patterns=known_patterns)
else:
return RegexFeaturizer(meta)
def persist(self, file_name: Text, model_dir: Text) -> Optional[Dict[Text, Any]]:
"""Persist this model into the passed directory.
Return the metadata necessary to load the model again."""
file_name = file_name + ".pkl"
regex_file = os.path.join(model_dir, file_name)
utils.write_json_to_file(regex_file, self.known_patterns, indent=4)
return {"file": file_name}
|
apache-2.0
| 6,589,500,145,067,330,000 | 34.708609 | 88 | 0.611091 | false |
javierder/dogestart.me
|
core/migrations/0001_initial.py
|
1
|
11514
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Tag'
db.create_table(u'core_tag', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=200)),
))
db.send_create_signal(u'core', ['Tag'])
# Adding model 'Bounty'
db.create_table(u'core_bounty', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('author', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['account.Account'])),
('title', self.gf('django.db.models.fields.CharField')(max_length=300)),
('description', self.gf('django.db.models.fields.TextField')()),
('completed', self.gf('django.db.models.fields.DateTimeField')(default=None, null=True)),
('funded', self.gf('django.db.models.fields.BooleanField')(default=False)),
('address', self.gf('django.db.models.fields.CharField')(default='', max_length=300)),
('amount', self.gf('django.db.models.fields.FloatField')()),
('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
))
db.send_create_signal(u'core', ['Bounty'])
# Adding M2M table for field tags on 'Bounty'
m2m_table_name = db.shorten_name(u'core_bounty_tags')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('bounty', models.ForeignKey(orm[u'core.bounty'], null=False)),
('tag', models.ForeignKey(orm[u'core.tag'], null=False))
))
db.create_unique(m2m_table_name, ['bounty_id', 'tag_id'])
# Adding model 'BountyMessage'
db.create_table(u'core_bountymessage', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('author', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['account.Account'])),
('text', self.gf('django.db.models.fields.TextField')()),
('bounty', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['core.Bounty'])),
('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('approved', self.gf('django.db.models.fields.NullBooleanField')(default=False, null=True, blank=True)),
))
db.send_create_signal(u'core', ['BountyMessage'])
# Adding model 'FullFillMent'
db.create_table(u'core_fullfillment', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('author', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['account.Account'])),
('bounty', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['core.Bounty'])),
('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
))
db.send_create_signal(u'core', ['FullFillMent'])
# Adding model 'EarnedLog'
db.create_table(u'core_earnedlog', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('account', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['account.Account'])),
('bounty', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['core.Bounty'])),
('message', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['core.BountyMessage'])),
('amount', self.gf('django.db.models.fields.FloatField')(default=0)),
('withdrawn', self.gf('django.db.models.fields.BooleanField')(default=False)),
))
db.send_create_signal(u'core', ['EarnedLog'])
def backwards(self, orm):
# Deleting model 'Tag'
db.delete_table(u'core_tag')
# Deleting model 'Bounty'
db.delete_table(u'core_bounty')
# Removing M2M table for field tags on 'Bounty'
db.delete_table(db.shorten_name(u'core_bounty_tags'))
# Deleting model 'BountyMessage'
db.delete_table(u'core_bountymessage')
# Deleting model 'FullFillMent'
db.delete_table(u'core_fullfillment')
# Deleting model 'EarnedLog'
db.delete_table(u'core_earnedlog')
models = {
u'account.account': {
'Meta': {'object_name': 'Account'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'default': "'en-us'", 'max_length': '10'}),
'timezone': ('account.fields.TimeZoneField', [], {'default': "u''", 'max_length': '100', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "u'account'", 'unique': 'True', 'to': u"orm['auth.User']"})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'core.bounty': {
'Meta': {'object_name': 'Bounty'},
'address': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '300'}),
'amount': ('django.db.models.fields.FloatField', [], {}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['account.Account']"}),
'completed': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
'funded': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['core.Tag']", 'symmetrical': 'False'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'})
},
u'core.bountymessage': {
'Meta': {'object_name': 'BountyMessage'},
'approved': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['account.Account']"}),
'bounty': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.Bounty']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'text': ('django.db.models.fields.TextField', [], {})
},
u'core.earnedlog': {
'Meta': {'object_name': 'EarnedLog'},
'account': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['account.Account']"}),
'amount': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'bounty': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.Bounty']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.BountyMessage']"}),
'withdrawn': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'core.fullfillment': {
'Meta': {'object_name': 'FullFillMent'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['account.Account']"}),
'bounty': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.Bounty']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'core.tag': {
'Meta': {'object_name': 'Tag'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'})
}
}
complete_apps = ['core']
|
mit
| -3,946,435,351,670,234,000 | 62.269231 | 195 | 0.568873 | false |
mrooney/wxbanker
|
wxbanker/tests/currconverttests.py
|
1
|
1752
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# https://launchpad.net/wxbanker
# currconverttests.py: Copyright 2007-2010 Mike Rooney <mrooney@ubuntu.com>
#
# This file is part of wxBanker.
#
# wxBanker is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wxBanker is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with wxBanker. If not, see <http://www.gnu.org/licenses/>.
from wxbanker.tests import testbase
import unittest
from wxbanker import currencies, currconvert
class CurrConvertTest(unittest.TestCase):
def setUp(self):
self.CC = currconvert.CurrencyConverter()
def testConversionToSameCurrencyIsSame(self):
amount = 5.23
self.assertEqual(self.CC.Convert(amount, "EUR", "EUR"), amount)
def testConversionWithStockValuesIsExpected(self):
rate = self.CC.Exchanges['USD'] = 1.2345
self.assertEqual(self.CC.Convert(1, "EUR", "USD"), rate)
self.assertEqual(self.CC.Convert(1, "USD", "EUR"), 1/rate)
def testInvalidCurrencyIsExpectedException(self):
self.assertRaises(currconvert.ConversionException, lambda: self.CC.Convert(1, "FOO", "USD"))
self.assertRaises(currconvert.ConversionException, lambda: self.CC.Convert(1, "USD", "BAR"))
if __name__ == "__main__":
unittest.main()
|
gpl-3.0
| -1,414,807,178,836,616,400 | 39.744186 | 100 | 0.70605 | false |
ruijie/quantum
|
quantum/plugins/rgos/common/config.py
|
1
|
2220
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Ruijie network, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from quantum.openstack.common import cfg
DEFAULT_BRIDGE_MAPPINGS = []
DEFAULT_VLAN_RANGES = []
DEFAULT_SWITCH_SERVER = []
database_opts = [
cfg.StrOpt('sql_connection', default='sqlite://'),
cfg.IntOpt('sql_max_retries', default=-1),
cfg.IntOpt('reconnect_interval', default=2),
]
rgos_opts = [
cfg.StrOpt('integration_bridge', default='br-int'),
cfg.StrOpt('local_ip', default='127.0.0.1'),
cfg.ListOpt('bridge_mappings',
default=DEFAULT_BRIDGE_MAPPINGS,
help="List of <physical_network>:<bridge>"),
cfg.StrOpt('tenant_network_type', default='local',
help="Network type for tenant networks "
"(local, vlan, or none)"),
cfg.ListOpt('network_vlan_ranges',
default=DEFAULT_VLAN_RANGES,
help="List of <physical_network>:<vlan_min>:<vlan_max> "
"or <physical_network>"),
]
switch_opts = [
cfg.StrOpt('remote_switch_server', default=DEFAULT_SWITCH_SERVER,
help="List of <index>:<username>:<password>:<server>:<port>; "),
cfg.IntOpt('ssh_max_retries', default=-1),
cfg.IntOpt('reconnect_interval', default=2),
]
agent_opts = [
cfg.IntOpt('polling_interval', default=2),
cfg.IntOpt('lldp_timeout', default=2),
cfg.StrOpt('root_helper', default='sudo'),
cfg.BoolOpt('rpc', default=True),
]
cfg.CONF.register_opts(database_opts, "DATABASE")
cfg.CONF.register_opts(rgos_opts, "RGOS")
cfg.CONF.register_opts(agent_opts, "AGENT")
cfg.CONF.register_opts(switch_opts, "SWITCHAGENT")
|
apache-2.0
| 6,913,264,159,414,557,000 | 34.806452 | 80 | 0.66036 | false |
looselycoupled/xbus-501-timing-demonstrations
|
1-example.py
|
1
|
1744
|
#!/usr/bin/env python
# 1-example
# A simple demonstration script to find the largest prime number
# below a given limit.
#
# This file is intentionally inefficient in order to demonstrate
# various ways to time execution
#
# Usage:
# time python 1-example.py 10000
#
# Author: Allen Leis <al1075@georgetown.edu>
# Created: Wed Sep 13 21:50:05 2015 -0400
#
# Copyright (C) 2015 georgetown.edu
# For license information, see LICENSE.txt
#
# ID: 1-example.py [] al1075@georgetown.edu $
"""
A simple demonstration script to find the largest prime number
below a given limit.
"""
##########################################################################
## Imports
##########################################################################
import sys
##########################################################################
## Code
##########################################################################
def is_prime(limit):
"""
Using the most time intensive method possible, return True or False
as to whether the supplied number is prime
"""
for number in range(2,limit):
if (limit % number) == 0:
return False
return True
def find_largest_prime(limit):
"""
Find the highest number below the supplied limit/upper bound
that is a prime number
"""
i = 2
largest_prime = None
while i < limit:
if is_prime(i):
largest_prime = i
i += 1
return largest_prime
##########################################################################
## Execution
##########################################################################
if __name__ == '__main__':
upper_bound = int(sys.argv[1])
print find_largest_prime(upper_bound)
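    # In-process timing sketch (an assumption; the original intentionally relies
    # on the shell's `time` command instead):
    #   import time
    #   start = time.time()
    #   print find_largest_prime(upper_bound)
    #   print "elapsed: %.2f seconds" % (time.time() - start)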
|
mit
| 4,661,781,988,558,868,000 | 24.647059 | 74 | 0.484518 | false |
CloudBoltSoftware/cloudbolt-forge
|
actions/cloudbolt_plugins/skeleton_order_approval_action/order_approval_skeleton.py
|
1
|
1792
|
import sys
import time
from orders.models import Order
"""
Plug-in example for an Orchestration Action at the "Order Approval" trigger
point. May change CloudBolt's default approval behavior to implement custom
logic or integrate with an external change management system.
The context passed to the plug-in includes the order.
If the order is already ACTIVE when this hook is executed, this indicates that
the order was previously auto-approved. Otherwise, the hook approves or denies
the order based on the placeholder `conforms` check below. This is a skeleton
and will need to be changed to match your particular environment.
Args:
`order`: the order to be approved or denied.
`job`: always None in this context.
`logger`: write messages to the log for this action.
"""
def run(order, job=None, logger=None):
if order.status != 'PENDING':
logger.info('Order approval plugin skipped because order is not pending approval.')
return '', '', ''
owner = order.owner
group = order.group
env = order.environment
# number of servers to be provisioned by this order
number_of_servers = order.prov_server_count()
# "Order items" are the line items added to an order. They may be of
# several types: ProvisionServerOrderItem, ServerModOrderItem,
# BlueprintOrderItem, ProvisionNetworkOrderItem, etc.
# Not all of them involve servers.
items = [oi.cast() for oi in order.orderitem_set.filter()]
conforms = False
# Add your logic to determine if this order conforms to your policies
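    # Hypothetical example policy (illustration only; adjust to your environment):
    #   conforms = number_of_servers <= 5 and group.name != "Restricted"
    # Any attribute of owner, group, env or the order items can be used the same way.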
if conforms:
order.approve()
else:
order.deny(reason='Sorry, your order was invalid because...')
# Return 'success' response even if we have rejected the order, since this
# plug-in action succeeded.
return '', '', ''
|
apache-2.0
| 1,055,587,732,821,589,200 | 33.461538 | 91 | 0.720424 | false |
medworx/MedUX
|
medux/plugins/core/icontext.py
|
1
|
2440
|
#
# Copyright (C) 2013 Christian A. Reiter
#
# This file is part of MedUX.
#
# MedUX is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# MedUX is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with MedUX. If not, see <http://www.gnu.org/licenses/>.
#
from PyQt4.QtCore import QObject
from medux.plugins.core.itemid import ItemId
__author__ = "Christian A. Reiter"
__email__ = "christian.a.reiter@gmail.com"
class Context:
def __init__(self, c1: ItemId=None, c2: ItemId=None, c3: ItemId=None):
self._ids = []
if c1:
self.add(c1)
if c2:
self.add(c2)
if c3:
self.add(c3)
def __contains__(self, c: ItemId):
"""
:param c: ItemId that should be checked if it is part of this Context
:return: True if this context contains the given ItemId, else False
"""
return c in self._ids
def __len__(self):
return len(self._ids)
def isEmpty(self):
""":returns: True if internal list is empty, else False."""
return len(self._ids) == 0
def __getitem__(self, index: int):
return self._ids[index]
def __iter__(self):
return iter(self._ids)
def indexOf(self, context):
return self._ids.index(context)
def remove(self, value):
self._ids.remove(value)
def prepend(self, context):
self._ids.insert(0, context)
def add(self, value):
"""
:type value: Context or ItemId
"""
        if type(value) == ItemId:
            self._ids.append(value)
        else:
            # value is a Context: merge its ItemIds rather than nesting the object
            self._ids.extend(value)
def __eq__(self, other):
"""
:returns: True if other Context has same Ids as subset, else False.
"""
return len(set(self._ids).difference(other._ids)) == 0
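# Usage sketch (hypothetical id names; assumes ItemId can be built from a string):
#   editor = Context(ItemId("MedUX.Editor"))
#   combined = Context(ItemId("MedUX.Editor"), ItemId("MedUX.Patient"))
#   editor == combined                      # True: __eq__ tests a subset relation
#   ItemId("MedUX.Editor") in combined      # True via __contains__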
class IContext(QObject):
def __init__(self, parent=None):
        QObject.__init__(self, parent)
self.context = Context()
self.widget = None
self.contextHelpId = ""
|
gpl-3.0
| -8,318,707,536,714,407,000 | 26.727273 | 77 | 0.605738 | false |
stefanseefeld/synopsis
|
Synopsis/Formatters/HTML/Parts/Inheritance.py
|
1
|
3851
|
#
# Copyright (C) 2000 Stephen Davies
# Copyright (C) 2000 Stefan Seefeld
# All rights reserved.
# Licensed to the public under the terms of the GNU LGPL (>= 2),
# see the file COPYING for details.
#
from Synopsis.Processor import Parameter
from Synopsis import ASG
from Synopsis.Formatters.HTML.Part import Part
from Synopsis.Formatters.HTML.Fragments import *
from Synopsis.Formatters.HTML.Tags import *
def short_name(decl):
return isinstance(decl, ASG.Function) and decl.real_name[-1] or decl.name[-1]
class Inheritance(Part):
fragments = Parameter([InheritanceFormatter()],
'')
def register(self, view):
Part.register(self, view)
self.__start_list = 0
def process(self, decl):
"Walk the hierarchy to find inherited members to print."
if not isinstance(decl, (ASG.Class, ASG.ClassTemplate)): return
self.write_start()
names = [short_name(d) for d in decl.declarations]
self._process_superclasses(decl, names)
self.write_end()
def _process_class(self, class_, names):
"Prints info for the given class, and calls _process_superclasses after"
sorter = self.processor.sorter.clone(class_.declarations)
child_names = []
# Iterate through the sections
for section in sorter:
# Write a heading
heading = section+' Inherited from '+ str(self.scope().prune(class_.name))
            started = 0 # Lazy section start in case there are no details for this section
# Iterate through the children in this section
for child in sorter[section]:
child_name = short_name(child)
if child_name in names:
continue
# FIXME: This doesn't account for the inheritance type
# (private etc)
if child.accessibility == ASG.PRIVATE:
continue
# Don't include constructors and destructors!
if (isinstance(child, ASG.Function) and
child.file.annotations['language'] == 'C++' and
len(child.real_name) > 1):
if child.real_name[-1] == child.real_name[-2]: continue
elif child.real_name[-1] == "~"+child.real_name[-2]: continue
# FIXME: skip overriden declarations
child_names.append(child_name)
# Check section heading
if not started:
started = 1
self.write_section_start(heading)
child.accept(self)
# Finish the section
if started: self.write_section_end(heading)
self._process_superclasses(class_, names + child_names)
def _process_superclasses(self, class_, names):
"""Iterates through the superclasses of clas and calls _process_clas for
each"""
for inheritance in class_.parents:
parent = inheritance.parent
if isinstance(parent, ASG.DeclaredTypeId):
parent = parent.declaration
if isinstance(parent, ASG.Class):
self._process_class(parent, names)
continue
#print "Ignoring", parent.__class__.__name__, "parent of", clas.name
pass #ignore
def write_section_start(self, heading):
"""Creates a table with one row. The row has a td of class 'heading'
containing the heading string"""
self.write('<table width="100%%" summary="%s">\n'%heading)
self.write('<tr><td colspan="2" class="heading">' + heading + '</td></tr>\n')
self.write('<tr><td class="inherited">')
self.__start_list = 1
def write_section_item(self, text):
"""Adds a table row"""
if self.__start_list:
self.write(text)
self.__start_list = 0
else:
self.write(',\n'+text)
def write_section_end(self, heading):
self.write('</td></tr></table>\n')
|
lgpl-2.1
| -537,067,839,087,499,260 | 33.383929 | 83 | 0.608154 | false |
openmotics/gateway
|
src/platform_utils.py
|
1
|
15704
|
# Copyright (C) 2018 OpenMotics BV
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
""""
The platform_utils module contains various classes helping with Hardware, System and Platform abstraction
"""
from __future__ import absolute_import
import logging
import os
import subprocess
import sys
import constants
if False: # MYPY
from typing import Union, Dict
logger = logging.getLogger('openmotics')
class Hardware(object):
"""
Abstracts the hardware related functions
"""
class BoardType(object):
BB = 'BB'
BBB = 'BBB'
BBGW = 'BBGW'
ESAFE = 'ESAFE'
BoardTypes = [BoardType.BB, BoardType.BBB, BoardType.BBGW]
class GPIO_DIRECTION(object):
IN = 'in'
OUT = 'out'
GPIO_BASE_PATH = '/sys/class/gpio'
GPIO_EXPORT_PATH = '{0}/export'.format(GPIO_BASE_PATH)
GPIO_DIRECTION_PATH = '{0}/gpio{{0}}/direction'.format(GPIO_BASE_PATH)
GPIO_VALUE_PATH = '{0}/gpio{{0}}/value'.format(GPIO_BASE_PATH)
class GPIO(object):
RS232_MODE = 77
P1_DATA_ENABLE = 113
P1_CABLE_CONNECTED = 115
# eMMC registers
EXT_CSD_DEVICE_LIFE_TIME_EST_TYP_B = 269
EXT_CSD_DEVICE_LIFE_TIME_EST_TYP_A = 268
EXT_CSD_PRE_EOL_INFO = 267
@staticmethod
def read_mmc_ext_csd():
registers = {
'life_time_est_typ_b': Hardware.EXT_CSD_DEVICE_LIFE_TIME_EST_TYP_B,
'life_time_est_typ_a': Hardware.EXT_CSD_DEVICE_LIFE_TIME_EST_TYP_A,
'eol_info': Hardware.EXT_CSD_PRE_EOL_INFO,
}
with open('/sys/kernel/debug/mmc1/mmc1:0001/ext_csd') as fd:
ecsd = fd.read()
ecsd_info = {}
# NOTE: this only works for fields with length 1
for reg, i in registers.items():
pos = i * 2
ecsd_info[reg] = int(ecsd[pos:pos + 2], 16)
return ecsd_info
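    # Informational sketch (not part of the original file): the returned dict looks
    # roughly like {'life_time_est_typ_a': 1, 'life_time_est_typ_b': 1, 'eol_info': 1};
    # per the eMMC spec, 0x01 in the life-time fields means about 0-10% of the
    # estimated device life has been used and 0x01 in eol_info means "normal".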
@staticmethod
def get_board_type():
try:
with open('/proc/device-tree/model', 'r') as mfh:
board_type = mfh.read().strip('\x00').replace(' ', '_')
if board_type in ['TI_AM335x_BeagleBone', 'TI_AM335x_BeagleBone_Black']:
return Hardware.BoardType.BBB
if board_type in ['TI_AM335x_BeagleBone_Green_Wireless']:
return Hardware.BoardType.BBGW
if board_type in ['TI_AM335x_esafe_Custom']:
return Hardware.BoardType.ESAFE
except IOError:
pass
try:
with open('/proc/meminfo', 'r') as memfh:
mem_total = memfh.readline()
if '254228 kB' in mem_total:
return Hardware.BoardType.BB
if '510716 kB' in mem_total:
return Hardware.BoardType.BBB
except IOError:
pass
logger.warning('could not detect board type, unknown')
return # Unknown
@staticmethod
def get_main_interface():
board_type = Hardware.get_board_type()
if board_type in [Hardware.BoardType.BB, Hardware.BoardType.BBB, Hardware.BoardType.ESAFE]:
return 'eth0'
if board_type == Hardware.BoardType.BBGW:
return 'wlan0'
logger.warning('Could not detect local interface. Fallback: lo')
return 'lo'
@staticmethod
def get_mac_address(): # type: () -> Union[str, None]
""" Get the main interface mac address """
interface = Hardware.get_main_interface()
try:
# This works both on Angstrom and Debian
with open('/sys/class/net/{0}/address'.format(interface)) as mac_address:
return mac_address.read().strip().upper()
except Exception:
return None
@staticmethod
def set_gpio_direction(gpio_pin, direction): # type: (int, str) -> None
try:
with open(Hardware.GPIO_EXPORT_PATH, 'w') as gpio:
gpio.write(str(gpio_pin))
except Exception:
pass # This raises every now and then if the pin was already exported
with open(Hardware.GPIO_DIRECTION_PATH.format(gpio_pin), 'w') as gpio:
gpio.write(direction)
@staticmethod
def set_gpio(gpio_pin, value): # type: (int, bool) -> None
Hardware.set_gpio_direction(gpio_pin=gpio_pin,
direction=Hardware.GPIO_DIRECTION.OUT)
with open(Hardware.GPIO_VALUE_PATH.format(gpio_pin), 'w') as gpio:
gpio.write('1' if value else '0')
@staticmethod
def enable_extension_rs485_port():
current_platform = Platform.get_platform()
if current_platform not in Platform.CoreTypes:
            raise RuntimeError('Platform {0} does not support the extension RS485 port'.format(current_platform))
Hardware.set_gpio(Hardware.GPIO.RS232_MODE, False)
class System(object):
"""
Abstracts the system related functions
"""
SERVICES = ('vpn_service', 'openmotics')
SYSTEMD_UNIT_MAP = {'openmotics': 'openmotics-api.service',
'vpn_service': 'openmotics-vpn.service'}
# runit action map to make sure the executable will be stopped,
# otherwise runit will return timeout, but not have killed the app
RUNIT_ACTION_MAP = {'status': 'status',
'stop': 'force-stop',
'restart': 'force-restart'}
class OS(object):
ANGSTROM = 'angstrom'
DEBIAN = 'debian'
BUILDROOT = 'buildroot'
@staticmethod
def restart_service(service):
# type: (str) -> None
System.run_service_action('restart', service)
@staticmethod
def run_service_action(action, service):
# type: (str, str) -> subprocess.Popen
unit_name = System.SYSTEMD_UNIT_MAP.get(service, service)
is_systemd = False
is_supervisor = False
is_runit = False
try:
subprocess.check_output(['systemctl', 'is-enabled', unit_name])
is_systemd = True
except subprocess.CalledProcessError:
is_systemd = False
except Exception: # Python 3 error (FileNotFoundErr) but is not known in python 2...
is_systemd = False
try:
subprocess.check_output(['supervisorctl', 'status', service])
is_supervisor = True
except subprocess.CalledProcessError:
is_supervisor = False
except Exception: # Python 3 error (FileNotFoundErr) but is not known in python 2...
is_supervisor = False
try:
runit_path = constants.get_runit_service_folder()
subprocess.check_output(['sv', 'status', os.path.join(runit_path, service)])
is_runit = True
except subprocess.CalledProcessError:
is_runit = False
except Exception: # Python 3 error (FileNotFoundErr) but is not known in python 2...
is_runit = False
if is_systemd:
return subprocess.Popen(['systemctl', action, '--no-pager', unit_name],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
close_fds=True)
elif is_supervisor:
return subprocess.Popen(['supervisorctl', action, service],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
close_fds=True)
elif is_runit:
runit_path = constants.get_runit_service_folder()
service_str = os.path.join(runit_path, service)
return subprocess.Popen(['sv', action, service_str],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
close_fds=True)
else:
raise RuntimeError('Could not find the appropriate service manager to run the service action command')
@staticmethod
def get_operating_system():
# type: () -> Dict[str, str]
operating_system = {}
try:
with open('/etc/os-release', 'r') as osfh:
lines = osfh.readlines()
for line in lines:
k, v = line.strip().split('=')
operating_system[k] = v
operating_system['ID'] = operating_system['ID'].lower()
except IOError:
logger.warning('could not detect operating system, unknown')
return operating_system
@staticmethod
def get_ip_address():
""" Get the local ip address. """
interface = Hardware.get_main_interface()
operating_system = System.get_operating_system()
try:
lines = subprocess.check_output('ifconfig {0}'.format(interface), shell=True)
            # In Python 3, lines is a bytes object, not a string, so decode it into a string
if not isinstance(lines, str):
lines = lines.decode('utf-8')
if operating_system['ID'] == System.OS.ANGSTROM:
return lines.split('\n')[1].strip().split(' ')[1].split(':')[1]
elif operating_system['ID'] == System.OS.DEBIAN:
return lines.split('\n')[1].strip().split(' ')[1]
elif operating_system['ID'] == System.OS.BUILDROOT:
return lines.split('\n')[1].strip().split(' ')[1].replace('addr:','') # The buildroot OS prefixes addresses with 'addr'
else:
return
except Exception:
return
@staticmethod
def get_vpn_service():
return 'openvpn.service' if System.get_operating_system().get('ID') == System.OS.ANGSTROM else 'openvpn-client@omcloud'
@staticmethod
def _use_pyopenssl():
return System.get_operating_system().get('ID') == System.OS.ANGSTROM
@staticmethod
def get_ssl_socket(sock, private_key_filename, certificate_filename):
if System._use_pyopenssl():
from OpenSSL import SSL
context = SSL.Context(SSL.SSLv23_METHOD)
context.use_privatekey_file(private_key_filename)
context.use_certificate_file(certificate_filename)
return SSL.Connection(context, sock)
import ssl
return ssl.wrap_socket(sock,
keyfile=private_key_filename,
certfile=certificate_filename,
ssl_version=ssl.PROTOCOL_SSLv23,
do_handshake_on_connect=False,
suppress_ragged_eofs=False)
@staticmethod
def setup_cherrypy_ssl(https_server):
if System._use_pyopenssl():
https_server.ssl_module = 'pyopenssl'
else:
import ssl
https_server.ssl_module = 'builtin'
if sys.version_info[:3] < (3, 6, 0):
https_server.ssl_context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
@staticmethod
def handle_socket_exception(connection, exception, logger):
if System._use_pyopenssl():
import select
from OpenSSL import SSL
if isinstance(exception, SSL.SysCallError):
if exception[0] == 11: # Temporarily unavailable
# This should be ok, just wait for more data to arrive
return True # continue
if exception[0] == -1: # Unexpected EOF
logger.info('Got (unexpected) EOF, aborting due to lost connection')
return False # break
elif isinstance(exception, SSL.WantReadError):
# This should be ok, just wait for more data to arrive
select.select([connection], [], [], 1.0)
return True # continue
else:
import select
import ssl
if isinstance(exception, ssl.SSLEOFError):
logger.info('Got SSLEOFError, aborting due to lost connection')
return False # break
elif isinstance(exception, ssl.SSLError):
if 'The read operation timed out' in str(exception):
# Got read timeout, just wait for data to arrive
return True # continue
raise exception
@staticmethod
def import_libs():
operating_system = System.get_operating_system().get('ID')
# check if running in python 2 mode, otherwise packages should be included in the build (PyInstaller)
if sys.version_info.major == 2:
import site
path = '/opt/openmotics/python-deps/lib/python2.7/site-packages'
if os.path.exists(path):
site.addsitedir(path)
sys.path.remove(path)
sys.path.insert(0, path)
# Patching where/if required
if operating_system == System.OS.ANGSTROM:
from pkg_resources import resource_filename, resource_stream, Requirement
resource_stream(Requirement.parse('requests'), 'requests/cacert.pem')
os.environ['REQUESTS_CA_BUNDLE'] = resource_filename(Requirement.parse('requests'), 'requests/cacert.pem')
class Platform(object):
"""
Abstracts the platform related functions
"""
class Type(object):
DUMMY = 'DUMMY'
CLASSIC = 'CLASSIC'
CORE_PLUS = 'CORE_PLUS'
CORE = 'CORE'
ESAFE = 'ESAFE'
AnyTypes = [Type.DUMMY]
ClassicTypes = [Type.CLASSIC]
CoreTypes = [Type.CORE, Type.CORE_PLUS]
EsafeTypes = [Type.ESAFE]
Types = AnyTypes + ClassicTypes + CoreTypes + EsafeTypes
@staticmethod
def get_platform():
# type: () -> str
from six.moves.configparser import ConfigParser
config = ConfigParser()
config.read(constants.get_config_file())
if config.has_option('OpenMotics', 'platform'):
platform = config.get('OpenMotics', 'platform')
if platform in Platform.Types:
return platform
return Platform.Type.CLASSIC
@staticmethod
def has_master_hardware():
# type: () -> bool
if Platform.get_platform() in [Platform.Type.DUMMY, Platform.Type.ESAFE]:
return False
return True
@staticmethod
def http_port():
# type: () -> int
try:
from six.moves.configparser import ConfigParser
config = ConfigParser()
config.read(constants.get_config_file())
http_port = int(config.get('OpenMotics', 'http_port'))
if http_port is None:
http_port = 80 # default http port
return http_port
except Exception:
return 80
@staticmethod
def https_port():
# type: () -> int
try:
from six.moves.configparser import ConfigParser
config = ConfigParser()
config.read(constants.get_config_file())
https_port = int(config.get('OpenMotics', 'https_port'))
if https_port is None:
https_port = 433 # default https port
return https_port
except Exception:
return 433
|
agpl-3.0
| -1,422,497,468,438,690,600 | 37.679803 | 136 | 0.58068 | false |
pferreir/indico-backup
|
indico/web/http_api/auth.py
|
1
|
4009
|
# -*- coding: utf-8 -*-
##
##
## This file is part of Indico.
## Copyright (C) 2002 - 2014 European Organization for Nuclear Research (CERN).
##
## Indico is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 3 of the
## License, or (at your option) any later version.
##
## Indico is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Indico;if not, see <http://www.gnu.org/licenses/>.
import datetime
import uuid
from persistent import Persistent
from persistent.list import PersistentList
from MaKaC.common.ObjectHolders import ObjectHolder
class APIKeyHolder(ObjectHolder):
idxName = 'apikeys'
def makeKey(self):
key = str(uuid.uuid4())
while self.hasKey(key):
key = str(uuid.uuid4())
return key
class APIKey(Persistent):
def __init__(self, user, key=None, signKey=None):
self._user = user
self._key = key
self._signKey = signKey
self._createdDT = datetime.datetime.now()
self._isBlocked = False
self._lastUsedDT = None
self._lastUsedIP = None
self._useCount = 0
self._lastPath = None
self._lastQuery = None
self._lastUseAuthenticated = False
self._oldKeys = PersistentList()
self._persistentAllowed = False
def __repr__(self):
return '<APIKey({0}, {1!r}, {2})>'.format(self._key, self._user, self._lastUsedDT)
def getUser(self):
return self._user
def setUser(self, user):
self._user = user
def getKey(self):
return self._key
getId = getKey
def setKey(self, key):
akh = APIKeyHolder()
if self.getId() is not None:
akh.remove(self)
if self.getKey():
self._oldKeys.append(self.getKey())
self._key = key
akh.add(self)
def getSignKey(self):
return self._signKey
def setSignKey(self, signKey):
self._signKey = signKey
def getCreatedDT(self):
return self._createdDT
def getLastUsedDT(self):
return self._lastUsedDT
def isBlocked(self):
return self._isBlocked
def setBlocked(self, blocked):
self._isBlocked = blocked
def getLastUsedIP(self):
return self._lastUsedIP
def getUseCount(self):
return self._useCount
def isLastUseAuthenticated(self):
return self._lastUseAuthenticated
def getLastRequest(self):
if not self._lastPath:
return None
if self._lastQuery:
return '%s?%s' % (self._lastPath, self._lastQuery)
return self._lastPath
def getOldKeys(self):
return self._oldKeys
def isPersistentAllowed(self):
return getattr(self, '_persistentAllowed', False)
def setPersistentAllowed(self, val):
self._persistentAllowed = val
def used(self, ip, path, query, authenticated):
self._lastUsedDT = datetime.datetime.now()
self._lastUsedIP = ip
self._lastPath = path
self._lastQuery = query
self._lastUseAuthenticated = authenticated
self._useCount += 1
def newKey(self):
akh = APIKeyHolder()
self.setKey(akh.makeKey())
return self.getKey()
def newSignKey(self):
self.setSignKey(str(uuid.uuid4()))
def create(self):
akh = APIKeyHolder()
if not self.getKey():
self.newKey()
if not self.getSignKey():
self.newSignKey()
self._user.setAPIKey(self)
akh.add(self)
def remove(self):
akh = APIKeyHolder()
self._user.setAPIKey(None)
akh.removeById(self.getKey())
|
gpl-3.0
| 41,385,180,629,257,624 | 27.034965 | 90 | 0.623597 | false |
ingadhoc/account-invoicing
|
account_invoice_partial/wizards/account_invoice_partial_wizard.py
|
1
|
1689
|
##############################################################################
# For copyright and license notices, see __manifest__.py file in module root
# directory
##############################################################################
from odoo import models, fields
from odoo.tools import float_round
class AccountInvoicePartialWizard(models.TransientModel):
_name = "account.invoice.partial.wizard"
_description = "Account Invoice Partial Wizard"
invoice_id = fields.Many2one(
'account.move',
default=lambda x: x._context.get('active_id', False),
)
percentage_to_invoice = fields.Float(
required=True,
)
rounding = fields.Float(
string='Rounding Precision',
required=True,
        help='Represents the smallest non-zero coinage value'
        ' (for example, 0.05).',
default=0.01,
)
rounding_method = fields.Selection(
required=True,
selection=[('UP', 'UP'),
('DOWN', 'DOWN'),
('HALF-UP', 'HALF-UP')],
default='HALF-UP',
help='The tie-breaking rule used for float rounding operations',
)
def compute_new_quantity(self):
self.ensure_one()
for line in self.invoice_id.invoice_line_ids.with_context(check_move_validity=False):
quantity = line.quantity * (self.percentage_to_invoice/100)
line.quantity = float_round(
quantity, precision_rounding=self.rounding,
rounding_method=self.rounding_method)
line._onchange_balance()
line.move_id._onchange_invoice_line_ids()
line._onchange_mark_recompute_taxes()
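    # Worked example (sketch, not part of the original module): with
    # percentage_to_invoice=33.0, rounding=0.01 and rounding_method='HALF-UP',
    # a line with quantity 10 becomes float_round(3.3, precision_rounding=0.01,
    # rounding_method='HALF-UP') == 3.3; with rounding=0.5 it would become 3.5.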
|
agpl-3.0
| -1,892,231,634,111,347,500 | 35.717391 | 93 | 0.557134 | false |
woodem/woo
|
scripts/test-OLD/peri3dController_triaxialCompression.py
|
1
|
3506
|
# peri3dController_triaxialCompression.py
# script that explains the functionality and input parameters of Peri3dController using the example of
# triaxial compression.
# First, a hydrostatic pressure is applied; then the strain along the z axis increases
# while the x- and y- stresses are held constant
# The simulation is run on a rotated cell to enable localization and strain softening
# (you can also try simulation with command sp.toSimulation() with no rotation,
# in this case there is almost no difference, but in script peri3dController_shear,
# the cell rotation has significant effect)
from woo import pack,plot,qt
# define material
O.materials.append(FrictMat())
# create periodic assembly of particles
initSize=1.2
sp=pack.randomPeriPack(radius=.05,initSize=Vector3(initSize,initSize,initSize),memoizeDb='/tmp/packDb.sqlite')
angle=0
rot=Matrix3(cos(angle),0,-sin(angle), 0,1,0, sin(angle),0,cos(angle))
sp.toSimulation(rot=rot)
# plotting
plot.live=False
plot.plots={'iter':('sx','sy','sz','syz','szx','sxy',),'iter_':('ex','ey','ez','eyz','ezx','exy',),'ez':('sz',)}
def plotAddData():
plot.addData(
iter=O.iter,iter_=O.iter,
sx=p3d.stress[0],sy=p3d.stress[1],sz=p3d.stress[2],
syz=p3d.stress[3],szx=p3d.stress[4],sxy=p3d.stress[5],
ex=p3d.strain[0],ey=p3d.strain[1],ez=p3d.strain[2],
eyz=p3d.strain[3],ezx=p3d.strain[4],exy=p3d.strain[5],
)
O.dt=utils.PWaveTimeStep()/2
# define the first part of simulation, hydrostatic compression
O.engines=[
ForceResetter(),
InsertionSortCollider([Bo1_Sphere_Aabb()]),
InteractionLoop(
[Ig2_Sphere_Sphere_Dem3DofGeom()],
[Ip2_FrictMat_FrictMat_FrictPhys()],[Law2_Dem3DofGeom_FrictPhys_CundallStrack()]),
NewtonIntegrator(),
Peri3dController( goal=(-1e7,-1e7,-1e7, 0,0,0), # Vector6 of prescribed final values
stressMask=0b000111,
nSteps=500,
doneHook='print("Hydrostatic load reached."); O.pause()',
youngEstimation=.5e9, # needed, when only nonzero prescribed values are stress
maxStrain=.5,
label='p3d'
),
PyRunner(command='plotAddData()',iterPeriod=1),
]
O.run(); O.wait()
# second part, z-axis straining and constant transversal stress
O.engines=[
ForceResetter(),
InsertionSortCollider([Bo1_Sphere_Aabb()]),
InteractionLoop(
[Ig2_Sphere_Sphere_Dem3DofGeom()],
[Ip2_FrictMat_FrictMat_FrictPhys()],[Law2_Dem3DofGeom_FrictPhys_CundallStrack()]),
NewtonIntegrator(),
Peri3dController( goal=(-1e7,-1e7,-4e-1, 0,0,0), # Vector6 of prescribed final values
stressMask=0b000011,
nSteps=1000,
xxPath=[(0,1),(1,1)], # the first (time) zero defines the initial value of stress considered nonzero
yyPath=[(0,1),(1,1)],
doneHook='print("Simulation with Peri3dController finished."); O.pause()',
maxStrain=.5,
label='p3d',
strain=p3d.strain, # continue from value reached in previous part
stressIdeal=Vector6(-1e7,-1e7,0, 0,0,0), # continue from value reached in previous part
),
PyRunner(command='plotAddData()',iterPeriod=1),
]
O.run();O.wait()
plot.plot()
|
gpl-2.0
| 8,124,165,752,349,868,000 | 43.379747 | 128 | 0.615231 | false |
knyghty/knyg.ht
|
pages/tests.py
|
1
|
1192
|
from django.core.urlresolvers import reverse
from django.test import TestCase
from .models import Page
class PageTests(TestCase):
def test_404(self):
response = self.client.get('/')
self.assertEqual(response.status_code, 404)
def test_home(self):
Page.objects.create(
url='',
title='Home',
icon='fa fa-home',
content='<p>Test</p>',
position=10
)
response = self.client.get('/')
self.assertEqual(response.status_code, 200)
self.assertContains(response, '<p>Test</p>')
def test_subpage(self):
root = Page.objects.create(
url='',
title='Home',
icon='fa fa-home',
content='<p>Test</p>',
position=10
)
Page.objects.create(
url='test',
title='Test',
icon='fa fa-test',
content='<p>Subpage</p>',
position=10,
parent=root
)
response = self.client.get(reverse('page', args=('test',)))
self.assertEqual(response.status_code, 200)
self.assertContains(response, '<p>Subpage</p>')
|
bsd-2-clause
| -5,908,421,540,647,864,000 | 25.488889 | 67 | 0.525168 | false |
TAMU-CPT/galaxy-tools
|
tools/gff3/gff3.py
|
1
|
11156
|
import copy
import logging
log = logging.getLogger()
log.setLevel(logging.WARN)
def feature_lambda(
feature_list,
test,
test_kwargs,
subfeatures=True,
parent=None,
invert=False,
recurse=True,
):
"""Recursively search through features, testing each with a test function, yielding matches.
    GFF3 is a hierarchical data structure, so we need to be able to recursively
    search through features. E.g. if you're looking for a feature with
    ID='bob.42', you can't just do a simple list comprehension with a test
    case. You don't know how deeply buried bob.42 will be in the feature tree. This is where feature_lambda steps in.
:type feature_list: list
:param feature_list: an iterable of features
:type test: function reference
:param test: a closure with the method signature (feature, **kwargs) where
the kwargs are those passed in the next argument. This
function should return True or False, True if the feature is
to be yielded as part of the main feature_lambda function, or
False if it is to be ignored. This function CAN mutate the
features passed to it (think "apply").
:type test_kwargs: dictionary
:param test_kwargs: kwargs to pass to your closure when it is called.
:type subfeatures: boolean
:param subfeatures: when a feature is matched, should just that feature be
yielded to the caller, or should the entire sub_feature
tree for that feature be included? subfeatures=True is
useful in cases such as searching for a gene feature,
and wanting to know what RBS/Shine_Dalgarno_sequences
are in the sub_feature tree (which can be accomplished
with two feature_lambda calls). subfeatures=False is
useful in cases when you want to process (and possibly
return) the entire feature tree, such as applying a
qualifier to every single feature.
:type invert: boolean
:param invert: Negate/invert the result of the filter.
:rtype: yielded list
:return: Yields a list of matching features.
"""
# Either the top level set of [features] or the subfeature attribute
for feature in feature_list:
feature._parent = parent
if not parent:
# Set to self so we cannot go above root.
feature._parent = feature
test_result = test(feature, **test_kwargs)
# if (not invert and test_result) or (invert and not test_result):
if invert ^ test_result:
if not subfeatures:
feature_copy = copy.deepcopy(feature)
feature_copy.sub_features = list()
yield feature_copy
else:
yield feature
if recurse and hasattr(feature, "sub_features"):
for x in feature_lambda(
feature.sub_features,
test,
test_kwargs,
subfeatures=subfeatures,
parent=feature,
invert=invert,
recurse=recurse,
):
yield x
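# Usage sketch (hypothetical file name; assumes the bcbio-gff package provides the parser):
#   from BCBio import GFF
#   for rec in GFF.parse(open("annotations.gff3")):
#       for cds in feature_lambda(rec.features, feature_test_type,
#                                 {"type": "CDS"}, subfeatures=False):
#           print(cds.id)
# Each yielded feature is a copy with sub_features cleared because subfeatures=False.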
def fetchParent(feature):
if not hasattr(feature, "_parent") or feature._parent is None:
return feature
else:
return fetchParent(feature._parent)
def feature_test_true(feature, **kwargs):
return True
def feature_test_type(feature, **kwargs):
if "type" in kwargs:
return str(feature.type).upper() == str(kwargs["type"]).upper()
elif "types" in kwargs:
for x in kwargs["types"]:
if str(feature.type).upper() == str(x).upper():
return True
return False
raise Exception("Incorrect feature_test_type call, need type or types")
def feature_test_qual_value(feature, **kwargs):
"""Test qualifier values.
For every feature, check that at least one value in
    feature.qualifiers[kwargs['qualifier']] is in kwargs['attribute_list']
"""
if isinstance(kwargs["qualifier"], list):
for qualifier in kwargs["qualifier"]:
for attribute_value in feature.qualifiers.get(qualifier, []):
if attribute_value in kwargs["attribute_list"]:
return True
else:
for attribute_value in feature.qualifiers.get(kwargs["qualifier"], []):
if attribute_value in kwargs["attribute_list"]:
return True
return False
def feature_test_location(feature, **kwargs):
if "strand" in kwargs:
if feature.location.strand != kwargs["strand"]:
return False
return feature.location.start <= kwargs["loc"] <= feature.location.end
def feature_test_quals(feature, **kwargs):
"""
Example::
        a = Feature(qualifiers={'Note': ['Some notes', 'Aasdf']})
        # Check if a contains a Note
        feature_test_quals(a, **{'Note': None})  # Returns True
        feature_test_quals(a, **{'Product': None})  # Returns False
        # Check if a contains a Note with a specific (substring) value
        feature_test_quals(a, **{'Note': ['ome']})  # Returns True
        feature_test_quals(a, **{'Note': ['other']})  # Returns False
"""
for key in kwargs:
if key not in feature.qualifiers:
return False
# Key is present, no value specified
if kwargs[key] is None:
return True
# Otherwise there is a key value we're looking for.
# so we make a list of matches
matches = []
        # And check all of the feature qualifier values
for value in feature.qualifiers[key]:
# For that kwargs[key] value
for x in kwargs[key]:
matches.append(x in value)
# If none matched, then we return false.
if not any(matches):
return False
return True
def feature_test_contains(feature, **kwargs):
if "index" in kwargs:
return feature.location.start < kwargs["index"] < feature.location.end
elif "range" in kwargs:
return (
feature.location.start < kwargs["range"]["start"] < feature.location.end
and feature.location.start < kwargs["range"]["end"] < feature.location.end
)
else:
raise RuntimeError("Must use index or range keyword")
def get_id(feature=None, parent_prefix=None):
result = ""
if parent_prefix is not None:
result += parent_prefix + "|"
if "locus_tag" in feature.qualifiers:
result += feature.qualifiers["locus_tag"][0]
elif "gene" in feature.qualifiers:
result += feature.qualifiers["gene"][0]
elif "Gene" in feature.qualifiers:
result += feature.qualifiers["Gene"][0]
elif "product" in feature.qualifiers:
result += feature.qualifiers["product"][0]
elif "Product" in feature.qualifiers:
result += feature.qualifiers["Product"][0]
elif "Name" in feature.qualifiers:
result += feature.qualifiers["Name"][0]
else:
return feature.id
# Leaving in case bad things happen.
# result += '%s_%s_%s_%s' % (
# feature.id,
# feature.location.start,
# feature.location.end,
# feature.location.strand
# )
return result
def get_gff3_id(gene):
return gene.qualifiers.get("Name", [gene.id])[0]
def ensure_location_in_bounds(start=0, end=0, parent_length=0):
# This prevents frameshift errors
while start < 0:
start += 3
while end < 0:
end += 3
while start > parent_length:
start -= 3
while end > parent_length:
end -= 3
return (start, end)
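# Illustrative check (values are made up): shifting by whole codons keeps the
# reading frame intact, e.g. ensure_location_in_bounds(start=-2, end=10,
# parent_length=9) returns (1, 7) rather than naively clamping to (0, 9).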
def coding_genes(feature_list):
for x in genes(feature_list):
if (
len(
list(
feature_lambda(
x.sub_features,
feature_test_type,
{"type": "CDS"},
subfeatures=False,
)
)
)
> 0
):
yield x
def genes(feature_list, feature_type="gene", sort=False):
"""
Simple filter to extract gene features from the feature set.
"""
if not sort:
for x in feature_lambda(
feature_list, feature_test_type, {"type": feature_type}, subfeatures=True
):
yield x
else:
data = list(genes(feature_list, feature_type=feature_type, sort=False))
data = sorted(data, key=lambda feature: feature.location.start)
for x in data:
yield x
def wa_unified_product_name(feature):
"""
    Try to figure out a name. We gave conflicting instructions, so
    this isn't as trivial as it should be. Sometimes it will be in
    'product' or 'Product', other times in 'Name'.
"""
# Manually applied tags.
protein_product = feature.qualifiers.get(
"product", feature.qualifiers.get("Product", [None])
)[0]
# If neither of those are available ...
if protein_product is None:
# And there's a name...
if "Name" in feature.qualifiers:
if not is_uuid(feature.qualifiers["Name"][0]):
protein_product = feature.qualifiers["Name"][0]
return protein_product
def is_uuid(name):
return name.count("-") == 4 and len(name) == 36
def get_rbs_from(gene):
# Normal RBS annotation types
rbs_rbs = list(
feature_lambda(
gene.sub_features, feature_test_type, {"type": "RBS"}, subfeatures=False
)
)
rbs_sds = list(
feature_lambda(
gene.sub_features,
feature_test_type,
{"type": "Shine_Dalgarno_sequence"},
subfeatures=False,
)
)
    # Apollo, frustratingly, stores RBS annotations as (short) exon features
apollo_exons = list(
feature_lambda(
gene.sub_features, feature_test_type, {"type": "exon"}, subfeatures=False
)
)
apollo_exons = [x for x in apollo_exons if len(x) < 10]
# These are more NCBI's style
regulatory_elements = list(
feature_lambda(
gene.sub_features,
feature_test_type,
{"type": "regulatory"},
subfeatures=False,
)
)
rbs_regulatory = list(
feature_lambda(
regulatory_elements,
feature_test_quals,
{"regulatory_class": ["ribosome_binding_site"]},
subfeatures=False,
)
)
# Here's hoping you find just one ;)
return rbs_rbs + rbs_sds + rbs_regulatory + apollo_exons
def nice_name(record):
"""
    Get the real name rather than NCBI IDs and so on. If that fails, return record.id.
"""
name = record.id
likely_parental_contig = list(genes(record.features, feature_type="contig"))
if len(likely_parental_contig) == 1:
name = likely_parental_contig[0].qualifiers.get("organism", [name])[0]
return name
def fsort(it):
for i in sorted(it, key=lambda x: int(x.location.start)):
yield i
|
gpl-3.0
| 1,892,358,789,450,623,700 | 31.242775 | 118 | 0.587755 | false |
iphoting/healthchecks
|
hc/front/tests/test_add_sms.py
|
1
|
3660
|
from django.test.utils import override_settings
from hc.api.models import Channel
from hc.test import BaseTestCase
@override_settings(TWILIO_ACCOUNT="foo", TWILIO_AUTH="foo", TWILIO_FROM="123")
class AddSmsTestCase(BaseTestCase):
def setUp(self):
super().setUp()
self.url = "/projects/%s/add_sms/" % self.project.code
def test_instructions_work(self):
self.client.login(username="alice@example.org", password="password")
r = self.client.get(self.url)
self.assertContains(r, "Get a SMS message")
@override_settings(USE_PAYMENTS=True)
def test_it_warns_about_limits(self):
self.profile.sms_limit = 0
self.profile.save()
self.client.login(username="alice@example.org", password="password")
r = self.client.get(self.url)
self.assertContains(r, "upgrade to a")
def test_it_creates_channel(self):
form = {"label": "My Phone", "phone": "+1234567890"}
self.client.login(username="alice@example.org", password="password")
r = self.client.post(self.url, form)
self.assertRedirects(r, self.channels_url)
c = Channel.objects.get()
self.assertEqual(c.kind, "sms")
self.assertEqual(c.phone_number, "+1234567890")
self.assertEqual(c.name, "My Phone")
self.assertEqual(c.project, self.project)
def test_it_rejects_bad_number(self):
for v in ["not a phone number address", False, 15, "+123456789A"]:
form = {"phone": v}
self.client.login(username="alice@example.org", password="password")
r = self.client.post(self.url, form)
self.assertContains(r, "Invalid phone number format.")
def test_it_trims_whitespace(self):
form = {"phone": " +1234567890 "}
self.client.login(username="alice@example.org", password="password")
self.client.post(self.url, form)
c = Channel.objects.get()
self.assertEqual(c.phone_number, "+1234567890")
@override_settings(TWILIO_AUTH=None)
def test_it_requires_credentials(self):
self.client.login(username="alice@example.org", password="password")
r = self.client.get(self.url)
self.assertEqual(r.status_code, 404)
def test_it_requires_rw_access(self):
self.bobs_membership.rw = False
self.bobs_membership.save()
self.client.login(username="bob@example.org", password="password")
r = self.client.get(self.url)
self.assertEqual(r.status_code, 403)
def test_it_strips_invisible_formatting_characters(self):
form = {"label": "My Phone", "phone": "\u202c+1234567890\u202c"}
self.client.login(username="alice@example.org", password="password")
r = self.client.post(self.url, form)
self.assertRedirects(r, self.channels_url)
c = Channel.objects.get()
self.assertEqual(c.phone_number, "+1234567890")
def test_it_strips_hyphens(self):
form = {"label": "My Phone", "phone": "+123-4567890"}
self.client.login(username="alice@example.org", password="password")
r = self.client.post(self.url, form)
self.assertRedirects(r, self.channels_url)
c = Channel.objects.get()
self.assertEqual(c.phone_number, "+1234567890")
def test_it_strips_spaces(self):
form = {"label": "My Phone", "phone": "+123 45 678 90"}
self.client.login(username="alice@example.org", password="password")
r = self.client.post(self.url, form)
self.assertRedirects(r, self.channels_url)
c = Channel.objects.get()
self.assertEqual(c.phone_number, "+1234567890")
|
bsd-3-clause
| 4,603,454,562,896,313,000 | 36.731959 | 80 | 0.637978 | false |
yatmingyatming/LogisticRegressionSGDMapReduce
|
model_encoder.py
|
1
|
1040
|
#!/usr/bin/env python
import sys, json, datetime, uuid, os
def main(separator='\t'):
id = str(uuid.uuid1())
date_created = datetime.datetime.utcnow().isoformat() + 'Z'
    mu = float(os.environ['MU']) if 'MU' in os.environ else 0.002
    eta = float(os.environ['ETA']) if 'ETA' in os.environ else 0.5
    n_models_key = os.environ['N_MODELS_KEY'] if 'N_MODELS_KEY' in os.environ else 'MODEL'
    T = os.environ['T'] if 'T' in os.environ else 1
parameters = {}
for line in sys.stdin:
(feature, sigma) = line.strip().split(separator)
parameters[feature] = float(sigma)
n_models = float(parameters[n_models_key])
for f, sigma in parameters.items():
parameters[f] = parameters[f] / n_models
del parameters[n_models_key]
print json.dumps({
"id": id,
"date_created": date_created,
"models": n_models,
"mu": mu,
"eta": eta,
"T": T,
"parameters": parameters
})
if __name__ == "__main__":
main()
|
mit
| -4,548,504,203,636,204,500 | 32.580645 | 96 | 0.580769 | false |
shakamunyi/sahara
|
sahara/conductor/manager.py
|
1
|
32113
|
# Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handles database requests from other Sahara services."""
import copy
from oslo_config import cfg
from sahara.conductor import resource as r
from sahara.db import base as db_base
from sahara.service.castellan import utils as key_manager
from sahara.service.edp.utils import shares
from sahara.utils import configs
from sahara.utils import crypto
CONF = cfg.CONF
CLUSTER_DEFAULTS = {
"cluster_configs": {},
"status": "undefined",
"anti_affinity": [],
"anti_affinity_ratio": 1,
"status_description": "",
"info": {},
"rollback_info": {},
"sahara_info": {},
"is_public": False,
"is_protected": False
}
NODE_GROUP_DEFAULTS = {
"node_processes": [],
"node_configs": {},
"volumes_per_node": 0,
"volumes_size": 0,
"volumes_availability_zone": None,
"volume_mount_prefix": "/volumes/disk",
"volume_type": None,
"floating_ip_pool": None,
"security_groups": None,
"auto_security_group": False,
"availability_zone": None,
"is_proxy_gateway": False,
"volume_local_to_instance": False,
}
NODE_GROUP_TEMPLATE_DEFAULTS = copy.deepcopy(NODE_GROUP_DEFAULTS)
NODE_GROUP_TEMPLATE_DEFAULTS.update({"is_public": False,
"is_protected": False})
INSTANCE_DEFAULTS = {
"volumes": [],
"storage_devices_number": 0
}
DATA_SOURCE_DEFAULTS = {
"credentials": {},
"is_public": False,
"is_protected": False
}
JOB_DEFAULTS = {
"is_public": False,
"is_protected": False
}
JOB_BINARY_DEFAULTS = {
"is_public": False,
"is_protected": False
}
JOB_BINARY_INTERNAL_DEFAULTS = {
"is_public": False,
"is_protected": False
}
JOB_EXECUTION_DEFAULTS = {
"is_public": False,
"is_protected": False
}
def _apply_defaults(values, defaults):
new_values = copy.deepcopy(defaults)
new_values.update(values)
return new_values
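# Rough illustration (values are invented): _apply_defaults({"name": "c1"},
# CLUSTER_DEFAULTS) returns a new dict carrying every CLUSTER_DEFAULTS key plus
# name="c1"; the defaults dict itself is never mutated thanks to the deepcopy.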
class ConductorManager(db_base.Base):
"""This class aimed to conduct things.
The methods in the base API for sahara-conductor are various proxy
operations that allows other services to get specific work done without
locally accessing the database.
Additionally it performs some template-to-object copying magic.
"""
def __init__(self):
super(ConductorManager, self).__init__()
# Common helpers
def _populate_node_groups(self, context, cluster):
node_groups = cluster.get('node_groups')
if not node_groups:
return []
populated_node_groups = []
for node_group in node_groups:
populated_node_group = self._populate_node_group(context,
node_group)
self._cleanup_node_group(populated_node_group)
populated_node_group["tenant_id"] = context.tenant_id
populated_node_groups.append(
populated_node_group)
return populated_node_groups
def _cleanup_node_group(self, node_group):
node_group.pop('id', None)
node_group.pop('created_at', None)
node_group.pop('updated_at', None)
def _populate_node_group(self, context, node_group):
node_group_merged = copy.deepcopy(NODE_GROUP_DEFAULTS)
ng_tmpl_id = node_group.get('node_group_template_id')
ng_tmpl = None
if ng_tmpl_id:
ng_tmpl = self.node_group_template_get(context, ng_tmpl_id)
self._cleanup_node_group(ng_tmpl)
node_group_merged.update(ng_tmpl)
node_group_merged.update(node_group)
if ng_tmpl:
node_group_merged['node_configs'] = configs.merge_configs(
ng_tmpl.get('node_configs'),
node_group.get('node_configs'))
return node_group_merged
# Cluster ops
def cluster_get(self, context, cluster, show_progress=False):
"""Return the cluster or None if it does not exist."""
return self.db.cluster_get(context, cluster, show_progress)
def cluster_get_all(self, context, regex_search=False, **kwargs):
"""Get all clusters filtered by **kwargs.
:param context: The context, and associated authentication, to use with
this operation
:param regex_search: If True, enable regex matching for filter
values. See the user guide for more information
on how regex matching is handled. If False,
no regex matching is done.
:param kwargs: Specifies values for named fields by which
to constrain the search
"""
return self.db.cluster_get_all(context, regex_search, **kwargs)
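    # Hypothetical call (the field names are only examples; any Cluster column
    # can be used as a keyword filter):
    #     self.cluster_get_all(context, regex_search=True, name="web-.*")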
def cluster_create(self, context, values):
"""Create a cluster from the values dictionary."""
# loading defaults
merged_values = copy.deepcopy(CLUSTER_DEFAULTS)
merged_values['tenant_id'] = context.tenant_id
private_key, public_key = crypto.generate_key_pair()
merged_values['management_private_key'] = private_key
merged_values['management_public_key'] = public_key
cluster_template_id = values.get('cluster_template_id')
c_tmpl = None
if cluster_template_id:
c_tmpl = self.cluster_template_get(context, cluster_template_id)
del c_tmpl['created_at']
del c_tmpl['updated_at']
del c_tmpl['id']
del c_tmpl['is_public']
del c_tmpl['is_protected']
del c_tmpl['tenant_id']
# updating with cluster_template values
merged_values.update(c_tmpl)
# updating with values provided in request
merged_values.update(values)
if c_tmpl:
merged_values['cluster_configs'] = configs.merge_configs(
c_tmpl.get('cluster_configs'),
values.get('cluster_configs'))
merged_values['node_groups'] = self._populate_node_groups(
context, merged_values)
return self.db.cluster_create(context, merged_values)
def cluster_update(self, context, cluster, values):
"""Set the given properties on cluster and update it."""
values = copy.deepcopy(values)
update_shares = values.get('shares')
if update_shares:
original_shares = (
self.db.cluster_get(context, cluster).get('shares', []))
updated_cluster = self.db.cluster_update(context, cluster, values)
if update_shares:
for share in update_shares:
# Only call mount_shares if we have new shares to mount.
# We only need one positive case to bother calling mount_shares
if share not in original_shares:
shares.mount_shares(r.ClusterResource(updated_cluster))
break
# Any shares that were on the original, but not on the updated
# list will be unmounted
unmount_list = [share for share in original_shares
if share not in update_shares]
if len(unmount_list) > 0:
shares.unmount_shares(r.ClusterResource(updated_cluster),
unmount_list)
return updated_cluster
def cluster_destroy(self, context, cluster):
"""Destroy the cluster or raise if it does not exist."""
self.db.cluster_destroy(context, cluster)
# Node Group ops
def node_group_add(self, context, cluster, values):
"""Create a Node Group from the values dictionary."""
values = copy.deepcopy(values)
values = self._populate_node_group(context, values)
values['tenant_id'] = context.tenant_id
return self.db.node_group_add(context, cluster, values)
def node_group_update(self, context, node_group, values):
"""Set the given properties on node_group and update it."""
values = copy.deepcopy(values)
self.db.node_group_update(context, node_group, values)
def node_group_remove(self, context, node_group):
"""Destroy the node_group or raise if it does not exist."""
self.db.node_group_remove(context, node_group)
# Instance ops
def instance_add(self, context, node_group, values):
"""Create an Instance from the values dictionary."""
values = copy.deepcopy(values)
values = _apply_defaults(values, INSTANCE_DEFAULTS)
values['tenant_id'] = context.tenant_id
return self.db.instance_add(context, node_group, values)
def instance_update(self, context, instance, values):
"""Set the given properties on Instance and update it."""
values = copy.deepcopy(values)
self.db.instance_update(context, instance, values)
def instance_remove(self, context, instance):
"""Destroy the Instance or raise if it does not exist."""
self.db.instance_remove(context, instance)
# Volumes ops
def append_volume(self, context, instance, volume_id):
"""Append volume_id to instance."""
self.db.append_volume(context, instance, volume_id)
def remove_volume(self, context, instance, volume_id):
"""Remove volume_id in instance."""
self.db.remove_volume(context, instance, volume_id)
# Cluster Template ops
def cluster_template_get(self, context, cluster_template):
"""Return the cluster_template or None if it does not exist."""
return self.db.cluster_template_get(context, cluster_template)
def cluster_template_get_all(self, context, regex_search=False, **kwargs):
"""Get all cluster templates filtered by **kwargs.
:param context: The context, and associated authentication, to use with
this operation
:param regex_search: If True, enable regex matching for filter
values. See the user guide for more information
on how regex matching is handled. If False,
no regex matching is done.
:param kwargs: Specifies values for named fields by which
to constrain the search
"""
return self.db.cluster_template_get_all(context,
regex_search, **kwargs)
def cluster_template_create(self, context, values):
"""Create a cluster_template from the values dictionary."""
values = copy.deepcopy(values)
values = _apply_defaults(values, CLUSTER_DEFAULTS)
values['tenant_id'] = context.tenant_id
values['node_groups'] = self._populate_node_groups(context, values)
return self.db.cluster_template_create(context, values)
def cluster_template_destroy(self, context, cluster_template,
ignore_prot_on_def=False):
"""Destroy the cluster_template or raise if it does not exist."""
self.db.cluster_template_destroy(context, cluster_template,
ignore_prot_on_def)
def cluster_template_update(self, context, id, values,
ignore_prot_on_def=False):
"""Update a cluster_template from the values dictionary."""
values = copy.deepcopy(values)
values['tenant_id'] = context.tenant_id
values['id'] = id
if 'node_groups' in values:
values['node_groups'] = self._populate_node_groups(context, values)
return self.db.cluster_template_update(context, values,
ignore_prot_on_def)
# Node Group Template ops
def node_group_template_get(self, context, node_group_template):
"""Return the Node Group Template or None if it does not exist."""
return self.db.node_group_template_get(context, node_group_template)
def node_group_template_get_all(self,
context, regex_search=False, **kwargs):
"""Get all NodeGroupTemplates filtered by **kwargs.
:param context: The context, and associated authentication, to use with
this operation
:param regex_search: If True, enable regex matching for filter
values. See the user guide for more information
on how regex matching is handled. If False,
no regex matching is done.
:param kwargs: Specifies values for named fields by which
to constrain the search
"""
return self.db.node_group_template_get_all(context,
regex_search, **kwargs)
def node_group_template_create(self, context, values):
"""Create a Node Group Template from the values dictionary."""
values = copy.deepcopy(values)
values = _apply_defaults(values, NODE_GROUP_TEMPLATE_DEFAULTS)
values['tenant_id'] = context.tenant_id
return self.db.node_group_template_create(context, values)
def node_group_template_destroy(self, context, node_group_template,
ignore_prot_on_def=False):
"""Destroy the Node Group Template or raise if it does not exist."""
self.db.node_group_template_destroy(context, node_group_template,
ignore_prot_on_def)
def node_group_template_update(self, context, id, values,
ignore_prot_on_def=False):
"""Update a Node Group Template from the values dictionary."""
values = copy.deepcopy(values)
values['tenant_id'] = context.tenant_id
values['id'] = id
return self.db.node_group_template_update(context, values,
ignore_prot_on_def)
# Data Source ops
def data_source_get(self, context, data_source):
"""Return the Data Source or None if it does not exist."""
return self.db.data_source_get(context, data_source)
def data_source_get_all(self, context, regex_search=False, **kwargs):
"""Get all Data Sources filtered by **kwargs.
:param context: The context, and associated authentication, to use with
this operation
:param regex_search: If True, enable regex matching for filter
values. See the user guide for more information
on how regex matching is handled. If False,
no regex matching is done.
:param kwargs: Specifies values for named fields by which
to constrain the search
"""
return self.db.data_source_get_all(context, regex_search, **kwargs)
def data_source_count(self, context, **kwargs):
"""Count Data Sources filtered by **kwargs.
Uses sqlalchemy "in_" clause for any tuple values
Uses sqlalchemy "like" clause for any string values containing %
"""
return self.db.data_source_count(context, **kwargs)
def data_source_create(self, context, values):
"""Create a Data Source from the values dictionary."""
values = copy.deepcopy(values)
values = _apply_defaults(values, DATA_SOURCE_DEFAULTS)
values['tenant_id'] = context.tenant_id
# if credentials are being passed in, we use the key_manager
# to store the password.
if (values.get('credentials') and
values['credentials'].get('password')):
values['credentials']['password'] = key_manager.store_secret(
values['credentials']['password'], context)
return self.db.data_source_create(context, values)
def data_source_destroy(self, context, data_source):
"""Destroy the Data Source or raise if it does not exist."""
# in cases where the credentials to access the data source are
# stored with the record and the external key manager is being
# used, we need to delete the key from the external manager.
if (CONF.use_barbican_key_manager and not
CONF.use_domain_for_proxy_users):
ds_record = self.data_source_get(context, data_source)
if (ds_record.get('credentials') and
ds_record['credentials'].get('password')):
key_manager.delete_secret(
ds_record['credentials']['password'], context)
return self.db.data_source_destroy(context, data_source)
def data_source_update(self, context, id, values):
"""Update the Data Source or raise if it does not exist."""
values = copy.deepcopy(values)
values["id"] = id
# in cases where the credentials to access the data source are
# stored with the record and the external key manager is being
# used, we need to delete the old key from the manager and
# create a new one. the other option here would be to retrieve
# the previous key and check to see if it has changed, but it
# seems less expensive to just delete the old and create a new
# one.
# it should be noted that the jsonschema validation ensures that
# if the proxy domain is not in use then credentials must be
# sent with this record.
if (CONF.use_barbican_key_manager and not
CONF.use_domain_for_proxy_users):
# first we retrieve the original record to get the old key
# uuid, and delete it.
ds_record = self.data_source_get(context, id)
if (ds_record.get('credentials') and
ds_record['credentials'].get('password')):
key_manager.delete_secret(
ds_record['credentials']['password'], context)
# next we create the new key.
if (values.get('credentials') and
values['credentials'].get('password')):
values['credentials']['password'] = key_manager.store_secret(
values['credentials']['password'], context)
return self.db.data_source_update(context, values)
# JobExecution ops
def job_execution_get(self, context, job_execution):
"""Return the JobExecution or None if it does not exist."""
return self.db.job_execution_get(context, job_execution)
def job_execution_get_all(self, context, regex_search=False, **kwargs):
"""Get all JobExecutions filtered by **kwargs.
kwargs key values may be the names of fields in a JobExecution
plus the following special values with the indicated meaning:
'cluster.name' -- name of the Cluster referenced by the JobExecution
'job.name' -- name of the Job referenced by the JobExecution
'status' -- JobExecution['info']['status']
:param context: The context, and associated authentication, to use with
this operation
:param regex_search: If True, enable regex matching for filter
values. See the user guide for more information
on how regex matching is handled. If False,
no regex matching is done.
:param kwargs: Specifies values for named fields by which
to constrain the search
"""
return self.db.job_execution_get_all(context, regex_search, **kwargs)
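    # Sketch of the special keys described in the docstring above (values are
    # invented):
    #     self.job_execution_get_all(context, **{"cluster.name": "demo-cluster",
    #                                            "status": "SUCCEEDED"})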
def job_execution_count(self, context, **kwargs):
"""Count number of JobExecutions filtered by **kwargs.
e.g. job_execution_count(cluster_id=12, input_id=123)
"""
return self.db.job_execution_count(context, **kwargs)
def job_execution_create(self, context, values):
"""Create a JobExecution from the values dictionary."""
values = copy.deepcopy(values)
values = _apply_defaults(values, JOB_EXECUTION_DEFAULTS)
values['tenant_id'] = context.tenant_id
return self.db.job_execution_create(context, values)
def job_execution_update(self, context, job_execution, values):
"""Updates a JobExecution from the values dictionary."""
values = copy.deepcopy(values)
return self.db.job_execution_update(context, job_execution, values)
def job_execution_destroy(self, context, job_execution):
"""Destroy the JobExecution or raise if it does not exist."""
return self.db.job_execution_destroy(context, job_execution)
# Job ops
def job_get(self, context, job):
"""Return the Job or None if it does not exist."""
return self.db.job_get(context, job)
def job_get_all(self, context, regex_search=False, **kwargs):
"""Get all Jobs filtered by **kwargs.
:param context: The context, and associated authentication, to use with
this operation
:param regex_search: If True, enable regex matching for filter
values. See the user guide for more information
on how regex matching is handled. If False,
no regex matching is done.
:param kwargs: Specifies values for named fields by which
to constrain the search
"""
return self.db.job_get_all(context, regex_search, **kwargs)
def job_create(self, context, values):
"""Create a Job from the values dictionary."""
values = copy.deepcopy(values)
values = _apply_defaults(values, JOB_DEFAULTS)
values['tenant_id'] = context.tenant_id
return self.db.job_create(context, values)
def job_update(self, context, job, values):
"""Updates a Job from the values dictionary."""
return self.db.job_update(context, job, values)
def job_destroy(self, context, job):
"""Destroy the Job or raise if it does not exist."""
self.db.job_destroy(context, job)
# JobBinary ops
def job_binary_get_all(self, context, regex_search=False, **kwargs):
"""Get all JobBinarys filtered by **kwargs.
:param context: The context, and associated authentication, to use with
this operation
:param regex_search: If True, enable regex matching for filter
values. See the user guide for more information
on how regex matching is handled. If False,
no regex matching is done.
:param kwargs: Specifies values for named fields by which
to constrain the search
e.g. job_binary_get_all(name='wordcount.jar')
"""
return self.db.job_binary_get_all(context,
regex_search, **kwargs)
def job_binary_get(self, context, job_binary_id):
"""Return the JobBinary or None if it does not exist."""
return self.db.job_binary_get(context, job_binary_id)
def job_binary_create(self, context, values):
"""Create a JobBinary from the values dictionary."""
values = copy.deepcopy(values)
values = _apply_defaults(values, JOB_BINARY_DEFAULTS)
values['tenant_id'] = context.tenant_id
# if credentials are being passed in, we use the key_manager
# to store the password.
if values.get('extra') and values['extra'].get('password'):
values['extra']['password'] = key_manager.store_secret(
values['extra']['password'], context)
return self.db.job_binary_create(context, values)
def job_binary_destroy(self, context, job_binary):
"""Destroy the JobBinary or raise if it does not exist."""
# in cases where the credentials to access the job binary are
# stored with the record and the external key manager is being
# used, we need to delete the key from the external manager.
if (CONF.use_barbican_key_manager and not
CONF.use_domain_for_proxy_users):
jb_record = self.job_binary_get(context, job_binary)
if jb_record.get('extra') and jb_record['extra'].get('password'):
key_manager.delete_secret(jb_record['extra']['password'],
context)
self.db.job_binary_destroy(context, job_binary)
def job_binary_update(self, context, id, values):
"""Update a JobBinary from the values dictionary."""
values = copy.deepcopy(values)
values['id'] = id
# in cases where the credentials to access the job binary are
# stored with the record and the external key manager is being
# used, we need to delete the old key from the manager and
# create a new one. the other option here would be to retrieve
# the previous key and check to see if it has changed, but it
# seems less expensive to just delete the old and create a new
# one.
if (CONF.use_barbican_key_manager and not
CONF.use_domain_for_proxy_users):
# first we retrieve the original record to get the old key
# uuid, and delete it.
jb_record = self.job_binary_get(context, id)
if jb_record.get('extra') and jb_record['extra'].get('password'):
key_manager.delete_secret(jb_record['extra']['password'],
context)
# next we create the new key.
if values.get('extra') and values['extra'].get('password'):
values['extra']['password'] = key_manager.store_secret(
values['extra']['password'], context)
return self.db.job_binary_update(context, values)
# JobBinaryInternal ops
def job_binary_internal_get_all(self, context,
regex_search=False, **kwargs):
"""Get all JobBinaryInternals filtered by **kwargs.
The JobBinaryInternals returned do not contain a data field.
:param context: The context, and associated authentication, to use with
this operation
:param regex_search: If True, enable regex matching for filter
values. See the user guide for more information
on how regex matching is handled. If False,
no regex matching is done.
:param kwargs: Specifies values for named fields by which
to constrain the search
"""
return self.db.job_binary_internal_get_all(context,
regex_search, **kwargs)
def job_binary_internal_get(self, context, job_binary_internal_id):
"""Return the JobBinaryInternal or None if it does not exist
The JobBinaryInternal returned does not contain a data field.
"""
return self.db.job_binary_internal_get(context, job_binary_internal_id)
def job_binary_internal_create(self, context, values):
"""Create a JobBinaryInternal from the values dictionary."""
# Since values["data"] is (should be) encoded as a string
# here the deepcopy of values only incs a reference count on data.
# This is nice, since data could be big...
values = copy.deepcopy(values)
values = _apply_defaults(values, JOB_BINARY_INTERNAL_DEFAULTS)
values['tenant_id'] = context.tenant_id
return self.db.job_binary_internal_create(context, values)
def job_binary_internal_destroy(self, context, job_binary_internal):
"""Destroy the JobBinaryInternal or raise if it does not exist."""
self.db.job_binary_internal_destroy(context, job_binary_internal)
def job_binary_internal_get_raw_data(self,
context, job_binary_internal_id):
"""Return the binary data field from a JobBinaryInternal."""
return self.db.job_binary_internal_get_raw_data(
context,
job_binary_internal_id)
def job_binary_internal_update(self, context, id, values):
"""Updates a JobBinaryInternal from the values dictionary."""
return self.db.job_binary_internal_update(context, id, values)
# Events ops
def cluster_provision_step_add(self, context, cluster_id, values):
"""Create a provisioning step assigned to cluster from values dict."""
return self.db.cluster_provision_step_add(context, cluster_id, values)
def cluster_provision_step_update(self, context, provision_step):
"""Update the cluster provisioning step."""
return self.db.cluster_provision_step_update(context, provision_step)
def cluster_provision_progress_update(self, context, cluster_id):
"""Return cluster with provision progress updated field."""
return self.db.cluster_provision_progress_update(context, cluster_id)
def cluster_event_add(self, context, provision_step, values):
"""Assign new event to the specified provision step."""
return self.db.cluster_event_add(context, provision_step, values)
# Cluster verifications / health checks ops
def cluster_verification_add(self, context, cluster_id, values):
"""Return created verification for the specified cluster."""
return self.db.cluster_verification_add(context, cluster_id, values)
def cluster_verification_get(self, context, verification_id):
"""Return verification with the specified verification_id."""
return self.db.cluster_verification_get(context, verification_id)
def cluster_verification_update(self, context, verification_id, values):
"""Return updated verification with the specified verification_id."""
return self.db.cluster_verification_update(
context, verification_id, values)
def cluster_verification_delete(self, context, verification_id):
""""Delete verification with the specified id."""
return self.db.cluster_verification_delete(context, verification_id)
def cluster_health_check_add(self, context, verification_id, values):
"""Return created health check in the specified verification."""
return self.db.cluster_health_check_add(
context, verification_id, values)
def cluster_health_check_get(self, context, health_check_id):
"""Return health check with the specified health_check_id."""
return self.db.cluster_health_check_get(context, health_check_id)
def cluster_health_check_update(self, context, health_check_id, values):
"""Return updated health check with the specified health_check_id."""
return self.db.cluster_health_check_update(
context, health_check_id, values)
def plugin_create(self, context, values):
"""Return created DB entry for plugin."""
return self.db.plugin_create(context, values)
def plugin_get(self, context, name):
"""Return DB entry for plugin."""
return self.db.plugin_get(context, name)
def plugin_get_all(self, context):
"""Return DB entries for all plugins."""
return self.db.plugin_get_all(context)
def plugin_update(self, context, name, values):
"""Return updated DB entry for plugin."""
return self.db.plugin_update(context, name, values)
def plugin_remove(self, context, name):
"""Remove DB entry for plugin."""
return self.db.plugin_remove(context, name)
|
apache-2.0
| 1,892,088,547,725,039,400 | 40.27635 | 79 | 0.616915 | false |
gillett-hernandez/project-euler
|
Python/problem_192.py
|
1
|
9573
|
# -*- coding: utf-8 -*-
# @Author: Gillett Hernandez
# @Date: 2017-12-11 19:30:19
# @Last Modified by: Gillett Hernandez
# @Last Modified time: 2017-12-16 17:49:47
from euler_funcs import ContinuedFraction, solve_pell, analyze_triplets, sconvergents, lag, is_square, timed
from euler_funcs import timed, construct, construct2, error, ContinuedFraction
from euler_funcs import best_rational_approximation
from math import sqrt, gcd, log
from decimal import *
from itertools import count
from fractions import Fraction
import decimal
import fractions
import time
errorf = error
getcontext().prec = 55
# class Pair:
# def __init__(self, x, y):
# self.x = x
# self.y = y
# def __add__(self, other):
# if isinstance(other, (list, tuple)):
# if len(other) != 2:
# raise TypeError("unsupported operand length for +")
# return Pair(self.x + other[0], self.y + other[1])
# return Pair(self.x+other.x, self.y+other.y)
# def __radd__(self, other):
# return self + other
# def __sub__(self, other):
# if isinstance(other, (list, tuple)):
# if len(other) != 2:
# raise TypeError("unsupported operand length for -")
# return Pair(self.x - other[0], self.y - other[1])
# return Pair(self.x-other.x, self.y-other.y)
# def __rsub__(self, other):
# if isinstance(other, (list, tuple)):
# if len(other) != 2:
# raise TypeError("unsupported operand length for -")
# return Pair(other[0] - self.x, other[1] - self.y)
# elif isinstance(other, Pair):
# return Pair(other.x-self.x, other.y-self.y)
# else:
# raise TypeError("oops")
# def __mul__(self, other):
# if isinstance(other, Pair):
# raise TypeError("unsupported operand type(s) for *: 'Pair' and 'Pair'")
# else:
# return Pair(other*self.x, other*self.y)
# def __rmul__(self, other):
# if isinstance(other, Pair):
# raise TypeError("unsupported operand type(s) for *: 'Pair' and 'Pair'")
# else:
# return Pair(other*self.x, other*self.y)
# def __eq__(self, other):
# if isinstance(other, (tuple,list)):
# return self.x == other[0] and self.y == other[1] and len(other) == 2
# elif isinstance(other, Pair):
# return self.x == other.x and self.y == other.y
# else:
# raise NotImplementedError(f"No known conversion of {type(other)} to Pair")
# def __getitem__(self, key):
# return (self.x, self.y)[key]
# def __repr__(self):
# return f"Pair({self.x}, {self.y})"
# def error(d):
# def ed(a,b):
# try:
# if b == 0:
# return float("inf")
# return Decimal(a)/Decimal(b) - Decimal(d).sqrt()
# except:
# print(a,b,d)
# raise
# return ed
# def approximate_unit(d):
# cd = ContinuedFraction.from_(d)
# ed = modified_error(d)
# max_convergence = 0
# unit = 0
# last_error = -1
# for c1, c2 in lag((conv.numerator, conv.denominator) for conv in cd.cng(100)):
# e1 = ed(*c1)
# e2 = ed(*c2)
# if abs(last_error - e1/e2) > max_convergence:
# max_convergence = abs(last_error - e1/e2)
# unit = abs(e1/e2)
# last_error = e1/e2
# return unit
# def reconstruct(l, muls):
# nl = [(pair[0]*m, pair[1]*m) for pair,m in zip(l, muls)]
# s = [0, 0]
# for e in nl:
# s[0] += e[0]
# s[1] += e[1]
# return s
# def overlimit(pair, limit):
# return abs(pair[1]) > limit
# modified_error = lambda d: lambda a,b: (d*b**2-a**2)/((b*(Decimal(d).sqrt())+a))
# # modified_error = lambda d: lambda a,b: Decimal(a)/Decimal(b) - Decimal(d).sqrt()
# def BQA(d, limit=10**13, debug=False):
# cd = ContinuedFraction.from_(d)
# ed = error(d)
# if debug: print(ed)
# ud = approximate_unit(d)
# if debug: print(f"unit for {d} = {ud}")
# sqrtd = Decimal(d).sqrt()
# # print(cd.convergent_list(40))
# # target = target % 1
# l = []
# t1 = time.clock()
# for conv in cd.cng():
# a,b = conv.numerator, conv.denominator
# if b > limit:
# break
# l.append((a,b))
# t2 = time.clock()
# print(t2-t1)
# for i,(a,b) in enumerate(l):
# if debug: print("\n")
# e = ed(a,b)
# na, nb = 2*b-a, b
# if debug: print(f"{i}, {log(b,10)}, {b}*√{d} mod 1 = {e}, {a},{b}")
# # find start
# start = l[0][0]
# offset = int(start*sqrtd)
# answer = Pair(offset, start)
# e = ed(offset, start)
# if debug:
# print("starting answer =",offset-3, start, "e =",e)
# print(start*sqrtd)
# s = 1
# if debug: print("\n")
# orig_len = len(l)
# def getclose(P, l, mlimit=(1+int(ud))):
# # if debug: print("\n")
# if debug: print(f"{' '*(orig_len-len(l))}start of function, P = {P}, l[0] = {l[0]}")
# if len(l) == 0:
# if debug: print(f"{' '*(orig_len-len(l))}P = {P}")
# return [P]
# # get first of Ls currently passed through
# first = l[0]
# closest = float("inf")
# # get second
# second = l[1]
# closest = float("inf")
# if len(l) == 2:
# for mul1 in range(-mlimit, 1+mlimit):
# nP1 = P + mul1*Pair(*first)
# if nP1 == [0, 0]:
# continue
# e1 = ed(*nP1)
# sdiff1 = abs(e1)
# if debug: print(f"{' '*(orig_len-len(l))}{mul1:3}, {e1}, {sdiff1}")
# for mul2 in range(-mlimit, 1+mlimit):
# nP2 = nP1 + mul2*Pair(*second)
# if nP2 == [0, 0]:
# continue
# e2 = ed(*nP2)
# sdiff2 = abs(e2)
# if debug: print(f"{' '*(orig_len-len(l))}{mul2:3}, {e2}, {sdiff2}")
# if overlimit(nP2, limit):
# continue
# if abs(sdiff2) < closest:
# if debug: print(f"{' '*(orig_len-len(l))}getting closer, muls={[mul1,mul2]}, L[:2] = {first, second}, nPs={[nP1,nP2]}")
# closest = sdiff2
# closest_nP = nP1
# closest_nP2 = nP2
# closest_muls = [mul1,mul2]
# if closest == float("inf"):
# print("waatttt error!!!!")
# return []
# return closest_muls + [closest_nP2]
# else:
# for mul1 in range(-mlimit, 1+mlimit):
# nP1 = P + mul1*Pair(*first)
# if nP1 == [0, 0]:
# continue
# e1 = ed(*nP1)
# sdiff1 = abs(e1)
# if debug: print(f"{' '*(orig_len-len(l))}{mul1:3}, {e1}, {sdiff1}")
# if overlimit(nP1, limit):
# continue
# if abs(sdiff1) < closest:
# if debug: print(f"{' '*(orig_len-len(l))}getting closer, muls={mul1}, L[0] = {first}, nPs={nP1}")
# closest = sdiff1
# closest_nP = nP1
# closest_mul = mul1
# return [closest_mul] + getclose(closest_nP, l[1:], mlimit)
# rl = getclose(answer, l)
# reconstructed = reconstruct(l, rl[:-1])
# r = answer + reconstructed
# if debug:
# print(f"r = {r} and tail of rl = {rl[-1]}")
# print(r, rl)
# print("error of r:", ed(*r))
# return (r, answer, l, rl)
# @timed
# def doit1(sn, bound, debug=False):
# return Fraction(*BQA(sn,bound,debug)[0])
# @timed
def doit2(d, bound):
return best_rational_approximation(decimal.Decimal(d).sqrt(), ContinuedFraction.from_(d),bound)
def test():
print(Fraction(10**12+1,10**12))
# print("error1 =", errorf(doit1(13,10**12), decimal.Decimal(13).sqrt()))
temp = doit2(13,10**12)
print(temp)
print("error2 =", errorf(*temp, decimal.Decimal(13)))
# print(doit1(13,100))
print("100",doit2(13,100))
# print(doit1(13,30))
print(doit2(13,30))
# print("error1 =", errorf(doit1(13,100), decimal.Decimal(13).sqrt()))
temp = doit2(13,100)
print("error2 =", errorf(*temp, decimal.Decimal(13)))
# print(best_rational_approximation2(decimal.Decimal(2).sqrt(), ContinuedFraction.from_(2), 100))
import timeit
print(timeit.repeat("best_rational_approximation(decimal.Decimal(2).sqrt(), ContinuedFraction.from_(2), 10**3)",number=1000, globals=globals()))
# print(timeit.repeat("best_rational_approximation(decimal.Decimal(2).sqrt(), ContinuedFraction.from_(2), 10**3)",number=1000, globals=globals()))
# print(doit(26,100))
# print(doit(39,100))
# print(doit(52,100))
@timed
def main():
s = 0
odd = 1
ct = 0
for i in range(1,100001):
if is_square(i):
print(i)
continue
# ct += 1
# if ct == odd:
# odd += 2
# ct = 0
# continue
s += doit2(i, 10**12)[-1]
# print(i, s)
print(s)
if __name__ == '__main__':
test()
main()
# answer ends in 5_927_998_347
|
mit
| 7,420,151,602,678,209,000 | 31.444068 | 150 | 0.491171 | false |
vokimon/menjobe
|
menjobe/models.py
|
1
|
1180
|
from django.db import models
from mptt.models import MPTTModel, TreeForeignKey
class Product(MPTTModel) :
name = models.CharField(
max_length=200,
default=None,
unique=True,
)
group = TreeForeignKey('self',
null=True,
blank=True,
related_name='subgroups',
default=None,
)
class MPTTMeta:
parent_attr = 'group'
def __str__(self) :
return self.name
def retailPoints(self) :
descendantsIds = list(self
.get_descendants(include_self=True)
.values_list("id", flat=True)
)
return RetailPoint.objects.filter(
retailedProducts__id__in = descendantsIds
).distinct()
class RetailPoint(models.Model) :
name = models.CharField(
max_length=200,
default=None,
unique=True,
)
description = models.TextField(default="")
address = models.TextField(default=None, null=True, blank=False)
retailedProducts = models.ManyToManyField(Product, blank=True, null=False)
def descriptionHtml(self) :
import markdown
return markdown.markdown(self.description)
def __str__(self) :
return self.name
def sells(self, *products) :
self.retailedProducts.add(*products)
def products(self) :
return self.retailedProducts.all()
|
agpl-3.0
| 2,554,657,755,027,349,000 | 19.344828 | 75 | 0.713559 | false |
alexanderdreyer/polybori-debian
|
pyroot/polybori/gbcore.py
|
1
|
19759
|
from polybori.nf import *
from polybori.PyPolyBoRi import *
from polybori.ll import eliminate, ll_encode
from time import time
from copy import copy
from itertools import chain
from inspect import getargspec
from polybori.statistics import used_vars, used_vars_set
from polybori.heuristics import dense_system,gauss_on_linear
from polybori.easy_polynomials import easy_linear_polynomials
from itertools import chain
from polybori.interpolate import lex_groebner_basis_for_polynomial_via_variety
from inspect import getargspec
from polybori.fglm import _fglm
def get_options_from_function(f):
(argnames,varargs,varopts,defaults) = getargspec(f)
return dict(
zip(
argnames[-len(defaults):],defaults))
def filter_oldstyle_options(**options):
filtered = dict()
for key in options.keys():
newkey = key
for prefix in ['', 'use_', 'opt_allow_', 'opt_']:
newkey = newkey.replace(prefix, '')
filtered[newkey] = options[key]
return filtered
def filter_newstyle_options(func, **options):
allowed = get_options_from_function(func).keys()
filtered = dict()
for key in options.keys():
for prefix in ['', 'use_', 'opt_', 'opt_allow_']:
if prefix + key in allowed:
filtered[prefix + key] = options[key]
return filtered
def owns_one_constant(I):
"""Determines whether I contains the constant one polynomial."""
for p in I:
if p.is_one():
return True
return False
def want_interpolation_gb(G):
if not G:
return False
if G[0].ring().get_order_code()!=OrderCode.lp:
return False
if len(G)!=1:
return False
p=Polynomial(G[0])
if p.lead_deg()<=1:
return False
if p.set().n_nodes()>1000:
return False
return True
def ll_is_good(I):
lex_lead=set()
for p in I:
if not p.is_zero():
m=p.lex_lead()
if m.deg()==1:
lex_lead.add(iter(m.variables()).next().index())
if len(lex_lead)>=0.8*len(I):
uv=used_vars_set(I).deg()#don't use len here, which will yield 1
if len(lex_lead)>0.9*uv:
if uv- len(lex_lead)>16:
return "llfirstonthefly"
else:
return "llfirst"
return False
def ll_heuristic(d):
d=copy(d)
I=d["I"]
if (not "llfirstonthefly" in d) and (not "llfirst" in d):
hint=ll_is_good(I)
if hint:
d[hint]=True
return d
def change_order_heuristic(d):
d_orig=d
d=copy(d)
I=d["I"]
if not I:
return d
switch_table={OrderCode.lp:OrderCode.dp_asc,OrderCode.dlex:OrderCode.dp_asc}
if not "other_ordering_first" in d:
        # TODO: after ll the situation might look much different, so the heuristic may be in the wrong place
code=iter(I).next().ring().get_order_code()
if code in switch_table:
max_non_linear=len(I)/2
non_linear=0
if code==OrderCode.lp:
for p in I:
if p.lead_deg()>1:
non_linear=non_linear+1
if non_linear>max_non_linear:
break
if (non_linear>max_non_linear) or (code!=OrderCode.lp):
other_ordering_opts=copy(d_orig)
other_ordering_opts["switch_to"]=switch_table[code]
d["other_ordering_first"]=other_ordering_opts
return d
def interpolation_gb_heuristic(d):
d=copy(d)
I=d["I"]
if not d.get("other_ordering_opts",False) and want_interpolation_gb(I):
d["interpolation_gb"]=True
d["other_ordering_first"]=False
return d
def linear_algebra_heuristic(d):
d_orig=d
d=copy(d)
I=d["I"]
def want_la():
if not I:
return False
n_used_vars=None
bound=None
if iter(I).next().ring().has_degree_order():
new_bound=200
n_used_vars=used_vars_set(I,bound=new_bound).deg()
if n_used_vars<new_bound:
return True
bound=new_bound
if dense_system(I):
new_bound=100
if not (bound and new_bound<bound):
n_used_vars=used_vars_set(I,bound=new_bound).deg()
bound=new_bound
if n_used_vars<bound:
return True
return False
if not (("faugere" in d and (not d["faugere"])) or ("noro" in d and d["noro"])):
if ("faugere" in d and d["faugere"]) or want_la():
d["faugere"]=True
if not "red_tail" in d:
d["red_tail"]=False
if not "selection_size" in d:
d["selection_size"]=10000
if not ("ll" in d):
d["ll"]=True
return d
def trivial_heuristic(d):
return d
class HeuristicalFunction(object):
def __call__(self,*args,**kwds):
complete_dict=copy(kwds)
heuristic=True
try:
heuristic=complete_dict["heuristic"]
except KeyError:
pass
for (k,v) in zip(self.argnames,args):
complete_dict[k]=v
if heuristic:
complete_dict=self.heuristicFunction(complete_dict)
return self.f(**complete_dict)
def __init__(self,f,heuristic_function):
(self.argnames,self.varargs,self.varopts,self.defaults)=getargspec(f)
if hasattr(f,"options"):
self.options=f.options
else:
self.options=dict(zip(self.argnames[-len(self.defaults):],self.defaults))
self.heuristicFunction=heuristic_function
self.f=f
self.__doc__=f.__doc__
def with_heuristic(heuristic_function):
def make_wrapper(f):
wrapped=HeuristicalFunction(f,heuristic_function)
wrapped.__name__=f.__name__
return wrapped
return make_wrapper
def clean_polys(I):
I=list(set((Polynomial(p) for p in I if not Polynomial(p).is_zero())))
return I
def clean_polys_pre(I):
return (clean_polys(I),None)
def gb_with_pre_post_option(
option,pre=None,
post=None,if_not_option=tuple(),
default=False):
def make_wrapper(f):
def wrapper(I, **kwds):
prot=kwds.get("prot", False)
for o in if_not_option:
if (o in kwds and kwds[o]) or (o not in kwds and groebner_basis.options[o]):
option_set=False
if not "option_set" in locals():
if option in kwds:
option_set=kwds[option]
else:
option_set=default
kwds=dict(((o,kwds[o]) for o in kwds if o!=option))
state=None
if option_set:
if pre:
pre_args=getargspec(pre)[0]
if prot:
print "preprocessing for option:", option
local_symbols = copy(locals())
(I, state) = pre(**dict([(k,v) for (k,v) in \
local_symbols.iteritems() if k in pre_args]))
I=f(I,**kwds)
if option_set:
if post:
post_args=getargspec(post)[0]
if prot:
print "postprocessing for option:", option
local_symbols = copy(locals())
I = post(**dict([(k,v) for (k,v) \
in local_symbols.iteritems() if k in post_args]))
return I
wrapper.__name__=f.__name__
wrapper.__doc__=f.__doc__
if hasattr(f,"options"):
wrapper.options=copy(f.options)
else:
wrapper.options = get_options_from_function(f)
wrapper.options[option]=default
return wrapper
return make_wrapper
def redsb_post(I,state):
if I==[]:
return []
else:
return I.minimalize_and_tail_reduce()
def minsb_post(I,state):
if I==[]:
return []
else:
return I.minimalize()
def invert_all(I):
return [p.map_every_x_to_x_plus_one() for p in I]
def invert_all_pre(I):
return (invert_all(I),None)
def invert_all_post(I,state):
return invert_all(I)
def llfirst_pre(I,prot):
(eliminated,llnf, I)=eliminate(I,on_the_fly=False,prot=prot)
return (I,eliminated)
def ll_constants_pre(I):
ll_res=[]
while len([p for p in I if p.lex_lead_deg()==1 and
(p+p.lex_lead()).constant()])>0:
I_new=[]
ll=[]
leads=set()
for p in I:
if p.lex_lead_deg()==1:
l=p.lead()
if not (l in leads) and p.is_singleton_or_pair():
tail=p+l
if tail.deg()<=0:
ll.append(p)
leads.add(l)
continue
I_new.append(p)
encoded=ll_encode(ll)
reduced=[]
for p in I_new:
p=ll_red_nf_redsb(p,encoded)
if not p.is_zero():
reduced.append(p)
I=reduced
ll_res.extend(ll)
return (I,ll_res)
def variety_size_from_gb(I):
"""
>>> r=Ring(100)
>>> x = r.variable
>>> variety_size_from_gb([])
1
>>> variety_size_from_gb([Polynomial(0, r)])
1
>>> variety_size_from_gb([Polynomial(1, r)])
0.0
>>> variety_size_from_gb([x(1)])
1.0
>>> variety_size_from_gb([x(1), x(2)])
1.0
>>> variety_size_from_gb([x(1), x(2)*x(3)])
3.0
>>> variety_size_from_gb([x(1), x(1)*x(4), x(2)*x(3)])
6.0
>>> variety_size_from_gb([x(1)*x(2), x(2)*x(3)])
5.0
>>> mons = [Monomial([r.variable(i) for i in xrange(100) if i!=j])\
for j in xrange(100)]
>>> variety_size_from_gb(mons)
1.2676506002282294e+30
"""
I=[Polynomial(p) for p in I]
I=[p for p in I if not p.is_zero()]
if len(I)==0:
return 1
## # TODO Here's something wrong! See the example with 5 solutions.
## # (reverting for now)
## number_of_used_vars = used_vars_set(I).deg()
## leads = set([p.lead() for p in I])
## minimal_leads = BooleSet(leads).minimal_elements()
## number_of_used_vars_minimal_leads =\
## minimal_leads.vars().deg()
## standard_monomials =\
## minimal_leads.include_divisors().diff(minimal_leads)
## return standard_monomials.size_double()*\
## 2**(number_of_used_vars-number_of_used_vars_minimal_leads)
sm=Monomial(used_vars_set(I)).divisors()
for p in I:
m=p.lead()
sm=sm.diff(sm.multiples_of(m))
return sm.size_double()
def other_ordering_pre(I,option_set,kwds):
"""
>>> from polybori.blocks import declare_ring
>>> r = declare_ring(['x0', 'x1', 'x2', 'x3', 'x4'], globals())
>>> id = [x1*x3 + x1 + x2*x3 + x3 + x4, x0*x3 + x0 + x1*x2 + x2 + 1, x1*x3 + x1*x4 + x3*x4 + x4 + 1, x0*x2 + x0*x4 + x1 + x3 + x4]
>>> groebner_basis(id)
[1]
"""
if not I:
return (I, None)
main_kwds=kwds
options=option_set
old_ring=iter(I).next().ring()
ocode=old_ring.get_order_code()
try:
new_ring = old_ring.clone(ordering=options["switch_to"])
kwds=dict((k,options[k]) for k in options if not (k in ("other_ordering_first","switch_to","I")))
kwds["redsb"]=True
I_orig=I
I=groebner_basis([new_ring(poly) for poly in I],**kwds)
variety_size=variety_size_from_gb(I)
if variety_size<50000:
main_kwds["convert_with_fglm_from_ring"]=new_ring
main_kwds["convert_with_fglm_to_ring"]=old_ring
else:
I = [old_ring(poly) for poly in I]
finally:
pass
return (I,None)
def llfirstonthefly_pre(I,prot):
(eliminated,llnf, I)=eliminate(I,on_the_fly=True)
return (I,eliminated)
def gauss_on_linear_pre(I, prot):
return (gauss_on_linear(I), None)
def easy_linear_polynomials_pre(I):
res=[]
for p in I:
res.append(p)
res.extend(easy_linear_polynomials(p))
return (list(set(res)), None)
def llfirst_post(I,state,prot, kwds):
eliminated=state
for p in I:
if p.is_one():
return [p]
else:
if len(eliminated)>0:
I=list(chain(I,eliminated))
#redsb just for safety, as don't know how option is set
kwds=copy(kwds)
kwds.update(
dict(llfirst = False,
llfirstonthefly = False,
ll_constants = False,
deg_bound = False,
other_ordering_first = False,
eliminate_identical_variables = False, redsb = True))
I=groebner_basis(
I, **kwds
)
return I
def ll_constants_post(I,state):
eliminated=state
for p in I:
if p.is_one():
return [p]
else:
if len(eliminated)>0:
I=list(chain(I,eliminated))
#redsb just for safety, as don't know how option is set
return I
def result_to_list_post(I,state):
return list(I)
def fix_deg_bound_post(I,state):
if isinstance(I,GroebnerStrategy):
return I.all_generators()
else:
return I
def incremental_pre(I,prot, kwds):
def sort_key(p):
p=Polynomial(p)
return (p.navigation().value(), -p.deg())
I=sorted(I, key=sort_key)
inc_sys=[]
kwds=copy(kwds)
kwds['incremental']=False
for p in I[:-1]:
inc_sys.append(p)
inc_sys=groebner_basis(inc_sys, **kwds)
if prot:
print "incrementally calculating GB, adding generator:", p
inc_sys.append(I[:-1])
return (inc_sys,None)
def eliminate_identical_variables_pre(I, prot):
changed=True
ll_system=[]
treated_linears=set()
while changed:
changed=False
rules=dict()
for p in I:
t=p+p.lead()
if p.lead_deg()==1:
l=p.lead()
if l in treated_linears:
continue
else:
treated_linears.add(l)
if t.deg()>0:
rules.setdefault(t, [])
leads=rules[t]
leads.append(l)
def my_sort_key(l):
return l.navigation().value()
for (t, leads) in rules.iteritems():
if len(leads)>1:
changed=True
leads=sorted(leads, key=my_sort_key, reverse=True)
chosen=leads[0]
for v in leads[1:]:
ll_system.append(chosen+v)
if len(ll_system)>0:
ll_encoded=ll_encode(ll_system, reduce=True)
I=set([ll_red_nf_redsb(p, ll_encoded) for p in I])
return (I, ll_system)
@gb_with_pre_post_option("clean_arguments",pre=clean_polys_pre,default=True)
@gb_with_pre_post_option("easy_linear_polynomials", pre=easy_linear_polynomials_pre, default=True)
@gb_with_pre_post_option("result_to_list",post=result_to_list_post,default=True)
@with_heuristic(interpolation_gb_heuristic)
@gb_with_pre_post_option("invert",pre=invert_all_pre,post=invert_all_post,default=False)
@gb_with_pre_post_option("gauss_on_linear", pre=gauss_on_linear_pre, default=True)
@gb_with_pre_post_option("ll_constants", pre=ll_constants_pre,post=ll_constants_post,default=True)
@gb_with_pre_post_option("eliminate_identical_variables", pre=eliminate_identical_variables_pre, post=llfirst_post, default=True)
@with_heuristic(ll_heuristic)
@gb_with_pre_post_option("llfirst",if_not_option=["llfirstonthefly"],pre=llfirst_pre,post=llfirst_post,default=False)
@gb_with_pre_post_option("llfirstonthefly",pre=llfirstonthefly_pre,post=llfirst_post,default=False)
@gb_with_pre_post_option("incremental",pre=incremental_pre)
@with_heuristic(change_order_heuristic)
@gb_with_pre_post_option("other_ordering_first",if_not_option=["interpolation_gb"],pre=other_ordering_pre,default=False)
@with_heuristic(linear_algebra_heuristic)
@gb_with_pre_post_option("fix_deg_bound",if_not_option=["interpolation_gb"], post=fix_deg_bound_post,default=True)
@gb_with_pre_post_option("minsb",post=minsb_post,if_not_option=["redsb","deg_bound","interpolation_gb","convert_with_fglm_from_ring"],default=True)
@gb_with_pre_post_option("redsb",post=redsb_post,if_not_option=["deg_bound","interpolation_gb","convert_with_fglm_from_ring"],default=True)
def groebner_basis(I, heuristic=True,unique_ideal_generator=False, interpolation_gb=False,
clean_and_restart_algorithm=False, convert_with_fglm_from_ring=None,
convert_with_fglm_to_ring=None,
modified_linear_algebra=True, preprocessor=None,
deg_bound = False, implementation = "Python",
full_prot = False, prot = False,
draw_matrices = False, preprocess_only = False,
**impl_options):
"""Computes a Groebner basis of a given ideal I, w.r.t options."""
if not I:
return I
if full_prot:
prot=True
if prot:
print "number of passed generators:",len(I)
if not convert_with_fglm_from_ring is None:
from_ring=convert_with_fglm_from_ring
to_ring = convert_with_fglm_to_ring
return _fglm(I, from_ring, to_ring)
if interpolation_gb:
first = iter(I).next()
if len(I)!=1 or first.ring().get_order_code()!=OrderCode.lp:
raise ValueError
return lex_groebner_basis_for_polynomial_via_variety(first)
if deg_bound is False:
deg_bound=100000000L
I=[Polynomial(p) for p in I if not p.is_zero()]
if unique_ideal_generator and I:
prod=1
for p in I:
prod=(p+1)*prod
I=[prod + 1]
if implementation=="Python":
implementation=symmGB_F2_python
else:
implementation=symmGB_F2_C
# custom preprocessing
if preprocessor:
I = preprocessor(I)
if preprocess_only:
for p in I:
print p
import sys
sys.exit(0)
def call_algorithm(I,max_generators=None):
return implementation(I,
deg_bound = deg_bound,
full_prot = False,
prot = False,
max_generators=max_generators, draw_matrices = draw_matrices,
**filter_newstyle_options(implementation, **impl_options))
if clean_and_restart_algorithm:
for max_generators in [1000,10000,50000,100000,200000,300000,400000,None]:
try:
return call_algorithm(I, max_generators=max_generators)
except GeneratorLimitExceeded, e:
I=list(e.strat.all_generators())
del e.strat
if prot:
print "generator limit exceeded:", max_generators, "restarting algorithm"
else:
return call_algorithm(I)
def build_groebner_basis_doc_string():
additional_options_from_buchberger = \
filter_oldstyle_options(**get_options_from_function(symmGB_F2_python))
for k in list(additional_options_from_buchberger):
if k in groebner_basis.options:
del additional_options_from_buchberger[k]
groebner_basis.__doc__=groebner_basis.__doc__+"\nOptions are:\n"+"\n".join(
(k+" : "+repr(groebner_basis.options[k]) for k in groebner_basis.options)) + \
"""
Turn off heuristic by setting heuristic=False
Additional options come from the actual buchberger implementation.
In case of our standard Python implementation these are the following:
""" + "\n".join(
(k+" : "+repr(additional_options_from_buchberger[k]) for k in additional_options_from_buchberger))
build_groebner_basis_doc_string()
def _test():
import doctest
doctest.testmod()
if __name__ == "__main__":
_test()
|
gpl-2.0
| -4,579,446,247,877,077,500 | 31.551895 | 147 | 0.568855 | false |
amirgeva/coide
|
qutepart/rectangularselection.py
|
1
|
10665
|
from PyQt4.QtCore import Qt, QMimeData
from PyQt4.QtGui import QApplication, QKeyEvent, QKeySequence, QPalette, QTextCursor, QTextEdit, QWidget
class RectangularSelection:
"""This class does not represent any object, but is part of Qutepart
It just groups together Qutepart rectangular selection methods and fields
"""
MIME_TYPE = 'text/rectangular-selection'
    # any of these modifiers combined with the mouse selects text
# if hasattr(Qt, 'AltModifier') to make the docs buildable on rtfd.org
MOUSE_MODIFIERS = (Qt.AltModifier | Qt.ControlModifier,
Qt.AltModifier | Qt.ShiftModifier,
Qt.AltModifier) if hasattr(Qt, 'AltModifier') else None
_MAX_SIZE = 256
def __init__(self, qpart):
self._qpart = qpart
self._start = None
qpart.cursorPositionChanged.connect(self._reset) # disconnected during Alt+Shift+...
qpart.textChanged.connect(self._reset)
qpart.selectionChanged.connect(self._reset) # disconnected during Alt+Shift+...
def _reset(self):
"""Cursor moved while Alt is not pressed, or text modified.
Reset rectangular selection"""
if self._start is not None:
self._start = None
self._qpart._updateExtraSelections()
def isDeleteKeyEvent(self, keyEvent):
"""Check if key event should be handled as Delete command"""
return self._start is not None and \
(keyEvent.matches(QKeySequence.Delete) or \
(keyEvent.key() == Qt.Key_Backspace and keyEvent.modifiers() == Qt.NoModifier))
def delete(self):
"""Del or Backspace pressed. Delete selection"""
with self._qpart:
for cursor in self.cursors():
if cursor.hasSelection():
cursor.deleteChar()
def isExpandKeyEvent(self, keyEvent):
"""Check if key event should expand rectangular selection"""
return keyEvent.modifiers() & Qt.ShiftModifier and \
keyEvent.modifiers() & Qt.AltModifier and \
keyEvent.key() in (Qt.Key_Left, Qt.Key_Right, Qt.Key_Down, Qt.Key_Up,
Qt.Key_PageUp, Qt.Key_PageDown, Qt.Key_Home, Qt.Key_End)
def onExpandKeyEvent(self, keyEvent):
"""One of expand selection key events"""
if self._start is None:
currentBlockText = self._qpart.textCursor().block().text()
line = self._qpart.cursorPosition[0]
visibleColumn = self._realToVisibleColumn(currentBlockText, self._qpart.cursorPosition[1])
self._start = (line, visibleColumn)
modifiersWithoutAltShift = keyEvent.modifiers() & ( ~ (Qt.AltModifier | Qt.ShiftModifier))
newEvent = QKeyEvent(keyEvent.type(),
keyEvent.key(),
modifiersWithoutAltShift,
keyEvent.text(),
keyEvent.isAutoRepeat(),
keyEvent.count())
self._qpart.cursorPositionChanged.disconnect(self._reset)
self._qpart.selectionChanged.disconnect(self._reset)
super(self._qpart.__class__, self._qpart).keyPressEvent(newEvent)
self._qpart.cursorPositionChanged.connect(self._reset)
self._qpart.selectionChanged.connect(self._reset)
# extra selections will be updated, because cursor has been moved
def _visibleCharPositionGenerator(self, text):
currentPos = 0
yield currentPos
for index, char in enumerate(text):
if char == '\t':
currentPos += self._qpart.indentWidth
                # trim remainder. If width('\t') == 4, width('abc\t') == 4
currentPos = currentPos / self._qpart.indentWidth * self._qpart.indentWidth
else:
currentPos += 1
yield currentPos
def _realToVisibleColumn(self, text, realColumn):
"""If \t is used, real position of symbol in block and visible position differs
This function converts real to visible
"""
generator = self._visibleCharPositionGenerator(text)
for i in range(realColumn):
val = generator.next()
return generator.next()
def _visibleToRealColumn(self, text, visiblePos):
"""If \t is used, real position of symbol in block and visible position differs
This function converts visible to real.
Bigger value is returned, if visiblePos is in the middle of \t, None if text is too short
"""
if visiblePos == 0:
return 0
elif not '\t' in text:
return visiblePos
else:
currentIndex = 1
for currentVisiblePos in self._visibleCharPositionGenerator(text):
if currentVisiblePos >= visiblePos:
return currentIndex - 1
currentIndex += 1
return None
def cursors(self):
"""Cursors for rectangular selection.
1 cursor for every line
"""
cursors = []
if self._start is not None:
startLine, startVisibleCol = self._start
currentLine, currentCol = self._qpart.cursorPosition
if abs(startLine - currentLine) > self._MAX_SIZE or \
abs(startVisibleCol - currentCol) > self._MAX_SIZE:
# Too big rectangular selection freezes the GUI
self._qpart.userWarning.emit('Rectangular selection area is too big')
self._start = None
return []
currentBlockText = self._qpart.textCursor().block().text()
currentVisibleCol = self._realToVisibleColumn(currentBlockText, currentCol)
for lineNumber in range(min(startLine, currentLine),
max(startLine, currentLine) + 1):
block = self._qpart.document().findBlockByNumber(lineNumber)
cursor = QTextCursor(block)
realStartCol = self._visibleToRealColumn(block.text(), startVisibleCol)
realCurrentCol = self._visibleToRealColumn(block.text(), currentVisibleCol)
if realStartCol is None:
realStartCol = block.length() # out of range value
if realCurrentCol is None:
realCurrentCol = block.length() # out of range value
cursor.setPosition(cursor.block().position() + min(realStartCol, block.length() - 1))
cursor.setPosition(cursor.block().position() + min(realCurrentCol, block.length() - 1),
QTextCursor.KeepAnchor)
cursors.append(cursor)
return cursors
def selections(self):
"""Build list of extra selections for rectangular selection"""
selections = []
cursors = self.cursors()
if cursors:
background = self._qpart.palette().color(QPalette.Highlight)
foreground = self._qpart.palette().color(QPalette.HighlightedText)
for cursor in cursors:
selection = QTextEdit.ExtraSelection()
selection.format.setBackground(background)
selection.format.setForeground(foreground)
selection.cursor = cursor
selections.append(selection)
return selections
def isActive(self):
"""Some rectangle is selected"""
return self._start is not None
def copy(self):
"""Copy to the clipboard"""
data = QMimeData()
text = '\n'.join([cursor.selectedText() \
for cursor in self.cursors()])
data.setText(text)
data.setData(self.MIME_TYPE, text.encode('utf8'))
QApplication.clipboard().setMimeData(data)
def cut(self):
"""Cut action. Copy and delete
"""
cursorPos = self._qpart.cursorPosition
topLeft = (min(self._start[0], cursorPos[0]),
min(self._start[1], cursorPos[1]))
self.copy()
self.delete()
"""Move cursor to top-left corner of the selection,
so that if text gets pasted again, original text will be restored"""
self._qpart.cursorPosition = topLeft
def _indentUpTo(self, text, width):
"""Add space to text, so text width will be at least width.
Return text, which must be added
"""
visibleTextWidth = self._realToVisibleColumn(text, len(text))
diff = width - visibleTextWidth
if diff <= 0:
return ''
elif self._qpart.indentUseTabs and \
all([char == '\t' for char in text]): # if using tabs and only tabs in text
return '\t' * (diff / self._qpart.indentWidth) + \
' ' * (diff % self._qpart.indentWidth)
else:
return ' ' * diff
def paste(self, mimeData):
"""Paste recrangular selection.
Add space at the beginning of line, if necessary
"""
if self.isActive():
self.delete()
elif self._qpart.textCursor().hasSelection():
self._qpart.textCursor().deleteChar()
text = str(mimeData.data(self.MIME_TYPE)).decode('utf8')
lines = text.splitlines()
cursorLine, cursorCol = self._qpart.cursorPosition
if cursorLine + len(lines) > len(self._qpart.lines):
for i in range(cursorLine + len(lines) - len(self._qpart.lines)):
self._qpart.lines.append('')
with self._qpart:
for index, line in enumerate(lines):
currentLine = self._qpart.lines[cursorLine + index]
newLine = currentLine[:cursorCol] + \
self._indentUpTo(currentLine, cursorCol) + \
line + \
currentLine[cursorCol:]
self._qpart.lines[cursorLine + index] = newLine
self._qpart.cursorPosition = cursorLine, cursorCol
def mousePressEvent(self, mouseEvent):
cursor = self._qpart.cursorForPosition(mouseEvent.pos())
self._start = cursor.block().blockNumber(), cursor.positionInBlock()
def mouseMoveEvent(self, mouseEvent):
cursor = self._qpart.cursorForPosition(mouseEvent.pos())
self._qpart.cursorPositionChanged.disconnect(self._reset)
self._qpart.selectionChanged.disconnect(self._reset)
self._qpart.setTextCursor(cursor)
self._qpart.cursorPositionChanged.connect(self._reset)
self._qpart.selectionChanged.connect(self._reset)
# extra selections will be updated, because cursor has been moved
|
gpl-2.0
| -1,751,007,654,381,181,200 | 41.831325 | 104 | 0.595218 | false |
spradeepv/dive-into-python
|
hackerrank/contests/indeed/the_ultimate_question.py
|
1
|
1574
|
"""
Problem Statement
42 is the answer to "The Ultimate Question of Life, The Universe, and Everything". But what is The Ultimate Question, really? We may never know!
Given three integers, a, b, and c, insert two operators between them so that the following equation is true: a (operator1) b (operator2) c=42.
You may only use the addition (+) and multiplication (*) operators. You can't change the order of the variables.
If a valid equation exists, print it; otherwise, print This is not the ultimate question.
Input Format
A single line consisting three space-separated integers: a, b, and c.
Constraints:
0≤a,b,c≤42
Output Format
Print the equation with no whitespace between the operators and the three numbers. If there is no answer, print This is not the ultimate question.
Note: It is guaranteed that there is no more than one valid equation per test case.
Sample Input
Example 1:
12 5 6
Example 2:
10 20 12
Example 3:
5 12 6
Sample Output
Example 1:
12+5*6
Example 2:
10+20+12
Example 3:
This is not the ultimate question
Explanation
Example 3 is not the ultimate question, because no combination of operators will equal 42: 5+12+6=23≠42
5+12*6=77≠42
5*12+6=66≠42
5*12*6=360≠42
"""
a, b, c = raw_input().split()
a = int(a)
b = int(b)
c = int(c)
if (a + b +c) == 42:
print str(a)+"+"+str(b)+"+"+str(c)
elif (a + b * c) == 42:
print str(a)+"+"+str(b)+"*"+str(c)
elif (a * b + c) == 42:
print str(a)+"*"+str(b)+"+"+str(c)
elif (a * b * c) == 42:
print str(a)+"*"+str(b)+"*"+str(c)
else:
print "This is not the ultimate question"
|
mit
| -3,525,466,171,098,270,000 | 22.161765 | 146 | 0.687421 | false |
zmlabe/IceVarFigs
|
Scripts/SeaIce/NSIDCseaice_quartiles.py
|
1
|
7079
|
"""
Reads in current year's Arctic sea ice extent from Sea Ice Index 3 (NSIDC)
Website : ftp://sidads.colorado.edu/DATASETS/NOAA/G02135/north/daily/data/
Author : Zachary M. Labe
Date : 5 September 2016
"""
### Import modules
import numpy as np
import urllib.request
import urllib as UL
import datetime
import matplotlib.pyplot as plt
### Directory and time
directoryfigure = './Figures/'
now = datetime.datetime.now()
currentmn = str(now.month)
currentdy = str(now.day)
currentyr = str(now.year)
currenttime = currentmn + '_' + currentdy + '_' + currentyr
currentdoy = now.timetuple().tm_yday
### Load url
url = 'ftp://sidads.colorado.edu/DATASETS/NOAA/G02135/north/daily/data/' \
'N_seaice_extent_daily_v3.0.csv'
### Read file
raw_data = UL.request.urlopen(url)
dataset = np.genfromtxt(raw_data, skip_header=2,delimiter=',',
usecols=[0,1,2,3,4])
print('\nCompleted: Read sea ice data!')
### Set missing data to nan
dataset[np.where(dataset==-9999)] = np.nan
### Variables
year = dataset[:,0]
month = dataset[:,1]
day = dataset[:,2]
ice = dataset[:,3]
missing = dataset[:,4]
### Call present year
yr2018 = np.where(year == 2018)[0]
ice18 = ice[yr2018]
### Ice Conversion
iceval = ice18 * 1e6
### Printing info
print('\n----- NSIDC Arctic Sea Ice -----')
print('Current Date =', now.strftime("%Y-%m-%d %H:%M"), '\n')
print('SIE Date = %s/%s/%s' % (int(month[-1]),int(day[-1]),int(year[-1])))
print('Current SIE = %s km^2 \n' % (iceval[-1]))
print('1-day change SIE = %s km^2' % (iceval[-1]-iceval[-2]))
print('7-day change SIE = %s km^2 \n' % (iceval[-1]-iceval[-8]))
###########################################################################
###########################################################################
###########################################################################
### Reads in 1981-2010 means
### Load url
url2 = 'ftp://sidads.colorado.edu/DATASETS/NOAA/G02135/north/daily/data/' \
'N_seaice_extent_climatology_1981-2010_v3.0.csv'
### Read file
raw_data2 = UL.request.urlopen(url2)
dataset2 = np.genfromtxt(raw_data2, skip_header=2,delimiter=',',
usecols=[0,1,2,3,4,5,6,7])
### Create variables
doy = dataset2[:,0]
meanice = dataset2[:,1] * 1e6
std = dataset2[:,2]
### Quartiles
quartile10 = dataset2[:,3]
quartile25 = dataset2[:,4]
quartile50 = dataset2[:,5]
quartile75 = dataset2[:,6]
quartile90 = dataset2[:,7]
### Anomalies
currentanom = iceval[-1]-meanice[currentdoy-2]
### Printing info
print('Current anomaly = %s km^2 \n' % currentanom)
### Selected other years for comparisons
yr2007 = np.where(year == 2007)[0]
yr2012 = np.where(year == 2012)[0]
yr2016 = np.where(year == 2016)[0]
sie7 = ice[yr2007]
sie12 = ice[yr2012]
sie16 = ice[yr2016]
###########################################################################
###########################################################################
###########################################################################
### Create plot
plt.rc('text',usetex=True)
plt.rc('font',**{'family':'sans-serif','sans-serif':['Avant Garde']})
plt.rc('savefig',facecolor='black')
plt.rc('axes',edgecolor='white')
plt.rc('xtick',color='white')
plt.rc('ytick',color='white')
plt.rc('axes',labelcolor='white')
plt.rc('axes',facecolor='black')
fig = plt.figure()
ax = plt.subplot(111)
xlabels = [r'Jan',r'Feb',r'Mar',r'Apr',r'May',r'Jun',r'Jul',
r'Aug',r'Sep',r'Oct',r'Nov',r'Dec',r'Jan']
plt.xticks(np.arange(0,361,30.4),xlabels,rotation=0)
ylabels = map(str,np.arange(2,19,2))
plt.yticks(np.arange(2,19,2),ylabels)
plt.ylim([2,18])
plt.xlim([0,360])
strmonth = xlabels[int(currentmn)-1]
asof = strmonth + ' ' + currentdy + ', ' + currentyr
### Adjust axes in time series plots
def adjust_spines(ax, spines):
for loc, spine in ax.spines.items():
if loc in spines:
spine.set_position(('outward', 5))
else:
spine.set_color('none')
if 'left' in spines:
ax.yaxis.set_ticks_position('left')
else:
ax.yaxis.set_ticks([])
if 'bottom' in spines:
ax.xaxis.set_ticks_position('bottom')
else:
ax.xaxis.set_ticks([])
ax.tick_params('both',length=5.5,width=2,which='major')
adjust_spines(ax, ['left','bottom'])
ax.spines['top'].set_color('none')
ax.spines['right'].set_color('none')
ax.spines['bottom'].set_linewidth(2)
ax.spines['left'].set_linewidth(2)
upper2std = (meanice/1e6)+(std*2)
lower2std = (meanice/1e6)-(std*2)
ax.grid(zorder=1,color='w',alpha=0.2)
plt.plot(ice18,linewidth=1.8,color='aqua',zorder=9,label=r'Current Year (2018)')
plt.plot(doy,upper2std,color='white',alpha=0.7,zorder=3,linewidth=0.1)
plt.plot(doy,lower2std,color='white',alpha=0.7,zorder=4,linewidth=0.1)
plt.plot(doy,quartile10,color='m',alpha=0.7,zorder=3,linewidth=0.4)
plt.plot(doy,quartile25,color='cornflowerblue',alpha=0.7,zorder=4,linewidth=0.4)
plt.plot(doy,quartile75,color='cornflowerblue',alpha=0.7,zorder=4,linewidth=0.4)
plt.plot(doy,quartile90,color='m',alpha=0.7,zorder=3,linewidth=0.4)
ax.fill_between(doy, lower2std, upper2std, facecolor='white', alpha=0.35,
label=r'$\pm$2 standard deviations',zorder=2)
plt.plot(doy,quartile50,color='gold',alpha=1,zorder=3,linewidth=2,
label=r'Median (1981-2010)')
ax.fill_between(doy, quartile90, quartile75, facecolor='m', alpha=0.55,
label=r'10-90th percentiles',zorder=2)
ax.fill_between(doy, quartile10, quartile25, facecolor='m', alpha=0.55,
zorder=2)
ax.fill_between(doy, quartile25, quartile50, facecolor='cornflowerblue', alpha=0.6,
zorder=2)
ax.fill_between(doy, quartile50, quartile75, facecolor='cornflowerblue', alpha=0.6,
label=r'25-75th percentiles',zorder=2)
plt.scatter(doy[currentdoy-3],ice[-1],s=10,color='aqua',zorder=9)
plt.ylabel(r'\textbf{Extent} [$\times$10$^{6}$ km$^2$]',fontsize=15,
color='darkgrey')
le = plt.legend(shadow=False,fontsize=6,loc='upper left',
bbox_to_anchor=(0.473, 1.011),fancybox=True,ncol=2)
for text in le.get_texts():
text.set_color('w')
plt.title(r'\textbf{ARCTIC SEA ICE}',
fontsize=21,color='darkgrey')
plt.text(doy[currentdoy]-5,ice[-1]-1.35,r'\textbf{2018}',
fontsize=13.5,rotation='horizontal',ha='left',color='aqua')
plt.text(0.5,3.1,r'\textbf{DATA:} National Snow \& Ice Data Center, Boulder CO',
fontsize=5.5,rotation='horizontal',ha='left',color='darkgrey')
plt.text(0.5,2.6,r'\textbf{SOURCE:} ftp://sidads.colorado.edu/DATASETS/NOAA/G02135/',
fontsize=5.5,rotation='horizontal',ha='left',color='darkgrey')
plt.text(0.5,2.1,r'\textbf{GRAPHIC:} Zachary Labe (@ZLabe)',
fontsize=5.5,rotation='horizontal',ha='left',color='darkgrey')
fig.subplots_adjust(top=0.91)
### Save figure
plt.savefig(directoryfigure + 'nsidc_sie_quartiles_currentyear.png',dpi=300)
|
mit
| -8,125,626,366,478,504,000 | 33.705882 | 85 | 0.599237 | false |
C-Codes/jupyterbar
|
jupyterbar/jupyterbar.py
|
1
|
4049
|
#!/usr/bin/env python
from __future__ import print_function
import os, sys
import time, datetime
import rumps
import webbrowser
from jupyter import Jupyter
class JupyterStatusBarApp(rumps.App):
#def __init__(self):
# super(JupyterStatusBarApp, self).__init__("jupyter")
# self.title="jupyter"
# self.icon="jupyter-logo.png"
# self.menu = ["Active", "Preferences", "Status"]
def __init__(self):
self.jp_handler = Jupyter()
super(JupyterStatusBarApp, self).__init__("iPy", quit_button=None) #jupyter
@rumps.clicked("Open Notebooks")
def onoff(self, sender):
#sender.state = not sender.state
#since we are only running on OS X anyways ...
safari = webbrowser.get('safari')
safari.open("http://localhost:8888")
#@rumps.clicked("Active")
#def onoff(self, sender):
# sender.state = not sender.state
@rumps.clicked("Set Notebook dir")
def prefs(self, _):
#rumps.alert("No preferences available!")
        window = rumps.Window(message='Set notebook-dir', title='JupyterBar Preferences',
default_text=self.jp_handler.get_notebook_dir(),
ok=None, cancel=None, dimensions=(220, 24))
response = window.run()
if (not response.clicked is 1):
return
notebook_path = str(response.text)
self.jp_handler.set_notebook_dir(notebook_path)
@rumps.clicked("Set $PATH")
def prefs(self, _):
#rumps.alert("No preferences available!")
        window = rumps.Window(message='Set $PATH environment variable', title='JupyterBar Preferences',
default_text=self.jp_handler.get_path(),
ok=None, cancel=None, dimensions=(220, 24))
response = window.run()
if (not response.clicked is 1):
return
path = str(response.text)
self.jp_handler.set_path(path)
@rumps.clicked("Set $PYTHONPATH")
def prefs(self, _):
#rumps.alert("No preferences available!")
        window = rumps.Window(message='Set $PYTHONPATH environment variable', title='JupyterBar Preferences',
default_text=self.jp_handler.get_pythonpath(),
ok=None, cancel=None, dimensions=(220, 24))
response = window.run()
if (not response.clicked is 1):
return
python_path = str(response.text)
self.jp_handler.set_pythonpath(python_path)
@rumps.clicked("Reset Settings")
def prefs(self, _):
settings_file_path = self.jp_handler.reset_settings()
time_st = datetime.datetime.fromtimestamp(time.time()).strftime('%H:%M:%S') #'%Y-%m-%d %H:%M:%S'
rumps.notification("jupyter "+str(time_st), "Settings file has been reset.", str(settings_file_path))
@rumps.clicked("Status")
def status(self, _):
'''
Checking status of jupyter / ipython
'''
status,msg = self.jp_handler.get_status()
time_st = datetime.datetime.fromtimestamp(time.time()).strftime('%H:%M:%S') #'%Y-%m-%d %H:%M:%S'
rumps.notification("jupyter "+str(time_st), "Status: "+str(status), str(msg))
@rumps.clicked("Restart")
def prefs(self, _):
#rumps.alert("Warning: All notebooks will be shut down!")
self.jp_handler.restart()
@rumps.clicked("Shut down")
def prefs(self, _):
#rumps.alert("Warning: All notebooks will be shut down!")
self.jp_handler.shut_down()
@rumps.clicked('Quit')
def clean_quit_application(self, _):
self.jp_handler.shut_down()
rumps.quit_application()
def main(argv):
#jp_bar = JupyterStatusBarApp("jupyter")
#jp_bar = JupyterStatusBarApp("iPy")
jp_bar = JupyterStatusBarApp()
icon_bar = "jupyter-logo-bw.png"
if os.path.isfile(icon_bar):
jp_bar.icon=icon_bar
jp_bar.template = True
jp_bar.run()
if __name__ == "__main__":
main(sys.argv[1:])
|
mit
| 3,329,891,740,349,573,600 | 31.134921 | 110 | 0.593727 | false |
regosen/gallery_get
|
gallery_plugins/plugin_shimmie.py
|
1
|
1738
|
# Plugin for gallery_get.
import re
from gallery_utils import *
# Each definition can be one of the following:
# - a string
# - a regex string
# - a function that takes source as a parameter and returns an array or a string. (You may assume that re and urllib are already imported.)
# If you comment out a parameter, it will use the default defined in __init__.py
# identifier (default = name of this plugin after "plugin_") : If there's a match, we'll attempt to download images using this plugin.
# title: parses the gallery page for a title. This will be the folder name of the output gallery.
# redirect: if the links in the gallery page go to an html instead of an image, use this to parse the gallery page.
def redirect(source):
redirects = []
base_url = "http://shimmie.shishnet.org"
cur_url = re.findall(r'href="(.+?)"\>Random</a>', source)[0].rsplit("/",1)[0]
index = 0
while True:
indexed_page = "%s%s/%s" % (base_url, cur_url, index)
print("Crawling " + indexed_page)
try:
indexed_source = urlopen_text(indexed_page)
links = re.findall("href='(.+?)' class='thumb", indexed_source)
if links:
redirects += map(lambda x: base_url + x, links)
index += 1
else:
break
except:
break
return redirects
# direct_links: if redirect is non-empty, this parses each redirect page for a single image. Otherwise, this parses the gallery page for all images.
direct_links = r"name='ubb_full-img' value='\[img\](.+?)\[/img\]'"
# same_filename (default=False): if True, uses filename specified on remote link. Otherwise, creates own filename with incremental index.
same_filename = True
|
mit
| 6,606,265,935,221,166,000 | 41.414634 | 149 | 0.655926 | false |
zchee/deoplete-jedi
|
rplugin/python3/deoplete/sources/deoplete_jedi.py
|
1
|
10772
|
import copy
import logging
import os
import re
from importlib.util import find_spec
from deoplete.base.source import Base
from deoplete.util import bytepos2charpos, getlines, load_external_module
load_external_module(__file__, 'sources')
from deoplete_jedi import profiler # isort:skip # noqa: E402
# Type mapping. Empty values will use the key value instead.
# Keep them 5 characters max to minimize required space to display.
_types = {
'import': 'imprt',
'class': '',
'function': 'def',
'globalstmt': 'var',
'instance': 'var',
'statement': 'var',
'keyword': 'keywd',
'module': 'mod',
'param': 'arg',
'property': 'prop',
'bool': '',
'bytes': 'byte',
'complex': 'cmplx',
'dict': '',
'list': '',
'float': '',
'int': '',
'object': 'obj',
'set': '',
'slice': '',
'str': '',
'tuple': '',
'mappingproxy': 'dict', # cls.__dict__
'member_descriptor': 'cattr',
'getset_descriptor': 'cprop',
'method_descriptor': 'cdef',
}
def sort_key(item):
w = item.get('name')
z = len(w) - len(w.lstrip('_'))
return (('z' * z) + w.lower()[z:], len(w))
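# Illustrative behaviour of sort_key (hypothetical completion items): names with
# leading underscores sort after plain names, e.g.
#   sort_key({'name': '_private'}) -> ('zprivate', 8)
#   sort_key({'name': 'public'})   -> ('public', 6)
# so 'public' is listed before '_private'.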
class Source(Base):
def __init__(self, vim):
Base.__init__(self, vim)
self.name = 'jedi'
self.mark = '[jedi]'
self.rank = 500
self.filetypes = ['python', 'cython', 'pyrex']
self.input_pattern = (r'[\w\)\]\}\'\"]+\.\w*$|'
r'^\s*@\w*$|'
r'^\s*from\s+[\w\.]*(?:\s+import\s+(?:\w*(?:,\s*)?)*)?|'
r'^\s*import\s+(?:[\w\.]*(?:,\s*)?)*')
self._async_keys = set()
self.workers_started = False
self._jedi = None
def on_init(self, context):
vars = context['vars']
self.statement_length = 50
if 'deoplete#sources#jedi#statement_length' in vars:
self.statement_length = vars[
'deoplete#sources#jedi#statement_length']
self.enable_typeinfo = True
if 'deoplete#sources#jedi#enable_typeinfo' in vars:
self.enable_typeinfo = vars[
'deoplete#sources#jedi#enable_typeinfo']
self.enable_short_types = False
if 'deoplete#sources#jedi#enable_short_types' in vars:
self.enable_short_types = vars[
'deoplete#sources#jedi#enable_short_types']
self.short_types_map = copy.copy(_types)
if 'deoplete#sources#jedi#short_types_map' in vars:
self.short_types_map.update(vars[
'deoplete#sources#jedi#short_types_map'])
self.show_docstring = False
if 'deoplete#sources#jedi#show_docstring' in vars:
self.show_docstring = vars[
'deoplete#sources#jedi#show_docstring']
self.ignore_errors = False
if 'deoplete#sources#jedi#ignore_errors' in vars:
self.ignore_errors = vars[
'deoplete#sources#jedi#ignore_errors']
self.ignore_private_members = False
if 'deoplete#sources#jedi#ignore_private_members' in vars:
self.ignore_private_members = vars[
'deoplete#sources#jedi#ignore_private_members']
# TODO(blueyed)
self.extra_path = ''
if 'deoplete#sources#jedi#extra_path' in vars:
self.extra_path = vars[
'deoplete#sources#jedi#extra_path']
if not self.is_debug_enabled:
root_log = logging.getLogger('deoplete')
child_log = root_log.getChild('jedi')
child_log.propagate = False
self._python_path = None
"""Current Python executable."""
self._env = None
"""Current Jedi Environment."""
self._envs = {}
"""Cache for Jedi Environments."""
if find_spec('jedi'):
import jedi # noqa: E402
self._jedi = jedi
else:
self.print_error(
'jedi module is not found. You need to install it.')
@profiler.profile
def set_env(self, python_path):
if not python_path:
import shutil
python_path = shutil.which('python')
self._python_path = python_path
try:
self._env = self._envs[python_path]
except KeyError:
self._env = self._jedi.api.environment.Environment(
python_path, env_vars={'PYTHONPATH': str(self.extra_path)})
self.debug('Using Jedi environment: %r', self._env)
@profiler.profile
def get_script(self, source, filename, environment):
return self._jedi.Script(code=source, path=filename, environment=self._env)
@profiler.profile
def get_completions(self, script, line, col):
return script.complete(line, col)
@profiler.profile
def finalize_completions(self, completions):
out = []
tmp_filecache = {}
for c in completions:
out.append(self.parse_completion(c, tmp_filecache))
if self.ignore_private_members:
out = [x for x in out if not x['name'].startswith('__')]
# partly from old finalized_cached
out = [self.finalize(x) for x in sorted(out, key=sort_key)]
return out
@profiler.profile
def gather_candidates(self, context):
if not self._jedi:
return []
python_path = None
if 'deoplete#sources#jedi#python_path' in context['vars']:
python_path = context['vars'][
'deoplete#sources#jedi#python_path']
if python_path != self._python_path or self.extra_path:
self.set_env(python_path)
line = context['position'][1]
col = bytepos2charpos(
context['encoding'], context['input'],
context['complete_position'])
buf = self.vim.current.buffer
filename = str(buf.name)
# Only use source if buffer is modified, to skip transferring, joining,
# and splitting the buffer lines unnecessarily.
modified = buf.options['modified']
if not modified and os.path.exists(filename):
source = None
else:
source = '\n'.join(getlines(self.vim))
if (line != self.vim.call('line', '.')
or context['complete_position'] >= self.vim.call('col', '$')):
return []
self.debug('Line: %r, Col: %r, Filename: %r, modified: %r',
line, col, filename, modified)
script = self.get_script(source, filename, environment=self._env)
try:
completions = self.get_completions(script, line, col)
except BaseException:
if not self.ignore_errors:
raise
return []
return self.finalize_completions(completions)
def get_complete_position(self, context):
if not self._jedi:
return -1
pattern = r'\w*$'
if context['input'].lstrip().startswith(('from ', 'import ')):
m = re.search(r'[,\s]$', context['input'])
if m:
return m.end()
m = re.search(pattern, context['input'])
return m.start() if m else -1
def mix_boilerplate(self, completions):
seen = set()
for item in self.boilerplate + completions:
if item['name'] in seen:
continue
seen.add(item['name'])
yield item
def finalize(self, item):
abbr = item['name']
desc = item['doc']
if item['params']:
sig = '{}({})'.format(item['name'], ', '.join(item['params']))
sig_len = len(sig)
desc = sig + '\n\n' + desc
if self.statement_length > 0 and sig_len > self.statement_length:
params = []
length = len(item['name']) + 2
for p in item['params']:
p = p.split('=', 1)[0]
length += len(p)
params.append(p)
length += 2 * (len(params) - 1)
# +5 for the ellipsis and separator
while length + 5 > self.statement_length and len(params):
length -= len(params[-1]) + 2
params = params[:-1]
if len(item['params']) > len(params):
params.append('...')
sig = '{}({})'.format(item['name'], ', '.join(params))
abbr = sig
if self.enable_short_types:
kind = item['short_type'] or item['type']
else:
kind = item['type']
return {
'word': item['name'],
'abbr': abbr,
'kind': kind,
'info': desc.strip(),
'dup': 1,
}
def completion_dict(self, name, type_, comp):
"""Final construction of the completion dict."""
doc = ''
if self.show_docstring:
try:
doc = comp.docstring()
except BaseException:
if not self.ignore_errors:
raise
i = doc.find('\n\n')
if i != -1:
doc = doc[i:]
params = None
try:
if type_ in ('function', 'class'):
params = []
for i, p in enumerate(comp.params):
desc = p.description.strip()
if i == 0 and desc == 'self':
continue
if '\\n' in desc:
desc = desc.replace('\\n', '\\x0A')
# Note: Hack for jedi param bugs
if desc.startswith('param ') or desc == 'param':
desc = desc[5:].strip()
if desc:
params.append(desc)
except Exception:
params = None
return {
'name': name,
'type': type_,
'short_type': self.short_types_map.get(type_),
'doc': doc.strip(),
'params': params,
}
def parse_completion(self, comp, cache):
"""Return a tuple describing the completion.
Returns (name, type, description, abbreviated)
"""
name = comp.name
if self.enable_typeinfo:
type_ = comp.type
else:
type_ = ''
if self.show_docstring:
desc = comp.description
else:
desc = ''
if type_ == 'instance' and desc.startswith(('builtins.', 'posix.')):
# Simple description
builtin_type = desc.rsplit('.', 1)[-1]
if builtin_type in _types:
return self.completion_dict(name, builtin_type, comp)
return self.completion_dict(name, type_, comp)
|
mit
| -6,750,035,902,205,924,000 | 30.869822 | 86 | 0.512811 | false |
irregulator/ganetimgr
|
accounts/migrations/0009_auto__del_field_customregistrationprofile_admin_activated__del_field_c.py
|
1
|
7670
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'CustomRegistrationProfile.admin_activated'
db.delete_column('accounts_customregistrationprofile', 'admin_activated')
# Wrongly identified migrations, field was simply renamed.
# # Deleting field 'CustomRegistrationProfile.activated'
# db.delete_column('accounts_customregistrationprofile', 'activated')
# # Adding field 'CustomRegistrationProfile.validated'
# db.add_column('accounts_customregistrationprofile', 'validated',
# self.gf('django.db.models.fields.BooleanField')(default=False),
# keep_default=False)
db.rename_column('accounts_customregistrationprofile', 'activated', 'validated')
def backwards(self, orm):
# Adding field 'CustomRegistrationProfile.admin_activated'
db.add_column('accounts_customregistrationprofile', 'admin_activated',
self.gf('django.db.models.fields.BooleanField')(default=False),
keep_default=False)
# Wrongly identified migrations, field was simply renamed.
# # Adding field 'CustomRegistrationProfile.activated'
# db.add_column('accounts_customregistrationprofile', 'activated',
# self.gf('django.db.models.fields.BooleanField')(default=False),
# keep_default=False)
# # Deleting field 'CustomRegistrationProfile.validated'
# db.delete_column('accounts_customregistrationprofile', 'validated')
db.rename_column('accounts_customregistrationprofile', 'validated', 'activated',)
models = {
'accounts.customregistrationprofile': {
'Meta': {'object_name': 'CustomRegistrationProfile', '_ormbases': ['registration.RegistrationProfile']},
'admin_activation_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'registrationprofile_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['registration.RegistrationProfile']", 'unique': 'True', 'primary_key': 'True'}),
'validated': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'accounts.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'first_login': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'force_logout_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'organization': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['apply.Organization']", 'null': 'True', 'blank': 'True'}),
'telephone': ('django.db.models.fields.CharField', [], {'max_length': '13', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'apply.organization': {
'Meta': {'ordering': "['title']", 'object_name': 'Organization'},
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'tag': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'website': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'registration.registrationprofile': {
'Meta': {'object_name': 'RegistrationProfile'},
'activation_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'})
}
}
complete_apps = ['accounts']
|
gpl-3.0
| -5,851,417,538,446,304,000 | 65.704348 | 185 | 0.57927 | false |
csyhuang/hn2016_falwa
|
hn2016_falwa/beta_version.py
|
1
|
20465
|
def input_jk_output_index(j,k,kmax):
return j*(kmax) + k
def extrap1d(interpolator):
xs = interpolator.x
ys = interpolator.y
def pointwise(x):
if x < xs[0]:
return ys[0]+(x-xs[0])*(ys[1]-ys[0])/(xs[1]-xs[0])
elif x > xs[-1]:
return ys[-1]+(x-xs[-1])*(ys[-1]-ys[-2])/(xs[-1]-xs[-2])
else:
return interpolator(x)
def ufunclike(xs):
from scipy import array
return array(map(pointwise, array(xs)))
return ufunclike
def solve_uref_both_bc(tstamp, zmum, FAWA_cos, ylat, ephalf2, Delta_PT,
zm_PT, Input_B0, Input_B1, use_real_Data=True,
plot_all_ref_quan=False):
"""
    Compute the zonal-mean reference states of wind and temperature (following NS10) with both no-slip and adiabatic lower boundary conditions.
Parameters
----------
tstamp : string
Time stamp of the snapshot of the field.
    zmum : ndarray
Zonal mean wind.
FAWA_cos : ndarray
Zonal mean finite-amplitude wave activity.
ylat : sequence or array_like
1-d numpy array of latitude (in degree) with equal spacing in ascending order; dimension = nlat.
ephalf2 : ndarray
Epsilon in Nakamura and Solomon (2010).
Delta_PT : ndarray
\Delta \Theta in Nakamura and Solomon (2010); upper-boundary conditions.
zm_PT : ndarray
Zonal mean potential temperature.
Input_B0 : sequence or array_like
Zonal-mean surface wave activity for the lowest layer (k=0). Part of the lower-boundary condition.
Input_B1 : sequence or array_like
Zonal-mean surface wave activity for the second lowest layer (k=1). Part of the lower-boundary condition.
use_real_Data : boolean
        Whether to use input data to compute the reference states. By default True. If False, randomly generated arrays will be used.
plot_all_ref_quan : boolean
Whether to plot the solved reference states using matplotlib library. By default False. For debugging.
Returns
-------
u_MassCorr_regular_noslip : ndarray
2-d numpy array of mass correction \Delta u in NS10 with no-slip lower boundary conditions; dimension = (kmax,nlat).
u_Ref_regular_noslip : ndarray
2-d numpy array of zonal wind reference state u_ref in NS10 with no-slip lower boundary conditions; dimension = (kmax,nlat).
T_MassCorr_regular_noslip : ndarray
2-d numpy array of adjustment in reference temperature \Delta T in NS10 with no-slip lower boundary conditions; dimension = (kmax,nlat).
T_Ref_regular_noslip : ndarray
        2-d numpy array of reference temperature T_ref in NS10 with no-slip lower boundary conditions; dimension = (kmax,nlat).
u_MassCorr_regular_adiab : ndarray
2-d numpy array of mass correction \Delta u in NS10 with adiabatic lower boundary conditions; dimension = (kmax,nlat).
u_Ref_regular_adiab : ndarray
2-d numpy array of zonal wind reference state u_ref in NS10 with adiabatic lower boundary conditions; dimension = (kmax,nlat).
T_MassCorr_regular_adiab : ndarray
2-d numpy array of adjustment in reference temperature \Delta T in NS10 with adiabatic lower boundary conditions; dimension = (kmax,nlat).
T_Ref_regular_adiab : ndarray
        2-d numpy array of reference temperature T_ref in NS10 with adiabatic lower boundary conditions; dimension = (kmax,nlat).
"""
# zm_PT = zonal mean potential temperature
# Import necessary modules
from math import pi, exp
from scipy import interpolate
from scipy.sparse import csc_matrix
from scipy.sparse.linalg import spsolve
from copy import copy
import numpy as np
import itertools
if plot_all_ref_quan:
import matplotlib.pyplot as plt
# === Parameters (should be input externally. To be modified) ===
dz = 1000. # vertical z spacing (m)
aa = 6378000. # planetary radius
r0 = 287. # gas constant
hh = 7000. # scale height
cp = 1004. # specific heat
rkappa = r0/cp
om = 7.29e-5 # angular velocity of the earth
# === These changes with input variables' dimensions ===
nlat = FAWA_cos.shape[-1]
jmax1 = nlat//4
dm = 1./float(jmax1+1) # gaussian latitude spacing
gl = np.array([(j+1)*dm for j in range(jmax1)]) # This is sin / mu
gl_2 = np.array([j*dm for j in range(jmax1+2)]) # This is sin / mu
cosl = np.sqrt(1.-gl**2)
#cosl_2 = np.sqrt(1.-gl_2**2)
alat = np.arcsin(gl)*180./pi
alat_2 = np.arcsin(gl_2)*180./pi
dmdz = (dm/dz)
# **** Get from input these parameters ****
kmax = FAWA_cos.shape[0]
#height = np.array([i for i in range(kmax)]) # in [km]
# **** Initialize Coefficients ****
c_a = np.zeros((jmax1, kmax))
c_b = np.zeros((jmax1, kmax))
c_c = np.zeros((jmax1, kmax))
c_d = np.zeros((jmax1, kmax))
c_e = np.zeros((jmax1, kmax))
c_f = np.zeros((jmax1, kmax))
# --- Initialize interpolated variables ---
zmu1 = np.zeros((jmax1, kmax))
cx1 = np.zeros((jmax1, kmax))
cor1 = np.zeros((jmax1, kmax))
ephalf = np.zeros((jmax1, kmax))
Delta_PT1 = np.zeros((jmax1+2))
zm_PT1 = np.zeros((jmax1, kmax))
Input_B0_1 = np.zeros((jmax1+2))
Input_B1_1 = np.zeros((jmax1+2))
# --- Define Epsilon as a function of y and z ---
# **** Interpolate to gaussian latitude ****
if use_real_Data:
# print 'use_real_Data'
for vv1,vvm in zip([zmu1,cx1,zm_PT1] , [zmum,FAWA_cos,zm_PT]):
f_toGaussian = interpolate.interp1d(ylat[:],vvm[:,:].T,axis=0, kind='linear') #[jmax x kmax]
vv1[:,:] = f_toGaussian(alat[:])
#vv1[:,:] = vvm[:,:]
#vv1[-1,:] = vvm[:,-1]
# --- Interpolation of ephalf ---
f_ep_toGaussian = interpolate.interp1d(ylat[:],ephalf2[:,:].T,axis=0, kind='linear') #[jmax x kmax]
ephalf[:,:] = f_ep_toGaussian(alat[:])
# --- Interpolation of Delta_PT ---
#f_DT_toGaussian = extrap1d( interpolate.interp1d(ylat[:],Delta_PT[:], kind='linear') ) # This is txt in Noboru's code
f_DT_toGaussian = interpolate.interp1d(ylat[:],Delta_PT[:],
kind='linear',fill_value='extrapolate')
Delta_PT1[:] = f_DT_toGaussian(alat_2[:])
# --- Interpolation of Input_B0_1 ---
#f_B0_toGaussian = extrap1d( interpolate.interp1d(ylat[:],Input_B0[:], kind='linear') ) # This is txt in Noboru's code
f_B0_toGaussian = interpolate.interp1d(ylat[:],Input_B0[:],
kind='linear',fill_value='extrapolate') # This is txt in Noboru's code
Input_B0_1[:] = f_B0_toGaussian(alat_2[:])
# --- Interpolation of Input_B1_1 ---
# f_B1_toGaussian = extrap1d( interpolate.interp1d(ylat[:],Input_B1[:], kind='linear') ) # This is txt in Noboru's code
f_B1_toGaussian = interpolate.interp1d(ylat[:],Input_B1[:],
kind='linear',fill_value='extrapolate') # This is txt in Noboru's code
Input_B1_1[:] = f_B1_toGaussian(alat_2[:])
else:
# Use random matrix here just to test!
zmu1 = np.random.rand(jmax1, kmax)+np.ones((jmax1, kmax))*1.e-8
cx1 = np.random.rand(jmax1, kmax)+np.ones((jmax1, kmax))*1.e-8
#cor1 = np.random.rand(jmax1, kmax)+np.ones((jmax1, kmax))*1.e-8
# --- Added on Aug 1, 2016 ---
cor1 = 2.*om*gl[:,np.newaxis] * np.ones((jmax1, kmax))
#cor1[0] = cor1[1]*0.5
# OLD: qxx0 = -cx1*cosl[:,np.newaxis]/cor1 #qxx0 = np.empty((jmax1, kmax))
qxx0 = -cx1/cor1 # Input of LWA has cosine.
c_f[0,:] = qxx0[1,:] - 2*qxx0[0,:]
c_f[-1,:] = qxx0[-2,:] - 2*qxx0[-1,:]
c_f[1:-1,:] = qxx0[:-2,:] + qxx0[2:,:] - 2*qxx0[1:-1,:]
#c_f[:,0] = 0.0
# --- Aug 9: Lower Adiabatic boundary conditions ---
Input_dB0 = np.zeros((jmax1))
Input_dB1 = np.zeros((jmax1))
uz1 = np.zeros((jmax1))
# prefac = - r0 * cosl[1:-1]**2 * dz / (cor1[1:-1,-2]**2 * aa**2 * hh * dm**2) * exp(-rkappa*(kmax-2.)/7.)
# OLD: Input_dB0[:] = Input_B0_1[:-2]*cosl_2[:-2] + Input_B0_1[2:]*cosl_2[2:] - 2*Input_B0_1[1:-1]*cosl_2[1:-1]
Input_dB0[:] = Input_B0_1[:-2] + Input_B0_1[2:] - 2*Input_B0_1[1:-1]
# OLD: Input_dB1[:] = Input_B1_1[:-2]*cosl_2[:-2] + Input_B1_1[2:]*cosl_2[2:] - 2*Input_B1_1[1:-1]*cosl_2[1:-1]
Input_dB1[:] = Input_B1_1[:-2] + Input_B1_1[2:] - 2*Input_B1_1[1:-1]
# This is supposed to be correct but gave weird results.
uz1[:] = - r0 * cosl[:]**2 * Input_dB1[:] * 2*dz / (cor1[:,1]**2 * aa**2 * hh * dm**2) * exp(-rkappa*(1.)/7.) \
- r0 * cosl[:]**2 * Input_dB0[:] * 2*dz / (cor1[:,0]**2 * aa**2 * hh * dm**2) * exp(-rkappa*(0.)/7.)
# **** Upper Boundary Condition (Come back later) ****
uz2 = np.zeros((jmax1))
dDelta_PT1 = (Delta_PT1[2:]-Delta_PT1[:-2]) # Numerical trick: Replace uz2[1] with an extrapolated value
# Original correct one:
# uz2[1:-1] = - r0 * cosl[1:-1]**2 * exp(-rkappa*(kmax-2.)/7.) * dDelta_PT1 / (cor1[1:-1,-2]**2 * aa * hh * dmdz)
uz2[:] = - r0 * cosl[:]**2 * exp(-rkappa*(kmax-2.)/7.) * dDelta_PT1 / (cor1[:,-2]**2 * aa * hh * dmdz)
# **** Initialize the coefficients a,b,c,d,e,f ****
c_a[:,:] = 1.0
c_b[:,:] = 1.0
c_c[:,1:-1] = dmdz**2 *ephalf[:,1:-1]*exp(-dz/(2*hh)) # This one should be correct
c_d[:,1:-1] = dmdz**2 *ephalf[:,0:-2]*exp(dz/(2*hh)) # Check convention of ephalf
c_e[:,1:-1] = -(c_a[:,1:-1]+c_b[:,1:-1]+c_c[:,1:-1]+c_d[:,1:-1])
b = np.zeros((jmax1*kmax))
row_index=[]
col_index=[]
coeff = []
jrange = range(jmax1)
krange = range(1,kmax-1)
for j, k in itertools.product(jrange, krange):
# for j in range(jmax1):
# for k in range(1,kmax-1):
ind = input_jk_output_index(j,k,kmax)
b[ind] = c_f[j,k]
if (j<jmax1-1):
# A[ind,input_jk_output_index(j+1,k,kmax)] = c_a[j,k]
row_index.append(ind)
col_index.append(input_jk_output_index(j+1,k,kmax))
coeff.append(c_a[j,k])
if (j>0):
# A[ind,input_jk_output_index(j-1,k,kmax)] = c_b[j,k]
row_index.append(ind)
col_index.append(input_jk_output_index(j-1,k,kmax))
coeff.append(c_b[j,k])
# A[ind,input_jk_output_index(j,k+1,kmax)] = c_c[j,k]
row_index.append(ind)
col_index.append(input_jk_output_index(j,k+1,kmax))
coeff.append(c_c[j,k])
# A[ind,input_jk_output_index(j,k-1,kmax)] = c_d[j,k]
row_index.append(ind)
col_index.append(input_jk_output_index(j,k-1,kmax))
coeff.append(c_d[j,k])
# A[ind,input_jk_output_index(j,k,kmax)] = c_e[j,k]
row_index.append(ind)
col_index.append(input_jk_output_index(j,k,kmax))
coeff.append(c_e[j,k])
# ==== Upper boundary condition - thermal wind ====
# for j in range(1,jmax1-1):
for j in range(jmax1):
ind1 = input_jk_output_index(j,kmax-1,kmax)
b[ind1] = uz2[j] #- r0 * cosl[j]**2 * exp(-rkappa*(kmax-2.)/7.) * (Delta_PT1[j+1]-Delta_PT1[j-1])/ (cor1[j,-2]**2 * aa * hh * dmdz)
# A[ind1,ind1] = 1.0
row_index.append(ind1)
col_index.append(ind1)
coeff.append(1.0)
# A[ind1,input_jk_output_index(j,kmax-3,kmax)] = -1.0
row_index.append(ind1)
col_index.append(input_jk_output_index(j,kmax-3,kmax))
coeff.append(-1.0)
# Try sparse matrix
# print 'try sparse matrix'
# A = csc_matrix((coeff_noslip, (row_index, col_index)), shape=(jmax1*kmax,jmax1*kmax))
# print 'shape of A=',A.shape
# print 'Does it work?'
#
# csc_matrix((data, (row_ind, col_ind)), [shape=(M, N)])
# where data, row_ind and col_ind satisfy the relationship a[row_ind[k], col_ind[k]] = data[k].
# A[ind1,input_jk_output_index(j,kmax-3,kmax)] = -1.0
#uz2[1:-1] = - r0 * cosl[1:-1]**2 * exp(-rkappa*(kmax-2.)/7.) * (Delta_PT1[2:]-Delta_PT1[:-2]) / (cor1[1:-1,-2]**2 * aa * hh * dmdz)
# === Make a copy to deal with adiabatic boundary condition ===
# A: no-slip
# A_adiab: adiabatic boundary conditions
row_index_adiab = copy(row_index)
col_index_adiab = copy(col_index)
coeff_adiab = copy(coeff)
b_adiab = np.copy(b)
# print 'does it work till here?'
# A_adiab = np.copy(A)
# ==== Lower boundary condition - adiabatic (k=0) ====
for j in range(jmax1):
ind0 = input_jk_output_index(j,0,kmax)
b_adiab[ind0] = uz1[j]
# A_adiab[ind0,ind0] = -1.0 # k=0
row_index_adiab.append(ind0)
col_index_adiab.append(ind0)
coeff_adiab.append(-1.0)
# A_adiab[ind0,input_jk_output_index(j,2,kmax)] = 1.0 # k=2
row_index_adiab.append(ind0)
col_index_adiab.append(input_jk_output_index(j,2,kmax))
coeff_adiab.append(1.0)
A_adiab = csc_matrix((coeff_adiab, (row_index_adiab, col_index_adiab)), shape=(jmax1*kmax,jmax1*kmax))
# ==== Lower boundary condition - no-slip (k=0) ====
for j in range(jmax1):
ind = input_jk_output_index(j,0,kmax)
b[ind] = zmu1[j,0]*cosl[j]/cor1[j,0]
# A[ind,ind] = 1.0
row_index.append(ind)
col_index.append(ind)
coeff.append(1.0)
A = csc_matrix((coeff, (row_index, col_index)), shape=(jmax1*kmax,jmax1*kmax))
# print 'is it ok till here????'
# === Solving the linear system ===
u2_adiab = spsolve(A_adiab, b_adiab)
u2 = spsolve(A, b)
# === Mapping back to 2D matrix ===
u_adiab = np.zeros((jmax1+2,kmax))
u = np.zeros((jmax1+2,kmax))
for j in range(jmax1):
for k in range(kmax):
u_adiab[j+1,k] = u2_adiab[j*kmax + k]
u[j+1,k] = u2[j*kmax + k]
u_MassCorr_adiab = np.zeros_like(u_adiab)
u_MassCorr_noslip = np.zeros_like(u)
# u_MassCorr[1:-1,:] = u[1:-1,:] * cor1[1:-1,:] / cosl[1:-1,np.newaxis]
u_MassCorr_adiab[1:-1,:] = u_adiab[1:-1,:] * cor1 / cosl[:,np.newaxis]
u_MassCorr_noslip[1:-1,:] = u[1:-1,:] * cor1 / cosl[:,np.newaxis]
# --- Initialize T_MassCorr to be output ---
u_Ref_regular_adiab = np.zeros_like(zmum)
u_Ref_regular_noslip = np.zeros_like(zmum)
u_MassCorr_regular_adiab = np.zeros_like(zmum)
u_MassCorr_regular_noslip = np.zeros_like(zmum)
T_Ref_regular_adiab = np.zeros_like(zmum)
T_Ref_regular_noslip = np.zeros_like(zmum)
T_MassCorr_regular_adiab = np.zeros_like(zmum)
T_MassCorr_regular_noslip = np.zeros_like(zmum)
for u_MassCorr,u_MassCorr_regular,u_Ref_regular,T_MassCorr_regular,T_Ref_regular,BCstring in \
zip([u_MassCorr_adiab,u_MassCorr_noslip],\
[u_MassCorr_regular_adiab,u_MassCorr_regular_noslip],\
[u_Ref_regular_adiab,u_Ref_regular_noslip],\
[T_MassCorr_regular_adiab,T_MassCorr_regular_noslip],\
[T_Ref_regular_adiab,T_Ref_regular_noslip],\
['Adiabatic','Noslip']):
# ---- Back out temperature correction here -----
T_MassCorr = np.zeros_like(u_MassCorr)
for k in range(1,kmax-2):
for j in range(2,jmax1,2): # This is temperature not potential temperature!!! Need to check.
# print 'alat['+str(j)+']=',alat[j]
# T_MassCorr[j,k] = T_MassCorr[j-2,k] - (2.*om*gl[j])*aa*hh*dmdz / (r0 * cosl[j]) * (u_MassCorr[j,k+1]-u_MassCorr[j,k-1])
T_MassCorr[j,k] = T_MassCorr[j-2,k] - (2.*om*gl[j-1])*aa*hh*dmdz / (r0 * cosl[j-1]) * (u_MassCorr[j-1,k+1]-u_MassCorr[j-1,k-1])
# ---- First do interpolation (gl is regular grid) ----
# f_Todd = interpolate.interp1d(gl[:-1:2],T_MassCorr[1:-1:2,k]) #[jmax x kmax]
#f_Todd = interpolate.interp1d(gl_2[::2],T_MassCorr[::2,k]) #[jmax x kmax]
#f_Todd_ex = extrap1d(f_Todd)
f_Todd = interpolate.interp1d(gl_2[::2],T_MassCorr[::2,k],
kind='linear',fill_value='extrapolate')
T_MassCorr[:,k] = f_Todd(gl_2[:])
# T_MassCorr[:,k] = f_Todd_ex(gl_2[:]) # Get all the points interpolated
# ---- Then do domain average ----
T_MC_mean = np.mean(T_MassCorr[:,k])
T_MassCorr[:,k] -= T_MC_mean
# --- First, interpolate MassCorr back to regular grid first ---
f_u_MassCorr = interpolate.interp1d(alat_2,u_MassCorr,axis=0, kind='linear') #[jmax x kmax]
u_MassCorr_regular[:,-nlat//2:] = f_u_MassCorr(ylat[-nlat//2:]).T
f_T_MassCorr = interpolate.interp1d(alat_2,T_MassCorr,axis=0, kind='linear') #[jmax x kmax]
T_MassCorr_regular[:,-nlat//2:] = f_T_MassCorr(ylat[-nlat//2:]).T
u_Ref = zmum[:,-nlat//2:] - u_MassCorr_regular[:,-nlat//2:]
T_ref = zm_PT[:,-nlat//2:] * np.exp(-np.arange(kmax)/7. * rkappa)[:,np.newaxis] - T_MassCorr_regular[:,-nlat//2:]
u_Ref_regular[:,-nlat//2:] = u_Ref
T_Ref_regular[:,-nlat//2:] = T_ref
#
#plot_all_ref_quan = False
if plot_all_ref_quan:
# --- height coordinate ---
height = np.array([i for i in range(kmax)]) # in [km]
# --- Colorbar scale ---
contour_int = np.arange(-120,145,5)
dT_contour_int = np.arange(-120,81,5)
T_contour_int = np.arange(160,321,5)
# --- Start plotting figure ---
fig = plt.subplots(figsize=(12,12))
plt.subplot(221)
plt.contourf(ylat[-nlat//2:],height[:-2],u_MassCorr_regular[:-2,-nlat//2:],contour_int)
plt.colorbar()
c1=plt.contour(ylat[-nlat//2:],height[:-2],u_MassCorr_regular[:-2,-nlat//2:],contour_int[::2],colors='k')
plt.clabel(c1,c1.levels,inline=True, fmt='%d', fontsize=10)
plt.title('$\Delta$ u '+tstamp)
plt.ylabel('height (km)')
plt.subplot(222)
plt.contourf(ylat[-nlat//2:],height[:-2],u_Ref[:-2,:],contour_int)
plt.colorbar()
c2=plt.contour(ylat[-nlat//2:],height[:-2],u_Ref[:-2,:],contour_int[::2],colors='k')
plt.clabel(c2,c2.levels,inline=True, fmt='%d', fontsize=10)
plt.title('$u_{REF}$ ('+BCstring+' BC)')
plt.subplot(223)
plt.contourf(ylat[-nlat//2:],height[:-2],T_MassCorr_regular[:-2,-nlat//2:],dT_contour_int)
plt.colorbar()
c3=plt.contour(ylat[-nlat//2:],height[:-2],T_MassCorr_regular[:-2,-nlat//2:],dT_contour_int,colors='k')
plt.clabel(c3,c3.levels,inline=True, fmt='%d', fontsize=10)
plt.title('$\Delta$ T')
plt.ylabel('height (km)')
plt.subplot(224)
plt.contourf(ylat[-nlat//2:],height[:-2],T_ref[:-2,:],T_contour_int)
plt.colorbar()
c4=plt.contour(ylat[-nlat//2:],height[:-2],T_ref[:-2,:],T_contour_int[::2],colors='k')
plt.clabel(c4,c4.levels,inline=True, fmt='%d', fontsize=10)
plt.title('$T_{REF}$')
plt.ylabel('height (km)')
plt.tight_layout()
plt.show()
#plt.savefig('/home/csyhuang/Dropbox/Research-code/Sep12_test3_'+BCstring+'_'+tstamp+'.png')
plt.close()
# This is for only outputing Delta_u and Uref for no-slip and adiabatic boundary conditions.
return u_MassCorr_regular_noslip,u_Ref_regular_noslip,T_MassCorr_regular_noslip,T_Ref_regular_noslip, u_MassCorr_regular_adiab,u_Ref_regular_adiab,T_MassCorr_regular_adiab,T_Ref_regular_adiab
# --- As a test whether the function Solve_Uref is working ---
if __name__ == "__main__":
import matplotlib.pyplot as plt
import numpy as np
nlat = 121
kmax = 49
jmax1 = nlat
# The codes below is just for testing purpose
tstamp = 'random'
ylat = np.linspace(-90,90,121,endpoint=True)
t1 = np.random.rand(nlat,kmax)+np.ones((nlat,kmax))*0.001
t2 = np.random.rand(nlat,kmax)+np.ones((nlat,kmax))*0.001
t3 = np.random.rand(nlat,kmax)+np.ones((nlat,kmax))*0.001
Delta_PT = np.random.rand(nlat)+np.ones((nlat))*0.001
zm_PT = np.random.rand(nlat,kmax)+np.ones((nlat,kmax))*0.001
Input_B0 = np.random.rand(nlat)+np.ones((nlat))*0.001
Input_B1 = np.random.rand(nlat)+np.ones((nlat))*0.001
eh = np.random.rand(jmax1, kmax)+np.ones((jmax1, kmax))*0.001
Delta_PT = np.sort(np.random.rand(jmax1))
xxx = solve_uref_both_bc(tstamp,t1,t2,ylat,t3,Delta_PT,zm_PT,Input_B0,Input_B1,use_real_Data=True)
print(xxx)
|
mit
| -4,612,132,919,616,612,000 | 43.200864 | 195 | 0.574151 | false |
jimklo/LRSignature
|
src/LRSignature/sign/Sign.py
|
1
|
6882
|
'''
Copyright 2011 SRI International
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Created on Apr 27, 2011
@author: jklo
'''
import re
import hashlib
import gnupg
import types
import os, copy
from LRSignature.errors import UnknownKeyException
from LRSignature.bencode import bencode
def _cmp_version(version1, version2):
def normalize(v):
return [int(x) for x in re.sub(r'(\.0+)*$','', v).split(".")]
return cmp(normalize(version1), normalize(version2))
class Sign_0_21(object):
'''
Class for signing LR envelopes following version 0.21.0 of the LR Specification:
see: https://docs.google.com/document/d/191BTary350To_4JokBUFZLFRMOEfGYrl_EHE6QZxUr8/edit
'''
def __init__(self, privateKeyID=None, passphrase=None, gnupgHome=os.path.expanduser(os.path.join("~", ".gnupg")), gpgbin="/usr/local/bin/gpg", publicKeyLocations=[], sign_everything=True):
'''
Constructor
'''
self.signatureMethod = "LR-PGP.1.0"
self.privateKeyID = privateKeyID
self.passphrase = passphrase
self.gnupgHome = gnupgHome
self.gpgbin = gpgbin
self.publicKeyLocations = publicKeyLocations
self.min_doc_version = "0.21.0"
self.sign_everything = sign_everything
self.gpg = gnupg.GPG(gnupghome=self.gnupgHome, gpgbinary=self.gpgbin)
secretKeys = self.gpg.list_keys(secret=True)
if privateKeyID != None:
privateKeyAvailable = False
for skey in secretKeys:
if skey["keyid"] == self.privateKeyID:
privateKeyAvailable = True
self.privateKeyInfo = skey
break
if skey["fingerprint"] == self.privateKeyID:
privateKeyAvailable = True
self.privateKeyInfo = skey
if privateKeyAvailable == False:
raise UnknownKeyException(self.privateKeyID)
def _version_check(self, doc):
return _cmp_version(doc["doc_version"], self.min_doc_version) >= 0
def _bnormal(self, obj = {}):
if isinstance(obj, types.NoneType):
return "null"
# Boolean needs to be checked before numeric types as Booleans are also IntType
elif isinstance(obj, types.BooleanType):
return str(obj).lower()
elif isinstance(obj, (types.FloatType, types.IntType, types.LongType, types.ComplexType)):
print "Dropping number: {0}\n".format(obj)
raise TypeError("Numbers not permitted")
# return str(obj)
elif isinstance(obj, types.StringType):
return obj
elif isinstance(obj, types.UnicodeType):
return obj
elif isinstance(obj, types.ListType):
nobj = []
for child in obj:
try:
nobj.append(self._bnormal(child))
except TypeError:
pass
return nobj
elif isinstance(obj, types.DictType):
for key in obj.keys():
try:
obj[key] = self._bnormal(obj[key])
except TypeError:
pass
return obj
else:
return obj
def _stripEnvelope(self, envelope={}):
fields = ["digital_signature", "publishing_node", "update_timestamp", "node_timestamp", "create_timestamp", "doc_ID", "_id", "_rev"]
sigObj = copy.deepcopy(envelope)
for field in fields:
if sigObj.has_key(field):
del sigObj[field]
return sigObj
def _buildCanonicalString(self, envelope = {}):
encoded = bencode(envelope)
return encoded
def _hash(self, msg):
hashedDigest = hashlib.sha256(msg.encode("utf-8")).hexdigest()
return hashedDigest
def get_message(self, envelope):
'''
Hashes the contents of an LR envelope in the following manner:
1. remove all fields from envelope except for the following:
"doc_type", "doc_version", "resource_data_type", "active", "submitter_type", "submitter",
"submitter_timestamp", "submitter_TTL", "submission_TOS", "submission_attribution",
"resource_locator", "keys", "resource_TTL", "payload_placement"
2. Bencode the remaining envelope [http://en.wikipedia.org/wiki/Bencode]
3. Hash the Bencoded string using a SHA256
4. Convert SHA256 to hexadecimal digest.
Returns digest as string.
'''
stripped = self._stripEnvelope(envelope)
normalized = self._bnormal(stripped)
canonical = self._buildCanonicalString(normalized)
hashedDigest = self._hash(canonical)
return hashedDigest
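    # Illustrative sketch (not part of the original class): the digest produced by
    # get_message() could be reproduced manually, assuming `envelope` is an LR envelope dict
    # and a Sign_0_21 instance named `signer` already exists:
    #   stripped = signer._stripEnvelope(envelope)        # drop volatile fields
    #   normalized = signer._bnormal(stripped)            # stringify booleans/None, drop numbers
    #   digest = hashlib.sha256(bencode(normalized).encode("utf-8")).hexdigest()
    #   assert digest == signer.get_message(envelope)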
def _get_privatekey_owner(self):
if self.privateKeyInfo.has_key("uids") and isinstance(self.privateKeyInfo["uids"], types.ListType):
return ", ".join(self.privateKeyInfo["uids"])
return None
def _get_sig_block(self, sigdata):
signature = {
"signature": sigdata,
"key_location": self.publicKeyLocations,
"signing_method": self.signatureMethod
}
pkowner = self._get_privatekey_owner()
if pkowner != None:
signature["key_owner"] = pkowner
return signature
def sign(self, envelope):
'''
        Hashes and signs an LR envelope according to version 0.21.0 of the LR Specification
'''
if self._version_check(envelope) or self.sign_everything:
msg = self.get_message(envelope)
signPrefs = {
"keyid": self.privateKeyID,
"passphrase": self.passphrase,
"clearsign": True
}
result = self.gpg.sign(msg, **signPrefs)
envelope["digital_signature"] = self._get_sig_block(result.data)
return envelope
if __name__ == "__main__":
sign = Sign_0_21("C37C805D164B052C", passphrase="2dimples")
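    # Illustrative continuation (not in the original file); assumes `envelope` is a dict
    # shaped like an LR envelope with doc_version >= 0.21.0 and that the key above is
    # present in the local keyring:
    #   signed = sign.sign(envelope)
    #   print signed["digital_signature"]["signing_method"]   # -> "LR-PGP.1.0"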
|
apache-2.0
| -3,173,528,914,175,409,700 | 35.226316 | 192 | 0.574397 | false |
PeridexisErrant/python-lnp
|
core/keybinds.py
|
1
|
2131
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Keybinding management."""
from __future__ import print_function, unicode_literals, absolute_import
import os, shutil
from . import helpers, paths, log
def read_keybinds():
"""Returns a list of keybinding files."""
return tuple([
os.path.basename(o) for o in helpers.get_text_files(
paths.get('keybinds'))])
def load_keybinds(filename):
"""
Overwrites Dwarf Fortress keybindings from a file.
Params:
filename
The keybindings file to use.
"""
if not filename.endswith('.txt'):
filename = filename + '.txt'
target = paths.get('init', 'interface.txt')
    log.i('Loading ' + filename + ' keybinds')
shutil.copyfile(paths.get('keybinds', filename), target)
def keybind_exists(filename):
"""
Returns whether or not a keybindings file already exists.
Params:
filename
The filename to check.
"""
if not filename.endswith('.txt'):
filename = filename + '.txt'
return os.access(paths.get('keybinds', filename), os.F_OK)
def save_keybinds(filename):
"""
Save current keybindings to a file.
Params:
filename
The name of the new keybindings file.
"""
if not filename.endswith('.txt'):
filename = filename + '.txt'
filename = paths.get('keybinds', filename)
log.i('Saving current keybinds as ' + filename)
shutil.copyfile(paths.get('init', 'interface.txt'), filename)
def delete_keybinds(filename):
"""
Deletes a keybindings file.
Params:
filename
The filename to delete.
"""
if not filename.endswith('.txt'):
filename = filename + '.txt'
    log.i('Deleting ' + filename + ' keybinds')
os.remove(os.path.join(paths.get('keybinds'), filename))
def get_installed_file():
"""Returns the name of the currently installed keybindings."""
files = helpers.get_text_files(paths.get('keybinds'))
current = paths.get('init', 'interface.txt')
result = helpers.detect_installed_file(current, files)
return os.path.basename(result)
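# Illustrative usage sketch (not part of the original module); assumes the LNP 'keybinds'
# and 'init' paths have already been registered via the paths module:
#   if not keybind_exists('laptop'):
#       save_keybinds('laptop')          # snapshot the current interface.txt
#   load_keybinds('laptop')              # copy it back over init/interface.txt
#   print(get_installed_file())          # -> 'laptop.txt'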
|
isc
| 7,327,233,250,507,444,000 | 28.191781 | 72 | 0.63069 | false |
spetitjean/XMG-2
|
contributions/core/pylibs/compgen/brick_parser.py
|
1
|
3655
|
import xmg.compgen.Symbol
import xmg.compgen.Grammar
# Punctuation
semicolon=xmg.compgen.Symbol.T(";")
colon=xmg.compgen.Symbol.T(":")
pipe=xmg.compgen.Symbol.T("|")
quote=xmg.compgen.Symbol.T("'")
arrow=xmg.compgen.Symbol.T("->")
equal=xmg.compgen.Symbol.T("=")
coma=xmg.compgen.Symbol.T(',')
openpred=xmg.compgen.Symbol.T("(")
closepred=xmg.compgen.Symbol.T(")")
star=xmg.compgen.Symbol.T("*")
plus=xmg.compgen.Symbol.T("+")
question=xmg.compgen.Symbol.T("?")
sepMacro=xmg.compgen.Symbol.T("//")
endsection=xmg.compgen.Symbol.T("%%")
# Terminals
action=xmg.compgen.Symbol.T("action")
_id=xmg.compgen.Symbol.T("identifier")
sqstring=xmg.compgen.Symbol.T("sqstring")
# Non Terminals
Macro=xmg.compgen.Symbol.NT("Macro")
ID_MACRO=xmg.compgen.Symbol.NT("ID_MACRO")
MacroOp=xmg.compgen.Symbol.NT("MacroOp")
MacroOpP=xmg.compgen.Symbol.NT("MacroOpP")
MacroOpS=xmg.compgen.Symbol.NT("MacroOpS")
MacroOpQ=xmg.compgen.Symbol.NT("MacroOpQ")
IDS=xmg.compgen.Symbol.NT("IDS")
ID=xmg.compgen.Symbol.NT("ID")
RID=xmg.compgen.Rule.Rule(IDS,())
RID2=xmg.compgen.Rule.Rule(IDS,(ID_MACRO,IDS))
RID3=xmg.compgen.Rule.Rule(ID,(_id,))
RID4=xmg.compgen.Rule.Rule(ID,(sqstring,))
RID5=xmg.compgen.Rule.Rule(ID_MACRO,(Macro,))
RID6=xmg.compgen.Rule.Rule(ID_MACRO,(ID,))
RID7=xmg.compgen.Rule.Rule(Macro,(openpred,ID,closepred,MacroOp))
RID71=xmg.compgen.Rule.Rule(Macro,(openpred,ID,sepMacro,ID,closepred,MacroOp))
RID8=xmg.compgen.Rule.Rule(MacroOp,(MacroOpP,))
RID9=xmg.compgen.Rule.Rule(MacroOp,(MacroOpS,))
RID10=xmg.compgen.Rule.Rule(MacroOp,(MacroOpQ,))
RID11=xmg.compgen.Rule.Rule(MacroOpP,(plus,))
RID12=xmg.compgen.Rule.Rule(MacroOpS,(star,))
RID13=xmg.compgen.Rule.Rule(MacroOpQ,(question,))
# Parsing Terminals
TD=xmg.compgen.Symbol.NT("TD")
T=xmg.compgen.Symbol.T("%token")
RT=xmg.compgen.Rule.Rule(TD,(T,_id))
# Parsing Non-terminals
NTD=xmg.compgen.Symbol.NT("NTD")
NT=xmg.compgen.Symbol.T("%type")
RNT=xmg.compgen.Rule.Rule(NTD,(NT,_id,_id))
# Parsing Externs
EXTD=xmg.compgen.Symbol.NT("EXTD")
EXT=xmg.compgen.Symbol.T("%ext")
REXT=xmg.compgen.Rule.Rule(EXTD,(EXT,_id,_id))
# Parsing Rules
RuD=xmg.compgen.Symbol.NT("RuD")
RuleParts=xmg.compgen.Symbol.NT("RuleParts")
RulePart=xmg.compgen.Symbol.NT("RulePart")
RRu=xmg.compgen.Rule.Rule(RuD,(_id,colon,RuleParts))
RRu1=xmg.compgen.Rule.Rule(RuleParts,(RulePart,semicolon))
RRu2=xmg.compgen.Rule.Rule(RuleParts,(RulePart,pipe,RuleParts))
RRu3=xmg.compgen.Rule.Rule(RulePart,(IDS,))
RRu4=xmg.compgen.Rule.Rule(RulePart,(IDS,action))
# Grammar file
S=xmg.compgen.Symbol.NT("S")
Decls=xmg.compgen.Symbol.NT("Decls")
Decl=xmg.compgen.Symbol.NT("Decl")
Rules=xmg.compgen.Symbol.NT("Rules")
RDecls=xmg.compgen.Rule.Rule(Decls,())
RDecls1=xmg.compgen.Rule.Rule(Decls,(Decl,Decls))
RDecl=xmg.compgen.Rule.Rule(Decl,(TD,))
RDecl1=xmg.compgen.Rule.Rule(Decl,(NTD,))
RDecl2=xmg.compgen.Rule.Rule(Decl,(EXTD,))
RRules=xmg.compgen.Rule.Rule(Rules,(RuD,))
RRules1=xmg.compgen.Rule.Rule(Rules,(RuD,Rules,))
R=xmg.compgen.Rule.Rule(S,(Decls,endsection,Rules,endsection))
# S ::= Decls endsection Rules endsection
# Decls ::= (empty)
#         | Decl Decls
# Decl ::= TD | NTD | EXTD
# TD ::= %token identifier
# NTD ::= %type identifier identifier
# EXTD ::= %ext identifier identifier
# Rules ::= RuD
#         | RuD Rules
# RuD ::= identifier : RuleParts
# RuleParts ::= RulePart ;
#             | RulePart | RuleParts
# RulePart ::= IDS | IDS action
# IDS ::= (empty) | ID_MACRO IDS
# ID_MACRO ::= Macro | ID
# ID ::= identifier | sqstring
# Macro ::= ( ID ) MacroOp | ( ID // ID ) MacroOp
# MacroOp ::= MacroOpP | MacroOpS | MacroOpQ
# MacroOpP ::= +
# MacroOpS ::= *
# MacroOpQ ::= ?
G=xmg.compgen.Grammar.Grammar((R,RID,RID2,RID3,RID4,RID5,RID6,RID7,RID71,RID8,RID9,RID10,RID11,RID12,RID13,RT,RNT,REXT,RRu,RRu1,RRu2,RRu3,RRu4,RDecls,RDecls1,RDecl,RDecl1,RDecl2,RRules,RRules1))
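# Illustrative note (not part of the original file): each Rule pairs a left-hand-side
# symbol with a tuple of right-hand-side symbols, so for example
#   xmg.compgen.Rule.Rule(TD, (T, _id))
# encodes the production  TD ::= %token identifier  listed in the comment block above,
# and the Grammar G simply collects all such productions for the parser generator.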
|
gpl-3.0
| -1,720,021,076,280,352,800 | 28.715447 | 194 | 0.720657 | false |
isbadawi/maze-generation
|
io/canvas.py
|
1
|
1094
|
import Tkinter as tk
import sys
class Canvas(object):
def __init__(self, width, height, size):
self.width = width
self.height = height
self.size = size
self.root = tk.Tk()
self.root.title('Maze Generation Visualizer')
self.canvas = tk.Canvas(
self.root,
width=width*size,
height=height*size
)
self.canvas.grid(row=0, column=0)
def do(self, func):
self.rect((0, 0), (self.width, self.height))
self.root.after(50, func)
self.root.mainloop()
def line(self, (x1, y1), (x2, y2), color='white'):
x1 *= self.size
y1 *= self.size
x2 *= self.size
y2 *= self.size
rect = self.canvas.create_line((x1, y1, x2, y2), fill=color)
self.canvas.update_idletasks()
def rect(self, (x1, y1), (x2, y2), color='white'):
x1 *= self.size
y1 *= self.size
x2 *= self.size
y2 *= self.size
self.canvas.create_rectangle((x1, y1, x2, y2), fill=color)
self.canvas.update_idletasks()
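# Illustrative usage sketch (not part of the original module); the sizes below are
# hypothetical:
#   canvas = Canvas(width=40, height=30, size=10)   # 40x30 cells, 10 px per cell
#   canvas.do(lambda: canvas.line((0, 0), (40, 30), color='red'))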
|
mit
| -2,285,130,573,094,090,200 | 27.789474 | 68 | 0.538391 | false |
park-bench/gpgmailer
|
src/usr/share/gpgmailer/gpgmailerd.py
|
1
|
25135
|
#!/usr/bin/python3
# Copyright 2015-2021 Joel Allen Luellwitz and Emily Frost
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Daemon for sending PGP encrypted e-mail."""
# TODO: Eventually consider running in a chroot or jail. (gpgmailer issue 17)
__author__ = 'Joel Luellwitz, Emily Frost, and Brittney Scaccia'
__version__ = '0.8'
import datetime
import grp
import logging
import os
import pwd
import signal
import stat
import subprocess
import sys
import time
import traceback
import configparser
import daemon
from lockfile import pidlockfile
import psutil
import gnupg
from parkbenchcommon import confighelper
import gpgkeyring
import gpgkeyverifier
import gpgmailer
import gpgmailmessage
# Constants
PROGRAM_NAME = 'gpgmailer'
CONFIGURATION_PATHNAME = os.path.join('/etc', PROGRAM_NAME, '%s.conf' % PROGRAM_NAME)
SYSTEM_PID_DIR = '/run'
PROGRAM_PID_DIRS = PROGRAM_NAME
PID_FILE = '%s.pid' % PROGRAM_NAME
LOG_DIR = os.path.join('/var/log', PROGRAM_NAME)
LOG_FILE = '%s.log' % PROGRAM_NAME
SYSTEM_SPOOL_DIR = '/var/spool'
PARTIAL_DIR = 'partial'
OUTBOX_DIR = 'outbox'
OUTBOX_PATHNAME = os.path.join(SYSTEM_SPOOL_DIR, PROGRAM_NAME, OUTBOX_DIR)
PROCESS_USERNAME = PROGRAM_NAME
PROCESS_GROUP_NAME = PROGRAM_NAME
PROGRAM_UMASK = 0o027 # -rw-r----- and drwxr-x---
class InitializationException(Exception):
"""Indicates an expected fatal error occurred during program initialization.
Initialization is implied to mean, before daemonization.
"""
def get_user_and_group_ids():
"""Get user and group information for dropping privileges.
Returns the user and group IDs that the program should eventually run as.
"""
try:
program_user = pwd.getpwnam(PROCESS_USERNAME)
except KeyError as key_error:
message = 'User %s does not exist.' % PROCESS_USERNAME
raise InitializationException(message) from key_error
try:
program_group = grp.getgrnam(PROCESS_GROUP_NAME)
except KeyError as key_error:
message = 'Group %s does not exist.' % PROCESS_GROUP_NAME
raise InitializationException(message) from key_error
return program_user.pw_uid, program_group.gr_gid
def read_configuration_and_create_logger(program_uid, program_gid):
"""Reads the configuration file and creates the application logger. This is done in the
same function because part of the logger creation is dependent upon reading the
configuration file.
program_uid: The system user ID this program should drop to before daemonization.
program_gid: The system group ID this program should drop to before daemonization.
Returns the read system config, a confighelper instance, and a logger instance.
"""
print('Reading %s...' % CONFIGURATION_PATHNAME)
if not os.path.isfile(CONFIGURATION_PATHNAME):
raise InitializationException(
'Configuration file %s does not exist. Quitting.' % CONFIGURATION_PATHNAME)
config_file = configparser.RawConfigParser()
config_file.read(CONFIGURATION_PATHNAME)
config = {}
config_helper = confighelper.ConfigHelper()
# Figure out the logging options so that can start before anything else.
# TODO: Eventually add a verify_string_list method. (issue 20)
config['log_level'] = config_helper.verify_string_exists(config_file, 'log_level')
# Create logging directory. drwxr-x--- gpgmailer gpgmailer
log_mode = stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IXGRP
# TODO: Look into defaulting the logging to the console until the program gets more
# bootstrapped. (issue 18)
print('Creating logging directory %s.' % LOG_DIR)
if not os.path.isdir(LOG_DIR):
# Will throw exception if directory cannot be created.
os.makedirs(LOG_DIR, log_mode)
os.chown(LOG_DIR, program_uid, program_gid)
os.chmod(LOG_DIR, log_mode)
# Temporarily drop permissions and create the handle to the logger.
print('Configuring logger.')
os.setegid(program_gid)
os.seteuid(program_uid)
config_helper.configure_logger(os.path.join(LOG_DIR, LOG_FILE), config['log_level'])
logger = logging.getLogger(__name__)
logger.info('Verifying non-logging configuration.')
config['use_ramdisk_spool'] = config_helper.verify_boolean_exists(
config_file, 'use_ramdisk_spool')
# Reads the key configuration.
config['gpg_dir'] = config_helper.verify_string_exists(config_file, 'gpg_dir')
config['sender_string'] = config_helper.verify_string_exists(config_file, 'sender')
config['sender'] = {}
config['sender']['password'] = config_helper.verify_password_exists(
config_file, 'signing_key_passphrase')
config['recipients_string'] = config_helper.verify_string_exists(
config_file, 'recipients')
# Convert the key expiration threshold into seconds because expiry dates are
# stored in unix time. The config value should be days.
expiration_warning_threshold_days = config_helper.verify_integer_within_range(
config_file, 'expiration_warning_threshold', lower_bound=1)
config['expiration_warning_threshold'] = expiration_warning_threshold_days * 86400
config['main_loop_delay'] = config_helper.verify_number_within_range(
config_file, 'main_loop_delay', lower_bound=0.000001) # In seconds.
config['main_loop_duration'] = config_helper.verify_number_within_range(
config_file, 'main_loop_duration', lower_bound=0.000001) # In seconds.
config['key_check_interval'] = config_helper.verify_number_within_range(
config_file, 'key_check_interval', lower_bound=0.000001) # In seconds.
config['default_subject'] = config_helper.get_string_if_exists(
config_file, 'default_subject')
# TODO: Eventually add verify_boolean_exists. (issue 19)
config['allow_expired_signing_key'] = (config_helper.verify_string_exists(
config_file, 'allow_expired_signing_key').lower() == 'true')
return config, config_helper, logger
def raise_exception(exception):
"""Raises an exception.
exception: Any exception.
"""
# TODO: Add custom error message and chain this exception when we move to Python 3.
# (issue 15)
raise exception
# TODO: Consider checking ACLs. (issue 22)
def verify_safe_file_permissions(config, program_uid):
"""Crashes the application if unsafe file and directory permissions exist on application
configuration files.
config: The program config dictionary to read the application GPG keyring location from.
program_uid: The system user ID that should own the GPG keyring.
"""
# The configuration file should be owned by root.
config_file_stat = os.stat(CONFIGURATION_PATHNAME)
if config_file_stat.st_uid != 0:
raise InitializationException(
'File %s must be owned by root.' % CONFIGURATION_PATHNAME)
if bool(config_file_stat.st_mode & stat.S_IWGRP):
raise InitializationException(
"File %s cannot be writable via the group access permission."
% CONFIGURATION_PATHNAME)
if bool(config_file_stat.st_mode & (stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)):
raise InitializationException(
"File %s cannot have 'other user' access permissions set."
% CONFIGURATION_PATHNAME)
if not os.path.isdir(config['gpg_dir']):
raise InitializationException('GPG keyring %s does not exist.' % config['gpg_dir'])
logger.debug('Recursively checking %s for correct permissions.', config['gpg_dir'])
for directory, subdirectories, files in os.walk(
config['gpg_dir'], onerror=raise_exception, followlinks=True):
for index, filename in enumerate(files):
files[index] = os.path.join(directory, filename)
for inode in [directory] + files:
gpg_dir_stat = os.stat(inode)
if gpg_dir_stat.st_uid != program_uid:
raise InitializationException(
'Directory %s and all its contents must be owned by %s.' % (
config['gpg_dir'], PROGRAM_NAME))
if bool(os.stat(config['gpg_dir']).st_mode & (
stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)):
raise InitializationException(
"Directory %s cannot have 'other user' access permissions set." %
config['gpg_dir'])
def parse_key_config_string(configuration_option, key_config_string):
"""Parses the e-mail:fingerprint format used in the application config file to specify
e-mail/GPG key pairs.
configuration_option: The name of the configuration option being parsed.
key_config_string: The formatted string to parse.
"""
key_split = key_config_string.split(':')
if len(key_split) != 2:
raise InitializationException(
'Key config %s for %s does not contain a colon or is malformed.' %
(key_config_string, configuration_option))
if not key_split[0]:
raise InitializationException(
'Key config %s for %s is missing an e-mail address.' %
(key_config_string, configuration_option))
if not key_split[1]:
raise InitializationException(
'Key config %s for %s is missing a key fingerprint.' %
(key_config_string, configuration_option))
# TODO: Eventually verify e-mail format. (issue 34)
key_dict = {'email': key_split[0].strip(),
'fingerprint': key_split[1].strip()}
return key_dict
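    # Illustrative example (not part of the original file): a configuration value such as
    #   'admin@example.com:0123456789ABCDEF0123456789ABCDEF01234567'   (hypothetical)
    # parses to {'email': 'admin@example.com',
    #            'fingerprint': '0123456789ABCDEF0123456789ABCDEF01234567'}.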
def parse_key_config(config):
"""Does further processing on the config dictionary to parse and store GPG key
information.
config: The config dictionary to process.
"""
sender_key_data = parse_key_config_string('sender', config['sender_string'])
config['sender']['fingerprint'] = sender_key_data['fingerprint']
config['sender']['email'] = sender_key_data['email']
recipients_config_list = config['recipients_string'].split(',')
recipients = []
for recipient_config in recipients_config_list:
recipients.append(parse_key_config_string('recipients', recipient_config))
config['recipients'] = recipients
def signature_test(gpg_home, fingerprint, passphrase):
"""Tests if it is possible for a GPG key to sign an arbitrary string.
gpg_home: The GnuPG directory to read keys from.
fingerprint: The fingerprint of the key used to sign.
passphrase: The passphrase for the signing key.
Returns True if there are no signing errors. False otherwise.
"""
# Clear the GPG agent cache so we can be sure that the supplied passphrase is the correct
# passphrase.
clear_gpg_agent_cache()
# TODO: Eventually, parse gpg output to notify that the password was wrong. (issue 47)
success = False
gpg = gnupg.GPG(gnupghome=gpg_home)
signature_test_result = gpg.sign(
"I've got a lovely bunch of coconuts.", detach=True, keyid=fingerprint,
passphrase=passphrase)
if str(signature_test_result).strip() == '':
logger.debug("Signature test for %s failed. Check the sender key's passphrase.",
fingerprint)
else:
logger.info('Signature test for %s passed.', fingerprint)
success = True
return success
def clear_gpg_agent_cache():
""" Clears the gpg-agent cache. """
for process in psutil.process_iter(['create_time', 'name', 'pid', 'username']):
if process.name() == 'gpg-agent' and process.username() == PROCESS_USERNAME:
process.send_signal(signal.SIGHUP)
def check_sender_key(gpg_keyring, config, expiration_date):
"""Checks the sender GPG key in the config file and exits if it is missing from the key
ring, untrusted, unsigned, or is not a 40-character hex string. Also checks and stores
whether the sender key can be used to sign messages.
gpg_keyring: The GpgKeyring object in which to look for GPG keys.
config: The config dict to read the sender GPG key information from.
    expiration_date: The date the signing key is validated to not expire through.
"""
logger.info('Checking sender key for validity and expiration.')
if not gpg_keyring.is_trusted(config['sender']['fingerprint']):
raise InitializationException('Signing key is not ultimately trusted. Exiting.')
elif not gpg_keyring.is_signed(config['sender']['fingerprint']):
raise InitializationException('Signing key is not signed. Exiting.')
elif not gpg_keyring.is_current(config['sender']['fingerprint'], expiration_date):
formatted_expiration_date = datetime.datetime.fromtimestamp(
gpg_keyring.get_key_expiration_date(
config['sender']['fingerprint'])).strftime('%Y-%m-%d %H:%M:%S')
logger.warning('Sender key expired on %s.', formatted_expiration_date)
config['sender']['can_sign'] = False
elif not signature_test(
config['gpg_dir'], config['sender']['fingerprint'],
config['sender']['password']):
raise InitializationException('Sender key failed the signature test and the key is '
"not expired. Check the sender key's passphrase.")
else:
logger.debug('Sender key passed signature test.')
config['sender']['can_sign'] = True
def verify_signing_config(config):
"""Checks the sending GPG key and the program configuration to determine if sending
unsigned e-mail is allowed. Crashes if the sending key cannot sign and sending unsigned
e-mail is disabled.
config: The program config dictionary to read the key configuration from.
"""
if not config['allow_expired_signing_key'] and not config['sender']['can_sign']:
raise InitializationException(
'The sender key with fingerprint %s can not sign and unsigned e-mail is not '
'allowed. Exiting.' % config['sender']['fingerprint'])
elif not config['sender']['can_sign']:
logger.warning('The sender key is unable to sign because it has probably expired. '
'Gpgmailer will send unsigned messages.')
else:
logger.debug('Outgoing e-mails will be signed.')
def create_directory(system_path, program_dirs, uid, gid, mode):
"""Creates directories if they do not exist and sets the specified ownership and
permissions.
system_path: The system path that the directories should be created under. These are
assumed to already exist. The ownership and permissions on these directories are not
modified.
program_dirs: A string representing additional directories that should be created under
the system path that should take on the following ownership and permissions.
uid: The system user ID that should own the directory.
gid: The system group ID that should be associated with the directory.
mode: The unix standard 'mode bits' that should be associated with the directory.
"""
logger.info('Creating directory %s.', os.path.join(system_path, program_dirs))
path = system_path
for directory in program_dirs.strip('/').split('/'):
path = os.path.join(path, directory)
if not os.path.isdir(path):
# Will throw exception if file cannot be created.
os.makedirs(path, mode)
os.chown(path, uid, gid)
os.chmod(path, mode)
def check_if_mounted_as_ramdisk(pathname):
"""Checks if a directory is mounted as a ramdisk.
pathname: The directory to check.
Returns true if the directory is mounted as a ramdisk. False otherwise.
"""
return 'none on {0} type tmpfs'.format(pathname) in str(subprocess.check_output('mount'))
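# Design note (not part of the original file): parsing `mount` output is somewhat fragile.
# A sturdier check on Linux might inspect the mount table via psutil (already imported above),
# e.g. (sketch):
#   any(p.mountpoint == pathname and p.fstype == 'tmpfs'
#       for p in psutil.disk_partitions(all=True))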
def create_spool_directories(use_ramdisk, program_uid, program_gid):
"""Mounts the program spool directory as a ramdisk and creates the partial and outbox
subfolders. Exit if any part of this method fails.
use_ramdisk: A boolean indicating whether to mount the spool directory as a ramdisk.
program_uid: The system user ID that should own all the spool directories.
program_gid: The system group ID that should be assigned to all the spool directories.
"""
logger.info('Creating spool directories.')
try:
create_directory(
SYSTEM_SPOOL_DIR, PROGRAM_NAME, program_uid, program_gid,
# drwx--x--- gpgmailer gpgmailer
stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IXGRP)
except Exception as exception:
logger.critical('Could not create program spool directory. %s: %s',
type(exception).__name__, str(exception))
raise exception
spool_dir = os.path.join(SYSTEM_SPOOL_DIR, PROGRAM_NAME)
# TODO: Log a warning when use_ramdisk_spool is false and ramdisk exists. (issue 56)
if use_ramdisk:
# TODO: Use parkbenchcommon.ramdisk here. (issue 51)
mounted_as_ramdisk = check_if_mounted_as_ramdisk(spool_dir)
# If directory is not mounted as a ramdisk and there is something in the directory,
# log a warning.
if os.listdir(spool_dir) != [] and not mounted_as_ramdisk:
logger.warning('Program spool directory %s is configured to be a ramdisk, but '
'the directory is not empty and not already mounted as a '
'ramdisk.', spool_dir)
# If the program spool directory is not already mounted as a ramdisk, mount it as a
# ramdisk.
if not mounted_as_ramdisk:
logger.info('Attempting to mount the program spool directory as a ramdisk.')
subprocess.call(['mount', '-t', 'tmpfs', '-o', 'size=25%', 'none', spool_dir])
if not check_if_mounted_as_ramdisk(spool_dir):
raise InitializationException(
'Program spool directory could not be mounted as a ramdisk. Startup failed.')
try:
# TODO: File Permissions Alone Should Be Enough to Protect Files In
# 'partial' and 'outbox'. (issue 26)
create_directory(
spool_dir, PARTIAL_DIR, program_uid, program_gid,
stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IWGRP | stat.S_IXGRP |
stat.S_ISGID | stat.S_ISVTX) # drwx-ws--T gpgmailer gpgmailer
create_directory(
spool_dir, OUTBOX_DIR, program_uid, program_gid,
stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IWGRP | stat.S_IXGRP |
stat.S_ISGID | stat.S_ISVTX) # drwx-ws--T gpgmailer gpgmailer
except Exception as exception:
logger.critical('Could not create required spool sub-directories. %s: %s',
type(exception).__name__, str(exception))
raise exception
def drop_permissions_forever(uid, gid):
"""Drops escalated permissions forever to the specified user and group.
uid: The system user ID to drop to.
gid: The system group ID to drop to.
"""
logger.info('Dropping permissions for user %s.', PROCESS_USERNAME)
os.initgroups(PROCESS_USERNAME, gid)
os.setgid(gid)
os.setuid(uid)
def send_expiration_warning_message(gpg_keyring, config, expiration_date):
"""If needed, queues a warning message about keys that have expired or will be expiring
soon.
gpg_keyring: The GpgKeyring object in which to look for GPG keys.
config: The config dict to read sender and recipient GPG key information from.
    expiration_date: The date the signing key was validated to not expire through.
    Returns a GpgKeyVerifier object initialized with gpg_keyring and config. This is used
later.
"""
gpg_key_verifier = gpgkeyverifier.GpgKeyVerifier(gpg_keyring, config)
expiration_warning_message = gpg_key_verifier.get_expiration_warning_message(
expiration_date)
if expiration_warning_message is not None:
logger.warning('Sending expiration warning message email.')
# gpgmailer.py will prepend the actual warning message.
message = 'Gpgmailer has just restarted.'
mail_message = gpgmailmessage.GpgMailMessage()
mail_message.set_subject(config['default_subject'])
mail_message.set_body(message)
mail_message.queue_for_sending()
logger.debug('Finished initial key check.')
return gpg_key_verifier
def sig_term_handler(signal, stack_frame):
"""Signal handler for SIGTERM. Quits when SIGTERM is received.
signal: Object representing the signal thrown.
stack_frame: Represents the stack frame.
"""
logger.info('SIGTERM received. Quitting.')
sys.exit(0)
def setup_daemon_context(log_file_handle, program_uid, program_gid):
"""Creates the daemon context. Specifies daemon permissions, PID file information, and
the signal handler.
log_file_handle: The file handle to the log file.
program_uid: The system user ID that should own the daemon process.
program_gid: The system group ID that should be assigned to the daemon process.
Returns the daemon context.
"""
daemon_context = daemon.DaemonContext(
working_directory='/',
pidfile=pidlockfile.PIDLockFile(
os.path.join(SYSTEM_PID_DIR, PROGRAM_PID_DIRS, PID_FILE)),
umask=PROGRAM_UMASK,
)
daemon_context.signal_map = {
signal.SIGTERM: sig_term_handler,
}
daemon_context.files_preserve = [log_file_handle]
# Set the UID and GID to 'gpgmailer' user and group.
daemon_context.uid = program_uid
daemon_context.gid = program_gid
return daemon_context
def main():
"""The parent function for the entire program. It loads and verifies configuration,
daemonizes, and starts the main program loop.
"""
os.umask(PROGRAM_UMASK)
program_uid, program_gid = get_user_and_group_ids()
global logger
config, config_helper, logger = read_configuration_and_create_logger(
program_uid, program_gid)
try:
verify_safe_file_permissions(config, program_uid)
parse_key_config(config)
# Re-establish root permissions to create required directories.
os.seteuid(os.getuid())
os.setegid(os.getgid())
# Non-root users cannot create files in /run, so create a directory that can be
# written to. Full access to user only. drwx------ gpgmailer gpgmailer
create_directory(SYSTEM_PID_DIR, PROGRAM_PID_DIRS, program_uid, program_gid,
stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
# Do this relatively last because gpgmailmessage assumes the daemon has started if
# these directories exist.
create_spool_directories(config['use_ramdisk_spool'], program_uid, program_gid)
# Configuration has been read and directories setup. Now drop permissions forever.
drop_permissions_forever(program_uid, program_gid)
# Make sure the sender key isn't going to expire during the first loop iteration.
expiration_date = time.time() + config['main_loop_duration']
gpg_keyring = gpgkeyring.GpgKeyRing(config['gpg_dir'])
check_sender_key(gpg_keyring, config, expiration_date)
verify_signing_config(config)
# We do this here because we don't want to queue an e-mail if a configuration setting
# can cause the program to crash later. This is to avoid a lot of identical queued
# warning e-mails.
gpg_key_verifier = send_expiration_warning_message(
gpg_keyring, config, expiration_date)
logger.info('Verification complete.')
daemon_context = setup_daemon_context(
config_helper.get_log_file_handle(), program_uid, program_gid)
logger.debug('Initializing GpgMailer.')
gpg_mailer = gpgmailer.GpgMailer(
config, gpg_keyring, gpg_key_verifier, OUTBOX_PATHNAME)
logger.info('Daemonizing...')
with daemon_context:
gpg_mailer.start_monitoring()
except BaseException as exception:
if isinstance(exception, Exception):
logger.critical('Fatal %s: %s\n%s', type(exception).__name__, str(exception),
traceback.format_exc())
# Kill the gpg-agent owned by gpgmailer because otherwise systemd will think
# gpgmailer is still running because gpg-agent is keeping the CGroup alive.
for process in psutil.process_iter(['create_time', 'name', 'pid', 'username']):
if process.name() == 'gpg-agent' and process.username() == PROCESS_USERNAME:
process.kill()
raise exception
if __name__ == "__main__":
main()
|
gpl-3.0
| -2,618,577,787,207,613,400 | 40.003263 | 93 | 0.677541 | false |
sajuptpm/neutron-ipam
|
neutron/plugins/vmware/dbexts/networkgw_db.py
|
1
|
24050
|
# Copyright 2013 VMware, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sqlalchemy as sa
from sqlalchemy import orm
from sqlalchemy.orm import exc as sa_orm_exc
from webob import exc as web_exc
from neutron.api.v2 import attributes
from neutron.api.v2 import base
from neutron.common import exceptions
from neutron.db import model_base
from neutron.db import models_v2
from neutron.openstack.common import log as logging
from neutron.openstack.common import uuidutils
from neutron.plugins.vmware.extensions import networkgw
LOG = logging.getLogger(__name__)
DEVICE_OWNER_NET_GW_INTF = 'network:gateway-interface'
NETWORK_ID = 'network_id'
SEGMENTATION_TYPE = 'segmentation_type'
SEGMENTATION_ID = 'segmentation_id'
ALLOWED_CONNECTION_ATTRIBUTES = set((NETWORK_ID,
SEGMENTATION_TYPE,
SEGMENTATION_ID))
# Constants for gateway device operational status
STATUS_UNKNOWN = "UNKNOWN"
STATUS_ERROR = "ERROR"
STATUS_ACTIVE = "ACTIVE"
STATUS_DOWN = "DOWN"
class GatewayInUse(exceptions.InUse):
message = _("Network Gateway '%(gateway_id)s' still has active mappings "
"with one or more neutron networks.")
class GatewayNotFound(exceptions.NotFound):
message = _("Network Gateway %(gateway_id)s could not be found")
class GatewayDeviceInUse(exceptions.InUse):
message = _("Network Gateway Device '%(device_id)s' is still used by "
"one or more network gateways.")
class GatewayDeviceNotFound(exceptions.NotFound):
message = _("Network Gateway Device %(device_id)s could not be found.")
class NetworkGatewayPortInUse(exceptions.InUse):
message = _("Port '%(port_id)s' is owned by '%(device_owner)s' and "
"therefore cannot be deleted directly via the port API.")
class GatewayConnectionInUse(exceptions.InUse):
message = _("The specified mapping '%(mapping)s' is already in use on "
"network gateway '%(gateway_id)s'.")
class MultipleGatewayConnections(exceptions.NeutronException):
message = _("Multiple network connections found on '%(gateway_id)s' "
"with provided criteria.")
class GatewayConnectionNotFound(exceptions.NotFound):
message = _("The connection %(network_mapping_info)s was not found on the "
"network gateway '%(network_gateway_id)s'")
class NetworkGatewayUnchangeable(exceptions.InUse):
message = _("The network gateway %(gateway_id)s "
"cannot be updated or deleted")
# Add exceptions to HTTP Faults mappings
base.FAULT_MAP.update({GatewayInUse: web_exc.HTTPConflict,
NetworkGatewayPortInUse: web_exc.HTTPConflict,
GatewayConnectionInUse: web_exc.HTTPConflict,
GatewayConnectionNotFound: web_exc.HTTPNotFound,
MultipleGatewayConnections: web_exc.HTTPConflict})
class NetworkConnection(model_base.BASEV2, models_v2.HasTenant):
"""Defines a connection between a network gateway and a network."""
# We use port_id as the primary key as one can connect a gateway
    # to a network in multiple ways (and we cannot use the same port for
# more than a single gateway)
network_gateway_id = sa.Column(sa.String(36),
sa.ForeignKey('networkgateways.id',
ondelete='CASCADE'))
network_id = sa.Column(sa.String(36),
sa.ForeignKey('networks.id', ondelete='CASCADE'))
segmentation_type = sa.Column(
sa.Enum('flat', 'vlan',
name='networkconnections_segmentation_type'))
segmentation_id = sa.Column(sa.Integer)
__table_args__ = (sa.UniqueConstraint(network_gateway_id,
segmentation_type,
segmentation_id),)
# Also, storing port id comes back useful when disconnecting a network
# from a gateway
port_id = sa.Column(sa.String(36),
sa.ForeignKey('ports.id', ondelete='CASCADE'),
primary_key=True)
class NetworkGatewayDeviceReference(model_base.BASEV2):
id = sa.Column(sa.String(36), primary_key=True)
network_gateway_id = sa.Column(sa.String(36),
sa.ForeignKey('networkgateways.id',
ondelete='CASCADE'),
primary_key=True)
interface_name = sa.Column(sa.String(64), primary_key=True)
class NetworkGatewayDevice(model_base.BASEV2, models_v2.HasId,
models_v2.HasTenant):
nsx_id = sa.Column(sa.String(36))
# Optional name for the gateway device
name = sa.Column(sa.String(255))
# Transport connector type. Not using enum as range of
# connector types might vary with backend version
connector_type = sa.Column(sa.String(10))
# Transport connector IP Address
connector_ip = sa.Column(sa.String(64))
# operational status
status = sa.Column(sa.String(16))
class NetworkGateway(model_base.BASEV2, models_v2.HasId,
models_v2.HasTenant):
"""Defines the data model for a network gateway."""
name = sa.Column(sa.String(255))
# Tenant id is nullable for this resource
tenant_id = sa.Column(sa.String(36))
default = sa.Column(sa.Boolean())
devices = orm.relationship(NetworkGatewayDeviceReference,
backref='networkgateways',
cascade='all,delete')
network_connections = orm.relationship(NetworkConnection, lazy='joined')
class NetworkGatewayMixin(networkgw.NetworkGatewayPluginBase):
gateway_resource = networkgw.GATEWAY_RESOURCE_NAME
device_resource = networkgw.DEVICE_RESOURCE_NAME
def _get_network_gateway(self, context, gw_id):
try:
gw = self._get_by_id(context, NetworkGateway, gw_id)
except sa_orm_exc.NoResultFound:
raise GatewayNotFound(gateway_id=gw_id)
return gw
def _make_gw_connection_dict(self, gw_conn):
return {'port_id': gw_conn['port_id'],
'segmentation_type': gw_conn['segmentation_type'],
'segmentation_id': gw_conn['segmentation_id']}
def _make_network_gateway_dict(self, network_gateway, fields=None):
device_list = []
for d in network_gateway['devices']:
device_list.append({'id': d['id'],
'interface_name': d['interface_name']})
res = {'id': network_gateway['id'],
'name': network_gateway['name'],
'default': network_gateway['default'],
'devices': device_list,
'tenant_id': network_gateway['tenant_id']}
# Query gateway connections only if needed
if (fields and 'ports' in fields) or not fields:
res['ports'] = [self._make_gw_connection_dict(conn)
for conn in network_gateway.network_connections]
return self._fields(res, fields)
def _set_mapping_info_defaults(self, mapping_info):
if not mapping_info.get('segmentation_type'):
mapping_info['segmentation_type'] = 'flat'
if not mapping_info.get('segmentation_id'):
mapping_info['segmentation_id'] = 0
def _validate_network_mapping_info(self, network_mapping_info):
self._set_mapping_info_defaults(network_mapping_info)
network_id = network_mapping_info.get(NETWORK_ID)
if not network_id:
raise exceptions.InvalidInput(
error_message=_("A network identifier must be specified "
"when connecting a network to a network "
"gateway. Unable to complete operation"))
connection_attrs = set(network_mapping_info.keys())
if not connection_attrs.issubset(ALLOWED_CONNECTION_ATTRIBUTES):
raise exceptions.InvalidInput(
error_message=(_("Invalid keys found among the ones provided "
"in request body: %(connection_attrs)s."),
connection_attrs))
seg_type = network_mapping_info.get(SEGMENTATION_TYPE)
seg_id = network_mapping_info.get(SEGMENTATION_ID)
if not seg_type and seg_id:
msg = _("In order to specify a segmentation id the "
"segmentation type must be specified as well")
raise exceptions.InvalidInput(error_message=msg)
elif seg_type and seg_type.lower() == 'flat' and seg_id:
msg = _("Cannot specify a segmentation id when "
"the segmentation type is flat")
raise exceptions.InvalidInput(error_message=msg)
return network_id
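        # Illustrative examples (not part of the original file), with NET standing for a
        # hypothetical existing network UUID:
        #   {'network_id': NET, 'segmentation_type': 'vlan', 'segmentation_id': 555}  -> accepted
        #   {'network_id': NET}                             -> accepted (defaulted to flat, id 0)
        #   {'network_id': NET, 'segmentation_type': 'flat', 'segmentation_id': 555}  -> rejected
        #   {'network_id': NET, 'segmentation_id': 555}     -> rejected (defaults to flat, which
        #                                                      cannot carry a segmentation id)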
def _retrieve_gateway_connections(self, context, gateway_id,
mapping_info={}, only_one=False):
filters = {'network_gateway_id': [gateway_id]}
for k, v in mapping_info.iteritems():
if v and k != NETWORK_ID:
filters[k] = [v]
query = self._get_collection_query(context,
NetworkConnection,
filters)
return only_one and query.one() or query.all()
def _unset_default_network_gateways(self, context):
with context.session.begin(subtransactions=True):
context.session.query(NetworkGateway).update(
{NetworkGateway.default: False})
def _set_default_network_gateway(self, context, gw_id):
with context.session.begin(subtransactions=True):
gw = (context.session.query(NetworkGateway).
filter_by(id=gw_id).one())
gw['default'] = True
def prevent_network_gateway_port_deletion(self, context, port):
"""Pre-deletion check.
        Ensures a port will not be deleted if it is being used by a network
gateway. In that case an exception will be raised.
"""
if port['device_owner'] == DEVICE_OWNER_NET_GW_INTF:
raise NetworkGatewayPortInUse(port_id=port['id'],
device_owner=port['device_owner'])
def create_network_gateway(self, context, network_gateway):
gw_data = network_gateway[self.gateway_resource]
tenant_id = self._get_tenant_id_for_create(context, gw_data)
with context.session.begin(subtransactions=True):
gw_db = NetworkGateway(
id=gw_data.get('id', uuidutils.generate_uuid()),
tenant_id=tenant_id,
name=gw_data.get('name'))
# Device list is guaranteed to be a valid list
# TODO(salv-orlando): Enforce that gateway device identifiers
# in this list are among the tenant's NSX network gateway devices
            # to avoid the risk of a tenant 'guessing' other tenants' network devices
gw_db.devices.extend([NetworkGatewayDeviceReference(**device)
for device in gw_data['devices']])
context.session.add(gw_db)
LOG.debug(_("Created network gateway with id:%s"), gw_db['id'])
return self._make_network_gateway_dict(gw_db)
def update_network_gateway(self, context, id, network_gateway):
gw_data = network_gateway[self.gateway_resource]
with context.session.begin(subtransactions=True):
gw_db = self._get_network_gateway(context, id)
if gw_db.default:
raise NetworkGatewayUnchangeable(gateway_id=id)
# Ensure there is something to update before doing it
if any([gw_db[k] != gw_data[k] for k in gw_data]):
gw_db.update(gw_data)
LOG.debug(_("Updated network gateway with id:%s"), id)
return self._make_network_gateway_dict(gw_db)
def get_network_gateway(self, context, id, fields=None):
gw_db = self._get_network_gateway(context, id)
return self._make_network_gateway_dict(gw_db, fields)
def delete_network_gateway(self, context, id):
with context.session.begin(subtransactions=True):
gw_db = self._get_network_gateway(context, id)
if gw_db.network_connections:
raise GatewayInUse(gateway_id=id)
if gw_db.default:
raise NetworkGatewayUnchangeable(gateway_id=id)
context.session.delete(gw_db)
LOG.debug(_("Network gateway '%s' was destroyed."), id)
def get_network_gateways(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
marker_obj = self._get_marker_obj(
context, 'network_gateway', limit, marker)
return self._get_collection(context, NetworkGateway,
self._make_network_gateway_dict,
filters=filters, fields=fields,
sorts=sorts, limit=limit,
marker_obj=marker_obj,
page_reverse=page_reverse)
def connect_network(self, context, network_gateway_id,
network_mapping_info):
network_id = self._validate_network_mapping_info(network_mapping_info)
LOG.debug(_("Connecting network '%(network_id)s' to gateway "
"'%(network_gateway_id)s'"),
{'network_id': network_id,
'network_gateway_id': network_gateway_id})
with context.session.begin(subtransactions=True):
gw_db = self._get_network_gateway(context, network_gateway_id)
tenant_id = self._get_tenant_id_for_create(context, gw_db)
# TODO(salvatore-orlando): Leverage unique constraint instead
# of performing another query!
if self._retrieve_gateway_connections(context,
network_gateway_id,
network_mapping_info):
raise GatewayConnectionInUse(mapping=network_mapping_info,
gateway_id=network_gateway_id)
# TODO(salvatore-orlando): Creating a port will give it an IP,
# but we actually do not need any. Instead of wasting an IP we
# should have a way to say a port shall not be associated with
# any subnet
try:
# We pass the segmentation type and id too - the plugin
# might find them useful as the network connection object
# does not exist yet.
# NOTE: they're not extended attributes, rather extra data
# passed in the port structure to the plugin
# TODO(salvatore-orlando): Verify optimal solution for
# ownership of the gateway port
port = self.create_port(context, {
'port':
{'tenant_id': tenant_id,
'network_id': network_id,
'mac_address': attributes.ATTR_NOT_SPECIFIED,
'admin_state_up': True,
'fixed_ips': [],
'device_id': network_gateway_id,
'device_owner': DEVICE_OWNER_NET_GW_INTF,
'name': '',
'gw:segmentation_type':
network_mapping_info.get('segmentation_type'),
'gw:segmentation_id':
network_mapping_info.get('segmentation_id')}})
except exceptions.NetworkNotFound:
err_msg = (_("Requested network '%(network_id)s' not found."
"Unable to create network connection on "
"gateway '%(network_gateway_id)s") %
{'network_id': network_id,
'network_gateway_id': network_gateway_id})
LOG.error(err_msg)
raise exceptions.InvalidInput(error_message=err_msg)
port_id = port['id']
LOG.debug(_("Gateway port for '%(network_gateway_id)s' "
"created on network '%(network_id)s':%(port_id)s"),
{'network_gateway_id': network_gateway_id,
'network_id': network_id,
'port_id': port_id})
# Create NetworkConnection record
network_mapping_info['port_id'] = port_id
network_mapping_info['tenant_id'] = tenant_id
gw_db.network_connections.append(
NetworkConnection(**network_mapping_info))
port_id = port['id']
# now deallocate and recycle ip from the port
for fixed_ip in port.get('fixed_ips', []):
self._delete_ip_allocation(context, network_id,
fixed_ip['subnet_id'],
fixed_ip['ip_address'])
LOG.debug(_("Ensured no Ip addresses are configured on port %s"),
port_id)
return {'connection_info':
{'network_gateway_id': network_gateway_id,
'network_id': network_id,
'port_id': port_id}}
def disconnect_network(self, context, network_gateway_id,
network_mapping_info):
network_id = self._validate_network_mapping_info(network_mapping_info)
LOG.debug(_("Disconnecting network '%(network_id)s' from gateway "
"'%(network_gateway_id)s'"),
{'network_id': network_id,
'network_gateway_id': network_gateway_id})
with context.session.begin(subtransactions=True):
# Uniquely identify connection, otherwise raise
try:
net_connection = self._retrieve_gateway_connections(
context, network_gateway_id,
network_mapping_info, only_one=True)
except sa_orm_exc.NoResultFound:
raise GatewayConnectionNotFound(
network_mapping_info=network_mapping_info,
network_gateway_id=network_gateway_id)
except sa_orm_exc.MultipleResultsFound:
raise MultipleGatewayConnections(
gateway_id=network_gateway_id)
# Remove gateway port from network
# FIXME(salvatore-orlando): Ensure state of port in NVP is
# consistent with outcome of transaction
self.delete_port(context, net_connection['port_id'],
nw_gw_port_check=False)
# Remove NetworkConnection record
context.session.delete(net_connection)
def _make_gateway_device_dict(self, gateway_device, fields=None,
include_nsx_id=False):
res = {'id': gateway_device['id'],
'name': gateway_device['name'],
'status': gateway_device['status'],
'connector_type': gateway_device['connector_type'],
'connector_ip': gateway_device['connector_ip'],
'tenant_id': gateway_device['tenant_id']}
if include_nsx_id:
# Return the NSX mapping as well. This attribute will not be
# returned in the API response anyway. Ensure it will not be
# filtered out in field selection.
if fields:
fields.append('nsx_id')
res['nsx_id'] = gateway_device['nsx_id']
return self._fields(res, fields)
def _get_gateway_device(self, context, device_id):
try:
return self._get_by_id(context, NetworkGatewayDevice, device_id)
except sa_orm_exc.NoResultFound:
raise GatewayDeviceNotFound(device_id=device_id)
def _is_device_in_use(self, context, device_id):
query = self._get_collection_query(
context, NetworkGatewayDeviceReference, {'id': [device_id]})
return query.first()
def get_gateway_device(self, context, device_id, fields=None,
include_nsx_id=False):
return self._make_gateway_device_dict(
self._get_gateway_device(context, device_id),
fields, include_nsx_id)
def get_gateway_devices(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False, include_nsx_id=False):
marker_obj = self._get_marker_obj(
context, 'gateway_device', limit, marker)
query = self._get_collection_query(context,
NetworkGatewayDevice,
filters=filters,
fields=fields,
sorts=sorts,
limit=limit,
marker_obj=marker_obj,
page_reverse=page_reverse)
return [self._make_gateway_device_dict(row, fields, include_nsx_id)
for row in query]
def create_gateway_device(self, context, gateway_device,
initial_status=STATUS_UNKNOWN):
device_data = gateway_device[self.device_resource]
tenant_id = self._get_tenant_id_for_create(context, device_data)
with context.session.begin(subtransactions=True):
device_db = NetworkGatewayDevice(
id=device_data.get('id', uuidutils.generate_uuid()),
tenant_id=tenant_id,
name=device_data.get('name'),
connector_type=device_data['connector_type'],
connector_ip=device_data['connector_ip'],
status=initial_status)
context.session.add(device_db)
LOG.debug(_("Created network gateway device: %s"), device_db['id'])
return self._make_gateway_device_dict(device_db)
def update_gateway_device(self, context, gateway_device_id,
gateway_device, include_nsx_id=False):
device_data = gateway_device[self.device_resource]
with context.session.begin(subtransactions=True):
device_db = self._get_gateway_device(context, gateway_device_id)
# Ensure there is something to update before doing it
if any([device_db[k] != device_data[k] for k in device_data]):
device_db.update(device_data)
LOG.debug(_("Updated network gateway device: %s"),
gateway_device_id)
return self._make_gateway_device_dict(
device_db, include_nsx_id=include_nsx_id)
def delete_gateway_device(self, context, device_id):
with context.session.begin(subtransactions=True):
# A gateway device should not be deleted
# if it is used in any network gateway service
if self._is_device_in_use(context, device_id):
raise GatewayDeviceInUse(device_id=device_id)
device_db = self._get_gateway_device(context, device_id)
context.session.delete(device_db)
LOG.debug(_("Deleted network gateway device: %s."), device_id)
|
apache-2.0
| 6,341,488,234,011,928,000 | 47.196393 | 79 | 0.580042 | false |
hogasa/normalizador-amba
|
usig_normalizador_amba/Direccion.py
|
1
|
3312
|
# coding: UTF-8
'''
Created on Apr 16, 2014
@author: hernan
'''
from __future__ import absolute_import
import re
from usig_normalizador_amba.settings import CALLE_ALTURA, CALLE_Y_CALLE, INVALIDO
from usig_normalizador_amba.Calle import Calle
class Direccion:
'''
    @ivar calle: Street of the address
    @type calle: Calle
    @ivar altura: Street number
    @type altura: Integer
    @ivar cruce: Cross street
    @type cruce: Calle
    @ivar tipo: type of the address
    @type tipo: {CALLE_ALTURA = 0, CALLE_Y_CALLE = 1}
    @ivar smp: Section-Block-Parcel (Seccion-Manzana-Parcela) identifier
    @type smp: String
    @ivar coordenadas: Geocoded coordinates
    @type coordenadas: Punto
    @ivar partido: Partido (district) of the address
    @type partido: Partido
'''
calle = None
altura = 0
cruce = None
tipo = INVALIDO
coordenadas = None
partido = None
localidad = ''
def __init__(self, calle, altura=0, cruce=None):
'''
        @ivar calle: Street of the address
        @type calle: Calle
        @ivar altura: Street number
        @type altura: Integer
        @ivar cruce: Cross street
        @type cruce: Calle
'''
try:
if(isinstance(calle, Calle)):
self.calle = calle
self.partido = calle.partido
self.localidad = calle.localidad
else:
raise TypeError('calle must be a Calle object.')
self.altura = int(altura)
if (cruce is None or isinstance(cruce, Calle)):
self.cruce = cruce
else:
raise TypeError('cruce must be a Calle object.')
if self.altura > 0:
self.tipo = CALLE_ALTURA
elif cruce is not None:
self.tipo = CALLE_Y_CALLE
else:
self.tipo = INVALIDO
except Exception, e:
raise e
def __str__(self):
return self.__unicode__().encode('utf8', 'ignore')
def __unicode__(self):
retval = u'''-- Dirección
calle = {0}
altura = {1}
cruce = {2}
coordenadas = {3}
partido = {4}
localidad = {5}'''
return retval.format(self.calle.nombre,
self.altura,
self.cruce.nombre if self.cruce is not None else '',
self.coordenadas,
self.partido.nombre,
self.localidad)
def toString(self):
'''
        Returns a properly formatted, human-readable string for the address
        @return: the address as text
        @rtype: String
'''
if (self.tipo == CALLE_ALTURA):
if(self.altura > 0):
altura = self.altura
else:
altura = 'S/N'
retval = u'{0} {1}, {2}'.format(self.calle.nombre, altura, self.partido.nombre)
elif (self.tipo == CALLE_Y_CALLE):
if(re.match('(?i)(I|Hi|HI)', self.cruce.nombre) is not None):
separador = 'e'
else:
separador = 'y'
retval = u'{0} {1} {2}, {3}'.format(self.calle.nombre, separador, self.cruce.nombre, self.partido.nombre)
else:
retval = ''
return retval
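    # Illustrative example (not part of the original module), with hypothetical data:
    # for a Calle named 'Corrientes' in partido 'CABA',
    #   Direccion(calle, altura=1234).toString()        -> u'Corrientes 1234, CABA'
    #   Direccion(calle, cruce=otra_calle).toString()   -> u'Corrientes y Uruguay, CABA'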
|
mit
| 1,672,229,273,734,386,700 | 28.5625 | 117 | 0.530353 | false |
pandada8/mirrord
|
mirrord/daemon.py
|
1
|
1173
|
import yaml
from task import RsyncTask
import asyncio
import logging
import argparse
class Application():
def __init__(self):
self.tasks = {}
def load_config(self, config):
with open(config) as fp:
data = yaml.safe_load(fp)
for i, j in data["task"].items():
self.tasks[i] = RsyncTask(i, j)
logging.info("loaded config")
def find_to_update(self):
for i in self.tasks.values():
if i.can_run():
return i
async def run(self):
while True:
task = self.find_to_update()
if task:
logging.info("Schedule %s", task)
task.start()
            await asyncio.sleep(1)  # poll once per second for a runnable task
def start(self):
el = asyncio.get_event_loop()
logging.info("Daemon Start >_<")
el.run_until_complete(self.run())
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
a = Application()
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--config")
args = parser.parse_args()
a.load_config(args.config)
a.start()
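# Example configuration layout (a sketch: load_config() expects a YAML file with a
# top-level "task" mapping, but the per-task fields are defined by RsyncTask in task.py
# and the names below are only assumptions):
#
#   task:
#     debian:
#       source: rsync://ftp.debian.org/debian/
#       dest: /srv/mirror/debian
#
# The daemon is then started with:  python daemon.py -c config.yaml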
|
mit
| 6,706,579,774,450,081,000 | 25.659091 | 73 | 0.55584 | false |
rosudrag/Freemium-winner
|
VirtualEnvironment/Lib/site-packages/jinja2/loaders.py
|
1
|
17346
|
# -*- coding: utf-8 -*-
"""
jinja2.loaders
~~~~~~~~~~~~~~
Jinja loader classes.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import os
import sys
import weakref
from types import ModuleType
from os import path
from hashlib import sha1
from jinja2.exceptions import TemplateNotFound
from jinja2.utils import open_if_exists, internalcode
from jinja2._compat import string_types, iteritems
def split_template_path(template):
"""Split a path into segments and perform a sanity check. If it detects
'..' in the path it will raise a `TemplateNotFound` error.
"""
pieces = []
for piece in template.split('/'):
if path.sep in piece \
or (path.altsep and path.altsep in piece) or \
piece == path.pardir:
raise TemplateNotFound(template)
elif piece and piece != '.':
pieces.append(piece)
return pieces
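# For example, split_template_path('admin/index.html') returns ['admin', 'index.html'],
# while a path containing '..' such as '../secret.html' raises TemplateNotFound.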
class BaseLoader(object):
"""Baseclass for all loaders. Subclass this and override `get_source` to
implement a custom loading mechanism. The environment provides a
`get_template` method that calls the loader's `load` method to get the
:class:`Template` object.
A very basic example for a loader that looks up templates on the file
system could look like this::
from jinja2 import BaseLoader, TemplateNotFound
from os.path import join, exists, getmtime
class MyLoader(BaseLoader):
def __init__(self, path):
self.path = path
def get_source(self, environment, template):
path = join(self.path, template)
if not exists(path):
raise TemplateNotFound(template)
mtime = getmtime(path)
                with open(path, 'rb') as f:
source = f.read().decode('utf-8')
return source, path, lambda: mtime == getmtime(path)
"""
#: if set to `False` it indicates that the loader cannot provide access
#: to the source of templates.
#:
#: .. versionadded:: 2.4
has_source_access = True
def get_source(self, environment, template):
"""Get the template source, filename and reload helper for a template.
It's passed the environment and template name and has to return a
tuple in the form ``(source, filename, uptodate)`` or raise a
`TemplateNotFound` error if it can't locate the template.
The source part of the returned tuple must be the source of the
        template as a unicode string or an ASCII bytestring. The filename should
be the name of the file on the filesystem if it was loaded from there,
otherwise `None`. The filename is used by python for the tracebacks
if no loader extension is used.
The last item in the tuple is the `uptodate` function. If auto
reloading is enabled it's always called to check if the template
changed. No arguments are passed so the function must store the
old state somewhere (for example in a closure). If it returns `False`
the template will be reloaded.
"""
if not self.has_source_access:
raise RuntimeError('%s cannot provide access to the source' %
self.__class__.__name__)
raise TemplateNotFound(template)
def list_templates(self):
"""Iterates over all templates. If the loader does not support that
it should raise a :exc:`TypeError` which is the default behavior.
"""
raise TypeError('this loader cannot iterate over all templates')
@internalcode
def load(self, environment, name, globals=None):
"""Loads a template. This method looks up the template in the cache
or loads one by calling :meth:`get_source`. Subclasses should not
override this method as loaders working on collections of other
loaders (such as :class:`PrefixLoader` or :class:`ChoiceLoader`)
will not call this method but `get_source` directly.
"""
code = None
if globals is None:
globals = {}
# first we try to get the source for this template together
# with the filename and the uptodate function.
source, filename, uptodate = self.get_source(environment, name)
# try to load the code from the bytecode cache if there is a
# bytecode cache configured.
bcc = environment.bytecode_cache
if bcc is not None:
bucket = bcc.get_bucket(environment, name, filename, source)
code = bucket.code
# if we don't have code so far (not cached, no longer up to
# date) etc. we compile the template
if code is None:
code = environment.compile(source, name, filename)
# if the bytecode cache is available and the bucket doesn't
# have a code so far, we give the bucket the new code and put
# it back to the bytecode cache.
if bcc is not None and bucket.code is None:
bucket.code = code
bcc.set_bucket(bucket)
return environment.template_class.from_code(environment, code,
globals, uptodate)
class FileSystemLoader(BaseLoader):
"""Loads templates from the file system. This loader can find templates
in folders on the file system and is the preferred way to load them.
The loader takes the path to the templates as string, or if multiple
locations are wanted a list of them which is then looked up in the
given order::
    >>> loader = FileSystemLoader('/path/to/templates')
    >>> loader = FileSystemLoader(['/path/to/templates', '/other/path'])
    By default the template encoding is ``'utf-8'``, which can be changed
    by setting the `encoding` parameter to something else.
To follow symbolic links, set the *followlinks* parameter to ``True``::
>>> loader = FileSystemLoader('/path/to/templates', followlinks=True)
.. versionchanged:: 2.8+
The *followlinks* parameter was added.
"""
def __init__(self, searchpath, encoding='utf-8', followlinks=False):
if isinstance(searchpath, string_types):
searchpath = [searchpath]
self.searchpath = list(searchpath)
self.encoding = encoding
self.followlinks = followlinks
def get_source(self, environment, template):
pieces = split_template_path(template)
for searchpath in self.searchpath:
filename = path.join(searchpath, *pieces)
f = open_if_exists(filename)
if f is None:
continue
try:
contents = f.read().decode(self.encoding)
finally:
f.close()
mtime = path.getmtime(filename)
def uptodate():
try:
return path.getmtime(filename) == mtime
except OSError:
return False
return contents, filename, uptodate
raise TemplateNotFound(template)
def list_templates(self):
found = set()
for searchpath in self.searchpath:
walk_dir = os.walk(searchpath, followlinks=self.followlinks)
for dirpath, dirnames, filenames in walk_dir:
for filename in filenames:
template = os.path.join(dirpath, filename) \
[len(searchpath):].strip(os.path.sep) \
.replace(os.path.sep, '/')
if template[:2] == './':
template = template[2:]
if template not in found:
found.add(template)
return sorted(found)
class PackageLoader(BaseLoader):
"""Load templates from python eggs or packages. It is constructed with
the name of the python package and the path to the templates in that
package::
loader = PackageLoader('mypackage', 'views')
If the package path is not given, ``'templates'`` is assumed.
    By default the template encoding is ``'utf-8'``, which can be changed
by setting the `encoding` parameter to something else. Due to the nature
of eggs it's only possible to reload templates if the package was loaded
from the file system and not a zip file.
"""
def __init__(self, package_name, package_path='templates',
encoding='utf-8'):
from pkg_resources import DefaultProvider, ResourceManager, \
get_provider
provider = get_provider(package_name)
self.encoding = encoding
self.manager = ResourceManager()
self.filesystem_bound = isinstance(provider, DefaultProvider)
self.provider = provider
self.package_path = package_path
def get_source(self, environment, template):
pieces = split_template_path(template)
p = '/'.join((self.package_path,) + tuple(pieces))
if not self.provider.has_resource(p):
raise TemplateNotFound(template)
filename = uptodate = None
if self.filesystem_bound:
filename = self.provider.get_resource_filename(self.manager, p)
mtime = path.getmtime(filename)
def uptodate():
try:
return path.getmtime(filename) == mtime
except OSError:
return False
source = self.provider.get_resource_string(self.manager, p)
return source.decode(self.encoding), filename, uptodate
def list_templates(self):
path = self.package_path
if path[:2] == './':
path = path[2:]
elif path == '.':
path = ''
offset = len(path)
results = []
def _walk(path):
for filename in self.provider.resource_listdir(path):
fullname = path + '/' + filename
if self.provider.resource_isdir(fullname):
_walk(fullname)
else:
results.append(fullname[offset:].lstrip('/'))
_walk(path)
results.sort()
return results
class DictLoader(BaseLoader):
"""Loads a template from a python dict. It's passed a dict of unicode
strings bound to template names. This loader is useful for unittesting:
>>> loader = DictLoader({'index.html': 'source here'})
    Because auto reloading is rarely useful, it is disabled by default.
"""
def __init__(self, mapping):
self.mapping = mapping
def get_source(self, environment, template):
if template in self.mapping:
source = self.mapping[template]
return source, None, lambda: source == self.mapping.get(template)
raise TemplateNotFound(template)
def list_templates(self):
return sorted(self.mapping)
class FunctionLoader(BaseLoader):
"""A loader that is passed a function which does the loading. The
function receives the name of the template and has to return either
    a unicode string with the template source, a tuple in the form ``(source,
filename, uptodatefunc)`` or `None` if the template does not exist.
>>> def load_template(name):
... if name == 'index.html':
... return '...'
...
>>> loader = FunctionLoader(load_template)
The `uptodatefunc` is a function that is called if autoreload is enabled
and has to return `True` if the template is still up to date. For more
details have a look at :meth:`BaseLoader.get_source` which has the same
return value.
"""
def __init__(self, load_func):
self.load_func = load_func
def get_source(self, environment, template):
rv = self.load_func(template)
if rv is None:
raise TemplateNotFound(template)
elif isinstance(rv, string_types):
return rv, None, None
return rv
class PrefixLoader(BaseLoader):
"""A loader that is passed a dict of loaders where each loader is bound
to a prefix. The prefix is delimited from the template by a slash per
default, which can be changed by setting the `delimiter` argument to
something else::
loader = PrefixLoader({
'app1': PackageLoader('mypackage.app1'),
'app2': PackageLoader('mypackage.app2')
})
By loading ``'app1/index.html'`` the file from the app1 package is loaded,
by loading ``'app2/index.html'`` the file from the second.
"""
def __init__(self, mapping, delimiter='/'):
self.mapping = mapping
self.delimiter = delimiter
def get_loader(self, template):
try:
prefix, name = template.split(self.delimiter, 1)
loader = self.mapping[prefix]
except (ValueError, KeyError):
raise TemplateNotFound(template)
return loader, name
def get_source(self, environment, template):
loader, name = self.get_loader(template)
try:
return loader.get_source(environment, name)
except TemplateNotFound:
            # re-raise the exception with the correct filename here.
# (the one that includes the prefix)
raise TemplateNotFound(template)
@internalcode
def load(self, environment, name, globals=None):
loader, local_name = self.get_loader(name)
try:
return loader.load(environment, local_name, globals)
except TemplateNotFound:
            # re-raise the exception with the correct filename here.
# (the one that includes the prefix)
raise TemplateNotFound(name)
def list_templates(self):
result = []
for prefix, loader in iteritems(self.mapping):
for template in loader.list_templates():
result.append(prefix + self.delimiter + template)
return result
class ChoiceLoader(BaseLoader):
"""This loader works like the `PrefixLoader` just that no prefix is
specified. If a template could not be found by one loader the next one
is tried.
>>> loader = ChoiceLoader([
... FileSystemLoader('/path/to/user/templates'),
... FileSystemLoader('/path/to/system/templates')
... ])
This is useful if you want to allow users to override builtin templates
from a different location.
"""
def __init__(self, loaders):
self.loaders = loaders
def get_source(self, environment, template):
for loader in self.loaders:
try:
return loader.get_source(environment, template)
except TemplateNotFound:
pass
raise TemplateNotFound(template)
@internalcode
def load(self, environment, name, globals=None):
for loader in self.loaders:
try:
return loader.load(environment, name, globals)
except TemplateNotFound:
pass
raise TemplateNotFound(name)
def list_templates(self):
found = set()
for loader in self.loaders:
found.update(loader.list_templates())
return sorted(found)
class _TemplateModule(ModuleType):
"""Like a normal module but with support for weak references"""
class ModuleLoader(BaseLoader):
"""This loader loads templates from precompiled templates.
Example usage:
>>> loader = ChoiceLoader([
... ModuleLoader('/path/to/compiled/templates'),
... FileSystemLoader('/path/to/templates')
... ])
Templates can be precompiled with :meth:`Environment.compile_templates`.
"""
has_source_access = False
def __init__(self, path):
package_name = '_jinja2_module_templates_%x' % id(self)
# create a fake module that looks for the templates in the
# path given.
mod = _TemplateModule(package_name)
if isinstance(path, string_types):
path = [path]
else:
path = list(path)
mod.__path__ = path
sys.modules[package_name] = weakref.proxy(mod,
lambda x: sys.modules.pop(package_name, None))
# the only strong reference, the sys.modules entry is weak
# so that the garbage collector can remove it once the
# loader that created it goes out of business.
self.module = mod
self.package_name = package_name
@staticmethod
def get_template_key(name):
return 'tmpl_' + sha1(name.encode('utf-8')).hexdigest()
@staticmethod
def get_module_filename(name):
return ModuleLoader.get_template_key(name) + '.py'
@internalcode
def load(self, environment, name, globals=None):
key = self.get_template_key(name)
module = '%s.%s' % (self.package_name, key)
mod = getattr(self.module, module, None)
if mod is None:
try:
mod = __import__(module, None, None, ['root'])
except ImportError:
raise TemplateNotFound(name)
# remove the entry from sys.modules, we only want the attribute
# on the module object we have stored on the loader.
sys.modules.pop(module, None)
return environment.template_class.from_module_dict(
environment, mod.__dict__, globals)
|
mit
| -6,711,097,313,728,890,000 | 35.06237 | 78 | 0.612418 | false |
jinankjain/zamboni
|
mkt/versions/tests/test_serializers.py
|
1
|
1791
|
from django.core.urlresolvers import reverse
from nose.tools import eq_, ok_
from test_utils import RequestFactory
from amo.tests import app_factory, TestCase
from versions.models import Version
from mkt.versions.serializers import VersionSerializer
class TestVersionSerializer(TestCase):
def setUp(self):
self.app = app_factory()
self.features = self.app.current_version.features
self.request = RequestFactory().get('/')
self.serializer = VersionSerializer(context={'request': self.request})
def native(self, obj=None, **kwargs):
if not obj:
obj = self.app.current_version
obj.update(**kwargs)
return self.serializer.to_native(obj)
def test_renamed_fields(self):
native = self.native()
removed_keys = self.serializer.Meta.field_rename.keys()
added_keys = self.serializer.Meta.field_rename.values()
ok_(all(not k in native for k in removed_keys))
ok_(all(k in native for k in added_keys))
def test_addon(self):
eq_(self.native()['app'], reverse('app-detail',
kwargs={'pk': self.app.pk}))
def test_is_current_version(self):
old_version = Version.objects.create(addon=self.app, version='0.1')
ok_(self.native()['is_current_version'])
ok_(not self.native(obj=old_version)['is_current_version'])
def test_features(self, **kwargs):
if kwargs:
self.features.update(**kwargs)
native = self.native()
for key in dir(self.features):
if key.startswith('has_') and getattr(self.features, key):
ok_(key.replace('has_', '') in native['features'])
def test_features_updated(self):
self.test_features(has_fm=True)
|
bsd-3-clause
| -7,867,765,686,828,374,000 | 34.82 | 78 | 0.630374 | false |
omarayad1/cantkeepup
|
app/core/helpers.py
|
1
|
1103
|
from json import dumps # pragma: no cover
from sqlalchemy.orm import class_mapper # pragma: no cover
from app.models import User, Group # pragma: no cover
def serialize(obj, columns):
    # build a dict mapping each requested column name to its value on the object
return dict((c, getattr(obj, c)) for c in columns)
def queryAllToJson(model,conditions):
    # collect the column names defined on the model's SQLAlchemy mapper
columns = [c.key for c in class_mapper(model).columns]
serialized_objs = [
serialize(obj,columns)
for obj in model.query.filter_by(**conditions)
]
return dumps(serialized_objs)
def objectToJson(obj):
columns = [c.key for c in class_mapper(obj.__class__).columns]
serialized_obj = serialize(obj, columns)
return dumps(serialized_obj)
def getUserId(username):
user = User.query.filter_by(username=username).first()
if user is None:
raise Exception('username %s not found in database' % username)
else:
return user.id
def getGroupId(groupname):
group = Group.query.filter_by(groupname=groupname).first()
if group is None:
raise Exception('groupname %s not found in database' % groupname)
else:
return group.id
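# Informal usage sketch (the "group_id" filter field is an assumption about the User
# model, which is defined elsewhere in app.models):
#
#     payload = queryAllToJson(User, {"group_id": getGroupId("admins")})
#     user_json = objectToJson(User.query.first())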
|
mit
| -3,075,995,795,545,022,500 | 30.542857 | 67 | 0.741614 | false |
bayesimpact/bob-emploi
|
frontend/server/mail/jobbing.py
|
1
|
2417
|
"""Focus email module for finding jobbing ideas."""
import typing
from typing import Any, Dict
from bob_emploi.frontend.api import user_pb2
from bob_emploi.frontend.api import reorient_jobbing_pb2
from bob_emploi.frontend.server import i18n
from bob_emploi.frontend.server import mongo
from bob_emploi.frontend.server import scoring
from bob_emploi.frontend.server.mail import campaign
def _get_jobbing_vars(
user: user_pb2.User, *, database: mongo.NoPiiMongoDatabase,
**unused_kwargs: Any) -> Dict[str, Any]:
"""Compute vars for the "Jobbing" email."""
project = user.projects[0]
if not any(s.strategy_id == 'diploma-free-job' for s in project.opened_strategies):
raise campaign.DoNotSend(
'The user has not started a strategy to get a job without a diploma')
scoring_project = scoring.ScoringProject(project, user, database)
model = scoring.get_scoring_model('advice-reorient-jobbing')
if not model:
raise campaign.DoNotSend('The advice-reorient-jobbing model is not implemented')
reorient_jobs = typing.cast(
reorient_jobbing_pb2.JobbingReorientJobs,
model.get_expanded_card_data(scoring_project),
).reorient_jobbing_jobs
if not reorient_jobs:
raise campaign.DoNotSend("We didn't find any jobbing jobs to reorient to for the user")
if project.target_job.name:
of_job_name = scoring_project.populate_template('%ofJobName')
else:
# This is not translated to fr@tu because the email templates are only in fr for now.
of_job_name = 'de definir votre projet professionnel'
return dict(campaign.get_default_coaching_email_vars(user), **{
'inDepartement': scoring_project.populate_template('%inDepartement'),
'jobs': [{'name': job.name} for job in reorient_jobs],
'loginUrl': campaign.create_logged_url(user.user_id, f'/projet/{project.project_id}'),
'ofJobName': of_job_name,
})
campaign.register_campaign(campaign.Campaign(
campaign_id='jobbing',
mongo_filters={
'projects': {'$elemMatch': {
'isIncomplete': {'$ne': True},
'openedStrategies.strategyId': 'diploma-free-job',
}},
},
get_vars=_get_jobbing_vars,
sender_name=i18n.make_translatable_string("Joanna et l'équipe de Bob"),
sender_email='joanna@bob-emploi.fr',
is_coaching=True,
is_big_focus=False,
))
|
gpl-3.0
| 2,725,181,951,954,456,000 | 37.349206 | 95 | 0.685017 | false |
chrislit/abydos
|
abydos/tokenizer/_c_or_v_cluster.py
|
1
|
5291
|
# Copyright 2019-2020 by Christopher C. Little.
# This file is part of Abydos.
#
# Abydos is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Abydos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Abydos. If not, see <http://www.gnu.org/licenses/>.
"""abydos.tokenizer._c_or_v_cluster.
Consonant or vowel cluster tokenizer.
This tokenizer first performs wordpunct tokenization, so words are split into
separate units and non-letter characters are added as their own units.
Following this, words are further divided into strings of consonants only and
strings of vowels only.
"""
import re
import unicodedata
from typing import Callable, Optional, Set, Union
from ._tokenizer import _Tokenizer
__all__ = ['COrVClusterTokenizer']
class COrVClusterTokenizer(_Tokenizer):
"""A C- or V-cluster tokenizer.
.. versionadded:: 0.4.0
"""
def __init__(
self,
scaler: Optional[Union[str, Callable[[float], float]]] = None,
consonants: Optional[Set[str]] = None,
vowels: Optional[Set[str]] = None,
) -> None:
"""Initialize tokenizer.
Parameters
----------
scaler : None, str, or function
A scaling function for the Counter:
- None : no scaling
- 'set' : All non-zero values are set to 1.
- 'length' : Each token has weight equal to its length.
- 'length-log' : Each token has weight equal to the log of its
length + 1.
- 'length-exp' : Each token has weight equal to e raised to its
length.
- a callable function : The function is applied to each value
in the Counter. Some useful functions include math.exp,
math.log1p, math.sqrt, and indexes into interesting integer
sequences such as the Fibonacci sequence.
consonants : None or set(str)
The set of characters to treat as consonants
vowels : None or set(str)
The set of characters to treat as vowels
.. versionadded:: 0.4.0
"""
super(COrVClusterTokenizer, self).__init__(scaler=scaler)
if consonants:
self._consonants = consonants
else:
self._consonants = set('bcdfghjklmnpqrstvwxzßBCDFGHJKLMNPQRSTVWXZ')
if vowels:
self._vowels = vowels
else:
self._vowels = set('aeiouyAEIOUY')
self._regexp = re.compile(r'\w+|[^\w\s]+', flags=0)
def tokenize(self, string: str) -> 'COrVClusterTokenizer':
"""Tokenize the term and store it.
The tokenized term is stored as an ordered list and as a Counter
object.
Parameters
----------
string : str
The string to tokenize
Examples
--------
>>> COrVClusterTokenizer().tokenize('seven-twelfths')
COrVClusterTokenizer({'s': 1, 'e': 3, 'v': 1, 'n': 1, '-': 1, 'tw': 1,
'lfths': 1})
>>> COrVClusterTokenizer().tokenize('character')
COrVClusterTokenizer({'ch': 1, 'a': 2, 'r': 2, 'ct': 1, 'e': 1})
.. versionadded:: 0.4.0
"""
self._string = string
self._ordered_tokens = []
token_list = self._regexp.findall(self._string)
for token in token_list:
if (
token[0] not in self._consonants
and token[0] not in self._vowels
):
self._ordered_tokens.append(token)
else:
token = unicodedata.normalize('NFD', token)
mode = 0 # 0 = starting mode, 1 = cons, 2 = vowels
new_token = '' # noqa: S105
for char in token:
if char in self._consonants:
if mode == 2:
self._ordered_tokens.append(new_token)
new_token = char
else:
new_token += char
mode = 1
elif char in self._vowels:
if mode == 1:
self._ordered_tokens.append(new_token)
new_token = char
else:
new_token += char
mode = 2
else: # This should cover combining marks, marks, etc.
new_token += char
self._ordered_tokens.append(new_token)
self._ordered_tokens = [
unicodedata.normalize('NFC', token)
for token in self._ordered_tokens
]
self._scale_and_counterize()
return self
if __name__ == '__main__':
import doctest
doctest.testmod(optionflags=doctest.NORMALIZE_WHITESPACE)
|
gpl-3.0
| 110,880,648,427,213,950 | 33.129032 | 79 | 0.554253 | false |
chrislit/abydos
|
tests/distance/test_distance_fleiss_levin_paik.py
|
1
|
4910
|
# Copyright 2019-2020 by Christopher C. Little.
# This file is part of Abydos.
#
# Abydos is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Abydos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Abydos. If not, see <http://www.gnu.org/licenses/>.
"""abydos.tests.distance.test_distance_fleiss_levin_paik.
This module contains unit tests for abydos.distance.FleissLevinPaik
"""
import unittest
from abydos.distance import FleissLevinPaik
class FleissLevinPaikTestCases(unittest.TestCase):
"""Test FleissLevinPaik functions.
abydos.distance.FleissLevinPaik
"""
cmp = FleissLevinPaik()
cmp_no_d = FleissLevinPaik(alphabet=0)
def test_fleiss_levin_paik_sim(self):
"""Test abydos.distance.FleissLevinPaik.sim."""
# Base cases
self.assertEqual(self.cmp.sim('', ''), 1.0)
self.assertEqual(self.cmp.sim('a', ''), 0.9987228607918263)
self.assertEqual(self.cmp.sim('', 'a'), 0.9987228607918263)
self.assertEqual(self.cmp.sim('abc', ''), 0.9974424552429667)
self.assertEqual(self.cmp.sim('', 'abc'), 0.9974424552429667)
self.assertEqual(self.cmp.sim('abc', 'abc'), 1.0)
self.assertEqual(self.cmp.sim('abcd', 'efgh'), 0.993581514762516)
self.assertAlmostEqual(self.cmp.sim('Nigel', 'Niall'), 0.9961439589)
self.assertAlmostEqual(self.cmp.sim('Niall', 'Nigel'), 0.9961439589)
self.assertAlmostEqual(self.cmp.sim('Colin', 'Coiln'), 0.9961439589)
self.assertAlmostEqual(self.cmp.sim('Coiln', 'Colin'), 0.9961439589)
self.assertAlmostEqual(
self.cmp.sim('ATCAACGAGT', 'AACGATTAG'), 0.9954751131
)
# Tests with alphabet=0 (no d factor)
self.assertEqual(self.cmp_no_d.sim('', ''), 0.0)
self.assertEqual(self.cmp_no_d.sim('a', ''), 0.0)
self.assertEqual(self.cmp_no_d.sim('', 'a'), 0.0)
self.assertEqual(self.cmp_no_d.sim('abc', ''), 0.0)
self.assertEqual(self.cmp_no_d.sim('', 'abc'), 0.0)
self.assertEqual(self.cmp_no_d.sim('abc', 'abc'), 0.0)
self.assertEqual(self.cmp_no_d.sim('abcd', 'efgh'), 0.0)
self.assertAlmostEqual(self.cmp_no_d.sim('Nigel', 'Niall'), 0.0)
self.assertAlmostEqual(self.cmp_no_d.sim('Niall', 'Nigel'), 0.0)
self.assertAlmostEqual(self.cmp_no_d.sim('Colin', 'Coiln'), 0.0)
self.assertAlmostEqual(self.cmp_no_d.sim('Coiln', 'Colin'), 0.0)
self.assertAlmostEqual(
self.cmp_no_d.sim('ATCAACGAGT', 'AACGATTAG'), 0.0
)
def test_fleiss_levin_paik_dist(self):
"""Test abydos.distance.FleissLevinPaik.dist."""
# Base cases
self.assertEqual(self.cmp.dist('', ''), 0.0)
self.assertEqual(self.cmp.dist('a', ''), 0.0012771392081737387)
self.assertEqual(self.cmp.dist('', 'a'), 0.0012771392081737387)
self.assertEqual(self.cmp.dist('abc', ''), 0.002557544757033292)
self.assertEqual(self.cmp.dist('', 'abc'), 0.002557544757033292)
self.assertEqual(self.cmp.dist('abc', 'abc'), 0.0)
self.assertEqual(self.cmp.dist('abcd', 'efgh'), 0.006418485237484006)
self.assertAlmostEqual(self.cmp.dist('Nigel', 'Niall'), 0.0038560411)
self.assertAlmostEqual(self.cmp.dist('Niall', 'Nigel'), 0.0038560411)
self.assertAlmostEqual(self.cmp.dist('Colin', 'Coiln'), 0.0038560411)
self.assertAlmostEqual(self.cmp.dist('Coiln', 'Colin'), 0.0038560411)
self.assertAlmostEqual(
self.cmp.dist('ATCAACGAGT', 'AACGATTAG'), 0.0045248869
)
# Tests with alphabet=0 (no d factor)
self.assertEqual(self.cmp_no_d.dist('', ''), 1.0)
self.assertEqual(self.cmp_no_d.dist('a', ''), 1.0)
self.assertEqual(self.cmp_no_d.dist('', 'a'), 1.0)
self.assertEqual(self.cmp_no_d.dist('abc', ''), 1.0)
self.assertEqual(self.cmp_no_d.dist('', 'abc'), 1.0)
self.assertEqual(self.cmp_no_d.dist('abc', 'abc'), 1.0)
self.assertEqual(self.cmp_no_d.dist('abcd', 'efgh'), 1.0)
self.assertAlmostEqual(self.cmp_no_d.dist('Nigel', 'Niall'), 1.0)
self.assertAlmostEqual(self.cmp_no_d.dist('Niall', 'Nigel'), 1.0)
self.assertAlmostEqual(self.cmp_no_d.dist('Colin', 'Coiln'), 1.0)
self.assertAlmostEqual(self.cmp_no_d.dist('Coiln', 'Colin'), 1.0)
self.assertAlmostEqual(
self.cmp_no_d.dist('ATCAACGAGT', 'AACGATTAG'), 1.0
)
if __name__ == '__main__':
unittest.main()
|
gpl-3.0
| 9,109,054,514,301,569,000 | 43.636364 | 77 | 0.641752 | false |
stormtrader/gw_trade
|
dysms_python/mns_python_sdk/mns/mns_client.py
|
1
|
37956
|
#coding=utf-8
# Copyright (C) 2015, Alibaba Cloud Computing
#Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
#The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import time
import hashlib
import hmac
import base64
import string
import platform
from . import mns_pkg_info
from .mns_xml_handler import *
from .mns_exception import *
from .mns_request import *
from .mns_tool import *
from .mns_http import *
#from mns.mns_xml_handler import *
#from mns.mns_exception import *
#from mns.mns_request import *
#from mns.mns_tool import *
#from mns.mns_http import *
URISEC_QUEUE = "queues"
URISEC_MESSAGE = "messages"
URISEC_TOPIC = "topics"
URISEC_SUBSCRIPTION = "subscriptions"
class MNSClient(object):
#__metaclass__ = type
def __init__(self, host, access_id, access_key, version = "2015-06-06", security_token = "", logger=None):
self.host, self.is_https = self.process_host(host)
self.access_id = access_id
self.access_key = access_key
self.version = version
self.security_token = security_token
self.logger = logger
self.http = MNSHttp(self.host, logger=logger, is_https=self.is_https)
if self.logger:
self.logger.info("InitClient Host:%s Version:%s" % (host, version))
def set_log_level(self, log_level):
if self.logger:
MNSLogger.validate_loglevel(log_level)
self.logger.setLevel(log_level)
self.http.set_log_level(log_level)
def close_log(self):
self.logger = None
self.http.close_log()
def set_connection_timeout(self, connection_timeout):
self.http.set_connection_timeout(connection_timeout)
def set_keep_alive(self, keep_alive):
self.http.set_keep_alive(keep_alive)
def close_connection(self):
self.http.conn.close()
#===============================================queue operation===============================================#
def set_account_attributes(self, req, resp):
#check parameter
SetAccountAttributesValidator.validate(req)
#make request internal
req_inter = RequestInternal(req.method, "/?accountmeta=true")
req_inter.data = SetAccountAttrEncoder.encode(req)
self.build_header(req, req_inter)
#send request
resp_inter = self.http.send_request(req_inter)
#handle result, make response
resp.status = resp_inter.status
resp.header = resp_inter.header
self.check_status(req_inter, resp_inter, resp)
def get_account_attributes(self, req, resp):
#make request internal
req_inter = RequestInternal(req.method, "/?accountmeta=true")
self.build_header(req, req_inter)
#send request
resp_inter = self.http.send_request(req_inter)
#handle result, make response
resp.status = resp_inter.status
resp.header = resp_inter.header
self.check_status(req_inter, resp_inter, resp)
if resp.error_data == "":
account_attr = GetAccountAttrDecoder.decode(resp_inter.data, req_inter.get_req_id())
resp.logging_bucket = account_attr["LoggingBucket"]
if self.logger:
self.logger.info("GetAccountAttributes RequestId:%s LoggingBucket:%s" % (resp.get_requestid(), resp.logging_bucket))
def create_queue(self, req, resp):
#check parameter
CreateQueueValidator.validate(req)
#make request internal
req_inter = RequestInternal(req.method, "/%s/%s" % (URISEC_QUEUE, req.queue_name))
req_inter.data = QueueEncoder.encode(req)
self.build_header(req, req_inter)
#send request
resp_inter = self.http.send_request(req_inter)
#handle result, make response
resp.status = resp_inter.status
resp.header = resp_inter.header
self.check_status(req_inter, resp_inter, resp)
if resp.error_data == "":
lheader = self.lower_header(resp.header)
resp.queue_url = lheader["location"]
if self.logger:
self.logger.info("CreateQueue RequestId:%s QueueName:%s QueueURL:%s" % \
(resp.get_requestid(), req.queue_name, resp.queue_url))
def delete_queue(self, req, resp):
#check parameter
DeleteQueueValidator.validate(req)
#make request internal
req_inter = RequestInternal(req.method, "/%s/%s" % (URISEC_QUEUE, req.queue_name))
self.build_header(req, req_inter)
#send request
resp_inter = self.http.send_request(req_inter)
#handle result, make response
resp.status = resp_inter.status
resp.header = resp_inter.header
self.check_status(req_inter, resp_inter, resp)
if self.logger:
self.logger.info("DeleteQueue RequestId:%s QueueName:%s" % (resp.get_requestid(), req.queue_name))
def list_queue(self, req, resp):
#check parameter
ListQueueValidator.validate(req)
#make request internal
req_inter = RequestInternal(req.method, "/%s" % URISEC_QUEUE)
if req.prefix != u"":
req_inter.header["x-mns-prefix"] = req.prefix
if req.ret_number != -1:
req_inter.header["x-mns-ret-number"] = str(req.ret_number)
if req.marker != u"":
req_inter.header["x-mns-marker"] = str(req.marker)
if req.with_meta:
req_inter.header["x-mns-with-meta"] = u"true"
self.build_header(req, req_inter)
#send request
resp_inter = self.http.send_request(req_inter)
#handle result, make response
resp.status = resp_inter.status
resp.header = resp_inter.header
self.check_status(req_inter, resp_inter, resp)
if resp.error_data == "":
resp.queueurl_list, resp.next_marker, resp.queuemeta_list = ListQueueDecoder.decode(resp_inter.data, req.with_meta, req_inter.get_req_id())
if self.logger:
firstQueueURL = "" if resp.queueurl_list == [] else resp.queueurl_list[0]
lastQueueURL = "" if resp.queueurl_list == [] else resp.queueurl_list[len(resp.queueurl_list)-1]
self.logger.info("ListQueue RequestId:%s Prefix:%s RetNumber:%s Marker:%s QueueCount:%s FirstQueueURL:%s LastQueueURL:%s NextMarker:%s" % \
(resp.get_requestid(), req.prefix, req.ret_number, req.marker, \
len(resp.queueurl_list), firstQueueURL, lastQueueURL, resp.next_marker))
def set_queue_attributes(self, req, resp):
#check parameter
SetQueueAttrValidator.validate(req)
#make request internal
req_inter = RequestInternal(req.method, "/%s/%s?metaoverride=true" % (URISEC_QUEUE, req.queue_name))
req_inter.data = QueueEncoder.encode(req, False)
self.build_header(req, req_inter)
#send request
resp_inter = self.http.send_request(req_inter)
#handle result, make response
resp.status = resp_inter.status
resp.header = resp_inter.header
self.check_status(req_inter, resp_inter, resp)
if self.logger:
self.logger.info("SetQueueAttributes RequestId:%s QueueName:%s" % (resp.get_requestid(), req.queue_name))
def get_queue_attributes(self, req, resp):
#check parameter
GetQueueAttrValidator.validate(req)
#make request internal
req_inter = RequestInternal(req.method, "/%s/%s" % (URISEC_QUEUE, req.queue_name))
self.build_header(req, req_inter)
#send request
resp_inter = self.http.send_request(req_inter)
#handle result, make response
resp.status = resp_inter.status
resp.header = resp_inter.header
self.check_status(req_inter, resp_inter, resp)
if resp.error_data == "":
queue_attr = GetQueueAttrDecoder.decode(resp_inter.data, req_inter.get_req_id())
resp.active_messages = int(queue_attr["ActiveMessages"])
resp.create_time = int(queue_attr["CreateTime"])
resp.delay_messages = int(queue_attr["DelayMessages"])
resp.delay_seconds = int(queue_attr["DelaySeconds"])
resp.inactive_messages = int(queue_attr["InactiveMessages"])
resp.last_modify_time = int(queue_attr["LastModifyTime"])
resp.maximum_message_size = int(queue_attr["MaximumMessageSize"])
resp.message_retention_period = int(queue_attr["MessageRetentionPeriod"])
resp.queue_name = queue_attr["QueueName"]
resp.visibility_timeout = int(queue_attr["VisibilityTimeout"])
resp.polling_wait_seconds = int(queue_attr["PollingWaitSeconds"])
resp.logging_enabled = True if queue_attr["LoggingEnabled"].lower() == "true" else False
if self.logger:
self.logger.info("GetQueueAttributes RequestId:%s QueueName:%s" % (resp.get_requestid(), req.queue_name))
def send_message(self, req, resp):
#check parameter
SendMessageValidator.validate(req)
#make request internal
req_inter = RequestInternal(req.method, uri = "/%s/%s/%s" % (URISEC_QUEUE, req.queue_name, URISEC_MESSAGE))
req_inter.data = MessageEncoder.encode(req)
self.build_header(req, req_inter)
#send request
resp_inter = self.http.send_request(req_inter)
#handle result, make response
resp.status = resp_inter.status
resp.header = resp_inter.header
self.check_status(req_inter, resp_inter, resp)
if resp.error_data == "":
resp.message_id, resp.message_body_md5 = SendMessageDecoder.decode(resp_inter.data, req_inter.get_req_id())
if self.logger:
self.logger.info("SendMessage RequestId:%s QueueName:%s Priority:%s DelaySeconds:%s MessageId:%s MessageBodyMD5:%s" % \
(resp.get_requestid(), req.queue_name, req.priority, \
req.delay_seconds, resp.message_id, resp.message_body_md5))
def batch_send_message(self, req, resp):
#check parameter
BatchSendMessageValidator.validate(req)
#make request internal
req_inter = RequestInternal(req.method, uri = "/%s/%s/%s" % (URISEC_QUEUE, req.queue_name, URISEC_MESSAGE))
req_inter.data = MessagesEncoder.encode(req.message_list, req.base64encode)
self.build_header(req, req_inter)
#send request
resp_inter = self.http.send_request(req_inter)
#handle result, make response
resp.status = resp_inter.status
resp.header = resp_inter.header
self.check_status(req_inter, resp_inter, resp, BatchSendMessageDecoder)
if resp.error_data == "":
resp.message_list = BatchSendMessageDecoder.decode(resp_inter.data, req_inter.get_req_id())
if self.logger:
self.logger.info("BatchSendMessage RequestId:%s QueueName:%s MessageCount:%s MessageInfo\n%s" % \
(resp.get_requestid(), req.queue_name, len(req.message_list), \
"\n".join(["MessageId:%s MessageBodyMD5:%s" % (msg.message_id, msg.message_body_md5) for msg in resp.message_list])))
def receive_message(self, req, resp):
#check parameter
ReceiveMessageValidator.validate(req)
#make request internal
req_url = "/%s/%s/%s" % (URISEC_QUEUE, req.queue_name, URISEC_MESSAGE)
if req.wait_seconds != -1:
req_url += "?waitseconds=%s" % req.wait_seconds
req_inter = RequestInternal(req.method, req_url)
self.build_header(req, req_inter)
#send request
resp_inter = self.http.send_request(req_inter)
#handle result, make response
resp.status = resp_inter.status
resp.header = resp_inter.header
self.check_status(req_inter, resp_inter, resp)
if resp.error_data == "":
data = RecvMessageDecoder.decode(resp_inter.data, req.base64decode, req_inter.get_req_id())
self.make_recvresp(data, resp)
if self.logger:
self.logger.info("ReceiveMessage RequestId:%s QueueName:%s WaitSeconds:%s MessageId:%s MessageBodyMD5:%s NextVisibilityTime:%s ReceiptHandle:%s EnqueueTime:%s DequeueCount:%s" % \
(resp.get_requestid(), req.queue_name, req.wait_seconds, resp.message_id, \
resp.message_body_md5, resp.next_visible_time, resp.receipt_handle, resp.enqueue_time, resp.dequeue_count))
def batch_receive_message(self, req, resp):
#check parameter
BatchReceiveMessageValidator.validate(req)
#make request internal
req_url = "/%s/%s/%s?numOfMessages=%s" % (URISEC_QUEUE, req.queue_name, URISEC_MESSAGE, req.batch_size)
if req.wait_seconds != -1:
req_url += "&waitseconds=%s" % req.wait_seconds
req_inter = RequestInternal(req.method, req_url)
self.build_header(req, req_inter)
#send request
resp_inter = self.http.send_request(req_inter)
#handle result, make response
resp.status = resp_inter.status
resp.header = resp_inter.header
self.check_status(req_inter, resp_inter, resp)
if resp.error_data == "":
resp.message_list = BatchRecvMessageDecoder.decode(resp_inter.data, req.base64decode, req_inter.get_req_id())
if self.logger:
self.logger.info("BatchReceiveMessage RequestId:%s QueueName:%s WaitSeconds:%s BatchSize:%s MessageCount:%s \
MessagesInfo\n%s" % (resp.get_requestid(), req.queue_name, req.wait_seconds, req.batch_size, len(resp.message_list),\
"\n".join(["MessageId:%s MessageBodyMD5:%s NextVisibilityTime:%s ReceiptHandle:%s EnqueueTime:%s DequeueCount:%s" % \
(msg.message_id, msg.message_body_md5, msg.next_visible_time, msg.receipt_handle, msg.enqueue_time, msg.dequeue_count) for msg in resp.message_list])))
def delete_message(self, req, resp):
#check parameter
DeleteMessageValidator.validate(req)
#make request internal
req_inter = RequestInternal(req.method, "/%s/%s/%s?ReceiptHandle=%s" %
(URISEC_QUEUE, req.queue_name, URISEC_MESSAGE, req.receipt_handle))
self.build_header(req, req_inter)
#send request
resp_inter = self.http.send_request(req_inter)
#handle result, make response
resp.status = resp_inter.status
resp.header = resp_inter.header
self.check_status(req_inter, resp_inter, resp)
if self.logger:
self.logger.info("DeleteMessage RequestId:%s QueueName:%s ReceiptHandle:%s" % \
(resp.get_requestid(), req.queue_name, req.receipt_handle))
def batch_delete_message(self, req, resp):
#check parameter
BatchDeleteMessageValidator.validate(req)
#make request internal
req_inter = RequestInternal(req.method, "/%s/%s/%s" % (URISEC_QUEUE, req.queue_name, URISEC_MESSAGE))
req_inter.data = ReceiptHandlesEncoder.encode(req.receipt_handle_list)
self.build_header(req, req_inter)
#send request
resp_inter = self.http.send_request(req_inter)
#handle result, make response
resp.status = resp_inter.status
resp.header = resp_inter.header
self.check_status(req_inter, resp_inter, resp, BatchDeleteMessageDecoder)
if self.logger:
self.logger.info("BatchDeleteMessage RequestId:%s QueueName:%s ReceiptHandles\n%s" % \
(resp.get_requestid(), req.queue_name, "\n".join(req.receipt_handle_list)))
def peek_message(self, req, resp):
#check parameter
PeekMessageValidator.validate(req)
#make request internal
req_inter = RequestInternal(req.method, "/%s/%s/%s?peekonly=true" %
(URISEC_QUEUE, req.queue_name, URISEC_MESSAGE))
self.build_header(req, req_inter)
#send request
resp_inter = self.http.send_request(req_inter)
#handle result, make response
resp.status = resp_inter.status
resp.header = resp_inter.header
self.check_status(req_inter, resp_inter, resp)
if resp.error_data == "":
data = PeekMessageDecoder.decode(resp_inter.data, req.base64decode, req_inter.get_req_id())
self.make_peekresp(data, resp)
if self.logger:
self.logger.info("PeekMessage RequestId:%s QueueName:%s MessageInfo \
MessageId:%s BodyMD5:%s EnqueueTime:%s DequeueCount:%s" % \
(resp.get_requestid(), req.queue_name, resp.message_id, resp.message_body_md5,\
resp.enqueue_time, resp.dequeue_count))
def batch_peek_message(self, req, resp):
#check parameter
BatchPeekMessageValidator.validate(req)
#make request internal
req_inter = RequestInternal(req.method, "/%s/%s/%s?peekonly=true&numOfMessages=%s" %
(URISEC_QUEUE, req.queue_name, URISEC_MESSAGE, req.batch_size))
self.build_header(req, req_inter)
#send request
resp_inter = self.http.send_request(req_inter)
#handle result, make response
resp.status = resp_inter.status
resp.header = resp_inter.header
self.check_status(req_inter, resp_inter, resp)
if resp.error_data == "":
resp.message_list = BatchPeekMessageDecoder.decode(resp_inter.data, req.base64decode, req_inter.get_req_id())
if self.logger:
self.logger.info("BatchPeekMessage RequestId:%s QueueName:%s BatchSize:%s MessageCount:%s MessageInfo\n%s" % \
(resp.get_requestid(), req.queue_name, req.batch_size, len(resp.message_list), \
"\n".join(["MessageId:%s BodyMD5:%s EnqueueTime:%s DequeueCount:%s" % \
(msg.message_id, msg.message_body_md5, msg.enqueue_time, msg.dequeue_count) for msg in resp.message_list])))
def change_message_visibility(self, req, resp):
#check parameter
ChangeMsgVisValidator.validate(req)
#make request internal
req_inter = RequestInternal(req.method, "/%s/%s/%s?ReceiptHandle=%s&VisibilityTimeout=%d" %
(URISEC_QUEUE, req.queue_name, URISEC_MESSAGE, req.receipt_handle, req.visibility_timeout))
self.build_header(req, req_inter)
#send request
resp_inter = self.http.send_request(req_inter)
#handle result, make response
resp.status = resp_inter.status
resp.header = resp_inter.header
self.check_status(req_inter, resp_inter, resp)
if resp.error_data == "":
resp.receipt_handle, resp.next_visible_time = ChangeMsgVisDecoder.decode(resp_inter.data, req_inter.get_req_id())
if self.logger:
self.logger.info("ChangeMessageVisibility RequestId:%s QueueName:%s ReceiptHandle:%s VisibilityTimeout:%s NewReceiptHandle:%s NextVisibleTime:%s" % \
(resp.get_requestid(), req.queue_name, req.receipt_handle, req.visibility_timeout,\
resp.receipt_handle, resp.next_visible_time))
#===============================================topic operation===============================================#
def create_topic(self, req, resp):
#check parameter
CreateTopicValidator.validate(req)
#make request internal
req_inter = RequestInternal(req.method, "/%s/%s" % (URISEC_TOPIC, req.topic_name))
req_inter.data = TopicEncoder.encode(req)
self.build_header(req, req_inter)
#send request
resp_inter = self.http.send_request(req_inter)
#handle result, make response
resp.status = resp_inter.status
resp.header = resp_inter.header
self.check_status(req_inter, resp_inter, resp)
if resp.error_data == "":
resp.topic_url = self.lower_header(resp.header)["location"]
if self.logger:
self.logger.info("CreateTopic RequestId:%s TopicName:%s TopicURl:%s" % \
(resp.get_requestid(), req.topic_name, resp.topic_url))
def delete_topic(self, req, resp):
#check parameter
DeleteTopicValidator.validate(req)
#make request internal
req_inter = RequestInternal(req.method, "/%s/%s" % (URISEC_TOPIC, req.topic_name))
self.build_header(req, req_inter)
#send reqeust
resp_inter = self.http.send_request(req_inter)
#handle result, make response
resp.status = resp_inter.status
resp.header = resp_inter.header
self.check_status(req_inter, resp_inter, resp)
if self.logger:
self.logger.info("DeleteTopic RequestId:%s TopicName:%s" % (resp.get_requestid(), req.topic_name))
def list_topic(self, req, resp):
#check parameter
ListTopicValidator.validate(req)
#make request internal
req_inter = RequestInternal(req.method, "/%s" % URISEC_TOPIC)
if req.prefix != "":
req_inter.header["x-mns-prefix"] = req.prefix
if req.ret_number != -1:
req_inter.header["x-mns-ret-number"] = str(req.ret_number)
if req.marker != "":
req_inter.header["x-mns-marker"] = str(req.marker)
if req.with_meta:
req_inter.header["x-mns-with-meta"] = "true"
self.build_header(req, req_inter)
#send request
resp_inter = self.http.send_request(req_inter)
#handle result, make response
resp.status = resp_inter.status
resp.header = resp_inter.header
self.check_status(req_inter, resp_inter, resp)
if resp.error_data == "":
resp.topicurl_list, resp.next_marker, resp.topicmeta_list = ListTopicDecoder.decode(resp_inter.data, req.with_meta, req_inter.get_req_id())
first_topicurl = "" if len(resp.topicurl_list) == 0 else resp.topicurl_list[0]
last_topicurl = "" if len(resp.topicurl_list) == 0 else resp.topicurl_list[len(resp.topicurl_list)-1]
if self.logger:
self.logger.info("ListTopic RequestId:%s Prefix:%s RetNumber:%s Marker:%s TopicCount:%s FirstTopicURL:%s LastTopicURL:%s NextMarker:%s" % \
(resp.get_requestid(), req.prefix, req.ret_number, req.marker,\
len(resp.topicurl_list), first_topicurl, last_topicurl, resp.next_marker))
def set_topic_attributes(self, req, resp):
#check parameter
SetTopicAttrValidator.validate(req)
#make request internal
req_inter = RequestInternal(req.method, "/%s/%s?metaoverride=true" % (URISEC_TOPIC, req.topic_name))
req_inter.data = TopicEncoder.encode(req)
self.build_header(req, req_inter)
#send request
resp_inter = self.http.send_request(req_inter)
#handle result, make response
resp.status = resp_inter.status
resp.header = resp_inter.header
self.check_status(req_inter, resp_inter, resp)
if self.logger:
self.logger.info("SetTopicAttributes RequestId:%s TopicName:%s" % (resp.get_requestid(), req.topic_name))
def get_topic_attributes(self, req, resp):
#check parameter
GetTopicAttrValidator.validate(req)
#make request internal
req_inter = RequestInternal(req.method, "/%s/%s" % (URISEC_TOPIC, req.topic_name))
self.build_header(req, req_inter)
#send request
resp_inter = self.http.send_request(req_inter)
#handle result, make response
resp.status = resp_inter.status
resp.header = resp_inter.header
self.check_status(req_inter, resp_inter, resp)
if resp.error_data == "":
topic_attr = GetTopicAttrDecoder.decode(resp_inter.data, req_inter.get_req_id())
resp.message_count = int(topic_attr["MessageCount"])
resp.create_time = int(topic_attr["CreateTime"])
resp.last_modify_time = int(topic_attr["LastModifyTime"])
resp.maximum_message_size = int(topic_attr["MaximumMessageSize"])
resp.message_retention_period = int(topic_attr["MessageRetentionPeriod"])
resp.topic_name = topic_attr["TopicName"]
resp.logging_enabled = True if topic_attr["LoggingEnabled"].lower() == "true" else False
if self.logger:
self.logger.info("GetTopicAttributes RequestId:%s TopicName:%s" % (resp.get_requestid(), req.topic_name))
def publish_message(self, req, resp):
#check parameter
PublishMessageValidator.validate(req)
#make request internal
req_inter = RequestInternal(req.method, uri = "/%s/%s/%s" % (URISEC_TOPIC, req.topic_name, URISEC_MESSAGE))
req_inter.data = TopicMessageEncoder.encode(req)
self.build_header(req, req_inter)
#send request
resp_inter = self.http.send_request(req_inter)
#handle result, make response
resp.status = resp_inter.status
resp.header = resp_inter.header
self.check_status(req_inter, resp_inter, resp)
if resp.error_data == "":
resp.message_id, resp.message_body_md5 = PublishMessageDecoder.decode(resp_inter.data, req_inter.get_req_id())
if self.logger:
self.logger.info("PublishMessage RequestId:%s TopicName:%s MessageId:%s MessageBodyMD5:%s" % \
(resp.get_requestid(), req.topic_name, resp.message_id, resp.message_body_md5))
def subscribe(self, req, resp):
#check parameter
SubscribeValidator.validate(req)
#make request internal
req_inter = RequestInternal(req.method, uri="/%s/%s/%s/%s" % (URISEC_TOPIC, req.topic_name, URISEC_SUBSCRIPTION, req.subscription_name))
req_inter.data = SubscriptionEncoder.encode(req)
self.build_header(req, req_inter)
#send request
req_inter.data = req_inter.data.decode('utf-8')
resp_inter = self.http.send_request(req_inter)
#handle result, make response
resp.status = resp_inter.status
resp.header = resp_inter.header
self.check_status(req_inter, resp_inter, resp)
if resp.error_data == "":
lheader = self.lower_header(resp.header)
resp.subscription_url = lheader["location"]
if self.logger:
self.logger.info("Subscribe RequestId:%s TopicName:%s SubscriptionName:%s SubscriptionURL:%s" % \
(resp.get_requestid(), req.topic_name, req.subscription_name, resp.subscription_url))
def unsubscribe(self, req, resp):
#check parameter
UnsubscribeValidator.validate(req)
#make request internal
req_inter = RequestInternal(req.method, "/%s/%s/%s/%s" % (URISEC_TOPIC, req.topic_name, URISEC_SUBSCRIPTION, req.subscription_name))
self.build_header(req, req_inter)
#send request
resp_inter = self.http.send_request(req_inter)
#handle result, make response
resp.status = resp_inter.status
resp.header = resp_inter.header
self.check_status(req_inter, resp_inter, resp)
if self.logger:
self.logger.info("Unsubscribe RequestId:%s TopicName:%s SubscriptionName:%s" % (resp.get_requestid(), req.topic_name, req.subscription_name))
def list_subscription_by_topic(self, req, resp):
#check parameter
ListSubscriptionByTopicValidator.validate(req)
#make request internal
req_inter = RequestInternal(req.method, "/%s/%s/%s" % (URISEC_TOPIC, req.topic_name, URISEC_SUBSCRIPTION))
if req.prefix != "":
req_inter.header["x-mns-prefix"] = req.prefix
if req.ret_number != -1:
req_inter.header["x-mns-ret-number"] = str(req.ret_number)
if req.marker != "":
req_inter.header["x-mns-marker"] = req.marker
self.build_header(req, req_inter)
#send request
resp_inter = self.http.send_request(req_inter)
#handle result, make response
resp.status = resp_inter.status
resp.header = resp_inter.header
self.check_status(req_inter, resp_inter, resp)
if resp.error_data == "":
resp.subscriptionurl_list, resp.next_marker = ListSubscriptionByTopicDecoder.decode(resp_inter.data, req_inter.get_req_id())
if self.logger:
first_suburl = "" if len(resp.subscriptionurl_list) == 0 else resp.subscriptionurl_list[0]
last_suburl = "" if len(resp.subscriptionurl_list) == 0 else resp.subscriptionurl_list[len(resp.subscriptionurl_list)-1]
self.logger.info("ListSubscriptionByTopic RequestId:%s TopicName:%s Prefix:%s RetNumber:%s \
Marker:%s SubscriptionCount:%s FirstSubscriptionURL:%s LastSubscriptionURL:%s" % \
(resp.get_requestid(), req.topic_name, req.prefix, req.ret_number, \
req.marker, len(resp.subscriptionurl_list), first_suburl, last_suburl))
def set_subscription_attributes(self, req, resp):
#check parameter
SetSubscriptionAttrValidator.validate(req)
#make request internal
req_inter = RequestInternal(req.method, "/%s/%s/%s/%s?metaoverride=true" % (URISEC_TOPIC, req.topic_name, URISEC_SUBSCRIPTION, req.subscription_name))
req_inter.data = SubscriptionEncoder.encode(req, set=True)
self.build_header(req, req_inter)
#send request
resp_inter = self.http.send_request(req_inter)
#handle result, make response
resp.status = resp_inter.status
resp.header = resp_inter.header
self.check_status(req_inter, resp_inter, resp)
if self.logger:
self.logger.info("SetSubscriptionAttributes RequestId:%s TopicName:%s SubscriptionName:%s" % \
(resp.get_requestid(), req.topic_name, req.subscription_name))
def get_subscription_attributes(self, req, resp):
#check parameter
GetSubscriptionAttrValidator.validate(req)
#make request internal
req_inter = RequestInternal(req.method, "/%s/%s/%s/%s" % (URISEC_TOPIC, req.topic_name, URISEC_SUBSCRIPTION, req.subscription_name))
self.build_header(req, req_inter)
#send request
resp_inter = self.http.send_request(req_inter)
#handle result, make response
resp.status = resp_inter.status
resp.header = resp_inter.header
self.check_status(req_inter, resp_inter, resp)
if resp.error_data == "":
subscription_attr = GetSubscriptionAttrDecoder.decode(resp_inter.data, req_inter.get_req_id())
resp.topic_owner = subscription_attr["TopicOwner"]
resp.topic_name = subscription_attr["TopicName"]
resp.subscription_name = subscription_attr["SubscriptionName"]
resp.endpoint = subscription_attr["Endpoint"]
resp.filter_tag = subscription_attr["FilterTag"] if "FilterTag" in subscription_attr.keys() else ""
resp.notify_strategy = subscription_attr["NotifyStrategy"]
resp.notify_content_format = subscription_attr["NotifyContentFormat"]
resp.create_time = int(subscription_attr["CreateTime"])
resp.last_modify_time = int(subscription_attr["LastModifyTime"])
if self.logger:
self.logger.info("GetSubscriptionAttributes RequestId:%s TopicName:%s SubscriptionName:%s" % \
(resp.get_requestid(), req.topic_name, req.subscription_name))
###################################################################################################
#----------------------internal-------------------------------------------------------------------#
def build_header(self, req, req_inter):
if req.request_id is not None:
req_inter.header["x-mns-user-request-id"] = req.request_id
if self.http.is_keep_alive():
req_inter.header["Connection"] = "Keep-Alive"
if req_inter.data != "":
#req_inter.header["content-md5"] = base64.b64encode(hashlib.md5(req_inter.data).hexdigest())
#req_inter.header["content-md5"] = base64.b64encode(hashlib.md5(req_inter.data.encode("utf-8")).hexdigest().encode("utf-8")).decode("utf-8")
req_inter.header["content-md5"] = base64.b64encode(hashlib.md5(req_inter.data).hexdigest().encode("utf-8")).decode("utf-8")
req_inter.header["content-type"] = "text/xml;charset=UTF-8"
req_inter.header["x-mns-version"] = self.version
req_inter.header["host"] = self.host
req_inter.header["date"] = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())
req_inter.header["user-agent"] = "aliyun-sdk-python/%s(%s/%s;%s)" % \
(mns_pkg_info.version, platform.system(), platform.release(), platform.python_version())
req_inter.header["Authorization"] = self.get_signature(req_inter.method, req_inter.header, req_inter.uri)
if self.security_token != "":
req_inter.header["security-token"] = self.security_token
def get_signature(self,method,headers,resource):
content_md5 = self.get_element('content-md5', headers)
content_type = self.get_element('content-type', headers)
date = self.get_element('date', headers)
canonicalized_resource = resource
canonicalized_mns_headers = ""
if len(headers) > 0:
x_header_list = headers.keys()
#x_header_list.sort()
x_header_list = sorted(x_header_list)
for k in x_header_list:
if k.startswith('x-mns-'):
canonicalized_mns_headers += k + ":" + headers[k] + "\n"
string_to_sign = "%s\n%s\n%s\n%s\n%s%s" % (method, content_md5, content_type, date, canonicalized_mns_headers, canonicalized_resource)
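        # An illustrative (assumed) example of the canonical string built above,
        # not produced by this SDK: for a GET request it looks roughly like
        #   GET\n
        #   <content-md5>\n
        #   text/xml;charset=UTF-8\n
        #   <date>\n
        #   x-mns-version:<version>\n
        #   /topics/<topic>/subscriptions/<subscription>
        # i.e. verb, content-md5, content-type, date, the sorted x-mns-* headers
        # and the resource path, joined by newlines.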
#hmac only support str in python2.7
#tmp_key = self.access_key.encode('utf-8') if isinstance(self.access_key, unicode) else self.access_key
tmp_key = self.access_key.encode('utf-8')
h = hmac.new(tmp_key, string_to_sign.encode('utf-8'), hashlib.sha1)
signature = base64.b64encode(h.digest())
signature = "MNS " + self.access_id + ":" + signature.decode('utf-8')
return signature
def get_element(self, name, container):
if name in container:
return container[name]
else:
return ""
def check_status(self, req_inter, resp_inter, resp, decoder=ErrorDecoder):
if resp_inter.status >= 200 and resp_inter.status < 400:
resp.error_data = ""
else:
resp.error_data = resp_inter.data
if resp_inter.status >= 400 and resp_inter.status <= 600:
excType, excMessage, reqId, hostId, subErr = decoder.decodeError(resp.error_data, req_inter.get_req_id())
if reqId is None:
reqId = resp.header["x-mns-request-id"]
raise MNSServerException(excType, excMessage, reqId, hostId, subErr)
else:
                raise MNSClientNetworkException("UnknownError", resp_inter.data, req_inter.get_req_id())
def make_recvresp(self, data, resp):
resp.dequeue_count = int(data["DequeueCount"])
resp.enqueue_time = int(data["EnqueueTime"])
resp.first_dequeue_time = int(data["FirstDequeueTime"])
resp.message_body = data["MessageBody"]
resp.message_id = data["MessageId"]
resp.message_body_md5 = data["MessageBodyMD5"]
resp.next_visible_time = int(data["NextVisibleTime"])
resp.receipt_handle = data["ReceiptHandle"]
resp.priority = int(data["Priority"])
def make_peekresp(self, data, resp):
resp.dequeue_count = int(data["DequeueCount"])
resp.enqueue_time = int(data["EnqueueTime"])
resp.first_dequeue_time = int(data["FirstDequeueTime"])
resp.message_body = data["MessageBody"]
resp.message_id = data["MessageId"]
resp.message_body_md5 = data["MessageBodyMD5"]
resp.priority = int(data["Priority"])
def process_host(self, host):
if host.startswith("http://"):
if host.endswith("/"):
host = host[:-1]
host = host[len("http://"):]
return host, False
elif host.startswith("https://"):
if host.endswith("/"):
host = host[:-1]
host = host[len("https://"):]
return host, True
else:
            raise MNSClientParameterException("InvalidHost", "Only http and https protocols are supported. Invalid host:%s" % host)
@staticmethod
def lower_header(header):
lower_header = dict()
for k, v in header.items():
k = k.lower()
lower_header[k] = v
return lower_header
|
gpl-3.0
| 1,831,573,942,415,423,500 | 45.975248 | 461 | 0.621035 | false |
YeEmrick/learning
|
stanford-tensorflow/2017/examples/07_convnet_mnist_starter.py
|
1
|
6444
|
""" Using convolutional net on MNIST dataset of handwritten digit
(http://yann.lecun.com/exdb/mnist/)
Author: Chip Huyen
Prepared for the class CS 20SI: "TensorFlow for Deep Learning Research"
cs20si.stanford.edu
"""
from __future__ import print_function
from __future__ import division
import os
os.environ['TF_CPP_MIN_LOG_LEVEL']='2'
import time
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
import utils
N_CLASSES = 10
# Step 1: Read in data
# using TF Learn's built in function to load MNIST data to the folder data/mnist
mnist = input_data.read_data_sets("data/mnist", one_hot=True)
# Step 2: Define parameters for the model
LEARNING_RATE = 0.001
BATCH_SIZE = 128
SKIP_STEP = 10
DROPOUT = 0.75
N_EPOCHS = 1
# Step 3: create placeholders for features and labels
# each image in the MNIST data is of shape 28*28 = 784
# therefore, each image is represented with a 1x784 tensor
# We'll be doing dropout for hidden layer so we'll need a placeholder
# for the dropout probability too
# Use None for shape so we can change the batch_size once we've built the graph
with tf.name_scope('data'):
X = tf.placeholder(tf.float32, [None, 784], name="X_placeholder")
Y = tf.placeholder(tf.float32, [None, 10], name="Y_placeholder")
dropout = tf.placeholder(tf.float32, name='dropout')
# Step 4 + 5: create weights + do inference
# the model is conv -> relu -> pool -> conv -> relu -> pool -> fully connected -> softmax
global_step = tf.Variable(0, dtype=tf.int32, trainable=False, name='global_step')
utils.make_dir('checkpoints')
utils.make_dir('checkpoints/convnet_mnist')
with tf.variable_scope('conv1') as scope:
# first, reshape the image to [BATCH_SIZE, 28, 28, 1] to make it work with tf.nn.conv2d
# use the dynamic dimension -1
images = tf.reshape(X, shape=[-1, 28, 28, 1])
# TO DO
# create kernel variable of dimension [5, 5, 1, 32]
# use tf.truncated_normal_initializer()
# TO DO
# create biases variable of dimension [32]
# use tf.constant_initializer(0.0)
# TO DO
# apply tf.nn.conv2d. strides [1, 1, 1, 1], padding is 'SAME'
# TO DO
# apply relu on the sum of convolution output and biases
# TO DO
# output is of dimension BATCH_SIZE x 28 x 28 x 32
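    # A minimal sketch of the TO DOs above (one possible solution, not the
    # official one); the names kernel/biases/conv/conv1 are assumptions.
    kernel = tf.get_variable('kernels', [5, 5, 1, 32],
                             initializer=tf.truncated_normal_initializer())
    biases = tf.get_variable('biases', [32],
                             initializer=tf.constant_initializer(0.0))
    conv = tf.nn.conv2d(images, kernel, strides=[1, 1, 1, 1], padding='SAME')
    conv1 = tf.nn.relu(conv + biases, name=scope.name)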
with tf.variable_scope('pool1') as scope:
# apply max pool with ksize [1, 2, 2, 1], and strides [1, 2, 2, 1], padding 'SAME'
# TO DO
# output is of dimension BATCH_SIZE x 14 x 14 x 32
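    # Assumed implementation of the TO DO above.
    pool1 = tf.nn.max_pool(conv1, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1],
                           padding='SAME')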
with tf.variable_scope('conv2') as scope:
# similar to conv1, except kernel now is of the size 5 x 5 x 32 x 64
kernel = tf.get_variable('kernels', [5, 5, 32, 64],
initializer=tf.truncated_normal_initializer())
biases = tf.get_variable('biases', [64],
initializer=tf.random_normal_initializer())
conv = tf.nn.conv2d(pool1, kernel, strides=[1, 1, 1, 1], padding='SAME')
conv2 = tf.nn.relu(conv + biases, name=scope.name)
# output is of dimension BATCH_SIZE x 14 x 14 x 64
with tf.variable_scope('pool2') as scope:
# similar to pool1
pool2 = tf.nn.max_pool(conv2, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1],
padding='SAME')
# output is of dimension BATCH_SIZE x 7 x 7 x 64
with tf.variable_scope('fc') as scope:
# use weight of dimension 7 * 7 * 64 x 1024
input_features = 7 * 7 * 64
# create weights and biases
# TO DO
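    # Assumed implementation of the TO DO above; the names w and b match the
    # matmul a few lines below.
    w = tf.get_variable('weights', [input_features, 1024],
                        initializer=tf.truncated_normal_initializer())
    b = tf.get_variable('biases', [1024],
                        initializer=tf.constant_initializer(0.0))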
# reshape pool2 to 2 dimensional
pool2 = tf.reshape(pool2, [-1, input_features])
# apply relu on matmul of pool2 and w + b
fc = tf.nn.relu(tf.matmul(pool2, w) + b, name='relu')
# TO DO
# apply dropout
fc = tf.nn.dropout(fc, dropout, name='relu_dropout')
with tf.variable_scope('softmax_linear') as scope:
# this you should know. get logits without softmax
# you need to create weights and biases
# TO DO
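    # One possible (assumed) implementation of the TO DO above.
    w = tf.get_variable('weights', [1024, N_CLASSES],
                        initializer=tf.truncated_normal_initializer())
    b = tf.get_variable('biases', [N_CLASSES],
                        initializer=tf.random_normal_initializer())
    logits = tf.matmul(fc, w) + b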
# Step 6: define loss function
# use softmax cross entropy with logits as the loss function
# compute mean cross entropy, softmax is applied internally
with tf.name_scope('loss'):
# you should know how to do this too
# TO DO
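    # Assumed loss: mean softmax cross entropy over the batch.
    entropy = tf.nn.softmax_cross_entropy_with_logits(labels=Y, logits=logits)
    loss = tf.reduce_mean(entropy, name='loss')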
# Step 7: define training op
# using gradient descent with learning rate of LEARNING_RATE to minimize cost
# don't forget to pass in global_step
# TO DO
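# Assumed training op, following the comment above (plain gradient descent).
optimizer = tf.train.GradientDescentOptimizer(LEARNING_RATE).minimize(
    loss, global_step=global_step)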
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
saver = tf.train.Saver()
# to visualize using TensorBoard
writer = tf.summary.FileWriter('./my_graph/mnist', sess.graph)
##### You have to create folders to store checkpoints
ckpt = tf.train.get_checkpoint_state(os.path.dirname('checkpoints/convnet_mnist/checkpoint'))
# if that checkpoint exists, restore from checkpoint
if ckpt and ckpt.model_checkpoint_path:
saver.restore(sess, ckpt.model_checkpoint_path)
initial_step = global_step.eval()
start_time = time.time()
n_batches = int(mnist.train.num_examples / BATCH_SIZE)
total_loss = 0.0
for index in range(initial_step, n_batches * N_EPOCHS): # train the model n_epochs times
X_batch, Y_batch = mnist.train.next_batch(BATCH_SIZE)
_, loss_batch = sess.run([optimizer, loss],
feed_dict={X: X_batch, Y:Y_batch, dropout: DROPOUT})
total_loss += loss_batch
if (index + 1) % SKIP_STEP == 0:
print('Average loss at step {}: {:5.1f}'.format(index + 1, total_loss / SKIP_STEP))
total_loss = 0.0
saver.save(sess, 'checkpoints/convnet_mnist/mnist-convnet', index)
print("Optimization Finished!") # should be around 0.35 after 25 epochs
print("Total time: {0} seconds".format(time.time() - start_time))
# test the model
n_batches = int(mnist.test.num_examples/BATCH_SIZE)
total_correct_preds = 0
for i in range(n_batches):
X_batch, Y_batch = mnist.test.next_batch(BATCH_SIZE)
_, loss_batch, logits_batch = sess.run([optimizer, loss, logits],
feed_dict={X: X_batch, Y:Y_batch, dropout: DROPOUT})
preds = tf.nn.softmax(logits_batch)
correct_preds = tf.equal(tf.argmax(preds, 1), tf.argmax(Y_batch, 1))
accuracy = tf.reduce_sum(tf.cast(correct_preds, tf.float32))
total_correct_preds += sess.run(accuracy)
print("Accuracy {0}".format(total_correct_preds/mnist.test.num_examples))
|
apache-2.0
| 3,841,000,186,712,797,000 | 33.465241 | 97 | 0.652855 | false |
tiramiseb/abandoned_ospfm
|
ospfm/core/preference.py
|
1
|
2559
|
# Copyright 2012-2013 Sebastien Maccagnoni-Munch
#
# This file is part of OSPFM.
#
# OSPFM is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OSPFM is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OSPFM. If not, see <http://www.gnu.org/licenses/>.
from ospfm import db
from ospfm.core import models
from ospfm.objects import Object
class Preference(Object):
def __own_preference(self, preferencename):
return models.UserPreference.query.filter(
db.and_(
models.UserPreference.user_username == self.username,
models.UserPreference.name == preferencename
)
).first()
def list(self):
        preferences = models.UserPreference.query.filter(
            models.UserPreference.user_username == self.username
        ).all()
return [p.as_dict() for p in preferences]
def create(self):
# Do not "create" preferences, just update them...
self.forbidden(
'Please update the preference: if it does not exist, it will be created')
def read(self, preferencename):
preference = self.__own_preference(preferencename)
if preference:
return preference.as_dict()
else:
return {'name':preferencename, 'value':'UNSET'}
def update(self, preferencename):
preference = self.__own_preference(preferencename)
if not preference:
preference = models.UserPreference(
user_username = self.username,
name = preferencename
)
db.session.add(preference)
preference.value = self.args['value']
db.session.commit()
return preference.as_dict()
def delete(self, preferencename):
preference = self.__own_preference(preferencename)
if not preference:
self.notfound('Nonexistent preference cannot be deleted')
db.session.delete(preference)
db.session.commit()
|
agpl-3.0
| -5,509,308,294,706,889,000 | 36.086957 | 79 | 0.62837 | false |
DataKind-SG/healthcare_ASEAN
|
src/data/download.py
|
1
|
1439
|
# Call download scripts
#from download import *
import download.SG_disease
import download.SG_weather
import download.MY_dengue
import download.BN_disease
import download.TH_disease
import download.ID_malaria
import download.wunderground
import download.apps_who_int
import os
import sys
import logging
import logging.config
logger = logging.getLogger()
logger.addHandler(logging.NullHandler())
DIRECTORY = '../../Data/raw'
def main():
    logger.info('Downloading raw data for all countries')
if not os.path.exists(DIRECTORY):
os.makedirs(DIRECTORY)
# Singapore
download.SG_disease.download()
download.SG_weather.download()
# Brunei
download.BN_disease.download()
# Malaysia
download.MY_dengue.download()
#Indonesia
download.ID_malaria.download()
return
def temp():
logger.info('Downloading raw TH data')
download.TH_disease.download()
logger.info('Finished downloading raw TH data')
logger.info('Downloading wunderground data')
download.wunderground.download()
logger.info('Finished downloading wunderground data')
logger.info('Downloading apps.who.int data / malaria reported confirmed cases')
download.apps_who_int.download()
logger.info('Finished downloading apps.who.int data / malaria reported confirmed cases')
return
if __name__ == '__main__':
logging.config.fileConfig('logconf.ini')
main()
|
mit
| 2,998,777,852,395,966,000 | 23.389831 | 92 | 0.712995 | false |
Chris1221/Bandwidth-Daemon
|
daemon/teksavvy_cron.py
|
1
|
2966
|
#!/opt/local/bin/python
#you might have to change the above to point to your local python
from __future__ import division
import httplib, json, time
#replace this with your API key
APIKEY = "API_KEY"
#pull from api
headers = {"TekSavvy-APIKey": APIKEY}
conn = httplib.HTTPSConnection("api.teksavvy.com")
conn.request('GET', '/web/Usage/UsageSummaryRecords?$filter=IsCurrent%20eq%20true', '', headers)
response = conn.getresponse()
jsonData = response.read()
data = json.loads(jsonData)
#label
pd = data["value"][0]["OnPeakDownload"]
pu = data["value"][0]["OnPeakUpload"]
opd = data["value"][0]["OffPeakDownload"]
opu = data["value"][0]["OffPeakUpload"]
sd = data["value"][0]["StartDate"]
ed = data["value"][0]["EndDate"]
#monthly limits: input yours
monthly_limit = 150
total_down_used = pd + opd
#take percents
percentage_of_data_used = total_down_used/monthly_limit * 100
percentage_of_data_used = round(percentage_of_data_used, 2)
#this could be done better, not sure how
now = (time.strftime("%x %X"))
today = time.strftime("%A")
month = time.strftime("%b")
current_month = int(now[0:2])
current_day = int(now[3:5])
list_of_number_of_days_in_months = [31,28,31,30,31,30,31,31,30,31,30,31]
number_of_days_in_current_month = list_of_number_of_days_in_months[current_month-1]
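# A sturdier alternative (assumed sketch, left commented out so behaviour is
# unchanged): calendar.monthrange gives the month length and handles leap years.
#   import calendar, datetime
#   today_date = datetime.date.today()
#   number_of_days_in_current_month = calendar.monthrange(today_date.year, today_date.month)[1]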
percentage_of_month_passed = current_day/number_of_days_in_current_month * 100
percentage_of_month_passed = round(percentage_of_month_passed, 2)
on_peak_dl_perc = pd/total_down_used*100
on_peak_dl_perc = round(on_peak_dl_perc, 2)
#send email
def send_email():
import smtplib
gmail_user = "email@gmail.com" #change to yours
gmail_pwd = "pass" #change to yours
FROM = 'email@gmail.com' #change to yours
TO = ['email@gmail.com'] #must be a list
SUBJECT = "Bandwidth Tracking %s %s %s" % (today, month, current_day)
TEXT = "Good morning! \nToday is %s. \nYou have used %s percent of your data this month, which is equal to %s Gb of data usage. You are %s percent through this month. %s percent of this data is on peak." %(today, percentage_of_data_used, total_down_used, percentage_of_month_passed, on_peak_dl_perc)
#"peak down: %s, peak up: %s, offpeak up: %s, offpeak up: %s" % (pd, pu, opd, opu)
# Prepare actual message
message = "\From: %s\nTo: %s\nSubject: %s\n\n%s " % (FROM, ", ".join(TO), SUBJECT, TEXT)
try:
#server = smtplib.SMTP(SERVER)
server = smtplib.SMTP("smtp.gmail.com", 587) #or port 465 doesn't seem to work!
server.ehlo()
server.starttls()
server.login(gmail_user, gmail_pwd)
server.sendmail(FROM, TO, message)
#server.quit()
server.close()
print 'successfully sent the mail'
except:
print "failed to send mail"
send_email()
|
gpl-2.0
| -318,330,581,550,085,900 | 37.038462 | 311 | 0.629467 | false |
maxwu/cistat
|
test/test_circleci_request.py
|
1
|
4687
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Test CircleCI interfaces
.. moduleauthor:: Max Wu <http://maxwu.me>
"""
import unittest
import numbers
import json
from cistat import config
from cistat.model import Xunitrpt
from cistat.reqs import CircleCiReq
class CircleCiReqTest(unittest.TestCase):
def setUp(self):
token = config.get_circleci_token()
vcs = 'github'
username = 'maxwu'
project = 'cistat'
self.args1 = {'token': token, 'vcs': vcs, 'username': username, 'project': project}
def test_30builds(self):
build_nums = CircleCiReq.get_recent_build_nums(**self.args1)
self.assertEqual(30, len(build_nums))
self.assertTrue(all(isinstance(i, numbers.Number) for i in build_nums), 'Not all build_num are arabic numbers')
def test_recent_build_json(self):
builds = CircleCiReq.get_recent_builds(**self.args1)
self.assertEqual(30, len(builds))
print(json.dumps(builds[0], indent=2))
for bld in builds:
print("build_num:{}, branch:{}, outcome:{}, commit:{}".format(bld['build_num'],
bld['branch'],
bld['outcome'],
bld['all_commit_details'][0]['commit']))
pass
def test_2builds(self):
builds = CircleCiReq.get_recent_build_nums(token=config.get_circleci_token(),
vcs='github',
username='maxwu',
project='cucumber-java-toy',
limit=2)
self.assertEqual(2, len(builds))
# @unittest.skip("temporarily disabled, test one single artifact list instead")
def test_artifacts(self):
artifacts = CircleCiReq.get_recent_artifacts(token=config.get_circleci_token(),
vcs='github', username='maxwu',
project='cucumber-java-toy',
)
for artifact in artifacts:
self.assertTrue(artifact.startswith('http'), 'artifact url does not start with http')
def test_artifacts80(self):
artifacts = CircleCiReq.get_build_artifacts(token=config.get_circleci_token(),
vcs='github', username='maxwu',
project='cucumber-java-toy', build_num=80)
for artifact in artifacts:
print 'XML artifact: {}'.format(artifact)
self.assertTrue(artifact.endswith('.xml'), 'all artifacts of build 80 are XML files')
self.assertEqual(4, len(artifacts), 'build 80 shall have 4 artifacts')
def test_get_artifact_report_from_build_num(self):
artifacts = CircleCiReq.get_build_artifacts(token=config.get_circleci_token(),
vcs='github', username='maxwu',
project='cucumber-java-toy', build_num=80)
for artifact in artifacts:
report = CircleCiReq.get_artifact_report(url=artifact)
print 'XUnit artifact: {}'.format(artifact)
self.assertTrue(Xunitrpt.is_xunit_report(report))
def test_get_artifact_report(self):
url = 'https://80-77958022-gh.circle-artifacts.com/0/tmp/circle-junit.BxjS188/junit/TEST-org.maxwu.jrefresh.HttpApi.SourceIpApiTest.xml'
str_xunit = CircleCiReq.get_artifact_report(url=url)
print("----Artifact:----\n{}".format(str_xunit))
report = Xunitrpt(xunit=str_xunit)
print("----XUnitObj:----\n{}".format(report))
self.assertDictEqual(report.get_case('org.maxwu.jrefresh.HttpApi.SourceIpApiTest.httpFreegeoipJsonTest'),
{
"fail": 0,
"sum": 1,
"skip": 0,
"rate": 1.0,
"pass": 1,
'time': 0.365,
}
)
def test_get_artifact_report_none_url(self):
self.assertIsNone(CircleCiReq.get_artifact_report(timeout=5))
def test_get_artifact_report_empty_url(self):
self.assertIsNone(CircleCiReq.get_artifact_report(url=''))
if __name__ == '__main__':
unittest.main()
|
mit
| 644,672,477,948,567,000 | 42.803738 | 144 | 0.510988 | false |
robcarver17/pysystemtrade
|
systems/accounts/pandl_calculators/pandl_generic_costs.py
|
1
|
3494
|
import pandas as pd
from systems.accounts.pandl_calculators.pandl_calculation import pandlCalculation, apply_weighting
curve_types = ['gross', 'net', 'costs']
GROSS_CURVE = 'gross'
NET_CURVE = 'net'
COSTS_CURVE = 'costs'
class pandlCalculationWithGenericCosts(pandlCalculation):
def weight(self, weight: pd.Series):
weighted_capital = apply_weighting(weight, self.capital)
weighted_positions = apply_weighting(weight, self.positions)
return pandlCalculationWithGenericCosts(self.price,
positions = weighted_positions,
fx = self.fx,
capital = weighted_capital,
value_per_point = self.value_per_point,
roundpositions = self.roundpositions,
delayfill = self.delayfill)
def as_pd_series(self, percent = False, curve_type=NET_CURVE):
if curve_type==NET_CURVE:
if percent:
return self.net_percentage_pandl()
else:
return self.net_pandl_in_base_currency()
elif curve_type==GROSS_CURVE:
if percent:
return self.percentage_pandl()
else:
return self.pandl_in_base_currency()
elif curve_type==COSTS_CURVE:
if percent:
return self.costs_percentage_pandl()
else:
return self.costs_pandl_in_base_currency()
else:
raise Exception("Curve type %s not recognised! Must be one of %s" % (curve_type, curve_types))
def net_percentage_pandl(self) -> pd.Series:
gross = self.percentage_pandl()
costs = self.costs_percentage_pandl()
net = _add_gross_and_costs(gross, costs)
return net
def net_pandl_in_base_currency(self) -> pd.Series:
gross = self.pandl_in_base_currency()
costs = self.costs_pandl_in_base_currency()
net = _add_gross_and_costs(gross, costs)
return net
def net_pandl_in_instrument_currency(self) -> pd.Series:
gross = self.pandl_in_instrument_currency()
costs = self.costs_pandl_in_instrument_currency()
net = _add_gross_and_costs(gross, costs)
return net
def net_pandl_in_points(self) -> pd.Series:
gross = self.pandl_in_points()
costs = self.costs_pandl_in_points()
net = _add_gross_and_costs(gross, costs)
return net
def costs_percentage_pandl(self) -> pd.Series:
costs_in_base = self.costs_pandl_in_base_currency()
costs = self._percentage_pandl_given_pandl(costs_in_base)
return costs
def costs_pandl_in_base_currency(self) -> pd.Series:
costs_in_instr_ccy = self.costs_pandl_in_instrument_currency()
costs_in_base = self._base_pandl_given_currency_pandl(costs_in_instr_ccy)
return costs_in_base
def costs_pandl_in_instrument_currency(self) -> pd.Series:
costs_in_points = self.costs_pandl_in_points()
costs_in_instr_ccy = self._pandl_in_instrument_ccy_given_points_pandl(costs_in_points)
return costs_in_instr_ccy
def costs_pandl_in_points(self) -> pd.Series:
raise NotImplementedError
def _add_gross_and_costs(gross: pd.Series,
costs: pd.Series):
cumsum_costs = costs.cumsum()
cumsum_costs_aligned = cumsum_costs.reindex(gross.index, method="ffill")
costs_aligned = cumsum_costs_aligned.diff()
net = gross + costs_aligned
return net
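# Usage sketch (assumed, for illustration only): given an instance `pandl` of
# pandlCalculationWithGenericCosts, the individual curves are selected with
#   pandl.as_pd_series(curve_type=GROSS_CURVE)                # gross p&l in base currency
#   pandl.as_pd_series(curve_type=COSTS_CURVE, percent=True)  # costs as a percentage
#   pandl.as_pd_series()                                      # net, i.e. gross plus costs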
|
gpl-3.0
| 7,537,832,286,019,710,000 | 31.351852 | 106 | 0.620206 | false |
denisenkom/django-sqlserver
|
broken-tests/schema/tests.py
|
1
|
101650
|
import datetime
import itertools
import unittest
from copy import copy
import django
from django.db import (
DatabaseError, IntegrityError, OperationalError, connection,
)
from django.db.models import Model
from django.db.models.deletion import CASCADE, PROTECT
from django.db.models.fields import (
AutoField, BigIntegerField, BinaryField, BooleanField,
CharField, DateField, DateTimeField, IntegerField, PositiveIntegerField,
SlugField, TextField, TimeField,
)
if django.VERSION >= (1, 10, 0):
from django.db.models.fields import BigAutoField
from django.db.models.fields.related import (
ForeignKey, ForeignObject, ManyToManyField, OneToOneField,
)
if django.VERSION >= (1, 11, 0):
from django.db.models.indexes import Index
from django.db.transaction import TransactionManagementError, atomic
from django.test import (
TransactionTestCase, mock, skipIfDBFeature, skipUnlessDBFeature,
)
from django.test.utils import CaptureQueriesContext
if django.VERSION >= (1, 10, 0):
from django.test.utils import isolate_apps
from django.utils import timezone
from .fields import (
CustomManyToManyField, InheritedManyToManyField, MediumBlobField,
)
from .models import (
Author, AuthorWithDefaultHeight, AuthorWithEvenLongerName,
AuthorWithIndexedName, Book, BookForeignObj, BookWeak, BookWithLongName,
BookWithO2O, BookWithoutAuthor, BookWithSlug, IntegerPK, Node, Note,
NoteRename, Tag, TagIndexed, TagM2MTest, TagUniqueRename, Thing,
UniqueTest, new_apps,
)
class SchemaTests(TransactionTestCase):
"""
Tests for the schema-alteration code.
Be aware that these tests are more liable than most to false results,
as sometimes the code to check if a test has worked is almost as complex
as the code it is testing.
"""
available_apps = []
models = [
Author, AuthorWithDefaultHeight, AuthorWithEvenLongerName, Book,
BookWeak, BookWithLongName, BookWithO2O, BookWithSlug, IntegerPK, Node,
Note, Tag, TagIndexed, TagM2MTest, TagUniqueRename, Thing, UniqueTest,
]
# Utility functions
def setUp(self):
# local_models should contain test dependent model classes that will be
# automatically removed from the app cache on test tear down.
self.local_models = []
def tearDown(self):
# Delete any tables made for our models
self.delete_tables()
new_apps.clear_cache()
for model in new_apps.get_models():
model._meta._expire_cache()
if 'schema' in new_apps.all_models:
for model in self.local_models:
for many_to_many in model._meta.many_to_many:
through = many_to_many.remote_field.through
if through and through._meta.auto_created:
del new_apps.all_models['schema'][through._meta.model_name]
del new_apps.all_models['schema'][model._meta.model_name]
def delete_tables(self):
"Deletes all model tables for our models for a clean test environment"
converter = connection.introspection.table_name_converter
with connection.schema_editor() as editor:
connection.disable_constraint_checking()
table_names = connection.introspection.table_names()
for model in itertools.chain(SchemaTests.models, self.local_models):
tbl = converter(model._meta.db_table)
if tbl in table_names:
editor.delete_model(model)
table_names.remove(tbl)
connection.enable_constraint_checking()
def column_classes(self, model):
with connection.cursor() as cursor:
columns = {
d[0]: (connection.introspection.get_field_type(d[1], d), d)
for d in connection.introspection.get_table_description(
cursor,
model._meta.db_table,
)
}
# SQLite has a different format for field_type
for name, (type, desc) in columns.items():
if isinstance(type, tuple):
columns[name] = (type[0], desc)
# SQLite also doesn't error properly
if not columns:
raise DatabaseError("Table does not exist (empty pragma)")
return columns
def get_primary_key(self, table):
with connection.cursor() as cursor:
return connection.introspection.get_primary_key_column(cursor, table)
def get_indexes(self, table):
"""
Get the indexes on the table using a new cursor.
"""
with connection.cursor() as cursor:
return [
c['columns'][0]
for c in connection.introspection.get_constraints(cursor, table).values()
if c['index'] and len(c['columns']) == 1
]
def get_constraints(self, table):
"""
Get the constraints on a table using a new cursor.
"""
with connection.cursor() as cursor:
return connection.introspection.get_constraints(cursor, table)
def get_constraints_for_column(self, model, column_name):
constraints = self.get_constraints(model._meta.db_table)
constraints_for_column = []
for name, details in constraints.items():
if details['columns'] == [column_name]:
constraints_for_column.append(name)
return sorted(constraints_for_column)
def check_added_field_default(self, schema_editor, model, field, field_name, expected_default,
cast_function=None):
with connection.cursor() as cursor:
schema_editor.add_field(model, field)
cursor.execute("SELECT {} FROM {};".format(field_name, model._meta.db_table))
database_default = cursor.fetchall()[0][0]
if cast_function and not type(database_default) == type(expected_default):
database_default = cast_function(database_default)
self.assertEqual(database_default, expected_default)
def get_constraints_count(self, table, column, fk_to):
"""
Return a dict with keys 'fks', 'uniques, and 'indexes' indicating the
number of foreign keys, unique constraints, and indexes on
`table`.`column`. The `fk_to` argument is a 2-tuple specifying the
expected foreign key relationship's (table, column).
"""
with connection.cursor() as cursor:
constraints = connection.introspection.get_constraints(cursor, table)
counts = {'fks': 0, 'uniques': 0, 'indexes': 0}
for c in constraints.values():
if c['columns'] == [column]:
if c['foreign_key'] == fk_to:
counts['fks'] += 1
if c['unique']:
counts['uniques'] += 1
elif c['index']:
counts['indexes'] += 1
return counts
def assertIndexOrder(self, table, index, order):
constraints = self.get_constraints(table)
self.assertIn(index, constraints)
index_orders = constraints[index]['orders']
self.assertTrue(all([(val == expected) for val, expected in zip(index_orders, order)]))
def assertForeignKeyExists(self, model, column, expected_fk_table):
"""
Fail if the FK constraint on `model.Meta.db_table`.`column` to
`expected_fk_table`.id doesn't exist.
"""
constraints = self.get_constraints(model._meta.db_table)
constraint_fk = None
for name, details in constraints.items():
if details['columns'] == [column] and details['foreign_key']:
constraint_fk = details['foreign_key']
break
self.assertEqual(constraint_fk, (expected_fk_table, 'id'))
def assertForeignKeyNotExists(self, model, column, expected_fk_table):
with self.assertRaises(AssertionError):
self.assertForeignKeyExists(model, column, expected_fk_table)
# Tests
def test_creation_deletion(self):
"""
Tries creating a model's table, and then deleting it.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# The table is there
list(Author.objects.all())
# Clean up that table
with connection.schema_editor() as editor:
editor.delete_model(Author)
# The table is gone
with self.assertRaises(DatabaseError):
list(Author.objects.all())
@skipUnlessDBFeature('supports_foreign_keys')
def test_fk(self):
"Creating tables out of FK order, then repointing, works"
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Book)
editor.create_model(Author)
editor.create_model(Tag)
# Initial tables are there
list(Author.objects.all())
list(Book.objects.all())
# Make sure the FK constraint is present
with self.assertRaises(IntegrityError):
Book.objects.create(
author_id=1,
title="Much Ado About Foreign Keys",
pub_date=datetime.datetime.now(),
)
# Repoint the FK constraint
old_field = Book._meta.get_field("author")
new_field = ForeignKey(Tag, CASCADE)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
self.assertForeignKeyExists(Book, 'author_id', 'schema_tag')
@skipUnlessDBFeature('supports_foreign_keys')
def test_fk_to_proxy(self):
"Creating a FK to a proxy model creates database constraints."
class AuthorProxy(Author):
class Meta:
app_label = 'schema'
apps = new_apps
proxy = True
class AuthorRef(Model):
author = ForeignKey(AuthorProxy, on_delete=CASCADE)
class Meta:
app_label = 'schema'
apps = new_apps
self.local_models = [AuthorProxy, AuthorRef]
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(AuthorRef)
self.assertForeignKeyExists(AuthorRef, 'author_id', 'schema_author')
@skipUnlessDBFeature('supports_foreign_keys')
def test_fk_db_constraint(self):
"The db_constraint parameter is respected"
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
editor.create_model(Author)
editor.create_model(BookWeak)
# Initial tables are there
list(Author.objects.all())
list(Tag.objects.all())
list(BookWeak.objects.all())
self.assertForeignKeyNotExists(BookWeak, 'author_id', 'schema_author')
# Make a db_constraint=False FK
new_field = ForeignKey(Tag, CASCADE, db_constraint=False)
new_field.set_attributes_from_name("tag")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
self.assertForeignKeyNotExists(Author, 'tag_id', 'schema_tag')
# Alter to one with a constraint
new_field2 = ForeignKey(Tag, CASCADE)
new_field2.set_attributes_from_name("tag")
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, new_field2, strict=True)
self.assertForeignKeyExists(Author, 'tag_id', 'schema_tag')
# Alter to one without a constraint again
new_field2 = ForeignKey(Tag, CASCADE)
new_field2.set_attributes_from_name("tag")
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field2, new_field, strict=True)
self.assertForeignKeyNotExists(Author, 'tag_id', 'schema_tag')
if django.VERSION >= (1, 10, 0):
@isolate_apps('schema')
def test_no_db_constraint_added_during_primary_key_change(self):
"""
When a primary key that's pointed to by a ForeignKey with
db_constraint=False is altered, a foreign key constraint isn't added.
"""
class Author(Model):
class Meta:
app_label = 'schema'
class BookWeak(Model):
author = ForeignKey(Author, CASCADE, db_constraint=False)
class Meta:
app_label = 'schema'
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWeak)
self.assertForeignKeyNotExists(BookWeak, 'author_id', 'schema_author')
old_field = Author._meta.get_field('id')
new_field = BigAutoField(primary_key=True)
new_field.model = Author
new_field.set_attributes_from_name('id')
# @isolate_apps() and inner models are needed to have the model
# relations populated, otherwise this doesn't act as a regression test.
self.assertEqual(len(new_field.model._meta.related_objects), 1)
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertForeignKeyNotExists(BookWeak, 'author_id', 'schema_author')
def _test_m2m_db_constraint(self, M2MFieldClass):
class LocalAuthorWithM2M(Model):
name = CharField(max_length=255)
class Meta:
app_label = 'schema'
apps = new_apps
self.local_models = [LocalAuthorWithM2M]
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
editor.create_model(LocalAuthorWithM2M)
# Initial tables are there
list(LocalAuthorWithM2M.objects.all())
list(Tag.objects.all())
# Make a db_constraint=False FK
new_field = M2MFieldClass(Tag, related_name="authors", db_constraint=False)
new_field.contribute_to_class(LocalAuthorWithM2M, "tags")
# Add the field
with connection.schema_editor() as editor:
editor.add_field(LocalAuthorWithM2M, new_field)
self.assertForeignKeyNotExists(new_field.remote_field.through, 'tag_id', 'schema_tag')
@skipUnlessDBFeature('supports_foreign_keys')
def test_m2m_db_constraint(self):
self._test_m2m_db_constraint(ManyToManyField)
@skipUnlessDBFeature('supports_foreign_keys')
def test_m2m_db_constraint_custom(self):
self._test_m2m_db_constraint(CustomManyToManyField)
@skipUnlessDBFeature('supports_foreign_keys')
def test_m2m_db_constraint_inherited(self):
self._test_m2m_db_constraint(InheritedManyToManyField)
def test_add_field(self):
"""
Tests adding fields to models
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure there's no age field
columns = self.column_classes(Author)
self.assertNotIn("age", columns)
# Add the new field
new_field = IntegerField(null=True)
new_field.set_attributes_from_name("age")
with CaptureQueriesContext(connection) as ctx, connection.schema_editor() as editor:
editor.add_field(Author, new_field)
drop_default_sql = editor.sql_alter_column_no_default % {
'column': editor.quote_name(new_field.name),
}
self.assertFalse(any(drop_default_sql in query['sql'] for query in ctx.captured_queries))
# Ensure the field is right afterwards
columns = self.column_classes(Author)
self.assertEqual(columns['age'][0], "IntegerField")
self.assertEqual(columns['age'][1][6], True)
def test_add_field_temp_default(self):
"""
Tests adding fields to models with a temporary default
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure there's no age field
columns = self.column_classes(Author)
self.assertNotIn("age", columns)
# Add some rows of data
Author.objects.create(name="Andrew", height=30)
Author.objects.create(name="Andrea")
# Add a not-null field
new_field = CharField(max_length=30, default="Godwin")
new_field.set_attributes_from_name("surname")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
# Ensure the field is right afterwards
columns = self.column_classes(Author)
self.assertEqual(columns['surname'][0], "CharField")
self.assertEqual(columns['surname'][1][6],
connection.features.interprets_empty_strings_as_nulls)
def test_add_field_temp_default_boolean(self):
"""
Tests adding fields to models with a temporary default where
the default is False. (#21783)
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure there's no age field
columns = self.column_classes(Author)
self.assertNotIn("age", columns)
# Add some rows of data
Author.objects.create(name="Andrew", height=30)
Author.objects.create(name="Andrea")
# Add a not-null field
new_field = BooleanField(default=False)
new_field.set_attributes_from_name("awesome")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
# Ensure the field is right afterwards
columns = self.column_classes(Author)
# BooleanField are stored as TINYINT(1) on MySQL.
field_type = columns['awesome'][0]
self.assertEqual(
field_type,
connection.features.introspected_boolean_field_type(new_field, created_separately=True)
)
def test_add_field_default_transform(self):
"""
Tests adding fields to models with a default that is not directly
valid in the database (#22581)
"""
class TestTransformField(IntegerField):
# Weird field that saves the count of items in its value
def get_default(self):
return self.default
def get_prep_value(self, value):
if value is None:
return 0
return len(value)
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Add some rows of data
Author.objects.create(name="Andrew", height=30)
Author.objects.create(name="Andrea")
# Add the field with a default it needs to cast (to string in this case)
new_field = TestTransformField(default={1: 2})
new_field.set_attributes_from_name("thing")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
# Ensure the field is there
columns = self.column_classes(Author)
field_type, field_info = columns['thing']
self.assertEqual(field_type, 'IntegerField')
# Make sure the values were transformed correctly
self.assertEqual(Author.objects.extra(where=["thing = 1"]).count(), 2)
def test_add_field_binary(self):
"""
Tests binary fields get a sane default (#22851)
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Add the new field
new_field = BinaryField(blank=True)
new_field.set_attributes_from_name("bits")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
# Ensure the field is right afterwards
columns = self.column_classes(Author)
# MySQL annoyingly uses the same backend, so it'll come back as one of
# these two types.
self.assertIn(columns['bits'][0], ("BinaryField", "TextField"))
@unittest.skipUnless(connection.vendor == 'mysql', "MySQL specific")
def test_add_binaryfield_mediumblob(self):
"""
Test adding a custom-sized binary field on MySQL (#24846).
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Add the new field with default
new_field = MediumBlobField(blank=True, default=b'123')
new_field.set_attributes_from_name('bits')
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
columns = self.column_classes(Author)
# Introspection treats BLOBs as TextFields
self.assertEqual(columns['bits'][0], "TextField")
def test_alter(self):
"""
Tests simple altering of fields
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the field is right to begin with
columns = self.column_classes(Author)
self.assertEqual(columns['name'][0], "CharField")
self.assertEqual(bool(columns['name'][1][6]), bool(connection.features.interprets_empty_strings_as_nulls))
# Alter the name field to a TextField
old_field = Author._meta.get_field("name")
new_field = TextField(null=True)
new_field.set_attributes_from_name("name")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
# Ensure the field is right afterwards
columns = self.column_classes(Author)
self.assertEqual(columns['name'][0], "TextField")
self.assertEqual(columns['name'][1][6], True)
# Change nullability again
new_field2 = TextField(null=False)
new_field2.set_attributes_from_name("name")
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, new_field2, strict=True)
# Ensure the field is right afterwards
columns = self.column_classes(Author)
self.assertEqual(columns['name'][0], "TextField")
self.assertEqual(bool(columns['name'][1][6]), bool(connection.features.interprets_empty_strings_as_nulls))
def test_alter_text_field(self):
# Regression for "BLOB/TEXT column 'info' can't have a default value")
# on MySQL.
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Note)
old_field = Note._meta.get_field("info")
new_field = TextField(blank=True)
new_field.set_attributes_from_name("info")
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
@skipUnlessDBFeature('can_defer_constraint_checks', 'can_rollback_ddl')
def test_alter_fk_checks_deferred_constraints(self):
"""
#25492 - Altering a foreign key's structure and data in the same
transaction.
"""
with connection.schema_editor() as editor:
editor.create_model(Node)
old_field = Node._meta.get_field('parent')
new_field = ForeignKey(Node, CASCADE)
new_field.set_attributes_from_name('parent')
parent = Node.objects.create()
with connection.schema_editor() as editor:
# Update the parent FK to create a deferred constraint check.
Node.objects.update(parent=parent)
editor.alter_field(Node, old_field, new_field, strict=True)
def test_alter_text_field_to_date_field(self):
"""
#25002 - Test conversion of text field to date field.
"""
with connection.schema_editor() as editor:
editor.create_model(Note)
Note.objects.create(info='1988-05-05')
old_field = Note._meta.get_field('info')
new_field = DateField(blank=True)
new_field.set_attributes_from_name('info')
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
# Make sure the field isn't nullable
columns = self.column_classes(Note)
self.assertFalse(columns['info'][1][6])
def test_alter_text_field_to_datetime_field(self):
"""
#25002 - Test conversion of text field to datetime field.
"""
with connection.schema_editor() as editor:
editor.create_model(Note)
Note.objects.create(info='1988-05-05 3:16:17.4567')
old_field = Note._meta.get_field('info')
new_field = DateTimeField(blank=True)
new_field.set_attributes_from_name('info')
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
# Make sure the field isn't nullable
columns = self.column_classes(Note)
self.assertFalse(columns['info'][1][6])
def test_alter_text_field_to_time_field(self):
"""
#25002 - Test conversion of text field to time field.
"""
with connection.schema_editor() as editor:
editor.create_model(Note)
Note.objects.create(info='3:16:17.4567')
old_field = Note._meta.get_field('info')
new_field = TimeField(blank=True)
new_field.set_attributes_from_name('info')
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
# Make sure the field isn't nullable
columns = self.column_classes(Note)
self.assertFalse(columns['info'][1][6])
@skipIfDBFeature('interprets_empty_strings_as_nulls')
def test_alter_textual_field_keep_null_status(self):
"""
Changing a field type shouldn't affect the not null status.
"""
with connection.schema_editor() as editor:
editor.create_model(Note)
with self.assertRaises(IntegrityError):
Note.objects.create(info=None)
old_field = Note._meta.get_field("info")
new_field = CharField(max_length=50)
new_field.set_attributes_from_name("info")
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
with self.assertRaises(IntegrityError):
Note.objects.create(info=None)
def test_alter_numeric_field_keep_null_status(self):
"""
Changing a field type shouldn't affect the not null status.
"""
with connection.schema_editor() as editor:
editor.create_model(UniqueTest)
with self.assertRaises(IntegrityError):
UniqueTest.objects.create(year=None, slug='aaa')
old_field = UniqueTest._meta.get_field("year")
new_field = BigIntegerField()
new_field.set_attributes_from_name("year")
with connection.schema_editor() as editor:
editor.alter_field(UniqueTest, old_field, new_field, strict=True)
with self.assertRaises(IntegrityError):
UniqueTest.objects.create(year=None, slug='bbb')
def test_alter_null_to_not_null(self):
"""
#23609 - Tests handling of default values when altering from NULL to NOT NULL.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the field is right to begin with
columns = self.column_classes(Author)
self.assertTrue(columns['height'][1][6])
# Create some test data
Author.objects.create(name='Not null author', height=12)
Author.objects.create(name='Null author')
# Verify null value
self.assertEqual(Author.objects.get(name='Not null author').height, 12)
self.assertIsNone(Author.objects.get(name='Null author').height)
# Alter the height field to NOT NULL with default
old_field = Author._meta.get_field("height")
new_field = PositiveIntegerField(default=42)
new_field.set_attributes_from_name("height")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
# Ensure the field is right afterwards
columns = self.column_classes(Author)
self.assertFalse(columns['height'][1][6])
# Verify default value
self.assertEqual(Author.objects.get(name='Not null author').height, 12)
self.assertEqual(Author.objects.get(name='Null author').height, 42)
def test_alter_charfield_to_null(self):
"""
#24307 - Should skip an alter statement on databases with
interprets_empty_strings_as_null when changing a CharField to null.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Change the CharField to null
old_field = Author._meta.get_field('name')
new_field = copy(old_field)
new_field.null = True
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
def test_alter_textfield_to_null(self):
"""
#24307 - Should skip an alter statement on databases with
interprets_empty_strings_as_null when changing a TextField to null.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Note)
# Change the TextField to null
old_field = Note._meta.get_field('info')
new_field = copy(old_field)
new_field.null = True
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
@skipUnlessDBFeature('supports_combined_alters')
def test_alter_null_to_not_null_keeping_default(self):
"""
#23738 - Can change a nullable field with default to non-nullable
with the same default.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(AuthorWithDefaultHeight)
# Ensure the field is right to begin with
columns = self.column_classes(AuthorWithDefaultHeight)
self.assertTrue(columns['height'][1][6])
# Alter the height field to NOT NULL keeping the previous default
old_field = AuthorWithDefaultHeight._meta.get_field("height")
new_field = PositiveIntegerField(default=42)
new_field.set_attributes_from_name("height")
with connection.schema_editor() as editor:
editor.alter_field(AuthorWithDefaultHeight, old_field, new_field, strict=True)
# Ensure the field is right afterwards
columns = self.column_classes(AuthorWithDefaultHeight)
self.assertFalse(columns['height'][1][6])
@skipUnlessDBFeature('supports_foreign_keys')
def test_alter_fk(self):
"""
Tests altering of FKs
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
# Ensure the field is right to begin with
columns = self.column_classes(Book)
self.assertEqual(columns['author_id'][0], "IntegerField")
self.assertForeignKeyExists(Book, 'author_id', 'schema_author')
# Alter the FK
old_field = Book._meta.get_field("author")
new_field = ForeignKey(Author, CASCADE, editable=False)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
# Ensure the field is right afterwards
columns = self.column_classes(Book)
self.assertEqual(columns['author_id'][0], "IntegerField")
self.assertForeignKeyExists(Book, 'author_id', 'schema_author')
@skipUnlessDBFeature('supports_foreign_keys')
def test_alter_to_fk(self):
"""
#24447 - Tests adding a FK constraint for an existing column
"""
class LocalBook(Model):
author = IntegerField()
title = CharField(max_length=100, db_index=True)
pub_date = DateTimeField()
class Meta:
app_label = 'schema'
apps = new_apps
self.local_models = [LocalBook]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(LocalBook)
# Ensure no FK constraint exists
constraints = self.get_constraints(LocalBook._meta.db_table)
for name, details in constraints.items():
if details['foreign_key']:
self.fail('Found an unexpected FK constraint to %s' % details['columns'])
old_field = LocalBook._meta.get_field("author")
new_field = ForeignKey(Author, CASCADE)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(LocalBook, old_field, new_field, strict=True)
self.assertForeignKeyExists(LocalBook, 'author_id', 'schema_author')
@skipUnlessDBFeature('supports_foreign_keys')
def test_alter_o2o_to_fk(self):
"""
#24163 - Tests altering of OneToOneField to ForeignKey
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithO2O)
# Ensure the field is right to begin with
columns = self.column_classes(BookWithO2O)
self.assertEqual(columns['author_id'][0], "IntegerField")
# Ensure the field is unique
author = Author.objects.create(name="Joe")
BookWithO2O.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now())
with self.assertRaises(IntegrityError):
BookWithO2O.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now())
BookWithO2O.objects.all().delete()
self.assertForeignKeyExists(BookWithO2O, 'author_id', 'schema_author')
# Alter the OneToOneField to ForeignKey
old_field = BookWithO2O._meta.get_field("author")
new_field = ForeignKey(Author, CASCADE)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(BookWithO2O, old_field, new_field, strict=True)
# Ensure the field is right afterwards
columns = self.column_classes(Book)
self.assertEqual(columns['author_id'][0], "IntegerField")
# Ensure the field is not unique anymore
Book.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now())
Book.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now())
self.assertForeignKeyExists(Book, 'author_id', 'schema_author')
@skipUnlessDBFeature('supports_foreign_keys')
def test_alter_fk_to_o2o(self):
"""
#24163 - Tests altering of ForeignKey to OneToOneField
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
# Ensure the field is right to begin with
columns = self.column_classes(Book)
self.assertEqual(columns['author_id'][0], "IntegerField")
# Ensure the field is not unique
author = Author.objects.create(name="Joe")
Book.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now())
Book.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now())
Book.objects.all().delete()
self.assertForeignKeyExists(Book, 'author_id', 'schema_author')
# Alter the ForeignKey to OneToOneField
old_field = Book._meta.get_field("author")
new_field = OneToOneField(Author, CASCADE)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
# Ensure the field is right afterwards
columns = self.column_classes(BookWithO2O)
self.assertEqual(columns['author_id'][0], "IntegerField")
# Ensure the field is unique now
BookWithO2O.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now())
with self.assertRaises(IntegrityError):
BookWithO2O.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now())
self.assertForeignKeyExists(BookWithO2O, 'author_id', 'schema_author')
def test_alter_field_fk_to_o2o(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
expected_fks = 1 if connection.features.supports_foreign_keys else 0
# Check the index is right to begin with.
counts = self.get_constraints_count(
Book._meta.db_table,
Book._meta.get_field('author').column,
(Author._meta.db_table, Author._meta.pk.column),
)
self.assertEqual(counts, {'fks': expected_fks, 'uniques': 0, 'indexes': 1})
old_field = Book._meta.get_field('author')
new_field = OneToOneField(Author, CASCADE)
new_field.set_attributes_from_name('author')
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
counts = self.get_constraints_count(
Book._meta.db_table,
Book._meta.get_field('author').column,
(Author._meta.db_table, Author._meta.pk.column),
)
# The index on ForeignKey is replaced with a unique constraint for OneToOneField.
self.assertEqual(counts, {'fks': expected_fks, 'uniques': 1, 'indexes': 0})
def test_alter_field_fk_keeps_index(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
expected_fks = 1 if connection.features.supports_foreign_keys else 0
# Check the index is right to begin with.
counts = self.get_constraints_count(
Book._meta.db_table,
Book._meta.get_field('author').column,
(Author._meta.db_table, Author._meta.pk.column),
)
self.assertEqual(counts, {'fks': expected_fks, 'uniques': 0, 'indexes': 1})
old_field = Book._meta.get_field('author')
# on_delete changed from CASCADE.
new_field = ForeignKey(Author, PROTECT)
new_field.set_attributes_from_name('author')
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
counts = self.get_constraints_count(
Book._meta.db_table,
Book._meta.get_field('author').column,
(Author._meta.db_table, Author._meta.pk.column),
)
# The index remains.
self.assertEqual(counts, {'fks': expected_fks, 'uniques': 0, 'indexes': 1})
def test_alter_field_o2o_to_fk(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithO2O)
expected_fks = 1 if connection.features.supports_foreign_keys else 0
# Check the unique constraint is right to begin with.
counts = self.get_constraints_count(
BookWithO2O._meta.db_table,
BookWithO2O._meta.get_field('author').column,
(Author._meta.db_table, Author._meta.pk.column),
)
self.assertEqual(counts, {'fks': expected_fks, 'uniques': 1, 'indexes': 0})
old_field = BookWithO2O._meta.get_field('author')
new_field = ForeignKey(Author, CASCADE)
new_field.set_attributes_from_name('author')
with connection.schema_editor() as editor:
editor.alter_field(BookWithO2O, old_field, new_field, strict=True)
counts = self.get_constraints_count(
BookWithO2O._meta.db_table,
BookWithO2O._meta.get_field('author').column,
(Author._meta.db_table, Author._meta.pk.column),
)
# The unique constraint on OneToOneField is replaced with an index for ForeignKey.
self.assertEqual(counts, {'fks': expected_fks, 'uniques': 0, 'indexes': 1})
def test_alter_field_o2o_keeps_unique(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithO2O)
expected_fks = 1 if connection.features.supports_foreign_keys else 0
# Check the unique constraint is right to begin with.
counts = self.get_constraints_count(
BookWithO2O._meta.db_table,
BookWithO2O._meta.get_field('author').column,
(Author._meta.db_table, Author._meta.pk.column),
)
self.assertEqual(counts, {'fks': expected_fks, 'uniques': 1, 'indexes': 0})
old_field = BookWithO2O._meta.get_field('author')
# on_delete changed from CASCADE.
new_field = OneToOneField(Author, PROTECT)
new_field.set_attributes_from_name('author')
with connection.schema_editor() as editor:
editor.alter_field(BookWithO2O, old_field, new_field, strict=True)
counts = self.get_constraints_count(
BookWithO2O._meta.db_table,
BookWithO2O._meta.get_field('author').column,
(Author._meta.db_table, Author._meta.pk.column),
)
# The unique constraint remains.
self.assertEqual(counts, {'fks': expected_fks, 'uniques': 1, 'indexes': 0})
def test_alter_db_table_case(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Alter the case of the table
old_table_name = Author._meta.db_table
with connection.schema_editor() as editor:
editor.alter_db_table(Author, old_table_name, old_table_name.upper())
def test_alter_implicit_id_to_explicit(self):
"""
Should be able to convert an implicit "id" field to an explicit "id"
primary key field.
"""
with connection.schema_editor() as editor:
editor.create_model(Author)
old_field = Author._meta.get_field("id")
new_field = AutoField(primary_key=True)
new_field.set_attributes_from_name("id")
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
# This will fail if DROP DEFAULT is inadvertently executed on this
# field which drops the id sequence, at least on PostgreSQL.
Author.objects.create(name='Foo')
Author.objects.create(name='Bar')
def test_alter_int_pk_to_autofield_pk(self):
"""
Should be able to rename an IntegerField(primary_key=True) to
AutoField(primary_key=True).
"""
with connection.schema_editor() as editor:
editor.create_model(IntegerPK)
old_field = IntegerPK._meta.get_field('i')
new_field = AutoField(primary_key=True)
new_field.model = IntegerPK
new_field.set_attributes_from_name('i')
with connection.schema_editor() as editor:
editor.alter_field(IntegerPK, old_field, new_field, strict=True)
def test_alter_int_pk_to_int_unique(self):
"""
Should be able to rename an IntegerField(primary_key=True) to
IntegerField(unique=True).
"""
class IntegerUnique(Model):
i = IntegerField(unique=True)
j = IntegerField(primary_key=True)
class Meta:
app_label = 'schema'
apps = new_apps
db_table = 'INTEGERPK'
with connection.schema_editor() as editor:
editor.create_model(IntegerPK)
# model requires a new PK
old_field = IntegerPK._meta.get_field('j')
new_field = IntegerField(primary_key=True)
new_field.model = IntegerPK
new_field.set_attributes_from_name('j')
with connection.schema_editor() as editor:
editor.alter_field(IntegerPK, old_field, new_field, strict=True)
old_field = IntegerPK._meta.get_field('i')
new_field = IntegerField(unique=True)
new_field.model = IntegerPK
new_field.set_attributes_from_name('i')
with connection.schema_editor() as editor:
editor.alter_field(IntegerPK, old_field, new_field, strict=True)
# Ensure unique constraint works.
IntegerUnique.objects.create(i=1, j=1)
with self.assertRaises(IntegrityError):
IntegerUnique.objects.create(i=1, j=2)
def test_rename(self):
"""
        Tests simple renaming of fields
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the field is right to begin with
columns = self.column_classes(Author)
self.assertEqual(columns['name'][0], "CharField")
self.assertNotIn("display_name", columns)
# Alter the name field's name
old_field = Author._meta.get_field("name")
new_field = CharField(max_length=254)
new_field.set_attributes_from_name("display_name")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
# Ensure the field is right afterwards
columns = self.column_classes(Author)
self.assertEqual(columns['display_name'][0], "CharField")
self.assertNotIn("name", columns)
@skipIfDBFeature('interprets_empty_strings_as_nulls')
def test_rename_keep_null_status(self):
"""
Renaming a field shouldn't affect the not null status.
"""
with connection.schema_editor() as editor:
editor.create_model(Note)
with self.assertRaises(IntegrityError):
Note.objects.create(info=None)
old_field = Note._meta.get_field("info")
new_field = TextField()
new_field.set_attributes_from_name("detail_info")
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
columns = self.column_classes(Note)
self.assertEqual(columns['detail_info'][0], "TextField")
self.assertNotIn("info", columns)
with self.assertRaises(IntegrityError):
NoteRename.objects.create(detail_info=None)
def _test_m2m_create(self, M2MFieldClass):
"""
Tests M2M fields on models during creation
"""
class LocalBookWithM2M(Model):
author = ForeignKey(Author, CASCADE)
title = CharField(max_length=100, db_index=True)
pub_date = DateTimeField()
tags = M2MFieldClass("TagM2MTest", related_name="books")
class Meta:
app_label = 'schema'
apps = new_apps
self.local_models = [LocalBookWithM2M]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(TagM2MTest)
editor.create_model(LocalBookWithM2M)
# Ensure there is now an m2m table there
columns = self.column_classes(LocalBookWithM2M._meta.get_field("tags").remote_field.through)
self.assertEqual(columns['tagm2mtest_id'][0], "IntegerField")
def test_m2m_create(self):
self._test_m2m_create(ManyToManyField)
def test_m2m_create_custom(self):
self._test_m2m_create(CustomManyToManyField)
def test_m2m_create_inherited(self):
self._test_m2m_create(InheritedManyToManyField)
def _test_m2m_create_through(self, M2MFieldClass):
"""
Tests M2M fields on models during creation with through models
"""
class LocalTagThrough(Model):
book = ForeignKey("schema.LocalBookWithM2MThrough", CASCADE)
tag = ForeignKey("schema.TagM2MTest", CASCADE)
class Meta:
app_label = 'schema'
apps = new_apps
class LocalBookWithM2MThrough(Model):
tags = M2MFieldClass("TagM2MTest", related_name="books", through=LocalTagThrough)
class Meta:
app_label = 'schema'
apps = new_apps
self.local_models = [LocalTagThrough, LocalBookWithM2MThrough]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(LocalTagThrough)
editor.create_model(TagM2MTest)
editor.create_model(LocalBookWithM2MThrough)
# Ensure there is now an m2m table there
columns = self.column_classes(LocalTagThrough)
self.assertEqual(columns['book_id'][0], "IntegerField")
self.assertEqual(columns['tag_id'][0], "IntegerField")
def test_m2m_create_through(self):
self._test_m2m_create_through(ManyToManyField)
def test_m2m_create_through_custom(self):
self._test_m2m_create_through(CustomManyToManyField)
def test_m2m_create_through_inherited(self):
self._test_m2m_create_through(InheritedManyToManyField)
def _test_m2m(self, M2MFieldClass):
"""
Tests adding/removing M2M fields on models
"""
class LocalAuthorWithM2M(Model):
name = CharField(max_length=255)
class Meta:
app_label = 'schema'
apps = new_apps
self.local_models = [LocalAuthorWithM2M]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(LocalAuthorWithM2M)
editor.create_model(TagM2MTest)
# Create an M2M field
new_field = M2MFieldClass("schema.TagM2MTest", related_name="authors")
new_field.contribute_to_class(LocalAuthorWithM2M, "tags")
# Ensure there's no m2m table there
with self.assertRaises(DatabaseError):
self.column_classes(new_field.remote_field.through)
# Add the field
with connection.schema_editor() as editor:
editor.add_field(LocalAuthorWithM2M, new_field)
# Ensure there is now an m2m table there
columns = self.column_classes(new_field.remote_field.through)
self.assertEqual(columns['tagm2mtest_id'][0], "IntegerField")
# "Alter" the field. This should not rename the DB table to itself.
with connection.schema_editor() as editor:
editor.alter_field(LocalAuthorWithM2M, new_field, new_field, strict=True)
# Remove the M2M table again
with connection.schema_editor() as editor:
editor.remove_field(LocalAuthorWithM2M, new_field)
# Ensure there's no m2m table there
with self.assertRaises(DatabaseError):
self.column_classes(new_field.remote_field.through)
# Make sure the model state is coherent with the table one now that
# we've removed the tags field.
opts = LocalAuthorWithM2M._meta
opts.local_many_to_many.remove(new_field)
del new_apps.all_models['schema'][new_field.remote_field.through._meta.model_name]
opts._expire_cache()
def test_m2m(self):
self._test_m2m(ManyToManyField)
def test_m2m_custom(self):
self._test_m2m(CustomManyToManyField)
def test_m2m_inherited(self):
self._test_m2m(InheritedManyToManyField)
def _test_m2m_through_alter(self, M2MFieldClass):
"""
Tests altering M2Ms with explicit through models (should no-op)
"""
class LocalAuthorTag(Model):
author = ForeignKey("schema.LocalAuthorWithM2MThrough", CASCADE)
tag = ForeignKey("schema.TagM2MTest", CASCADE)
class Meta:
app_label = 'schema'
apps = new_apps
class LocalAuthorWithM2MThrough(Model):
name = CharField(max_length=255)
tags = M2MFieldClass("schema.TagM2MTest", related_name="authors", through=LocalAuthorTag)
class Meta:
app_label = 'schema'
apps = new_apps
self.local_models = [LocalAuthorTag, LocalAuthorWithM2MThrough]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(LocalAuthorTag)
editor.create_model(LocalAuthorWithM2MThrough)
editor.create_model(TagM2MTest)
# Ensure the m2m table is there
self.assertEqual(len(self.column_classes(LocalAuthorTag)), 3)
# "Alter" the field's blankness. This should not actually do anything.
old_field = LocalAuthorWithM2MThrough._meta.get_field("tags")
new_field = M2MFieldClass("schema.TagM2MTest", related_name="authors", through=LocalAuthorTag)
new_field.contribute_to_class(LocalAuthorWithM2MThrough, "tags")
with connection.schema_editor() as editor:
editor.alter_field(LocalAuthorWithM2MThrough, old_field, new_field, strict=True)
# Ensure the m2m table is still there
self.assertEqual(len(self.column_classes(LocalAuthorTag)), 3)
def test_m2m_through_alter(self):
self._test_m2m_through_alter(ManyToManyField)
def test_m2m_through_alter_custom(self):
self._test_m2m_through_alter(CustomManyToManyField)
def test_m2m_through_alter_inherited(self):
self._test_m2m_through_alter(InheritedManyToManyField)
def _test_m2m_repoint(self, M2MFieldClass):
"""
Tests repointing M2M fields
"""
class LocalBookWithM2M(Model):
author = ForeignKey(Author, CASCADE)
title = CharField(max_length=100, db_index=True)
pub_date = DateTimeField()
tags = M2MFieldClass("TagM2MTest", related_name="books")
class Meta:
app_label = 'schema'
apps = new_apps
self.local_models = [LocalBookWithM2M]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(LocalBookWithM2M)
editor.create_model(TagM2MTest)
editor.create_model(UniqueTest)
# Ensure the M2M exists and points to TagM2MTest
if connection.features.supports_foreign_keys:
self.assertForeignKeyExists(
LocalBookWithM2M._meta.get_field("tags").remote_field.through,
'tagm2mtest_id',
'schema_tagm2mtest',
)
# Repoint the M2M
old_field = LocalBookWithM2M._meta.get_field("tags")
new_field = M2MFieldClass(UniqueTest)
new_field.contribute_to_class(LocalBookWithM2M, "uniques")
with connection.schema_editor() as editor:
editor.alter_field(LocalBookWithM2M, old_field, new_field, strict=True)
# Ensure old M2M is gone
with self.assertRaises(DatabaseError):
self.column_classes(LocalBookWithM2M._meta.get_field("tags").remote_field.through)
# This model looks like the new model and is used for teardown.
opts = LocalBookWithM2M._meta
opts.local_many_to_many.remove(old_field)
# Ensure the new M2M exists and points to UniqueTest
if connection.features.supports_foreign_keys:
self.assertForeignKeyExists(new_field.remote_field.through, 'uniquetest_id', 'schema_uniquetest')
def test_m2m_repoint(self):
self._test_m2m_repoint(ManyToManyField)
def test_m2m_repoint_custom(self):
self._test_m2m_repoint(CustomManyToManyField)
def test_m2m_repoint_inherited(self):
self._test_m2m_repoint(InheritedManyToManyField)
@skipUnlessDBFeature('supports_column_check_constraints')
def test_check_constraints(self):
"""
Tests creating/deleting CHECK constraints
"""
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the constraint exists
constraints = self.get_constraints(Author._meta.db_table)
for name, details in constraints.items():
if details['columns'] == ["height"] and details['check']:
break
else:
self.fail("No check constraint for height found")
# Alter the column to remove it
old_field = Author._meta.get_field("height")
new_field = IntegerField(null=True, blank=True)
new_field.set_attributes_from_name("height")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
constraints = self.get_constraints(Author._meta.db_table)
for name, details in constraints.items():
if details['columns'] == ["height"] and details['check']:
self.fail("Check constraint for height found")
# Alter the column to re-add it
new_field2 = Author._meta.get_field("height")
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, new_field2, strict=True)
constraints = self.get_constraints(Author._meta.db_table)
for name, details in constraints.items():
if details['columns'] == ["height"] and details['check']:
break
else:
self.fail("No check constraint for height found")
def test_unique(self):
"""
Tests removing and adding unique constraints to a single column.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
# Ensure the field is unique to begin with
Tag.objects.create(title="foo", slug="foo")
with self.assertRaises(IntegrityError):
Tag.objects.create(title="bar", slug="foo")
Tag.objects.all().delete()
# Alter the slug field to be non-unique
old_field = Tag._meta.get_field("slug")
new_field = SlugField(unique=False)
new_field.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.alter_field(Tag, old_field, new_field, strict=True)
# Ensure the field is no longer unique
Tag.objects.create(title="foo", slug="foo")
Tag.objects.create(title="bar", slug="foo")
Tag.objects.all().delete()
# Alter the slug field to be unique
new_field2 = SlugField(unique=True)
new_field2.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.alter_field(Tag, new_field, new_field2, strict=True)
# Ensure the field is unique again
Tag.objects.create(title="foo", slug="foo")
with self.assertRaises(IntegrityError):
Tag.objects.create(title="bar", slug="foo")
Tag.objects.all().delete()
# Rename the field
new_field3 = SlugField(unique=True)
new_field3.set_attributes_from_name("slug2")
with connection.schema_editor() as editor:
editor.alter_field(Tag, new_field2, new_field3, strict=True)
# Ensure the field is still unique
TagUniqueRename.objects.create(title="foo", slug2="foo")
with self.assertRaises(IntegrityError):
TagUniqueRename.objects.create(title="bar", slug2="foo")
Tag.objects.all().delete()
def test_unique_together(self):
"""
Tests removing and adding unique_together constraints on a model.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(UniqueTest)
# Ensure the fields are unique to begin with
UniqueTest.objects.create(year=2012, slug="foo")
UniqueTest.objects.create(year=2011, slug="foo")
UniqueTest.objects.create(year=2011, slug="bar")
with self.assertRaises(IntegrityError):
UniqueTest.objects.create(year=2012, slug="foo")
UniqueTest.objects.all().delete()
# Alter the model to its non-unique-together companion
with connection.schema_editor() as editor:
editor.alter_unique_together(UniqueTest, UniqueTest._meta.unique_together, [])
# Ensure the fields are no longer unique
UniqueTest.objects.create(year=2012, slug="foo")
UniqueTest.objects.create(year=2012, slug="foo")
UniqueTest.objects.all().delete()
# Alter it back
new_field2 = SlugField(unique=True)
new_field2.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.alter_unique_together(UniqueTest, [], UniqueTest._meta.unique_together)
# Ensure the fields are unique again
UniqueTest.objects.create(year=2012, slug="foo")
with self.assertRaises(IntegrityError):
UniqueTest.objects.create(year=2012, slug="foo")
UniqueTest.objects.all().delete()
def test_unique_together_with_fk(self):
"""
Tests removing and adding unique_together constraints that include
a foreign key.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
        # Ensure there is no unique_together constraint to begin with
self.assertEqual(Book._meta.unique_together, ())
# Add the unique_together constraint
with connection.schema_editor() as editor:
editor.alter_unique_together(Book, [], [['author', 'title']])
# Alter it back
with connection.schema_editor() as editor:
editor.alter_unique_together(Book, [['author', 'title']], [])
def test_unique_together_with_fk_with_existing_index(self):
"""
Tests removing and adding unique_together constraints that include
a foreign key, where the foreign key is added after the model is
created.
"""
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithoutAuthor)
new_field = ForeignKey(Author, CASCADE)
new_field.set_attributes_from_name('author')
editor.add_field(BookWithoutAuthor, new_field)
# Ensure the fields aren't unique to begin with
self.assertEqual(Book._meta.unique_together, ())
# Add the unique_together constraint
with connection.schema_editor() as editor:
editor.alter_unique_together(Book, [], [['author', 'title']])
# Alter it back
with connection.schema_editor() as editor:
editor.alter_unique_together(Book, [['author', 'title']], [])
def test_index_together(self):
"""
Tests removing and adding index_together constraints on a model.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
        # Ensure there's no index on the slug/title columns first
self.assertEqual(
False,
any(
c["index"]
for c in self.get_constraints("schema_tag").values()
if c['columns'] == ["slug", "title"]
),
)
# Alter the model to add an index
with connection.schema_editor() as editor:
editor.alter_index_together(Tag, [], [("slug", "title")])
# Ensure there is now an index
self.assertEqual(
True,
any(
c["index"]
for c in self.get_constraints("schema_tag").values()
if c['columns'] == ["slug", "title"]
),
)
# Alter it back
new_field2 = SlugField(unique=True)
new_field2.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.alter_index_together(Tag, [("slug", "title")], [])
# Ensure there's no index
self.assertEqual(
False,
any(
c["index"]
for c in self.get_constraints("schema_tag").values()
if c['columns'] == ["slug", "title"]
),
)
def test_index_together_with_fk(self):
"""
Tests removing and adding index_together constraints that include
a foreign key.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
        # Ensure there is no index_together to begin with
        self.assertEqual(Book._meta.index_together, ())
        # Add the index_together constraint
with connection.schema_editor() as editor:
editor.alter_index_together(Book, [], [['author', 'title']])
# Alter it back
with connection.schema_editor() as editor:
editor.alter_index_together(Book, [['author', 'title']], [])
def test_create_index_together(self):
"""
Tests creating models with index_together already defined
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(TagIndexed)
# Ensure there is an index
self.assertEqual(
True,
any(
c["index"]
for c in self.get_constraints("schema_tagindexed").values()
if c['columns'] == ["slug", "title"]
),
)
def test_db_table(self):
"""
Tests renaming of the table
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the table is there to begin with
columns = self.column_classes(Author)
self.assertEqual(columns['name'][0], "CharField")
# Alter the table
with connection.schema_editor() as editor:
editor.alter_db_table(Author, "schema_author", "schema_otherauthor")
# Ensure the table is there afterwards
Author._meta.db_table = "schema_otherauthor"
columns = self.column_classes(Author)
self.assertEqual(columns['name'][0], "CharField")
# Alter the table again
with connection.schema_editor() as editor:
editor.alter_db_table(Author, "schema_otherauthor", "schema_author")
# Ensure the table is still there
Author._meta.db_table = "schema_author"
columns = self.column_classes(Author)
self.assertEqual(columns['name'][0], "CharField")
def test_add_remove_index(self):
"""
Tests index addition and removal
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the table is there and has no index
self.assertNotIn('title', self.get_indexes(Author._meta.db_table))
# Add the index
index = Index(fields=['name'], name='author_title_idx')
with connection.schema_editor() as editor:
editor.add_index(Author, index)
self.assertIn('name', self.get_indexes(Author._meta.db_table))
# Drop the index
with connection.schema_editor() as editor:
editor.remove_index(Author, index)
self.assertNotIn('name', self.get_indexes(Author._meta.db_table))
def test_remove_db_index_doesnt_remove_custom_indexes(self):
"""
Changing db_index to False doesn't remove indexes from Meta.indexes.
"""
with connection.schema_editor() as editor:
editor.create_model(AuthorWithIndexedName)
# Ensure the table has its index
self.assertIn('name', self.get_indexes(AuthorWithIndexedName._meta.db_table))
# Add the custom index
index = Index(fields=['-name'], name='author_name_idx')
author_index_name = index.name
with connection.schema_editor() as editor:
db_index_name = editor._create_index_name(
model=AuthorWithIndexedName,
column_names=('name',),
)
if connection.features.uppercases_column_names:
author_index_name = author_index_name.upper()
db_index_name = db_index_name.upper()
try:
AuthorWithIndexedName._meta.indexes = [index]
with connection.schema_editor() as editor:
editor.add_index(AuthorWithIndexedName, index)
old_constraints = self.get_constraints(AuthorWithIndexedName._meta.db_table)
self.assertIn(author_index_name, old_constraints)
self.assertIn(db_index_name, old_constraints)
# Change name field to db_index=False
old_field = AuthorWithIndexedName._meta.get_field('name')
new_field = CharField(max_length=255)
new_field.set_attributes_from_name('name')
with connection.schema_editor() as editor:
editor.alter_field(AuthorWithIndexedName, old_field, new_field, strict=True)
new_constraints = self.get_constraints(AuthorWithIndexedName._meta.db_table)
self.assertNotIn(db_index_name, new_constraints)
# The index from Meta.indexes is still in the database.
self.assertIn(author_index_name, new_constraints)
# Drop the index
with connection.schema_editor() as editor:
editor.remove_index(AuthorWithIndexedName, index)
finally:
AuthorWithIndexedName._meta.indexes = []
def test_order_index(self):
"""
        Indexes defined with ordering (ASC/DESC) on columns are created correctly.
"""
with connection.schema_editor() as editor:
editor.create_model(Author)
# The table doesn't have an index
self.assertNotIn('title', self.get_indexes(Author._meta.db_table))
index_name = 'author_name_idx'
# Add the index
index = Index(fields=['name', '-weight'], name=index_name)
with connection.schema_editor() as editor:
editor.add_index(Author, index)
if connection.features.supports_index_column_ordering:
if connection.features.uppercases_column_names:
index_name = index_name.upper()
self.assertIndexOrder(Author._meta.db_table, index_name, ['ASC', 'DESC'])
# Drop the index
with connection.schema_editor() as editor:
editor.remove_index(Author, index)
def test_indexes(self):
"""
Tests creation/altering of indexes
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
# Ensure the table is there and has the right index
self.assertIn(
"title",
self.get_indexes(Book._meta.db_table),
)
# Alter to remove the index
old_field = Book._meta.get_field("title")
new_field = CharField(max_length=100, db_index=False)
new_field.set_attributes_from_name("title")
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
# Ensure the table is there and has no index
self.assertNotIn(
"title",
self.get_indexes(Book._meta.db_table),
)
# Alter to re-add the index
new_field2 = Book._meta.get_field("title")
with connection.schema_editor() as editor:
editor.alter_field(Book, new_field, new_field2, strict=True)
# Ensure the table is there and has the index again
self.assertIn(
"title",
self.get_indexes(Book._meta.db_table),
)
# Add a unique column, verify that creates an implicit index
new_field3 = BookWithSlug._meta.get_field("slug")
with connection.schema_editor() as editor:
editor.add_field(Book, new_field3)
self.assertIn(
"slug",
self.get_indexes(Book._meta.db_table),
)
# Remove the unique, check the index goes with it
new_field4 = CharField(max_length=20, unique=False)
new_field4.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.alter_field(BookWithSlug, new_field3, new_field4, strict=True)
self.assertNotIn(
"slug",
self.get_indexes(Book._meta.db_table),
)
def test_primary_key(self):
"""
Tests altering of the primary key
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
# Ensure the table is there and has the right PK
self.assertEqual(self.get_primary_key(Tag._meta.db_table), 'id')
# Alter to change the PK
id_field = Tag._meta.get_field("id")
old_field = Tag._meta.get_field("slug")
new_field = SlugField(primary_key=True)
new_field.set_attributes_from_name("slug")
new_field.model = Tag
with connection.schema_editor() as editor:
editor.remove_field(Tag, id_field)
editor.alter_field(Tag, old_field, new_field)
# Ensure the PK changed
self.assertNotIn(
'id',
self.get_indexes(Tag._meta.db_table),
)
self.assertEqual(self.get_primary_key(Tag._meta.db_table), 'slug')
def test_context_manager_exit(self):
"""
Ensures transaction is correctly closed when an error occurs
inside a SchemaEditor context.
"""
class SomeError(Exception):
pass
try:
with connection.schema_editor():
raise SomeError
except SomeError:
self.assertFalse(connection.in_atomic_block)
@skipIfDBFeature('can_rollback_ddl')
def test_unsupported_transactional_ddl_disallowed(self):
message = (
"Executing DDL statements while in a transaction on databases "
"that can't perform a rollback is prohibited."
)
with atomic(), connection.schema_editor() as editor:
with self.assertRaisesMessage(TransactionManagementError, message):
editor.execute(editor.sql_create_table % {'table': 'foo', 'definition': ''})
@skipUnlessDBFeature('supports_foreign_keys')
def test_foreign_key_index_long_names_regression(self):
"""
Regression test for #21497.
        Only affects databases that support foreign keys.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(AuthorWithEvenLongerName)
editor.create_model(BookWithLongName)
# Find the properly shortened column name
column_name = connection.ops.quote_name("author_foreign_key_with_really_long_field_name_id")
column_name = column_name[1:-1].lower() # unquote, and, for Oracle, un-upcase
# Ensure the table is there and has an index on the column
self.assertIn(
column_name,
self.get_indexes(BookWithLongName._meta.db_table),
)
@skipUnlessDBFeature('supports_foreign_keys')
def test_add_foreign_key_long_names(self):
"""
Regression test for #23009.
        Only affects databases that support foreign keys.
"""
# Create the initial tables
with connection.schema_editor() as editor:
editor.create_model(AuthorWithEvenLongerName)
editor.create_model(BookWithLongName)
# Add a second FK, this would fail due to long ref name before the fix
new_field = ForeignKey(AuthorWithEvenLongerName, CASCADE, related_name="something")
new_field.set_attributes_from_name("author_other_really_long_named_i_mean_so_long_fk")
with connection.schema_editor() as editor:
editor.add_field(BookWithLongName, new_field)
def test_add_foreign_object(self):
with connection.schema_editor() as editor:
editor.create_model(BookForeignObj)
new_field = ForeignObject(Author, on_delete=CASCADE, from_fields=['author_id'], to_fields=['id'])
new_field.set_attributes_from_name('author')
with connection.schema_editor() as editor:
editor.add_field(BookForeignObj, new_field)
def test_creation_deletion_reserved_names(self):
"""
Tries creating a model's table, and then deleting it when it has a
SQL reserved name.
"""
# Create the table
with connection.schema_editor() as editor:
try:
editor.create_model(Thing)
except OperationalError as e:
self.fail("Errors when applying initial migration for a model "
"with a table named after an SQL reserved word: %s" % e)
# The table is there
list(Thing.objects.all())
# Clean up that table
with connection.schema_editor() as editor:
editor.delete_model(Thing)
# The table is gone
with self.assertRaises(DatabaseError):
list(Thing.objects.all())
def test_remove_constraints_capital_letters(self):
"""
#23065 - Constraint names must be quoted if they contain capital letters.
"""
def get_field(*args, **kwargs):
kwargs['db_column'] = "CamelCase"
field = kwargs.pop('field_class', IntegerField)(*args, **kwargs)
field.set_attributes_from_name("CamelCase")
return field
model = Author
field = get_field()
table = model._meta.db_table
column = field.column
with connection.schema_editor() as editor:
editor.create_model(model)
editor.add_field(model, field)
constraint_name = "CamelCaseIndex"
editor.execute(
editor.sql_create_index % {
"table": editor.quote_name(table),
"name": editor.quote_name(constraint_name),
"using": "",
"columns": editor.quote_name(column),
"extra": "",
}
)
if connection.features.uppercases_column_names:
constraint_name = constraint_name.upper()
self.assertIn(constraint_name, self.get_constraints(model._meta.db_table))
editor.alter_field(model, get_field(db_index=True), field, strict=True)
self.assertNotIn(constraint_name, self.get_constraints(model._meta.db_table))
constraint_name = "CamelCaseUniqConstraint"
editor.execute(
editor.sql_create_unique % {
"table": editor.quote_name(table),
"name": editor.quote_name(constraint_name),
"columns": editor.quote_name(field.column),
}
)
if connection.features.uppercases_column_names:
constraint_name = constraint_name.upper()
self.assertIn(constraint_name, self.get_constraints(model._meta.db_table))
editor.alter_field(model, get_field(unique=True), field, strict=True)
self.assertNotIn(constraint_name, self.get_constraints(model._meta.db_table))
if connection.features.supports_foreign_keys:
constraint_name = "CamelCaseFKConstraint"
editor.execute(
editor.sql_create_fk % {
"table": editor.quote_name(table),
"name": editor.quote_name(constraint_name),
"column": editor.quote_name(column),
"to_table": editor.quote_name(table),
"to_column": editor.quote_name(model._meta.auto_field.column),
"deferrable": connection.ops.deferrable_sql(),
}
)
if connection.features.uppercases_column_names:
constraint_name = constraint_name.upper()
self.assertIn(constraint_name, self.get_constraints(model._meta.db_table))
editor.alter_field(model, get_field(Author, CASCADE, field_class=ForeignKey), field, strict=True)
self.assertNotIn(constraint_name, self.get_constraints(model._meta.db_table))
def test_add_field_use_effective_default(self):
"""
#23987 - effective_default() should be used as the field default when
adding a new field.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure there's no surname field
columns = self.column_classes(Author)
self.assertNotIn("surname", columns)
# Create a row
Author.objects.create(name='Anonymous1')
# Add new CharField to ensure default will be used from effective_default
new_field = CharField(max_length=15, blank=True)
new_field.set_attributes_from_name("surname")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
# Ensure field was added with the right default
with connection.cursor() as cursor:
cursor.execute("SELECT surname FROM schema_author;")
item = cursor.fetchall()[0]
self.assertEqual(item[0], None if connection.features.interprets_empty_strings_as_nulls else '')
def test_add_field_default_dropped(self):
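        # The default used to back-fill existing rows must not remain as a database default.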
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure there's no surname field
columns = self.column_classes(Author)
self.assertNotIn("surname", columns)
# Create a row
Author.objects.create(name='Anonymous1')
# Add new CharField with a default
new_field = CharField(max_length=15, blank=True, default='surname default')
new_field.set_attributes_from_name("surname")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
# Ensure field was added with the right default
with connection.cursor() as cursor:
cursor.execute("SELECT surname FROM schema_author;")
item = cursor.fetchall()[0]
self.assertEqual(item[0], 'surname default')
# And that the default is no longer set in the database.
field = next(
f for f in connection.introspection.get_table_description(cursor, "schema_author")
if f.name == "surname"
)
if connection.features.can_introspect_default:
self.assertIsNone(field.default)
def test_alter_field_default_dropped(self):
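        # Altering to a field with a default updates existing rows but drops the database default.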
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Create a row
Author.objects.create(name='Anonymous1')
self.assertIsNone(Author.objects.get().height)
old_field = Author._meta.get_field('height')
# The default from the new field is used in updating existing rows.
new_field = IntegerField(blank=True, default=42)
new_field.set_attributes_from_name('height')
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertEqual(Author.objects.get().height, 42)
# The database default should be removed.
with connection.cursor() as cursor:
field = next(
f for f in connection.introspection.get_table_description(cursor, "schema_author")
if f.name == "height"
)
if connection.features.can_introspect_default:
self.assertIsNone(field.default)
def test_add_textfield_unhashable_default(self):
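        # Adding a field whose default is unhashable (e.g. {}) should not crash the schema editor.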
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Create a row
Author.objects.create(name='Anonymous1')
# Create a field that has an unhashable default
new_field = TextField(default={})
new_field.set_attributes_from_name("info")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
def test_add_indexed_charfield(self):
field = CharField(max_length=255, db_index=True)
field.set_attributes_from_name('nom_de_plume')
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.add_field(Author, field)
        # Should create two indexes; one for the LIKE operator.
self.assertEqual(
self.get_constraints_for_column(Author, 'nom_de_plume'),
['schema_author_nom_de_plume_7570a851', 'schema_author_nom_de_plume_7570a851_like'],
)
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
def test_add_unique_charfield(self):
field = CharField(max_length=255, unique=True)
field.set_attributes_from_name('nom_de_plume')
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.add_field(Author, field)
        # Should create two indexes; one for the LIKE operator.
self.assertEqual(
self.get_constraints_for_column(Author, 'nom_de_plume'),
['schema_author_nom_de_plume_7570a851_like', 'schema_author_nom_de_plume_key']
)
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
def test_alter_field_add_index_to_charfield(self):
# Create the table and verify no initial indexes.
with connection.schema_editor() as editor:
editor.create_model(Author)
self.assertEqual(self.get_constraints_for_column(Author, 'name'), [])
# Alter to add db_index=True and create 2 indexes.
old_field = Author._meta.get_field('name')
new_field = CharField(max_length=255, db_index=True)
new_field.set_attributes_from_name('name')
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(Author, 'name'),
['schema_author_name_1fbc5617', 'schema_author_name_1fbc5617_like']
)
# Remove db_index=True to drop both indexes.
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, old_field, strict=True)
self.assertEqual(self.get_constraints_for_column(Author, 'name'), [])
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
def test_alter_field_add_unique_to_charfield(self):
# Create the table and verify no initial indexes.
with connection.schema_editor() as editor:
editor.create_model(Author)
self.assertEqual(self.get_constraints_for_column(Author, 'name'), [])
# Alter to add unique=True and create 2 indexes.
old_field = Author._meta.get_field('name')
new_field = CharField(max_length=255, unique=True)
new_field.set_attributes_from_name('name')
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(Author, 'name'),
['schema_author_name_1fbc5617_like', 'schema_author_name_1fbc5617_uniq']
)
# Remove unique=True to drop both indexes.
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, old_field, strict=True)
self.assertEqual(self.get_constraints_for_column(Author, 'name'), [])
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
def test_alter_field_add_index_to_textfield(self):
# Create the table and verify no initial indexes.
with connection.schema_editor() as editor:
editor.create_model(Note)
self.assertEqual(self.get_constraints_for_column(Note, 'info'), [])
# Alter to add db_index=True and create 2 indexes.
old_field = Note._meta.get_field('info')
new_field = TextField(db_index=True)
new_field.set_attributes_from_name('info')
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(Note, 'info'),
['schema_note_info_4b0ea695', 'schema_note_info_4b0ea695_like']
)
# Remove db_index=True to drop both indexes.
with connection.schema_editor() as editor:
editor.alter_field(Note, new_field, old_field, strict=True)
self.assertEqual(self.get_constraints_for_column(Note, 'info'), [])
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
def test_alter_field_add_unique_to_charfield_with_db_index(self):
# Create the table and verify initial indexes.
with connection.schema_editor() as editor:
editor.create_model(BookWithoutAuthor)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, 'title'),
['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like']
)
# Alter to add unique=True (should replace the index)
old_field = BookWithoutAuthor._meta.get_field('title')
new_field = CharField(max_length=100, db_index=True, unique=True)
new_field.set_attributes_from_name('title')
with connection.schema_editor() as editor:
editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, 'title'),
['schema_book_title_2dfb2dff_like', 'schema_book_title_2dfb2dff_uniq']
)
# Alter to remove unique=True (should drop unique index)
new_field2 = CharField(max_length=100, db_index=True)
new_field2.set_attributes_from_name('title')
with connection.schema_editor() as editor:
editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, 'title'),
['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like']
)
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
def test_alter_field_remove_unique_and_db_index_from_charfield(self):
# Create the table and verify initial indexes.
with connection.schema_editor() as editor:
editor.create_model(BookWithoutAuthor)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, 'title'),
['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like']
)
# Alter to add unique=True (should replace the index)
old_field = BookWithoutAuthor._meta.get_field('title')
new_field = CharField(max_length=100, db_index=True, unique=True)
new_field.set_attributes_from_name('title')
with connection.schema_editor() as editor:
editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, 'title'),
['schema_book_title_2dfb2dff_like', 'schema_book_title_2dfb2dff_uniq']
)
# Alter to remove both unique=True and db_index=True (should drop all indexes)
new_field2 = CharField(max_length=100)
new_field2.set_attributes_from_name('title')
with connection.schema_editor() as editor:
editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True)
self.assertEqual(self.get_constraints_for_column(BookWithoutAuthor, 'title'), [])
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
def test_alter_field_swap_unique_and_db_index_with_charfield(self):
# Create the table and verify initial indexes.
with connection.schema_editor() as editor:
editor.create_model(BookWithoutAuthor)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, 'title'),
['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like']
)
# Alter to set unique=True and remove db_index=True (should replace the index)
old_field = BookWithoutAuthor._meta.get_field('title')
new_field = CharField(max_length=100, unique=True)
new_field.set_attributes_from_name('title')
with connection.schema_editor() as editor:
editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, 'title'),
['schema_book_title_2dfb2dff_like', 'schema_book_title_2dfb2dff_uniq']
)
# Alter to set db_index=True and remove unique=True (should restore index)
new_field2 = CharField(max_length=100, db_index=True)
new_field2.set_attributes_from_name('title')
with connection.schema_editor() as editor:
editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, 'title'),
['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like']
)
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
def test_alter_field_add_db_index_to_charfield_with_unique(self):
# Create the table and verify initial indexes.
with connection.schema_editor() as editor:
editor.create_model(Tag)
self.assertEqual(
self.get_constraints_for_column(Tag, 'slug'),
['schema_tag_slug_2c418ba3_like', 'schema_tag_slug_key']
)
# Alter to add db_index=True
old_field = Tag._meta.get_field('slug')
new_field = SlugField(db_index=True, unique=True)
new_field.set_attributes_from_name('slug')
with connection.schema_editor() as editor:
editor.alter_field(Tag, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(Tag, 'slug'),
['schema_tag_slug_2c418ba3_like', 'schema_tag_slug_key']
)
# Alter to remove db_index=True
new_field2 = SlugField(unique=True)
new_field2.set_attributes_from_name('slug')
with connection.schema_editor() as editor:
editor.alter_field(Tag, new_field, new_field2, strict=True)
self.assertEqual(
self.get_constraints_for_column(Tag, 'slug'),
['schema_tag_slug_2c418ba3_like', 'schema_tag_slug_key']
)
def test_alter_field_add_index_to_integerfield(self):
# Create the table and verify no initial indexes.
with connection.schema_editor() as editor:
editor.create_model(Author)
self.assertEqual(self.get_constraints_for_column(Author, 'weight'), [])
# Alter to add db_index=True and create index.
old_field = Author._meta.get_field('weight')
new_field = IntegerField(null=True, db_index=True)
new_field.set_attributes_from_name('weight')
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
expected = 'schema_author_weight_587740f9'
if connection.features.uppercases_column_names:
expected = expected.upper()
self.assertEqual(self.get_constraints_for_column(Author, 'weight'), [expected])
# Remove db_index=True to drop index.
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, old_field, strict=True)
self.assertEqual(self.get_constraints_for_column(Author, 'weight'), [])
def test_alter_pk_with_self_referential_field(self):
"""
Changing the primary key field name of a model with a self-referential
foreign key (#26384).
"""
if connection.vendor == 'mysql' and connection.mysql_version < (5, 6, 6):
self.skipTest('Skip known bug renaming primary keys on older MySQL versions (#24995).')
with connection.schema_editor() as editor:
editor.create_model(Node)
old_field = Node._meta.get_field('node_id')
new_field = AutoField(primary_key=True)
new_field.set_attributes_from_name('id')
with connection.schema_editor() as editor:
editor.alter_field(Node, old_field, new_field, strict=True)
@mock.patch('django.db.backends.base.schema.datetime')
@mock.patch('django.db.backends.base.schema.timezone')
    def test_add_datefield_and_datetimefield_use_effective_default(self, mocked_tz, mocked_datetime):
"""
effective_default() should be used for DateField, DateTimeField, and
        TimeField if auto_now or auto_now_add is set (#25005).
"""
now = datetime.datetime(month=1, day=1, year=2000, hour=1, minute=1)
now_tz = datetime.datetime(month=1, day=1, year=2000, hour=1, minute=1, tzinfo=timezone.utc)
mocked_datetime.now = mock.MagicMock(return_value=now)
mocked_tz.now = mock.MagicMock(return_value=now_tz)
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Check auto_now/auto_now_add attributes are not defined
columns = self.column_classes(Author)
self.assertNotIn("dob_auto_now", columns)
self.assertNotIn("dob_auto_now_add", columns)
self.assertNotIn("dtob_auto_now", columns)
self.assertNotIn("dtob_auto_now_add", columns)
self.assertNotIn("tob_auto_now", columns)
self.assertNotIn("tob_auto_now_add", columns)
# Create a row
Author.objects.create(name='Anonymous1')
# Ensure fields were added with the correct defaults
dob_auto_now = DateField(auto_now=True)
dob_auto_now.set_attributes_from_name('dob_auto_now')
self.check_added_field_default(
editor, Author, dob_auto_now, 'dob_auto_now', now.date(),
cast_function=lambda x: x.date(),
)
dob_auto_now_add = DateField(auto_now_add=True)
dob_auto_now_add.set_attributes_from_name('dob_auto_now_add')
self.check_added_field_default(
editor, Author, dob_auto_now_add, 'dob_auto_now_add', now.date(),
cast_function=lambda x: x.date(),
)
dtob_auto_now = DateTimeField(auto_now=True)
dtob_auto_now.set_attributes_from_name('dtob_auto_now')
self.check_added_field_default(
editor, Author, dtob_auto_now, 'dtob_auto_now', now,
)
dt_tm_of_birth_auto_now_add = DateTimeField(auto_now_add=True)
dt_tm_of_birth_auto_now_add.set_attributes_from_name('dtob_auto_now_add')
self.check_added_field_default(
editor, Author, dt_tm_of_birth_auto_now_add, 'dtob_auto_now_add', now,
)
tob_auto_now = TimeField(auto_now=True)
tob_auto_now.set_attributes_from_name('tob_auto_now')
self.check_added_field_default(
editor, Author, tob_auto_now, 'tob_auto_now', now.time(),
cast_function=lambda x: x.time(),
)
tob_auto_now_add = TimeField(auto_now_add=True)
tob_auto_now_add.set_attributes_from_name('tob_auto_now_add')
self.check_added_field_default(
editor, Author, tob_auto_now_add, 'tob_auto_now_add', now.time(),
cast_function=lambda x: x.time(),
)
@unittest.skipUnless(connection.vendor == 'oracle', 'Oracle specific db_table syntax')
def test_creation_with_db_table_double_quotes(self):
oracle_user = connection.creation._test_database_user()
class Student(Model):
name = CharField(max_length=30)
class Meta:
app_label = 'schema'
apps = new_apps
db_table = '"%s"."DJANGO_STUDENT_TABLE"' % oracle_user
class Document(Model):
name = CharField(max_length=30)
students = ManyToManyField(Student)
class Meta:
app_label = 'schema'
apps = new_apps
db_table = '"%s"."DJANGO_DOCUMENT_TABLE"' % oracle_user
self.local_models = [Student, Document]
with connection.schema_editor() as editor:
editor.create_model(Student)
editor.create_model(Document)
doc = Document.objects.create(name='Test Name')
student = Student.objects.create(name='Some man')
doc.students.add(student)
|
mit
| 101,262,551,295,524,510 | 43.061552 | 114 | 0.621702 | false |
dapengchen123/code_v1
|
reid/models/resnet_btfu.py
|
1
|
3793
|
from __future__ import absolute_import
import torch.nn.functional as F
import torch.nn.init as init
from torch import nn
from torchvision.models import resnet18, resnet34, resnet50, resnet101, \
resnet152
class ResNet_btfu(nn.Module):
__factory = {
18: resnet18,
34: resnet34,
50: resnet50,
101: resnet101,
152: resnet152,
}
def __init__(self, depth, pretrained=True, cut_at_pooling=False,
num_classes=0, num_features=0, norm=False, dropout=0):
super(ResNet_btfu, self).__init__()
self.depth = depth
self.pretrained = pretrained
self.cut_at_pooling = cut_at_pooling
# Construct base (pretrained) resnet
if depth not in ResNet_btfu.__factory:
raise KeyError("Unsupported depth:", depth)
        # At the bottom of the CNN: an extra stem convolution (conv0) for the
        # two additional non-RGB input channels, fused with the base conv1.
conv0 = nn.Conv2d(2, 64, kernel_size=7, stride=2, padding=3, bias=False)
init.kaiming_normal(conv0.weight, mode='fan_out')
self.conv0 = conv0
self.base = ResNet_btfu.__factory[depth](pretrained=pretrained)
if not self.cut_at_pooling:
self.num_classes = num_classes
self.num_features = num_features
self.norm = norm
self.dropout = dropout
self.has_embedding = num_features > 0
out_planes = self.base.fc.in_features
# Append new layers
if self.has_embedding:
self.feat = nn.Linear(out_planes, self.num_features)
self.feat_bn = nn.BatchNorm1d(self.num_features)
init.kaiming_normal(self.feat.weight, mode='fan_out')
init.constant(self.feat.bias, 0)
init.constant(self.feat_bn.weight, 1)
init.constant(self.feat_bn.bias, 0)
else:
# Change the num_features to CNN output channels
self.num_features = out_planes
if self.dropout > 0:
self.drop = nn.Dropout(self.dropout)
if self.num_classes > 0:
self.classifier = nn.Linear(self.num_features, self.num_classes)
init.normal(self.classifier.weight, std=0.001)
init.constant(self.classifier.bias, 0)
if not self.pretrained:
self.reset_params()
def forward(self, x):
for name, module in self.base._modules.items():
if name == 'conv1':
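                # Split the 5-channel input: the first 3 channels (RGB image)
                # go through the pretrained conv1, the last 2 (motion) through
                # conv0, and the two feature maps are summed.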
x_img = x[:, 0:3, ...]
x_motion = x[:, 3:5, ...]
x = module(x_img)+self.conv0(x_motion)
continue
if name == 'avgpool':
break
x = module(x)
if self.cut_at_pooling:
return x
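        # Global average pooling, then the optional embedding / classifier head.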
x = F.avg_pool2d(x, x.size()[2:])
x = x.view(x.size(0), -1)
if self.has_embedding:
x = self.feat(x)
x = self.feat_bn(x)
if self.norm:
x = x / x.norm(2, 1).expand_as(x)
elif self.has_embedding:
x = F.relu(x)
if self.dropout > 0:
x = self.drop(x)
if self.num_classes > 0:
x = self.classifier(x)
return x
def reset_params(self):
for m in self.modules():
if isinstance(m, nn.Conv2d):
init.kaiming_normal(m.weight, mode='fan_out')
if m.bias is not None:
init.constant(m.bias, 0)
elif isinstance(m, nn.BatchNorm2d):
init.constant(m.weight, 1)
init.constant(m.bias, 0)
elif isinstance(m, nn.Linear):
init.normal(m.weight, std=0.001)
if m.bias is not None:
init.constant(m.bias, 0)
|
mit
| -1,654,766,878,559,819,300 | 31.418803 | 80 | 0.530187 | false |
SophieIPP/ipp-macro-series-parser
|
ipp_macro_series_parser/demographie/parser.py
|
1
|
3235
|
# -*- coding: utf-8 -*-
# TAXIPP -- A French microsimulation model
# By: IPP <taxipp@ipp.eu>
#
# Copyright (C) 2012, 2013, 2014, 2015 IPP
# https://github.com/taxipp
#
# This file is part of TAXIPP.
#
# TAXIPP is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# TAXIPP is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import os
import pandas
import pkg_resources
from ipp_macro_series_parser.config import Config
config_parser = Config(
config_files_directory = os.path.join(pkg_resources.get_distribution('ipp-macro-series-parser').location)
)
xls_directory = os.path.join(config_parser.get('data', 'demographie_directory'), 'xls')
log = logging.getLogger(__name__)
def create_demographie_data_frame():
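    # Stack the yearly age-pyramid sheets (France and France métropolitaine)
    # into one long DataFrame, then melt it into tidy form.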
data_frame = pandas.DataFrame()
for year in range(1999, 2015 + 1):
file_path = os.path.join(xls_directory, u'pyramide-des-ages-{}.xls'.format(year))
skiprows = 5 - (year == 1999)
parse_cols = "A:E"
slice_start = 0
slice_end = 101
sheetname = 'France'
if year <= 2010:
sheetnames = ['France', u'France métropolitaine']
elif year == 2011:
sheetnames = ['{} France'.format(year), u"{} métropole".format(year)]
else:
sheetnames = ['Pyramide {} France'.format(year), u'Pyramide {} métropole'.format(year)]
for sheetname in sheetnames:
try:
df = pandas.read_excel(
file_path,
# na_values = '-',
sheetname = sheetname,
skiprows = skiprows,
parse_cols = parse_cols).iloc[slice_start:slice_end]
df['year'] = year
if sheetname in ['France', u'France métropolitaine']:
df['champ'] = sheetname
else:
df['champ'] = u'France métropolitaine' if u'métropole' in sheetname else 'France'
                # All column names on one line
                remove_cr = dict(
                    (column, column.replace(u"\n", " ").replace("  ", " ")) for column in df.columns)
df.rename(columns = remove_cr, inplace = True)
                # Femmes -> Nombre de femmes, etc.
df.rename(columns = dict(
Femmes = "Nombre de femmes",
Hommes = "Nombre d'hommes"), inplace = True)
data_frame = pandas.concat((data_frame, df))
del df
except Exception, e:
print year
print sheetname
raise(e)
return pandas.melt(data_frame, id_vars = ['year', 'champ', u'Âge révolu', u'Année de naissance'])
|
gpl-3.0
| 6,083,599,587,234,240,000 | 37.404762 | 109 | 0.588965 | false |
waynew/simple-importer
|
importer/importer.py
|
1
|
1305
|
from __future__ import print_function, division
try:
import tkinter as tk
import configparser as configparser
from tkinter.filedialog import askdirectory
except ImportError:
# I guess we're running Python2
import Tkinter as tk
import ConfigParser as configparser
from tkFileDialog import askdirectory
def get_directory(entry):
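    # Ask the user for a directory and copy the selection into the given Entry widget.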
dir_ = askdirectory()
print(dir_)
entry.delete(0, tk.END)
entry.insert(0, dir_)
root = tk.Tk()
root.title('Simple Importer')
row = 0
label = tk.Label(root, text='Source:')
label.grid(row=row, column=0, sticky=tk.W)
src_entry = tk.Entry(root)
src_entry.grid(row=row, column=1, sticky=tk.E+tk.W)
btn_src_browse = tk.Button(root, text='Browse...', command=lambda: get_directory(src_entry))
btn_src_browse.grid(row=row, column=2, sticky=tk.W)
row = 1
label = tk.Label(root, text='Destination:')
label.grid(row=row, column=0, sticky=tk.W)
dst_entry = tk.Entry(root)
dst_entry.grid(row=row, column=1, sticky=tk.E+tk.W)
btn_dst_browse = tk.Button(root, text='Browse...', command=lambda: get_directory(dst_entry))
btn_dst_browse.grid(row=row, column=2, sticky=tk.W)
row = 2
btn_import = tk.Button(root, text='Import')
btn_import.grid(row=row, column=0, columnspan=3, sticky=tk.E+tk.W)
if __name__ == '__main__':
root.mainloop()
|
mit
| -3,465,383,677,578,508,300 | 27.369565 | 92 | 0.699617 | false |
lacatus/TFM
|
__init__.py
|
1
|
1514
|
#!/usr/bin/env python
"""
Initialization of the project global variables
"""
import cv2
import os
from var import variables
def init():
initvariables()
def initvariables():
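    # Resolve the application/data paths and create the OpenCV display windows.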
variables.app_path = os.path.dirname(os.path.realpath(__file__))
variables.datasets_path = variables.app_path + '/data'
"""
variables.datasets_name = {
1: 'grazptz1',
2: 'grazptz2',
3: 'pets091',
4: 'pets092',
5: 'pets093',
6: 'pets094',
7: 'pets095',
8: 'pets096',
9: 'pets097',
10: 'pets098',
11: 'pets099',
12: 'oxtown'}
"""
"""
variables.datasets_name = {
1: 'caviar01',
2: 'caviar02',
3: 'caviar03',
4: 'caviar04',
5: 'caviar05'
}
"""
variables.datasets_name = {
1: 'pets01_crop',
2: 'pets091',
3: 'ewap01',
4: 'oxtown',
5: 'grazptz1',
6: 'pets094',
7: 'pets095'
}
variables.app_window_name = 'Main Window'
variables.app_window = cv2.namedWindow(
variables.app_window_name, cv2.WINDOW_NORMAL)
variables.app_window_trackbar_name = 'Main Background Window'
variables.app_window_trackbar = cv2.namedWindow(
variables.app_window_trackbar_name, cv2.WINDOW_NORMAL)
variables.app_window_trackbar_name_2 = 'Secondary Background Window'
variables.app_window_trackbar_2 = cv2.namedWindow(
variables.app_window_trackbar_name_2, cv2.WINDOW_NORMAL)
|
apache-2.0
| 715,981,144,961,745,200 | 21.939394 | 72 | 0.570674 | false |
nhhegde/minimalist_item_catalog
|
Item_Catalog/add_test_user.py
|
1
|
1698
|
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from database_setup import Base, User, Item
engine = create_engine('postgresql://postgres:postgresql@localhost:5432/user_item_catalog')
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
session = DBSession()
test_user = User(username='test_user', password='nopass', id="TEST")
tu_in_database = session.query(User).filter_by(id=test_user.id)
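# filter_by() returns a lazy Query; calling .one() below raises if the test user does not exist yet.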
try:
if tu_in_database is not None and tu_in_database.one().id != test_user.id:
session.add(test_user)
session.commit()
snowboard_item = Item(name='snowboard', category='outdoor',
description='Cannot wait for next season!',
user=test_user)
session.add(snowboard_item)
session.commit()
frying_pan_item = Item(name='frying pan', category='kitchen',
description='My daily cooking tool', user=test_user)
session.add(frying_pan_item)
session.commit()
print('Added test_user, and a snowboard and frying pan to their catalog!')
else:
print('test_user already in database')
except Exception as e:
print(e)
session.add(test_user)
session.commit()
snowboard_item = Item(name='snowboard', category='outdoor',
description='Cannot wait for next season!',
user=test_user)
session.add(snowboard_item)
session.commit()
frying_pan_item = Item(name='frying pan', category='kitchen',
description='My daily cooking tool', user=test_user)
session.add(frying_pan_item)
session.commit()
|
agpl-3.0
| -8,792,218,446,548,541,000 | 32.653061 | 91 | 0.631331 | false |
cfelton/minnesota
|
examples/boards/nexys/fpgalink/fpgalink.py
|
1
|
2931
|
from myhdl import *
from mn.cores.usbext import fpgalink
from mn.cores.usbext import m_fpgalink_fx2
def fpgalink_nexys(
# ~~ FX2 interface signals ~~
IFCLK, # 48 MHz clock from FX2
RST, # active low async reset
SLWR, # active low write strobe
SLRD, # active low read strobe
SLOE, # active low output enable
FDI, # input data bus
FDO, # output data bus
FDS, # data select
ADDR, # 2bit address (fifo select)
FLAGA, # not used
FLAGB, # gotroom
FLAGC, # gotdata
FLAGD, # not used
PKTEND, # submit partial (less than 512)
# ~~ peripherals interfaces ~~
LEDS # external LEDs
):
"""
"""
# get the local references for the top-level
clock,reset,fx2_bus,fl_bus = fpgalink.get_interfaces()
clock = IFCLK
reset = RST
fx2_bus.data_i = FDI
fx2_bus.data_o = FDO
fx2_bus.data_t = FDS
fx2_bus.gotdata = FLAGC
fx2_bus.gotroom = FLAGB
fx2_bus.write = SLWR
fx2_bus.read = SLRD
    # SLOE is driven from SLRD in hdl_assigns below (shadowed signal for conversion)
fx2_bus.pktend = PKTEND
# instantiate the fpgalink interface
g_fli = m_fpgalink_fx2(clock, reset, fx2_bus, fl_bus)
# ~~~~~~
lreg = Signal(intbv(0)[7:])
treg = Signal(intbv(0)[1:])
tcnt = Signal(modbv(0, min=0, max=2**24))
# aliases
f2hValid_in = fl_bus.valid_i
h2fReady_in = fl_bus.ready_i
h2fValid_out = fl_bus.valid_o
chanAddr_out = fl_bus.chan_addr
f2hData_in = fl_bus.data_i
h2fData_out = fl_bus.data_o
fifosel = fx2_bus.fifosel
@always_comb
def hdl_assigns():
ADDR.next[0] = False
ADDR.next[1] = fifosel
SLOE.next = SLRD
f2hValid_in.next = True
h2fReady_in.next = True
LEDS.next[7:] = lreg
LEDS.next[7] = treg
if chanAddr_out == 0:
f2hData_in.next = 0xCE
elif chanAddr_out == 1:
f2hData_in.next = lreg
else:
f2hData_in.next = 0x55
@always_seq(clock.posedge, reset=reset)
def hdl_fl():
if h2fValid_out and chanAddr_out == 1:
lreg.next = h2fData_out
tcnt.next = tcnt + 1
if tcnt[23]:
treg.next = not treg
return g_fli, hdl_fl, hdl_assigns
def convert():
FDO = Signal(intbv(0)[8:])
FDI = Signal(intbv(0)[8:])
FDS = Signal(bool(0))
SLWR,SLRD,SLOE = [Signal(bool(0)) for ii in range(3)]
FLAGA,FLAGB,FLAGC,FLAGD = [Signal(bool(0)) for ii in range(4)]
ADDR = Signal(intbv(0)[2:])
IFCLK = Signal(bool(0))
RST = ResetSignal(bool(1), active=0, async=True)
LEDS = Signal(intbv(0)[8:])
PKTEND = Signal(bool(0))
toVerilog(fpgalink_nexys, IFCLK, RST, SLWR, SLRD, SLOE,
FDI, FDO, FDS, ADDR, FLAGA, FLAGB, FLAGC, FLAGD, PKTEND,
LEDS)
if __name__ == '__main__':
convert()
|
gpl-3.0
| 7,793,902,359,878,588,000 | 25.889908 | 70 | 0.564654 | false |
mitsuhiko/celery
|
celery/backends/amqp.py
|
1
|
7035
|
"""celery.backends.amqp"""
import socket
import time
from datetime import timedelta
from carrot.messaging import Consumer, Publisher
from celery import conf
from celery import states
from celery.backends.base import BaseDictBackend
from celery.exceptions import TimeoutError
from celery.messaging import establish_connection
from celery.utils import timeutils
class ResultPublisher(Publisher):
exchange = conf.RESULT_EXCHANGE
exchange_type = conf.RESULT_EXCHANGE_TYPE
delivery_mode = conf.RESULT_PERSISTENT and 2 or 1
serializer = conf.RESULT_SERIALIZER
durable = conf.RESULT_PERSISTENT
auto_delete = True
def __init__(self, connection, task_id, **kwargs):
super(ResultPublisher, self).__init__(connection,
routing_key=task_id.replace("-", ""),
**kwargs)
class ResultConsumer(Consumer):
exchange = conf.RESULT_EXCHANGE
exchange_type = conf.RESULT_EXCHANGE_TYPE
durable = conf.RESULT_PERSISTENT
no_ack = True
auto_delete = True
def __init__(self, connection, task_id, expires=None, **kwargs):
routing_key = task_id.replace("-", "")
if expires is not None:
pass
#self.queue_arguments = {"x-expires": expires}
super(ResultConsumer, self).__init__(connection,
queue=routing_key, routing_key=routing_key, **kwargs)
class AMQPBackend(BaseDictBackend):
"""AMQP backend. Publish results by sending messages to the broker
using the task id as routing key.
    **NOTE:** Results published using this backend are read-once only.
    After the result has been read, it is deleted (however, it is still
    cached locally by the backend instance).
"""
_connection = None
def __init__(self, connection=None, exchange=None, exchange_type=None,
persistent=None, serializer=None, auto_delete=None,
expires=None, **kwargs):
self._connection = connection
self.exchange = exchange
self.exchange_type = exchange_type
self.persistent = persistent
self.serializer = serializer
self.auto_delete = auto_delete
self.expires = expires
if self.expires is None:
self.expires = conf.TASK_RESULT_EXPIRES
if isinstance(self.expires, timedelta):
self.expires = timeutils.timedelta_seconds(self.expires)
if self.expires is not None:
self.expires = int(self.expires)
super(AMQPBackend, self).__init__(**kwargs)
def _create_publisher(self, task_id, connection):
delivery_mode = self.persistent and 2 or 1
# Declares the queue.
self._create_consumer(task_id, connection).close()
return ResultPublisher(connection, task_id,
exchange=self.exchange,
exchange_type=self.exchange_type,
delivery_mode=delivery_mode,
serializer=self.serializer,
auto_delete=self.auto_delete)
def _create_consumer(self, task_id, connection):
return ResultConsumer(connection, task_id,
exchange=self.exchange,
exchange_type=self.exchange_type,
durable=self.persistent,
auto_delete=self.auto_delete,
expires=self.expires)
def store_result(self, task_id, result, status, traceback=None):
"""Send task return value and status."""
result = self.encode_result(result, status)
meta = {"task_id": task_id,
"result": result,
"status": status,
"traceback": traceback}
publisher = self._create_publisher(task_id, self.connection)
try:
publisher.send(meta)
finally:
publisher.close()
return result
def get_task_meta(self, task_id, cache=True):
return self.poll(task_id)
def wait_for(self, task_id, timeout=None, cache=True):
if task_id in self._cache:
meta = self._cache[task_id]
else:
try:
meta = self.consume(task_id, timeout=timeout)
except socket.timeout:
raise TimeoutError("The operation timed out.")
if meta["status"] == states.SUCCESS:
return meta["result"]
elif meta["status"] in states.PROPAGATE_STATES:
raise self.exception_to_python(meta["result"])
def poll(self, task_id):
consumer = self._create_consumer(task_id, self.connection)
result = consumer.fetch()
try:
if result:
payload = self._cache[task_id] = result.payload
return payload
else:
# Use previously received status if any.
if task_id in self._cache:
return self._cache[task_id]
return {"status": states.PENDING, "result": None}
finally:
consumer.close()
def consume(self, task_id, timeout=None):
results = []
def callback(message_data, message):
results.append(message_data)
wait = self.connection.drain_events
consumer = self._create_consumer(task_id, self.connection)
consumer.register_callback(callback)
consumer.consume()
try:
time_start = time.time()
while True:
# Total time spent may exceed a single call to wait()
if timeout and time.time() - time_start >= timeout:
raise socket.timeout()
wait(timeout=timeout)
if results:
# Got event on the wanted channel.
break
finally:
consumer.close()
self._cache[task_id] = results[0]
return results[0]
def close(self):
if self._connection is not None:
self._connection.close()
@property
def connection(self):
if not self._connection:
self._connection = establish_connection()
return self._connection
def reload_task_result(self, task_id):
raise NotImplementedError(
"reload_task_result is not supported by this backend.")
def reload_taskset_result(self, task_id):
"""Reload taskset result, even if it has been previously fetched."""
raise NotImplementedError(
"reload_taskset_result is not supported by this backend.")
def save_taskset(self, taskset_id, result):
"""Store the result and status of a task."""
raise NotImplementedError(
"save_taskset is not supported by this backend.")
def restore_taskset(self, taskset_id, cache=True):
"""Get the result of a taskset."""
raise NotImplementedError(
"restore_taskset is not supported by this backend.")
|
bsd-3-clause
| -1,894,539,541,368,101,600 | 33.655172 | 76 | 0.589055 | false |
plish/Trolly
|
trolly/organisation.py
|
1
|
3394
|
from . import trelloobject
class Organisation(trelloobject.TrelloObject):
def __init__(self, trello_client, organisation_id, name='', **kwargs):
super(Organisation, self).__init__(trello_client, **kwargs)
self.id = organisation_id
self.name = name
self.base_uri = '/organizations/' + self.id
def get_organisation_information(self, query_params=None):
'''
        Get information for this organisation. Returns a dictionary of values.
'''
return self.fetch_json(
uri_path=self.base_uri,
query_params=query_params or {}
)
def get_boards(self, **query_params):
'''
Get all the boards for this organisation. Returns a list of Board s.
Returns:
list(Board): The boards attached to this organisation
'''
boards = self.get_boards_json(self.base_uri, query_params=query_params)
boards_list = []
for board_json in boards:
boards_list.append(self.create_board(board_json))
return boards_list
def get_members(self, **query_params):
'''
Get all members attached to this organisation. Returns a list of
Member objects
Returns:
list(Member): The members attached to this organisation
'''
members = self.get_members_json(self.base_uri,
query_params=query_params)
members_list = []
for member_json in members:
members_list.append(self.create_member(member_json))
return members_list
def update_organisation(self, query_params=None):
'''
        Update this organisation's information. Returns a new organisation
object.
'''
organisation_json = self.fetch_json(
uri_path=self.base_uri,
http_method='PUT',
query_params=query_params or {}
)
return self.create_organisation(organisation_json)
def remove_member(self, member_id):
'''
        Remove a member from the organisation. Returns JSON of all members if
successful or raises an Unauthorised exception if not.
'''
return self.fetch_json(
uri_path=self.base_uri + '/members/%s' % member_id,
http_method='DELETE'
)
def add_member_by_id(self, member_id, membership_type='normal'):
'''
        Add a member to the organisation using the member id. Membership type can be
normal or admin. Returns JSON of all members if successful or raises an
Unauthorised exception if not.
'''
return self.fetch_json(
uri_path=self.base_uri + '/members/%s' % member_id,
http_method='PUT',
query_params={
'type': membership_type
}
)
def add_member(self, email, fullname, membership_type='normal'):
'''
        Add a member to the organisation. Membership type can be normal or admin.
Returns JSON of all members if successful or raises an Unauthorised
exception if not.
'''
return self.fetch_json(
uri_path=self.base_uri + '/members',
http_method='PUT',
query_params={
'email': email,
'fullName': fullname,
'type': membership_type
}
)
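# Editor's note -- an illustrative sketch, not part of the original module.
# An Organisation is normally obtained through an authenticated Trello client;
# the client construction and the organisation id below are assumptions.
#
#     org = Organisation(trello_client, 'my_org_id')
#     print(org.get_organisation_information())
#     for board in org.get_boards():
#         print(board)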
|
mit
| 5,201,938,132,651,041,000 | 31.018868 | 79 | 0.57307 | false |
hasgeek/outreach
|
tests/test_kharcha.py
|
1
|
1236
|
import json
import unittest
from flask import url_for
from outreach import app
from outreach.models import (db, SaleItem)
from fixtures import init_data
class TestKharchaAPI(unittest.TestCase):
def setUp(self):
self.ctx = app.test_request_context()
self.ctx.push()
db.drop_all()
db.create_all()
init_data()
self.client = app.test_client()
def test_basic_kharcha_workflow(self):
first_item = SaleItem.query.filter_by(name='conference-ticket').first()
quantity = 2
kharcha_req = {'line_items': [{'item_id': unicode(first_item.id), 'quantity': quantity}]}
resp = self.client.post(url_for('calculate'), data=json.dumps(kharcha_req), content_type='application/json', headers=[('X-Requested-With', 'XMLHttpRequest'), ('Origin', app.config['BASE_URL'])])
self.assertEquals(resp.status_code, 200)
resp_json = json.loads(resp.get_data())
# Test that the price is correct
self.assertEquals(resp_json.get('line_items')[unicode(first_item.id)].get('final_amount'),
quantity * first_item.current_price().amount)
def tearDown(self):
db.session.rollback()
db.drop_all()
self.ctx.pop()
|
agpl-3.0
| -1,900,229,001,393,846,500 | 35.352941 | 202 | 0.644013 | false |
phenoxim/nova
|
nova/api/openstack/compute/fping.py
|
1
|
4844
|
# Copyright 2011 Grid Dynamics
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import itertools
import os
from webob import exc
from oslo_concurrency import processutils
from nova.api.openstack.api_version_request \
import MAX_PROXY_API_SUPPORT_VERSION
from nova.api.openstack import common
from nova.api.openstack.compute.schemas import fping as schema
from nova.api.openstack import wsgi
from nova.api import validation
from nova import compute
import nova.conf
from nova.i18n import _
from nova.policies import fping as fping_policies
CONF = nova.conf.CONF
class FpingController(wsgi.Controller):
def __init__(self, network_api=None):
self.compute_api = compute.API()
self.last_call = {}
def check_fping(self):
if not os.access(CONF.api.fping_path, os.X_OK):
raise exc.HTTPServiceUnavailable(
explanation=_("fping utility is not found."))
@staticmethod
def fping(ips):
fping_ret = processutils.execute(CONF.api.fping_path, *ips,
check_exit_code=False)
if not fping_ret:
return set()
alive_ips = set()
for line in fping_ret[0].split("\n"):
ip = line.split(" ", 1)[0]
if "alive" in line:
alive_ips.add(ip)
return alive_ips
@staticmethod
def _get_instance_ips(context, instance):
ret = []
for network in common.get_networks_for_instance(
context, instance).values():
all_ips = itertools.chain(network["ips"], network["floating_ips"])
ret += [ip["address"] for ip in all_ips]
return ret
@wsgi.Controller.api_version("2.1", MAX_PROXY_API_SUPPORT_VERSION)
@validation.query_schema(schema.index_query)
@wsgi.expected_errors(503)
def index(self, req):
context = req.environ["nova.context"]
search_opts = dict(deleted=False)
if "all_tenants" in req.GET:
context.can(fping_policies.POLICY_ROOT % 'all_tenants')
else:
context.can(fping_policies.BASE_POLICY_NAME)
if context.project_id:
search_opts["project_id"] = context.project_id
else:
search_opts["user_id"] = context.user_id
self.check_fping()
include = req.GET.get("include", None)
if include:
include = set(include.split(","))
exclude = set()
else:
include = None
exclude = req.GET.get("exclude", None)
if exclude:
exclude = set(exclude.split(","))
else:
exclude = set()
instance_list = self.compute_api.get_all(
context, search_opts=search_opts)
ip_list = []
instance_ips = {}
instance_projects = {}
for instance in instance_list:
uuid = instance.uuid
if uuid in exclude or (include is not None and
uuid not in include):
continue
ips = [str(ip) for ip in self._get_instance_ips(context, instance)]
instance_ips[uuid] = ips
instance_projects[uuid] = instance.project_id
ip_list += ips
alive_ips = self.fping(ip_list)
res = []
for instance_uuid, ips in instance_ips.items():
res.append({
"id": instance_uuid,
"project_id": instance_projects[instance_uuid],
"alive": bool(set(ips) & alive_ips),
})
return {"servers": res}
@wsgi.Controller.api_version("2.1", MAX_PROXY_API_SUPPORT_VERSION)
@wsgi.expected_errors((404, 503))
def show(self, req, id):
context = req.environ["nova.context"]
context.can(fping_policies.BASE_POLICY_NAME)
self.check_fping()
instance = common.get_instance(self.compute_api, context, id)
ips = [str(ip) for ip in self._get_instance_ips(context, instance)]
alive_ips = self.fping(ips)
return {
"server": {
"id": instance.uuid,
"project_id": instance.project_id,
"alive": bool(set(ips) & alive_ips),
}
}
|
apache-2.0
| -8,144,105,723,976,514,000 | 33.848921 | 79 | 0.58815 | false |
openstack/heat
|
heat/engine/resources/template_resource.py
|
1
|
14723
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from oslo_serialization import jsonutils
from requests import exceptions
from heat.common import exception
from heat.common import grouputils
from heat.common.i18n import _
from heat.common import template_format
from heat.common import urlfetch
from heat.engine import attributes
from heat.engine import environment
from heat.engine import properties
from heat.engine.resources import stack_resource
from heat.engine import template
from heat.rpc import api as rpc_api
LOG = logging.getLogger(__name__)
REMOTE_SCHEMES = ('http', 'https')
LOCAL_SCHEMES = ('file',)
STACK_ID_OUTPUT = 'OS::stack_id'
def generate_class_from_template(name, data, param_defaults):
tmpl = template.Template(template_format.parse(data))
props, attrs = TemplateResource.get_schemas(tmpl, param_defaults)
cls = type(name, (TemplateResource,),
{'properties_schema': props,
'attributes_schema': attrs,
'__doc__': tmpl.t.get(tmpl.DESCRIPTION)})
return cls
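# Editor's note -- an illustrative sketch, not part of the original module.
# Given raw template data already fetched into ``data``, a provider class can
# be produced roughly as follows (the resource name and the empty parameter
# defaults are assumptions for illustration only):
#
#     cls = generate_class_from_template('My::Custom::Resource', data, {})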
class TemplateResource(stack_resource.StackResource):
"""A resource implemented by a nested stack.
This implementation passes resource properties as parameters to the nested
stack. Outputs of the nested stack are exposed as attributes of this
resource.
"""
def __init__(self, name, json_snippet, stack):
self._parsed_nested = None
self.stack = stack
self.validation_exception = None
tri = self._get_resource_info(json_snippet)
self.properties_schema = {}
self.attributes_schema = {}
# run Resource.__init__() so we can call self.nested()
super(TemplateResource, self).__init__(name, json_snippet, stack)
self.resource_info = tri
if self.validation_exception is None:
self._generate_schema()
self.reparse()
def _get_resource_info(self, rsrc_defn):
try:
tri = self.stack.env.get_resource_info(
rsrc_defn.resource_type,
resource_name=rsrc_defn.name,
registry_type=environment.TemplateResourceInfo)
except exception.EntityNotFound:
self.validation_exception = ValueError(_(
'Only Templates with an extension of .yaml or '
'.template are supported'))
else:
self._template_name = tri.template_name
self.resource_type = tri.name
self.resource_path = tri.path
if tri.user_resource:
self.allowed_schemes = REMOTE_SCHEMES
else:
self.allowed_schemes = REMOTE_SCHEMES + LOCAL_SCHEMES
return tri
@staticmethod
def get_template_file(template_name, allowed_schemes):
try:
return urlfetch.get(template_name, allowed_schemes=allowed_schemes)
except (IOError, exceptions.RequestException) as r_exc:
args = {'name': template_name, 'exc': str(r_exc)}
msg = _('Could not fetch remote template '
'"%(name)s": %(exc)s') % args
raise exception.NotFound(msg_fmt=msg)
@staticmethod
def get_schemas(tmpl, param_defaults):
return ((properties.Properties.schema_from_params(
tmpl.param_schemata(param_defaults))),
(attributes.Attributes.schema_from_outputs(
tmpl[tmpl.OUTPUTS])))
def _generate_schema(self):
self._parsed_nested = None
try:
tmpl = template.Template(self.child_template())
except (exception.NotFound, ValueError) as download_error:
self.validation_exception = download_error
tmpl = template.Template(
{"HeatTemplateFormatVersion": "2012-12-12"})
# re-generate the properties and attributes from the template.
self.properties_schema, self.attributes_schema = self.get_schemas(
tmpl, self.stack.env.param_defaults)
self.attributes_schema.update(self.base_attributes_schema)
self.attributes.set_schema(self.attributes_schema)
def child_params(self):
"""Override method of child_params for the resource.
:return: parameter values for our nested stack based on our properties
"""
params = {}
for pname, pval in iter(self.properties.props.items()):
if not pval.implemented():
continue
try:
val = self.properties.get_user_value(pname)
except ValueError:
if self.action == self.INIT:
prop = self.properties.props[pname]
val = prop.get_value(None)
else:
raise
if val is not None:
# take a list and create a CommaDelimitedList
if pval.type() == properties.Schema.LIST:
if len(val) == 0:
params[pname] = ''
elif isinstance(val[0], dict):
flattened = []
for (count, item) in enumerate(val):
for (ik, iv) in iter(item.items()):
mem_str = '.member.%d.%s=%s' % (count, ik, iv)
flattened.append(mem_str)
params[pname] = ','.join(flattened)
else:
# When None is returned from get_attr, creating a
# delimited list with it fails during validation.
# we should sanitize the None values to empty strings.
# FIXME(rabi) this needs a permanent solution
# to sanitize attributes and outputs in the future.
params[pname] = ','.join(
(x if x is not None else '') for x in val)
else:
# for MAP, the JSON param takes either a collection or
# string, so just pass it on and let the param validate
# as appropriate
params[pname] = val
return params
def child_template(self):
if not self._parsed_nested:
self._parsed_nested = template_format.parse(self.template_data(),
self.template_url)
return self._parsed_nested
def regenerate_info_schema(self, definition):
self._get_resource_info(definition)
self._generate_schema()
@property
def template_url(self):
return self._template_name
def template_data(self):
# we want to have the latest possible template.
# 1. look in files
# 2. try download
# 3. look in the db
reported_excp = None
t_data = self.stack.t.files.get(self.template_url)
stored_t_data = t_data
if t_data is None:
LOG.debug('TemplateResource data file "%s" not found in files.',
self.template_url)
if not t_data and self.template_url.endswith((".yaml", ".template")):
try:
t_data = self.get_template_file(self.template_url,
self.allowed_schemes)
except exception.NotFound as err:
if self.action == self.UPDATE:
raise
reported_excp = err
if t_data is None:
nested_identifier = self.nested_identifier()
if nested_identifier is not None:
nested_t = self.rpc_client().get_template(self.context,
nested_identifier)
t_data = jsonutils.dumps(nested_t)
if t_data is not None:
if t_data != stored_t_data:
self.stack.t.files[self.template_url] = t_data
self.stack.t.env.register_class(self.resource_type,
self.template_url,
path=self.resource_path)
return t_data
if reported_excp is None:
reported_excp = ValueError(_('Unknown error retrieving %s') %
self.template_url)
raise reported_excp
def _validate_against_facade(self, facade_cls):
facade_schemata = properties.schemata(facade_cls.properties_schema)
for n, fs in facade_schemata.items():
if fs.required and n not in self.properties_schema:
msg = (_("Required property %(n)s for facade %(type)s "
"missing in provider") % {'n': n, 'type': self.type()})
raise exception.StackValidationFailed(message=msg)
ps = self.properties_schema.get(n)
if (n in self.properties_schema and
(fs.allowed_param_prop_type() != ps.type)):
# Type mismatch
msg = (_("Property %(n)s type mismatch between facade %(type)s"
" (%(fs_type)s) and provider (%(ps_type)s)") % {
'n': n, 'type': self.type(),
'fs_type': fs.type, 'ps_type': ps.type})
raise exception.StackValidationFailed(message=msg)
for n, ps in self.properties_schema.items():
if ps.required and n not in facade_schemata:
# Required property for template not present in facade
msg = (_("Provider requires property %(n)s "
"unknown in facade %(type)s") % {
'n': n, 'type': self.type()})
raise exception.StackValidationFailed(message=msg)
facade_attrs = facade_cls.attributes_schema.copy()
facade_attrs.update(facade_cls.base_attributes_schema)
for attr in facade_attrs:
if attr not in self.attributes_schema:
msg = (_("Attribute %(attr)s for facade %(type)s "
"missing in provider") % {
'attr': attr, 'type': self.type()})
raise exception.StackValidationFailed(message=msg)
def validate(self):
# Calls validate_template()
result = super(TemplateResource, self).validate()
try:
self.template_data()
except ValueError as ex:
msg = _("Failed to retrieve template data: %s") % ex
raise exception.StackValidationFailed(message=msg)
# If we're using an existing resource type as a facade for this
# template, check for compatibility between the interfaces.
try:
fri = self.stack.env.get_resource_info(
self.type(),
resource_name=self.name,
ignore=self.resource_info)
except exception.EntityNotFound:
pass
else:
facade_cls = fri.get_class(files=self.stack.t.files)
self._validate_against_facade(facade_cls)
return result
def validate_template(self):
if self.validation_exception is not None:
msg = str(self.validation_exception)
raise exception.StackValidationFailed(message=msg)
return super(TemplateResource, self).validate_template()
def handle_adopt(self, resource_data=None):
return self.create_with_template(self.child_template(),
self.child_params(),
adopt_data=resource_data)
def handle_create(self):
return self.create_with_template(self.child_template(),
self.child_params())
def metadata_update(self, new_metadata=None):
"""Refresh the metadata if new_metadata is None."""
if new_metadata is None:
self.metadata_set(self.t.metadata())
def handle_update(self, json_snippet, tmpl_diff, prop_diff):
self.properties = json_snippet.properties(self.properties_schema,
self.context)
return self.update_with_template(self.child_template(),
self.child_params())
def get_reference_id(self):
if self.resource_id is None:
return str(self.name)
if STACK_ID_OUTPUT in self.attributes.cached_attrs:
return self.attributes.cached_attrs[STACK_ID_OUTPUT]
stack_identity = self.nested_identifier()
reference_id = stack_identity.arn()
try:
if self._outputs is not None:
reference_id = self.get_output(STACK_ID_OUTPUT)
elif STACK_ID_OUTPUT in self.attributes:
output = self.rpc_client().show_output(self.context,
dict(stack_identity),
STACK_ID_OUTPUT)
if rpc_api.OUTPUT_ERROR in output:
raise exception.TemplateOutputError(
resource=self.name,
attribute=STACK_ID_OUTPUT,
message=output[rpc_api.OUTPUT_ERROR])
reference_id = output[rpc_api.OUTPUT_VALUE]
except exception.TemplateOutputError as err:
LOG.info('%s', err)
except exception.NotFound:
pass
self.attributes.set_cached_attr(STACK_ID_OUTPUT, reference_id)
return reference_id
def get_attribute(self, key, *path):
if self.resource_id is None:
return None
# first look for explicit resource.x.y
if key.startswith('resource.'):
return grouputils.get_nested_attrs(self, key, False, *path)
# then look for normal outputs
try:
return attributes.select_from_attribute(self.get_output(key),
path)
except exception.NotFound:
raise exception.InvalidTemplateAttribute(resource=self.name,
key=key)
|
apache-2.0
| -2,389,991,223,551,367,000 | 39.671271 | 79 | 0.55865 | false |
aaronmckinstry706/twitter-crime-prediction
|
src/jobs/crime_prediction/grid.py
|
1
|
4226
|
import bisect
import collections
import math
"""A class which defines a square by its upper and lower lat/lon bounds."""
LatLonSquare = collections.namedtuple('Square', ['lat_min', 'lat_max', 'lon_min', 'lon_max'])
class LatLonGrid(object):
"""
A data structure representing a rectangular grid of latitude and longitude regions. The latitude
boundaries are placed at even intervals of lat_step until lat_max is passed or reached. The
final latitude boundary is then lat_max. The longitude boundaries are placed similarly.
Edge case, for clarification: LatLonGrid(0, 1, 0.15, ...) will have latitude boundaries of
approximately [0, 0.15, 0.3, 0.45, 0.6, 0.75, 0.9, 1.0]. LatLonGrid(0, 1, 0.5) will have
latitude boundaries of [0, 0.5, 1.0]. The same holds for longitude boundaries when the
longitude max, min and step are all defined as in the two examples given.
A unique integer index in range(self.grid_size) is also assigned to each grid square. This data
structure provides an efficient mapping operation from a latitude-longitude point to the id of
the grid square in which that point lies.
"""
def __init__(self, lat_min, lat_max, lon_min, lon_max, lat_step, lon_step):
# Make sure all args are floats.
lat_min = float(lat_min)
lat_max = float(lat_max)
lon_min = float(lon_min)
lon_max = float(lon_max)
lat_step = float(lat_step)
lon_step = float(lon_step)
num_lat_bounds = (lat_max - lat_min) / lat_step
self._lat_bounds = []
for b in range(0, int(num_lat_bounds) + 1, 1):
self._lat_bounds.append((lat_max - lat_min) * (b / num_lat_bounds) + lat_min)
self._lat_bounds.append(lat_max)
self._lat_bounds = tuple(self._lat_bounds)
num_lon_bounds = (lon_max - lon_min) / lon_step
self._lon_bounds = []
for b in range(0, int(num_lon_bounds) + 1, 1):
self._lon_bounds.append((lon_max - lon_min) * (b / num_lon_bounds) + lon_min)
self._lon_bounds.append(lon_max)
self._lon_bounds = tuple(self._lon_bounds)
def grid_square_index(self, lat, lon):
"""Given a position defined by (lat, lon), this function returns the index of the grid
square in which this position lies. If the position lies outside of the grid, then -1 is
returned; otherwise, an integer in range(self.grid_size) is returned."""
lat_index = bisect.bisect_left(self._lat_bounds, lat)
lon_index = bisect.bisect_left(self._lon_bounds, lon)
if lat_index == 0 or lat_index == len(self._lat_bounds) \
or lon_index == 0 or lon_index == len(self._lon_bounds):
return -1
lat_index = lat_index - 1
lon_index = lon_index - 1
        # Row-major index with longitude as the fastest-varying axis, so the
        # result is unique and lies in range(self.grid_size).
        return lat_index * self.lon_grid_dimension + lon_index
@property
def lat_grid_dimension(self):
return len(self._lat_bounds) - 1
@property
def lon_grid_dimension(self):
return len(self._lon_bounds) - 1
@property
def grid_size(self):
return self.lat_grid_dimension * self.lon_grid_dimension
def get_lon_delta(meters_delta, current_lat):
"""At a given latitude, this function calculates how many degrees in longitude
one would need to change in order to change position by a given number of meters."""
    # current_lat should be given in degrees
earth_radius_meters = 6371008.0 # https://nssdc.gsfc.nasa.gov/planetary/factsheet/earthfact.html
meters_delta = float(meters_delta)
current_lat = float(current_lat)
return (meters_delta / (earth_radius_meters * math.cos(current_lat / 180.0 * math.pi))) \
* 180.0 / math.pi
def get_lat_delta(meters_delta):
"""This function calculates how many degrees in latitude one would need to change in order to
change position by a given number of meters."""
earth_radius_meters = 6371008.0 # https://nssdc.gsfc.nasa.gov/planetary/factsheet/earthfact.html
meters_delta = float(meters_delta)
return meters_delta / earth_radius_meters * 180 / math.pi
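if __name__ == '__main__':
    # Editor's sketch, not part of the original module: build a grid roughly
    # covering New York City with ~500 m squares and map a point into it.
    # The bounding box and the sample point are assumptions for illustration.
    nyc_lat, nyc_lon = 40.7128, -74.0060
    grid = LatLonGrid(40.4774, 40.9176, -74.2591, -73.7004,
                      get_lat_delta(500), get_lon_delta(500, nyc_lat))
    print('grid squares: %d' % grid.grid_size)
    print('index of (%.4f, %.4f): %d'
          % (nyc_lat, nyc_lon, grid.grid_square_index(nyc_lat, nyc_lon)))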
|
gpl-3.0
| -5,509,849,249,049,962,000 | 47.139535 | 100 | 0.635116 | false |
mcs07/PubChemPy
|
tests/test_errors.py
|
1
|
1060
|
# -*- coding: utf-8 -*-
"""
test_errors
~~~~~~~~~~~~~
Test errors.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import pytest
from pubchempy import *
def test_invalid_identifier():
"""BadRequestError should be raised if identifier is not a positive integer."""
with pytest.raises(BadRequestError):
Compound.from_cid('aergaerhg')
with pytest.raises(BadRequestError):
get_compounds('srthrthsr')
with pytest.raises(BadRequestError):
get_substances('grgrqjksa')
def test_notfound_identifier():
"""NotFoundError should be raised if identifier is a positive integer but record doesn't exist."""
with pytest.raises(NotFoundError):
Compound.from_cid(999999999)
with pytest.raises(NotFoundError):
Substance.from_sid(999999999)
def test_notfound_search():
"""No error should be raised if a search returns no results."""
get_compounds(999999999)
get_substances(999999999)
|
mit
| -7,337,709,465,256,332,000 | 24.853659 | 102 | 0.698113 | false |
MobProgramming/MobTimer.Python
|
Frames/MinimalScreenBlockerFrame.py
|
1
|
8157
|
from datetime import datetime
from tkinter import ttk, N, E, W
from Infrastructure import MobberManager
from Infrastructure.ImageUtility import ImageUtility
from Infrastructure.PathUtility import PathUtility
from Infrastructure.ScreenUtility import ScreenUtility
class MinimalScreenBlockerFrame(ttk.Frame):
def __init__(self, master, controller, time_options_manager, mobber_manager, countdown_manager, settings_manager,
tips_manager, theme_manager,
**kwargs):
super().__init__(master, **kwargs)
self.showing_extend_time_button = False
self.master = master
self.controller = controller
self.countdown_manager = countdown_manager
self.time_options_manager = time_options_manager
self.mobber_manager = mobber_manager # type: MobberManager
self.settings_manager = settings_manager
self.theme_manager = theme_manager
self.tips_manager = tips_manager
self.build_window_content()
self.mobber_manager.subscribe_to_mobber_list_change(self.mobber_list_change_callback)
if self.settings_manager.get_continue_screen_blocker_show_current_time():
self.countdown_manager.subscribe_to_time_changes(self.update_current_time)
def update_current_time(self, days, minutes, seconds):
self.current_time_label["text"] = datetime.now().strftime('%Y-%m-%d %I:%M %p')
def build_window_content(self):
scale = self.master.monitor.height / ScreenUtility.get_expected_height()
unique_theme = self.theme_manager.get_unique_theme_for_scale(scale)
center_frame = ttk.Frame(self)
center_frame.grid()
row_index = 0
image_utility = ImageUtility(self.theme_manager)
icon_size = int(75*scale)
invisible_path = PathUtility.normalize_path('images\\invisible.png')
self.invisible_icon = image_utility.load(invisible_path, icon_size, icon_size)
self.fade_label = ttk.Label(center_frame, image=self.invisible_icon)
self.fade_label.grid(row=0, column=0, sticky=(N, W))
self.fade_label.bind("<Enter>", lambda event: self.controller.fade_app())
self.fade_label.bind("<Leave>", lambda event: self.controller.unfade_app())
if self.settings_manager.get_general_use_logo_image():
self.image_utility = ImageUtility(self.theme_manager)
            image_width = int(800*scale)
image_height = int(200*scale)
self.background_image = self.image_utility.load(self.settings_manager.get_general_logo_image_name(), image_width,
image_height, self.settings_manager.get_general_auto_theme_logo())
title = ttk.Label(center_frame, image=self.background_image)
else:
title = ttk.Label(center_frame, text="Mobbing Timer", style=unique_theme.title_style_id)
title_padx = int(150*scale)
pad_y = int(10*scale)
title.grid(row=row_index, column=0, columnspan=6, padx=title_padx, pady=pad_y)
row_index += 1
self.keyboard_icon = image_utility.load(PathUtility.normalize_path('images\\keyboard.png'), icon_size, icon_size)
self.keyboard_label = ttk.Label(center_frame, image=self.keyboard_icon)
self.keyboard_label.grid(row=row_index, column=1, sticky=(N, E))
self.current_mobber_label = ttk.Label(center_frame, text="", style=unique_theme.current_mobber_label_style_id)
self.current_mobber_label.grid(row=row_index, column=2, columnspan=1, sticky=(N, W))
self.current_mobber_label.bind("<Button-1>", lambda event: self.mobber_manager.switch_next_driver())
self.minions_icon = image_utility.load(PathUtility.normalize_path('images\\minions.png'), icon_size, icon_size)
self.minions_label = ttk.Label(center_frame, image=self.minions_icon)
self.minions_label.grid(row=row_index, column=3, sticky=(N, E))
self.next_mobber_label = ttk.Label(center_frame, text="", style=unique_theme.next_mobber_label_style_id)
self.next_mobber_label.grid(row=row_index, column=4, columnspan=1, sticky=(N, W))
row_index += 1
start_button = ttk.Button(center_frame, text="Continue Mobbing!", style=unique_theme.start_button_style_id)
start_button.grid(row=row_index, column=1, columnspan=4, sticky=N + E + W, padx=pad_y, pady=pad_y)
start_button.bind("<Button-1>", lambda event: self.controller.show_transparent_countdown_frame())
row_index += 1
if self.settings_manager.get_general_enable_tips():
self.tip_text = ttk.Label(center_frame, text="", style=unique_theme.label_style_id, wraplength=500)
self.tip_text.grid(row=row_index, column=1, columnspan=4, padx=int(30*scale), pady=pad_y, sticky=(N))
row_index += 1
if self.settings_manager.get_continue_screen_blocker_show_current_time():
self.current_time_label = ttk.Label(center_frame, text="current time", style=unique_theme.label_style_id)
self.current_time_label.grid(row=row_index, column=1, columnspan=4, padx=int(30*scale), pady=pad_y, sticky=(N))
row_index += 1
if self.settings_manager.get_timer_extension_enabled() and not self.settings_manager.get_randomize_randomize_next_driver():
minutes = self.settings_manager.get_timer_extension_minutes()
seconds = self.settings_manager.get_timer_extension_seconds()
self.extend_time_button = ttk.Button(center_frame, text=self.get_extend_time_button_text(), style=unique_theme.button_style_id)
self.extend_time_button.grid(row=row_index, column=1, columnspan=4, sticky=N + E + W, padx=int(90*scale), pady=pad_y)
self.showing_extend_time_button = True
self.extend_time_button.bind("<Button-1>",
lambda event: self.controller.rewind_and_extend(minutes, seconds))
row_index += 1
        setup_button = ttk.Button(center_frame, text="Mob Setup & Time", style=unique_theme.button_style_id)
setup_button.grid(row=row_index, column=1, columnspan=4, sticky=N + E + W, padx=int(90*scale), pady=pad_y)
setup_button.bind("<Button-1>", lambda event: self.controller.show_screen_blocker_frame())
row_index += 1
        quit_button = ttk.Button(center_frame, text="Quit Mobbing", style=unique_theme.button_style_id)
quit_button.grid(row=row_index, column=1, columnspan=4, sticky=N + E + W, padx=int(90*scale), pady=pad_y)
quit_button.bind("<Button-1>", lambda event: self.controller.quit_and_destroy_session())
row_index += 1
def get_extend_time_button_text(self):
minutes = self.settings_manager.get_timer_extension_minutes()
seconds = self.settings_manager.get_timer_extension_seconds()
return "Extend Time By {:0>2}:{:0>2} ({})".format(minutes, seconds,
self.controller.timer_extension_count - self.controller.extensions_used)
def mobber_list_change_callback(self, mobber_list, driver_index, navigator_index):
self.current_mobber_label['text'] = ""
self.next_mobber_label['text'] = ""
for index in range(0, mobber_list.__len__()):
name = mobber_list[index]
if index == driver_index:
self.current_mobber_label['text'] = "{} ".format(name)
if index == navigator_index:
self.next_mobber_label['text'] = "{}".format(name)
if self.settings_manager.get_general_enable_tips():
self.tip_text['text'] = self.tips_manager.get_random_tip()
def show_extend_time_button(self):
if self.settings_manager.get_timer_extension_enabled() and not self.settings_manager.get_randomize_randomize_next_driver():
if self.controller.extensions_used < self.controller.timer_extension_count:
self.extend_time_button["text"] = self.get_extend_time_button_text()
self.extend_time_button.grid()
else:
self.extend_time_button.grid_remove()
|
mit
| -553,520,416,050,646,300 | 56.443662 | 139 | 0.653181 | false |
libyal/libmsiecf
|
tests/pymsiecf_test_file.py
|
1
|
7245
|
#!/usr/bin/env python
#
# Python-bindings file type test script
#
# Copyright (C) 2009-2021, Joachim Metz <joachim.metz@gmail.com>
#
# Refer to AUTHORS for acknowledgements.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import argparse
import os
import sys
import unittest
import pymsiecf
class FileTypeTests(unittest.TestCase):
"""Tests the file type."""
def test_signal_abort(self):
"""Tests the signal_abort function."""
msiecf_file = pymsiecf.file()
msiecf_file.signal_abort()
def test_open(self):
"""Tests the open function."""
test_source = unittest.source
if not test_source:
raise unittest.SkipTest("missing source")
msiecf_file = pymsiecf.file()
msiecf_file.open(test_source)
with self.assertRaises(IOError):
msiecf_file.open(test_source)
msiecf_file.close()
with self.assertRaises(TypeError):
msiecf_file.open(None)
with self.assertRaises(ValueError):
msiecf_file.open(test_source, mode="w")
def test_open_file_object(self):
"""Tests the open_file_object function."""
test_source = unittest.source
if not test_source:
raise unittest.SkipTest("missing source")
if not os.path.isfile(test_source):
raise unittest.SkipTest("source not a regular file")
msiecf_file = pymsiecf.file()
with open(test_source, "rb") as file_object:
msiecf_file.open_file_object(file_object)
with self.assertRaises(IOError):
msiecf_file.open_file_object(file_object)
msiecf_file.close()
with self.assertRaises(TypeError):
msiecf_file.open_file_object(None)
with self.assertRaises(ValueError):
msiecf_file.open_file_object(file_object, mode="w")
def test_close(self):
"""Tests the close function."""
test_source = unittest.source
if not test_source:
raise unittest.SkipTest("missing source")
msiecf_file = pymsiecf.file()
with self.assertRaises(IOError):
msiecf_file.close()
def test_open_close(self):
"""Tests the open and close functions."""
test_source = unittest.source
if not test_source:
return
msiecf_file = pymsiecf.file()
# Test open and close.
msiecf_file.open(test_source)
msiecf_file.close()
# Test open and close a second time to validate clean up on close.
msiecf_file.open(test_source)
msiecf_file.close()
if os.path.isfile(test_source):
with open(test_source, "rb") as file_object:
# Test open_file_object and close.
msiecf_file.open_file_object(file_object)
msiecf_file.close()
# Test open_file_object and close a second time to validate clean up on close.
msiecf_file.open_file_object(file_object)
msiecf_file.close()
# Test open_file_object and close and dereferencing file_object.
msiecf_file.open_file_object(file_object)
del file_object
msiecf_file.close()
def test_set_ascii_codepage(self):
"""Tests the set_ascii_codepage function."""
supported_codepages = (
"ascii", "cp874", "cp932", "cp936", "cp949", "cp950", "cp1250",
"cp1251", "cp1252", "cp1253", "cp1254", "cp1255", "cp1256", "cp1257",
"cp1258")
msiecf_file = pymsiecf.file()
for codepage in supported_codepages:
msiecf_file.set_ascii_codepage(codepage)
unsupported_codepages = (
"iso-8859-1", "iso-8859-2", "iso-8859-3", "iso-8859-4", "iso-8859-5",
"iso-8859-6", "iso-8859-7", "iso-8859-8", "iso-8859-9", "iso-8859-10",
"iso-8859-11", "iso-8859-13", "iso-8859-14", "iso-8859-15",
"iso-8859-16", "koi8_r", "koi8_u")
for codepage in unsupported_codepages:
with self.assertRaises(RuntimeError):
msiecf_file.set_ascii_codepage(codepage)
def test_get_size(self):
"""Tests the get_size function and size property."""
test_source = unittest.source
if not test_source:
raise unittest.SkipTest("missing source")
msiecf_file = pymsiecf.file()
msiecf_file.open(test_source)
size = msiecf_file.get_size()
self.assertIsNotNone(size)
self.assertIsNotNone(msiecf_file.size)
msiecf_file.close()
def test_get_ascii_codepage(self):
"""Tests the get_ascii_codepage function and ascii_codepage property."""
test_source = unittest.source
if not test_source:
raise unittest.SkipTest("missing source")
msiecf_file = pymsiecf.file()
msiecf_file.open(test_source)
ascii_codepage = msiecf_file.get_ascii_codepage()
self.assertIsNotNone(ascii_codepage)
self.assertIsNotNone(msiecf_file.ascii_codepage)
msiecf_file.close()
def test_get_number_of_cache_directories(self):
"""Tests the get_number_of_cache_directories function and number_of_cache_directories property."""
test_source = unittest.source
if not test_source:
raise unittest.SkipTest("missing source")
msiecf_file = pymsiecf.file()
msiecf_file.open(test_source)
number_of_cache_directories = msiecf_file.get_number_of_cache_directories()
self.assertIsNotNone(number_of_cache_directories)
self.assertIsNotNone(msiecf_file.number_of_cache_directories)
msiecf_file.close()
def test_get_number_of_items(self):
"""Tests the get_number_of_items function and number_of_items property."""
test_source = unittest.source
if not test_source:
raise unittest.SkipTest("missing source")
msiecf_file = pymsiecf.file()
msiecf_file.open(test_source)
number_of_items = msiecf_file.get_number_of_items()
self.assertIsNotNone(number_of_items)
self.assertIsNotNone(msiecf_file.number_of_items)
msiecf_file.close()
def test_get_number_of_recovered_items(self):
"""Tests the get_number_of_recovered_items function and number_of_recovered_items property."""
test_source = unittest.source
if not test_source:
raise unittest.SkipTest("missing source")
msiecf_file = pymsiecf.file()
msiecf_file.open(test_source)
number_of_recovered_items = msiecf_file.get_number_of_recovered_items()
self.assertIsNotNone(number_of_recovered_items)
self.assertIsNotNone(msiecf_file.number_of_recovered_items)
msiecf_file.close()
if __name__ == "__main__":
argument_parser = argparse.ArgumentParser()
argument_parser.add_argument(
"source", nargs="?", action="store", metavar="PATH",
default=None, help="path of the source file.")
options, unknown_options = argument_parser.parse_known_args()
unknown_options.insert(0, sys.argv[0])
setattr(unittest, "source", options.source)
unittest.main(argv=unknown_options, verbosity=2)
|
lgpl-3.0
| -3,855,130,104,679,396,000 | 28.096386 | 102 | 0.683368 | false |
arkanis/minidyndns
|
references/minidns.py
|
1
|
3101
|
#!/usr/bin/env python
# From https://code.google.com/p/minidns
import sys
import socket
import fcntl
import struct
# DNSQuery class from http://code.activestate.com/recipes/491264-mini-fake-dns-server/
class DNSQuery:
def __init__(self, data):
self.data=data
self.domain=''
tipo = (ord(data[2]) >> 3) & 15 # Opcode bits
if tipo == 0: # Standard query
ini=12
lon=ord(data[ini])
while lon != 0:
self.domain+=data[ini+1:ini+lon+1]+'.'
ini+=lon+1
lon=ord(data[ini])
def respuesta(self, ip):
packet=''
if self.domain:
packet+=self.data[:2] + "\x81\x80"
packet+=self.data[4:6] + self.data[4:6] + '\x00\x00\x00\x00' # Questions and Answers Counts
packet+=self.data[12:] # Original Domain Name Question
packet+='\xc0\x0c' # Pointer to domain name
packet+='\x00\x01\x00\x01\x00\x00\x00\x3c\x00\x04' # Response type, ttl and resource data length -> 4 bytes
packet+=str.join('',map(lambda x: chr(int(x)), ip.split('.'))) # 4bytes of IP
return packet
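# Editor's note -- an illustrative sketch, not part of the original script.
# For a standard query the 12-byte header is followed by length-prefixed
# labels, so a question for "example.com." carries '\x07example\x03com\x00';
# DNSQuery(data).domain then yields 'example.com.' and respuesta('1.2.3.4')
# echoes the question back with a single A record pointing at 1.2.3.4.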
# get_ip_address code from http://code.activestate.com/recipes/439094-get-the-ip-address-associated-with-a-network-inter/
def get_ip_address(ifname):
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try:
return socket.inet_ntoa(fcntl.ioctl(
s.fileno(),
0x8915, # SIOCGIFADDR
struct.pack('256s', ifname[:15])
)[20:24])
except:
return None
def usage():
print ""
print "Usage:"
print ""
print "\t# minidns [ip | interface]"
print ""
print "Description:"
print ""
print "\tMiniDNS will respond to all DNS queries with a single IPv4 address."
print ""
print "\tYou may specify the IP address to be returned as the first argument on the command line:\n"
print "\t\t# minidns 1.2.3.4\n"
print "\tAlternatively, you may specify an interface name and MiniDNS will use the IP address currently assigned to that interface:\n"
print "\t\t# minidns eth0\n"
print "\tIf no interface or IP address is specified, the IP address of eth0 will be used."
print ""
sys.exit(1)
if __name__ == '__main__':
ip = None
iface = 'eth0'
if len(sys.argv) == 2:
if sys.argv[-1] == '-h' or sys.argv[-1] == '--help':
usage()
else:
if len(sys.argv[-1].split('.')) == 4:
ip=sys.argv[-1]
else:
iface = sys.argv[-1]
if ip is None:
ip = get_ip_address(iface)
if ip is None:
print "ERROR: Invalid IP address or interface name specified!"
usage()
try:
udps = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
udps.bind(('',53))
except Exception, e:
print "Failed to create socket on UDP port 53:", e
sys.exit(1)
print 'miniDNS :: * 60 IN A %s\n' % ip
try:
while 1:
data, addr = udps.recvfrom(1024)
p=DNSQuery(data)
udps.sendto(p.respuesta(ip), addr)
print 'Request: %s -> %s' % (p.domain, ip)
except KeyboardInterrupt:
print '\nBye!'
udps.close()
|
mit
| -9,193,250,370,974,597,000 | 27.712963 | 136 | 0.596259 | false |
ozgurgunes/django-filizver
|
filizver/utils/views.py
|
1
|
1468
|
# -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from django.views.generic import View, ListView
from django.http import HttpResponse
from django.utils import simplejson as json
from filizver.forms import TopicForm
class ExtraContextMixin(View):
"""
    A mixin that passes the ``extra_context`` dictionary into the template context.
"""
extra_context = {}
def get_context_data(self, **kwargs):
context = super(ExtraContextMixin, self).get_context_data(**kwargs)
context.update(self.extra_context)
return context
class AjaxResponseMixin(View):
"""
    A mixin that prefixes the ``template_name`` with an underscore (_) if the request is AJAX.
"""
def dispatch(self, request, *args, **kwargs):
if request.is_ajax():
folder, sep, file = self.template_name.rpartition('/')
self.template_name = folder + sep + '_' + file
return super(AjaxResponseMixin, self).dispatch(request, *args, **kwargs)
class JSONResponseMixin(object):
"""
A mixin that can be used to render a JSON response.
"""
response_class = HttpResponse
def render_to_response(self, context, **response_kwargs):
"""
Returns a JSON response, transforming 'context' to make the payload.
"""
response_kwargs['content_type'] = 'application/json'
        return self.response_class(json.dumps(context), **response_kwargs)
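# Editor's sketch, not part of the original module: one way the mixins above
# could be combined on a list view. The template name, the context key and the
# queryset-less ListView are assumptions made for illustration only.
class ExampleTopicListView(ExtraContextMixin, AjaxResponseMixin, ListView):
    template_name = 'filizver/topic_list.html'
    extra_context = {'topic_form': TopicForm}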
|
mit
| 2,325,083,346,132,843,500 | 30.234043 | 88 | 0.654632 | false |
alsoicode/django-maintenancemode-2
|
testproject/testproject/settings.py
|
1
|
4846
|
"""
Django settings for testproject project.
Generated by 'django-admin startproject' using Django 1.8.7.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import django
import os
DJANGO_MAJOR_VERSION = django.VERSION[0]
DJANGO_MINOR_VERSION = django.VERSION[1]
# Optionally, specify a custom 503 template path
# MAINTENANCE_503_TEMPLATE = 'errors/503.html'
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'k=t$%@*+@=5c57g&f^&8$)evztgb1%b4l23zt*!2e-1))3@vue'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
SITE_ID = 1
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.messages',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.staticfiles',
'app',
'maintenancemode',
)
if DJANGO_MAJOR_VERSION == 1 and DJANGO_MINOR_VERSION < 10:
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
else:
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
TEMPLATE_DIRECTORY = os.path.join(BASE_DIR, 'templates')
if DJANGO_MAJOR_VERSION >= 1:
# Templates
if DJANGO_MINOR_VERSION < 8 and DJANGO_MAJOR_VERSION == 1:
TEMPLATE_DIRS = (
TEMPLATE_DIRECTORY,
)
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
'django.core.context_processors.i18n',
'django.core.context_processors.request',
'django.core.context_processors.media',
'django.core.context_processors.static',
'django.contrib.messages.context_processors.messages',
)
else:
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [TEMPLATE_DIRECTORY],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
# Sessions
if DJANGO_MAJOR_VERSION == 1 and DJANGO_MINOR_VERSION == 7:
MIDDLEWARE_CLASSES += (
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
)
elif DJANGO_MAJOR_VERSION == 1:
MIDDLEWARE_CLASSES += (
'django.middleware.security.SecurityMiddleware',
)
if DJANGO_MAJOR_VERSION == 1 and DJANGO_MINOR_VERSION < 10:
MIDDLEWARE_CLASSES += ('maintenancemode.middleware.MaintenanceModeMiddleware',)
else:
MIDDLEWARE += ['maintenancemode.middleware.MaintenanceModeMiddleware']
ROOT_URLCONF = 'testproject.urls'
WSGI_APPLICATION = 'testproject.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
|
apache-2.0
| 4,587,498,070,320,832,000 | 28.730061 | 83 | 0.656211 | false |
yujikato/DIRAC
|
src/DIRAC/Resources/Computing/test/Test_SSHComputingElement.py
|
1
|
2483
|
#!/bin/env python
"""
tests for SSHComputingElement module
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import shutil
import subprocess32 as subprocess
import shlex
import pytest
import DIRAC
from DIRAC.Resources.Computing.SSHComputingElement import SSHComputingElement
from DIRAC.Resources.Computing.BatchSystems.executeBatch import executeBatchContent
@pytest.mark.parametrize("batchSystem", ["Condor", "GE", "Host", "LSF", "OAR", "SLURM", "Torque"])
def test_generateControlScript(batchSystem):
""" Test that the control script generated by the merging operation
between a BatchSystem and executeBatch.py is:
* complete: contains the content of both files
* executable and doesn't raise any syntax error.
Example: it may check that a __future__ import is not misplaced in the script due to the
merging of the files.
"""
ce = SSHComputingElement('Test_SSHCE')
# Change the batch system file used during the control script generation
ce.batchSystem = batchSystem
# Get the local control script
result = ce._generateControlScript()
assert result['OK'] is True
source = result['Value']
dest = 'execute_batch.py'
# Simulate operation done by the scpCall method
# Copy the local control script into the "remote" control script
# As the source can be composed of multiple files, we have to copy the content of each file
sources = source.split(' ')
with open(dest, "wb") as dst:
for sourceFile in sources:
with open(sourceFile, "rb") as src:
shutil.copyfileobj(src, dst)
# Test that the control script is complete
with open(dest, 'r') as dst:
dataDest = dst.read()
batchSystemDir = os.path.join(os.path.dirname(DIRAC.__file__), "Resources", "Computing", "BatchSystems")
batchSystemScript = os.path.join(batchSystemDir, '%s.py' % batchSystem)
with open(batchSystemScript, 'r') as bsc:
dataBatchSystemScript = bsc.read()
assert executeBatchContent in dataDest
assert dataBatchSystemScript in dataDest
# Test the execution of the remote control script
cmd = 'python -m py_compile %s' % dest
args = shlex.split(cmd)
process = subprocess.Popen(args, universal_newlines=True)
process.communicate()
assert process.returncode == 0
# Delete the control script and the .pyc file associated
os.remove(source)
os.remove(dest)
if os.path.isfile('%sc' % dest):
os.remove('%sc' % dest)
|
gpl-3.0
| -5,751,452,210,755,022,000 | 32.106667 | 106 | 0.72654 | false |
tempbottle/Nuitka
|
nuitka/nodes/ExecEvalNodes.py
|
1
|
10309
|
# Copyright 2015, Kay Hayen, mailto:kay.hayen@gmail.com
#
# Part of "Nuitka", an optimizing Python compiler that is compatible and
# integrates with CPython, but also works on its own.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" Nodes concern with exec and eval builtins.
These are the dynamic codes, and as such rather difficult. We would like
to eliminate or limit their impact as much as possible, but it's difficult
to do.
"""
from nuitka.Builtins import calledWithBuiltinArgumentNamesDecorator
from nuitka.utils import Utils
from .NodeBases import ExpressionChildrenHavingBase, StatementChildrenHavingBase
from .NodeMakingHelpers import (
convertNoneConstantToNone,
makeStatementOnlyNodesFromExpressions
)
class ExpressionBuiltinEval(ExpressionChildrenHavingBase):
kind = "EXPRESSION_BUILTIN_EVAL"
named_children = (
"source",
"globals",
"locals"
)
def __init__(self, source_code, globals_arg, locals_arg, source_ref):
ExpressionChildrenHavingBase.__init__(
self,
values = {
"source" : source_code,
"globals" : globals_arg,
"locals" : locals_arg,
},
source_ref = source_ref
)
getSourceCode = ExpressionChildrenHavingBase.childGetter("source")
getGlobals = ExpressionChildrenHavingBase.childGetter("globals")
getLocals = ExpressionChildrenHavingBase.childGetter("locals")
def computeExpression(self, constraint_collection):
# TODO: Attempt for constant values to do it.
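        # Returning (self, None, None) follows the (node, change_tag,
        # change_description) convention used elsewhere in this module and
        # means the node is left unchanged.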
return self, None, None
# Note: Python3 only so far.
if Utils.python_version >= 300:
class ExpressionBuiltinExec(ExpressionBuiltinEval):
kind = "EXPRESSION_BUILTIN_EXEC"
def __init__(self, source_code, globals_arg, locals_arg, source_ref):
ExpressionBuiltinEval.__init__(
self,
source_code = source_code,
globals_arg = globals_arg,
locals_arg = locals_arg,
source_ref = source_ref
)
def needsLocalsDict(self):
return False
def computeExpression(self, constraint_collection):
# TODO: Attempt for constant values to do it.
return self, None, None
def computeExpressionDrop(self, statement, constraint_collection):
if self.getParentVariableProvider().isEarlyClosure():
result = StatementExec(
source_code = self.getSourceCode(),
globals_arg = self.getGlobals(),
locals_arg = self.getLocals(),
source_ref = self.getSourceReference()
)
return result, "new_statements", """\
Replaced builtin exec call to exec statement in early closure context."""
else:
return statement, None, None
# Note: Python2 only
if Utils.python_version < 300:
class ExpressionBuiltinExecfile(ExpressionBuiltinEval):
kind = "EXPRESSION_BUILTIN_EXECFILE"
named_children = ("source", "globals", "locals")
def __init__(self, source_code, globals_arg, locals_arg, source_ref):
ExpressionBuiltinEval.__init__(
self,
source_code = source_code,
globals_arg = globals_arg,
locals_arg = locals_arg,
source_ref = source_ref
)
def needsLocalsDict(self):
return True
def computeExpressionDrop(self, statement, constraint_collection):
# In this case, the copy-back must be done and will only be done
# correctly by the code for exec statements.
provider = self.getParentVariableProvider()
if provider.isExpressionFunctionBody() and \
provider.isClassDictCreation():
result = StatementExec(
source_code = self.getSourceCode(),
globals_arg = self.getGlobals(),
locals_arg = self.getLocals(),
source_ref = self.getSourceReference()
)
return result, "new_statements", """\
Changed execfile to exec on class level."""
else:
return statement, None, None
class StatementExec(StatementChildrenHavingBase):
kind = "STATEMENT_EXEC"
named_children = (
"source",
"globals",
"locals"
)
def __init__(self, source_code, globals_arg, locals_arg, source_ref):
StatementChildrenHavingBase.__init__(
self,
values = {
"globals" : globals_arg,
"locals" : locals_arg,
"source" : source_code
},
source_ref = source_ref,
)
def setChild(self, name, value):
if name in ("globals", "locals"):
value = convertNoneConstantToNone(value)
return StatementChildrenHavingBase.setChild(self, name, value)
getSourceCode = StatementChildrenHavingBase.childGetter("source")
getGlobals = StatementChildrenHavingBase.childGetter("globals")
getLocals = StatementChildrenHavingBase.childGetter("locals")
def needsLocalsDict(self):
return self.getLocals().mayBeNone()
def computeStatement(self, constraint_collection):
constraint_collection.onExpression(
expression = self.getSourceCode()
)
source_code = self.getSourceCode()
if source_code.willRaiseException(BaseException):
result = source_code
return (
result,
"new_raise",
"""\
Exec statement raises implicitly when determining source code argument."""
)
constraint_collection.onExpression(
expression = self.getGlobals(),
allow_none = True
)
globals_arg = self.getGlobals()
if globals_arg is not None and \
globals_arg.willRaiseException(BaseException):
result = makeStatementOnlyNodesFromExpressions(
expressions = (
source_code,
globals_arg
)
)
return (
result,
"new_raise",
"""\
Exec statement raises implicitly when determining globals argument."""
)
constraint_collection.onExpression(
expression = self.getLocals(),
allow_none = True
)
locals_arg = self.getLocals()
if locals_arg is not None and \
locals_arg.willRaiseException(BaseException):
result = makeStatementOnlyNodesFromExpressions(
expressions = (
source_code,
globals_arg,
locals_arg
)
)
return (
result,
"new_raise",
"""\
Exec statement raises implicitly when determining locals argument."""
)
str_value = self.getSourceCode().getStrValue()
if False and str_value is not None:
# TODO: Don't forget to consider side effects of source code.
# TODO: This needs to be re-done.
exec_body = None
return (
exec_body,
"new_statements",
"In-lined constant exec statement."
)
return self, None, None
class StatementLocalsDictSync(StatementChildrenHavingBase):
kind = "STATEMENT_LOCALS_DICT_SYNC"
named_children = (
"locals",
)
@calledWithBuiltinArgumentNamesDecorator
def __init__(self, locals_arg, source_ref):
StatementChildrenHavingBase.__init__(
self,
values = {
"locals" : locals_arg,
},
source_ref = source_ref,
)
def computeStatement(self, constraint_collection):
if self.getParentVariableProvider().isPythonModule():
return None, "new_statements", "Removed sync back to locals without locals."
constraint_collection.removeAllKnowledge()
return self, None, None
getLocals = ExpressionChildrenHavingBase.childGetter("locals")
def mayRaiseException(self, exception_type):
return False
class ExpressionBuiltinCompile(ExpressionChildrenHavingBase):
kind = "EXPRESSION_BUILTIN_COMPILE"
named_children = (
"source",
"filename",
"mode",
"flags",
"dont_inherit",
"optimize"
)
def __init__(self, source_code, filename, mode, flags, dont_inherit,
optimize, source_ref):
ExpressionChildrenHavingBase.__init__(
self,
values = {
"source" : source_code,
"filename" : filename,
"mode" : mode,
"flags" : flags,
"dont_inherit" : dont_inherit,
"optimize" : optimize
},
source_ref = source_ref
)
getSourceCode = ExpressionChildrenHavingBase.childGetter("source")
getFilename = ExpressionChildrenHavingBase.childGetter("filename")
getMode = ExpressionChildrenHavingBase.childGetter("mode")
getFlags = ExpressionChildrenHavingBase.childGetter("flags")
getDontInherit = ExpressionChildrenHavingBase.childGetter("dont_inherit")
getOptimize = ExpressionChildrenHavingBase.childGetter("optimize")
def computeExpression(self, constraint_collection):
# TODO: Attempt for constant values to do it.
return self, None, None
|
apache-2.0
| -8,143,572,845,159,348,000 | 31.520505 | 88 | 0.588515 | false |
peterstace/project-euler
|
OLD_PY_CODE/project_euler_old_old/102/102.py
|
1
|
1554
|
"""
Three distinct points are plotted at random on a Cartesian plane, for which -1000 <= x, y <= 1000, such that a triangle is formed.
Consider the following two triangles:
A(-340,495), B(-153,-910), C(835,-947)
X(-175,41), Y(-421,-714), Z(574,-645)
It can be verified that triangle ABC contains the origin, whereas triangle XYZ does not.
Using triangles.txt (right click and 'Save Link/Target As...'), a 27K text file containing the co-ordinates of one thousand "random" triangles, find the number of triangles for which the interior contains the origin.
"""
def cross(a, b):
"""Takes 2 2D vectors a and b and adds 0 to them to make them 3D. The
cross product is then performed and the last dimension of the result is
returned (as a float).
"""
a1, a2 = a
b1, b2 = b
return a1*b2 - a2*b1
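# For example, cross((1, 0), (0, 1)) == 1 while cross((0, 1), (1, 0)) == -1;
# the sign tells on which side of the first vector the second one lies.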
def sub(a, b):
"""Calculate a - b for 2D vectors a and b."""
return a[0]-b[0], a[1]-b[1]
def contains_origin(a, b, c):
"""Checks if point x is inside the triangle formed by points a, b, and c."""
ab = sub(b, a)
bc = sub(c, b)
ca = sub(a, c)
if cross(ab, ca) * cross(ab, a) < 0: return False
if cross(bc, ab) * cross(bc, b) < 0: return False
if cross(ca, bc) * cross(ca, c) < 0: return False
return True
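# Quick sanity check against the two triangles quoted in the module docstring
# (left commented out so the script's behaviour is unchanged):
# assert contains_origin((-340, 495), (-153, -910), (835, -947))      # ABC
# assert not contains_origin((-175, 41), (-421, -714), (574, -645))   # XYZ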
def main():
total = 0
for line in open('triangles.txt'):
s = line.split(',')
a = int(s[0]), int(s[1])
b = int(s[2]), int(s[3])
c = int(s[4]), int(s[5])
if contains_origin(a, b, c):
total += 1
print total
main()
|
unlicense
| 7,183,141,705,756,104,000 | 30.08 | 216 | 0.60296 | false |
CiscoSystems/networking-cisco
|
networking_cisco/plugins/cisco/l3/schedulers/l3_routertype_aware_agent_scheduler.py
|
1
|
4496
|
# Copyright 2014 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from sqlalchemy import sql
from neutron.common import constants
from neutron.db import l3_agentschedulers_db
from neutron.db import l3_db
from neutron.scheduler import l3_agent_scheduler
from networking_cisco.plugins.cisco.db.l3 import l3_models
from networking_cisco.plugins.cisco.extensions import routertype
LOG = logging.getLogger(__name__)
AGENT_TYPE_L3 = constants.AGENT_TYPE_L3
class L3RouterTypeAwareScheduler(l3_agent_scheduler.L3Scheduler):
"""A router type aware l3 agent scheduler for Cisco router service plugin.
It schedules Neutron routers with router type representing network
namespace based routers to l3 agents.
"""
def _get_unscheduled_routers(self, context, plugin):
"""Get routers with no agent binding."""
# TODO(gongysh) consider the disabled agent's router
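        # ~sql.exists().where(...) acts as an anti-join: it matches only
        # routers that have no RouterL3AgentBinding row, i.e. unscheduled ones.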
no_agent_binding = ~sql.exists().where(
l3_db.Router.id ==
l3_agentschedulers_db.RouterL3AgentBinding.router_id)
# Modified to only include routers of network namespace type
ns_routertype_id = plugin.get_namespace_router_type_id(context)
query = context.session.query(l3_db.Router.id)
query = query.join(l3_models.RouterHostingDeviceBinding)
query = query.filter(
l3_models.RouterHostingDeviceBinding.router_type_id ==
ns_routertype_id, no_agent_binding)
unscheduled_router_ids = [router_id_[0] for router_id_ in query]
if unscheduled_router_ids:
return plugin.get_routers(
context, filters={'id': unscheduled_router_ids})
return []
def _filter_unscheduled_routers(self, context, plugin, routers):
"""Filter from list of routers the ones that are not scheduled."""
unscheduled_routers = []
for router in routers:
if (router[routertype.TYPE_ATTR] !=
plugin.get_namespace_router_type_id(context)):
# ignore non-namespace routers
continue
l3_agents = plugin.get_l3_agents_hosting_routers(
context, [router['id']])
if l3_agents:
LOG.debug('Router %(router_id)s has already been '
'hosted by L3 agent %(agent_id)s',
{'router_id': router['id'],
'agent_id': l3_agents[0]['id']})
else:
unscheduled_routers.append(router)
return unscheduled_routers
def schedule(self, plugin, context, router, candidates=None,
hints=None):
# Only network namespace based routers should be scheduled here
ns_routertype_id = plugin.get_namespace_router_type_id(context)
# Not very happy about these checks but since we want to work with
# existing l3 agent scheduler they cannot be avoided
if isinstance(router, dict):
router_type_id = router[routertype.TYPE_ATTR]
router_id = router['id']
else:
router_id = router
r = plugin.get_router(context, router_id)
router_type_id = r[routertype.TYPE_ATTR]
if router_type_id == ns_routertype_id:
# Do the traditional Neutron router scheduling
return plugin.l3agent_scheduler.schedule(plugin, context,
router_id, candidates)
else:
return
def _choose_router_agent(self, plugin, context, candidates):
return plugin.l3agent_scheduler._choose_router_agent(plugin, context,
candidates)
def _choose_router_agents_for_ha(self, plugin, context, candidates):
return plugin.l3agent_scheduler._choose_router_agents_for_ha(
plugin, context, candidates)
|
apache-2.0
| 486,952,411,445,596,800 | 42.650485 | 78 | 0.637011 | false |